├── .gitignore ├── .travis.yml ├── Cargo.lock ├── Cargo.toml ├── LICENSE ├── README.md ├── appveyor.yml ├── examples ├── extensions.rs ├── lex_tokens.rs └── parse_nodes.rs ├── export_doc.sh ├── src ├── engine │ └── mod.rs ├── environment │ └── mod.rs ├── error │ ├── engine.rs │ ├── mod.rs │ ├── runtime.rs │ └── template.rs ├── extension │ ├── core │ │ ├── error.rs │ │ ├── mod.rs │ │ └── token_parsers │ │ │ ├── mod.rs │ │ │ ├── parser_block.rs │ │ │ ├── parser_do.rs │ │ │ ├── parser_embed.rs │ │ │ ├── parser_extends.rs │ │ │ ├── parser_filter.rs │ │ │ ├── parser_flush.rs │ │ │ ├── parser_for.rs │ │ │ ├── parser_from.rs │ │ │ ├── parser_if.rs │ │ │ ├── parser_import.rs │ │ │ ├── parser_include.rs │ │ │ ├── parser_macro.rs │ │ │ ├── parser_set.rs │ │ │ ├── parser_spaceless.rs │ │ │ └── parser_use.rs │ ├── escaper │ │ └── mod.rs │ └── mod.rs ├── function │ └── mod.rs ├── instructions │ ├── compiler │ │ ├── body.rs │ │ ├── expr.rs │ │ ├── mod.rs │ │ └── module.rs │ └── mod.rs ├── lib.rs ├── loader.rs ├── mold │ └── mod.rs ├── nodes │ ├── mod.rs │ ├── node │ │ ├── body.rs │ │ ├── expr.rs │ │ ├── mod.rs │ │ └── module.rs │ ├── parser │ │ ├── body.rs │ │ ├── expr.rs │ │ ├── mod.rs │ │ └── module.rs │ └── token_parser.rs ├── operator │ └── mod.rs ├── tokens │ ├── lexer │ │ ├── delimiters.rs │ │ ├── iter.rs │ │ ├── matchers.rs │ │ ├── mod.rs │ │ └── options.rs │ ├── mod.rs │ └── token.rs └── value │ ├── mod.rs │ └── ops.rs ├── templates └── fos_login.html.twig └── tests ├── fixture_tests.rs ├── fixtures └── autoescape │ └── filename.test ├── lexer └── mod.rs ├── lexer_tests.rs ├── parser ├── array_expression.rs ├── can_only_assign_to_names.rs ├── mod.rs ├── named_and_default_args.rs ├── string_expression.rs └── support │ └── mod.rs ├── parser_tests.rs └── support └── mod.rs /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | Cargo.lock 3 | .DS_Store 4 | /.idea -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | language: rust 3 | 4 | env: 5 | global: 6 | - CRATE=twig 7 | 8 | matrix: 9 | allow_failures: 10 | - rust: nightly 11 | include: 12 | - rust: nightly 13 | env: DOC=true 14 | - rust: beta 15 | - rust: stable 16 | - rust: 1.3.0 17 | 18 | after_success: 19 | - /bin/bash export_doc.sh 20 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | [root] 2 | name = "twig" 3 | version = "0.1.0" 4 | dependencies = [ 5 | "byteorder 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", 6 | "difference 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", 7 | "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", 8 | "little 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", 9 | "log 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", 10 | "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)", 11 | "serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", 12 | "serde_json 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", 13 | "sha1 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", 14 | "term 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", 15 | "uuid 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", 16 | ] 17 | 18 | [[package]] 19 | 
name = "aho-corasick" 20 | version = "0.5.3" 21 | source = "registry+https://github.com/rust-lang/crates.io-index" 22 | dependencies = [ 23 | "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", 24 | ] 25 | 26 | [[package]] 27 | name = "aho-corasick" 28 | version = "0.6.3" 29 | source = "registry+https://github.com/rust-lang/crates.io-index" 30 | dependencies = [ 31 | "memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", 32 | ] 33 | 34 | [[package]] 35 | name = "bitflags" 36 | version = "0.7.0" 37 | source = "registry+https://github.com/rust-lang/crates.io-index" 38 | 39 | [[package]] 40 | name = "byteorder" 41 | version = "0.4.2" 42 | source = "registry+https://github.com/rust-lang/crates.io-index" 43 | 44 | [[package]] 45 | name = "conv" 46 | version = "0.3.3" 47 | source = "registry+https://github.com/rust-lang/crates.io-index" 48 | dependencies = [ 49 | "custom_derive 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", 50 | ] 51 | 52 | [[package]] 53 | name = "custom_derive" 54 | version = "0.1.7" 55 | source = "registry+https://github.com/rust-lang/crates.io-index" 56 | 57 | [[package]] 58 | name = "difference" 59 | version = "0.4.1" 60 | source = "registry+https://github.com/rust-lang/crates.io-index" 61 | dependencies = [ 62 | "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", 63 | ] 64 | 65 | [[package]] 66 | name = "dtoa" 67 | version = "0.4.2" 68 | source = "registry+https://github.com/rust-lang/crates.io-index" 69 | 70 | [[package]] 71 | name = "env_logger" 72 | version = "0.4.3" 73 | source = "registry+https://github.com/rust-lang/crates.io-index" 74 | dependencies = [ 75 | "log 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", 76 | "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", 77 | ] 78 | 79 | [[package]] 80 | name = "getopts" 81 | version = "0.2.14" 82 | source = "registry+https://github.com/rust-lang/crates.io-index" 83 | 84 | [[package]] 85 | name = "itoa" 86 | version = "0.3.4" 87 | source = "registry+https://github.com/rust-lang/crates.io-index" 88 | 89 | [[package]] 90 | name = "kernel32-sys" 91 | version = "0.2.1" 92 | source = "registry+https://github.com/rust-lang/crates.io-index" 93 | dependencies = [ 94 | "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", 95 | "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", 96 | ] 97 | 98 | [[package]] 99 | name = "lazy_static" 100 | version = "0.2.9" 101 | source = "registry+https://github.com/rust-lang/crates.io-index" 102 | 103 | [[package]] 104 | name = "libc" 105 | version = "0.1.12" 106 | source = "registry+https://github.com/rust-lang/crates.io-index" 107 | 108 | [[package]] 109 | name = "libc" 110 | version = "0.2.31" 111 | source = "registry+https://github.com/rust-lang/crates.io-index" 112 | 113 | [[package]] 114 | name = "little" 115 | version = "0.3.0" 116 | source = "registry+https://github.com/rust-lang/crates.io-index" 117 | dependencies = [ 118 | "byteorder 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", 119 | "log 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", 120 | ] 121 | 122 | [[package]] 123 | name = "log" 124 | version = "0.3.3" 125 | source = "registry+https://github.com/rust-lang/crates.io-index" 126 | dependencies = [ 127 | "libc 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", 128 | ] 129 | 130 | [[package]] 131 | name = "magenta" 132 | version = "0.1.1" 133 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 134 | dependencies = [ 135 | "conv 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", 136 | "magenta-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", 137 | ] 138 | 139 | [[package]] 140 | name = "magenta-sys" 141 | version = "0.1.1" 142 | source = "registry+https://github.com/rust-lang/crates.io-index" 143 | dependencies = [ 144 | "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", 145 | ] 146 | 147 | [[package]] 148 | name = "memchr" 149 | version = "0.1.11" 150 | source = "registry+https://github.com/rust-lang/crates.io-index" 151 | dependencies = [ 152 | "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", 153 | ] 154 | 155 | [[package]] 156 | name = "memchr" 157 | version = "1.0.1" 158 | source = "registry+https://github.com/rust-lang/crates.io-index" 159 | dependencies = [ 160 | "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", 161 | ] 162 | 163 | [[package]] 164 | name = "num-traits" 165 | version = "0.1.40" 166 | source = "registry+https://github.com/rust-lang/crates.io-index" 167 | 168 | [[package]] 169 | name = "rand" 170 | version = "0.3.16" 171 | source = "registry+https://github.com/rust-lang/crates.io-index" 172 | dependencies = [ 173 | "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", 174 | "magenta 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", 175 | ] 176 | 177 | [[package]] 178 | name = "regex" 179 | version = "0.1.80" 180 | source = "registry+https://github.com/rust-lang/crates.io-index" 181 | dependencies = [ 182 | "aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", 183 | "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", 184 | "regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", 185 | "thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", 186 | "utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", 187 | ] 188 | 189 | [[package]] 190 | name = "regex" 191 | version = "0.2.2" 192 | source = "registry+https://github.com/rust-lang/crates.io-index" 193 | dependencies = [ 194 | "aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)", 195 | "memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", 196 | "regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", 197 | "thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", 198 | "utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", 199 | ] 200 | 201 | [[package]] 202 | name = "regex-syntax" 203 | version = "0.3.9" 204 | source = "registry+https://github.com/rust-lang/crates.io-index" 205 | 206 | [[package]] 207 | name = "regex-syntax" 208 | version = "0.4.1" 209 | source = "registry+https://github.com/rust-lang/crates.io-index" 210 | 211 | [[package]] 212 | name = "rustc-serialize" 213 | version = "0.3.24" 214 | source = "registry+https://github.com/rust-lang/crates.io-index" 215 | 216 | [[package]] 217 | name = "serde" 218 | version = "1.0.15" 219 | source = "registry+https://github.com/rust-lang/crates.io-index" 220 | 221 | [[package]] 222 | name = "serde_json" 223 | version = "1.0.3" 224 | source = "registry+https://github.com/rust-lang/crates.io-index" 225 | dependencies = [ 226 | "dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", 227 | "itoa 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", 228 
| "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", 229 | "serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)", 230 | ] 231 | 232 | [[package]] 233 | name = "sha1" 234 | version = "0.1.1" 235 | source = "registry+https://github.com/rust-lang/crates.io-index" 236 | dependencies = [ 237 | "byteorder 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", 238 | ] 239 | 240 | [[package]] 241 | name = "term" 242 | version = "0.2.13" 243 | source = "registry+https://github.com/rust-lang/crates.io-index" 244 | dependencies = [ 245 | "kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", 246 | "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", 247 | ] 248 | 249 | [[package]] 250 | name = "thread-id" 251 | version = "2.0.0" 252 | source = "registry+https://github.com/rust-lang/crates.io-index" 253 | dependencies = [ 254 | "kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", 255 | "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", 256 | ] 257 | 258 | [[package]] 259 | name = "thread_local" 260 | version = "0.2.7" 261 | source = "registry+https://github.com/rust-lang/crates.io-index" 262 | dependencies = [ 263 | "thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", 264 | ] 265 | 266 | [[package]] 267 | name = "thread_local" 268 | version = "0.3.4" 269 | source = "registry+https://github.com/rust-lang/crates.io-index" 270 | dependencies = [ 271 | "lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", 272 | "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", 273 | ] 274 | 275 | [[package]] 276 | name = "unreachable" 277 | version = "1.0.0" 278 | source = "registry+https://github.com/rust-lang/crates.io-index" 279 | dependencies = [ 280 | "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", 281 | ] 282 | 283 | [[package]] 284 | name = "utf8-ranges" 285 | version = "0.1.3" 286 | source = "registry+https://github.com/rust-lang/crates.io-index" 287 | 288 | [[package]] 289 | name = "utf8-ranges" 290 | version = "1.0.0" 291 | source = "registry+https://github.com/rust-lang/crates.io-index" 292 | 293 | [[package]] 294 | name = "uuid" 295 | version = "0.1.18" 296 | source = "registry+https://github.com/rust-lang/crates.io-index" 297 | dependencies = [ 298 | "rand 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", 299 | "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", 300 | ] 301 | 302 | [[package]] 303 | name = "void" 304 | version = "1.0.2" 305 | source = "registry+https://github.com/rust-lang/crates.io-index" 306 | 307 | [[package]] 308 | name = "winapi" 309 | version = "0.2.5" 310 | source = "registry+https://github.com/rust-lang/crates.io-index" 311 | 312 | [[package]] 313 | name = "winapi-build" 314 | version = "0.1.1" 315 | source = "registry+https://github.com/rust-lang/crates.io-index" 316 | 317 | [metadata] 318 | "checksum aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ca972c2ea5f742bfce5687b9aef75506a764f61d37f8f649047846a9686ddb66" 319 | "checksum aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "500909c4f87a9e52355b26626d890833e9e1d53ac566db76c36faa984b889699" 320 | "checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d" 321 | "checksum byteorder 0.4.2 
(registry+https://github.com/rust-lang/crates.io-index)" = "96c8b41881888cc08af32d47ac4edd52bc7fa27fef774be47a92443756451304" 322 | "checksum conv 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "78ff10625fd0ac447827aa30ea8b861fead473bb60aeb73af6c1c58caf0d1299" 323 | "checksum custom_derive 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "ef8ae57c4978a2acd8b869ce6b9ca1dfe817bff704c220209fdef2c0b75a01b9" 324 | "checksum difference 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ffef4c144e881a906ed5bd6e1e749dc1955cd3f0c7969d3d34122a971981c5ea" 325 | "checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab" 326 | "checksum env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3ddf21e73e016298f5cb37d6ef8e8da8e39f91f9ec8b0df44b7deb16a9f8cd5b" 327 | "checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685" 328 | "checksum itoa 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8324a32baf01e2ae060e9de58ed0bc2320c9a2833491ee36cd3b4c414de4db8c" 329 | "checksum kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b5b5e7edf375e6d26243bde172f1d5ed1446f4a766fc9b7006e1fd27258243f1" 330 | "checksum lazy_static 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c9e5e58fa1a4c3b915a561a78a22ee0cac6ab97dca2504428bc1cb074375f8d5" 331 | "checksum libc 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "e32a70cf75e5846d53a673923498228bbec6a8624708a9ea5645f075d6276122" 332 | "checksum libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)" = "d1419b2939a0bc44b77feb34661583c7546b532b192feab36249ab584b86856c" 333 | "checksum little 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2f2d017773be9c1c980e17465e3185c67bff0804f1f994152af23445031b7028" 334 | "checksum log 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "fbb9a8cc55c114657af9fd2297fad569cb0d126851aa9a132b1e93b5789d67f9" 335 | "checksum magenta 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4bf0336886480e671965f794bc9b6fce88503563013d1bfb7a502c81fe3ac527" 336 | "checksum magenta-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "40d014c7011ac470ae28e2f76a02bfea4a8480f73e701353b49ad7a8d75f4699" 337 | "checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20" 338 | "checksum memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4" 339 | "checksum num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "99843c856d68d8b4313b03a17e33c4bb42ae8f6610ea81b28abe076ac721b9b0" 340 | "checksum rand 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)" = "eb250fd207a4729c976794d03db689c9be1d634ab5a1c9da9492a13d8fecbcdf" 341 | "checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f" 342 | "checksum regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1731164734096285ec2a5ec7fea5248ae2f5485b3feeb0115af4fda2183b2d1b" 343 | "checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = 
"f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957" 344 | "checksum regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad890a5eef7953f55427c50575c680c42841653abd2b028b68cd223d157f62db" 345 | "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" 346 | "checksum serde 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)" = "6a7046c9d4c6c522d10b2d098f9bebe2bef227e0e74044d8c1bfcf6b476af799" 347 | "checksum serde_json 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d243424e06f9f9c39e3cd36147470fd340db785825e367625f79298a6ac6b7ac" 348 | "checksum sha1 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a307a40d5834140e4213a6952483b84e9ad53bdcab918b7335a6e305e505a53c" 349 | "checksum term 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "7d4680517d27f8704f094356884e019de315d1a12acec97de9fa850900189289" 350 | "checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03" 351 | "checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5" 352 | "checksum thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1697c4b57aeeb7a536b647165a2825faddffb1d3bad386d507709bd51a90bb14" 353 | "checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" 354 | "checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f" 355 | "checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122" 356 | "checksum uuid 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "78c590b5bd79ed10aad8fb75f078a59d8db445af6c743e55c4a53227fc01c13f" 357 | "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" 358 | "checksum winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "dc3583688b861fcd83c2823d37cf2cd2446c233dd7ba3f97884d1a7302817537" 359 | "checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" 360 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | 3 | name = "twig" 4 | version = "0.1.0" 5 | description = "Twig templating engine for Rust; work in progress." 
6 | documentation = "http://nercury.github.io/twig-rs/twig/index.html" 7 | repository = "https://github.com/Nercury/twig-rs" 8 | authors = [ 9 | "Rust Twig Team", 10 | "Nerijus Arlauskas " 11 | ] 12 | license = "BSD-3-Clause" 13 | 14 | [dev-dependencies] 15 | env_logger = "0.4" 16 | serde = "1.0" 17 | serde_json = "1.0" 18 | difference = "0.4" 19 | term = "0.2" 20 | 21 | [dependencies] 22 | regex = "0.1.80" 23 | uuid = "0.1" 24 | log = "0.3" 25 | little = "0.3" 26 | sha1 = "0.1" 27 | byteorder = "^0.4" 28 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015 by the Rust Twig Team. 2 | Included work Copyright (c) 2009-2014 by the Twig Team. 3 | 4 | Some rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are 8 | met: 9 | 10 | * Redistributions of source code must retain the above copyright 11 | notice, this list of conditions and the following disclaimer. 12 | 13 | * Redistributions in binary form must reproduce the above 14 | copyright notice, this list of conditions and the following 15 | disclaimer in the documentation and/or other materials provided 16 | with the distribution. 17 | 18 | * The names of the contributors may not be used to endorse or 19 | promote products derived from this software without specific 20 | prior written permission. 21 | 22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 23 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 24 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 25 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 26 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 27 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 28 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 29 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 30 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 31 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 32 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 33 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Twig template engine for Rust 2 | 3 | [![Linux Build Status](https://travis-ci.org/Nercury/twig-rs.svg?branch=master)](https://travis-ci.org/Nercury/twig-rs) 4 | [![Windows Build status](https://ci.appveyor.com/api/projects/status/64gv1ytgvoutyve2/branch/master?svg=true)](https://ci.appveyor.com/project/Nercury/twig-rs/branch/master) 5 | 6 | [Read `twig-rs` library docs](http://nercury.github.io/twig-rs) 7 | 8 | Flexible, fast, secure template engine for Rust. 9 | The aim is to be 100% syntactically compatible with [Twig for PHP][twig-for-php]. 10 | The secondary aim is to provide functionally equivalent ways to extend 11 | and customize template with extensions. 12 | 13 | Note that at this moment this is very much work in progress, and is not usable. 14 | 15 | The goal of 1.0 version is to pass test suite functionally equivalent to Twig 2.0 ([issue #1](https://github.com/Nercury/twig-rs/issues/1)). 
16 | 17 | [twig-for-php]: http://twig.sensiolabs.org/ 18 | 19 | ## Motivation 20 | 21 | - Designers are familiar with Twig. 22 | - Reuse existing IDE support for Twig. 23 | 24 | ## Build Requirements 25 | 26 | - Minimum Rust version: 1.3.0. 27 | 28 | ## TODO list 29 | 30 | - The parser implementation is not finished ([issue #3](https://github.com/Nercury/twig-rs/issues/3)). 31 | - LLTL (low-level template language); the basics are implemented in the little-rs subproject ([issue #4](https://github.com/Nercury/twig-rs/issues/4)). 32 | 33 | ## Example of a working lexer 34 | 35 | Run the example that iterates over the template in [templates/fos_login.html.twig][tmp]: 36 | 37 | ```bash 38 | cargo run --example lex_tokens 39 | ``` 40 | 41 | It will print the list of tokens to the console: 42 | 43 | ``` 44 | Ok(Token { value: BlockStart, line_num: 1 }) 45 | Ok(Token { value: Name("extends"), line_num: 1 }) 46 | Ok(Token { value: String(FOSUserBundle::layout.html.twig), line_num: 1 }) 47 | Ok(Token { value: BlockEnd, line_num: 1 }) 48 | Ok(Token { value: Text("\n"), line_num: 2 }) 49 | Ok(Token { value: BlockStart, line_num: 3 }) 50 | ... 51 | ``` 52 | 53 | ## Example of a working parser 54 | 55 | Run the example that parses this template: 56 | 57 | ```twig 58 | test {{ var + 1 }} 59 | ``` 60 | 61 | ```bash 62 | cargo run --example parse_nodes 63 | ``` 64 | 65 | It will print the parsed module to the console: 66 | 67 | ``` 68 | Ok( 69 | Module { 70 | body: List { 71 | items: [ 72 | Text { 73 | value: "test ", 74 | line: 1 75 | }, 76 | Print { 77 | expr: Expr { 78 | line: 1, 79 | value: BinaryOperator { 80 | value: "+", 81 | left: Expr { 82 | line: 1, 83 | value: Name( 84 | "var" 85 | ) 86 | }, 87 | right: Expr { 88 | line: 1, 89 | value: Constant( 90 | Int( 91 | 1 92 | ) 93 | ) 94 | } 95 | } 96 | }, 97 | line: 1 98 | } 99 | ] 100 | } 101 | } 102 | ) 103 | ``` 104 | 105 | [tmp]: https://github.com/Nercury/twig-rs/blob/master/templates/fos_login.html.twig 106 | -------------------------------------------------------------------------------- /appveyor.yml: -------------------------------------------------------------------------------- 1 | environment: 2 | matrix: 3 | - RUST: 1.4.0 4 | TARGET: x86_64-pc-windows-msvc 5 | install: 6 | - ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-${env:RUST}-${env:TARGET}.exe" 7 | - rust-%RUST%-%TARGET%.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust" 8 | - SET PATH=%PATH%;C:\Program Files (x86)\Rust\bin 9 | - SET PATH=%PATH%;C:\MinGW\bin 10 | - rustc -V 11 | - cargo -V 12 | 13 | build: false 14 | 15 | test_script: 16 | - cargo test --verbose 17 | -------------------------------------------------------------------------------- /examples/extensions.rs: -------------------------------------------------------------------------------- 1 | extern crate twig; 2 | extern crate env_logger; 3 | 4 | use twig::operator::Operator; 5 | use twig::environment::Environment; 6 | use twig::tokens::Lexer; 7 | use twig::nodes::{ Parser, Parse, Module }; 8 | 9 | fn main() { 10 | env_logger::init().unwrap(); 11 | 12 | let mut env = Environment::default(); 13 | env.push_operators(vec![ 14 | Operator::new_binary_left("newop", 10, |_, _| unimplemented!()), 15 | ]); 16 | 17 | let env = env.init_all(); 18 | 19 | let lexer = Lexer::default(&env.lexing); 20 | let maybe_module = Module::parse( 21 | &mut Parser::new(&env.parsing, &mut lexer.tokens( 22 | "test {{ var newop 1 }}" 23 | )) 24 | ); 25 | 26 | println!("{:#?}", maybe_module); 27 | } 28 | 
-------------------------------------------------------------------------------- /examples/lex_tokens.rs: -------------------------------------------------------------------------------- 1 | extern crate twig; 2 | 3 | use twig::environment::Environment; 4 | use twig::tokens::Lexer; 5 | use std::fs::File; 6 | use std::io::Read; 7 | use std::env; 8 | 9 | /// Tokenises twig template and prints the tokens. 10 | fn main() { 11 | let mut path = env::current_dir().unwrap() 12 | .join("templates/fos_login.html.twig"); 13 | 14 | let mut f = File::open(&path) 15 | .ok() 16 | .expect(&format!("failed to open example template at {:?}", path)); 17 | let mut template = String::new(); 18 | f.read_to_string(&mut template).unwrap(); 19 | 20 | let env = Environment::default().init_all(); 21 | let lexer = Lexer::default(&env.lexing); 22 | 23 | for token in lexer.tokens(&template) { 24 | println!("{:?}", token); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /examples/parse_nodes.rs: -------------------------------------------------------------------------------- 1 | extern crate twig; 2 | extern crate env_logger; 3 | 4 | use twig::environment::Environment; 5 | use twig::tokens::Lexer; 6 | use twig::nodes::{ Parser, Parse, Module }; 7 | 8 | fn main() { 9 | env_logger::init().unwrap(); 10 | 11 | let env = Environment::default().init_all(); 12 | 13 | let lexer = Lexer::default(&env.lexing); 14 | let maybe_module = Module::parse( 15 | &mut Parser::new(&env.parsing, &mut lexer.tokens( 16 | "test {{ var + 1 }}" 17 | )) 18 | ); 19 | 20 | println!("{:#?}", maybe_module); 21 | } 22 | -------------------------------------------------------------------------------- /export_doc.sh: -------------------------------------------------------------------------------- 1 | if [ "$TRAVIS_BRANCH" != 'master' ]; then 2 | echo "skipping doc export because branch is not master" 3 | exit; 4 | fi 5 | 6 | if [ "$DOC" != "true" ]; then 7 | echo "skipping doc export" 8 | exit; 9 | fi 10 | 11 | echo "exporting docs" 12 | 13 | export PATH=$HOME/.local/bin:$PATH 14 | cargo doc 15 | echo "" > target/doc/index.html 16 | pip install ghp-import --user `whoami` 17 | ghp-import -n target/doc 18 | git push -qf https://${TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages 19 | -------------------------------------------------------------------------------- /src/engine/mod.rs: -------------------------------------------------------------------------------- 1 | use std::mem; 2 | use std::fmt; 3 | use std::collections::HashMap; 4 | use environment::{ Environment, CompiledEnvironment }; 5 | use error::Result; 6 | use tokens::Lexer; 7 | use loader::Loader; 8 | use nodes::parse; 9 | use value::{ Value, HashKey }; 10 | use instructions::compile; 11 | use std::io::{ Read, Write }; 12 | use std::error::Error; 13 | use little::interpreter::{ Interpreter }; 14 | use little::{ Fingerprint, Sha1Hasher, IdentifyValue, Template, Function, LittleValue, Build, Execute }; 15 | use sha1::Sha1; 16 | use std::result; 17 | 18 | impl LittleValue for Value { } 19 | 20 | struct FingerprintHasher { 21 | hasher: Sha1, 22 | } 23 | 24 | impl FingerprintHasher { 25 | fn new() -> FingerprintHasher { 26 | FingerprintHasher { 27 | hasher: Sha1::new() 28 | } 29 | } 30 | } 31 | 32 | impl Sha1Hasher for FingerprintHasher { 33 | /// Completes a round of hashing, producing the output hash generated. 
34 | fn finish(&self) -> Fingerprint { 35 | let mut buf = [0;20]; 36 | self.hasher.output(&mut buf); 37 | Fingerprint::new(buf) 38 | } 39 | 40 | /// Writes some data into this `Sha1Hasher` 41 | fn write(&mut self, bytes: &[u8]) { 42 | self.hasher.update(bytes); 43 | } 44 | } 45 | 46 | impl IdentifyValue for Value { 47 | fn identify_value(&self) -> Option { 48 | let mut hasher = FingerprintHasher::new(); 49 | match self.hash_value(&mut hasher) { 50 | Ok(_) => Some(hasher.finish()), 51 | Err(_) => None, 52 | } 53 | } 54 | 55 | fn hash_value(&self, hasher: &mut H) -> result::Result<(), ()> { 56 | match *self { 57 | Value::Null => { 58 | hasher.write(b"n"); 59 | }, 60 | Value::Int(ref v) => { 61 | hasher.write(b"i"); 62 | hasher.write_i64(*v); 63 | }, 64 | Value::Float(_) => return Err(()), 65 | Value::Str(ref v) => { 66 | hasher.write(b"s"); 67 | hasher.write(v.as_bytes()); 68 | }, 69 | Value::Array(ref v) => { 70 | hasher.write(b"a"); 71 | for i in v { 72 | try!(i.hash_value(hasher)); 73 | } 74 | }, 75 | Value::Hash(ref v) => { 76 | hasher.write(b"h"); 77 | for (k, v) in v { 78 | match *k { 79 | HashKey::Int(ref v) => { 80 | hasher.write(b"i"); 81 | hasher.write_i64(*v); 82 | }, 83 | HashKey::Str(ref v) => { 84 | hasher.write(b"s"); 85 | hasher.write(v.as_bytes()); 86 | }, 87 | } 88 | try!(v.hash_value(hasher)); 89 | } 90 | }, 91 | Value::Obj(_) | Value::Func(_) => return Err(()), 92 | }; 93 | Ok(()) 94 | } 95 | } 96 | 97 | impl Default for Value { 98 | fn default() -> Value { 99 | Value::Null 100 | } 101 | } 102 | 103 | impl fmt::Display for Value { 104 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 105 | match *self { 106 | Value::Int(ref v) => write!(f, "{}", v), 107 | Value::Float(v) => write!(f, "{}", v), 108 | Value::Str(ref v) => write!(f, "{}", v), 109 | _ => Ok(()), 110 | } 111 | } 112 | } 113 | 114 | /// Twig Engine. 115 | /// 116 | /// Given the specified environment settings, converts templates 117 | /// to output string. 
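///
/// A hypothetical usage sketch (not part of the original sources): the
/// `loader` value is assumed to be some `Loader` implementation, and `data`
/// an already prepared template data value.
///
/// ```ignore
/// // `loader` and `data` are assumptions, defined elsewhere.
/// let mut engine = Engine::new(loader, Environment::default());
/// let html = engine.get("fos_login.html.twig", data).unwrap();
/// println!("{}", html);
/// ```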
118 | pub struct Engine { 119 | loader: L, 120 | env: CompiledEnvironment, 121 | lexer: Option, 122 | functions: HashMap<&'static str, Box>>, 123 | } 124 | 125 | impl Engine { 126 | pub fn new(loader: L, env: Environment) -> Engine { 127 | let mut engine = Engine { 128 | loader: loader, 129 | env: env.init_all(), 130 | lexer: None, 131 | functions: HashMap::new(), 132 | }; 133 | 134 | engine.lexer = Some(Lexer::default(&engine.env.lexing)); 135 | 136 | engine 137 | } 138 | 139 | pub fn get>(&mut self, name: &str, data: D) 140 | -> Result 141 | { 142 | let lexer = self.take_lexer(); 143 | 144 | let compiled_template = try!(self.get_compiled_template(&lexer, name)); 145 | 146 | let funs = HashMap::new(); 147 | let mut i = Interpreter::new(); 148 | let p = match i.build("", compiled_template, &funs) { 149 | Ok(p) => p, 150 | Err(e) => panic!("not implemented - handle build_processor error {:?}", e), 151 | }; 152 | 153 | let mut res = String::new(); 154 | let mut interpreter = p.execute(Value::Null); 155 | loop { 156 | match interpreter.read_to_string(&mut res) { 157 | Err(e) => { 158 | match e.description() { 159 | "interupt" => { 160 | unreachable!("unimplemented interupt handling"); 161 | }, 162 | e => unreachable!("unimplemented other error {:?}", e), 163 | }; 164 | }, 165 | Ok(_) => break, 166 | } 167 | } 168 | 169 | self.return_lexer(lexer); 170 | 171 | Ok(res) 172 | } 173 | 174 | fn get_compiled_template<'r>(&mut self, lexer: &'r Lexer, name: &'r str) 175 | -> Result> 176 | { 177 | let source = try!(self.loader.get_source(name)); 178 | let mut tokens = lexer.tokens(&source); 179 | let module = try!(parse(&self.env.parsing, &mut tokens)); 180 | Ok(try!(compile((), &module))) 181 | } 182 | 183 | fn take_lexer(&mut self) -> Lexer { 184 | let mut ninja_lexer = None; 185 | mem::swap(&mut ninja_lexer, &mut self.lexer); 186 | 187 | match ninja_lexer { 188 | None => unreachable!("lexer is gone"), 189 | Some(lexer) => lexer, 190 | } 191 | } 192 | 193 | fn return_lexer(&mut self, lexer: Lexer) { 194 | let mut ninja_lexer = Some(lexer); 195 | mem::swap(&mut ninja_lexer, &mut self.lexer); 196 | } 197 | } 198 | -------------------------------------------------------------------------------- /src/environment/mod.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::collections::HashSet; 3 | 4 | use extension::{ Extension, CoreExtension }; 5 | use operator::{ Operator, OperatorKind, OperatorOptions }; 6 | use function::Function; 7 | use nodes::{ TokenParser, TokenParserExtension }; 8 | 9 | /// Environment configuration. 10 | pub struct Config { 11 | pub autoescape: String, 12 | } 13 | 14 | impl Config { 15 | pub fn default() -> Config { 16 | Config { 17 | autoescape: "html".into() 18 | } 19 | } 20 | 21 | pub fn from_hashmap(map: HashMap) -> Config { 22 | let default = Config::default(); 23 | 24 | Config { 25 | autoescape: map.get("autoescape").cloned().unwrap_or(default.autoescape), 26 | } 27 | } 28 | } 29 | 30 | /// Project configuration container. 
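///
/// Extensions register their operators, token parsers and functions here;
/// a sketch mirroring `examples/extensions.rs`:
///
/// ```ignore
/// let mut env = Environment::default();
/// env.push_operators(vec![
///     Operator::new_binary_left("newop", 10, |_, _| unimplemented!()),
/// ]);
/// let env = env.init_all();
/// ```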
31 | #[derive(Debug)] 32 | pub struct Environment { 33 | pub operators: Vec, 34 | pub token_parsers: Vec, 35 | pub functions: Vec, 36 | } 37 | 38 | impl Environment { 39 | 40 | pub fn new(config: Config) -> Environment { 41 | let mut staged = Environment { 42 | operators: Vec::new(), 43 | token_parsers: Vec::new(), 44 | functions: Vec::new(), 45 | }; 46 | 47 | CoreExtension::apply(&mut staged); 48 | 49 | staged 50 | } 51 | 52 | pub fn default() -> Environment { 53 | Environment::new(Config::default()) 54 | } 55 | 56 | pub fn init_all(self) -> CompiledEnvironment { 57 | CompiledEnvironment { 58 | lexing: LexingEnvironment { 59 | operators: { 60 | self.operators.iter() 61 | .filter_map(|i| match i.options.kind { 62 | OperatorKind::Unary { value, .. } => Some(value), 63 | OperatorKind::Binary { value, .. } => Some(value), 64 | OperatorKind::Other => None, 65 | }) 66 | .collect() 67 | }, 68 | }, 69 | parsing: ParsingEnvironment { 70 | operators: { 71 | self.operators.into_iter() 72 | .filter_map(|i| match i.options.kind { 73 | OperatorKind::Unary { value, .. } => Some((value, i.options)), 74 | OperatorKind::Binary { value, .. } => Some((value, i.options)), 75 | OperatorKind::Other => None, 76 | }) 77 | .collect() 78 | }, 79 | handlers: { 80 | self.token_parsers.into_iter() 81 | .map(|i| (i.tag, i.extension)) 82 | .collect() 83 | }, 84 | functions: { 85 | self.functions.iter() 86 | .map(|f| f.name) 87 | .collect() 88 | } 89 | }, 90 | } 91 | } 92 | 93 | pub fn push_operators>(&mut self, ops: I) { 94 | self.operators.extend(ops); 95 | } 96 | 97 | pub fn push_token_parsers>(&mut self, ops: I) { 98 | self.token_parsers.extend(ops); 99 | } 100 | 101 | pub fn push_functions>(&mut self, funs: I) { 102 | self.functions.extend(funs); 103 | } 104 | } 105 | 106 | pub struct LexingEnvironment { 107 | pub operators: HashSet<&'static str>, 108 | } 109 | 110 | pub struct ParsingEnvironment { 111 | pub operators: HashMap<&'static str, OperatorOptions>, 112 | pub handlers: HashMap<&'static str, Box>, 113 | pub functions: HashSet<&'static str>, 114 | } 115 | 116 | /// Project configuration container with all extensions applied. 
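///
/// Normally obtained by initialising an `Environment`, as in the bundled
/// examples (sketch):
///
/// ```ignore
/// let env = Environment::default().init_all();
/// let lexer = Lexer::default(&env.lexing);
/// ```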
117 | pub struct CompiledEnvironment { 118 | pub lexing: LexingEnvironment, 119 | pub parsing: ParsingEnvironment, 120 | } 121 | 122 | impl CompiledEnvironment { 123 | 124 | pub fn default() -> CompiledEnvironment { 125 | Environment::default() 126 | .init_all() 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /src/error/engine.rs: -------------------------------------------------------------------------------- 1 | use std::fmt; 2 | use std::path::PathBuf; 3 | use error::{ Error, Caused }; 4 | 5 | #[derive(Clone, Debug)] 6 | pub enum EngineError { 7 | TemplateNotFound { name: String, search_paths: Vec }, 8 | } 9 | 10 | impl EngineError { 11 | pub fn caused_by>(self, cause: I) -> Caused { 12 | Caused::new(self, Some(cause.into())) 13 | } 14 | } 15 | 16 | impl fmt::Display for EngineError { 17 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 18 | match *self { 19 | EngineError::TemplateNotFound { ref name, ref search_paths } => { 20 | if search_paths.len() == 0 { 21 | write!(f, "Template \"{}\" was not found", name) 22 | } else { 23 | try!(write!(f, "Template \"{}\" was not found, looked in ", name)); 24 | write!(f, "{:?}", search_paths) 25 | } 26 | } 27 | } 28 | } 29 | } 30 | 31 | impl From for Error { 32 | fn from(inner: EngineError) -> Error { 33 | Error::Engine(Caused::new(inner, None)) 34 | } 35 | } 36 | 37 | impl From> for Error { 38 | fn from(inner: Caused) -> Error { 39 | Error::Engine(inner) 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /src/error/mod.rs: -------------------------------------------------------------------------------- 1 | mod template; 2 | mod runtime; 3 | mod engine; 4 | 5 | use std::fmt; 6 | use std::result; 7 | 8 | pub use self::template::{ TemplateError, Received }; 9 | pub use self::runtime::{ RuntimeError, TracedRuntimeError, CastTarget, CastError }; 10 | pub use self::engine::{ EngineError }; 11 | 12 | #[derive(Clone, Debug)] 13 | pub enum Error { 14 | /// Error lexing or parsing the template source file. 15 | Template(At), 16 | /// Error reading files, compiling templates or writing cache. 17 | Engine(Caused), 18 | /// Error executing template. 19 | Runtime(TracedRuntimeError), 20 | } 21 | 22 | pub trait ExtensionError: fmt::Display { 23 | fn boxed_clone(&self) -> Box; 24 | } 25 | 26 | impl Clone for Box { 27 | fn clone(&self) -> Box { 28 | self.boxed_clone() 29 | } 30 | } 31 | 32 | impl fmt::Debug for Box { 33 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 34 | (&**self).fmt(f) 35 | } 36 | } 37 | 38 | impl fmt::Display for Error { 39 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 40 | match *self { 41 | Error::Template(ref e) => e.fmt(f), 42 | Error::Engine(ref e) => e.fmt(f), 43 | Error::Runtime(ref e) => e.fmt(f), 44 | } 45 | } 46 | } 47 | 48 | /// Adds optional cause to error. 49 | #[derive(Clone, Debug)] 50 | pub struct Caused { 51 | pub err: E, 52 | pub cause: Box>, 53 | } 54 | 55 | impl Caused { 56 | pub fn new(err: E, cause: Option) -> Caused { 57 | Caused { 58 | err: err, 59 | cause: Box::new(cause), 60 | } 61 | } 62 | } 63 | 64 | impl fmt::Display for Caused { 65 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 66 | match *self.cause { 67 | None => self.err.fmt(f), 68 | Some(ref cause) => { 69 | write!(f, "{}\ncaused by\n{}", self.err, cause) 70 | }, 71 | } 72 | } 73 | } 74 | 75 | /// Pins any error type to source file location. 
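///
/// Errors are typically pinned through an `at` helper; a sketch using
/// `TemplateError::at` from `src/error/template.rs`:
///
/// ```ignore
/// let err = TemplateError::UnclosedComment.at(3); // an `At` value pinned to line 3
/// ```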
76 | #[derive(Copy, Clone, Debug)] 77 | pub struct At { 78 | pub loc: Location, 79 | pub err: E, 80 | } 81 | 82 | impl At { 83 | pub fn new(err: E, loc: Location) -> At { 84 | At { 85 | loc: loc, 86 | err: err, 87 | } 88 | } 89 | } 90 | 91 | impl fmt::Display for At { 92 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 93 | let message = { 94 | let raw_message = format!("{}", self.err); 95 | let ends_with_dot = { 96 | let len = raw_message.len(); 97 | if len > 0 { 98 | if &raw_message[len - 1 ..] == "." { 99 | true 100 | } else { 101 | false 102 | } 103 | } else { 104 | false 105 | } 106 | }; 107 | 108 | if ends_with_dot { 109 | let len = raw_message.len(); 110 | let without_dot = &raw_message[0 .. len - 1]; 111 | 112 | format!("{} at line {}.", without_dot, self.loc.line) 113 | } else { 114 | format!("{} at line {}", raw_message, self.loc.line) 115 | } 116 | }; 117 | 118 | write!(f, "{}", message) 119 | } 120 | } 121 | 122 | /// Location record in source file. 123 | #[derive(Debug, Copy, Clone)] 124 | pub struct Location { 125 | pub line: usize, 126 | } 127 | 128 | impl Location { 129 | pub fn new(line: usize) -> Location { 130 | Location { line: line } 131 | } 132 | } 133 | 134 | pub type Result = result::Result; 135 | pub type TemplateResult = result::Result>; 136 | pub type RuntimeResult = result::Result; 137 | pub type TracedRuntimeResult = result::Result; 138 | -------------------------------------------------------------------------------- /src/error/runtime.rs: -------------------------------------------------------------------------------- 1 | use std::fmt; 2 | use error::{ Location, Error }; 3 | 4 | /// Stack trace record. 5 | #[derive(Clone, Debug)] 6 | pub enum TraceEntry { 7 | /// Trace source file change that caused the error. 8 | SourceFile { target: String }, 9 | /// Trace operator call that caused the error. 10 | Operator { target: String, extension: String }, 11 | /// Trace position in the last known source that caused the error. 12 | Position { from: Location }, 13 | } 14 | 15 | #[derive(Debug, Clone)] 16 | pub enum CastTarget { 17 | Int, 18 | Float, 19 | Number, 20 | } 21 | 22 | #[derive(Clone, Debug)] 23 | pub enum CastError { 24 | /// Float is infinite, target can not be infinite. 25 | FloatIsInfinite(f64), 26 | /// Float is not a number, target has to be a number. 27 | FloatNotANumber(f64), 28 | /// Float can not be represented, target does not support the range. 29 | FloatRange(f64), 30 | /// Null can not be represented. 31 | Null, 32 | /// Target can not be created from Array. 33 | Array, 34 | /// Target can not be created from Hash. 35 | Hash, 36 | /// Target can not be created from Object. 37 | Object, 38 | /// Target can not be created from Function. 39 | Function, 40 | /// Empty string can not be represented. 41 | StringEmpty, 42 | /// String could not be parsed as number. 
43 | StringNotNumerical(String), 44 | } 45 | 46 | impl fmt::Display for CastError { 47 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 48 | match *self { 49 | CastError::FloatIsInfinite(v) => write!(f, "Infinite float {:?}", v), 50 | CastError::FloatNotANumber(v) => write!(f, "Nonnumerical float {:?}", v), 51 | CastError::FloatRange(v) => write!(f, "Out-of-range float {:?}", v), 52 | CastError::Null => "Null".fmt(f), 53 | CastError::Array => "Array".fmt(f), 54 | CastError::Hash => "Associative array".fmt(f), 55 | CastError::Object => "Object".fmt(f), 56 | CastError::Function => "Function".fmt(f), 57 | CastError::StringEmpty => "Empty string".fmt(f), 58 | CastError::StringNotNumerical(ref v) => write!(f, "Nonnumerical string {:?}", v), 59 | } 60 | } 61 | } 62 | 63 | #[derive(Clone, Debug)] 64 | /// Runtime error message. 65 | pub enum RuntimeError { 66 | /// Callable invoked with argument count that does not match defined count. 67 | InvalidArgumentCount { defined: usize, given: usize }, 68 | /// Tried to access object property that does not exist. 69 | ObjectHasNoProperty(String), 70 | /// Tried to access object method that does not exist. 71 | ObjectHasNoMethod(String), 72 | /// Tried to access object property but it was a method. 73 | ObjectPropertyIsNotMethod(String), 74 | /// Tried to access object method but it was a property. 75 | ObjectMethodIsNotProperty(String), 76 | /// Tried to call object method with wrong argument count. 77 | ObjectMethodArgumentMismatch { name: String, defined: u16, given: u16 }, 78 | /// Value casting error. 79 | ImpossibleCast { target: CastTarget, reason: CastError }, 80 | } 81 | 82 | impl RuntimeError { 83 | pub fn at(self, stack_trace: Vec) -> TracedRuntimeError { 84 | TracedRuntimeError { 85 | message: self, 86 | stack_trace: stack_trace 87 | } 88 | } 89 | } 90 | 91 | impl fmt::Display for RuntimeError { 92 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 93 | match *self { 94 | RuntimeError::InvalidArgumentCount { ref defined, ref given } => { 95 | write!(f, "Target requires {} arguments, called with {}", defined, given) 96 | }, 97 | RuntimeError::ObjectHasNoProperty(ref name) => { 98 | write!(f, "Object has no property {:?}", name) 99 | }, 100 | RuntimeError::ObjectHasNoMethod(ref name) => { 101 | write!(f, "Object has no method {:?}", name) 102 | }, 103 | RuntimeError::ObjectPropertyIsNotMethod(ref name) => { 104 | write!(f, "Property is not a method {:?}", name) 105 | }, 106 | RuntimeError::ObjectMethodIsNotProperty(ref name) => { 107 | write!(f, "Method is not a property {:?}", name) 108 | }, 109 | RuntimeError::ObjectMethodArgumentMismatch { ref name, ref defined, ref given } => { 110 | write!(f, "Method {:?} requires {} arguments, called with {}", name, defined, given) 111 | }, 112 | RuntimeError::ImpossibleCast { ref target, ref reason } => { 113 | write!(f, "{} is not {}", reason, match *target { 114 | CastTarget::Float => "a float", 115 | CastTarget::Int => "an integer", 116 | CastTarget::Number => "a number", 117 | }) 118 | } 119 | } 120 | } 121 | } 122 | 123 | /// Runtime error with stack trace. 
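///
/// A sketch of constructing one directly (the property name is illustrative,
/// not from the original sources):
///
/// ```ignore
/// let err = TracedRuntimeError::new(RuntimeError::ObjectHasNoProperty("title".into()));
/// ```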
124 | #[derive(Clone, Debug)] 125 | pub struct TracedRuntimeError { 126 | pub message: RuntimeError, 127 | pub stack_trace: Vec, 128 | } 129 | 130 | impl TracedRuntimeError { 131 | pub fn new(message: RuntimeError) -> TracedRuntimeError { 132 | TracedRuntimeError { 133 | message: message, 134 | stack_trace: Vec::new(), 135 | } 136 | } 137 | } 138 | 139 | impl fmt::Display for TracedRuntimeError { 140 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 141 | write!(f, "{:?}", self.message) 142 | } 143 | } 144 | 145 | impl From for Error { 146 | fn from(inner: TracedRuntimeError) -> Error { 147 | Error::Runtime(inner) 148 | } 149 | } 150 | -------------------------------------------------------------------------------- /src/error/template.rs: -------------------------------------------------------------------------------- 1 | use std::fmt; 2 | use tokens::TokenValue; 3 | use error::{ Error, ExtensionError, At, Location }; 4 | 5 | #[derive(Clone, Debug)] 6 | pub enum Received { 7 | Token(TokenValue), 8 | EndOfStream, 9 | } 10 | 11 | #[derive(Clone, Debug)] 12 | pub enum TemplateError { 13 | UnexpectedEndOfTemplate, 14 | ExpectedTokenTypeButReceived((TokenValue, Received)), 15 | UnexpectedTokenValue(TokenValue), 16 | ExpectedOtherTokenValue((TokenValue, TokenValue)), 17 | ExpectedArrayElement, 18 | ArrayValueMustBeFollowedByComma, 19 | ArrayNotClosed, 20 | ExpectedHashElement, 21 | HashValueMustBeFollowedByComma, 22 | InvalidHashKey { unexpected: TokenValue }, 23 | HashKeyMustBeFollowedByColon, 24 | HashNotClosed, 25 | ExpectedNameOrNumber, 26 | ListOfArgumentsMustBeginWithParenthesis, 27 | ArgumentsMustBeSeparatedByComma, 28 | ListOfArgumentsMustCloseWithParenthesis, 29 | Unclosed(String), 30 | UnclosedComment, 31 | UnclosedBlock(String), 32 | Unexpected(String), 33 | UnexpectedCharacter(String), 34 | ParenthesisNotClosed, 35 | MustStartWithTagName, 36 | DefaultValueForArgumentMustBeConstant, 37 | ParameterNameMustBeAString { given: String }, 38 | TemplateNotFound(String), 39 | CustomError(Box), 40 | } 41 | 42 | impl TemplateError { 43 | pub fn at(self, line: usize) -> At { 44 | At::new(self, Location::new(line)) 45 | } 46 | } 47 | 48 | impl fmt::Display for TemplateError { 49 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 50 | match *self { 51 | TemplateError::UnexpectedEndOfTemplate => write!(f, "Unexpected end of template"), 52 | TemplateError::ExpectedTokenTypeButReceived((ref token, ref received)) => { 53 | let (english_name, _) = token.get_english(); 54 | match *received { 55 | Received::EndOfStream => write!(f, "Expected token \"{}\" but received the end of stream", english_name), 56 | Received::Token(ref other) => { 57 | let (other_english_name, value) = other.get_english(); 58 | match value { 59 | Some(value) => write!(f, "Expected \"{}\" but received \"{}\" with value {:?}", english_name, other_english_name, value), 60 | None => write!(f, "Expected \"{}\" but received \"{}\"", english_name, other_english_name), 61 | } 62 | }, 63 | } 64 | }, 65 | TemplateError::ExpectedOtherTokenValue((ref token, ref other)) => { 66 | let unexpected_message = format!("{}", TemplateError::UnexpectedTokenValue(token.clone())); 67 | let (other_english_name, other_value) = other.get_english(); 68 | match other_value { 69 | Some(value) => write!(f, "{} (\"{}\" expected with value {:?})", unexpected_message, other_english_name, value), 70 | None => write!(f, "{} (\"{}\" expected)", unexpected_message, other_english_name), 71 | } 72 | }, 73 | TemplateError::ExpectedArrayElement => write!(f, 
"An array element was expected"), 74 | TemplateError::ArrayValueMustBeFollowedByComma => write!(f, "An array element must be followed by a comma"), 75 | TemplateError::ArrayNotClosed => write!(f, "An opened array is not properly closed"), 76 | TemplateError::ExpectedHashElement => write!(f, "A hash element was expected"), 77 | TemplateError::HashValueMustBeFollowedByComma => write!(f, "A hash value must be followed by a comma"), 78 | TemplateError::InvalidHashKey { ref unexpected } => { 79 | let (english_name, value) = unexpected.get_english(); 80 | match value { 81 | Some(value) => write!(f, "A hash key must be a quoted string, a number, a name, or an expression enclosed in parentheses: unexpected token \"{}\" of value {:?}", english_name, value), 82 | None => write!(f, "A hash key must be a quoted string, a number, a name, or an expression enclosed in parentheses: unexpected token \"{}\"", english_name), 83 | } 84 | }, 85 | TemplateError::HashKeyMustBeFollowedByColon => write!(f, "A hash key must be followed by a colon (:)"), 86 | TemplateError::HashNotClosed => write!(f, "An opened hash is not properly closed"), 87 | TemplateError::ExpectedNameOrNumber => write!(f, "Expected name or number"), 88 | TemplateError::ListOfArgumentsMustBeginWithParenthesis => write!(f, "A list of arguments must begin with an opening parenthesis"), 89 | TemplateError::ArgumentsMustBeSeparatedByComma => write!(f, "Arguments must be separated by a comma"), 90 | TemplateError::ListOfArgumentsMustCloseWithParenthesis => write!(f, "A list of arguments must be closed by a parenthesis"), 91 | TemplateError::UnexpectedTokenValue(ref token) => { 92 | let (english_name, value) = token.get_english(); 93 | match value { 94 | Some(value) => write!(f, "Unexpected token \"{}\" of value {:?}", english_name, value), 95 | None => write!(f, "Unexpected token \"{}\"", english_name), 96 | } 97 | }, 98 | TemplateError::Unclosed(ref s) => write!(f, "Unclosed \"{}\"", s), 99 | TemplateError::UnclosedComment => write!(f, "Unclosed comment"), 100 | TemplateError::UnclosedBlock(ref s) => write!(f, "Unexpected end of file: Unclosed \"{}\" block", s), 101 | TemplateError::Unexpected(ref s) => write!(f, "Unexpected \"{}\"", s), 102 | TemplateError::UnexpectedCharacter(ref s) => write!(f, "Unexpected character \"{}\"", s), 103 | TemplateError::ParenthesisNotClosed => write!(f, "An opened parenthesis is not properly closed"), 104 | TemplateError::MustStartWithTagName => write!(f, "A block must start with a tag name"), 105 | TemplateError::DefaultValueForArgumentMustBeConstant => write!(f, "A default value for an argument must be a constant (a boolean, a string, a number, or an array)."), 106 | TemplateError::ParameterNameMustBeAString { ref given } => write!(f, "A parameter name must be a string, \"{}\" given", given), 107 | TemplateError::TemplateNotFound(ref name) => write!(f, "Template \"{}\" was not found", name), 108 | TemplateError::CustomError(ref e) => write!(f, "{}", e), 109 | } 110 | } 111 | } 112 | 113 | impl From> for Error { 114 | fn from(inner: At) -> Error { 115 | Error::Template(inner) 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /src/extension/core/error.rs: -------------------------------------------------------------------------------- 1 | use std::convert::From; 2 | use std::fmt; 3 | use error::{ ExtensionError, Location, At, TemplateError }; 4 | 5 | #[derive(Debug, Clone)] 6 | pub enum CoreTemplateError { 7 | OnlyVariablesCanBeAssignedTo, 8 | ExpectedEndmacroName 
{ expected: String, given: String }, 9 | CanNotAssignTo(String), 10 | } 11 | 12 | impl CoreTemplateError { 13 | pub fn at(self, line: usize) -> At { 14 | At::new(self, Location::new(line)) 15 | } 16 | } 17 | 18 | impl ExtensionError for CoreTemplateError { 19 | fn boxed_clone(&self) -> Box { 20 | Box::new(self.clone()) 21 | } 22 | } 23 | 24 | impl fmt::Display for CoreTemplateError { 25 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 26 | match *self { 27 | CoreTemplateError::OnlyVariablesCanBeAssignedTo => write!(f, "Only variables can be assigned to"), 28 | CoreTemplateError::ExpectedEndmacroName { ref expected, ref given } => write!(f, "Expected endmacro for macro \"{}\" (but \"{}\" given)", expected, given), 29 | CoreTemplateError::CanNotAssignTo(ref v) => write!(f, "You cannot assign a value to \"{}\"", v), 30 | } 31 | } 32 | } 33 | 34 | impl From> for At { 35 | fn from(At { loc: Location { line }, err }: At) -> At { 36 | TemplateError::CustomError(Box::new(err)) 37 | .at(line) 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/extension/core/mod.rs: -------------------------------------------------------------------------------- 1 | use environment::Environment; 2 | use extension::Extension; 3 | use operator::Operator; 4 | use nodes::TokenParser; 5 | use function::{ Function, Arg }; 6 | use little::Instruction; 7 | use instructions::CompiledExpression; 8 | 9 | pub mod token_parsers; 10 | pub mod error; 11 | 12 | pub struct CoreExtension; 13 | 14 | impl Extension for CoreExtension { 15 | fn apply(env: &mut Environment) { 16 | env.push_operators(vec![ 17 | Operator::new_unary("not", 50, |_| unimplemented!()), 18 | Operator::new_unary("-", 500, |_| unimplemented!()), 19 | Operator::new_unary("+", 500, |_| unimplemented!()), 20 | 21 | Operator::new_binary_left("or" , 10, |_, _| unimplemented!()), 22 | Operator::new_binary_left("and" , 15, |_, _| unimplemented!()), 23 | Operator::new_binary_left("b-or" , 16, |_, _| unimplemented!()), 24 | Operator::new_binary_left("b-xor" , 17, |_, _| unimplemented!()), 25 | Operator::new_binary_left("b-and" , 18, |_, _| unimplemented!()), 26 | Operator::new_binary_left("==" , 20, |_, _| unimplemented!()), 27 | Operator::new_binary_left("!=" , 20, |_, _| unimplemented!()), 28 | Operator::new_binary_left("<" , 20, |_, _| unimplemented!()), 29 | Operator::new_binary_left(">" , 20, |_, _| unimplemented!()), 30 | Operator::new_binary_left(">=" , 20, |_, _| unimplemented!()), 31 | Operator::new_binary_left("<=" , 20, |_, _| unimplemented!()), 32 | Operator::new_binary_left("not in" , 20, |_, _| unimplemented!()), 33 | Operator::new_binary_left("in" , 20, |_, _| unimplemented!()), 34 | Operator::new_binary_left("matches" , 20, |_, _| unimplemented!()), 35 | Operator::new_binary_left("starts with", 20, |_, _| unimplemented!()), 36 | Operator::new_binary_left("ends with" , 20, |_, _| unimplemented!()), 37 | Operator::new_binary_left(".." 
, 25, |_, _| unimplemented!()), 38 | Operator::new_binary_left("+" , 30, |_, _| unimplemented!()), 39 | Operator::new_binary_left("-" , 30, |_, _| unimplemented!()), 40 | Operator::new_binary_left("~" , 40, |_, _| unimplemented!()), 41 | Operator::new_binary_left("*" , 60, |_, _| unimplemented!()), 42 | Operator::new_binary_left("/" , 60, |_, _| unimplemented!()), 43 | Operator::new_binary_left("//" , 60, |_, _| unimplemented!()), 44 | Operator::new_binary_left("%" , 60, |_, _| unimplemented!()), 45 | Operator::new_binary_left("is" , 100, |_, _| unimplemented!()), 46 | Operator::new_binary_left("is not" , 100, |_, _| unimplemented!()), 47 | 48 | Operator::new_binary_right("**" , 200, |_, _| unimplemented!()), 49 | ]); 50 | 51 | env.push_token_parsers(vec![ 52 | TokenParser::new(token_parsers::For::new()), 53 | TokenParser::new(token_parsers::If::new()), 54 | TokenParser::new(token_parsers::Extends::new()), 55 | TokenParser::new(token_parsers::Include::new()), 56 | TokenParser::new(token_parsers::Block::new()), 57 | TokenParser::new(token_parsers::Use::new()), 58 | TokenParser::new(token_parsers::Filter::new()), 59 | TokenParser::new(token_parsers::Macro::new()), 60 | TokenParser::new(token_parsers::Import::new()), 61 | TokenParser::new(token_parsers::From::new()), 62 | TokenParser::new(token_parsers::Set::new()), 63 | TokenParser::new(token_parsers::Spaceless::new()), 64 | TokenParser::new(token_parsers::Flush::new()), 65 | TokenParser::new(token_parsers::Do::new()), 66 | TokenParser::new(token_parsers::Embed::new()), 67 | ]); 68 | 69 | env.push_functions(vec![ 70 | Function::new_static("include", vec![Arg::Anon], |staging| { 71 | //staging.instr(Instruction::Push(staging.use_str_const("include"))); 72 | //staging.instr(Instruction::Push(staging.use_str_const("include"))); 73 | Ok(CompiledExpression::empty("include")) 74 | }) 75 | ]); 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /src/extension/core/token_parsers/mod.rs: -------------------------------------------------------------------------------- 1 | mod parser_for; 2 | mod parser_if; 3 | mod parser_extends; 4 | mod parser_include; 5 | mod parser_block; 6 | mod parser_use; 7 | mod parser_filter; 8 | mod parser_macro; 9 | mod parser_import; 10 | mod parser_from; 11 | mod parser_set; 12 | mod parser_spaceless; 13 | mod parser_flush; 14 | mod parser_do; 15 | mod parser_embed; 16 | 17 | pub use self::parser_for::For; 18 | pub use self::parser_if::If; 19 | pub use self::parser_extends::Extends; 20 | pub use self::parser_include::Include; 21 | pub use self::parser_block::Block; 22 | pub use self::parser_use::Use; 23 | pub use self::parser_filter::Filter; 24 | pub use self::parser_macro::Macro; 25 | pub use self::parser_import::Import; 26 | pub use self::parser_from::From; 27 | pub use self::parser_set::Set; 28 | pub use self::parser_spaceless::Spaceless; 29 | pub use self::parser_flush::Flush; 30 | pub use self::parser_do::Do; 31 | pub use self::parser_embed::Embed; 32 | 33 | use nodes::Parser; 34 | use nodes::expr::{ Expr, ExprValue }; 35 | use tokens::TokenValueRef; 36 | use super::error::*; 37 | use error::TemplateResult; 38 | 39 | const INVALID_LVALUES: [&'static str; 3] = ["true", "false", "none"]; 40 | 41 | pub fn parse_assignment_expression<'p, 'c>(parser: &mut Parser<'p, 'c>) 42 | -> TemplateResult>> 43 | { 44 | trace!("parse_assignment_expression"); 45 | 46 | let mut targets = Vec::new(); 47 | loop { 48 | let token = try!(parser.current()); 49 | let name = match token.value { 50 
| TokenValueRef::Name(name) => { 51 | try!(parser.next()); 52 | name 53 | }, 54 | _ => return Err( 55 | CoreTemplateError::OnlyVariablesCanBeAssignedTo 56 | .at(token.line) 57 | .into() 58 | ), 59 | }; 60 | 61 | if INVALID_LVALUES.contains(&name) { 62 | return Err( 63 | CoreTemplateError::CanNotAssignTo(name.into()) 64 | .at(token.line) 65 | .into() 66 | ) 67 | } 68 | 69 | targets.push(Expr::new_at(ExprValue::AssignName(name), token.line)); 70 | 71 | if !try!(parser.skip_to_next_if(TokenValueRef::Punctuation(','))) { 72 | break; 73 | } 74 | } 75 | 76 | Ok(targets) 77 | } 78 | -------------------------------------------------------------------------------- /src/extension/core/token_parsers/parser_block.rs: -------------------------------------------------------------------------------- 1 | use tokens::TokenRef; 2 | use nodes::{ Parser, TokenParserExtension }; 3 | use nodes::body::Body; 4 | use error::TemplateResult; 5 | 6 | pub struct Block; 7 | 8 | impl Block { 9 | pub fn new() -> Block { 10 | Block 11 | } 12 | } 13 | 14 | impl TokenParserExtension for Block { 15 | fn parse<'p, 'c>(&self, parser: &mut Parser<'p, 'c>, token: TokenRef<'c>) 16 | -> TemplateResult>> 17 | { 18 | unreachable!("not implemented Block::parse") 19 | } 20 | 21 | fn get_tag(&self) -> &'static str { 22 | "block" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/extension/core/token_parsers/parser_do.rs: -------------------------------------------------------------------------------- 1 | use tokens::TokenRef; 2 | use nodes::{ Parser, TokenParserExtension }; 3 | use nodes::body::Body; 4 | use error::TemplateResult; 5 | 6 | pub struct Do; 7 | 8 | impl Do { 9 | pub fn new() -> Do { 10 | Do 11 | } 12 | } 13 | 14 | impl TokenParserExtension for Do { 15 | fn parse<'p, 'c>(&self, parser: &mut Parser<'p, 'c>, token: TokenRef<'c>) 16 | -> TemplateResult>> 17 | { 18 | unreachable!("not implemented Do::parse") 19 | } 20 | 21 | fn get_tag(&self) -> &'static str { 22 | "do" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/extension/core/token_parsers/parser_embed.rs: -------------------------------------------------------------------------------- 1 | use tokens::TokenRef; 2 | use nodes::{ Parser, TokenParserExtension }; 3 | use nodes::body::Body; 4 | use error::TemplateResult; 5 | 6 | pub struct Embed; 7 | 8 | impl Embed { 9 | pub fn new() -> Embed { 10 | Embed 11 | } 12 | } 13 | 14 | impl TokenParserExtension for Embed { 15 | fn parse<'p, 'c>(&self, parser: &mut Parser<'p, 'c>, token: TokenRef<'c>) 16 | -> TemplateResult>> 17 | { 18 | unreachable!("not implemented Embed::parse") 19 | } 20 | 21 | fn get_tag(&self) -> &'static str { 22 | "embed" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/extension/core/token_parsers/parser_extends.rs: -------------------------------------------------------------------------------- 1 | use tokens::TokenRef; 2 | use nodes::{ Parser, TokenParserExtension }; 3 | use nodes::body::Body; 4 | use error::TemplateResult; 5 | 6 | pub struct Extends; 7 | 8 | impl Extends { 9 | pub fn new() -> Extends { 10 | Extends 11 | } 12 | } 13 | 14 | impl TokenParserExtension for Extends { 15 | fn parse<'p, 'c>(&self, parser: &mut Parser<'p, 'c>, token: TokenRef<'c>) 16 | -> TemplateResult>> 17 | { 18 | unreachable!("not implemented Extends::parse") 19 | } 20 | 21 | fn get_tag(&self) -> &'static str { 22 | "extends" 23 | } 24 | } 25 | 
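For reference, a rough sketch (not part of the crate) of what `parse_assignment_expression` from `token_parsers/mod.rs` above produces when driven by a `set`-style tag; it only uses identifiers already defined in that module.

// Illustrative only: given the tokens for `foo, bar`, the helper returns one
// `ExprValue::AssignName` node per comma-separated name:
//
//     let targets = try!(parse_assignment_expression(parser));
//     // targets == [ AssignName("foo"), AssignName("bar") ]
//
// Reserved words and non-name tokens are rejected before any tag-specific
// parsing happens:
//
//     `true` as a target  -> CoreTemplateError::CanNotAssignTo("true")
//     a number as a target -> CoreTemplateError::OnlyVariablesCanBeAssignedTo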
-------------------------------------------------------------------------------- /src/extension/core/token_parsers/parser_filter.rs: -------------------------------------------------------------------------------- 1 | use tokens::TokenRef; 2 | use nodes::{ Parser, TokenParserExtension }; 3 | use nodes::body::Body; 4 | use error::TemplateResult; 5 | 6 | pub struct Filter; 7 | 8 | impl Filter { 9 | pub fn new() -> Filter { 10 | Filter 11 | } 12 | } 13 | 14 | impl TokenParserExtension for Filter { 15 | fn parse<'p, 'c>(&self, parser: &mut Parser<'p, 'c>, token: TokenRef<'c>) 16 | -> TemplateResult>> 17 | { 18 | unreachable!("not implemented Filter::parse") 19 | } 20 | 21 | fn get_tag(&self) -> &'static str { 22 | "filter" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/extension/core/token_parsers/parser_flush.rs: -------------------------------------------------------------------------------- 1 | use tokens::TokenRef; 2 | use nodes::{ Parser, TokenParserExtension }; 3 | use nodes::body::Body; 4 | use error::TemplateResult; 5 | 6 | pub struct Flush; 7 | 8 | impl Flush { 9 | pub fn new() -> Flush { 10 | Flush 11 | } 12 | } 13 | 14 | impl TokenParserExtension for Flush { 15 | fn parse<'p, 'c>(&self, parser: &mut Parser<'p, 'c>, token: TokenRef<'c>) 16 | -> TemplateResult>> 17 | { 18 | unreachable!("not implemented Flush::parse") 19 | } 20 | 21 | fn get_tag(&self) -> &'static str { 22 | "flush" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/extension/core/token_parsers/parser_for.rs: -------------------------------------------------------------------------------- 1 | use tokens::TokenRef; 2 | use nodes::{ Parser, TokenParserExtension }; 3 | use nodes::body::Body; 4 | use error::TemplateResult; 5 | 6 | pub struct For; 7 | 8 | impl For { 9 | pub fn new() -> For { 10 | For 11 | } 12 | } 13 | 14 | impl TokenParserExtension for For { 15 | fn parse<'p, 'c>(&self, parser: &mut Parser<'p, 'c>, token: TokenRef<'c>) 16 | -> TemplateResult>> 17 | { 18 | unreachable!("not implemented For::parse") 19 | } 20 | 21 | fn get_tag(&self) -> &'static str { 22 | "for" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/extension/core/token_parsers/parser_from.rs: -------------------------------------------------------------------------------- 1 | use tokens::{ TokenRef, TokenValueRef }; 2 | use nodes::{ Parser, TokenParserExtension }; 3 | use nodes::body::{ Body, ImportTarget }; 4 | use error::TemplateResult; 5 | 6 | use nodes::expr_parser::parse_expression; 7 | 8 | pub struct From; 9 | 10 | impl From { 11 | pub fn new() -> From { 12 | From 13 | } 14 | } 15 | 16 | impl TokenParserExtension for From { 17 | fn parse<'p, 'c>(&self, parser: &mut Parser<'p, 'c>, token: TokenRef<'c>) 18 | -> TemplateResult>> 19 | { 20 | trace!("From::parse {:?}", token); 21 | 22 | let macro_expr = try!(parse_expression(parser, 0)); 23 | 24 | try!(parser.expect(TokenValueRef::Name("import"))); 25 | 26 | let mut targets = Vec::new(); 27 | loop { 28 | let name = try!(parser.expect_name()); 29 | let mut alias = name; 30 | if try!(parser.skip_to_next_if(TokenValueRef::Name("as"))) { 31 | alias = try!(parser.expect_name()); 32 | } 33 | targets.push((alias, name)); 34 | if !try!(parser.skip_to_next_if(TokenValueRef::Punctuation(','))) { 35 | break; 36 | } 37 | } 38 | try!(parser.expect(TokenValueRef::BlockEnd)); 39 | 40 | let mut target_slots = Vec::new(); 41 | for (alias, name) 
in targets { 42 | target_slots.push( 43 | ( 44 | parser.add_imported_function(alias, name), 45 | alias, 46 | ImportTarget::Function { symbol: name } 47 | ) 48 | ); 49 | } 50 | 51 | Ok(Some(Body::Import { 52 | source: Box::new(macro_expr), 53 | targets: target_slots, 54 | line: token.line, 55 | })) 56 | } 57 | 58 | fn get_tag(&self) -> &'static str { 59 | "from" 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /src/extension/core/token_parsers/parser_if.rs: -------------------------------------------------------------------------------- 1 | use tokens::TokenRef; 2 | use nodes::{ Parser, TokenParserExtension }; 3 | use nodes::body::Body; 4 | use error::TemplateResult; 5 | 6 | pub struct If; 7 | 8 | impl If { 9 | pub fn new() -> If { 10 | If 11 | } 12 | } 13 | 14 | impl TokenParserExtension for If { 15 | fn parse<'p, 'c>(&self, parser: &mut Parser<'p, 'c>, token: TokenRef<'c>) 16 | -> TemplateResult>> 17 | { 18 | unreachable!("not implemented If::parse") 19 | } 20 | 21 | fn get_tag(&self) -> &'static str { 22 | "if" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/extension/core/token_parsers/parser_import.rs: -------------------------------------------------------------------------------- 1 | use tokens::TokenRef; 2 | use nodes::{ Parser, TokenParserExtension }; 3 | use nodes::body::Body; 4 | use error::TemplateResult; 5 | 6 | pub struct Import; 7 | 8 | impl Import { 9 | pub fn new() -> Import { 10 | Import 11 | } 12 | } 13 | 14 | impl TokenParserExtension for Import { 15 | fn parse<'p, 'c>(&self, parser: &mut Parser<'p, 'c>, token: TokenRef<'c>) 16 | -> TemplateResult>> 17 | { 18 | unreachable!("not implemented Import::parse") 19 | } 20 | 21 | fn get_tag(&self) -> &'static str { 22 | "import" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/extension/core/token_parsers/parser_include.rs: -------------------------------------------------------------------------------- 1 | use tokens::TokenRef; 2 | use nodes::{ Parser, TokenParserExtension }; 3 | use nodes::body::Body; 4 | use error::TemplateResult; 5 | 6 | pub struct Include; 7 | 8 | impl Include { 9 | pub fn new() -> Include { 10 | Include 11 | } 12 | } 13 | 14 | impl TokenParserExtension for Include { 15 | fn parse<'p, 'c>(&self, parser: &mut Parser<'p, 'c>, token: TokenRef<'c>) 16 | -> TemplateResult>> 17 | { 18 | unreachable!("not implemented Include::parse") 19 | } 20 | 21 | fn get_tag(&self) -> &'static str { 22 | "include" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/extension/core/token_parsers/parser_macro.rs: -------------------------------------------------------------------------------- 1 | use tokens::{ TokenRef, TokenValueRef }; 2 | use nodes::{ Parser, TokenParserExtension }; 3 | use nodes::body::Body; 4 | use error::TemplateResult; 5 | use nodes::expr_parser::parse_named_arguments; 6 | use nodes::body_parser::{ subparse, BlockEnd }; 7 | use extension::core::error::*; 8 | 9 | pub struct Macro; 10 | 11 | impl Macro { 12 | pub fn new() -> Macro { 13 | Macro 14 | } 15 | } 16 | 17 | impl TokenParserExtension for Macro { 18 | fn parse<'p, 'c>(&self, parser: &mut Parser<'p, 'c>, token: TokenRef<'c>) 19 | -> TemplateResult>> 20 | { 21 | trace!("Macro::parse, {:?}", token); 22 | 23 | let name = try!(parser.expect_name()); 24 | let arguments = try!(parse_named_arguments(parser, true)); 25 | let line = token.line; 26 
| 27 | try!(parser.expect(TokenValueRef::BlockEnd)); 28 | parser.push_local_scope(); 29 | 30 | let body = try!(subparse(parser, |token| match token.value { 31 | TokenValueRef::Name("endmacro") => Some(BlockEnd { drop_needle: true }), 32 | _ => None, 33 | })); 34 | let token = try!(parser.current()); 35 | if let TokenValueRef::Name(value) = token.value { 36 | try!(parser.next()); 37 | 38 | if value != name { 39 | return Err( 40 | CoreTemplateError::ExpectedEndmacroName { given: value.into(), expected: name.into() } 41 | .at(try!(parser.current()).line) 42 | .into() 43 | ) 44 | } 45 | } 46 | 47 | parser.pop_local_scope(); 48 | try!(parser.expect(TokenValueRef::BlockEnd)); 49 | 50 | Ok(Some(Body::Macro { 51 | name: name, 52 | body: Box::new(body), 53 | arguments: arguments, 54 | line: line, 55 | })) 56 | } 57 | 58 | fn get_tag(&self) -> &'static str { 59 | "macro" 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /src/extension/core/token_parsers/parser_set.rs: -------------------------------------------------------------------------------- 1 | use tokens::TokenRef; 2 | use nodes::{ Parser, TokenParserExtension }; 3 | use nodes::body::Body; 4 | use error::TemplateResult; 5 | 6 | use super::parse_assignment_expression; 7 | 8 | pub struct Set; 9 | 10 | impl Set { 11 | pub fn new() -> Set { 12 | Set 13 | } 14 | } 15 | 16 | impl TokenParserExtension for Set { 17 | fn parse<'p, 'c>(&self, parser: &mut Parser<'p, 'c>, token: TokenRef<'c>) 18 | -> TemplateResult>> 19 | { 20 | trace!("Set::parse"); 21 | 22 | let _line = token.line; 23 | let _targets = try!(parse_assignment_expression(parser)); 24 | 25 | unreachable!("not fully implemented Set::parse") 26 | } 27 | 28 | fn get_tag(&self) -> &'static str { 29 | "set" 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/extension/core/token_parsers/parser_spaceless.rs: -------------------------------------------------------------------------------- 1 | use tokens::TokenRef; 2 | use nodes::{ Parser, TokenParserExtension }; 3 | use nodes::body::Body; 4 | use error::TemplateResult; 5 | 6 | pub struct Spaceless; 7 | 8 | impl Spaceless { 9 | pub fn new() -> Spaceless { 10 | Spaceless 11 | } 12 | } 13 | 14 | impl TokenParserExtension for Spaceless { 15 | fn parse<'p, 'c>(&self, parser: &mut Parser<'p, 'c>, token: TokenRef<'c>) 16 | -> TemplateResult>> 17 | { 18 | unreachable!("not implemented Spaceless::parse") 19 | } 20 | 21 | fn get_tag(&self) -> &'static str { 22 | "spaceless" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/extension/core/token_parsers/parser_use.rs: -------------------------------------------------------------------------------- 1 | use tokens::TokenRef; 2 | use nodes::{ Parser, TokenParserExtension }; 3 | use nodes::body::Body; 4 | use error::TemplateResult; 5 | 6 | pub struct Use; 7 | 8 | impl Use { 9 | pub fn new() -> Use { 10 | Use 11 | } 12 | } 13 | 14 | impl TokenParserExtension for Use { 15 | fn parse<'p, 'c>(&self, parser: &mut Parser<'p, 'c>, token: TokenRef<'c>) 16 | -> TemplateResult>> 17 | { 18 | unreachable!("not implemented Use::parse") 19 | } 20 | 21 | fn get_tag(&self) -> &'static str { 22 | "use" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/extension/escaper/mod.rs: -------------------------------------------------------------------------------- 1 | use environment::Environment; 2 | use extension::Extension; 
3 | 4 | pub struct EscaperExtension; 5 | 6 | impl Extension for EscaperExtension { 7 | fn apply(env: &mut Environment) { 8 | 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /src/extension/mod.rs: -------------------------------------------------------------------------------- 1 | mod core; 2 | mod escaper; 3 | 4 | use environment::Environment; 5 | 6 | pub use self::core::CoreExtension; 7 | pub use self::escaper::EscaperExtension; 8 | 9 | /// Implement this trait to create a new Twig extension. 10 | pub trait Extension { 11 | fn apply(env: &mut Environment); 12 | } 13 | -------------------------------------------------------------------------------- /src/function/mod.rs: -------------------------------------------------------------------------------- 1 | use std::fmt; 2 | use value::Value; 3 | use error::{ RuntimeResult, TemplateResult }; 4 | use mold::Staging; 5 | use instructions::CompiledExpression; 6 | 7 | pub enum Arg { 8 | Anon, 9 | Named(&'static str), 10 | } 11 | 12 | /// Callable implementation. 13 | pub enum Callable { 14 | /// Executable at runtime. 15 | Dynamic(Box< 16 | for<'e> Fn(&'e [Value]) -> RuntimeResult 17 | >), 18 | /// Inlined into instructions at compile time. 19 | Static { 20 | arguments: Vec, 21 | compile: Box< 22 | for<'c> Fn(&mut Staging<'c, Value>) -> TemplateResult 23 | > 24 | } 25 | } 26 | 27 | /// Represents environment function. 28 | pub struct Function { 29 | pub name: &'static str, 30 | pub callable: Callable, 31 | } 32 | 33 | impl Function { 34 | pub fn new_dynamic( 35 | name: &'static str, 36 | callable: F 37 | ) 38 | -> Function 39 | where 40 | F: for<'e> Fn(&'e [Value]) -> RuntimeResult 41 | { 42 | Function { 43 | name: name, 44 | callable: Callable::Dynamic(Box::new(callable)), 45 | } 46 | } 47 | 48 | pub fn new_static>( 49 | name: &'static str, 50 | arguments: I, 51 | compile: F 52 | ) 53 | -> Function 54 | where 55 | F: for<'c> Fn(&mut Staging<'c, Value>) -> TemplateResult 56 | { 57 | Function { 58 | name: name, 59 | callable: Callable::Static { 60 | arguments: arguments.into_iter().collect(), 61 | compile: Box::new(compile) 62 | }, 63 | } 64 | } 65 | } 66 | 67 | impl fmt::Debug for Function { 68 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 69 | write!(f, "{}()", self.name) 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/instructions/compiler/body.rs: -------------------------------------------------------------------------------- 1 | use little::{ Instruction }; 2 | use instructions::{ Compile, CompileExpression }; 3 | use nodes::body::Body; 4 | use value::Value; 5 | use error::{ TemplateResult }; 6 | use mold::Staging; 7 | 8 | impl<'c> Compile<'c> for Body<'c> { 9 | fn compile<'r>(&'r self, stage: &'r mut Staging<'c, Value>) -> TemplateResult<()> { 10 | trace!("Body::compile"); 11 | match *self { 12 | Body::List { ref items } => { 13 | trace!("Body::List::compile"); 14 | for item in items { 15 | try!(item.compile(stage)); 16 | } 17 | Ok(()) 18 | }, 19 | Body::Text { .. } => unreachable!("Body::Text::compile"), 20 | Body::Print { ref expr, .. } => { 21 | trace!("Body::Print::compile"); 22 | 23 | let ce = try!(expr.compile(stage)); 24 | if let Some(result) = ce.result() { 25 | stage.instr(Instruction::Output { location: result }); 26 | }; 27 | try!(ce.finalize(stage)); 28 | 29 | Ok(()) 30 | }, 31 | Body::Import { .. } => unreachable!("Body::Import::compile"), 32 | Body::Macro { .. 
} => unreachable!("Body::Macro::compile"), 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/instructions/compiler/expr.rs: -------------------------------------------------------------------------------- 1 | use instructions::{ CompileExpression, CompiledExpression }; 2 | use nodes::expr::{ Expr, ExprValue }; 3 | use value::Value; 4 | use error::TemplateResult; 5 | use mold::Staging; 6 | 7 | impl<'c> CompileExpression<'c> for Expr<'c> { 8 | fn compile<'r>(&'r self, stage: &'r mut Staging<'c, Value>) -> TemplateResult { 9 | trace!("Expr::compile"); 10 | Ok(match self.value { 11 | ExprValue::Constant(_) => unreachable!("ExprValue::Constant::compile"), 12 | ExprValue::Name(name) => { 13 | let maybe_mem = stage.use_name(name); 14 | 15 | let name_mem = match maybe_mem { 16 | Some(mem) => { 17 | trace!("use mem {:?} for name {:?}", mem, name); 18 | mem 19 | }, 20 | None => { 21 | stage.include_const(Value::Str(name.into())) 22 | } 23 | }; 24 | 25 | CompiledExpression::with_result("ExprValue::Name", name_mem) 26 | }, 27 | ExprValue::AssignName(_) => unreachable!("ExprValue::AssignName::compile"), 28 | ExprValue::Array(_) => unreachable!("ExprValue::Array::compile"), 29 | ExprValue::Hash(_) => unreachable!("ExprValue::Hash::compile"), 30 | ExprValue::UnaryOperator { .. } => unreachable!("ExprValue::UnaryOperator::compile"), 31 | ExprValue::BinaryOperator { .. } => unreachable!("ExprValue::UnaryOperator::compile"), 32 | ExprValue::Concat { .. } => unreachable!("ExprValue::Concat::compile"), 33 | ExprValue::Conditional { .. } => unreachable!("ExprValue::Conditional::compile"), 34 | ExprValue::GetAttr { .. } => unreachable!("ExprValue::GetAttr::compile"), 35 | ExprValue::ImportedFunctionCall { .. } => unreachable!("ExprValue::ImportedFunctionCall::compile"), 36 | ExprValue::FunctionCall { name, ref arguments } => { 37 | CompiledExpression::empty("function call") 38 | }, 39 | }) 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /src/instructions/compiler/mod.rs: -------------------------------------------------------------------------------- 1 | use little::{ Mem, Instruction }; 2 | use value::Value; 3 | use error::TemplateResult; 4 | use mold::Staging; 5 | 6 | mod body; 7 | mod expr; 8 | mod module; 9 | 10 | pub trait Compile<'c> { 11 | fn compile<'r>(&'r self, stage: &'r mut Staging<'c, Value>) -> TemplateResult<()>; 12 | } 13 | 14 | /// Represents a mess created by a compiled expression. 15 | /// 16 | /// It is up to the caller to clean up this mess by calling `finalize` on this struct. 
17 | pub struct CompiledExpression { 18 | origin: &'static str, 19 | stack_length: u16, 20 | result: Option, 21 | finalized: bool, 22 | } 23 | 24 | impl CompiledExpression { 25 | pub fn with_result(origin: &'static str, result: Mem) -> CompiledExpression { 26 | CompiledExpression { 27 | origin: origin, 28 | stack_length: 0, 29 | result: Some(result), 30 | finalized: false, 31 | } 32 | } 33 | 34 | pub fn empty(origin: &'static str) -> CompiledExpression { 35 | CompiledExpression { 36 | origin: origin, 37 | stack_length: 0, 38 | result: None, 39 | finalized: false, 40 | } 41 | } 42 | 43 | pub fn new(origin: &'static str, result: Mem, stack_length: u16) -> CompiledExpression { 44 | CompiledExpression { 45 | origin: origin, 46 | stack_length: stack_length, 47 | result: Some(result), 48 | finalized: false, 49 | } 50 | } 51 | 52 | pub fn result(&self) -> Option { 53 | self.result.clone() 54 | } 55 | 56 | pub fn finalize<'c, 'r>(mut self, stage: &'r mut Staging<'c, Value>) -> TemplateResult<()> { 57 | if self.stack_length > 0 { 58 | trace!("finalize {}", self.origin); 59 | stage.instr(Instruction::Pop { times: self.stack_length }); 60 | } 61 | self.finalized = true; 62 | Ok(()) 63 | } 64 | } 65 | 66 | impl Drop for CompiledExpression { 67 | fn drop(&mut self) { 68 | if !self.finalized { 69 | panic!("finalize never called on {}!", self.origin); 70 | } 71 | } 72 | } 73 | 74 | pub trait CompileExpression<'c> { 75 | /// Compiles ast subnodes that return result in 76 | fn compile<'r>(&'r self, stage: &'r mut Staging<'c, Value>) -> TemplateResult; 77 | } 78 | -------------------------------------------------------------------------------- /src/instructions/compiler/module.rs: -------------------------------------------------------------------------------- 1 | use instructions::Compile; 2 | use nodes::Module; 3 | use value::Value; 4 | use error::TemplateResult; 5 | use mold::Staging; 6 | 7 | impl<'c> Compile<'c> for Module<'c> { 8 | fn compile<'r>(&'r self, stage: &'r mut Staging<'c, Value>) -> TemplateResult<()> { 9 | trace!("Module::compile"); 10 | self.body.compile(stage) 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /src/instructions/mod.rs: -------------------------------------------------------------------------------- 1 | pub use self::compiler::{ Compile, CompileExpression, CompiledExpression }; 2 | 3 | use nodes::Module; 4 | use error::Result; 5 | use value::Value; 6 | use little::{ Template }; 7 | use mold::Staging; 8 | 9 | mod compiler; 10 | 11 | pub fn compile(env: (), nodes: &Module) -> Result> { 12 | trace!("compile"); 13 | let mut stage = Staging::new(); 14 | try!(nodes.compile(&mut stage)); 15 | Ok(stage.into()) 16 | } 17 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | /*! 2 | 3 | Fork me on GitHub 4 | 5 | 6 | */ 7 | 8 | /** 9 | * This module is part of twig-rs. 10 | * 11 | * (c) 2015 Rust Twig Team 12 | * 13 | * For the full copyright and license information, please view the LICENSE 14 | * file that was distributed with this source code. 
15 | */ 16 | 17 | extern crate sha1; 18 | extern crate byteorder; 19 | extern crate regex; 20 | extern crate uuid; 21 | extern crate little; 22 | #[macro_use] extern crate log; 23 | 24 | pub mod tokens; 25 | pub mod nodes; 26 | pub mod instructions; 27 | pub mod loader; 28 | pub mod error; 29 | pub mod environment; 30 | pub mod extension; 31 | pub mod operator; 32 | pub mod function; 33 | pub mod value; 34 | pub mod mold; 35 | 36 | mod engine; 37 | 38 | pub use engine::Engine; 39 | 40 | /// Returns different output based on expected value. 41 | pub trait Expect { 42 | type Output; 43 | 44 | fn expect(&mut self, expected: V) -> Self::Output; 45 | } 46 | -------------------------------------------------------------------------------- /src/loader.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use error::{ Result, EngineError }; 3 | 4 | pub trait Loader { 5 | fn get_source(&self, name: &str) -> Result; 6 | } 7 | 8 | #[derive(Debug)] 9 | pub struct ArrayLoader { 10 | files: HashMap, 11 | } 12 | 13 | impl ArrayLoader { 14 | pub fn new(sources: HashMap) -> ArrayLoader { 15 | ArrayLoader { 16 | files: sources, 17 | } 18 | } 19 | } 20 | 21 | impl Loader for ArrayLoader { 22 | fn get_source(&self, name: &str) -> Result { 23 | match self.files.get(name) { 24 | Some(contents) => Ok(contents.clone()), 25 | None => Err(EngineError::TemplateNotFound { 26 | name: name.into(), 27 | search_paths: Vec::new() 28 | }.into()), 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/mold/mod.rs: -------------------------------------------------------------------------------- 1 | use std::collections::{ HashMap, VecDeque }; 2 | use little::*; 3 | use value::Value; 4 | 5 | pub struct Staging<'c, V: LittleValue> { 6 | next_constant: Constant, 7 | unique_constants: HashMap, 8 | pub locals: VecDeque>, 9 | template: Template, 10 | } 11 | 12 | impl<'c, V: LittleValue> Staging<'c, V> { 13 | pub fn new<'r>() -> Staging<'r, V> { 14 | let mut st = Staging { 15 | next_constant: Constant(0), 16 | unique_constants: HashMap::new(), 17 | locals: VecDeque::new(), 18 | template: Template::empty(), 19 | }; 20 | 21 | st.locals.push_front(Basket::new(Binding(0), |Binding(p)| Binding(p + 1))); 22 | 23 | st 24 | } 25 | 26 | pub fn include_const(&mut self, const_value: V) -> Mem { 27 | // next constant to insert. 28 | let mut next = self.next_constant; 29 | 30 | let constant = match const_value.identify_value() { 31 | Some(fingerprint) => { // constant is identifiable 32 | let mut added = false; 33 | // we can ensure that identifiable constant is kept only once 34 | let identifier = *self.unique_constants.entry(fingerprint).or_insert_with(|| { 35 | // when we insert it, we increment identifier. 36 | let identifier = next; 37 | next = match next { 38 | Constant(v) => Constant(v + 1), 39 | }; 40 | added = true; 41 | identifier 42 | }); 43 | // and add it to constant list only once. 44 | if added { 45 | self.template.push_constant(identifier, const_value); 46 | } 47 | identifier 48 | }, 49 | None => { // constant can not be identified 50 | // always add value to constant list for every constant. 51 | let identifier = next; 52 | next = match next { 53 | Constant(v) => Constant(v + 1), 54 | }; 55 | self.template.push_constant(identifier, const_value); 56 | identifier 57 | }, 58 | }; 59 | 60 | // update next constant. 
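        // (Descriptive note, not in the original source: a value whose
        // `identify_value()` returns a fingerprint is pushed to the constant
        // list once, and later calls with the same fingerprint reuse that
        // `Constant` slot; a value returning `None` is pushed again on every
        // call.)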
61 | self.next_constant = next; 62 | 63 | Mem::Const(constant) 64 | } 65 | 66 | pub fn use_name(&mut self, name: &'c str) -> Option { 67 | for basket in &self.locals { 68 | if let Some(ref binding) = basket.get(name) { 69 | return Some(Mem::Binding(binding.clone())); 70 | } 71 | } 72 | None 73 | } 74 | 75 | pub fn instr(&mut self, instruction: Instruction) { 76 | trace!("instr {:?}", &instruction); 77 | self.template.push_instruction(instruction); 78 | } 79 | } 80 | 81 | impl<'a> Into> for Staging<'a, Value> { 82 | fn into(self) -> Template { 83 | self.template 84 | } 85 | } 86 | 87 | pub struct Basket<'c, T> { 88 | pub map: HashMap<&'c str, T>, 89 | next: Box T>, 90 | current: T, 91 | } 92 | 93 | impl<'c, T> Basket<'c, T> where T: Eq + Clone { 94 | 95 | pub fn new<'r, N: Fn(T) -> T + 'static>(initial: T, next: N) -> Basket<'r, T> { 96 | Basket { 97 | map: HashMap::new(), 98 | next: Box::new(next), 99 | current: initial, 100 | } 101 | } 102 | 103 | pub fn assign_space(&mut self, name: &'c str) -> T { 104 | let len = self.map.len(); 105 | let current = self.current.clone(); 106 | let result = self.map.entry(&name).or_insert(current.clone()).clone(); 107 | if self.map.len() > len { 108 | self.current = (self.next)(current); 109 | } 110 | result 111 | } 112 | 113 | pub fn get(&self, name: &str) -> Option { 114 | self.map.get(name).cloned() 115 | } 116 | } 117 | -------------------------------------------------------------------------------- /src/nodes/mod.rs: -------------------------------------------------------------------------------- 1 | /*! 2 | 3 | Produces an abstract syntax tree from a stream of tokens. 4 | 5 | */ 6 | 7 | mod parser; 8 | mod node; 9 | mod token_parser; 10 | 11 | pub use self::node::body; 12 | pub use self::node::expr; 13 | pub use self::node::module::Module; 14 | pub use self::token_parser::{ TokenParser }; 15 | pub use self::parser::{ Parser, Parse, ImportedFunction }; 16 | pub use self::parser::body as body_parser; 17 | pub use self::parser::expr as expr_parser; 18 | pub use self::parser::module as module_parser; 19 | 20 | use environment::ParsingEnvironment; 21 | use tokens::{ TokenRef, TokenIter }; 22 | use error::TemplateResult; 23 | 24 | #[derive(Debug)] 25 | pub struct Block; 26 | 27 | #[derive(Debug)] 28 | pub struct Macro; 29 | 30 | #[derive(Debug)] 31 | pub struct Trait; 32 | 33 | #[derive(Debug)] 34 | pub struct EmbededTemplate; 35 | 36 | pub trait TokenParserExtension 37 | { 38 | fn get_tag(&self) -> &'static str; 39 | fn parse<'p, 'c>(&self, parser: &mut Parser<'p, 'c>, token: TokenRef<'c>) 40 | -> TemplateResult>>; 41 | } 42 | 43 | /// Parse given token stream into a node tree. 44 | pub fn parse<'r, 'c>(env: &'r ParsingEnvironment, tokens: &'r mut TokenIter<'r, 'c>) -> TemplateResult> { 45 | let mut parser = Parser::new( 46 | env, tokens 47 | ); 48 | Module::parse(&mut parser) 49 | } 50 | -------------------------------------------------------------------------------- /src/nodes/node/body.rs: -------------------------------------------------------------------------------- 1 | use nodes::expr::Expr; 2 | use uuid::Uuid; 3 | 4 | #[derive(Debug)] 5 | pub enum ImportTarget<'c> { 6 | Function { symbol: &'c str }, 7 | } 8 | 9 | #[derive(Debug)] 10 | pub enum Body<'c> { 11 | List { items: Vec> }, 12 | Text { value: &'c str, line: usize }, 13 | Print { expr: Box>, line: usize }, 14 | Import { 15 | /// Target template to import, which can be evaluated at runtime from 16 | /// provided expression. 17 | source: Box>, 18 | /// Target list alias => name. 
19 | targets: Vec<(Uuid, &'c str, ImportTarget<'c>)>, 20 | line: usize 21 | }, 22 | Macro { 23 | name: &'c str, 24 | body: Box>, 25 | arguments: Vec<(Option<&'c str>, Expr<'c>)>, 26 | line: usize 27 | } 28 | } 29 | 30 | impl<'c> Body<'c> { 31 | pub fn new() -> Body<'c> { 32 | Body::List { items: Vec::new() } 33 | } 34 | 35 | pub fn expect_print<'r>(&'r self) -> &'r Expr<'c> { 36 | match *self { 37 | Body::Print { expr: ref e, .. } => e, 38 | ref what => panic!("Expected expect_print to return Expr but received {:?}", what), 39 | } 40 | } 41 | 42 | pub fn expect_list<'r>(&'r self) -> &'r Vec> { 43 | match *self { 44 | Body::List { items: ref list } => list, 45 | ref what => panic!("Expected expect_list to return Vec but received {:?}", what), 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/nodes/node/expr.rs: -------------------------------------------------------------------------------- 1 | use uuid::Uuid; 2 | 3 | #[derive(Debug, PartialEq, Clone)] 4 | pub struct Expr<'c> { 5 | pub line: usize, 6 | pub value: ExprValue<'c>, 7 | } 8 | 9 | impl<'c> Expr<'c> { 10 | pub fn new_at<'r>(value: ExprValue<'r>, line: usize) -> Expr<'r> { 11 | Expr { 12 | line: line, 13 | value: value 14 | } 15 | } 16 | 17 | pub fn new_array<'r>(value: Vec>, line: usize) -> Expr<'r> { 18 | Expr::new_at(ExprValue::Array(value), line) 19 | } 20 | 21 | pub fn new_hash<'r>(value: Vec<(Expr<'r>, Expr<'r>)>, line: usize) -> Expr<'r> { 22 | Expr::new_at(ExprValue::Hash(value), line) 23 | } 24 | 25 | pub fn new_str_constant<'r>(value: &'r str, line: usize) -> Expr<'r> { 26 | Expr::new_at(ExprValue::Constant(ExprConstant::Str(value)), line) 27 | } 28 | 29 | pub fn new_int_constant<'r>(value: i64, line: usize) -> Expr<'r> { 30 | Expr::new_at(ExprValue::Constant(ExprConstant::Int(value)), line) 31 | } 32 | 33 | pub fn new_bool<'r>(value: bool, line: usize) -> Expr<'r> { 34 | Expr::new_at(ExprValue::Constant(ExprConstant::Bool(value)), line) 35 | } 36 | 37 | pub fn new_null<'r>(line: usize) -> Expr<'r> { 38 | Expr::new_at(ExprValue::Constant(ExprConstant::Null), line) 39 | } 40 | 41 | pub fn new_name<'r>(name: &'r str, line: usize) -> Expr<'r> { 42 | Expr::new_at(ExprValue::Name(name), line) 43 | } 44 | 45 | pub fn is_constant(&self) -> bool { 46 | match self.value { 47 | ExprValue::Array(ref items) => items.iter().all(|i| i.is_constant()), 48 | ExprValue::AssignName(_) => false, 49 | ExprValue::BinaryOperator { .. } => false, 50 | ExprValue::Concat { .. } => false, 51 | ExprValue::Conditional { .. } => false, 52 | ExprValue::Constant(_) => true, 53 | ExprValue::Name(_) => false, 54 | ExprValue::UnaryOperator { value: "-", ref expr } => expr.is_constant(), 55 | ExprValue::UnaryOperator { value: "+", ref expr } => expr.is_constant(), 56 | ExprValue::UnaryOperator { .. } => false, 57 | ExprValue::Hash(ref items) => items.iter().all(|&(ref k, ref v)| k.is_constant() && v.is_constant()), 58 | ExprValue::GetAttr { .. } => false, 59 | ExprValue::ImportedFunctionCall { .. } => false, 60 | ExprValue::FunctionCall { .. 
} => false, 61 | } 62 | } 63 | } 64 | 65 | #[derive(Debug, PartialEq, Clone)] 66 | pub enum ExprConstant<'c> { 67 | Str(&'c str), 68 | Bool(bool), 69 | Int(i64), 70 | Float(f64), 71 | Big(&'c str), 72 | Null, 73 | } 74 | 75 | #[derive(Debug, PartialEq, Clone)] 76 | pub enum ExprValue<'c> { 77 | Constant(ExprConstant<'c>), 78 | Name(&'c str), 79 | AssignName(&'c str), 80 | Array(Vec>), 81 | Hash(Vec<(Expr<'c>, Expr<'c>)>), 82 | UnaryOperator { value: &'c str, expr: Box> }, 83 | BinaryOperator { value: &'c str, left: Box>, right: Box> }, 84 | Concat { left: Box>, right: Box> }, 85 | Conditional { expr: Box>, yay: Box>, nay: Box> }, 86 | GetAttr { 87 | node: Box>, 88 | arg: Box>, 89 | arguments: Vec>, 90 | call_type: ExprCallType 91 | }, 92 | ImportedFunctionCall { uuid: Uuid, alias: &'c str, arguments: Vec> }, 93 | FunctionCall { name: &'c str, arguments: Vec<(Option<&'c str>, Expr<'c>)> } 94 | } 95 | 96 | #[derive(Debug, PartialEq, Clone)] 97 | pub enum ExprCallType { 98 | Any, 99 | Method, 100 | Array, 101 | } 102 | -------------------------------------------------------------------------------- /src/nodes/node/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod body; 2 | pub mod expr; 3 | pub mod module; 4 | -------------------------------------------------------------------------------- /src/nodes/node/module.rs: -------------------------------------------------------------------------------- 1 | use nodes::body::Body; 2 | 3 | #[derive(Debug)] 4 | pub struct Module<'c> { 5 | // Sub nodes. 6 | pub body: Body<'c>, 7 | // pub blocks: Vec, 8 | // pub macros: Vec, 9 | // pub traits: Vec, 10 | 11 | // Attributes. 12 | // file_id: Option, // this must NOT be treated as file name 13 | // index: i32, // TODO: wtf is this 14 | // embedded_templates: Vec, 15 | 16 | // TODO: check usage of things bellow 17 | // display_start: Body<'c>, 18 | // display_end: Body<'c>, 19 | // constructor_start: Body<'c>, 20 | // constructor_end: Body<'c>, 21 | // class_end: Body<'c>, 22 | } 23 | 24 | /// Root Twig AST node. 
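/// `Module::new()` starts with an empty `Body::List`; parsed templates are
/// normally obtained through `nodes::parse(&env, &mut tokens)`, which drives
/// `Module::parse` (see `nodes/mod.rs`).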
25 | impl<'c> Module<'c> { 26 | pub fn new() -> Module<'c> { 27 | Module { 28 | body: Body::new(), 29 | // blocks: vec![], 30 | // macros: vec![], 31 | // traits: vec![], 32 | 33 | // file_id: None, 34 | // index: 0, 35 | // embedded_templates: vec![], 36 | // 37 | // display_start: Body::new(), 38 | // display_end: Body::new(), 39 | // constructor_start: Body::new(), 40 | // constructor_end: Body::new(), 41 | // class_end: Body::new(), 42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /src/nodes/parser/body.rs: -------------------------------------------------------------------------------- 1 | use nodes::{ Parser, Parse }; 2 | use nodes::body::Body; 3 | use nodes::expr::Expr; 4 | use tokens::{ TokenRef, TokenValueRef }; 5 | use Expect; 6 | use error::{ TemplateResult, TemplateError }; 7 | 8 | impl<'c> Parse<'c> for Body<'c> { 9 | type Output = Body<'c>; 10 | 11 | fn parse<'r>(parser: &mut Parser<'r, 'c>) 12 | -> TemplateResult> 13 | { 14 | trace!("Body::parse"); 15 | 16 | subparse(parser, |_| None) 17 | } 18 | } 19 | 20 | pub struct BlockEnd { 21 | pub drop_needle: bool, 22 | } 23 | 24 | pub fn subparse<'p, 'c, D>(parser: &mut Parser<'p, 'c>, test: D) 25 | -> TemplateResult> 26 | where D: Fn(&TokenRef<'c>) -> Option 27 | { 28 | let mut maybe_line = None; 29 | let mut rv = Vec::new(); 30 | 31 | while let Some(token) = try!(parser.maybe_current()) { 32 | if let None = maybe_line { 33 | maybe_line = Some(token.line); 34 | } 35 | match token.value { 36 | TokenValueRef::Text(t) => { 37 | try!(parser.next()); 38 | rv.push(Body::Text { value: t, line: token.line }) 39 | }, 40 | TokenValueRef::VarStart => { 41 | try!(parser.next()); 42 | let expr = try!(Expr::parse(parser)); 43 | try!(parser.expect(TokenValueRef::VarEnd)); 44 | rv.push(Body::Print { expr: Box::new(expr), line: token.line }); 45 | }, 46 | TokenValueRef::BlockStart => { 47 | try!(parser.next()); 48 | let token = try!(parser.current()); 49 | 50 | let tag_name = match token.value { 51 | TokenValueRef::Name(n) => n, 52 | _ => return Err(TemplateError::MustStartWithTagName.at(token.line)), 53 | }; 54 | 55 | if let Some(end) = test(&token) { 56 | if end.drop_needle { 57 | try!(parser.next()); 58 | } 59 | return if 1 == rv.len() { 60 | Ok(rv.remove(0)) 61 | } else { 62 | Ok(Body::List { items: rv }) 63 | } 64 | } 65 | 66 | let subparser = match parser.env.handlers.get(tag_name) { 67 | Some(sp) => sp, 68 | None => { 69 | unreachable!("errors when subparser not found not implemented") 70 | } 71 | }; 72 | 73 | try!(parser.next()); 74 | let maybe_node = try!(subparser.parse(parser, token)); 75 | if let Some(node) = maybe_node { 76 | rv.push(node); 77 | } 78 | }, 79 | tv => { panic!("not implemented {:?}", tv) }, 80 | }; 81 | } 82 | 83 | if rv.len() == 1 { 84 | Ok(rv.remove(0)) 85 | } else { 86 | Ok(Body::List { items: rv }) 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /src/nodes/parser/expr.rs: -------------------------------------------------------------------------------- 1 | use nodes::{ Parse, Parser, ImportedFunction }; 2 | use nodes::expr::{ Expr, ExprValue, ExprConstant, ExprCallType }; 3 | use tokens::{ TokenValueRef, ConstRef, ConstNumberRef }; 4 | use operator::{ OperatorOptions, OperatorKind, Associativity }; 5 | use error::{ TemplateResult, TemplateError }; 6 | use Expect; 7 | use std::collections::VecDeque; 8 | 9 | impl<'c> Parse<'c> for Expr<'c> { 10 | type Output = Expr<'c>; 11 | 12 | fn parse<'r>(parser: &mut 
Parser<'r, 'c>) 13 | -> TemplateResult> 14 | { 15 | trace!("Expr::parse"); 16 | parse_expression(parser, 0) 17 | } 18 | } 19 | 20 | pub fn parse_expression<'p, 'c>(parser: &mut Parser<'p, 'c>, min_precedence: u16) 21 | -> TemplateResult> 22 | { 23 | trace!("parse_expression"); 24 | 25 | let mut expr = try!(get_primary(parser)); 26 | let mut token = try!(parser.current()); 27 | 28 | loop { 29 | if let TokenValueRef::Operator(op_str) = token.value { 30 | if let OperatorOptions { kind: OperatorKind::Binary { associativity, .. }, precedence: Some(precedence), .. } = parser.get_operator_options(op_str) { 31 | if precedence >= min_precedence { 32 | try!(parser.next()); 33 | 34 | // if callable ... 35 | // TODO: Callable. 36 | // else 37 | let expr1 = try!(parse_expression(parser, match associativity { 38 | Associativity::Left => precedence + 1, 39 | Associativity::Right => precedence, 40 | })); 41 | expr = Expr::new_at(ExprValue::BinaryOperator { 42 | value: op_str, 43 | left: Box::new(expr.clone()), 44 | right: Box::new(expr1), 45 | }, token.line); 46 | // endif 47 | 48 | token = try!(parser.current()); 49 | 50 | continue; 51 | } 52 | } 53 | } 54 | break; 55 | } 56 | 57 | if 0 == min_precedence { 58 | return parse_conditional_expression(parser, expr); 59 | } 60 | 61 | Ok(expr) 62 | } 63 | 64 | pub fn get_primary<'p, 'c>(parser: &mut Parser<'p, 'c>) 65 | -> TemplateResult> 66 | { 67 | trace!("get_primary"); 68 | 69 | let token = try!(parser.current()); 70 | 71 | if let TokenValueRef::Operator(op_str) = token.value { 72 | if let OperatorOptions { kind: OperatorKind::Unary { .. }, precedence: Some(precedence), .. } = parser.get_operator_options(op_str) { 73 | try!(parser.next()); 74 | let expr = try!(parse_expression(parser, precedence)); 75 | let parsed_expr = Expr::new_at(ExprValue::UnaryOperator { 76 | value: op_str, 77 | expr: Box::new(expr), 78 | }, token.line); 79 | return parse_postfix_expression(parser, parsed_expr); 80 | } 81 | } 82 | 83 | if let TokenValueRef::Punctuation('(') = token.value { 84 | try!(parser.next()); 85 | let parsed_expr = try!(parse_expression(parser, 0)); 86 | if let Err(_) = parser.expect(TokenValueRef::Punctuation(')')) { 87 | return Err(TemplateError::ParenthesisNotClosed.at(token.line)); 88 | } 89 | return parse_postfix_expression(parser, parsed_expr); 90 | } 91 | 92 | parse_primary_expression(parser) 93 | } 94 | 95 | /// Parses expression and returns handle to one that should be executed first. 96 | pub fn get_function_node<'p, 'c>(parser: &mut Parser<'p, 'c>, name: &'c str, line: usize) 97 | -> TemplateResult> 98 | { 99 | trace!("get_function_node"); 100 | 101 | match name { 102 | "parent" => unreachable!("function node parent"), 103 | "block" => unreachable!("function node block"), 104 | "attribute" => unreachable!("function node attribute"), 105 | _ => { 106 | if let Some(ImportedFunction { uuid, alias, .. 
}) = parser.get_imported_function(name) { 107 | return Ok(Expr::new_at(ExprValue::ImportedFunctionCall { 108 | uuid: uuid, 109 | alias: alias, 110 | arguments: try!(parse_unnamed_arguments(parser, false)) 111 | }, line)); 112 | } 113 | 114 | return Ok(Expr::new_at(ExprValue::FunctionCall { 115 | name: name, 116 | arguments: try!(parse_named_arguments(parser, false)) 117 | }, line)); 118 | } 119 | } 120 | } 121 | 122 | pub fn parse_primary_expression<'p, 'c>(parser: &mut Parser<'p, 'c>) 123 | -> TemplateResult> 124 | { 125 | trace!("parse_primary_expression"); 126 | let token = try!(parser.current()); 127 | 128 | let expr = match token.value { 129 | TokenValueRef::Name(name) => { 130 | try!(parser.next()); 131 | match name { 132 | "true" | "TRUE" => 133 | Expr::new_bool(true, token.line), 134 | "false" | "FALSE" => 135 | Expr::new_bool(false, token.line), 136 | "none" | "NONE" | "null" | "NULL" => 137 | Expr::new_null(token.line), 138 | name => { 139 | let current_token = try!(parser.current()); 140 | match current_token.value { 141 | TokenValueRef::Punctuation('(') => try!(get_function_node(parser, name, token.line)), 142 | _ => Expr::new_name(name, token.line), 143 | } 144 | }, 145 | } 146 | }, 147 | TokenValueRef::Value(ref value) => match *value { 148 | ConstRef::Num(num) => { 149 | try!(parser.next()); 150 | get_number_expr(num, token.line) 151 | }, 152 | ConstRef::Str(_) => try!(parse_string_expression(parser)), 153 | }, 154 | TokenValueRef::InterpolationStart => try!(parse_string_expression(parser)), 155 | TokenValueRef::Operator(_) => unreachable!("TokenValueRef::Operator"), 156 | TokenValueRef::Punctuation('[') => try!(parse_array_expression(parser)), 157 | TokenValueRef::Punctuation('{') => try!(parse_hash_expression(parser)), 158 | other => return Err( 159 | TemplateError::UnexpectedTokenValue(other.into()) 160 | .at(token.line) 161 | ), 162 | }; 163 | 164 | parse_postfix_expression(parser, expr) 165 | } 166 | 167 | pub fn get_number_expr<'c>(num: ConstNumberRef<'c>, line: usize) -> Expr<'c> { 168 | Expr::new_at(ExprValue::Constant(match num { 169 | ConstNumberRef::Big(v) => ExprConstant::Big(v), 170 | ConstNumberRef::Float(v) => ExprConstant::Float(v), 171 | ConstNumberRef::Int(v) => ExprConstant::Int(v), 172 | }), line) 173 | } 174 | 175 | pub fn parse_string_expression<'p, 'c>(parser: &mut Parser<'p, 'c>) 176 | -> TemplateResult> 177 | { 178 | trace!("parse_string_expression"); 179 | 180 | let mut nodes = VecDeque::new(); 181 | let mut next_can_be_string = true; 182 | 183 | loop { 184 | let token = try!(parser.current()); 185 | 186 | if let (true, TokenValueRef::Value(ConstRef::Str(value))) = (next_can_be_string, token.value) { 187 | try!(parser.next()); 188 | nodes.push_back(Expr::new_str_constant(value, token.line)); 189 | next_can_be_string = false; 190 | continue; 191 | } 192 | 193 | if let TokenValueRef::InterpolationStart = token.value { 194 | try!(parser.next()); 195 | nodes.push_back(try!(parse_expression(parser, 0))); 196 | try!(parser.expect(TokenValueRef::InterpolationEnd)); 197 | next_can_be_string = true; 198 | continue; 199 | } 200 | 201 | break; 202 | } 203 | 204 | let mut expr = nodes.pop_front() 205 | .expect("twig bug: expected first node to be string when in parse_string_expression state"); 206 | 207 | for node in nodes { 208 | let line = node.line; 209 | expr = Expr::new_at( 210 | ExprValue::Concat { left: Box::new(expr), right: Box::new(node) }, 211 | line 212 | ); 213 | } 214 | 215 | Ok(expr) 216 | } 217 | 218 | pub fn parse_array_expression<'p, 
'c>(parser: &mut Parser<'p, 'c>) 219 | -> TemplateResult> 220 | { 221 | trace!("parse_array_expression"); 222 | 223 | try!(parser.expect_or_error(TokenValueRef::Punctuation('['), TemplateError::ExpectedArrayElement)); 224 | 225 | let mut items = Vec::new(); 226 | 227 | let mut token = try!(parser.current()); 228 | let start_line = token.line; 229 | let mut first = true; 230 | 231 | while token.value != TokenValueRef::Punctuation(']') { 232 | if !first { 233 | try!(parser.expect_or_error(TokenValueRef::Punctuation(','), TemplateError::ArrayValueMustBeFollowedByComma)); 234 | token = try!(parser.current()); 235 | 236 | // trailing ,? 237 | if token.value == TokenValueRef::Punctuation(']') { 238 | break; 239 | } 240 | } 241 | first = false; 242 | 243 | items.push(try!(parse_expression(parser, 0))); 244 | token = try!(parser.current()); 245 | } 246 | try!(parser.expect_or_error(TokenValueRef::Punctuation(']'), TemplateError::ArrayNotClosed)); 247 | 248 | Ok(Expr::new_array(items, start_line)) 249 | } 250 | 251 | pub fn parse_hash_expression<'p, 'c>(parser: &mut Parser<'p, 'c>) 252 | -> TemplateResult> 253 | { 254 | trace!("parse_hash_expression"); 255 | 256 | try!(parser.expect_or_error(TokenValueRef::Punctuation('{'), TemplateError::ExpectedHashElement)); 257 | 258 | let mut items = Vec::new(); 259 | 260 | let mut token = try!(parser.current()); 261 | let start_line = token.line; 262 | let mut first = true; 263 | 264 | while token.value != TokenValueRef::Punctuation('}') { 265 | if !first { 266 | try!(parser.expect_or_error(TokenValueRef::Punctuation(','), TemplateError::HashValueMustBeFollowedByComma)); 267 | token = try!(parser.current()); 268 | 269 | // trailing ,? 270 | if token.value == TokenValueRef::Punctuation('}') { 271 | break; 272 | } 273 | } 274 | first = false; 275 | 276 | // a hash key can be: 277 | // 278 | // * a number -- 12 279 | // * a string -- 'a' 280 | // * a name, which is equivalent to a string -- a 281 | // * an expression, which must be enclosed in parentheses -- (1 + 2) 282 | let key = match token.value { 283 | TokenValueRef::Value(ConstRef::Str(v)) => { 284 | try!(parser.next()); 285 | Expr::new_str_constant(v, token.line) 286 | }, 287 | TokenValueRef::Name(v) => { 288 | try!(parser.next()); 289 | Expr::new_str_constant(v, token.line) 290 | }, 291 | TokenValueRef::Value(ConstRef::Num(num)) => { 292 | try!(parser.next()); 293 | get_number_expr(num, token.line) 294 | }, 295 | TokenValueRef::Punctuation('(') => { 296 | try!(parse_expression(parser, 0)) 297 | } 298 | _ => return Err( 299 | TemplateError::InvalidHashKey { unexpected: token.value.into() } 300 | .at(token.line) 301 | ), 302 | }; 303 | 304 | try!(parser.expect_or_error(TokenValueRef::Punctuation(':'), TemplateError::HashKeyMustBeFollowedByColon)); 305 | 306 | let value = try!(parse_expression(parser, 0)); 307 | token = try!(parser.current()); 308 | 309 | items.push((key, value)); 310 | } 311 | try!(parser.expect_or_error(TokenValueRef::Punctuation('}'), TemplateError::HashNotClosed)); 312 | 313 | Ok(Expr::new_hash(items, start_line)) 314 | } 315 | 316 | pub fn parse_postfix_expression<'p, 'c>(parser: &mut Parser<'p, 'c>, mut node: Expr<'c>) 317 | -> TemplateResult> 318 | { 319 | trace!("parse_postfix_expression"); 320 | 321 | loop { 322 | let token = try!(parser.current()); 323 | if let TokenValueRef::Punctuation(ch) = token.value { 324 | node = match ch { 325 | '.' 
| '[' => try!(parse_subscript_expression(parser, node)), 326 | '|' => try!(parse_filter_expression(parser, node)), 327 | _ => break, 328 | }; 329 | 330 | continue; 331 | } 332 | 333 | break; 334 | } 335 | 336 | Ok(node) 337 | } 338 | 339 | pub fn parse_subscript_expression<'p, 'c>(parser: &mut Parser<'p, 'c>, node: Expr<'c>) 340 | -> TemplateResult> 341 | { 342 | trace!("parse_subscript_expression"); 343 | 344 | let mut token = try!(parser.next()); 345 | let line = token.line; 346 | let mut arguments = Vec::>::new(); 347 | let mut call_type = ExprCallType::Any; 348 | 349 | let arg = match token.value { 350 | TokenValueRef::Punctuation('.') => { 351 | token = try!(parser.next()); 352 | let arg = match token.value { 353 | TokenValueRef::Name(v) => Expr::new_str_constant(v, line), 354 | TokenValueRef::Value(ConstRef::Num(num)) => get_number_expr(num, line), 355 | // OMG the hack here is _hilarious_: 356 | // TODO: ($token->getType() == Twig_tokens::OPERATOR_TYPE && preg_match(Twig_Lexer::REGEX_NAME, $token->getValue())) 357 | _ => return Err(TemplateError::ExpectedNameOrNumber.at(line)) 358 | }; 359 | 360 | token = try!(parser.current()); 361 | if let TokenValueRef::Punctuation('(') = token.value { 362 | call_type = ExprCallType::Method; 363 | arguments = try!(parse_unnamed_arguments(parser, false)); 364 | } 365 | 366 | // TODO: Block of bad code 367 | 368 | unimplemented!() 369 | 370 | //arg 371 | }, 372 | _ => { 373 | call_type = ExprCallType::Array; 374 | 375 | unimplemented!() 376 | } 377 | }; 378 | 379 | Ok(Expr::new_at( 380 | ExprValue::GetAttr { 381 | node: Box::new(node), 382 | arg: Box::new(arg), 383 | arguments: arguments, 384 | call_type: call_type 385 | }, 386 | line 387 | )) 388 | } 389 | 390 | pub fn parse_filter_expression<'p, 'c>(parser: &mut Parser<'p, 'c>, expr: Expr<'c>) 391 | -> TemplateResult> 392 | { 393 | trace!("parse_filter_expression"); 394 | unimplemented!() 395 | } 396 | 397 | pub fn parse_unnamed_arguments<'p, 'c>(parser: &mut Parser<'p, 'c>, definition: bool) 398 | -> TemplateResult>> 399 | { 400 | trace!("parse_unnamed_arguments, definition {:?}", definition); 401 | 402 | let mut args = Vec::new(); 403 | 404 | try!(parser.expect_or_error(TokenValueRef::Punctuation('('), TemplateError::ListOfArgumentsMustBeginWithParenthesis)); 405 | 406 | while !try!(parser.test(TokenValueRef::Punctuation(')'))) { 407 | if args.len() > 0 { 408 | try!(parser.expect_or_error(TokenValueRef::Punctuation(','), TemplateError::ArgumentsMustBeSeparatedByComma)); 409 | } 410 | 411 | let value = if definition { 412 | unreachable!("argument definition parsing not implemented"); 413 | } else { 414 | try!(parse_expression(parser, 0)) 415 | }; 416 | 417 | if definition { 418 | unreachable!("argument definition parsing not implemented"); 419 | } else { 420 | args.push(value); 421 | } 422 | } 423 | try!(parser.expect_or_error(TokenValueRef::Punctuation(')'), TemplateError::ListOfArgumentsMustCloseWithParenthesis)); 424 | 425 | Ok(args) 426 | } 427 | 428 | pub fn parse_named_arguments<'p, 'c>(parser: &mut Parser<'p, 'c>, definition: bool) 429 | -> TemplateResult, Expr<'c>)>> 430 | { 431 | trace!("parse_named_arguments, definition {:?}", definition); 432 | 433 | let mut args = Vec::new(); 434 | 435 | try!(parser.expect_or_error(TokenValueRef::Punctuation('('), TemplateError::ListOfArgumentsMustBeginWithParenthesis)); 436 | 437 | while !try!(parser.test(TokenValueRef::Punctuation(')'))) { 438 | if args.len() > 0 { 439 | try!(parser.expect_or_error(TokenValueRef::Punctuation(','), 
TemplateError::ArgumentsMustBeSeparatedByComma)); 440 | } 441 | 442 | let (name_expr, token) = if definition { 443 | let name = try!(parser.expect_name()); 444 | let token = try!(parser.current()); 445 | (Expr::new_name(name, token.line), token) 446 | } else { 447 | (try!(parse_expression(parser, 0)), try!(parser.current())) 448 | }; 449 | 450 | let (name, value) = if try!(parser.skip_to_next_if(TokenValueRef::Operator("="))) { 451 | let token = try!(parser.current()); 452 | 453 | let name = match name_expr { 454 | Expr { value: ExprValue::Name(n), .. } => n, 455 | other => return Err( 456 | TemplateError::ParameterNameMustBeAString { 457 | given: format!("{:?}", other) 458 | }.at(token.line) 459 | ), 460 | }; 461 | 462 | let value = if definition { 463 | let value = try!(parse_primary_expression(parser)); 464 | 465 | if !value.is_constant() { 466 | return Err(TemplateError::DefaultValueForArgumentMustBeConstant.at(try!(parser.current()).line)); 467 | } 468 | 469 | value 470 | } else { 471 | try!(parse_expression(parser, 0)) 472 | }; 473 | 474 | (Some(name), value) 475 | } else { 476 | (None, name_expr) 477 | }; 478 | 479 | args.push(if definition { 480 | match name { 481 | None => ( 482 | Some(match value { 483 | Expr { value: ExprValue::Name(n), .. } => n, 484 | other => unreachable!("twig bug: expected that expression is a name"), 485 | }), 486 | Expr::new_null(try!(parser.current()).line) 487 | ), 488 | Some(name) => (Some(name), value), 489 | } 490 | } else { 491 | (name, value) 492 | }) 493 | } 494 | try!(parser.expect_or_error(TokenValueRef::Punctuation(')'), TemplateError::ListOfArgumentsMustCloseWithParenthesis)); 495 | 496 | Ok(args) 497 | } 498 | 499 | pub fn parse_conditional_expression<'p, 'c>(parser: &mut Parser<'p, 'c>, mut expr: Expr<'c>) 500 | -> TemplateResult> 501 | { 502 | trace!("parse_conditional_expression"); 503 | 504 | while try!(parser.skip_to_next_if(TokenValueRef::Punctuation('?'))) { 505 | let (expr2, expr3) = 506 | if !try!(parser.skip_to_next_if(TokenValueRef::Punctuation(':'))) { 507 | let expr2 = try!(parse_expression(parser, 0)); 508 | if try!(parser.skip_to_next_if(TokenValueRef::Punctuation(':'))) { 509 | (expr2, try!(parse_expression(parser, 0))) 510 | } else { 511 | (expr2, Expr::new_str_constant("", try!(parser.current()).line)) 512 | } 513 | } else { 514 | (expr.clone(), try!(parse_expression(parser, 0))) 515 | }; 516 | expr = Expr::new_at(ExprValue::Conditional { 517 | expr: Box::new(expr), 518 | yay: Box::new(expr2), 519 | nay: Box::new(expr3) 520 | }, try!(parser.current()).line); 521 | } 522 | 523 | Ok(expr) 524 | } 525 | -------------------------------------------------------------------------------- /src/nodes/parser/mod.rs: -------------------------------------------------------------------------------- 1 | use std::iter::Peekable; 2 | use std::collections::HashMap; 3 | use tokens::{ TokenRef, TokenValueRef, TokenValue, TokenIter }; 4 | use environment::ParsingEnvironment; 5 | use error::{ TemplateResult, TemplateError, Received }; 6 | use operator::{ OperatorOptions, OperatorKind }; 7 | use uuid::Uuid; 8 | 9 | pub mod body; 10 | pub mod module; 11 | pub mod expr; 12 | 13 | #[derive(Copy, Clone)] 14 | pub struct ImportedFunction<'c> { 15 | pub uuid: Uuid, 16 | pub name: &'c str, 17 | pub alias: &'c str, 18 | } 19 | 20 | impl<'c> ImportedFunction<'c> { 21 | pub fn new<'r>(uuid: Uuid, alias: &'r str, name: &'r str) -> ImportedFunction<'r> { 22 | ImportedFunction { 23 | uuid: uuid, name: name, alias: alias 24 | } 25 | } 26 | } 27 | 28 | pub 
struct ImportedSymbols<'c> { 29 | pub functions: HashMap<&'c str, ImportedFunction<'c>> 30 | } 31 | 32 | impl<'c> ImportedSymbols<'c> { 33 | pub fn new<'r>() -> ImportedSymbols<'r> { 34 | ImportedSymbols { 35 | functions: HashMap::new() 36 | } 37 | } 38 | } 39 | 40 | pub trait Parse<'c> { 41 | type Output; 42 | 43 | fn parse<'p>(parser: &mut Parser<'p, 'c>) 44 | -> TemplateResult; 45 | } 46 | 47 | /// Helpers for manipulating and inspecting token iterators when creating AST. 48 | /// 49 | /// Has methods to inspect state, like "current" token, and advance to next. 50 | /// 51 | /// Current token is actually implemented as "peekable" next token. However, 52 | /// in all parsing code this "peekable" becomes "current". 53 | pub struct Parser<'p, 'c: 'p> 54 | { 55 | /// Project options for parsing, containing data collected from all added 56 | /// extensions. 57 | pub env: &'p ParsingEnvironment, 58 | /// Token stream. 59 | pub tokens: Peekable<&'p mut TokenIter<'p, 'c>>, 60 | /// Imported symbol stack. 61 | pub imported_symbols: Vec>, 62 | } 63 | 64 | impl<'p, 'c: 'p> Parser<'p, 'c> 65 | { 66 | pub fn new<'r, 'z>( 67 | env: &'r ParsingEnvironment, 68 | tokens: &'r mut TokenIter<'r, 'z> 69 | ) -> Parser<'r, 'z> 70 | { 71 | Parser { 72 | env: env, 73 | tokens: tokens.peekable(), 74 | imported_symbols: vec![ImportedSymbols::new()], 75 | } 76 | } 77 | 78 | pub fn push_local_scope<'r>(&'r mut self) { 79 | self.imported_symbols.push(ImportedSymbols::new()); 80 | } 81 | 82 | pub fn pop_local_scope<'r>(&'r mut self) { 83 | self.imported_symbols.pop(); 84 | } 85 | 86 | /// Registers pecified alias as imported function, further parsing might 87 | /// depend on this (use this function). 88 | pub fn add_imported_function<'r>(&'r mut self, alias: &'c str, name: &'c str) -> Uuid { 89 | let uuid = Uuid::new_v4(); 90 | self.imported_symbols 91 | .last_mut().unwrap() 92 | .functions 93 | .insert(alias, ImportedFunction::new(uuid.clone(), alias, name)); 94 | uuid 95 | } 96 | 97 | /// Finds a function that was previosly imported in this or parent scope. 98 | pub fn get_imported_function<'r>(&'r self, name: &str) -> Option> { 99 | for symbols in &self.imported_symbols { 100 | if let Some(found) = symbols.functions.get(name) { 101 | return Some(*found); 102 | } 103 | } 104 | None 105 | } 106 | 107 | /// Get current token or fail. 108 | /// 109 | /// Returns current token, does not modify iterator position. 110 | /// Expects current token to exist, and if it is not (the end of file), returns 111 | /// UnexpectedEndOfTemplate error. 112 | pub fn current<'r>(&'r mut self) -> TemplateResult> 113 | { 114 | Ok(match self.tokens.peek() { 115 | Some(&Ok(ref t)) => t.clone(), 116 | None => return Err(TemplateError::UnexpectedEndOfTemplate.at(1)), 117 | Some(&Err(ref e)) => return Err(e.clone()), 118 | }) 119 | } 120 | 121 | /// Get current token or the end of stream. 122 | /// 123 | /// Returns current token, does not modify iterator position. 124 | /// If the end of stream, returns None. 125 | pub fn maybe_current<'r>(&'r mut self) -> TemplateResult>> 126 | { 127 | Ok(match self.tokens.peek() { 128 | Some(&Ok(ref t)) => Some(t.clone()), 129 | None => None, 130 | Some(&Err(ref e)) => return Err(e.clone()), 131 | }) 132 | } 133 | 134 | /// Advances to the next token and returns previous. 135 | /// 136 | /// Expects the next token to exist. If it does not exist (the end of file), returns 137 | /// UnexpectedEndOfTemplate error. 
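The `current` and `maybe_current` helpers above, and `next` just below, are built on `std::iter::Peekable`: what the parser calls the "current" token is simply the peeked next token of the underlying iterator. A minimal, std-only sketch of that convention (plain strings stand in for `TokenRef`):

```rust
fn main() {
    // "Current" is the peeked next token; peeking never advances the stream.
    let mut tokens = vec!["{{", "name", "}}"].into_iter().peekable();

    assert_eq!(tokens.peek(), Some(&"{{"));   // current token, position unchanged
    assert_eq!(tokens.next(), Some("{{"));    // same token returned, stream advances
    assert_eq!(tokens.peek(), Some(&"name")); // the new "current"
}
```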
138 | pub fn next<'r>(&'r mut self) -> TemplateResult> 139 | { 140 | let token = match self.tokens.peek() { 141 | Some(&Ok(ref t)) => t.clone(), 142 | None => return Err(TemplateError::UnexpectedEndOfTemplate.at(1)), 143 | Some(&Err(ref e)) => return Err(e.clone()), 144 | }; 145 | 146 | match self.tokens.next() { 147 | None => return Err(TemplateError::UnexpectedEndOfTemplate.at(token.line)), 148 | Some(Err(e)) => return Err(e), 149 | _ => (), 150 | }; 151 | 152 | Ok(token) 153 | } 154 | 155 | /// Advances to the next token if expected token value is the same as current and 156 | /// returns current. 157 | /// 158 | /// Expects these tokens to exist. If they do not exist (the end of file), returns 159 | /// UnexpectedEndOfTemplate error. 160 | pub fn skip_to_next_if<'r>(&'r mut self, expected: TokenValueRef<'c>) -> TemplateResult 161 | { 162 | let (line, skip) = { 163 | let token = match self.tokens.peek() { 164 | Some(&Ok(ref token)) => token, 165 | _ => return Err(TemplateError::UnexpectedEndOfTemplate.at(1)), 166 | }; 167 | (token.line, token.value == expected) 168 | }; 169 | if skip { 170 | match self.tokens.next() { 171 | Some(Ok(_)) => Ok(true), 172 | None => return Err(TemplateError::UnexpectedEndOfTemplate.at(line)), 173 | Some(Err(e)) => return Err(e), 174 | } 175 | } else { 176 | Ok(false) 177 | } 178 | } 179 | 180 | /// Expects the current token to match value and advances to next token. 181 | /// 182 | /// Error condition same as `expect_match_or`. 183 | pub fn expect<'r>(&'r mut self, expected: TokenValueRef<'c>) -> TemplateResult> 184 | { 185 | self.expect_match_or( 186 | |token| if token.value == expected { 187 | Ok(token.clone()) 188 | } else { 189 | Err( 190 | TemplateError::ExpectedOtherTokenValue((token.value.into(), expected.into())) 191 | .at(token.line) 192 | ) 193 | } 194 | ) 195 | } 196 | 197 | /// Expects the current token to be name type and advances to next token. 198 | /// 199 | /// Returns found name string. 200 | /// 201 | /// Error condition same as `expect_match_or`. 202 | pub fn expect_name<'r>(&'r mut self) -> TemplateResult<&'c str> 203 | { 204 | self.expect_match_or( 205 | |token| match token.value { 206 | TokenValueRef::Name(name) => Ok(name), 207 | _ => Err( 208 | TemplateError::ExpectedTokenTypeButReceived( 209 | (TokenValue::Name("".into()), Received::Token(token.value.into())) 210 | ).at(token.line) 211 | ) 212 | } 213 | ) 214 | } 215 | 216 | /// Expects the current token to match value and advances to the next token. 217 | /// 218 | /// Error condition same as `expect_match_or`. 219 | pub fn expect_or_error<'r>(&'r mut self, expected: TokenValueRef<'c>, error_message: TemplateError) -> TemplateResult> 220 | { 221 | self.expect_match_or( 222 | |token| if token.value == expected { 223 | Ok(token.clone()) 224 | } else { 225 | Err(error_message.at(token.line)) 226 | } 227 | ) 228 | } 229 | 230 | /// Expects the current token to pass `check` and advances to next token. 231 | /// 232 | /// Expects these tokens (current and next) to exist. If they do not exist (the end of file), 233 | /// returns `UnexpectedEndOfTemplate` error. 
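All of the `expect_*` helpers funnel into `expect_match_or`, defined next: peek at the current token, run a caller-supplied check against it, then advance regardless and return the check's result. A standalone sketch of that shape (std only; `Tok` is a stand-in type for this illustration, not part of the crate):

```rust
use std::iter::Peekable;

#[derive(Debug, PartialEq)]
enum Tok {
    Name(&'static str),
    Punct(char),
}

// Peek, check, then advance past the inspected token either way,
// mirroring the `expect_match_or` pattern.
fn expect_punct<I>(it: &mut Peekable<I>, want: char) -> Result<(), String>
where
    I: Iterator<Item = Tok>,
{
    let res = match it.peek() {
        Some(&Tok::Punct(c)) if c == want => Ok(()),
        Some(other) => Err(format!("expected {:?}, found {:?}", want, other)),
        None => Err("unexpected end of template".into()),
    };
    it.next(); // advance after the check, like `try!(self.next())` above
    res
}

fn main() {
    let mut toks = vec![Tok::Punct('('), Tok::Name("x")].into_iter().peekable();
    assert!(expect_punct(&mut toks, '(').is_ok());
    assert_eq!(toks.peek(), Some(&Tok::Name("x")));
}
```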
234 | pub fn expect_match_or<'r, C, T>(&'r mut self, check: C) -> TemplateResult 235 | where 236 | C: for<'a> FnOnce(&'a TokenRef<'c>) -> TemplateResult 237 | { 238 | let res = match self.tokens.peek() { 239 | Some(&Ok(ref t)) => { 240 | check(&t) 241 | }, 242 | None => return Err(TemplateError::UnexpectedEndOfTemplate.at(1)), 243 | Some(&Err(ref e)) => return Err(e.clone()), 244 | }; 245 | try!(self.next()); 246 | 247 | res 248 | } 249 | 250 | /// Test the current token to match value. 251 | /// 252 | /// Expects these token to exist. If it does not exist (the end of file), returns 253 | /// UnexpectedEndOfTemplate error. 254 | pub fn test<'r>(&'r mut self, expected: TokenValueRef<'c>) -> TemplateResult 255 | { 256 | match self.tokens.peek() { 257 | Some(&Ok(ref t)) => { 258 | if t.value == expected { 259 | Ok(true) 260 | } else { 261 | Ok(false) 262 | } 263 | }, 264 | None => Err(TemplateError::UnexpectedEndOfTemplate.at(1)), 265 | Some(&Err(ref e)) => Err(e.clone()), 266 | } 267 | } 268 | 269 | /// Returns options structure for specified operator. 270 | /// 271 | /// Operator must exist in environment, otherwise panics. 272 | pub fn get_operator_options<'r>(&'r self, op_str: &'c str) -> OperatorOptions { 273 | self.env.operators 274 | .get(op_str) 275 | .cloned() 276 | .unwrap_or(OperatorOptions { precedence: None, kind: OperatorKind::Other }) 277 | } 278 | } 279 | -------------------------------------------------------------------------------- /src/nodes/parser/module.rs: -------------------------------------------------------------------------------- 1 | use nodes::{ Parse, Parser, Module }; 2 | use nodes::body::Body; 3 | use error::TemplateResult; 4 | 5 | impl<'c> Parse<'c> for Module<'c> { 6 | type Output = Module<'c>; 7 | 8 | fn parse<'r>(parser: &mut Parser<'r, 'c>) 9 | -> TemplateResult> 10 | { 11 | trace!("Module::parse"); 12 | 13 | let mut module = Module::new(); 14 | let body = try!(Body::parse(parser)); 15 | 16 | module.body = body; 17 | 18 | Ok(module) 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/nodes/token_parser.rs: -------------------------------------------------------------------------------- 1 | use std::fmt; 2 | use super::TokenParserExtension; 3 | 4 | pub struct TokenParser { 5 | pub tag: &'static str, 6 | pub extension: Box, 7 | } 8 | 9 | impl fmt::Debug for TokenParser { 10 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 11 | write!(f, "({:?}, extension)", self.tag) 12 | } 13 | } 14 | 15 | impl TokenParser { 16 | pub fn new(parser_extension: E) 17 | -> TokenParser 18 | where E: TokenParserExtension 19 | { 20 | TokenParser { 21 | tag: parser_extension.get_tag(), 22 | extension: Box::new(parser_extension), 23 | } 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /src/operator/mod.rs: -------------------------------------------------------------------------------- 1 | use std::cmp::Ordering; 2 | use std::fmt; 3 | use value::{ Value }; 4 | use error::{ RuntimeError, RuntimeResult }; 5 | use function::Callable; 6 | 7 | /// Operator kind. 8 | #[derive(PartialEq, Debug, Copy, Clone)] 9 | pub enum OperatorKind { 10 | /// Single argument operator, i.e negation. 11 | Unary { value: &'static str }, 12 | /// Two argument operator, i.e sum. 13 | Binary { value: &'static str, associativity: Associativity }, 14 | /// Any operator handled by extension (i.e. the "=" operator). 
15 | Other, 16 | } 17 | 18 | impl OperatorKind { 19 | pub fn new_binary(value: &'static str, associativity: Associativity) -> OperatorKind { 20 | OperatorKind::Binary { value: value, associativity: associativity } 21 | } 22 | 23 | pub fn new_binary_left(value: &'static str) -> OperatorKind { 24 | OperatorKind::Binary { value: value, associativity: Associativity::Left } 25 | } 26 | 27 | pub fn new_binary_right(value: &'static str) -> OperatorKind { 28 | OperatorKind::Binary { value: value, associativity: Associativity::Right } 29 | } 30 | 31 | pub fn new_unary(value: &'static str) -> OperatorKind { 32 | OperatorKind::Unary { value: value } 33 | } 34 | 35 | pub fn new_other() -> OperatorKind { 36 | OperatorKind::Other 37 | } 38 | } 39 | 40 | /// Operator options for parsing, sets precedence and weather it is unary/binary. 41 | #[derive(Debug, Copy, Clone)] 42 | pub struct OperatorOptions { 43 | pub precedence: Option, 44 | pub kind: OperatorKind, 45 | } 46 | 47 | impl OperatorOptions { 48 | 49 | pub fn new_binary(chars: &'static str, precedence: u16, associativity: Associativity) -> OperatorOptions { 50 | OperatorOptions { 51 | precedence: Some(precedence), 52 | kind: OperatorKind::new_binary(chars, associativity), 53 | } 54 | } 55 | 56 | pub fn new_binary_left(chars: &'static str, precedence: u16) -> OperatorOptions { 57 | OperatorOptions::new_binary(chars, precedence, Associativity::Left) 58 | } 59 | 60 | pub fn new_binary_right(chars: &'static str, precedence: u16) -> OperatorOptions { 61 | OperatorOptions::new_binary(chars, precedence, Associativity::Right) 62 | } 63 | 64 | pub fn new_unary(chars: &'static str, precedence: u16) -> OperatorOptions { 65 | OperatorOptions { 66 | precedence: Some(precedence), 67 | kind: OperatorKind::new_unary(chars), 68 | } 69 | } 70 | 71 | pub fn new_other() -> OperatorOptions { 72 | OperatorOptions { 73 | precedence: None, 74 | kind: OperatorKind::new_other(), 75 | } 76 | } 77 | } 78 | 79 | /// Represents environment operator. 80 | pub struct Operator { 81 | /// Operator options. 82 | pub options: OperatorOptions, 83 | /// Operator callable. 
84 | pub callable: Callable, 85 | } 86 | 87 | impl fmt::Debug for Operator { 88 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 89 | write!(f, "({:?}, callable)", self.options) 90 | } 91 | } 92 | 93 | impl Operator { 94 | 95 | pub fn new_binary( 96 | chars: &'static str, 97 | precedence: u16, 98 | associativity: Associativity, 99 | callable: F 100 | ) 101 | -> Operator 102 | where 103 | F: for<'e> Fn(&'e Value, &'e Value) -> RuntimeResult 104 | { 105 | Operator { 106 | options: OperatorOptions::new_binary(chars, precedence, associativity), 107 | callable: Callable::Dynamic(Box::new(move |args| { 108 | if args.len() != 2 { 109 | return Err( 110 | RuntimeError::InvalidArgumentCount { 111 | defined: 2, 112 | given: args.len() 113 | } 114 | ) 115 | } 116 | 117 | callable( 118 | unsafe { args.get_unchecked(0) }, 119 | unsafe { args.get_unchecked(1) } 120 | ) 121 | })), 122 | } 123 | } 124 | 125 | pub fn new_binary_left( 126 | chars: &'static str, 127 | precedence: u16, 128 | callable: F 129 | ) 130 | -> Operator 131 | where 132 | F: for<'e> Fn(&'e Value, &'e Value) -> RuntimeResult 133 | { 134 | Operator::new_binary( 135 | chars, 136 | precedence, 137 | Associativity::Left, 138 | callable 139 | ) 140 | } 141 | 142 | pub fn new_binary_right( 143 | chars: &'static str, 144 | precedence: u16, 145 | callable: F 146 | ) 147 | -> Operator 148 | where 149 | F: for<'e> Fn(&'e Value, &'e Value) -> RuntimeResult 150 | { 151 | Operator::new_binary( 152 | chars, 153 | precedence, 154 | Associativity::Right, 155 | callable 156 | ) 157 | } 158 | 159 | pub fn new_unary( 160 | chars: &'static str, 161 | precedence: u16, 162 | callable: F 163 | ) 164 | -> Operator 165 | where 166 | F: for<'e> Fn(&'e Value) -> RuntimeResult 167 | { 168 | Operator { 169 | options: OperatorOptions::new_unary(chars, precedence), 170 | callable: Callable::Dynamic(Box::new(move |args| { 171 | if args.len() != 1 { 172 | return Err( 173 | RuntimeError::InvalidArgumentCount { 174 | defined: 1, 175 | given: args.len() 176 | } 177 | ) 178 | } 179 | 180 | callable( 181 | unsafe { args.get_unchecked(0) } 182 | ) 183 | })), 184 | } 185 | } 186 | } 187 | 188 | /// Operator associativity. 
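Before moving on to `Associativity`, a usage sketch of the `Operator` constructors above. The module paths (`twig::operator`, `twig::value`, `twig::error`), the precedence value `30`, and reading the stripped generics as `RuntimeResult<Value>` are all assumptions of this sketch, not something this file pins down:

```rust
use twig::error::RuntimeResult;
use twig::operator::Operator;
use twig::value::Value;

// Hypothetical registration of a left-associative "+" operator.
// The precedence value 30 is purely illustrative.
fn plus_operator() -> Operator {
    Operator::new_binary_left("+", 30, |a: &Value, b: &Value| -> RuntimeResult<Value> {
        match (a, b) {
            (&Value::Int(x), &Value::Int(y)) => Ok(Value::Int(x + y)),
            // A fuller implementation would coerce other kinds or return a RuntimeError.
            _ => Ok(Value::Null),
        }
    })
}

fn main() {
    let _plus = plus_operator();
}
```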
189 | #[derive(Debug, Copy, Clone, Eq, PartialEq)] 190 | pub enum Associativity { 191 | Left, 192 | Right, 193 | } 194 | 195 | impl PartialOrd for Associativity { 196 | fn partial_cmp(&self, other: &Associativity) -> Option { 197 | match (*self, *other) { 198 | (Associativity::Left, Associativity::Right) => Some(Ordering::Less), 199 | (Associativity::Right, Associativity::Left) => Some(Ordering::Greater), 200 | _ => Some(Ordering::Equal), 201 | } 202 | } 203 | } 204 | 205 | impl Ord for Associativity { 206 | fn cmp(&self, other: &Associativity) -> Ordering { 207 | match (*self, *other) { 208 | (Associativity::Left, Associativity::Right) => Ordering::Less, 209 | (Associativity::Right, Associativity::Left) => Ordering::Greater, 210 | _ => Ordering::Equal, 211 | } 212 | } 213 | } 214 | 215 | #[cfg(test)] 216 | mod test { 217 | use super::Associativity; 218 | 219 | #[test] 220 | fn associativity_left_should_be_less_than_right() { 221 | assert!(Associativity::Left < Associativity::Right); 222 | } 223 | 224 | #[test] 225 | fn associativity_right_should_be_greater_than_left() { 226 | assert!(Associativity::Right > Associativity::Left); 227 | } 228 | 229 | #[test] 230 | fn associativity_right_should_be_equal_to_right() { 231 | assert!(Associativity::Right == Associativity::Right); 232 | } 233 | } 234 | -------------------------------------------------------------------------------- /src/tokens/lexer/delimiters.rs: -------------------------------------------------------------------------------- 1 | #[derive(Copy, Clone)] 2 | pub struct Delimiters { 3 | pub start: &'static str, 4 | pub end: &'static str, 5 | } 6 | 7 | impl Delimiters { 8 | pub fn new(start: &'static str, end: &'static str) -> Delimiters { 9 | Delimiters { 10 | start: start, 11 | end: end, 12 | } 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/tokens/lexer/matchers.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashSet; 2 | use regex::{ Regex, quote }; 3 | 4 | use tokens::LexerOptions; 5 | 6 | pub struct Matchers { 7 | pub whitespace: Regex, 8 | pub regex_name: Regex, 9 | pub regex_number: Regex, 10 | pub regex_string: Regex, 11 | pub regex_dq_string_delim: Regex, 12 | pub lex_var: Regex, 13 | pub lex_block: Regex, 14 | pub lex_raw_data: Regex, 15 | pub lex_verbatim_data: Regex, 16 | pub lex_operator: Regex, 17 | pub lex_comment: Regex, 18 | pub lex_block_raw: Regex, 19 | pub lex_block_line: Regex, 20 | pub lex_tokens_start: Regex, 21 | pub interpolation_start: Regex, 22 | pub interpolation_end: Regex, 23 | } 24 | 25 | impl Matchers { 26 | pub fn new( 27 | options: &LexerOptions, 28 | operators: &HashSet<&'static str> 29 | ) -> Matchers { 30 | Matchers { 31 | whitespace: { 32 | Regex::new( 33 | r#"\A\s+"# 34 | ).ok().expect("Failed to init whitespace") 35 | }, 36 | regex_name: { 37 | Regex::new( 38 | r#"\A[a-zA-Z_\x7F-\xFF][a-zA-Z0-9_\x7F-\xFF]*"# 39 | ).ok().expect("Failed to init regex_name") 40 | }, 41 | regex_number: { 42 | Regex::new( 43 | r#"\A[0-9]+(?:\.[0-9]+)?"# 44 | ).ok().expect("Failed to init regex_number") 45 | }, 46 | regex_string: { 47 | Regex::new( 48 | r#"\A(?s:"([^#"\\]*(?:\\.[^#"\\]*)*)"|'([^'\\]*(?:\\.[^'\\]*)*)')"# 49 | ).ok().expect("Failed to init regex_string") 50 | }, 51 | regex_dq_string_delim: { 52 | Regex::new( 53 | r#"\A""# 54 | ).ok().expect("Failed to init regex_dq_string_delim") 55 | }, 56 | // regex_dq_string_part - no negative forward lookup in rust regex lib 57 | lex_var: { 58 | 
Regex::new( 59 | &format!( 60 | r#"\A(?:\s*{}{}\s*|\s*{})"#, 61 | "e(&options.whitespace_trim), 62 | "e(&options.tag_variable.end), 63 | "e(&options.tag_variable.end) 64 | ) 65 | ).ok().expect("Failed to init lex_var") 66 | }, 67 | lex_block: { 68 | Regex::new( 69 | &format!( 70 | r#"\A\s*(?:{}{}\s*|\s*{})\n?"#, 71 | "e(&options.whitespace_trim), 72 | "e(&options.tag_block.end), 73 | "e(&options.tag_block.end) 74 | ) 75 | ).ok().expect("Failed to init lex_block") 76 | }, 77 | lex_raw_data: { 78 | Regex::new( 79 | &format!( 80 | r#"(?s)({}{}|{})\s*(?:endraw)\s*(?:{}{}\s*|\s*{})"#, 81 | "e(&options.tag_block.start), 82 | "e(&options.whitespace_trim), 83 | "e(&options.tag_block.start), 84 | "e(&options.whitespace_trim), 85 | "e(&options.tag_block.end), 86 | "e(&options.tag_block.end) 87 | ) 88 | ).ok().expect("Failed to init lex_raw_data") 89 | }, 90 | lex_verbatim_data: { 91 | Regex::new( 92 | &format!( 93 | r#"(?s)({}{}|{})\s*(?:endverbatim)\s*(?:{}{}\s*|\s*{})"#, 94 | "e(&options.tag_block.start), 95 | "e(&options.whitespace_trim), 96 | "e(&options.tag_block.start), 97 | "e(&options.whitespace_trim), 98 | "e(&options.tag_block.end), 99 | "e(&options.tag_block.end) 100 | ) 101 | ).ok().expect("Failed to init lex_verbatim_data") 102 | }, 103 | lex_operator: Self::get_operator_regex( 104 | operators 105 | ), 106 | lex_comment: { 107 | Regex::new( 108 | &format!( 109 | r#"(?s)(?:{}{}\s*|{})\n?"#, 110 | "e(&options.whitespace_trim), 111 | "e(&options.tag_comment.end), 112 | "e(&options.tag_comment.end) 113 | ) 114 | ).ok().expect("Failed to init lex_comment") 115 | }, 116 | lex_block_raw: { 117 | Regex::new( 118 | &format!( 119 | r#"\A(?s)\s*(raw|verbatim)\s*(?:{}{}\s*|\s*{})"#, 120 | "e(&options.whitespace_trim), 121 | "e(&options.tag_block.end), 122 | "e(&options.tag_block.end) 123 | ) 124 | ).ok().expect("Failed to init lex_block_raw") 125 | }, 126 | lex_block_line: { 127 | Regex::new( 128 | &format!( 129 | r#"\A(?s)\s*line\s+(\d+)\s*{}"#, 130 | "e(&options.tag_block.end) 131 | ) 132 | ).ok().expect("Failed to init lex_block_line") 133 | }, 134 | lex_tokens_start: { 135 | Regex::new( 136 | &format!( 137 | r#"(?s)({}|{}|{})({})?"#, 138 | "e(&options.tag_variable.start), 139 | "e(&options.tag_block.start), 140 | "e(&options.tag_comment.start), 141 | "e(&options.whitespace_trim) 142 | ) 143 | ).ok().expect("Failed to init lex_tokens_start") 144 | }, 145 | interpolation_start: { 146 | Regex::new( 147 | &format!( 148 | r#"\A{}\s*"#, 149 | "e(&options.interpolation.start) 150 | ) 151 | ).ok().expect("Failed to init interpolation_start") 152 | }, 153 | interpolation_end: { 154 | Regex::new( 155 | &format!( 156 | r#"\A\s*{}"#, 157 | "e(&options.interpolation.end) 158 | ) 159 | ).ok().expect("Failed to init interpolation_end") 160 | }, 161 | } 162 | } 163 | 164 | /// If matches strign contents up to #{, return pos as (start, end). 165 | /// 166 | /// This is /[^#"\\]*(?:(?:\\.|#(?!\{))[^#"\\]*)*/As regular expression written 167 | /// manually. 
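The string-part matcher that follows is written by hand because the `regex` crate offers no negative lookahead for the `#(?!\{)` case; the other patterns above can all be assembled with `format!` plus escaping. A standalone sketch of that assembly for `lex_tokens_start`, using the current `regex` crate's `regex::escape` in place of the deprecated `quote` (the modern API is an assumption of this sketch):

```rust
use regex::Regex;

fn main() {
    // Default delimiters, mirroring LexerOptions::default() in options.rs.
    let (var, block, comment, trim) = ("{{", "{%", "{#", "-");

    let lex_tokens_start = Regex::new(&format!(
        r"(?s)({}|{}|{})({})?",
        regex::escape(var),
        regex::escape(block),
        regex::escape(comment),
        regex::escape(trim),
    ))
    .expect("lex_tokens_start should be a valid pattern");

    // The first tag opening in the source is found, with an optional trim marker.
    let m = lex_tokens_start.find("Hello {{ name }}").unwrap();
    assert_eq!(m.as_str(), "{{");
    assert_eq!(m.start(), 6);
}
```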
168 | pub fn match_regex_dq_string_part(&self, code: &str) -> (usize, usize) { 169 | enum MatchMode { 170 | Normal, 171 | Escape, 172 | MaybeInterpolation(usize), 173 | } 174 | 175 | let mut index = 0; 176 | let mut mode = MatchMode::Normal; 177 | 178 | for c in code.chars() { 179 | match mode { 180 | MatchMode::Normal => { 181 | match c { 182 | '\\' => mode = MatchMode::Escape, 183 | '#' => mode = MatchMode::MaybeInterpolation(index), 184 | '"' => return (0, index), 185 | _ => (), 186 | }; 187 | }, 188 | MatchMode::Escape => mode = MatchMode::Normal, 189 | MatchMode::MaybeInterpolation(started_at) => { 190 | match c { 191 | '{' => return (0, started_at), 192 | _ => mode = MatchMode::Normal, 193 | }; 194 | } 195 | }; 196 | 197 | index += 1; 198 | } 199 | 200 | (0, index) 201 | } 202 | 203 | #[allow(deprecated)] 204 | fn get_operator_regex( 205 | operators: &HashSet<&'static str> 206 | ) -> Regex { 207 | let mut all: Vec<_> = Some("=").into_iter() 208 | .chain( 209 | operators.iter() 210 | .map(|v| *v) 211 | ) 212 | .collect(); 213 | 214 | all.sort_by(|a, b| b.len().cmp(&a.len())); 215 | 216 | let mut regex_items = Vec::new(); 217 | 218 | for operator in all { 219 | let length = operator.len(); 220 | 221 | assert!(length > 0); 222 | 223 | // an operator that ends with a character must be followed by 224 | // a whitespace or a parenthesis 225 | let mut r = match operator.chars().last() { 226 | Some(c) if c.is_alphabetic() => format!( 227 | "{}{}", 228 | quote(operator), 229 | r#"[\s()]"# 230 | ), 231 | _ => format!( 232 | "{}", 233 | quote(operator) 234 | ), 235 | }; 236 | 237 | r = r.replace(" ", "\\s+"); 238 | 239 | regex_items.push(r); 240 | } 241 | 242 | let regex_string = format!("\\A(?:{})", ®ex_items.connect("|")); 243 | 244 | match Regex::new( 245 | ®ex_string, 246 | ) { 247 | Ok(regex) => regex, 248 | Err(e) => panic!("Failed to init operator_regex \n{}\n{:?}", regex_string, e), 249 | } 250 | } 251 | } 252 | 253 | #[cfg(test)] 254 | mod test_match_regex_dq_string_part { 255 | use std::collections::HashSet; 256 | use tokens::LexerOptions; 257 | use super::Matchers; 258 | 259 | #[test] 260 | fn should_match_full_str_with_first_esc_char() { 261 | assert_eq!((0, 2), matchers().match_regex_dq_string_part("##")) 262 | } 263 | 264 | #[test] 265 | fn should_match_empty_str() { 266 | assert_eq!((0, 0), matchers().match_regex_dq_string_part("")) 267 | } 268 | 269 | #[test] 270 | fn should_match_up_to_str_end() { 271 | assert_eq!((0, 2), matchers().match_regex_dq_string_part(r#"##"foo"#)) 272 | } 273 | 274 | #[test] 275 | fn should_skip_escaped_str_end() { 276 | assert_eq!((0, 7), matchers().match_regex_dq_string_part(r#"##\"foo"#)) 277 | } 278 | 279 | #[test] 280 | fn should_match_up_to_interpolation_start() { 281 | assert_eq!((0, 3), matchers().match_regex_dq_string_part(r#"aa #{ foo"#)) 282 | } 283 | 284 | #[test] 285 | fn should_skip_escaped_interpolation_start() { 286 | assert_eq!((0, 10), matchers().match_regex_dq_string_part(r#"aa \#{ foo"#)) 287 | } 288 | 289 | fn matchers() -> Matchers { 290 | let options = LexerOptions::default(); 291 | Matchers::new( 292 | &options, 293 | &HashSet::new() 294 | ) 295 | } 296 | } 297 | -------------------------------------------------------------------------------- /src/tokens/lexer/mod.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashSet; 2 | use environment::LexingEnvironment; 3 | use tokens::{ LexerOptions, TokenIter }; 4 | use self::matchers::Matchers; 5 | 6 | mod delimiters; 7 | 
mod matchers; 8 | 9 | pub mod options; 10 | pub mod iter; 11 | 12 | /// Parses template file and converts it to a stream of tokens. 13 | pub struct Lexer { 14 | options: LexerOptions, 15 | matchers: Matchers, 16 | } 17 | 18 | impl Lexer { 19 | 20 | /// Creates a new lexer with specified options and operator list. 21 | pub fn new(options: LexerOptions, operators: &HashSet<&'static str>) -> Lexer { 22 | Lexer { 23 | options: options, 24 | matchers: Matchers::new( 25 | &options, 26 | operators 27 | ), 28 | } 29 | } 30 | 31 | /// Initialize default lexer with default options. 32 | pub fn default(env: &LexingEnvironment) -> Lexer { 33 | Lexer::new( 34 | LexerOptions::default(), 35 | &env.operators 36 | ) 37 | } 38 | 39 | /// Convert provided template into a token stream. 40 | pub fn tokens<'r, 'code>(&'r self, code: &'code str) -> TokenIter<'r, 'code> 41 | { 42 | TokenIter::new(self, code) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /src/tokens/lexer/options.rs: -------------------------------------------------------------------------------- 1 | use super::delimiters::Delimiters; 2 | 3 | /// Lexer options. 4 | #[derive(Copy, Clone)] 5 | pub struct LexerOptions { 6 | pub tag_comment: Delimiters, 7 | pub tag_block: Delimiters, 8 | pub tag_variable: Delimiters, 9 | pub whitespace_trim: &'static str, 10 | pub interpolation: Delimiters, 11 | } 12 | 13 | impl LexerOptions { 14 | pub fn default() -> LexerOptions { 15 | LexerOptions { 16 | tag_comment: Delimiters::new("{#", "#}"), 17 | tag_block: Delimiters::new("{%", "%}"), 18 | tag_variable: Delimiters::new("{{", "}}"), 19 | whitespace_trim: "-", 20 | interpolation: Delimiters::new("#{", "}"), 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/tokens/mod.rs: -------------------------------------------------------------------------------- 1 | /*! 2 | 3 | Produces a token stream from source template. 
4 | 5 | # Summary 6 | 7 | This module is capable of taking a Twig input template, for example, this one: 8 | 9 | ```twig 10 | Hello 11 | {% if world %} 12 | world 13 | {% else %} 14 | {{ other }} 15 | {% endif %} 16 | ``` 17 | 18 | And chopping it into tokens like these: 19 | 20 | ```text 21 | Ok(TokenRef { value: Text("Hello\n"), line: 1 }) 22 | Ok(TokenRef { value: BlockStart, line: 2 }) 23 | Ok(TokenRef { value: Name("if"), line: 2 }) 24 | Ok(TokenRef { value: Name("world"), line: 2 }) 25 | Ok(TokenRef { value: BlockEnd, line: 2 }) 26 | Ok(TokenRef { value: Text(" world\n"), line: 3 }) 27 | Ok(TokenRef { value: BlockStart, line: 4 }) 28 | Ok(TokenRef { value: Name("else"), line: 4 }) 29 | Ok(TokenRef { value: BlockEnd, line: 4 }) 30 | Ok(TokenRef { value: Text(" "), line: 5 }) 31 | Ok(TokenRef { value: VarStart, line: 5 }) 32 | Ok(TokenRef { value: Name("other"), line: 5 }) 33 | Ok(TokenRef { value: VarEnd, line: 5 }) 34 | Ok(TokenRef { value: Text("\n"), line: 5 }) 35 | Ok(TokenRef { value: BlockStart, line: 6 }) 36 | Ok(TokenRef { value: Name("endif"), line: 6 }) 37 | Ok(TokenRef { value: BlockEnd, line: 6 }) 38 | ``` 39 | 40 | Example code for this: 41 | 42 | ```rust 43 | use twig::environment::Environment; 44 | use twig::tokens::Lexer; 45 | 46 | let env = Environment::default().init_all(); 47 | let lexer = Lexer::default(&env.lexing); 48 | 49 | # let source = r#"Hello 50 | # {% if world %} 51 | # world 52 | # {% else %} 53 | # {{ other }} 54 | # {% endif %}"#; 55 | for token in lexer.tokens(source) { 56 | println!("{:?}", token); 57 | } 58 | ``` 59 | 60 | */ 61 | 62 | mod token; 63 | mod lexer; 64 | 65 | pub use self::token::{ 66 | TokenRef, 67 | TokenValue, TokenValueRef, 68 | Const, ConstRef, 69 | ConstNumber, ConstNumberRef, 70 | }; 71 | pub use self::lexer::Lexer; 72 | pub use self::lexer::iter::TokenIter; 73 | pub use self::lexer::options::LexerOptions; 74 | -------------------------------------------------------------------------------- /src/tokens/token.rs: -------------------------------------------------------------------------------- 1 | use std::fmt; 2 | 3 | /// Lexer output token, lexer's output and parser's input. 4 | #[derive(Debug, Clone)] 5 | pub struct TokenRef<'a> { 6 | pub value: TokenValueRef<'a>, 7 | pub line: usize, 8 | } 9 | 10 | /// Token value. 11 | #[derive(PartialEq, Debug, Copy, Clone)] 12 | pub enum TokenValueRef<'a> { 13 | Text(&'a str), 14 | BlockStart, 15 | VarStart, 16 | BlockEnd, 17 | VarEnd, 18 | Name(&'a str), 19 | Value(ConstRef<'a>), 20 | Operator(&'a str), 21 | Punctuation(char), 22 | InterpolationStart, 23 | InterpolationEnd, 24 | CommentStart, // Not in vanilla Twig. 
25 | } 26 | 27 | impl<'a> Into for TokenValueRef<'a> { 28 | fn into(self) -> TokenValue { 29 | match self { 30 | TokenValueRef::Text(t) => TokenValue::Text(t.into()), 31 | TokenValueRef::BlockStart => TokenValue::BlockStart, 32 | TokenValueRef::VarStart => TokenValue::VarStart, 33 | TokenValueRef::BlockEnd => TokenValue::BlockEnd, 34 | TokenValueRef::VarEnd => TokenValue::VarEnd, 35 | TokenValueRef::Name(n) => TokenValue::Name(n.into()), 36 | TokenValueRef::Value(v) => TokenValue::Value(v.into()), 37 | TokenValueRef::Operator(s) => TokenValue::Operator(s.into()), 38 | TokenValueRef::Punctuation(s) => TokenValue::Punctuation(s.into()), 39 | TokenValueRef::InterpolationStart => TokenValue::InterpolationStart, 40 | TokenValueRef::InterpolationEnd => TokenValue::InterpolationEnd, 41 | TokenValueRef::CommentStart => TokenValue::CommentStart, 42 | } 43 | } 44 | } 45 | 46 | /// Token value. 47 | #[derive(PartialEq, Debug, Clone)] 48 | pub enum TokenValue { 49 | Text(String), 50 | BlockStart, 51 | VarStart, 52 | BlockEnd, 53 | VarEnd, 54 | Name(String), 55 | Value(Const), 56 | Operator(String), 57 | Punctuation(char), 58 | InterpolationStart, 59 | InterpolationEnd, 60 | CommentStart, // Not in vanilla Twig. 61 | } 62 | 63 | impl TokenValue { 64 | /// Return english name and value for token. 65 | pub fn get_english(&self) -> (&'static str, Option) { 66 | match *self { 67 | TokenValue::Text(ref v) => ("text", Some(v.to_string())), 68 | TokenValue::BlockStart => ("begin of statement block", None), 69 | TokenValue::VarStart => ("begin of print statement", None), 70 | TokenValue::BlockEnd => ("end of statement block", None), 71 | TokenValue::VarEnd => ("end of print statement", None), 72 | TokenValue::Name(ref n) => ("name", Some(n.to_string())), 73 | TokenValue::Value(Const::Num(ref n)) => ("number", Some(n.to_string())), 74 | TokenValue::Value(Const::Str(ref s)) => ("string", Some(s.to_string())), 75 | TokenValue::Operator(ref s) => ("operator", Some(s.to_string())), 76 | TokenValue::Punctuation(s) => ("punctuation", Some(s.to_string())), 77 | TokenValue::InterpolationStart => ("begin of string interpolation", None), 78 | TokenValue::InterpolationEnd => ("end of string interpolation", None), 79 | TokenValue::CommentStart => ("comment start", None), 80 | } 81 | } 82 | } 83 | 84 | #[derive(PartialEq, Debug, Copy, Clone)] 85 | pub enum ConstRef<'a> { 86 | Num(ConstNumberRef<'a>), 87 | Str(&'a str), 88 | } 89 | 90 | impl<'a> ConstRef<'a> { 91 | pub fn new_big_num<'c>(num: &'c str) -> ConstRef<'c> { 92 | ConstRef::Num(ConstNumberRef::Big(num)) 93 | } 94 | 95 | pub fn new_float<'c>(num: f64) -> ConstRef<'c> { 96 | ConstRef::Num(ConstNumberRef::Float(num)) 97 | } 98 | 99 | pub fn new_int<'c>(num: i64) -> ConstRef<'c> { 100 | ConstRef::Num(ConstNumberRef::Int(num)) 101 | } 102 | 103 | pub fn new_str<'c>(s: &'c str) -> ConstRef<'c> { 104 | ConstRef::Str(s) 105 | } 106 | } 107 | 108 | impl<'a> Into for ConstRef<'a> { 109 | fn into(self) -> Const { 110 | match self { 111 | ConstRef::Num(n) => Const::Num(n.into()), 112 | ConstRef::Str(s) => Const::Str(s.into()), 113 | } 114 | } 115 | } 116 | 117 | #[derive(PartialEq, Debug, Clone)] 118 | pub enum Const { 119 | Num(ConstNumber), 120 | Str(String), 121 | } 122 | 123 | impl fmt::Display for Const { 124 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 125 | match *self { 126 | Const::Num(ref n) => write!(f, "{}", n), 127 | Const::Str(ref s) => write!(f, "{}", s), 128 | } 129 | } 130 | } 131 | 132 | /// Parsed twig number representation. 
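Before the number representations below, a brief usage sketch of the borrowed-to-owned conversions above, relying on the re-exports listed in `src/tokens/mod.rs` (`TokenValue`, `TokenValueRef`, `Const`, `ConstRef`, `ConstNumber`):

```rust
use twig::tokens::{Const, ConstNumber, ConstRef, TokenValue, TokenValueRef};

fn main() {
    // The lexer hands out borrowed token values; owned copies are made via Into
    // when a token needs to outlive the source string (e.g. in error messages).
    let borrowed = TokenValueRef::Value(ConstRef::new_int(42));
    let owned: TokenValue = borrowed.into();

    assert_eq!(owned, TokenValue::Value(Const::Num(ConstNumber::Int(42))));
    assert_eq!(owned.get_english(), ("number", Some("42".to_string())));
}
```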
133 | #[derive(PartialEq, Debug, Copy, Clone)] 134 | pub enum ConstNumberRef<'a> { 135 | Big(&'a str), 136 | Float(f64), 137 | Int(i64), 138 | } 139 | 140 | /// Parsed twig number representation. 141 | #[derive(PartialEq, Debug, Clone)] 142 | pub enum ConstNumber { 143 | Big(String), 144 | Float(f64), 145 | Int(i64), 146 | } 147 | 148 | impl fmt::Display for ConstNumber { 149 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 150 | match *self { 151 | ConstNumber::Big(ref n) => write!(f, "{}", n), 152 | ConstNumber::Float(v) => write!(f, "{}", v), 153 | ConstNumber::Int(i) => write!(f, "{}", i), 154 | } 155 | } 156 | } 157 | 158 | impl<'a> Into for ConstNumberRef<'a> { 159 | fn into(self) -> ConstNumber { 160 | match self { 161 | ConstNumberRef::Big(n) => ConstNumber::Big(n.to_string()), 162 | ConstNumberRef::Float(v) => ConstNumber::Float(v), 163 | ConstNumberRef::Int(v) => ConstNumber::Int(v), 164 | } 165 | } 166 | } 167 | -------------------------------------------------------------------------------- /src/value/mod.rs: -------------------------------------------------------------------------------- 1 | use std::fmt; 2 | use std::cmp::Ordering; 3 | use std::rc::Rc; 4 | use std::cell::RefCell; 5 | use std::collections::HashMap; 6 | use error::{ RuntimeError, RuntimeResult, CastError, CastTarget }; 7 | 8 | pub mod ops; 9 | 10 | const MAX_DEBUG_STRING_LENGTH: usize = 128; 11 | const MAX_DEBUG_ARRAY_LENGTH: usize = 4; 12 | const MAX_DEBUG_HASH_LENGTH: usize = 4; 13 | 14 | /// Value kind that can be used as Hash key. 15 | #[derive(Clone, Hash, PartialEq, Eq, PartialOrd)] 16 | pub enum HashKey { 17 | Int(i64), 18 | Str(String), 19 | } 20 | 21 | impl fmt::Debug for HashKey { 22 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 23 | match *self { 24 | HashKey::Int(ref v) => write!(f, "{}", v), 25 | HashKey::Str(ref v) => write!(f, "{:?}", v), 26 | } 27 | } 28 | } 29 | 30 | /// Represents Twig runtime value. 
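For the `Value` enum defined next, the `From<HashMap<..>>` impls a little further down turn plain string maps into `Value::Hash` entries keyed by `HashKey::Str`. A usage sketch, assuming the crate path `twig::value` for these types:

```rust
use std::collections::HashMap;
use twig::value::{HashKey, Value};

fn main() {
    let mut ctx = HashMap::new();
    ctx.insert("name", "world");

    // From<HashMap<&str, &str>> builds a Hash of owned string keys and values.
    let value: Value = ctx.into();

    if let Value::Hash(hash) = value {
        assert_eq!(
            hash.get(&HashKey::Str("name".into())),
            Some(&Value::Str("world".into()))
        );
    } else {
        unreachable!("the HashMap conversion always yields Value::Hash");
    }
}
```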
31 | pub enum Value { 32 | Null, 33 | Int(i64), 34 | Float(f64), 35 | Str(String), 36 | Array(Vec), 37 | Hash(HashMap), 38 | Obj(Rc>), 39 | Func(Rc Fn(&'r [Value]) -> Option >), 40 | } 41 | 42 | impl<'a> From> for Value { 43 | fn from(value: HashMap<&'a str, &'a str>) -> Value { 44 | let hash = value.into_iter() 45 | .map(|(k, v)| { 46 | (HashKey::Str(k.into()), Value::Str(v.into())) 47 | }) 48 | .collect(); 49 | Value::Hash(hash) 50 | } 51 | } 52 | 53 | impl From> for Value { 54 | fn from(value: HashMap) -> Value { 55 | let hash = value.into_iter() 56 | .map(|(k, v)| { 57 | (HashKey::Str(k), Value::Str(v)) 58 | }) 59 | .collect(); 60 | Value::Hash(hash) 61 | } 62 | } 63 | 64 | impl Clone for Value { 65 | fn clone(&self) -> Value { 66 | match *self { 67 | Value::Null => Value::Null, 68 | Value::Int(ref v) => Value::Int(v.clone()), 69 | Value::Float(ref v) => Value::Float(v.clone()), 70 | Value::Str(ref v) => Value::Str(v.clone()), 71 | Value::Array(ref v) => Value::Array(v.clone()), 72 | Value::Hash(ref v) => Value::Hash(v.clone()), 73 | Value::Obj(ref v) => Value::Obj(v.clone()), 74 | Value::Func(ref v) => Value::Func(v.clone()), 75 | } 76 | } 77 | } 78 | 79 | impl PartialEq for Value { 80 | fn eq(&self, other: &Value) -> bool { 81 | match (self, other) { 82 | (&Value::Null, &Value::Null) => true, 83 | (&Value::Int(ref a), &Value::Int(ref b)) => a.eq(b), 84 | (&Value::Float(ref a), &Value::Float(ref b)) => a.eq(b), 85 | (&Value::Str(ref a), &Value::Str(ref b)) => a.eq(b), 86 | (&Value::Array(ref a), &Value::Array(ref b)) => a.eq(b), 87 | (&Value::Obj(_), &Value::Obj(_)) => false, 88 | (&Value::Func(_), &Value::Func(_)) => false, 89 | _ => false, 90 | } 91 | } 92 | } 93 | 94 | impl Eq for Value {} 95 | 96 | impl PartialOrd for Value { 97 | fn partial_cmp(&self, other: &Value) -> Option { 98 | match (self, other) { 99 | (&Value::Null, &Value::Null) => Some(Ordering::Equal), 100 | (&Value::Int(ref a), &Value::Int(ref b)) => a.partial_cmp(b), 101 | (&Value::Float(ref a), &Value::Float(ref b)) => a.partial_cmp(b), 102 | (&Value::Str(ref a), &Value::Str(ref b)) => a.partial_cmp(b), 103 | (&Value::Array(ref a), &Value::Array(ref b)) => a.partial_cmp(b), 104 | (&Value::Obj(_), &Value::Obj(_)) => None, 105 | (&Value::Func(_), &Value::Func(_)) => None, 106 | _ => None, 107 | } 108 | } 109 | } 110 | 111 | impl fmt::Debug for Value { 112 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 113 | match *self { 114 | Value::Null => write!(f, "null"), 115 | Value::Int(ref v) => write!(f, "{}", v), 116 | Value::Float(ref v) => write!(f, "{}", v), 117 | Value::Str(ref v) => write!(f, "{:?}", ops::to_string_limited(v)), 118 | Value::Array(ref v) => { 119 | let mut list = f.debug_list(); 120 | for (i, item) in v.iter().enumerate() { 121 | list.entry(item); 122 | if i >= MAX_DEBUG_ARRAY_LENGTH { 123 | list.entry(&"..."); 124 | break; 125 | } 126 | } 127 | list.finish() 128 | }, 129 | Value::Hash(ref hash) => { 130 | let mut map = f.debug_map(); 131 | let i = 0; 132 | for (k, v) in hash { 133 | map.entry(k, v); 134 | if i >= MAX_DEBUG_HASH_LENGTH { 135 | map.entry(&"...", &"..."); 136 | break; 137 | } 138 | } 139 | map.finish() 140 | }, 141 | Value::Obj(_) => write!(f, "Object"), 142 | Value::Func(_) => write!(f, "Function"), 143 | } 144 | } 145 | } 146 | 147 | impl Value { 148 | /// If possible, returns this value represented as integer. 
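The `int` cast defined next accepts `Int` values directly and routes strings through `ops::parse_as_numeric`; everything non-numeric reports an `ImpossibleCast` error. A hedged usage sketch (the crate path `twig::value::Value` and reading the stripped return type as `RuntimeResult<i64>` are assumptions):

```rust
use twig::value::Value;

fn main() {
    // Direct integers and numeric strings cast cleanly...
    assert_eq!(Value::Int(7).int().unwrap(), 7);
    assert_eq!(Value::Str("48".into()).int().unwrap(), 48);

    // ...while nulls, arrays and the rest are ImpossibleCast errors.
    assert!(Value::Null.int().is_err());
    assert!(Value::Array(vec![]).int().is_err());
}
```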
149 | pub fn int(self) -> RuntimeResult { 150 | Ok(match self { 151 | Value::Null => return Err(RuntimeError::ImpossibleCast { 152 | target: CastTarget::Int, 153 | reason: CastError::Null, 154 | }), 155 | Value::Int(v) => v, 156 | Value::Float(v) => return ops::float_to_int(v), 157 | Value::Str(v) => { 158 | match ops::parse_as_numeric(&v) { 159 | Ok(ops::ParseAsNumericResult::Int(v)) => v, 160 | Ok(ops::ParseAsNumericResult::Float(v)) => try!(ops::float_to_int(v)), 161 | Err(e) => return Err(e), 162 | } 163 | }, 164 | Value::Array(_) => return Err(RuntimeError::ImpossibleCast { 165 | target: CastTarget::Int, 166 | reason: CastError::Array, 167 | }), 168 | Value::Hash(_) => return Err(RuntimeError::ImpossibleCast { 169 | target: CastTarget::Int, 170 | reason: CastError::Hash, 171 | }), 172 | Value::Obj(_) => return Err(RuntimeError::ImpossibleCast { 173 | target: CastTarget::Int, 174 | reason: CastError::Object, 175 | }), 176 | Value::Func(_) => return Err(RuntimeError::ImpossibleCast { 177 | target: CastTarget::Int, 178 | reason: CastError::Function, 179 | }), 180 | }) 181 | } 182 | } 183 | 184 | /// Twig object abstraction. 185 | pub trait Object { 186 | fn property_error(&self, name: &str) -> RuntimeError { 187 | RuntimeError::ObjectHasNoProperty(name.into()) 188 | } 189 | 190 | fn method_error(&self, name: &str) -> RuntimeError { 191 | RuntimeError::ObjectHasNoMethod(name.into()) 192 | } 193 | 194 | fn get(&self, name: &str) -> RuntimeResult { 195 | Err(self.property_error(name)) 196 | } 197 | 198 | fn set(&mut self, name: &str, _value: Value) -> RuntimeResult<()> { 199 | Err(self.property_error(name)) 200 | } 201 | 202 | fn call(&mut self, name: &str, _values: &[Value]) -> RuntimeResult { 203 | Err(self.method_error(name)) 204 | } 205 | } 206 | 207 | #[cfg(test)] 208 | mod tests { 209 | use super::*; 210 | use error::RuntimeResult; 211 | 212 | struct Point { 213 | x: i64, 214 | y: i64, 215 | } 216 | 217 | impl Object for Point { 218 | fn get(&self, name: &str) -> RuntimeResult { 219 | Ok(match name { 220 | "x" => Value::Int(self.x), 221 | "y" => Value::Int(self.y), 222 | _ => return Err(self.property_error(name)), 223 | }) 224 | } 225 | 226 | fn set(&mut self, name: &str, value: Value) -> RuntimeResult<()> { 227 | Ok(match name { 228 | "x" => self.x = try!(value.int()), 229 | "y" => self.y = try!(value.int()), 230 | _ => return Err(self.property_error(name)), 231 | }) 232 | } 233 | } 234 | 235 | #[test] 236 | fn object_getters_and_setters() { 237 | let mut point = Point { x: 12, y: 13 }; 238 | assert_eq!(point.get("x").ok().unwrap(), Value::Int(12)); 239 | assert_eq!(point.get("y").ok().unwrap(), Value::Int(13)); 240 | 241 | point.set("x", Value::Int(42)); 242 | point.set("y", Value::Int(43)); 243 | assert_eq!(point.get("x").ok().unwrap(), Value::Int(42)); 244 | assert_eq!(point.get("y").ok().unwrap(), Value::Int(43)); 245 | } 246 | 247 | #[test] 248 | fn object_setter_can_convert_values() { 249 | let mut point = Point { x: 12, y: 13 }; 250 | 251 | point.set("x", Value::Str("48".into())); 252 | assert_eq!(point.get("x").ok().unwrap(), Value::Int(48)); 253 | } 254 | } 255 | -------------------------------------------------------------------------------- /src/value/ops.rs: -------------------------------------------------------------------------------- 1 | use std::i64; 2 | use super::MAX_DEBUG_STRING_LENGTH; 3 | use error::{ RuntimeResult, RuntimeError, CastTarget, CastError }; 4 | 5 | #[derive(Debug)] 6 | pub enum ParseAsNumericResult { 7 | Int(i64), 8 | Float(f64), 9 | } 10 | 11 | 
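One thing to flag in `float_to_int`, defined next: `double_fits_long`, near the end of this file, returns `true` precisely when the value lies outside the `i64` range, so the `!double_fits_long(value)` guard appears inverted relative to the function's name and would reject in-range floats. A standalone, std-only sketch with the name and the predicate agreeing:

```rust
// True only for finite doubles inside the i64 range.
fn double_fits_long(v: f64) -> bool {
    v.is_finite() && v >= i64::MIN as f64 && v <= i64::MAX as f64
}

fn float_to_int_checked(v: f64) -> Option<i64> {
    if double_fits_long(v) {
        Some(v as i64) // the cast truncates toward zero, e.g. 3.9 -> 3
    } else {
        None
    }
}

fn main() {
    assert_eq!(float_to_int_checked(3.9), Some(3));
    assert_eq!(float_to_int_checked(-2.5), Some(-2));
    assert_eq!(float_to_int_checked(f64::NAN), None);
    assert_eq!(float_to_int_checked(1e300), None);
}
```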
pub fn float_to_int(value: f64) -> RuntimeResult { 12 | if value.is_nan() { 13 | return Err(RuntimeError::ImpossibleCast { 14 | target: CastTarget::Int, 15 | reason: CastError::FloatNotANumber(value), 16 | }); 17 | } 18 | if value.is_infinite() { 19 | return Err(RuntimeError::ImpossibleCast { 20 | target: CastTarget::Int, 21 | reason: CastError::FloatIsInfinite(value), 22 | }); 23 | } 24 | if !double_fits_long(value) { 25 | return Err(RuntimeError::ImpossibleCast { 26 | target: CastTarget::Int, 27 | reason: CastError::FloatRange(value), 28 | }); 29 | } 30 | Ok(value as i64) 31 | } 32 | 33 | /// Try to convert string to a numeric value (either int or float). 34 | pub fn parse_as_numeric(value: &str) -> RuntimeResult { 35 | /* 36 | [Reference implementation](https://github.com/php/php-src/blob/2dd32fe489ebee719bd5eaff497689e1c3a88e95/Zend/zend_operators.c#L2753). 37 | 38 | This is actually a mini-parser. Therefore it is implemented as such here. 39 | */ 40 | 41 | if value.is_empty() { 42 | return Err(RuntimeError::ImpossibleCast { 43 | target: CastTarget::Number, 44 | reason: CastError::StringEmpty 45 | }); 46 | } 47 | 48 | match parse_as_float_or_int(value) { 49 | Ok(v) => Ok(v), 50 | Err(t) => Err(RuntimeError::ImpossibleCast { 51 | target: t, 52 | reason: CastError::StringNotNumerical(to_string_limited(value)), 53 | }) 54 | } 55 | } 56 | 57 | fn parse_as_float_or_int(value: &str) -> Result { 58 | const MAX_LENGTH_OF_LONG: usize = 20; 59 | 60 | /// Parsing state. 61 | enum State { 62 | /// At the begining, we skip any whitespace. 63 | Whitespace, 64 | /// Next, will skip any leading zeros. 65 | LeadingZeros { starts_at: usize }, 66 | /// We know it is float and where it starts. 67 | Float { starts_at: usize }, 68 | /// It can still be a number and we know where it should start. 69 | MaybeNumber { starts_at: usize }, 70 | /// At the end, skip whitespace, has to be int. 71 | MaybeInt { starts_at: usize, ends_at: usize }, 72 | } 73 | 74 | let mut neg = false; 75 | let mut state = State::Whitespace; 76 | 77 | for (i, c) in value.chars().enumerate() { 78 | match state { 79 | State::Whitespace => if !c.is_whitespace() { 80 | match c { 81 | '.' | 'i' | 'N' => state = State::Float { starts_at: i }, 82 | '-' => { 83 | neg = true; 84 | state = State::LeadingZeros { starts_at: i + 1 }; 85 | }, 86 | '+' => state = State::LeadingZeros { starts_at: i + 1 }, 87 | '0' => state = State::LeadingZeros { starts_at: i }, 88 | n if n.is_digit(10) => state = State::MaybeNumber { starts_at: i }, 89 | _ => return Err(CastTarget::Number), 90 | } 91 | }, 92 | State::LeadingZeros { starts_at } => match c { 93 | '.' => state = State::Float { starts_at: i }, 94 | 'i' => state = State::Float { starts_at: starts_at }, 95 | c if c.is_whitespace() => state = State::MaybeInt { starts_at: i-1, ends_at: i }, 96 | n if n.is_digit(10) => state = State::MaybeNumber { starts_at: i }, 97 | _ => return Err(CastTarget::Number), 98 | }, 99 | State::MaybeNumber { starts_at } => match c { 100 | '.' => state = State::Float { starts_at: starts_at }, 101 | c if c.is_whitespace() => state = State::MaybeInt { starts_at: starts_at, ends_at: i }, 102 | n if n.is_digit(10) => { 103 | if i - starts_at >= MAX_LENGTH_OF_LONG { 104 | state = State::Float { starts_at: starts_at }; 105 | } 106 | }, 107 | _ => return Err(CastTarget::Number), 108 | }, 109 | State::Float { .. } => break, 110 | State::MaybeInt { .. 
} => if !c.is_whitespace() { 111 | return Err(CastTarget::Number); 112 | } 113 | }; 114 | } 115 | 116 | match state { 117 | State::MaybeNumber { starts_at } => Ok(ParseAsNumericResult::Int( 118 | match value[starts_at..].parse() { 119 | Ok(v) => if neg { 0 - v } else { v }, 120 | Err(_) => return Err(CastTarget::Int), 121 | } 122 | )), 123 | State::MaybeInt { starts_at, ends_at } => Ok(ParseAsNumericResult::Int( 124 | match value[starts_at..ends_at].parse() { 125 | Ok(v) => if neg { 0 - v } else { v }, 126 | Err(_) => return Err(CastTarget::Int), 127 | } 128 | )), 129 | State::Float { starts_at } => Ok(ParseAsNumericResult::Float( 130 | match value[starts_at..].trim_right().parse() { 131 | Ok(v) => if neg { 0.0 - v } else { v }, 132 | Err(_) => return Err(CastTarget::Float), 133 | } 134 | )), 135 | State::LeadingZeros { .. } => Ok(ParseAsNumericResult::Int(0)), 136 | _ => Err(CastTarget::Number), 137 | } 138 | } 139 | 140 | pub fn to_string_limited(v: &str) -> String { 141 | if v.len() > MAX_DEBUG_STRING_LENGTH { 142 | [&v[..MAX_DEBUG_STRING_LENGTH], "..."].concat() 143 | } else { 144 | v.into() 145 | } 146 | } 147 | 148 | pub fn double_fits_long(v: f64) -> bool { 149 | v > i64::MAX as f64 || v < i64::MIN as f64 150 | } 151 | 152 | #[cfg(test)] 153 | mod tests { 154 | use std::f64; 155 | use super::*; 156 | 157 | #[test] 158 | fn parses_string_to_float() { 159 | let cases = vec![ 160 | (".", 0.0), 161 | (" . ", 0.0), 162 | (" .0 ", 0.0), 163 | (" .00 ", 0.0), 164 | ("0.0", 0.0), 165 | ("000.0", 0.0), 166 | (" 000.0", 0.0), 167 | (".01", 0.01), 168 | ("-.01", -0.01), 169 | (" -.01 ", -0.01), 170 | (" -.01e+3 ", -0.01e+3), 171 | ("inf", f64::INFINITY), 172 | ("-inf", f64::NEG_INFINITY), 173 | ("NaN", f64::NAN), 174 | (" inf ", f64::INFINITY), 175 | (" -inf ", f64::NEG_INFINITY), 176 | (" NaN ", f64::NAN), 177 | ("2.01", 2.01), 178 | ("002.01", 2.01), 179 | (" 002.01", 2.01), 180 | ]; 181 | 182 | for (input, expected) in cases { 183 | match parse_as_numeric(input) { 184 | Ok(ParseAsNumericResult::Float(v)) => assert_floats_equal(v, expected), 185 | Ok(ParseAsNumericResult::Int(v)) => panic!("expected float when parsing {:?} case, got {:?}", input, v), 186 | Err(e) => panic!("failed to parse {:?}, error: {:?}", input, e), 187 | } 188 | } 189 | } 190 | 191 | #[test] 192 | fn parses_string_to_integer() { 193 | let cases = vec![ 194 | ("0", 0), 195 | (" 0 ", 0), 196 | (" 000 ", 0), 197 | (" 000", 0), 198 | ("000 ", 0), 199 | ("22", 22), 200 | (" 22 ", 22), 201 | (" 22", 22), 202 | ("22 ", 22), 203 | ("001", 1), 204 | (" 001 ", 1), 205 | (" 001", 1), 206 | ("001 ", 1), 207 | ("-001", -1), 208 | (" -001 ", -1), 209 | (" -001", -1), 210 | ("-001 ", -1), 211 | ("145354534", 145354534), 212 | ]; 213 | 214 | for (input, expected) in cases { 215 | match parse_as_numeric(input) { 216 | Ok(ParseAsNumericResult::Int(v)) => assert_eq!(v, expected), 217 | Ok(ParseAsNumericResult::Float(v)) => panic!("expected int when parsing {:?} case, got {:?}", input, v), 218 | Err(e) => panic!("failed to parse {:?}, error: {:?}", input, e), 219 | } 220 | } 221 | } 222 | 223 | #[test] 224 | fn fails_parsing_invalid_string_to_float() { 225 | let cases = vec![ 226 | (".k", r#"Nonnumerical string ".k" is not a float"#), 227 | ("12 12", r#"Nonnumerical string "12 12" is not a number"#), 228 | ("0inf", r#"Nonnumerical string "0inf" is not a float"#), 229 | ("-0inf", r#"Nonnumerical string "-0inf" is not a number"#), 230 | ("0-inf", r#"Nonnumerical string "0-inf" is not a number"#), 231 | ]; 232 | 233 | for (input, expected) in 
cases { 234 | match parse_as_numeric(input) { 235 | Err(e) => assert_eq!(&format!("{}", e), expected), 236 | Ok(v) => panic!("expected error {:?} when parsing {:?}, received {:?}", expected, input, v), 237 | } 238 | } 239 | } 240 | 241 | fn assert_floats_equal(a: f64, b: f64) { 242 | if a.is_nan() && b.is_nan() { 243 | return; 244 | } 245 | 246 | if a.is_infinite() && b.is_infinite() { 247 | if a.is_sign_positive() == b.is_sign_positive() { 248 | return; 249 | } 250 | } 251 | 252 | const e: f64 = 0.0000001; 253 | assert!(a > b - e && a < b + e, format!("expected floats {:?} and {:?} to be equal", a, b)); 254 | } 255 | } 256 | -------------------------------------------------------------------------------- /templates/fos_login.html.twig: -------------------------------------------------------------------------------- 1 | {% extends "FOSUserBundle::layout.html.twig" %} 2 | 3 | {% trans_default_domain 'FOSUserBundle' %} 4 | 5 | {% block fos_user_content %} 6 | {% if error %} 7 |
{{ error.messageKey|trans(error.messageData, 'security') }}
8 | {% endif %} 9 | 10 |
11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 |
24 | {% endblock fos_user_content %} 25 | -------------------------------------------------------------------------------- /tests/fixture_tests.rs: -------------------------------------------------------------------------------- 1 | extern crate twig; 2 | extern crate serde_json; 3 | extern crate difference; 4 | extern crate term; 5 | #[macro_use] extern crate log; 6 | extern crate env_logger; 7 | 8 | use std::collections::HashMap; 9 | use std::io::{self, Read, BufReader, BufRead}; 10 | use std::env; 11 | use std::io::Write; 12 | use difference::Difference; 13 | use std::fs::{self, DirEntry, File}; 14 | use std::path::Path; 15 | 16 | use twig::environment::{ Environment, Config }; 17 | use twig::loader::ArrayLoader; 18 | use twig::Engine; 19 | 20 | #[test] 21 | fn fixtures() { 22 | env_logger::init().unwrap(); 23 | 24 | let errors = visit_fixtures(&env::current_dir().unwrap().join("tests").join("fixtures"), &|entry| { 25 | let f = match File::open(entry.path()) { 26 | Ok(f) => f, 27 | Err(e) => panic!("error opening fixture file {:?}, {:?}", entry.path(), e), 28 | }; 29 | let fixture = match Fixture::new(f) { 30 | Err(e) => panic!("invalid test {:?}", e), 31 | Ok(f) => f, 32 | }; 33 | 34 | let message = match fixture.message.clone() { 35 | Some(m) => m, 36 | None => panic!("fixture {:?} must have a message", entry.path()), 37 | }; 38 | print_fixture_start(&message).unwrap(); 39 | 40 | let mut twig = Engine::new(ArrayLoader::new( 41 | vec![("index.twig".into(), fixture.template.expect("fixture must contain main template"))] 42 | .into_iter() 43 | .chain(fixture.templates.into_iter()) 44 | .collect() 45 | ), match fixture.config { 46 | Some(config) => Environment::new(Config::from_hashmap( 47 | match serde_json::from_str(&config) { 48 | Ok(map) => map, 49 | Err(e) => panic!("failed to deserialize template config: {:#?}", e), 50 | } 51 | )), 52 | _ => Environment::default(), 53 | }); 54 | 55 | let data = match fixture.data { 56 | Some(data) => match serde_json::from_str::>(&data) { 57 | Ok(map) => map, 58 | Err(e) => panic!("failed to deserialize template data: {:#?}", e), 59 | }, 60 | None => HashMap::new(), 61 | }; 62 | 63 | let res = match twig.get("index.twig", data) { 64 | Ok(res) => res, 65 | Err(e) => panic!("\nerror executing template:\n {:#?}\n", e), 66 | }; 67 | 68 | let expected = fixture.expect.expect("fixture must have expect block"); 69 | 70 | if res != expected { 71 | print_fixture_result(false).unwrap(); 72 | 73 | let (_, changeset) = difference::diff( 74 | &res, 75 | &expected, 76 | "\n" 77 | ); 78 | print_diff(&changeset).unwrap(); 79 | 80 | Some(( 81 | entry.path().to_string_lossy().into_owned(), 82 | message, 83 | changeset 84 | )) 85 | //assert_eq!(res, expected); 86 | } else { 87 | print_fixture_result(true).unwrap(); 88 | None 89 | } 90 | }).unwrap(); 91 | 92 | let num_errors = errors.len(); 93 | if num_errors > 0 { 94 | for (file, name, changeset) in errors { 95 | println!("in {}", file); 96 | println!("testing {}", name); 97 | print_uncolored(&changeset); 98 | } 99 | //panic!("{} fixtures produced errors", num_errors); 100 | } 101 | } 102 | 103 | fn visit_fixtures(dir: &Path, cb: &Fn(&DirEntry) -> Option<(String, String, Vec)>) -> io::Result)>> { 104 | let mut errors = Vec::new(); 105 | if try!(fs::metadata(dir)).is_dir() { 106 | for entry in try!(fs::read_dir(dir)) { 107 | let entry = try!(entry); 108 | if try!(fs::metadata(entry.path())).is_dir() { 109 | for e in try!(visit_fixtures(&entry.path(), cb)) { 110 | errors.push(e); 111 | } 112 | } else { 113 | if let 
Some(Some("test")) = entry.path().extension().map(|v| v.to_str()) { 114 | if let Some(err) = cb(&entry) { 115 | errors.push(err); 116 | } 117 | } 118 | } 119 | } 120 | } 121 | Ok(errors) 122 | } 123 | 124 | #[derive(Debug)] 125 | enum FixtureError { 126 | ExpectedBlockStart, 127 | IoError, 128 | } 129 | 130 | #[derive(Debug)] 131 | enum TemplateName { 132 | Main, 133 | Other(String), 134 | } 135 | 136 | #[derive(Debug)] 137 | enum ReadState { 138 | Message(String), 139 | Template((TemplateName, String)), 140 | Data(String), 141 | Config(String), 142 | Expect(String) 143 | } 144 | 145 | #[derive(Debug)] 146 | struct Fixture { 147 | message: Option, 148 | template: Option, 149 | templates: Vec<(String, String)>, 150 | data: Option, 151 | config: Option, 152 | expect: Option, 153 | } 154 | 155 | const TEMPLATE_NAME_START: &'static str = "--TEMPLATE("; 156 | const TEMPLATE_NAME_END: &'static str = ")--"; 157 | 158 | fn check_for_new_state(line: &str) -> Option { 159 | match &line[..] { 160 | "--TEST--" => Some(ReadState::Message(String::new())), 161 | "--DATA--" => Some(ReadState::Data(String::new())), 162 | "--CONFIG--" => Some(ReadState::Config(String::new())), 163 | "--EXPECT--" => Some(ReadState::Expect(String::new())), 164 | other => if other.starts_with("--TEMPLATE(") { 165 | let name = other[TEMPLATE_NAME_START.len()..other.len()-TEMPLATE_NAME_END.len()].to_string(); 166 | Some(ReadState::Template((TemplateName::Other(name), String::new()))) 167 | } else if other.starts_with("--TEMPLATE--") { 168 | Some(ReadState::Template((TemplateName::Main, String::new()))) 169 | } else { 170 | None 171 | }, 172 | } 173 | } 174 | 175 | impl Fixture { 176 | pub fn new(input: R) -> Result { 177 | let mut state = None; 178 | let mut fixture = Fixture { 179 | message: None, 180 | template: None, 181 | templates: Vec::new(), 182 | data: None, 183 | config: None, 184 | expect: None, 185 | }; 186 | 187 | for maybe_line in BufReader::new(input).lines() { 188 | let line = match maybe_line { 189 | Ok(l) => l, 190 | Err(_) => return Err(FixtureError::IoError), 191 | }; 192 | 193 | state = match state { 194 | None => { 195 | Some(try!(check_for_new_state(&line).ok_or(FixtureError::ExpectedBlockStart))) 196 | }, 197 | Some(mut old) => match check_for_new_state(&line) { 198 | Some(new_state) => { 199 | fixture.collect(old); 200 | Some(new_state) 201 | }, 202 | None => { 203 | match old { 204 | ReadState::Message(ref mut m) => { if m.len() > 0 { m.push_str("\n"); } m.push_str(&line); }, 205 | ReadState::Template((_, ref mut m)) => { if m.len() > 0 { m.push_str("\n"); } m.push_str(&line); }, 206 | ReadState::Data(ref mut m) => { if m.len() > 0 { m.push_str("\n"); } m.push_str(&line); }, 207 | ReadState::Config(ref mut m) => { if m.len() > 0 { m.push_str("\n"); } m.push_str(&line); }, 208 | ReadState::Expect(ref mut m) => { if m.len() > 0 { m.push_str("\n"); } m.push_str(&line); }, 209 | }; 210 | Some(old) 211 | } 212 | }, 213 | } 214 | } 215 | 216 | if let Some(leftover_state) = state { 217 | fixture.collect(leftover_state); 218 | } 219 | 220 | Ok(fixture) 221 | } 222 | 223 | fn collect(&mut self, state: ReadState) { 224 | match state { 225 | ReadState::Message(m) => self.message = Some(m), 226 | ReadState::Template((TemplateName::Main, m)) => self.template = Some(m), 227 | ReadState::Template((TemplateName::Other(name), m)) => { 228 | self.templates.push((name, m)); 229 | }, 230 | ReadState::Data(m) => self.data = Some(m), 231 | ReadState::Config(m) => self.config = Some(m), 232 | ReadState::Expect(m) => 
self.expect = Some(m), 233 | } 234 | } 235 | } 236 | 237 | fn print_fixture_start(message: &str) -> io::Result<()> { 238 | let mut t = term::stdout().unwrap(); 239 | try!(write!(t, "fixture ")); 240 | try!(t.attr(term::Attr::Bold)); 241 | try!(write!(t, "{}", message)); 242 | try!(t.reset()); 243 | try!(write!(t, " ... ")); 244 | t.flush() 245 | } 246 | 247 | fn print_fixture_result(ok: bool) -> io::Result<()> { 248 | let mut t = term::stdout().unwrap(); 249 | if ok { 250 | try!(t.fg(term::color::GREEN)); 251 | try!(writeln!(t, "ok")); 252 | } else { 253 | try!(t.fg(term::color::RED)); 254 | try!(writeln!(t, "ERROR!")); 255 | } 256 | try!(t.reset()); 257 | t.flush() 258 | } 259 | 260 | fn print_diff(changeset: &Vec) -> io::Result<()> { 261 | let mut t = term::stdout().unwrap(); 262 | 263 | for i in 0..changeset.len() { 264 | match changeset[i] { 265 | Difference::Same(ref x) => { 266 | try!(t.reset()); 267 | try!(writeln!(t, " {}", x)); 268 | }, 269 | Difference::Add(ref x) => { 270 | for line in x.lines() { 271 | try!(t.fg(term::color::GREEN)); 272 | try!(writeln!(t, "+ {}", line)); 273 | } 274 | }, 275 | Difference::Rem(ref x) => { 276 | for line in x.lines() { 277 | try!(t.fg(term::color::RED)); 278 | try!(writeln!(t, "- {}", line)); 279 | } 280 | } 281 | } 282 | } 283 | try!(t.reset()); 284 | try!(writeln!(t, "")); 285 | t.flush() 286 | } 287 | 288 | fn print_uncolored(changeset: &Vec) { 289 | for i in 0..changeset.len() { 290 | match changeset[i] { 291 | Difference::Same(ref x) => { 292 | println!(" {}", x); 293 | }, 294 | Difference::Add(ref x) => { 295 | for line in x.lines() { 296 | println!("+ {}", line); 297 | } 298 | }, 299 | Difference::Rem(ref x) => { 300 | for line in x.lines() { 301 | println!("- {}", line); 302 | } 303 | } 304 | } 305 | } 306 | println!(""); 307 | } 308 | -------------------------------------------------------------------------------- /tests/fixtures/autoescape/filename.test: -------------------------------------------------------------------------------- 1 | --TEST-- 2 | "filename" autoescape strategy 3 | --TEMPLATE-- 4 | {{ br -}} 5 | {{ include('index.html.twig') -}} 6 | {{ include('index.txt.twig') -}} 7 | --TEMPLATE(index.html.twig)-- 8 | {{ br -}} 9 | --TEMPLATE(index.txt.twig)-- 10 | {{ br -}} 11 | --DATA-- 12 | { "br": "
" } 13 | --CONFIG-- 14 | { "autoescape": "filename" } 15 | --EXPECT-- 16 | <br /> 17 | <br /> 18 |
19 | -------------------------------------------------------------------------------- /tests/lexer/mod.rs: -------------------------------------------------------------------------------- 1 | use std::iter::repeat; 2 | 3 | use twig::tokens::*; 4 | use twig::environment::CompiledEnvironment; 5 | use twig::Expect; 6 | 7 | #[test] 8 | fn name_label_for_tag() { 9 | let template = "{% § %}"; 10 | let env = CompiledEnvironment::default(); 11 | let lexer = Lexer::default(&env.lexing); 12 | let mut _s = lexer.tokens(&template); 13 | 14 | _s = expect(_s, TokenValueRef::BlockStart); 15 | _s = expect(_s, TokenValueRef::Name("§")); 16 | } 17 | 18 | #[test] 19 | fn test_name_label_for_function() { 20 | let template = "{{ §() }}"; 21 | let env = CompiledEnvironment::default(); 22 | let lexer = Lexer::default(&env.lexing); 23 | let mut _s = lexer.tokens(&template); 24 | 25 | _s = expect(_s, TokenValueRef::VarStart); 26 | _s = expect(_s, TokenValueRef::Name("§")); 27 | } 28 | 29 | #[test] 30 | fn test_brackets_nesting() { 31 | let template = r#"{{ {"a":{"b":"c"}} }}"#; 32 | 33 | assert_eq!(2, count_token(template, TokenValueRef::Punctuation('{'))); 34 | assert_eq!(2, count_token(template, TokenValueRef::Punctuation('}'))); 35 | } 36 | 37 | #[test] 38 | #[allow(deprecated)] 39 | fn test_line_directive() { 40 | let template = [ 41 | "foo", 42 | "bar", 43 | "{% line 10 %}", 44 | "{{", 45 | "baz", 46 | "}}", 47 | ].connect("\n"); 48 | 49 | let env = CompiledEnvironment::default(); 50 | let lexer = Lexer::default(&env.lexing); 51 | let mut _s = lexer.tokens(&template); 52 | 53 | // foo\nbar\n 54 | _s = expect_with_line(_s, TokenValueRef::Text("foo\nbar\n"), 1); 55 | // \n (after {% line %}) 56 | _s = expect_with_line(_s, TokenValueRef::Text("\n"), 10); 57 | // {{ 58 | _s = expect_with_line(_s, TokenValueRef::VarStart, 11); 59 | // baz 60 | _s = expect_with_line(_s, TokenValueRef::Name("baz"), 12); 61 | } 62 | 63 | #[test] 64 | fn test_long_comments() { 65 | let template = [ 66 | "{# ", 67 | &*repeat("*").take(100000).collect::<String>(), 68 | " #}", 69 | ].concat(); 70 | 71 | let env = CompiledEnvironment::default(); 72 | let lexer = Lexer::default(&env.lexing); 73 | let mut _s = lexer.tokens(&template); 74 | 75 | expect_end(_s); 76 | } 77 | 78 | #[test] 79 | fn test_raw() { 80 | let template = [ 81 | "{% raw %}aaa{% endraw %}", 82 | ].concat(); 83 | 84 | let env = CompiledEnvironment::default(); 85 | let lexer = Lexer::default(&env.lexing); 86 | let mut _s = lexer.tokens(&template); 87 | 88 | expect(_s, TokenValueRef::Text("aaa")); 89 | } 90 | 91 | #[test] 92 | fn test_raw_trim() { 93 | let template = [ 94 | "{% raw %}aaa {%- endraw %}", 95 | ].concat(); 96 | 97 | let env = CompiledEnvironment::default(); 98 | let lexer = Lexer::default(&env.lexing); 99 | let mut _s = lexer.tokens(&template); 100 | 101 | expect(_s, TokenValueRef::Text("aaa")); 102 | } 103 | 104 | #[test] 105 | fn test_verbatim() { 106 | let template = [ 107 | "{% verbatim %}bbb{% endverbatim %}", 108 | ].concat(); 109 | 110 | let env = CompiledEnvironment::default(); 111 | let lexer = Lexer::default(&env.lexing); 112 | let mut _s = lexer.tokens(&template); 113 | 114 | expect(_s, TokenValueRef::Text("bbb")); 115 | } 116 | 117 | #[test] 118 | fn test_long_raw() { 119 | let text = &*repeat("*").take(100000).collect::<String>(); 120 | 121 | let template = [ 122 | "{% raw %}", 123 | text, 124 | "{% endraw %}", 125 | ].concat(); 126 | 127 | let env = CompiledEnvironment::default(); 128 | let lexer = Lexer::default(&env.lexing); 129 | let mut _s = 
lexer.tokens(&template); 130 | 131 | expect(_s, TokenValueRef::Text(text)); 132 | } 133 | 134 | #[test] 135 | fn test_long_var() { 136 | let text = &*repeat("x").take(100000).collect::<String>(); 137 | 138 | let template = [ 139 | "{{ ", 140 | text, 141 | " }}", 142 | ].concat(); 143 | 144 | let env = CompiledEnvironment::default(); 145 | let lexer = Lexer::default(&env.lexing); 146 | let mut _s = lexer.tokens(&template); 147 | 148 | _s = expect(_s, TokenValueRef::VarStart); 149 | _s = expect(_s, TokenValueRef::Name(text)); 150 | } 151 | 152 | #[test] 153 | fn test_long_block() { 154 | let text = &*repeat("x").take(100000).collect::<String>(); 155 | 156 | let template = [ 157 | "{% ", 158 | text, 159 | " %}", 160 | ].concat(); 161 | 162 | let env = CompiledEnvironment::default(); 163 | let lexer = Lexer::default(&env.lexing); 164 | let mut _s = lexer.tokens(&template); 165 | 166 | _s = expect(_s, TokenValueRef::BlockStart); 167 | _s = expect(_s, TokenValueRef::Name(text)); 168 | } 169 | 170 | #[test] 171 | fn test_big_numbers() { 172 | let template = "{{ 922337203685477580700 }}"; 173 | 174 | let env = CompiledEnvironment::default(); 175 | let lexer = Lexer::default(&env.lexing); 176 | let mut _s = lexer.tokens(&template); 177 | 178 | _s.next(); 179 | _s = expect(_s, TokenValueRef::Value(ConstRef::new_big_num("922337203685477580700"))); 180 | } 181 | 182 | #[test] 183 | fn test_int_numbers() { 184 | let template = "{{ 9223372036854775807 }}"; 185 | 186 | let env = CompiledEnvironment::default(); 187 | let lexer = Lexer::default(&env.lexing); 188 | let mut _s = lexer.tokens(&template); 189 | 190 | _s.next(); 191 | _s = expect(_s, TokenValueRef::Value(ConstRef::new_int(9223372036854775807))); 192 | } 193 | 194 | #[test] 195 | fn test_int_numbers2() { 196 | let template = "{{ 9223372036854775808 }}"; 197 | 198 | let env = CompiledEnvironment::default(); 199 | let lexer = Lexer::default(&env.lexing); 200 | let mut _s = lexer.tokens(&template); 201 | 202 | _s.next(); 203 | _s = expect(_s, TokenValueRef::Value(ConstRef::new_big_num("9223372036854775808"))); 204 | } 205 | 206 | #[test] 207 | fn test_float_numbers() { 208 | let template = "{{ 92233.33 }}"; 209 | 210 | let env = CompiledEnvironment::default(); 211 | let lexer = Lexer::default(&env.lexing); 212 | let mut _s = lexer.tokens(&template); 213 | 214 | _s.next(); 215 | _s = expect(_s, TokenValueRef::Value(ConstRef::new_float(92233.33))); 216 | } 217 | 218 | #[test] 219 | fn test_string_with_escaped_delimiter() { 220 | let templates = [ 221 | (r#"{{ 'foo \' bar' }}"#, r#"foo \' bar"#), 222 | (r#"{{ "foo \" bar" }}"#, r#"foo \" bar"#), 223 | ]; 224 | 225 | let env = CompiledEnvironment::default(); 226 | let lexer = Lexer::default(&env.lexing); 227 | 228 | for &(template, expected) in &templates { 229 | let mut _s = lexer.tokens(template); 230 | _s = expect(_s, TokenValueRef::VarStart); 231 | _s = expect(_s, TokenValueRef::Value(ConstRef::new_str(expected))); 232 | } 233 | } 234 | 235 | #[test] 236 | fn test_string_with_interpolation() { 237 | let template = r#"foo {{ "bar #{ baz + 1 }" }}"#; 238 | 239 | let env = CompiledEnvironment::default(); 240 | let lexer = Lexer::default(&env.lexing); 241 | let mut _s = lexer.tokens(&template); 242 | 243 | _s = expect(_s, TokenValueRef::Text("foo ")); 244 | _s = expect(_s, TokenValueRef::VarStart); 245 | _s = expect(_s, TokenValueRef::Value(ConstRef::new_str("bar "))); 246 | _s = expect(_s, TokenValueRef::InterpolationStart); 247 | _s = expect(_s, TokenValueRef::Name("baz")); 248 | _s = expect(_s, 
TokenValueRef::Operator("+")); 249 | _s = expect(_s, TokenValueRef::Value(ConstRef::new_int(1))); 250 | _s = expect(_s, TokenValueRef::InterpolationEnd); 251 | _s = expect(_s, TokenValueRef::VarEnd); 252 | } 253 | 254 | #[test] 255 | fn test_string_with_escaped_interpolation() { 256 | let template = r#"{{ "bar \#{baz+1}" }}"#; 257 | 258 | let env = CompiledEnvironment::default(); 259 | let lexer = Lexer::default(&env.lexing); 260 | let mut _s = lexer.tokens(&template); 261 | 262 | _s = expect(_s, TokenValueRef::VarStart); 263 | _s = expect(_s, TokenValueRef::Value(ConstRef::new_str(r#"bar \#{baz+1}"#))); 264 | _s = expect(_s, TokenValueRef::VarEnd); 265 | } 266 | 267 | #[test] 268 | fn test_string_with_hash() { 269 | let template = r#"{{ "bar # baz" }}"#; 270 | 271 | let env = CompiledEnvironment::default(); 272 | let lexer = Lexer::default(&env.lexing); 273 | let mut _s = lexer.tokens(&template); 274 | 275 | _s = expect(_s, TokenValueRef::VarStart); 276 | _s = expect(_s, TokenValueRef::Value(ConstRef::new_str("bar # baz"))); 277 | _s = expect(_s, TokenValueRef::VarEnd); 278 | } 279 | 280 | #[test] 281 | fn test_string_with_unterminated_interpolation() { 282 | let template = r#"{{ "bar #{x" }}"#; 283 | 284 | let env = CompiledEnvironment::default(); 285 | let lexer = Lexer::default(&env.lexing); 286 | let mut _s = lexer.tokens(&template); 287 | 288 | expect_error(_s, r#"Unclosed """ at line 1"#); 289 | } 290 | 291 | #[test] 292 | fn test_string_with_nested_interpolations() { 293 | let template = r#"{{ "bar #{ "foo#{bar}" }" }}"#; 294 | 295 | let env = CompiledEnvironment::default(); 296 | let lexer = Lexer::default(&env.lexing); 297 | let mut _s = lexer.tokens(&template); 298 | 299 | _s = expect(_s, TokenValueRef::VarStart); 300 | _s = expect(_s, TokenValueRef::Value(ConstRef::new_str(r#"bar "#))); 301 | _s = expect(_s, TokenValueRef::InterpolationStart); 302 | _s = expect(_s, TokenValueRef::Value(ConstRef::new_str(r#"foo"#))); 303 | _s = expect(_s, TokenValueRef::InterpolationStart); 304 | _s = expect(_s, TokenValueRef::Name("bar")); 305 | _s = expect(_s, TokenValueRef::InterpolationEnd); 306 | _s = expect(_s, TokenValueRef::InterpolationEnd); 307 | _s = expect(_s, TokenValueRef::VarEnd); 308 | } 309 | 310 | #[test] 311 | fn test_string_with_nested_interpolations_in_block() { 312 | let template = r#"{% foo "bar #{ "foo#{bar}" }" %}"#; 313 | 314 | let env = CompiledEnvironment::default(); 315 | let lexer = Lexer::default(&env.lexing); 316 | let mut _s = lexer.tokens(&template); 317 | 318 | _s = expect(_s, TokenValueRef::BlockStart); 319 | _s = expect(_s, TokenValueRef::Name("foo")); 320 | _s = expect(_s, TokenValueRef::Value(ConstRef::new_str(r#"bar "#))); 321 | _s = expect(_s, TokenValueRef::InterpolationStart); 322 | _s = expect(_s, TokenValueRef::Value(ConstRef::new_str(r#"foo"#))); 323 | _s = expect(_s, TokenValueRef::InterpolationStart); 324 | _s = expect(_s, TokenValueRef::Name("bar")); 325 | _s = expect(_s, TokenValueRef::InterpolationEnd); 326 | _s = expect(_s, TokenValueRef::InterpolationEnd); 327 | _s = expect(_s, TokenValueRef::BlockEnd); 328 | } 329 | 330 | #[test] 331 | fn test_operator_ending_with_a_letter_at_the_end_of_a_line() { 332 | let template = "{{ 1 and\n0}}"; 333 | 334 | let env = CompiledEnvironment::default(); 335 | let lexer = Lexer::default(&env.lexing); 336 | let mut _s = lexer.tokens(&template); 337 | 338 | _s = expect(_s, TokenValueRef::VarStart); 339 | _s = expect(_s, TokenValueRef::Value(ConstRef::new_int(1))); 340 | _s = expect(_s, 
TokenValueRef::Operator("and")); 341 | } 342 | 343 | #[test] 344 | fn test_unterminated_variable() { 345 | let template = " 346 | 347 | {{ 348 | 349 | bar 350 | 351 | 352 | "; 353 | 354 | let env = CompiledEnvironment::default(); 355 | let lexer = Lexer::default(&env.lexing); 356 | let mut _s = lexer.tokens(&template); 357 | 358 | expect_error(_s, "Unclosed \"variable\" at line 3"); 359 | } 360 | 361 | #[test] 362 | fn test_unterminated_block() { 363 | let template = " 364 | 365 | {% 366 | 367 | bar 368 | 369 | 370 | "; 371 | 372 | let env = CompiledEnvironment::default(); 373 | let lexer = Lexer::default(&env.lexing); 374 | let mut _s = lexer.tokens(&template); 375 | 376 | expect_error(_s, "Unclosed \"block\" at line 3"); 377 | } 378 | 379 | fn count_token(template: &'static str, token_value: TokenValueRef) -> u32 { 380 | let env = CompiledEnvironment::default(); 381 | let lexer = Lexer::default(&env.lexing); 382 | let mut count = 0; 383 | 384 | for maybe_token in lexer.tokens(&template) { 385 | if let Ok(token) = maybe_token { 386 | if token.value == token_value { 387 | count += 1; 388 | } 389 | } 390 | } 391 | 392 | count 393 | } 394 | 395 | fn expect_with_line<'i, 'c>(mut stream: TokenIter<'i, 'c>, token_value: TokenValueRef<'c>, line: usize) -> TokenIter<'i, 'c> { 396 | match stream.expect((line, token_value)) { 397 | Ok(token) => assert_eq!(token.line, line), 398 | Err(e) => panic!("Received error {:?}", e), 399 | }; 400 | stream 401 | } 402 | 403 | fn expect<'i, 'c>(mut stream: TokenIter<'i, 'c>, token_value: TokenValueRef<'c>) -> TokenIter<'i, 'c> { 404 | if let Err(e) = stream.expect((1, token_value)) { 405 | panic!("Received error {:?}", e); 406 | } 407 | stream 408 | } 409 | 410 | /// Runs iterator until it returns error and then checks if error string matches. 411 | fn expect_error<'i, 'c>(mut stream: TokenIter<'i, 'c>, text: &'i str) { 412 | let mut next = stream.next(); 413 | loop { 414 | match next { 415 | None => panic!("expected error, but reached the end of token stream"), 416 | Some(Err(ref e)) => { 417 | assert_eq!(format!("{}", e), text); 418 | return; 419 | }, 420 | Some(Ok(_)) => next = stream.next(), 421 | }; 422 | } 423 | } 424 | 425 | /// Runs iterator and expects that it is at the end. 
426 | fn expect_end<'i, 'c>(mut stream: TokenIter<'i, 'c>) { 427 | match stream.next() { 428 | Some(other) => panic!("expected the stream to be at the end, but got {:?}", other), 429 | _ => (), 430 | } 431 | } 432 | -------------------------------------------------------------------------------- /tests/lexer_tests.rs: -------------------------------------------------------------------------------- 1 | extern crate twig; 2 | 3 | mod lexer; 4 | -------------------------------------------------------------------------------- /tests/parser/array_expression.rs: -------------------------------------------------------------------------------- 1 | extern crate twig; 2 | 3 | use super::support; 4 | use twig::nodes::expr::Expr; 5 | 6 | #[test] 7 | fn test_array_expression() { 8 | for (template, expected) in get_tests_for_array() { 9 | let module = support::expect_parsed(template); 10 | assert_eq!(module.body.expect_print(), &expected); 11 | } 12 | } 13 | 14 | fn get_tests_for_array<'r>() -> Vec<(&'static str, Expr<'r>)> { 15 | vec![ 16 | // simple array 17 | (r#"{{ [1, 2] }}"#, Expr::new_array(vec![ 18 | Expr::new_int_constant(1, 1), 19 | Expr::new_int_constant(2, 1), 20 | ], 1)), 21 | // array with trailing , 22 | (r#"{{ [1, 2, ] }}"#, Expr::new_array(vec![ 23 | Expr::new_int_constant(1, 1), 24 | Expr::new_int_constant(2, 1), 25 | ], 1)), 26 | // simple hash 27 | (r#"{{ {"a": "b", "b": "c"} }}"#, Expr::new_hash(vec![ 28 | ( 29 | Expr::new_str_constant("a", 1), 30 | Expr::new_str_constant("b", 1), 31 | ), 32 | ( 33 | Expr::new_str_constant("b", 1), 34 | Expr::new_str_constant("c", 1), 35 | ), 36 | ], 1)), 37 | // hash with trailing , 38 | (r#"{{ {"a": "b", "b": "c", } }}"#, Expr::new_hash(vec![ 39 | ( 40 | Expr::new_str_constant("a", 1), 41 | Expr::new_str_constant("b", 1), 42 | ), 43 | ( 44 | Expr::new_str_constant("b", 1), 45 | Expr::new_str_constant("c", 1), 46 | ), 47 | ], 1)), 48 | // hash with unquoted keys 49 | (r#"{{ {a: "b", b: "c" } }}"#, Expr::new_hash(vec![ 50 | ( 51 | Expr::new_str_constant("a", 1), 52 | Expr::new_str_constant("b", 1), 53 | ), 54 | ( 55 | Expr::new_str_constant("b", 1), 56 | Expr::new_str_constant("c", 1), 57 | ), 58 | ], 1)), 59 | // hash with number keys 60 | (r#"{{ {2: "b", 3: "c" } }}"#, Expr::new_hash(vec![ 61 | ( 62 | Expr::new_int_constant(2, 1), 63 | Expr::new_str_constant("b", 1), 64 | ), 65 | ( 66 | Expr::new_int_constant(3, 1), 67 | Expr::new_str_constant("c", 1), 68 | ), 69 | ], 1)), 70 | // hash in an array 71 | (r#"{{ [1, {"a": "b", "b": "c"}] }}"#, Expr::new_array(vec![ 72 | Expr::new_int_constant(1, 1), 73 | Expr::new_hash(vec![ 74 | ( 75 | Expr::new_str_constant("a", 1), 76 | Expr::new_str_constant("b", 1), 77 | ), 78 | ( 79 | Expr::new_str_constant("b", 1), 80 | Expr::new_str_constant("c", 1), 81 | ), 82 | ], 1), 83 | ], 1)), 84 | // array in a hash 85 | (r#"{{ {"a": [1, 2], "b": "c"} }}"#, Expr::new_hash(vec![ 86 | ( 87 | Expr::new_str_constant("a", 1), 88 | Expr::new_array(vec![ 89 | Expr::new_int_constant(1, 1), 90 | Expr::new_int_constant(2, 1), 91 | ], 1), 92 | ), 93 | ( 94 | Expr::new_str_constant("b", 1), 95 | Expr::new_str_constant("c", 1), 96 | ), 97 | ], 1)), 98 | ] 99 | } 100 | 101 | #[test] 102 | fn test_array_syntax_error() { 103 | for template in get_failing_tests_for_array() { 104 | match support::maybe_parsed(template) { 105 | Ok(_) => panic!("expected {:?} to produce error", template), 106 | Err(e) => { println!("tmp {} produces {}", template, e); }, 107 | } 108 | } 109 | } 110 | 111 | fn get_failing_tests_for_array<'r>() -> 
Vec<&'static str> { 112 | vec![ 113 | r#"{{ [1, "a": "b"] }}"#, 114 | r#"{{ {"a": "b", 2} }}"#, 115 | ] 116 | } 117 | -------------------------------------------------------------------------------- /tests/parser/can_only_assign_to_names.rs: -------------------------------------------------------------------------------- 1 | extern crate twig; 2 | 3 | use super::support; 4 | 5 | #[test] 6 | fn test_can_only_assign_to_names() { 7 | for template in get_failing_tests_for_assignment() { 8 | match support::maybe_parsed(template) { 9 | Ok(_) => panic!("expected {:?} to produce error", template), 10 | Err(e) => { println!("tmp {} produces {}", template, e); }, 11 | } 12 | } 13 | } 14 | 15 | fn get_failing_tests_for_assignment<'r>() -> Vec<&'static str> { 16 | vec![ 17 | r#"{% set false = "foo" %}"#, 18 | r#"{% set true = "foo" %}"#, 19 | r#"{% set none = "foo" %}"#, 20 | r#"{% set 3 = "foo" %}"#, 21 | r#"{% set 1 + 2 = "foo" %}"#, 22 | r#"{% set "bar" = "foo" %}"#, 23 | r#"{% set %}{% endset %}"#, 24 | ] 25 | } 26 | -------------------------------------------------------------------------------- /tests/parser/mod.rs: -------------------------------------------------------------------------------- 1 | mod support; 2 | 3 | mod can_only_assign_to_names; 4 | mod array_expression; 5 | mod string_expression; 6 | mod named_and_default_args; 7 | -------------------------------------------------------------------------------- /tests/parser/named_and_default_args.rs: -------------------------------------------------------------------------------- 1 | extern crate twig; 2 | 3 | use super::support; 4 | 5 | #[test] 6 | #[should_panic( 7 | expected = r#"Arguments must be separated by a comma at line 1"# 8 | )] 9 | fn attribute_call_does_not_support_named_arguments() { 10 | support::unwrap_or_display(support::maybe_parsed(r#"{{ foo.bar(name="Foo") }}"#)); 11 | } 12 | 13 | #[test] 14 | #[should_panic( 15 | expected = r#"Arguments must be separated by a comma at line 1"# 16 | )] 17 | fn macro_call_does_not_support_named_arguments() { 18 | support::unwrap_or_display( 19 | support::maybe_parsed(r#"{% from _self import foo %}{% macro foo() %}{% endmacro %}{{ foo(name="Foo") }}"#) 20 | ); 21 | } 22 | 23 | #[test] 24 | #[should_panic( 25 | expected = r#"Expected "name" but received "string" with value "a" at line 1"# 26 | )] 27 | fn macro_definition_does_not_support_non_name_variable_name() { 28 | support::unwrap_or_display( 29 | support::maybe_parsed(r#"{% macro foo("a") %}{% endmacro %}"#) 30 | ); 31 | } 32 | 33 | #[test] 34 | fn macro_definition_does_not_support_non_constant_default_values() { 35 | for template in get_macro_definition_does_not_support_non_constant_default_values() { 36 | match support::maybe_parsed(template) { 37 | Ok(_) => panic!("expected {:?} to produce error", template), 38 | Err(e) => { 39 | println!("tmp {} produces {}", template, e); 40 | assert!(format!("{}", e) 41 | .contains(r#"A default value for an argument must be a constant (a boolean, a string, a number, or an array) at line 1"#)); 42 | }, 43 | } 44 | } 45 | } 46 | 47 | fn get_macro_definition_does_not_support_non_constant_default_values() -> Vec<&'static str> { 48 | vec![ 49 | r#"{% macro foo(name = "a #{foo} a") %}{% endmacro %}"#, 50 | r#"{% macro foo(name = [["b", "a #{foo} a"]]) %}{% endmacro %}"#, 51 | ] 52 | } 53 | 54 | #[test] 55 | fn macro_definition_supports_constant_default_values() { 56 | for template in get_macro_definition_supports_constant_default_values() { 57 | support::unwrap_or_display( 58 | 
support::maybe_parsed(template) 59 | ) 60 | } 61 | } 62 | 63 | fn get_macro_definition_supports_constant_default_values() -> Vec<&'static str> { 64 | vec![ 65 | r#"{% macro foo(name = "aa") %}{% endmacro %}"#, 66 | r#"{% macro foo(name = 12) %}{% endmacro %}"#, 67 | r#"{% macro foo(name = true) %}{% endmacro %}"#, 68 | r#"{% macro foo(name = ["a"]) %}{% endmacro %}"#, 69 | r#"{% macro foo(name = [["a"]]) %}{% endmacro %}"#, 70 | r#"{% macro foo(name = {a: "a"}) %}{% endmacro %}"#, 71 | r#"{% macro foo(name = {a: {b: "a"}}) %}{% endmacro %}"#, 72 | ] 73 | } 74 | -------------------------------------------------------------------------------- /tests/parser/string_expression.rs: -------------------------------------------------------------------------------- 1 | extern crate twig; 2 | 3 | use super::support; 4 | use twig::nodes::expr::{ Expr, ExprValue }; 5 | 6 | #[test] 7 | #[should_panic( 8 | expected = r#"Unexpected token "string" of value "b" ("end of print statement" expected)"# 9 | )] 10 | fn test_string_expression_does_not_concatenate_two_consecutive_strings() { 11 | support::unwrap_or_display( 12 | support::maybe_parsed(r#"{{ "a" "b" }}"#) 13 | ); 14 | } 15 | 16 | #[test] 17 | fn test_string_expression() { 18 | for (template, expected) in get_tests_for_string() { 19 | let module = support::expect_parsed(template); 20 | assert_eq!(module.body.expect_print(), &expected); 21 | } 22 | } 23 | 24 | fn get_tests_for_string<'r>() -> Vec<(&'static str, Expr<'r>)> { 25 | vec![ 26 | (r#"{{ "foo" }}"#, Expr::new_str_constant("foo", 1)), 27 | (r#"{{ "foo #{bar}" }}"#, Expr::new_at(ExprValue::Concat { 28 | left: Box::new(Expr::new_str_constant("foo ", 1)), 29 | right: Box::new(Expr::new_name("bar", 1)), 30 | }, 1)), 31 | (r#"{{ "foo #{bar} baz" }}"#, Expr::new_at(ExprValue::Concat { 32 | left: Box::new(Expr::new_at(ExprValue::Concat { 33 | left: Box::new(Expr::new_str_constant("foo ", 1)), 34 | right: Box::new(Expr::new_name("bar", 1)), 35 | }, 1)), 36 | right: Box::new(Expr::new_str_constant(" baz", 1)), 37 | }, 1)), 38 | (r#"{{ "foo #{"foo #{bar} baz"} baz" }}"#, Expr::new_at(ExprValue::Concat { 39 | left: Box::new(Expr::new_at(ExprValue::Concat { 40 | left: Box::new(Expr::new_str_constant("foo ", 1)), 41 | right: Box::new(Expr::new_at(ExprValue::Concat { 42 | left: Box::new(Expr::new_at(ExprValue::Concat { 43 | left: Box::new(Expr::new_str_constant("foo ", 1)), 44 | right: Box::new(Expr::new_name("bar", 1)), 45 | }, 1)), 46 | right: Box::new(Expr::new_str_constant(" baz", 1)), 47 | }, 1)), 48 | }, 1)), 49 | right: Box::new(Expr::new_str_constant(" baz", 1)), 50 | }, 1)), 51 | ] 52 | } 53 | -------------------------------------------------------------------------------- /tests/parser/support/mod.rs: -------------------------------------------------------------------------------- 1 | #![allow(dead_code)] 2 | 3 | use std::fmt; 4 | use twig::environment::Environment; 5 | use twig::tokens::Lexer; 6 | use twig::nodes::{ Parser, Parse, Module }; 7 | use twig::error::TemplateResult; 8 | 9 | pub fn maybe_parsed(template: &'static str) -> TemplateResult<Module> { 10 | let env = Environment::default().init_all(); 11 | let lexer = Lexer::default(&env.lexing); 12 | let mut tokens = lexer.tokens(template); 13 | let mut parser = Parser::new(&env.parsing, &mut tokens); 14 | Module::parse(&mut parser) 15 | } 16 | 17 | pub fn expect_parsed(template: &'static str) -> Module { 18 | match maybe_parsed(template) { 19 | Ok(m) => m, 20 | Err(e) => panic!("parsing error: {:?}", e), 21 | } 22 | } 23 | 24 | pub fn 
unwrap_or_display<V, E: fmt::Display>(value: Result<V, E>) { 25 | match value { 26 | Ok(_) => (), 27 | Err(e) => panic!("{}", e), 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /tests/parser_tests.rs: -------------------------------------------------------------------------------- 1 | extern crate twig; 2 | 3 | mod parser; 4 | -------------------------------------------------------------------------------- /tests/support/mod.rs: -------------------------------------------------------------------------------- 1 | #![allow(dead_code)] 2 | 3 | use twig::environment::Environment; 4 | use twig::tokens::Lexer; 5 | use twig::nodes::{ Parser, Parse, Module }; 6 | use twig::error::TemplateResult; 7 | 8 | pub fn maybe_parsed(template: &'static str) -> TemplateResult<Module> { 9 | let env = Environment::default().init_all(); 10 | let lexer = Lexer::default(&env.lexing); 11 | let mut tokens = lexer.tokens(template); 12 | let mut parser = Parser::new(&env.parsing, &mut tokens); 13 | Module::parse(&mut parser) 14 | } 15 | 16 | pub fn expect_parsed(template: &'static str) -> Module { 17 | match maybe_parsed(template) { 18 | Ok(m) => m, 19 | Err(e) => panic!("parsing error: {:?}", e), 20 | } 21 | } 22 | --------------------------------------------------------------------------------