├── .gitignore ├── .travis.yml ├── HISTORY.md ├── README.md ├── config └── config.exs ├── docs ├── misc │ ├── how_it_works.md │ ├── line_number_preservation.md │ └── prior_art.md └── syntax │ ├── code.md │ ├── comments.md │ ├── compatibility_with_pug.md │ ├── doctype.md │ ├── elements.md │ ├── syntax.md │ └── text.md ├── lib ├── expug.ex └── expug │ ├── builder.ex │ ├── compiler.ex │ ├── expression_tokenizer.ex │ ├── expug_error.ex │ ├── runtime.ex │ ├── stringifier.ex │ ├── tokenizer.ex │ ├── tokenizer_tools.ex │ ├── tokenizer_tools │ └── state.ex │ ├── transformer.ex │ └── visitor.ex ├── mix.exs ├── mix.lock └── test ├── builder_test.exs ├── compiler_test.exs ├── eex_eval_test.exs ├── expug_test.exs ├── runtime_test.exs ├── stringifier_test.exs ├── test_helper.exs ├── todo_test.exs ├── tokenizer_test.exs ├── tokenizer_tools_test.exs ├── transformer_test.exs └── visitor_test.exs /.gitignore: -------------------------------------------------------------------------------- 1 | /_build 2 | /cover 3 | /deps 4 | erl_crash.dump 5 | *.ez 6 | /doc 7 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: elixir 2 | elixir: 3 | - '1.2.6' 4 | - '1.5.2' 5 | script: 6 | - mix test --exclude pending 7 | cache: 8 | directories: 9 | - _deps 10 | - build 11 | - node_modules 12 | after_success: 13 | - if [ "$TRAVIS_BRANCH" = "master" -a "$TRAVIS_PULL_REQUEST" = "false" ]; then mix docs; npm install git-update-ghpages; ./node_modules/.bin/git-update-ghpages -e; fi 14 | env: 15 | global: 16 | - GIT_NAME: Travis CI 17 | - GIT_EMAIL: nobody@nobody.org 18 | - GITHUB_REPO: rstacruz/expug 19 | - GIT_SOURCE: doc 20 | -------------------------------------------------------------------------------- /HISTORY.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## [v0.9.2] 4 | > Oct 26, 2017 5 | 6 | - Fix issue where elements with text (eg, `title Hello`) produce errors. 7 | 8 | [v0.9.2]: https://github.com/rstacruz/expug/compare/v0.9.1...v0.9.2 9 | 10 | ## [v0.9.1] 11 | > Oct 19, 2017 12 | 13 | - Fix problem with value-less attributes (eg, `input(required type="text")`). 14 | 15 | [v0.9.1]: https://github.com/rstacruz/expug/compare/v0.9.0...v0.9.1 16 | 17 | ## [v0.9.0] 18 | > Oct 18, 2017 19 | 20 | - [#10] - Fix Elixir 1.5.2 warnings about `String.rstrip/1`. 21 | - [#8] - Fix `$` not being allowed in plain texts. ([@cubeguerrero], [#7]) 22 | 23 | [#10]: https://github.com/rstacruz/expug/issues/10 24 | [#7]: https://github.com/rstacruz/expug/issues/7 25 | [#8]: https://github.com/rstacruz/expug/issues/8 26 | [@cubeguerrero]: https://github.com/cubeguerrero 27 | [v0.9.0]: https://github.com/rstacruz/expug/compare/v0.8.0...v0.9.0 28 | 29 | ## [v0.8.0] 30 | > Mar 14, 2017 31 | 32 | - [#6] - Fixes consecutive if-else blocks. 33 | 34 | [v0.8.0]: https://github.com/rstacruz/expug/compare/v0.7.3...v0.8.0 35 | 36 | ## [v0.7.3] 37 | > Aug 29, 2016 38 | 39 | - [#5] - Fixes problems with some nodes rendering as text. 40 | 41 | [v0.7.3]: https://github.com/rstacruz/expug/compare/v0.7.2...v0.7.3 42 | 43 | ## [v0.7.2] 44 | > Aug 29, 2016 45 | 46 | - Fix multiline dot blocks (`script.`). 47 | 48 | [v0.7.2]: https://github.com/rstacruz/expug/compare/v0.7.1...v0.7.2 49 | 50 | ## [v0.7.1] 51 | > Jul 29, 2016 52 | 53 | Squash Elixir warnings; no functional changes. 
54 | 55 | [v0.7.1]: https://github.com/rstacruz/expug/compare/v0.7.0...v0.7.1 56 | 57 | ## [v0.7.0] 58 | > Jul 29, 2016 59 | 60 | Support improved multiline. Write `=`, `!=` or `-` immediately followed by a newline. All text indented inside it will be treated as part of an Elixir expression. 61 | 62 | ```jade 63 | = 64 | render App.MyView, "index.html", 65 | [conn: @conn] ++ 66 | assigns 67 | ``` 68 | 69 | Error messages have also been improved. 70 | 71 | [v0.7.0]: https://github.com/rstacruz/expug/compare/v0.6.0...v0.7.0 72 | 73 | ## [v0.6.0] 74 | > Jul 25, 2016 75 | 76 | Fix: Line comments have been changed from `-//` to `//-` (had a mistake in implementing that, sorry!) 77 | 78 | [v0.6.0]: https://github.com/rstacruz/expug/compare/v0.5.0...v0.6.0 79 | 80 | ## [v0.5.0] 81 | > Jul 25, 2016 82 | 83 | HTML comments are now supported. They are just like `-//` comments, but they will render as ``. 84 | 85 | ```jade 86 | // This is a comment 87 | (Anything below it will be part of the comment) 88 | ``` 89 | 90 | [v0.5.0]: https://github.com/rstacruz/expug/compare/v0.4.0...v0.5.0 91 | 92 | ## [v0.4.0] 93 | > Jul 23, 2016 94 | 95 | Value-less boolean attributes are now supported. 96 | 97 | ```jade 98 | textarea(spellcheck) 99 | ``` 100 | 101 | Unescaped text (`!=`) is now supported. 102 | 103 | ```jade 104 | div!= markdown_to_html(@article.body) |> sanitize() 105 | ``` 106 | 107 | You can now change the `raw` helper in case you're not using Phoenix. The `raw_helper` (which defaults to `"raw"` as Phoenix uses) is used on unfiltered text (such as `!= text`). 108 | 109 | ```ex 110 | Expug.to_eex!("div= \"Hello\"", raw_helper: "") 111 | ``` 112 | 113 | [v0.4.0]: https://github.com/rstacruz/expug/compare/v0.3.0...v0.4.0 114 | 115 | ## [v0.3.0] 116 | > Jul 21, 2016 117 | 118 | [#3] - Attribute values are now escaped properly. This means you can now properly do: 119 | 120 | ```jade 121 | - json = "{\"hello\":\"world\"}" 122 | div(data-value=json) 123 | ``` 124 | 125 | ```html 126 |
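<div data-value="{&quot;hello&quot;:&quot;world&quot;}"></div>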
127 | ``` 128 | 129 | `nil` values are also now properly handled, along with boolean values. 130 | 131 | ```jade 132 | textarea(spellcheck=nil) 133 | textarea(spellcheck=true) 134 | textarea(spellcheck=false) 135 | ``` 136 | 137 | ```html 138 | 139 | 140 | 141 | ``` 142 | 143 | [#3]: https://github.com/rstacruz/expug/issues/3 144 | [v0.3.0]: https://github.com/rstacruz/expug/compare/v0.2.0...v0.3.0 145 | 146 | ## [v0.2.0] 147 | > Jul 17, 2016 148 | 149 | The new block text directive allows you to write text without Expug parsing. 150 | 151 | ```jade 152 | script. 153 | if (usingExpug) { 154 | alert('Awesome!') 155 | } 156 | ``` 157 | 158 | Added support for multiline code. Lines ending in `{`, `(`, `[` or `,` will assume to be wrapped. 159 | 160 | ```jade 161 | = render App.FooView, "nav.html", 162 | conn: @conn, 163 | action: { 164 | "Create new", 165 | item_path(@conn, :new) } 166 | ``` 167 | 168 | [v0.2.0]: https://github.com/rstacruz/expug/compare/v0.1.1...v0.2.0 169 | 170 | ## [v0.1.1] 171 | > Jun 27, 2016 172 | 173 | Expug now supports `if do ... end` and other blocks. 174 | 175 | ```jade 176 | = if @error do 177 | .alert Uh oh! Check your form and try again. 178 | ``` 179 | 180 | [v0.1.1]: https://github.com/rstacruz/expug/compare/v0.0.1...v0.1.1 181 | 182 | ## [v0.0.1] 183 | > Jun 26, 2016 184 | 185 | Initial release. 186 | 187 | [v0.0.1]: https://github.com/rstacruz/expug/tree/v0.0.1 188 | [#5]: https://github.com/rstacruz/expug/issues/5 189 | [#6]: https://github.com/rstacruz/expug/issues/6 190 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | # Expug 4 | 5 | > Indented shorthand HTML templates for Elixir 6 | 7 | [![](https://img.shields.io/github/stars/rstacruz/expug.svg?style=social)](https://github.com/rstacruz/expug) 8 | [![](https://img.shields.io/travis/rstacruz/expug.svg?branch=master)](https://travis-ci.org/rstacruz/expug "See test builds") 9 | 10 | Expug is a template language based on [Pug][] (formerly known as [Jade][]). 11 | It lets you write HTML as indented shorthand, inspired by Haml, Slim, Pug/Jade, and so on. 12 | 13 | 14 | 15 | ```jade 16 | doctype html 17 | html 18 | meta(charset="utf-8") 19 | title Hello, world! 20 | body 21 | a(href=@link) 22 | | This is a link 23 | ``` 24 | 25 | [Pug]: http://pugjs.org/ 26 | [Jade]: http://jade-lang.com/ 27 | 28 | ## Installation 29 | 30 | Add expug to your list of dependencies in `mix.exs`: 31 | 32 | ```elixir 33 | def deps do 34 | [{:expug, "~> 0.9"}] 35 | end 36 | ``` 37 | 38 | Also see [phoenix_expug](https://github.com/rstacruz/phoenix_expug) for Phoenix integration. 39 | 40 | ## Syntax 41 | 42 | Use CSS-like selectors for elements, and express your nesting through indentations. 43 | 44 | ```jade 45 | ul.links 46 | li 47 | a(href="/") This is a link 48 | ``` 49 | 50 | Read more: [Syntax](https://hexdocs.pm/expug/syntax.html) 51 | 52 | ## Why should I use it? 53 | 54 | Read on [this comparison](https://hexdocs.pm/expug/prior_art.html) to see how Expug compares to other similar libraries. 55 | 56 | ## Thanks 57 | 58 | **expug** © 2016-2017, Rico Sta. Cruz. Released under the [MIT] License.
59 | Authored and maintained by Rico Sta. Cruz with help from contributors ([list][contributors]). 60 | 61 | > [ricostacruz.com](http://ricostacruz.com)  ·  62 | > GitHub [@rstacruz](https://github.com/rstacruz)  ·  63 | > Twitter [@rstacruz](https://twitter.com/rstacruz) 64 | 65 | [MIT]: http://mit-license.org/ 66 | [contributors]: http://github.com/rstacruz/expug/contributors 67 | 68 | [![](https://img.shields.io/github/followers/rstacruz.svg?style=social&label=@rstacruz)](https://github.com/rstacruz)   69 | [![](https://img.shields.io/twitter/follow/rstacruz.svg?style=social&label=@rstacruz)](https://twitter.com/rstacruz) 70 | -------------------------------------------------------------------------------- /config/config.exs: -------------------------------------------------------------------------------- 1 | # This file is responsible for configuring your application 2 | # and its dependencies with the aid of the Mix.Config module. 3 | use Mix.Config 4 | 5 | # This configuration is loaded before any dependency and is restricted 6 | # to this project. If another project depends on this project, this 7 | # file won't be loaded nor affect the parent project. For this reason, 8 | # if you want to provide default values for your application for 9 | # 3rd-party users, it should be done in your "mix.exs" file. 10 | 11 | # You can configure for your application as: 12 | # 13 | # config :expug, key: :value 14 | # 15 | # And access this configuration in your application as: 16 | # 17 | # Application.get_env(:expug, :key) 18 | # 19 | # Or configure a 3rd-party app: 20 | # 21 | # config :logger, level: :info 22 | # 23 | 24 | config :ex_unit, :capture_log, true 25 | 26 | # It is also possible to import configuration files, relative to this 27 | # directory. For example, you can emulate configuration per environment 28 | # by uncommenting the line below and defining dev.exs, test.exs and such. 29 | # Configuration from the imported file will override the ones defined 30 | # here (which is why it is important to import them last). 31 | # 32 | # import_config "#{Mix.env}.exs" 33 | -------------------------------------------------------------------------------- /docs/misc/how_it_works.md: -------------------------------------------------------------------------------- 1 | # Misc: How it works 2 | 3 | Expug converts a `.pug` template into an EEx string: 4 | 5 | ```elixir 6 | iex> Expug.to_eex!(~s[div(role="alert")= @message]) 7 | "
><%= @message %>" 8 | ``` 9 | 10 | See the module `Expug` for details. 11 | -------------------------------------------------------------------------------- /docs/misc/line_number_preservation.md: -------------------------------------------------------------------------------- 1 | # Misc: Line number preservation 2 | 3 | Eex has no provisions for source maps, so we'll have to emulate this by outputing EEx that matches line numbers *exactly* with the source `.pug` files. 4 | 5 | ```jade 6 | div 7 | | Hello, 8 | = @name 9 | 10 | button.btn 11 | | Save 12 | ``` 13 | 14 | ```html 15 |
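<div>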
16 | Hello, 17 | <%= @name %> 18 | <% 19 | %></div><%="\n"%><button class="btn"> 20 | Save<%="\n"%></button>
21 | ``` 22 | 23 | ## Internal notes 24 | 25 | `Expug.Builder` brings this output: 26 | 27 | ```js 28 | lines = %{ 29 | :lines => 6, 30 | 1 => [ "<div>
" ], 31 | 2 => [ "Hello," ], 32 | 3 => [ "<%= @name %>" ], 33 | 34 | 5 => [ "", "
" ] 36 | } 37 | ``` 38 | 39 | `Expug.Stringifier` will take this and yield a final EEx string. The rules it follows are: 40 | 41 | - Multiline lines (like 6) will be joined with a fake newline (`<%= "\n" %>`). 42 | - Empty lines (like line 4) will start with `<%`, with a final `%>` in the next line that has something. 43 | -------------------------------------------------------------------------------- /docs/misc/prior_art.md: -------------------------------------------------------------------------------- 1 | # Misc: Prior art 2 | 3 | > a.k.a., "Why should I use Expug over other template engines?" 4 | 5 | There's [calliope] and [slime] that brings Haml and Slim to Elixir, respectively. Expug offers a bit more: 6 | 7 | ## Pug/Jade syntax! 8 | 9 | The Pug syntax is something I personally find more sensible than Slim, and less noisy than Haml. 10 | 11 | ``` 12 | # Expug 13 | p.alert(align="center") Hello! 14 | 15 | # HAML 16 | %p.alert{align: "center"} Hello! 17 | 18 | # Slime 19 | p.alert align="center" Hello! 20 | ``` 21 | 22 | Expug tries to infer what you mean based on balanced parentheses. In contrast, you're forced to use `"#{...}"` in slime. 23 | 24 | ``` 25 | # Expug 26 | script(src=static_path(@conn, "/js/app.js") type="text/javascript") 27 | # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 28 | 29 | # Slime 30 | script[src="#{static_path(@conn, "/js/app.js")}" type="text/javascript"] 31 | ``` 32 | 33 | Also notice that you're forced to use `[` in Slime if your attributes have `(` in it. Expug doesn't have this restriction. 34 | 35 | ``` 36 | # Slime 37 | a(href="/") 38 | a(href="/link") 39 | a[href="/link" onclick="alert('Are you sure?')"] 40 | ``` 41 | 42 | Slime has optional braces, which leads to a lot of confusion. In Expug, parentheses are required. 43 | 44 | ``` 45 | # Slime 46 | strong This is bold text. 47 | strong color="blue" This is also valid, but confusing. 48 | 49 | # Expug 50 | strong(color="blue") Easier and less confusing! 51 | ``` 52 | 53 | 54 | ## True multilines 55 | 56 | Expug has a non-line-based tokenizer that can figure out multiline breaks. 57 | 58 | ``` 59 | # Expug 60 | = render App.UserView, 61 | "show.html", 62 | conn: @conn 63 | 64 | div( 65 | style="font-weight: bold" 66 | role="alert" 67 | ) 68 | ``` 69 | 70 | Using brace-matching, Expug's parser can reliably figure out what you mean. 71 | 72 | ``` 73 | # Expug 74 | script( 75 | src=static_path( 76 | @conn, 77 | "/js/app.js")) 78 | ``` 79 | 80 | ## Correct line number errors 81 | 82 | Errors in Expug will always map to the correct source line numbers. 83 | 84 | > CompileError in show.html.pug (line 2):
85 | > assign @xyz not available in eex template. 86 | 87 | [calliope]: https://github.com/nurugger07/calliope 88 | [slime]: https://github.com/slime-lang/slime 89 | -------------------------------------------------------------------------------- /docs/syntax/code.md: -------------------------------------------------------------------------------- 1 | # Syntax: Code 2 | 3 | ## Unbuffered code 4 | Unbuffered code starts with `-` does not add any output directly. 5 | 6 | ```jade 7 | - name = assigns.name 8 | div(id="name-#{name}") 9 | ``` 10 | 11 | ## Bufferred code 12 | 13 | Buffered code starts with `=` and outputs the result of evaluating the Elixir expression in the template. For security, it is first HTML escaped. 14 | 15 | ```jade 16 | p= "Hello, #{name}" 17 | ``` 18 | 19 | ## Unescaped code 20 | 21 | Buffered code may be unescaped by using `!=`. This skips the HTML escaping. 22 | 23 | ```jade 24 | div!= markdown_to_html(@article.body) |> sanitize() 25 | ``` 26 | 27 | ## Conditionals and Loops 28 | 29 | For `if`, `cond`, `try`, `for`, an `end` statement is automatically inferred. 30 | 31 | ```jade 32 | = if assigns.name do 33 | = "Hello, #{@name}" 34 | ``` 35 | 36 | They also need to begin with `=`, not `-`. Except for `else`, `rescue` and so on. 37 | 38 | ```jade 39 | = if assigns.current_user do 40 | | Welcome. 41 | - else 42 | | You are not signed in. 43 | ``` 44 | 45 | ## Multiline 46 | 47 | If a line ends in one of these characters: `,` `(` `{` `[`, the next line is considered to be part of the Elixir expression. 48 | 49 | ```jade 50 | = render App.PageView, 51 | "index.html", 52 | conn: @conn 53 | ``` 54 | 55 | You may also force multiline by starting a line with `=` immediately followed by a newline. Any text indented after this will be treated as an Elixir expression, regardless of what each line ends in. 56 | 57 | ```jade 58 | = 59 | render App.PageView, 60 | "index.html", 61 | [conn: @conn] ++ 62 | assigns 63 | ``` 64 | -------------------------------------------------------------------------------- /docs/syntax/comments.md: -------------------------------------------------------------------------------- 1 | # Syntax: Comments 2 | 3 | Comments begin with `//-`. 4 | 5 | ```jade 6 | //- This is a comment 7 | ``` 8 | 9 | You may nest under it, and those lines will be ignored. 10 | 11 | ```jade 12 | //- everything here is ignored: 13 | a(href="/") 14 | | Link 15 | ``` 16 | 17 | `-#` is also supported to keep consistency with Elixir. 18 | 19 | ```jade 20 | -# This is also a comment 21 | ``` 22 | 23 | HTML comments 24 | ------------- 25 | 26 | HTML comments begin with `//` (no hyphen). They will be rendered as ``. 27 | 28 | ```jade 29 | // This is a comment 30 | ``` 31 | 32 | Also see 33 | -------- 34 | 35 | - 36 | -------------------------------------------------------------------------------- /docs/syntax/compatibility_with_pug.md: -------------------------------------------------------------------------------- 1 | # Syntax: Compatibility with Pug 2 | 3 | Expug retains most of Pug/Jade's features, adds some Elixir'isms, and drops the features that don't make sense. 4 | 5 | ## Added 6 | 7 | - __Multiline attributes__ are supported. As long as you use balanced braces, Expug is smart enough to know when to count the next line as part of an expression. 8 | 9 | ```jade 10 | button.btn( 11 | role='btn' 12 | class=( 13 | get_classname(@button) 14 | ) 15 | )= get_text "Submit" 16 | ``` 17 | 18 | - __Multiline codeb blocks__ are also supported. 
See [code](code.html) for rules on how this works. 19 | 20 | ```jade 21 | = render( 22 | App.MyView, 23 | "index.html", 24 | conn: @conn) 25 | ``` 26 | 27 | ## Changed 28 | 29 | - __Comments__ are done using `-#` as well as `-//`, following Elixir conventions. The old `-//` syntax is supported for increased compatibility with text editor syntax highlighting. 30 | 31 | - __Text attributes__ need to have double-quoted strings (`"`). Single-line strings will translate to Elixir char lists, which is likely not what you want. 32 | 33 | - __Statements with blocks__ like `= if .. do` ... `- end` should start with `=`, and end in `-`. This is the same as you would do in EEx. 34 | 35 | ## Removed 36 | 37 | The following features are not available due to the limitations of EEx. 38 | 39 | - [include](http://jade-lang.com/reference/includes) (partials) 40 | - [block/extends](http://jade-lang.com/reference/extends) (layouts & template inheritance) 41 | - [mixins](http://jade-lang.com/reference/mixins) (functions) 42 | 43 | The following syntactic sugars, are not implemented, simply because they're not idiomatic Elixir. There are other ways to accomplish them. 44 | 45 | - [case](http://jade-lang.com/reference/case/) 46 | - [conditionals](http://jade-lang.com/reference/conditionals) 47 | - [iteration](http://jade-lang.com/reference/iteration) 48 | 49 | The following are still unimplemented, but may be in the future. 50 | 51 | - [filters](http://jade-lang.com/reference/case/) 52 | - [interpolation](http://jade-lang.com/reference/interpolation/) 53 | - multi-line statements (`-\n ...`) 54 | 55 | The following are unimplemented, just because I don't want to implement them. 56 | 57 | - Doctype shorthands are limited to only `html` and `xml`. The [XHTML shorthands](http://jade-lang.com/reference/doctype/) were not implemented to discourage their use. 58 | 59 | ## The same 60 | 61 | - __Indent sensitivity__ rules of Pug/Jade have been preserved. This means you can do: 62 | 63 | ```jade 64 | html 65 | head 66 | title This is indented with 4 spaces 67 | ``` 68 | -------------------------------------------------------------------------------- /docs/syntax/doctype.md: -------------------------------------------------------------------------------- 1 | # Syntax: Doctype 2 | 3 | `doctype html` is shorthand for ``. It's only allowed at the beginning of the document. 4 | 5 | ```jade 6 | doctype html 7 | ``` 8 | 9 | These other doctypes are available: 10 | 11 | | Expug | HTML | 12 | | --- | --- | 13 | | `doctype html` | `` | 14 | | `doctype xml` | `` | 15 | 16 | ## Custom doctypes 17 | 18 | You may use other doctypes. 19 | 20 | ```jade 21 | doctype html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd" 22 | ``` 23 | 24 | ## Also see 25 | 26 | - 27 | -------------------------------------------------------------------------------- /docs/syntax/elements.md: -------------------------------------------------------------------------------- 1 | # Syntax: Elements 2 | 3 | Elements are just lines. 4 | 5 | ```jade 6 | div 7 | ``` 8 | 9 | ## Class names and ID's 10 | 11 | You may add `.classes` and `#id`s after an element name. 12 | 13 | ```jade 14 | p.alert 15 | div#box 16 | ``` 17 | 18 | If you do, the element name is optional. 19 | 20 | ```jade 21 | #box 22 | .alert 23 | ``` 24 | 25 | You may chain them as much as you need to. 26 | 27 | ```jade 28 | .alert.alert-danger#error 29 | ``` 30 | 31 | ## Attributes 32 | 33 | Enclose attributes in `(...)` after an element name. 
34 | 35 | ```jade 36 | a(href="google.com") Google 37 | a(class="button" href="google.com") Google 38 | .box(style="display: none") 39 | ``` 40 | 41 | The attribute values are Elixir expressions. 42 | 43 | ```jade 44 | script(src=static_path(@conn, "/js/app.js")) 45 | ``` 46 | 47 | ## Text 48 | 49 | Text after the classes/attributes are shown as plain text. See [text](text.html). 50 | 51 | ```jade 52 | a(href="google.com") Google 53 | ``` 54 | 55 | You may also use `|` for plain text with other elements. 56 | 57 | ```jade 58 | div 59 | | Welcome, new user! 60 | a(href="/signup") Register 61 | ``` 62 | 63 | ## Nesting 64 | 65 | Nest elements by indentation. 66 | 67 | ```jade 68 | ul 69 | li 70 | a(href="/") Home 71 | li 72 | a(href="/about") About 73 | ``` 74 | 75 | ## Multiline 76 | 77 | Attributes may span multiple lines. Expug tries to intelligently figure out what you mean by balancing `(` `[` `{` `"` `'` pairs. 78 | 79 | ```jade 80 | a( 81 | href=page_path( 82 | @conn, 83 | "index.html" 84 | ) 85 | )= "View list of pages" 86 | ``` 87 | -------------------------------------------------------------------------------- /docs/syntax/syntax.md: -------------------------------------------------------------------------------- 1 | # Syntax 2 | 3 | The syntax is based on Pug (formerly known as Jade). Most of Pug's syntax is supported. 4 | 5 | Elements 6 | -------- 7 | 8 | Write elements in short CSS-like syntax. Express nesting through indentation. 9 | 10 | ```jade 11 | .alert.alert-danger#error 12 | a(href="google.com") Google 13 | ``` 14 | See: [Elements](elements.html) 15 | 16 | Code 17 | ---- 18 | 19 | Use `=` and `-` to run Elixir code. 20 | 21 | ```jade 22 | = if @user do 23 | = "Welcome, #{@user.name}" 24 | - else 25 | | You're not signed in. 26 | ``` 27 | 28 | See: [Code](code.html) 29 | 30 | Text 31 | ---- 32 | 33 | Text nodes begin with `|`. 34 | 35 | ```jade 36 | a(href="/signup") 37 | | Register now 38 | ``` 39 | 40 | See: [Text](text.html) 41 | 42 | Comments 43 | -------- 44 | 45 | ```jade 46 | //- This is a comment 47 | -# this, too 48 | 49 | // this is an HTML comment 50 | ``` 51 | 52 | See: [Comments](comments.html) 53 | 54 | Doctype 55 | ------- 56 | 57 | ```jade 58 | doctype html 59 | ``` 60 | 61 | See: [Doctype](doctype.html) 62 | 63 | Compatibility with Pug 64 | ---------------------- 65 | 66 | Most of Pug's syntax is supported, with a few differences. 67 | See: [Compatibility with Pug](compatibility_with_pug.html) 68 | -------------------------------------------------------------------------------- /docs/syntax/text.md: -------------------------------------------------------------------------------- 1 | # Syntax: Text 2 | 3 | ## Piped text 4 | 5 | The simplest way of adding plain text to templates is to prefix the line with a `|` character. 6 | 7 | ```jade 8 | | Plain text can include html 9 | p 10 | | It must always be on its own line 11 | ``` 12 | 13 | ## Inline in a Tag 14 | 15 | Since it's a common use case, you can put text in a tag just by adding it inline after a space. 16 | 17 | ```jade 18 | p Plain text can include html 19 | ``` 20 | 21 | ## Block text 22 | 23 | Often you might want large blocks of text within a tag. A good example is with inline scripts or styles. To do this, just add a `.` after the tag (with no preceding space): 24 | 25 | ```jade 26 | script. 
27 | if (usingExpug) 28 | console.log('you are awesome') 29 | else 30 | console.log('use expug') 31 | ``` 32 | 33 | 34 | -------------------------------------------------------------------------------- /lib/expug.ex: -------------------------------------------------------------------------------- 1 | defmodule Expug do 2 | @moduledoc ~S""" 3 | Expug compiles templates to an eex template. 4 | 5 | `to_eex/2` turns an Expug source into an EEx template. 6 | 7 | iex> source = "div\n | Hello" 8 | iex> Expug.to_eex(source) 9 | {:ok, "
<div>\nHello<%= \"\\n\" %></div>
\n"} 10 | 11 | `to_eex!/2` is the same, and instead returns the result or throws an 12 | `Expug.Error`. 13 | 14 | iex> source = "div\n | Hello" 15 | iex> Expug.to_eex!(source) 16 | "
<div>\nHello<%= \"\\n\" %></div>
\n" 17 | 18 | ## Errors 19 | `to_eex/2` will give you this in case of an error: 20 | 21 | {:error, %{ 22 | type: :parse_error, 23 | position: {3, 2}, # line/col 24 | ... # other metadata 25 | }} 26 | 27 | Internally, the other classes will throw `%{type, position, ...}` which will 28 | be caught here. 29 | 30 | ## The `raw` helper 31 | Note that it needs `raw/1`, something typically provided by 32 | [Phoenix.HTML](http://devdocs.io/phoenix/phoenix_html/phoenix.html#raw/1). 33 | You don't need Phoenix.HTML however; a binding with `raw/1` would do. 34 | 35 | iex> Expug.to_eex!(~s[div(role="alert")= @message]) 36 | "><%= \"\\n\" %><%= @message %><%= \"\\n\" %>
\n" 37 | 38 | ## Internal notes 39 | 40 | `Expug.to_eex/2` pieces together 4 steps into a pipeline: 41 | 42 | - `tokenize/2` - turns source into tokens. 43 | - `compile/2` - turns tokens into an AST. 44 | - `build/2` - turns an AST into a line map. 45 | - `stringify/2` - turns a line map into an EEx template. 46 | 47 | ## Also see 48 | 49 | - `Expug.Tokenizer` 50 | - `Expug.Compiler` 51 | - `Expug.Builder` 52 | - `Expug.Stringifier` 53 | """ 54 | 55 | defdelegate tokenize(source, opts), to: Expug.Tokenizer 56 | defdelegate compile(tokens, opts), to: Expug.Compiler 57 | defdelegate build(ast, opts), to: Expug.Builder 58 | defdelegate stringify(lines, opts), to: Expug.Stringifier 59 | 60 | @doc ~S""" 61 | Compiles an Expug template to an EEx template. 62 | 63 | Returns `{:ok, result}`, where `result` is an EEx string. On error, it will 64 | return `{:error, ...}`. 65 | 66 | ## Options 67 | All options are optional. 68 | 69 | * `attr_helper` (String) - the attribute helper to use (default: `"Expug.Runtime.attr"`) 70 | * `raw_helper` (String) - the raw helper to use (default: `"raw"`) 71 | """ 72 | def to_eex(source, opts \\ []) do 73 | try do 74 | eex = source 75 | |> tokenize(opts) 76 | |> compile(opts) 77 | |> build(opts) 78 | |> stringify(opts) 79 | {:ok, eex} 80 | catch %{type: _type} = err-> 81 | {:error, err} 82 | end 83 | end 84 | 85 | @doc ~S""" 86 | Compiles an Expug template to an EEx template and raises errors on failure. 87 | 88 | Returns the EEx string on success. On failure, it raises `Expug.Error`. 89 | """ 90 | def to_eex!(source, opts \\ []) do 91 | case to_eex(source, opts) do 92 | {:ok, eex} -> 93 | eex 94 | {:error, err} -> 95 | err = err |> Map.put(:source, source) 96 | raise Expug.Error.exception(err) 97 | end 98 | end 99 | end 100 | -------------------------------------------------------------------------------- /lib/expug/builder.ex: -------------------------------------------------------------------------------- 1 | defmodule Expug.Builder do 2 | @moduledoc ~S""" 3 | Builds lines from an AST. 4 | 5 | iex> source = "div\n | Hello" 6 | iex> with tokens <- Expug.Tokenizer.tokenize(source), 7 | ...> ast <- Expug.Compiler.compile(tokens), 8 | ...> lines <- Expug.Builder.build(ast), 9 | ...> do: lines 10 | %{ 11 | :lines => 2, 12 | 1 => ["
"], 13 | 2 => ["Hello", "
"] 14 | } 15 | 16 | This gives you a map of lines that the `Stringifier` will work on. 17 | 18 | ## Also see 19 | - `Expug.Compiler` builds the AST used by this builder. 20 | - `Expug.Stringifier` takes this builder's output. 21 | """ 22 | 23 | require Logger 24 | 25 | # See: http://www.w3.org/TR/html5/syntax.html#void-elements 26 | @void_elements ["area", "base", "br", "col", "embed", "hr", "img", "input", 27 | "keygen", "link", "meta", "param", "source", "track", "wbr"] 28 | 29 | @defaults %{ 30 | attr_helper: "Expug.Runtime.attr", 31 | raw_helper: "raw" 32 | } 33 | 34 | def build(ast, opts \\ []) do 35 | opts = Enum.into(opts, @defaults) 36 | 37 | %{lines: 0, options: opts, doctype: nil} 38 | |> make(ast) 39 | |> Map.delete(:options) 40 | end 41 | 42 | @doc """ 43 | Builds elements. 44 | """ 45 | def make(doc, %{type: :document} = node) do 46 | doc 47 | |> Map.put(:doctype, :html) 48 | |> make(node[:doctype]) 49 | |> children(node[:children]) 50 | |> Map.delete(:doctype) 51 | end 52 | 53 | def make(doc, %{type: :doctype, value: "html5"} = node) do 54 | doc 55 | |> put(node, "") 56 | end 57 | 58 | def make(doc, %{type: :doctype, value: "xml"} = node) do 59 | doc 60 | |> Map.put(:doctype, :xml) 61 | |> put(node, ~s()) 62 | end 63 | 64 | def make(doc, %{type: :doctype, value: value} = node) do 65 | doc 66 | |> put(node, "") 67 | end 68 | 69 | @doc """ 70 | Builds elements. 71 | """ 72 | def make(doc, %{type: :element, children: list} = node) do 73 | doc 74 | |> put(node, element(doc, node)) 75 | |> children(list) 76 | |> put_last(" node[:name] <> ">") 77 | end 78 | 79 | def make(doc, %{type: :element} = node) do 80 | doc 81 | |> put(node, self_closing_element(doc, node)) 82 | end 83 | 84 | def make(doc, %{type: :statement, value: value, children: [_|_] = list} = node) do 85 | doc 86 | |> put(node, "<% #{value} %>") 87 | |> put_collapse(node) 88 | |> children(list) 89 | |> add_closing(node) 90 | end 91 | 92 | def make(doc, %{type: :statement, value: value} = node) do 93 | doc 94 | |> put(node, "<% #{value} %>") 95 | end 96 | 97 | @doc """ 98 | Builds text. 
99 | """ 100 | def make(doc, %{type: :raw_text, value: value} = node) do 101 | doc 102 | |> put(node, "#{value}") 103 | end 104 | 105 | def make(doc, %{type: :buffered_text, value: value, children: [_|_] = list} = node) do 106 | doc 107 | |> put(node, "<%= #{value} %>") 108 | |> put_collapse(node) 109 | |> children(list) 110 | |> add_closing(node) 111 | end 112 | 113 | def make(doc, %{type: :buffered_text, value: value} = node) do 114 | doc 115 | |> put(node, "<%= #{value} %>") 116 | end 117 | 118 | def make(doc, %{type: :html_comment, value: value} = node) do 119 | doc 120 | |> put(node, "") 121 | end 122 | 123 | # Handle `!= for item <- list do` (has children) 124 | def make(doc, %{type: :unescaped_text, value: value, children: [_|_] = list} = node) do 125 | %{options: %{raw_helper: raw}} = doc 126 | doc 127 | |> put(node, "<%= #{raw}(#{value} %>") 128 | |> put_collapse(node) 129 | |> children(list) 130 | |> add_closing(node, ")") 131 | end 132 | 133 | # Handle `!= @hello` 134 | def make(doc, %{type: :unescaped_text, value: value} = node) do 135 | %{options: %{raw_helper: raw}} = doc 136 | case node[:open] do 137 | true -> 138 | doc 139 | |> put(node, "<%= #{raw}(#{value} %>") 140 | _ -> 141 | doc 142 | |> put(node, "<%= #{raw}(#{value}) %>") 143 | end 144 | end 145 | 146 | def make(doc, %{type: :block_text, value: value} = node) do 147 | doc 148 | |> put(node, value) 149 | end 150 | 151 | def make(doc, nil) do 152 | doc 153 | end 154 | 155 | def make(_doc, %{type: type, token: {position, _, _}}) do 156 | throw %{ 157 | type: :cant_build_node, 158 | node_type: type, 159 | position: position 160 | } 161 | end 162 | 163 | def add_closing(doc, node, suffix \\ "") 164 | def add_closing(doc, %{close: close}, suffix) do 165 | doc 166 | |> put_last_no_space("<% #{close}#{suffix} %>") 167 | end 168 | 169 | def add_closing(doc, _, _), do: doc 170 | 171 | @doc """ 172 | Builds a list of nodes. 173 | """ 174 | def children(doc, nil) do 175 | doc 176 | end 177 | 178 | def children(doc, list) do 179 | Enum.reduce list, doc, fn node, doc -> 180 | make(doc, node) 181 | end 182 | end 183 | 184 | @doc """ 185 | Builds an element opening tag. 186 | """ 187 | 188 | def element(doc, node) do 189 | "<" <> node[:name] <> attributes(doc, node[:attributes]) <> ">" 190 | end 191 | 192 | def self_closing_element(doc, node) do 193 | tag = node[:name] <> attributes(doc, node[:attributes]) 194 | cond do 195 | doc[:doctype] == :xml -> 196 | "<#{tag} />" 197 | self_closable?(node) -> 198 | "<#{tag}>" 199 | true -> 200 | "<#{tag}>" 201 | end 202 | end 203 | 204 | def self_closable?(node) do 205 | Enum.any?(@void_elements, &(&1 == node[:name])) && true 206 | end 207 | 208 | @doc ~S""" 209 | Stringifies an attributes map. 
210 | 211 | iex> doc = %{options: %{}} 212 | iex> Expug.Builder.attributes(doc, %{ "src" => [{:text, "image.jpg"}] }) 213 | " src=\"image.jpg\"" 214 | 215 | #iex> doc = %{options: %{}} 216 | #iex> Expug.Builder.attributes(doc, %{ "class" => [{:text, "a"}, {:text, "b"}] }) 217 | #" class=\"a b\"" 218 | 219 | iex> doc = %{options: %{attr_helper: "attr", raw_helper: "raw"}} 220 | iex> Expug.Builder.attributes(doc, %{ "src" => [{:eval, "@image"}] }) 221 | "<%= raw(attr(\"src\", @image)) %>" 222 | 223 | iex> doc = %{options: %{attr_helper: "attr", raw_helper: "raw"}} 224 | iex> Expug.Builder.attributes(doc, %{ "class" => [{:eval, "@a"}, {:eval, "@b"}] }) 225 | "<%= raw(attr(\"class\", Enum.join([@a, @b], \" \"))) %>" 226 | """ 227 | def attributes(_doc, nil), do: "" 228 | 229 | def attributes(doc, %{} = attributes) do 230 | Enum.reduce attributes, "", fn {key, values}, acc -> 231 | acc <> valueify(doc, key, values) 232 | end 233 | end 234 | 235 | def valueify(doc, key, [{:eval, value}]) do 236 | %{options: %{attr_helper: attr, raw_helper: raw}} = doc 237 | "<%= #{raw}(#{attr}(#{inspect(key)}, #{value})) %>" 238 | end 239 | 240 | def valueify(_doc, key, [{:text, value}]) do 241 | Expug.Runtime.attr(key, value) 242 | end 243 | 244 | def valueify(doc, key, values) when length(values) > 1 do 245 | %{options: %{attr_helper: attr, raw_helper: raw}} = doc 246 | inside = Enum.reduce values, "", fn 247 | {:eval, value}, acc -> 248 | acc |> str_join(value, ", ") 249 | {:text, value}, acc -> 250 | acc |> str_join(Expug.Runtime.attr_value(value), ", ") 251 | end 252 | 253 | "<%= #{raw}(#{attr}(#{inspect(key)}, Enum.join([#{inside}], \" \"))) %>" 254 | end 255 | 256 | def str_join(left, str, sep \\ " ") 257 | def str_join("", str, _sep), do: str 258 | def str_join(left, str, sep), do: left <> sep <> str 259 | 260 | @doc """ 261 | Adds a line based on a token's location. 262 | """ 263 | def put(%{lines: max} = doc, %{token: {{line, _col}, _, _}}, str) do 264 | doc 265 | |> update_line_count(line, max) 266 | |> Map.update(line, [str], &(&1 ++ [str])) 267 | end 268 | 269 | @doc """ 270 | Adds a line to the end of a document. 271 | Used for closing tags. 272 | """ 273 | def put_last(%{lines: line} = doc, str) do 274 | doc 275 | |> Map.update(line, [str], &(&1 ++ [str])) 276 | end 277 | 278 | @doc """ 279 | Puts a collapser on the lane after the given token. 280 | Used for if...end statements. 281 | """ 282 | def put_collapse(%{lines: max} = doc, %{token: {{line, _col}, _, _}}) do 283 | doc 284 | |> update_line_count(line + 1, max) 285 | |> Map.update(line + 1, [:collapse], &(&1 ++ [:collapse])) 286 | end 287 | 288 | @doc """ 289 | Adds a line to the end of a document, but without a newline before it. 290 | Used for closing `<% end %>`. 291 | """ 292 | def put_last_no_space(%{lines: line} = doc, str) do 293 | doc 294 | |> Map.update(line, [str], fn segments -> 295 | List.update_at(segments, -1, &(&1 <> str)) 296 | end) 297 | end 298 | 299 | @doc """ 300 | Updates the `:lines` count if the latest line is beyond the current max. 301 | """ 302 | def update_line_count(doc, line, max) when line > max do 303 | Map.put(doc, :lines, line) 304 | end 305 | 306 | def update_line_count(doc, _line, _max) do 307 | doc 308 | end 309 | end 310 | -------------------------------------------------------------------------------- /lib/expug/compiler.ex: -------------------------------------------------------------------------------- 1 | defmodule Expug.Compiler do 2 | @moduledoc """ 3 | Compiles tokens into an AST. 
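A rough sketch of how this is typically driven (an illustration, not a doctest;
see `Expug.to_eex/2` for the real pipeline):

    tokens = Expug.Tokenizer.tokenize("div Hello")
    ast = Expug.Compiler.compile(tokens)
    # ast is a map such as:
    # %{type: :document, children: [%{type: :element, name: "div", ...}]}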
4 | 5 | ## How it works 6 | Nodes are maps with a `:type` key. They are then filled up using a function 7 | with the same name as the type: 8 | 9 | node = %{type: :document} 10 | document({node, tokens}) 11 | 12 | This function returns another `{node, tokens}` tuple, where `node` is the 13 | updated node, and `tokens` are the rest of the tokens to parse. 14 | 15 | The functions (`document/1`) here can do 1 of these things: 16 | 17 | - Spawn a child, say, `%{type: :element}`, then delegate to its function (eg, `element()`). 18 | - Simply return a `{node, tokens}` - no transformation here. 19 | 20 | The functions `indent()` and `statement()` are a little different. It can 21 | give you an element, or a text node, or whatever. 22 | 23 | ## Also see 24 | - `Expug.Tokenizer` is used to build the tokens used by this compiler. 25 | - `Expug.Builder` uses the AST made by this compiler. 26 | """ 27 | 28 | require Logger 29 | 30 | @doc """ 31 | Compiles tokens. Returns `{:ok, ast}` on success. 32 | 33 | On failure, it returns `{:error, [type: type, position: {line, col}]}`. 34 | """ 35 | def compile(tokens, _opts \\ []) do 36 | tokens = Enum.reverse(tokens) 37 | node = %{type: :document} 38 | 39 | try do 40 | {node, _tokens} = document({node, tokens}) 41 | node = Expug.Transformer.transform(node) 42 | node 43 | catch 44 | {:compile_error, type, [{pos, token, _} | _]} -> 45 | # TODO: create an EOF token 46 | throw %{ type: type, position: pos, token_type: token } 47 | {:compile_error, type, []} -> 48 | throw %{ type: type } 49 | end 50 | end 51 | 52 | @doc """ 53 | A document. 54 | """ 55 | def document({node, [{_, :doctype, type} = t | tokens]}) do 56 | node = Map.put(node, :doctype, %{ 57 | type: :doctype, 58 | value: type, 59 | token: t 60 | }) 61 | indent({node, tokens}, [0]) 62 | end 63 | 64 | def document({node, tokens}) do 65 | indent({node, tokens}, [0]) # optional 66 | end 67 | 68 | @doc """ 69 | Indentation. Called with `depth` which is the current level its at. 70 | """ 71 | def indent({node, [{_, :indent, subdepth} | [_|_] = tokens]}, [d | _] = depths) 72 | when subdepth > d do 73 | if node[:children] == nil do 74 | throw {:compile_error, :unexpected_indent, hd(tokens)} 75 | end 76 | 77 | # Found children, start a new subtree. 78 | [child | rest] = Enum.reverse(node[:children] || []) 79 | {child, tokens} = statement({child, tokens}, [ subdepth | depths ]) 80 | |> indent([ subdepth | depths ]) 81 | 82 | # Go back to our tree. 83 | children = Enum.reverse([child | rest]) 84 | node = Map.put(node, :children, children) 85 | {node, tokens} 86 | |> indent(depths) 87 | end 88 | 89 | def indent({node, [{_, :indent, subdepth} | [_|_] = tokens]}, [d | _] = depths) 90 | when subdepth == d do 91 | {node, tokens} 92 | |> statement(depths) 93 | |> indent(depths) 94 | end 95 | 96 | def indent({node, [{_, :indent, subdepth} | [_|_]] = tokens}, [d | _]) 97 | when subdepth < d do 98 | # throw {:compile_error, :ambiguous_indentation, token} 99 | {node, tokens} 100 | end 101 | 102 | # End of file, no tokens left. 103 | def indent({node, []}, _depth) do 104 | {node, []} 105 | end 106 | 107 | def indent({_node, tokens}, _depth) do 108 | throw {:compile_error, :unexpected_token, tokens} 109 | end 110 | 111 | @doc """ 112 | A statement after an `:indent`. 113 | Can consume these: 114 | 115 | :element_name 116 | :element_class 117 | :element_id 118 | [:attribute_open [...] 
:attribute_close] 119 | [:buffered_text | :unescaped_text | :raw_text | :block_text] 120 | """ 121 | def statement({node, [{_, :line_comment, _} | [{_, :subindent, _} | _] = tokens]}, _depths) do 122 | # Pretend to be an element and capture stuff into it; discard it afterwards. 123 | # This is wrong anyway; it should be tokenized differently. 124 | subindent({node, tokens}) 125 | end 126 | 127 | def statement({node, [{_, :line_comment, _} | tokens]}, _depths) do 128 | {node, tokens} 129 | end 130 | 131 | def statement({node, [{_, :html_comment, value} = t | tokens]}, _depths) do 132 | child = %{type: :html_comment, value: value, token: t} 133 | {child, tokens} = append_subindent({child, tokens}) 134 | node = add_child(node, child) 135 | {node, tokens} 136 | end 137 | 138 | def statement({node, [{_, :element_name, _} = t | _] = tokens}, depths) do 139 | add_element(node, t, tokens, depths) 140 | end 141 | 142 | def statement({node, [{_, :element_class, _} = t | _] = tokens}, depths) do 143 | add_element(node, t, tokens, depths) 144 | end 145 | 146 | def statement({node, [{_, :element_id, _} = t | _] = tokens}, depths) do 147 | add_element(node, t, tokens, depths) 148 | end 149 | 150 | def statement({node, [{_, :raw_text, value} = t | tokens]}, _depth) do 151 | child = %{type: :raw_text, value: value, token: t} 152 | node = add_child(node, child) 153 | {node, tokens} 154 | end 155 | 156 | def statement({node, [{_, :buffered_text, value} = t | tokens]}, _depth) do 157 | child = %{type: :buffered_text, value: value, token: t} 158 | {child, tokens} = append_subindent({child, tokens}) 159 | node = add_child(node, child) 160 | {node, tokens} 161 | end 162 | 163 | def statement({node, [{_, :unescaped_text, value} = t | tokens]}, _depth) do 164 | child = %{type: :unescaped_text, value: value, token: t} 165 | {child, tokens} = append_subindent({child, tokens}) 166 | node = add_child(node, child) 167 | {node, tokens} 168 | end 169 | 170 | def statement({node, [{_, :statement, value} = t | tokens]}, _depth) do 171 | child = %{type: :statement, value: value, token: t} 172 | {child, tokens} = append_subindent({child, tokens}) 173 | node = add_child(node, child) 174 | {node, tokens} 175 | end 176 | 177 | def statement({_node, tokens}, _depths) do 178 | throw {:compile_error, :unexpected_token, tokens} 179 | end 180 | 181 | @doc """ 182 | Consumes `:subindent` tokens and adds them to the `value` of `node`. 183 | """ 184 | def append_subindent({node, [{_, :subindent, value} | tokens]}) do 185 | node = node 186 | |> Map.update(:value, value, &(&1 <> "\n#{value}")) 187 | {node, tokens} 188 | |> append_subindent() 189 | end 190 | 191 | def append_subindent({node, tokens}) do 192 | {node, tokens} 193 | end 194 | 195 | def add_element(node, t, tokens, depth) do 196 | child = %{type: :element, name: "div", token: t} 197 | {child, rest} = element({child, tokens}, node, depth) 198 | node = add_child(node, child) 199 | {node, rest} 200 | end 201 | 202 | @doc """ 203 | Parses an element. 204 | Returns a `%{type: :element}` node. 
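For `a.button Home`, the returned node is shaped roughly like this (a sketch;
token positions are made up):

    %{type: :element, name: "a",
      attributes: %{"class" => [{:text, "button"}]},
      children: [%{type: :raw_text, value: "Home", token: {{1, 10}, :raw_text, "Home"}}],
      token: {{1, 1}, :element_name, "a"}}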
205 | """ 206 | def element({node, tokens}, parent, depths) do 207 | case tokens do 208 | [{_, :element_name, value} | rest] -> 209 | node = Map.put(node, :name, value) 210 | element({node, rest}, parent, depths) 211 | 212 | [{_, :element_id, value} | rest] -> 213 | attr_list = add_attribute(node[:attributes] || %{}, "id", {:text, value}) 214 | node = Map.put(node, :attributes, attr_list) 215 | element({node, rest}, parent, depths) 216 | 217 | [{_, :element_class, value} | rest] -> 218 | attr_list = add_attribute(node[:attributes] || %{}, "class", {:text, value}) 219 | node = Map.put(node, :attributes, attr_list) 220 | element({node, rest}, parent, depths) 221 | 222 | [{_, :raw_text, value} = t | rest] -> 223 | # should be in children 224 | child = %{type: :raw_text, value: value, token: t} 225 | node = add_child(node, child) 226 | element({node, rest}, parent, depths) 227 | 228 | [{_, :buffered_text, value} = t | rest] -> 229 | child = %{type: :buffered_text, value: value, token: t} 230 | {child, rest} = append_subindent({child, rest}) 231 | node = add_child(node, child) 232 | element({node, rest}, parent, depths) 233 | 234 | [{_, :unescaped_text, value} = t | rest] -> 235 | child = %{type: :unescaped_text, value: value, token: t} 236 | {child, rest} = append_subindent({child, rest}) 237 | node = add_child(node, child) 238 | element({node, rest}, parent, depths) 239 | 240 | [{_, :block_text, _} | rest] -> 241 | t = hd(rest) 242 | {rest, lines} = subindent_capture(rest) 243 | child = %{type: :block_text, value: Enum.join(lines, "\n"), token: t} 244 | node = add_child(node, child) 245 | element({node, rest}, parent, depths) 246 | 247 | [{_, :attribute_open, _} | rest] -> 248 | {attr_list, rest} = attribute({node[:attributes] || %{}, rest}) 249 | node = Map.put(node, :attributes, attr_list) 250 | element({node, rest}, parent, depths) 251 | 252 | tokens -> 253 | {node, tokens} 254 | end 255 | end 256 | 257 | @doc """ 258 | Returns a list of `[type: :attribute]` items. 259 | """ 260 | def attribute({attr_list, tokens}) do 261 | case tokens do 262 | [{_, :attribute_key, key}, {_, :attribute_value, value} | rest] -> 263 | attr_list = add_attribute(attr_list, key, {:eval, value}) 264 | {attr_list, rest} 265 | |> attribute() 266 | 267 | [{_, :attribute_key, key} | rest] -> 268 | attr_list = add_attribute(attr_list, key, {:eval, true}) 269 | {attr_list, rest} 270 | |> attribute() 271 | 272 | [{_, :attribute_close, _} | rest] -> 273 | {attr_list, rest} 274 | 275 | rest -> 276 | {attr_list, rest} 277 | end 278 | end 279 | 280 | def add_attribute(list, key, value) do 281 | Map.update(list, key, [value], &(&1 ++ [value])) 282 | end 283 | 284 | @doc """ 285 | Adds a child to a Node. 286 | 287 | iex> Expug.Compiler.add_child(%{}, %{type: :a}) 288 | %{children: [%{type: :a}]} 289 | 290 | iex> src = %{children: [%{type: :a}]} 291 | ...> Expug.Compiler.add_child(src, %{type: :b}) 292 | %{children: [%{type: :a}, %{type: :b}]} 293 | """ 294 | def add_child(node, child) do 295 | Map.update(node, :children, [child], &(&1 ++ [child])) 296 | end 297 | 298 | @doc """ 299 | Matches `:subindent` tokens and discards them. Used for line comments (`-#`). 
300 | """ 301 | def subindent({node, [{_, :subindent, _} | rest]}) do 302 | subindent({node, rest}) 303 | end 304 | 305 | def subindent({node, rest}) do 306 | {node, rest} 307 | end 308 | 309 | def subindent_capture(tokens, lines \\ []) 310 | def subindent_capture([{_, :subindent, line} | rest], lines) do 311 | lines = lines ++ [line] 312 | subindent_capture(rest, lines) 313 | end 314 | 315 | def subindent_capture(rest, lines) do 316 | {rest, lines} 317 | end 318 | end 319 | -------------------------------------------------------------------------------- /lib/expug/expression_tokenizer.ex: -------------------------------------------------------------------------------- 1 | defmodule Expug.ExpressionTokenizer do 2 | @moduledoc ~S""" 3 | Tokenizes an expression. 4 | This is used by `Expug.Tokenizer` to match attribute values and support multiline. 5 | 6 | `expression/2` is used to capture an expression token. 7 | 8 | state 9 | |> Expug.ExpressionTokenizer.expression(:attribute_value) 10 | 11 | ## Valid expressions 12 | Expressions are combination of one or more of these: 13 | 14 | - a word without spaces 15 | - a balanced `(` ... `)` pair (or `[`, or `{`) 16 | - a string with single quotes `'...'` or double quotes `"..."` 17 | 18 | A balanced pair can have balanced pairs, words, and strings inside them. 19 | Double-quote strings can have `#{...}` interpolation inside them. 20 | 21 | ## Examples 22 | These are valid expressions: 23 | 24 | hello 25 | hello(1 + 2) 26 | "Hello world" # strings 27 | (hello world) # balanced (...) pair 28 | 29 | These aren't: 30 | 31 | hello world # spaces 32 | hello(world[) # pairs not balanced 33 | "hello #{foo(}" # not balanced inside an interpolation 34 | """ 35 | 36 | import Expug.TokenizerTools 37 | 38 | def expression(state, token_name) do 39 | state 40 | |> start_empty(token_name) 41 | |> many_of(&expression_fragment/1) 42 | end 43 | 44 | def expression_fragment(state) do 45 | state 46 | |> one_of([ 47 | &balanced_parentheses/1, 48 | &balanced_braces/1, 49 | &balanced_brackets/1, 50 | &double_quote_string/1, 51 | &single_quote_string/1, 52 | &expression_term/1 53 | ]) 54 | end 55 | 56 | @doc """ 57 | Matches simple expressions like `xyz` or even `a+b`. 58 | """ 59 | def expression_term(state) do 60 | state 61 | |> append(~r/^[^\(\)\[\]\{\}"', \n\t]+/) 62 | end 63 | 64 | @doc """ 65 | Matches simple expressions like `xyz`, but only for inside parentheses. 66 | These can have spaces. 
67 | """ 68 | def expression_term_inside(state) do 69 | state 70 | |> append(~r/^[^\(\)\[\]\{\}"']+/) 71 | end 72 | 73 | @doc """ 74 | Matches balanced `(...)` fragments 75 | """ 76 | def balanced_parentheses(state) do 77 | state 78 | |> balanced_pairs(~r/^\(/, ~r/^\)/) 79 | end 80 | 81 | @doc """ 82 | Matches balanced `{...}` fragments 83 | """ 84 | def balanced_braces(state) do 85 | state 86 | |> balanced_pairs(~r/^\{/, ~r/^\}/) 87 | end 88 | 89 | @doc """ 90 | Matches balanced `[...]` fragments 91 | """ 92 | def balanced_brackets(state) do 93 | state 94 | |> balanced_pairs(~r/^\[/, ~r/^\]/) 95 | end 96 | 97 | @doc """ 98 | Underlying implementation for `balanced_*` functions 99 | """ 100 | def balanced_pairs(state, left, right) do 101 | state 102 | |> append(left) 103 | |> optional(fn s -> s 104 | |> many_of(fn s -> s 105 | |> one_of([ 106 | &expression_fragment/1, 107 | &expression_term_inside/1 108 | ]) 109 | end) 110 | end) 111 | |> append(right) 112 | end 113 | 114 | @doc """ 115 | Matches an entire double-quoted string, taking care of interpolation and escaping 116 | """ 117 | def double_quote_string(state) do 118 | state 119 | |> append(~r/^"/) 120 | |> optional_many_of(fn s -> s 121 | |> one_of([ 122 | &(&1 |> append(~r/^#/) |> balanced_braces()), 123 | &(&1 |> append(~r/^(?:(?:\\")|[^"])/)) 124 | ]) 125 | end) 126 | |> append(~r/^"/) 127 | end 128 | 129 | @doc """ 130 | Matches an entire double-quoted string, taking care of escaping 131 | """ 132 | def single_quote_string(state) do 133 | state 134 | |> append(~r/^'/) 135 | |> optional_many_of(&(&1 |> append(~r/^(?:(?:\\')|[^'])/))) 136 | |> append(~r/^'/) 137 | end 138 | end 139 | -------------------------------------------------------------------------------- /lib/expug/expug_error.ex: -------------------------------------------------------------------------------- 1 | defmodule Expug.Error do 2 | @moduledoc """ 3 | A parse error 4 | """ 5 | 6 | defexception [:message, :line, :column] 7 | 8 | def exception(%{type: type, position: {ln, col}, source: source} = err) do 9 | {message, description} = exception_message(type, err) 10 | line_source = source |> String.split("\n") |> Enum.at(ln - 1) 11 | indent = repeat_string(col - 1, " ") 12 | 13 | %Expug.Error{ 14 | message: 15 | "#{message} on line #{ln}\n\n" 16 | <> " #{line_source}\n" 17 | <> " #{indent}^\n\n" 18 | <> description, 19 | line: ln, 20 | column: col 21 | } 22 | end 23 | 24 | def exception(err) do 25 | %Expug.Error{ 26 | message: "Error #{inspect(err)}" 27 | } 28 | end 29 | 30 | def repeat_string(times, string \\ " ") do 31 | 1..times |> Enum.reduce("", fn _, acc -> acc <> string end) 32 | end 33 | 34 | def exception_message(:parse_error, %{expected: _expected}) do 35 | { 36 | "Parse error", 37 | """ 38 | Expug encountered a character it didn't expect. 39 | """ 40 | } 41 | end 42 | 43 | def exception_message(:unexpected_indent, _) do 44 | { 45 | "Unexpected indentation", 46 | """ 47 | Expug found spaces when it didn't expect any. 48 | """ 49 | } 50 | end 51 | 52 | def exception_message(:ambiguous_indentation, _) do 53 | { 54 | "Ambiguous indentation", 55 | """ 56 | Expug found spaces when it didn't expect any. 57 | """ 58 | } 59 | end 60 | 61 | def exception_message(type, _) do 62 | { 63 | "#{type} error", 64 | """ 65 | Expug encountered a #{type} error. 
66 | """ 67 | } 68 | end 69 | end 70 | -------------------------------------------------------------------------------- /lib/expug/runtime.ex: -------------------------------------------------------------------------------- 1 | defmodule Expug.Runtime do 2 | @moduledoc """ 3 | Functions used by Expug-compiled templates at runtime. 4 | 5 | ```eex 6 |
<div<%= raw(Expug.Runtime.attr("class", @class)) %>></div>
7 | ``` 8 | """ 9 | 10 | @doc """ 11 | Quotes a given `str` for use as an HTML attribute. 12 | """ 13 | def attr_value(str) do 14 | "\"#{attr_value_escape(str)}\"" 15 | end 16 | 17 | def attr_value_escape(str) do 18 | str 19 | |> String.replace("&", "&") 20 | |> String.replace("\"", """) 21 | |> String.replace("<", "<") 22 | |> String.replace(">", ">") 23 | end 24 | 25 | def attr(key, true) do 26 | " " <> key 27 | end 28 | 29 | def attr(_key, false) do 30 | "" 31 | end 32 | 33 | def attr(_key, nil) do 34 | "" 35 | end 36 | 37 | def attr(key, value) do 38 | " " <> key <> "=" <> attr_value(value) 39 | end 40 | end 41 | -------------------------------------------------------------------------------- /lib/expug/stringifier.ex: -------------------------------------------------------------------------------- 1 | defmodule Expug.Stringifier do 2 | @moduledoc """ 3 | Stringifies builder output. 4 | 5 | ## Also see 6 | - `Expug.Builder` builds the line map used by this stringifier. 7 | - `Expug.to_eex/1` is the main entry point that uses this stringifier. 8 | """ 9 | 10 | def stringify(%{} = doc, _opts \\ []) do 11 | {max, doc} = Map.pop(doc, :lines) 12 | doc = doc 13 | |> Map.delete(:doctype) 14 | |> Map.delete(:options) 15 | list = doc |> Map.to_list() |> Enum.sort() 16 | 17 | case render_lines(list, 0, max) do 18 | # Move the newline to the end 19 | "\n" <> rest -> rest <> "\n" 20 | rest -> rest 21 | end 22 | end 23 | 24 | # Works on a list of `{2, ["
"]}` tuples. 25 | # Each pass works on one line. 26 | # 27 | # %{ 28 | # :lines => 2, 29 | # 1 => ["
"], 30 | # 2 => ["", "
"] 31 | # } 32 | # 33 | # Renders into these in 2 passes: 34 | # 35 | # "\n
" 36 | # "\n<%= "\n" %>
" 37 | # 38 | defp render_lines([{line, elements} | rest], last, max) do 39 | {padding, meat} = render_elements(elements, line, last) 40 | cursor = line + count_newlines(meat) 41 | 42 | padding <> meat <> render_lines(rest, cursor, max) 43 | end 44 | 45 | defp render_lines([], _last, _max) do 46 | "" 47 | end 48 | 49 | # Renders a line. If it starts with :collapse, don't give 50 | # the `\n` 51 | defp render_elements([:collapse | elements], line, last) do 52 | { padding(line, last - 1), 53 | Enum.join(elements, ~S[<%= "\n" %>]) } 54 | end 55 | 56 | defp render_elements(elements, line, last) do 57 | { "\n" <> padding(line, last), 58 | Enum.join(elements, ~S[<%= "\n" %>]) } 59 | end 60 | 61 | # Counts the amount of newlines in a string 62 | defp count_newlines(str) do 63 | length(Regex.scan(~r/\n/, str)) 64 | end 65 | 66 | # Contructs `<% .. %>` padding. Used to fill in blank lines 67 | # in the source. 68 | defp padding(line, last) when line - last - 1 <= 0 do 69 | "" 70 | end 71 | 72 | defp padding(line, last) do 73 | "<%" <> newlines(line - last - 1) <> "%>" 74 | end 75 | 76 | # Gives `n` amounts of newlines. 77 | def newlines(n) when n <= 0 do 78 | "" 79 | end 80 | 81 | def newlines(n) do 82 | "\n" <> newlines(n - 1) 83 | end 84 | end 85 | -------------------------------------------------------------------------------- /lib/expug/tokenizer.ex: -------------------------------------------------------------------------------- 1 | defmodule Expug.Tokenizer do 2 | @moduledoc ~S""" 3 | Tokenizes a Pug template into a list of tokens. The main entry point is 4 | `tokenize/1`. 5 | 6 | iex> Expug.Tokenizer.tokenize("title= name") 7 | [ 8 | {{1, 8}, :buffered_text, "name"}, 9 | {{1, 1}, :element_name, "title"}, 10 | {{1, 1}, :indent, 0} 11 | ] 12 | 13 | Note that the tokens are reversed! It's easier to append to the top of a list 14 | rather than to the end, making it more efficient. 15 | 16 | This output is the consumed next by `Expug.Compiler`, which turns them into 17 | an Abstract Syntax Tree. 18 | 19 | ## Token types 20 | 21 | ``` 22 | div.blue#box 23 | ``` 24 | 25 | - `:indent` - 0 26 | - `:element_name` - `"div"` 27 | - `:element_class` - `"blue"` 28 | - `:element_id` - `"box"` 29 | 30 | ``` 31 | div(name="en") 32 | ``` 33 | 34 | - `:attribute_open` - `"("` 35 | - `:attribute_key` - `"name"` 36 | - `:attribute_value` - `"\"en\""` 37 | - `:attribute_close` - `")"` 38 | 39 | ``` 40 | div= hello 41 | ``` 42 | 43 | - `:buffered_text` - `hello` 44 | 45 | ``` 46 | div!= hello 47 | ``` 48 | 49 | - `:unescaped_text` - `hello` 50 | 51 | ``` 52 | div hello 53 | ``` 54 | 55 | - `:raw_text` - `"hello"` 56 | 57 | ``` 58 | | Hello there 59 | ``` 60 | 61 | - `:raw_text` - `"Hello there"` 62 | 63 | ``` 64 | = Hello there 65 | ``` 66 | 67 | - `:buffered_text` - `"Hello there"` 68 | 69 | ``` 70 | - foo = bar 71 | ``` 72 | 73 | - `:statement` - `foo = bar` 74 | 75 | ``` 76 | doctype html5 77 | ``` 78 | 79 | - `:doctype` - `html5` 80 | 81 | ``` 82 | -# comment 83 | more comments 84 | ``` 85 | 86 | - `:line_comment` - `comment` 87 | - `:subindent` - `more comments` 88 | 89 | ``` 90 | // comment 91 | more comments 92 | ``` 93 | 94 | - `:html_comment` - `comment` 95 | - `:subindent` - `more comments` 96 | 97 | ## Also see 98 | - `Expug.TokenizerTools` has the functions used by this tokenizer. 99 | - `Expug.Compiler` uses the output of this tokenizer to build an AST. 100 | - `Expug.ExpressionTokenizer` is used to tokenize expressions. 
101 | """ 102 | 103 | import Expug.TokenizerTools 104 | alias Expug.TokenizerTools.State 105 | 106 | @doc """ 107 | Tokenizes a string. 108 | Returns a list of tokens. Each token is in the format `{position, token, value}`. 109 | """ 110 | def tokenize(source, opts \\ []) do 111 | source = trim_trailing(source) 112 | run(source, opts, &document/1) 113 | end 114 | 115 | @doc """ 116 | Matches an entire document. 117 | """ 118 | def document(state) do 119 | state 120 | |> optional(&newlines/1) 121 | |> optional(&doctype/1) 122 | |> many_of( 123 | &(&1 |> element_or_text() |> newlines()), 124 | &(&1 |> element_or_text())) 125 | end 126 | 127 | @doc """ 128 | Matches `doctype html`. 129 | """ 130 | def doctype(state) do 131 | state 132 | |> discard(~r/^doctype/, :doctype_prelude) 133 | |> whitespace() 134 | |> eat(~r/^[^\n]+/, :doctype) 135 | |> optional(&newlines/1) 136 | end 137 | 138 | @doc """ 139 | Matches an HTML element, text node, or, you know... the basic statements. 140 | I don't know what to call this. 141 | """ 142 | def element_or_text(state) do 143 | state 144 | |> indent() 145 | |> one_of([ 146 | &line_comment/1, # `-# hello` 147 | &html_comment/1, # `// hello` 148 | &buffered_text/1, # `= hello` 149 | &unescaped_text/1, # `!= hello` 150 | &raw_text/1, # `| hello` 151 | &statement/1, # `- hello` 152 | &element/1 # `div.blue hello` 153 | ]) 154 | end 155 | 156 | @doc """ 157 | Matches any number of blank newlines. Whitespaces are accounted for. 158 | """ 159 | def newlines(state) do 160 | state 161 | |> discard(~r/^\n(?:[ \t]*\n)*/, :newlines) 162 | end 163 | 164 | @doc """ 165 | Matches an indentation. Gives a token that looks like `{_, :indent, 2}` 166 | where the last number is the number of spaces/tabs. 167 | 168 | Doesn't really care if you use spaces or tabs; a tab is treated like a single 169 | space. 
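For example, filtering a token stream down to its `:indent` tokens exposes the raw space count per line. A sketch, with the expected shape noted as a comment rather than asserted:

```elixir
"ul\n  li Hello"
|> Expug.Tokenizer.tokenize()
|> Enum.reverse()
|> Enum.filter(fn {_pos, type, _value} -> type == :indent end)
# Expected shape: [{{1, 1}, :indent, 0}, {{2, 1}, :indent, 2}]
```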
170 | """ 171 | def indent(state) do 172 | state 173 | |> eat(~r/^\s*/, :indent, &[{&3, :indent, String.length(&2)} | &1]) 174 | end 175 | 176 | @doc """ 177 | Matches `div.foo[id="name"]= Hello world` 178 | """ 179 | def element(state) do 180 | state 181 | |> element_descriptor() 182 | |> optional(&attributes_block/1) 183 | |> optional(fn s -> s 184 | |> one_of([ 185 | &sole_buffered_text/1, 186 | &sole_unescaped_text/1, 187 | &sole_raw_text/1, 188 | &block_text/1 189 | ]) 190 | end) 191 | end 192 | 193 | @doc """ 194 | Matches `div`, `div.foo` `div.foo.bar#baz`, etc 195 | """ 196 | def element_descriptor(state) do 197 | state 198 | |> one_of([ 199 | &element_descriptor_full/1, 200 | &element_name/1, 201 | &element_class_or_id_list/1 202 | ]) 203 | end 204 | 205 | @doc """ 206 | Matches `div.foo.bar#baz` 207 | """ 208 | def element_descriptor_full(state) do 209 | state 210 | |> element_name() 211 | |> element_class_or_id_list() 212 | end 213 | 214 | @doc """ 215 | Matches `.foo.bar#baz` 216 | """ 217 | def element_class_or_id_list(state) do 218 | state 219 | |> many_of(&element_class_or_id/1) 220 | end 221 | 222 | @doc """ 223 | Matches `.foo` or `#id` (just one) 224 | """ 225 | def element_class_or_id(state) do 226 | state 227 | |> one_of([ &element_class/1, &element_id/1 ]) 228 | end 229 | 230 | @doc """ 231 | Matches `.foo` 232 | """ 233 | def element_class(state) do 234 | state 235 | |> discard(~r/^\./, :dot) 236 | |> eat(~r/^[A-Za-z0-9_\-]+/, :element_class) 237 | end 238 | 239 | @doc """ 240 | Matches `#id` 241 | """ 242 | def element_id(state) do 243 | state 244 | |> discard(~r/^#/, :hash) 245 | |> eat(~r/^[A-Za-z0-9_\-]+/, :element_id) 246 | end 247 | 248 | @doc """ 249 | Matches `[name='foo' ...]` 250 | """ 251 | def attributes_block(state) do 252 | state 253 | |> optional_whitespace() 254 | |> one_of([ 255 | &attribute_bracket/1, 256 | &attribute_paren/1, 257 | &attribute_brace/1 258 | ]) 259 | end 260 | 261 | def attribute_bracket(state) do 262 | state 263 | |> eat(~r/^\[/, :attribute_open) 264 | |> optional_whitespace() 265 | |> optional(&attribute_list/1) 266 | |> eat(~r/^\]/, :attribute_close) 267 | end 268 | 269 | def attribute_paren(state) do 270 | state 271 | |> eat(~r/^\(/, :attribute_open) 272 | |> optional_whitespace() 273 | |> optional(&attribute_list/1) 274 | |> eat(~r/^\)/, :attribute_close) 275 | end 276 | 277 | def attribute_brace(state) do 278 | state 279 | |> eat(~r/^\{/, :attribute_open) 280 | |> optional_whitespace() 281 | |> optional(&attribute_list/1) 282 | |> eat(~r/^\}/, :attribute_close) 283 | end 284 | 285 | @doc """ 286 | Matches `foo='val' bar='val'` 287 | """ 288 | def attribute_list(state) do 289 | state 290 | |> optional_whitespace_or_newline() 291 | |> many_of( 292 | &(&1 |> attribute() |> attribute_separator() |> whitespace_or_newline()), 293 | &(&1 |> attribute())) 294 | |> optional_whitespace_or_newline() 295 | end 296 | 297 | @doc """ 298 | Matches an optional comma in between attributes. 
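Since the comma is discarded rather than tokenized, the two spellings shown below should yield the same token types and values. A quick check, as a sketch (positions are dropped before comparing because the comma shifts columns):

```elixir
strip_positions = fn source ->
  source
  |> Expug.Tokenizer.tokenize()
  |> Enum.map(fn {_pos, type, value} -> {type, value} end)
end

strip_positions.("div(id=a class=b)") == strip_positions.("div(id=a, class=b)")
# => true
```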
299 | 300 | div(id=a class=b) 301 | div(id=a, class=b) 302 | """ 303 | def attribute_separator(state) do 304 | state 305 | |> discard(~r/^,?/, :comma) 306 | end 307 | 308 | @doc """ 309 | Matches `foo='val'` or `foo` 310 | """ 311 | def attribute(state) do 312 | state 313 | |> one_of([ 314 | &attribute_key_value/1, 315 | &attribute_key/1 316 | ]) 317 | end 318 | 319 | def attribute_key_value(state) do 320 | state 321 | |> attribute_key() 322 | |> optional_whitespace() 323 | |> attribute_equal() 324 | |> optional_whitespace() 325 | |> attribute_value() 326 | end 327 | 328 | def attribute_key(state) do 329 | state 330 | |> eat(~r/^[A-Za-z][A-Za-z\-0-9:]*/, :attribute_key) 331 | end 332 | 333 | def attribute_value(state) do 334 | state 335 | |> Expug.ExpressionTokenizer.expression(:attribute_value) 336 | end 337 | 338 | def attribute_equal(state) do 339 | state 340 | |> discard(~r/^=/, :eq) 341 | end 342 | 343 | @doc "Matches whitespace; no tokens emitted" 344 | def whitespace(state) do 345 | state 346 | |> discard(~r/^[ \t]+/, :whitespace) 347 | end 348 | 349 | @doc "Matches whitespace or newline; no tokens emitted" 350 | def whitespace_or_newline(state) do 351 | state 352 | |> discard(~r/^[ \t\n]+/, :whitespace_or_newline) 353 | end 354 | 355 | def optional_whitespace(state) do 356 | state 357 | |> discard(~r/^[ \t]*/, :whitespace) 358 | end 359 | 360 | def optional_whitespace_or_newline(state) do 361 | state 362 | |> discard(~r/^[ \t\n]*/, :whitespace_or_newline) 363 | end 364 | 365 | @doc "Matches `=`" 366 | def sole_buffered_text(state) do 367 | state 368 | |> optional_whitespace() 369 | |> buffered_text() 370 | end 371 | 372 | @doc "Matches `!=`" 373 | def sole_unescaped_text(state) do 374 | state 375 | |> optional_whitespace() 376 | |> unescaped_text() 377 | end 378 | 379 | @doc "Matches text" 380 | def sole_raw_text(state) do 381 | state 382 | |> whitespace() 383 | |> eat(~r/^[^\n]+/, :raw_text) 384 | end 385 | 386 | @doc "Matches `title` in `title= hello`" 387 | def element_name(state) do 388 | state 389 | |> eat(~r/^[A-Za-z_][A-Za-z0-9:_\-]*/, :element_name) 390 | end 391 | 392 | def line_comment(state) do 393 | state 394 | |> one_of([ 395 | &(&1 |> discard(~r/^\/\/-/, :line_comment)), 396 | &(&1 |> discard(~r/^-\s*(?:#|\/\/)/, :line_comment)) 397 | ]) 398 | |> optional_whitespace() 399 | |> eat(~r/^[^\n]*/, :line_comment) 400 | |> optional(&subindent_block/1) 401 | end 402 | 403 | def block_text(state) do 404 | state 405 | |> eat(~r/^\./, :block_text) 406 | |> subindent_block() 407 | end 408 | 409 | def subindent_block(state) do 410 | sublevel = state |> get_next_indent() 411 | state 412 | |> many_of(& &1 |> newlines() |> subindent(sublevel)) 413 | end 414 | 415 | def subindent(state, level) do 416 | state 417 | |> discard(~r/^[ \t]{#{level}}/, :whitespace) 418 | |> eat(~r/^[^\n]*/, :subindent) 419 | end 420 | 421 | def get_indent([{_, :indent, text} | _]) do 422 | text 423 | end 424 | 425 | def get_indent([_ | rest]) do 426 | get_indent(rest) 427 | end 428 | 429 | def get_indent([]) do 430 | "" 431 | end 432 | 433 | def html_comment(state) do 434 | state 435 | |> discard(~r[^//], :html_comment) 436 | |> optional_whitespace() 437 | |> eat(~r/^[^\n$]*/, :html_comment) 438 | |> optional(&subindent_block/1) 439 | end 440 | 441 | def buffered_text(state) do 442 | state 443 | |> one_of([ 444 | &one_line_buffered_text/1, 445 | &multiline_buffered_text/1 446 | ]) 447 | end 448 | 449 | def one_line_buffered_text(state) do 450 | state 451 | |> discard(~r/^=/, :eq) 452 | |> 
optional_whitespace() 453 | |> eat(~r/^(?:[,\[\(\{]\s*\n|[^\n$])+/, :buffered_text) 454 | end 455 | 456 | def multiline_buffered_text(state) do 457 | state 458 | |> discard(~r/^=/, :eq) 459 | |> start_empty(:buffered_text) 460 | |> subindent_block() 461 | end 462 | 463 | def unescaped_text(state) do 464 | state 465 | |> one_of([ 466 | &one_line_unescaped_text/1, 467 | &multiline_unescaped_text/1 468 | ]) 469 | end 470 | 471 | def one_line_unescaped_text(state) do 472 | state 473 | |> discard(~r/^!=/, :bang_eq) 474 | |> optional_whitespace() 475 | |> eat(~r/^(?:[,\[\(\{]\s*\n|[^\n$])+/, :unescaped_text) 476 | end 477 | 478 | def multiline_unescaped_text(state) do 479 | state 480 | |> discard(~r/^!=/, :bang_eq) 481 | |> start_empty(:unescaped_text) 482 | |> subindent_block() 483 | end 484 | 485 | def raw_text(state) do 486 | state 487 | |> discard(~r/^\|/, :pipe) 488 | |> optional_whitespace() 489 | |> eat(~r/^[^\n]+/, :raw_text) 490 | end 491 | 492 | def statement(state) do 493 | state 494 | |> one_of([ 495 | &one_line_statement/1, 496 | &multiline_statement/1 497 | ]) 498 | end 499 | 500 | def one_line_statement(state) do 501 | state 502 | |> discard(~r/^\-/, :dash) 503 | |> optional_whitespace() 504 | |> eat(~r/^(?:[,\[\(\{]\s*\n|[^\n$])+/, :statement) 505 | end 506 | 507 | def multiline_statement(state) do 508 | state 509 | |> discard(~r/^\-/, :dash) 510 | |> start_empty(:statement) 511 | |> subindent_block() 512 | end 513 | 514 | @doc ~S""" 515 | Returns the next indentation level after some newlines. 516 | Infers the last indentation level based on `doc`. 517 | 518 | iex> source = "-#\n span" 519 | iex> doc = [{0, :indent, 0}] 520 | iex> Expug.Tokenizer.get_next_indent(%{tokens: doc, source: source, position: 2}, 0) 521 | 2 522 | """ 523 | def get_next_indent(%State{tokens: doc} = state) do 524 | level = get_indent(doc) 525 | get_next_indent(state, level) 526 | end 527 | 528 | @doc ~S""" 529 | Returns the next indentation level after some newlines. 530 | 531 | iex> source = "-#\n span" 532 | iex> Expug.Tokenizer.get_next_indent(%{tokens: [], source: source, position: 2}, 0) 533 | 2 534 | 535 | iex> source = "-#\n\n\n span" 536 | iex> Expug.Tokenizer.get_next_indent(%{tokens: [], source: source, position: 2}, 0) 537 | 2 538 | """ 539 | def get_next_indent(state, level) do 540 | %{tokens: [{_, :indent, sublevel} |_], position: pos} = 541 | state |> newlines() |> indent() 542 | if sublevel <= level, do: throw {:parse_error, pos, [:indent]} 543 | sublevel 544 | end 545 | 546 | # Shim for String.trim_trailing/1, which doesn't exist in Elixir 1.2.6. It 547 | # falls back to String.rstrip/1 in these cases. 548 | if Keyword.has_key?(String.__info__(:functions), :trim_trailing) do 549 | defp trim_trailing(source) do 550 | String.trim_trailing(source) 551 | end 552 | else 553 | defp trim_trailing(source) do 554 | String.rstrip(source) 555 | end 556 | end 557 | end 558 | -------------------------------------------------------------------------------- /lib/expug/tokenizer_tools.ex: -------------------------------------------------------------------------------- 1 | defmodule Expug.TokenizerTools do 2 | @moduledoc """ 3 | Builds tokenizers. 
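The moduledoc example that follows sketches the overall shape; a self-contained variant that can be pasted into `iex` as-is (module and token names are illustrative, not part of Expug) looks like this:

```elixir
defmodule DoctypeTokenizer do
  import Expug.TokenizerTools

  def tokenize(source) do
    run(source, [], &document/1)
  end

  defp document(state) do
    state
    |> discard(~r/^doctype/, :doctype_prelude)
    |> discard(~r/^[ \t]+/, :whitespace)
    |> eat(~r/^[a-z0-9]+/, :doctype_value)
  end
end

DoctypeTokenizer.tokenize("doctype html")
# => [{{1, 9}, :doctype_value, "html"}]
```

On input that does not match, `run/3` throws a map such as `%{type: :parse_error, position: {1, 1}, expected: [:doctype_prelude]}`, with the position already converted to `{line, col}`.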
4 | 5 | defmodule MyTokenizer do 6 | import Expug.TokenizerTools 7 | 8 | def tokenizer(source) 9 | run(source, [], &document/1) 10 | end 11 | 12 | def document(state) 13 | state 14 | |> discard(%r/^doctype /, :doctype_prelude) 15 | |> eat(%r/^[a-z0-9]+/, :doctype_value) 16 | end 17 | end 18 | 19 | ## The state 20 | 21 | `Expug.TokenizerTools.State` is a struct from the `source` and `opts` given to `run/3`. 22 | 23 | %{ tokens: [], source: "...", position: 0, options: ... } 24 | 25 | `run/3` creates the state and invokes a function you give it. 26 | 27 | source = "doctype html" 28 | run(source, [], &document/1) 29 | 30 | `eat/3` tries to find the given regexp from the `source` at position `pos`. 31 | If it matches, it returns a new state: a new token is added (`:open_quote` in 32 | this case), and the position `pos` is advanced. 33 | 34 | eat(state, ~r/^"/, :open_quote) 35 | 36 | If it fails to match, it'll throw a `{:parse_error, pos, [:open_quote]}`. 37 | Roughly this translates to "parse error in position *pos*, expected to find 38 | *:open_quote*". 39 | 40 | ## Mixing and matching 41 | 42 | `eat/3` will normally be wrapped into functions for most token types. 43 | 44 | def doctype(state) 45 | state 46 | |> discard(%r/^doctype/, :doctype_prelude) 47 | |> whitespace() 48 | |> eat(%r/^[a-z0-9]+/, :doctype_value) 49 | end 50 | 51 | def whitespace(state) 52 | state 53 | |> eat(^r/[ \s\t]+, :whitespace, :nil) 54 | end 55 | 56 | `one_of/3`, `optional/2`, `many_of/2` can then be used to compose these functions. 57 | 58 | state 59 | |> one_of([ &doctype/1, &foobar/1 ]) 60 | |> optional(&doctype/1) 61 | |> many_of(&doctype/1) 62 | """ 63 | 64 | alias Expug.TokenizerTools.State 65 | 66 | @doc """ 67 | Turns a State into a final result. 68 | 69 | Returns either `{:ok, doc}` or `{:parse_error, %{type, position, expected}}`. 70 | Guards against unexpected end-of-file. 71 | """ 72 | def finalize(%State{tokens: doc, source: source, position: position}) do 73 | if String.slice(source, position..-1) != "" do 74 | expected = Enum.uniq_by(get_parse_errors(doc), &(&1)) 75 | throw {:parse_error, position, expected} 76 | else 77 | doc 78 | |> scrub_parse_errors() 79 | |> convert_positions(source) 80 | end 81 | end 82 | 83 | @doc """ 84 | Runs; catches parse errors and throws them properly. 85 | """ 86 | def run(source, opts, fun) do 87 | state = %State{tokens: [], source: source, position: 0, options: opts} 88 | try do 89 | fun.(state) 90 | |> finalize() 91 | catch {:parse_error, position, expected} -> 92 | position = convert_positions(position, source) 93 | throw %{type: :parse_error, position: position, expected: expected} 94 | end 95 | end 96 | 97 | @doc """ 98 | Extracts the last parse errors that happened. 99 | 100 | In case of failure, `run/3` will check the last parse errors 101 | that happened. Returns a list of atoms of the expected tokens. 102 | """ 103 | def get_parse_errors([{_, :parse_error, expected} | rest]) do 104 | expected ++ get_parse_errors(rest) 105 | end 106 | 107 | def get_parse_errors(_) do 108 | [] 109 | end 110 | 111 | @doc """ 112 | Gets rid of the `:parse_error` hints in the document. 113 | """ 114 | def scrub_parse_errors(doc) do 115 | Enum.reject doc, fn {_, type, _} -> 116 | type == :parse_error 117 | end 118 | end 119 | 120 | @doc """ 121 | Finds any one of the given token-eater functions. 
122 | 123 | state |> one_of([ &brackets/1, &braces/1, &parens/1 ]) 124 | """ 125 | def one_of(state, funs, expected \\ []) 126 | def one_of(%State{} = state, [fun | rest], expected) do 127 | try do 128 | fun.(state) 129 | catch {:parse_error, _, expected_} -> 130 | one_of(state, rest, expected ++ expected_) 131 | end 132 | end 133 | 134 | def one_of(%State{position: pos}, [], expected) do 135 | throw {:parse_error, pos, expected} 136 | end 137 | 138 | @doc """ 139 | An optional argument. 140 | 141 | state |> optional(&text/1) 142 | """ 143 | def optional(state, fun) do 144 | try do 145 | fun.(state) 146 | catch 147 | {:parse_error, _, [nil | _]} -> 148 | # These are append errors, don't bother with it 149 | state 150 | 151 | {:parse_error, err_pos, expected} -> 152 | # Add a parse error pseudo-token to the document. They will be scrubbed 153 | # later on, but it will be inspected in case of a parse error. 154 | next = {err_pos, :parse_error, expected} 155 | Map.update(state, :tokens, [next], &[next | &1]) 156 | end 157 | end 158 | 159 | @doc """ 160 | Checks many of a certain token. 161 | """ 162 | def many_of(state, head) do 163 | many_of(state, head, head) 164 | end 165 | 166 | @doc """ 167 | Checks many of a certain token, and lets you provide a different `tail`. 168 | """ 169 | def many_of(state = %State{source: source, position: pos}, head, tail) do 170 | if String.slice(source, pos..-1) == "" do 171 | state 172 | else 173 | try do 174 | state |> head.() |> many_of(head, tail) 175 | catch {:parse_error, _, _} -> 176 | state |> tail.() 177 | end 178 | end 179 | end 180 | 181 | @doc """ 182 | Checks many of a certain token. 183 | 184 | Syntactic sugar for `optional(s, many_of(s, ...))`. 185 | """ 186 | def optional_many_of(state, head) do 187 | state 188 | |> optional(&(&1 |> many_of(head))) 189 | end 190 | 191 | @doc """ 192 | Consumes a token. 193 | 194 | See `eat/4`. 195 | """ 196 | def eat(state, expr) do 197 | eat(state, expr, nil, fn doc, _, _ -> doc end) 198 | end 199 | 200 | @doc """ 201 | Consumes a token. 202 | 203 | state 204 | |> eat(~r/[a-z]+/, :key) 205 | |> discard(~r/\s*=\s*/, :equal) 206 | |> eat(~r/[a-z]+/, :value) 207 | """ 208 | def eat(state, expr, token_name) do 209 | eat(state, expr, token_name, &([{&3, token_name, &2} | &1])) 210 | end 211 | 212 | @doc """ 213 | Consumes a token, but doesn't push it to the State. 214 | 215 | state 216 | |> eat(~r/[a-z]+/, :key) 217 | |> discard(~r/\s*=\s*/, :equal) 218 | |> eat(~r/[a-z]+/, :value) 219 | """ 220 | def discard(state, expr, token_name) do 221 | eat state, expr, token_name, fn state, _, _ -> state end 222 | end 223 | 224 | @doc """ 225 | Consumes a token. 226 | 227 | eat state, ~r/.../, :document 228 | 229 | Returns a `State`. Available parameters are: 230 | 231 | * `state` - assumed to be a state map (given by `run/3`). 232 | * `expr` - regexp expression. 233 | * `token_name` (atom, optional) - token name. 234 | * `reducer` (function, optional) - a function. 235 | 236 | ## Reducers 237 | 238 | If `reducer` is a function, `tokens` is transformed using that function. 239 | 240 | eat state, ~r/.../, :document, &[{&3, :document, &2} | &1] 241 | 242 | # &1 == tokens in current State 243 | # &2 == matched String 244 | # &3 == position 245 | 246 | ## Also see 247 | 248 | `discard/3` will consume a token, but not push it to the State. 
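Tying these pieces together (`one_of/2`, `many_of/3`, and a custom `eat/4` reducer), a small comma-separated list tokenizer could look like the sketch below; the module and token names are illustrative:

```elixir
defmodule ListTokenizer do
  import Expug.TokenizerTools

  # Matches comma-separated words or numbers, eg `foo, 42, bar`.
  def tokenize(source), do: run(source, [], &items/1)

  defp items(state) do
    state
    |> many_of(
      &(&1 |> item() |> discard(~r/^,\s*/, :comma)),
      &item/1)
  end

  defp item(state) do
    state |> one_of([&word/1, &number/1])
  end

  defp word(state), do: eat(state, ~r/^[a-z]+/, :word)

  # Custom reducer: &1 is the token list, &2 the matched string, &3 the
  # position; numbers are stored as integers instead of strings.
  defp number(state) do
    eat(state, ~r/^[0-9]+/, :number, &[{&3, :number, String.to_integer(&2)} | &1])
  end
end

ListTokenizer.tokenize("foo, 42, bar")
# => (newest first)
#    [{{1, 10}, :word, "bar"}, {{1, 6}, :number, 42}, {{1, 1}, :word, "foo"}]
```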
249 | 250 | state 251 | |> discard(~r/\s+/, :whitespace) # discard it 252 | """ 253 | def eat(%{tokens: doc, source: source, position: pos} = state, expr, token_name, fun) do 254 | remainder = String.slice(source, pos..-1) 255 | case match(expr, remainder) do 256 | [term] -> 257 | length = String.length(term) 258 | state 259 | |> Map.put(:position, pos + length) 260 | |> Map.put(:tokens, fun.(doc, term, pos)) 261 | nil -> 262 | throw {:parse_error, pos, [token_name]} 263 | end 264 | end 265 | 266 | @doc """ 267 | Creates an token with a given `token_name`. 268 | 269 | This is functionally the same as `|> eat(~r//, :token_name)`, but using 270 | `start_empty()` can make your code more readable. 271 | 272 | state 273 | |> start_empty(:quoted_string) 274 | |> append(~r/^"/) 275 | |> append(~r/[^"]+/) 276 | |> append(~r/^"/) 277 | """ 278 | def start_empty(%State{position: pos} = state, token_name) do 279 | token = {pos, token_name, ""} 280 | state 281 | |> Map.update(:tokens, [token], &[token | &1]) 282 | end 283 | 284 | @doc """ 285 | Like `eat/4`, but instead of creating a token, it appends to the last token. 286 | 287 | Useful alongside `start_empty()`. 288 | 289 | state 290 | |> start_empty(:quoted_string) 291 | |> append(~r/^"/) 292 | |> append(~r/[^"]+/) 293 | |> append(~r/^"/) 294 | """ 295 | def append(state, expr) do 296 | # parse_error will trip here; the `nil` token name ensures parse errors 297 | # will not make it to the document. 298 | state 299 | |> eat(expr, nil, fn [ {pos, token_name, left} | rest ], right, _pos -> 300 | [ {pos, token_name, left <> right} | rest ] 301 | end) 302 | end 303 | 304 | @doc ~S""" 305 | Converts numeric positions into `{line, col}` tuples. 306 | 307 | iex> source = "div\n body" 308 | iex> doc = [ 309 | ...> { 0, :indent, "" }, 310 | ...> { 0, :element_name, "div" }, 311 | ...> { 4, :indent, " " }, 312 | ...> { 6, :element_name, "body" } 313 | ...> ] 314 | iex> Expug.TokenizerTools.convert_positions(doc, source) 315 | [ 316 | { {1, 1}, :indent, "" }, 317 | { {1, 1}, :element_name, "div" }, 318 | { {2, 1}, :indent, " " }, 319 | { {2, 3}, :element_name, "body" } 320 | ] 321 | """ 322 | def convert_positions(doc, source) do 323 | offsets = String.split(source, "\n") 324 | |> Stream.map(&(String.length(&1) + 1)) 325 | |> Stream.scan(&(&1 + &2)) 326 | |> Enum.to_list 327 | offsets = [ 0 | offsets ] 328 | convert_position(doc, offsets) 329 | end 330 | 331 | # Converts a position number `n` to a tuple `{line, col}`. 332 | defp convert_position(pos, offsets) when is_number(pos) do 333 | line = Enum.find_index(offsets, &(pos < &1)) 334 | offset = Enum.at(offsets, line - 1) 335 | col = pos - offset 336 | {line, col + 1} 337 | end 338 | 339 | defp convert_position({pos, a, b}, offsets) do 340 | {convert_position(pos, offsets), a, b} 341 | end 342 | 343 | defp convert_position([ token | rest ], offsets) do 344 | [ convert_position(token, offsets) | convert_position(rest, offsets) ] 345 | end 346 | 347 | defp convert_position([], _offsets) do 348 | [] 349 | end 350 | 351 | defp match(expr, remainder) do 352 | Regex.run(expr, remainder) 353 | end 354 | end 355 | -------------------------------------------------------------------------------- /lib/expug/tokenizer_tools/state.ex: -------------------------------------------------------------------------------- 1 | defmodule Expug.TokenizerTools.State do 2 | @moduledoc """ 3 | The state used by the tokenizer. 4 | 5 | %{ tokens: [], source: "...", position: 0, options: ... 
} 6 | 7 | ## Also see 8 | 9 | - `Expug.TokenizerTools` 10 | """ 11 | defstruct [:tokens, :source, :position, :options] 12 | end 13 | 14 | -------------------------------------------------------------------------------- /lib/expug/transformer.ex: -------------------------------------------------------------------------------- 1 | defmodule Expug.Transformer do 2 | @moduledoc """ 3 | Transforms a node after compilation. 4 | """ 5 | 6 | alias Expug.Visitor 7 | 8 | # Helper for later 9 | defmacrop statement?(type) do 10 | quote do 11 | unquote(type) == :buffered_text or 12 | unquote(type) == :unescaped_text or 13 | unquote(type) == :statement 14 | end 15 | end 16 | 17 | @doc """ 18 | Transforms a node. 19 | """ 20 | def transform(node) do 21 | node 22 | |> Visitor.visit_children(&close_clauses/1) 23 | end 24 | 25 | @doc """ 26 | Finds out what clauses can follow a given clause. 27 | 28 | iex> Expug.Transformer.clause_after("if") 29 | ["else"] 30 | 31 | iex> Expug.Transformer.clause_after("try") 32 | ["catch", "rescue", "after"] 33 | 34 | iex> Expug.Transformer.clause_after("cond") 35 | [] # nothing can follow cond 36 | """ 37 | def clause_after("if"), do: ["else"] 38 | def clause_after("unless"), do: ["else"] 39 | def clause_after("try"), do: ["catch", "rescue", "after"] 40 | def clause_after("catch"), do: ["catch", "after"] 41 | def clause_after("rescue"), do: ["rescue", "after"] 42 | def clause_after(_), do: [] 43 | def clause_roots(), do: ["if", "unless", "try"] 44 | 45 | @doc """ 46 | Closes all possible clauses in the given `children`. 47 | """ 48 | def close_clauses(children) do 49 | {_, children} = close_clause(children, clause_roots()) 50 | children 51 | end 52 | 53 | @doc """ 54 | Closes all a given `next` clause in the given `children`. 55 | 56 | Returns a tuple of `{status, children}` where `:status` depicts what happened 57 | on the first node given to it. `:multi` means it was matched for a multi-clause, 58 | `:single` means it was matched for a single clause, `:ok` otherwise. 59 | """ 60 | def close_clause([node | children], next) do 61 | pre = prelude(node) 62 | 63 | cond do 64 | # it's a multi-clause thing (eg, if-else-end, try-rescue-after-end) 65 | # See if we're at `if`... 66 | statement?(node.type) and Enum.member?(next, pre) -> 67 | # Then check if the next one is `else`... 
68 | case close_clause(children, clause_after(pre)) do 69 | {:multi, children} -> 70 | # the next one IS else, don't close and proceed 71 | node = node |> Map.put(:open, true) 72 | {:multi, [node | children]} 73 | 74 | {_, children} -> 75 | # the next one is not else, so close us up and proceed 76 | node = node 77 | |> Map.put(:open, true) 78 | |> Map.put(:close, "end") 79 | {:multi, [node | close_clauses(children)]} 80 | end 81 | 82 | # it's a single-clause thing (eg, cond do) 83 | statement?(node.type) and open?(node.value) and !Enum.member?(clause_roots(), pre) -> 84 | node = node 85 | |> Map.put(:open, true) 86 | |> Map.put(:close, "end") 87 | {:single, [node | close_clauses(children)]} 88 | 89 | # Else, just reset the chain 90 | true -> 91 | {:ok, [node | close_clauses(children)]} 92 | end 93 | end 94 | 95 | def close_clause([], _upcoming) do 96 | {:ok, []} # The last child is `if` 97 | end 98 | 99 | def close_clause(children, [] = _upcoming) do 100 | {:ok, children} # Already closed end, but there's still more 101 | end 102 | 103 | @doc """ 104 | Get the prelude of a given node 105 | 106 | iex> Expug.Transformer.prelude(%{value: "if foo"}) 107 | "if" 108 | 109 | iex> Expug.Transformer.prelude(%{value: "case derp"}) 110 | "case" 111 | 112 | iex> Expug.Transformer.prelude(%{value: "1 + 2"}) 113 | nil 114 | """ 115 | def prelude(%{value: statement}) do 116 | case Regex.run(~r/\s*([a-z]+)/, statement) do 117 | [_, prelude] -> prelude 118 | _ -> nil 119 | end 120 | end 121 | 122 | def prelude(_) do 123 | nil 124 | end 125 | 126 | # Checks if a given statement is open. 127 | defp open?(statement) do 128 | has_do = Regex.run(~r/[^A-Za-z0-9_]do\s*$/, statement) 129 | has_do = has_do || Regex.run(~r/[^A-Za-z0-9_]fn.*->$/, statement) 130 | has_do && true || false 131 | end 132 | end 133 | -------------------------------------------------------------------------------- /lib/expug/visitor.ex: -------------------------------------------------------------------------------- 1 | defmodule Expug.Visitor do 2 | @moduledoc """ 3 | Internal helper for traversing an AST. 4 | 5 | iex> node = %{ 6 | ...> title: "Hello", 7 | ...> children: [ 8 | ...> %{title: "fellow"}, 9 | ...> %{title: "humans"} 10 | ...> ] 11 | ...> } 12 | iex> Expug.Visitor.visit(node, fn node -> 13 | ...> {:ok, Map.update(node, :title, ".", &(&1 <> "."))} 14 | ...> end) 15 | %{ 16 | title: "Hello.", 17 | children: [ 18 | %{title: "fellow."}, 19 | %{title: "humans."} 20 | ] 21 | } 22 | """ 23 | 24 | @doc """ 25 | Returns a function `fun` recursively across `node` and its descendants. 26 | """ 27 | def visit(node, fun) do 28 | {continue, node} = fun.(node) 29 | if continue == :ok do 30 | visit_recurse(node, fun) 31 | else 32 | node 33 | end 34 | end 35 | 36 | @doc """ 37 | Visits all children lists recursively across `node` and its descendants. 38 | 39 | Works just like `visit/2`, but instead of operating on nodes, it operates on 40 | node children (lists). 
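As a usage sketch, a pass that drops empty nodes from every children list can be written against this helper; the node shape is the same minimal one used in the doctest above, and only `:children` matters to the traversal:

```elixir
ast = %{
  title: "root",
  children: [
    %{title: "keep"},
    %{title: ""},
    %{title: "also keep"}
  ]
}

# The function receives each children list and returns the replacement list.
Expug.Visitor.visit_children(ast, fn children ->
  Enum.reject(children, &(&1.title == ""))
end)
# => %{title: "root", children: [%{title: "keep"}, %{title: "also keep"}]}
```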
41 | """ 42 | def visit_children(node, fun) do 43 | visit node, fn 44 | %{children: children} = node -> 45 | children = fun.(children) 46 | node = put_in(node.children, children) 47 | {:ok, node} 48 | node -> 49 | {:ok, node} 50 | end 51 | end 52 | 53 | defp visit_recurse(%{children: children} = node, fun) do 54 | Map.put(node, :children, (for c <- children, do: visit(c, fun))) 55 | end 56 | 57 | defp visit_recurse(node, _) do 58 | node 59 | end 60 | end 61 | -------------------------------------------------------------------------------- /mix.exs: -------------------------------------------------------------------------------- 1 | defmodule Expug.Mixfile do 2 | use Mix.Project 3 | 4 | @version "0.9.2" 5 | @description """ 6 | Indented shorthand templates for HTML. (pre-release) 7 | """ 8 | 9 | def project do 10 | [app: :expug, 11 | version: @version, 12 | description: @description, 13 | elixir: "~> 1.2", 14 | elixirc_paths: elixirc_paths(Mix.env), 15 | build_embedded: Mix.env == :prod, 16 | start_permanent: Mix.env == :prod, 17 | source_url: "https://github.com/rstacruz/expug", 18 | homepage_url: "https://github.com/rstacruz/expug", 19 | docs: docs(), 20 | package: package(), 21 | deps: deps()] 22 | end 23 | 24 | def application do 25 | [applications: [:logger]] 26 | end 27 | 28 | defp deps do 29 | [ 30 | {:earmark, "~> 1.2.3", only: :dev}, 31 | {:ex_doc, "~> 0.18.1", only: :dev} 32 | ] 33 | end 34 | 35 | defp elixirc_paths(:test), do: ["lib", "test/support"] 36 | defp elixirc_paths(_), do: ["lib"] 37 | 38 | def package do 39 | [ 40 | maintainers: ["Rico Sta. Cruz"], 41 | licenses: ["MIT"], 42 | files: ["lib", "mix.exs", "README.md"], 43 | links: %{github: "https://github.com/rstacruz/expug"} 44 | ] 45 | end 46 | 47 | def docs do 48 | [ 49 | source_ref: "v#{@version}", 50 | main: "readme", 51 | extras: 52 | Path.wildcard("*.md") ++ 53 | Path.wildcard("docs/**/*.md") 54 | ] 55 | end 56 | end 57 | -------------------------------------------------------------------------------- /mix.lock: -------------------------------------------------------------------------------- 1 | %{"calliope": {:hex, :calliope, "0.4.0", "cdba8ae42b225de1c906bcb511b1fa3a8dd601bda3b2005743111f7ec92cd809", [:mix], []}, 2 | "earmark": {:hex, :earmark, "1.2.3", "206eb2e2ac1a794aa5256f3982de7a76bf4579ff91cb28d0e17ea2c9491e46a4", [:mix], [], "hexpm"}, 3 | "ex_doc": {:hex, :ex_doc, "0.18.1", "37c69d2ef62f24928c1f4fdc7c724ea04aecfdf500c4329185f8e3649c915baf", [:mix], [{:earmark, "~> 1.1", [hex: :earmark, repo: "hexpm", optional: false]}], "hexpm"}, 4 | "phoenix_html": {:hex, :phoenix_html, "2.5.1", "631053f9e345fecb5c87d9e0ccd807f7266d27e2ee4269817067af425fd81ba8", [:mix], [{:plug, "~> 0.13 or ~> 1.0", [hex: :plug, optional: false]}]}, 5 | "plug": {:hex, :plug, "1.1.5", "de5645c18170415a72b18cc3d215c05321ddecac27a15acb923742156e98278b", [:mix], [{:cowboy, "~> 1.0", [hex: :cowboy, optional: true]}]}} 6 | -------------------------------------------------------------------------------- /test/builder_test.exs: -------------------------------------------------------------------------------- 1 | defmodule BuilderTest do 2 | use ExUnit.Case 3 | doctest Expug.Builder 4 | 5 | def build(source) do 6 | with \ 7 | tokens <- Expug.Tokenizer.tokenize(source), 8 | ast <- Expug.Compiler.compile(tokens) do 9 | Expug.Builder.build(ast) 10 | end 11 | end 12 | 13 | test "build" do 14 | eex = build("doctype html\ndiv Hello") 15 | assert eex == %{ 16 | :lines => 2, 17 | 1 => [""], 18 | 2 => ["
", "Hello", "
"] 19 | } 20 | end 21 | 22 | test "self-closing img" do 23 | eex = build("doctype html\nimg") 24 | assert eex == %{ 25 | :lines => 2, 26 | 1 => [""], 27 | 2 => [""] 28 | } 29 | end 30 | 31 | test "self-closing xml" do 32 | eex = build("doctype xml\nimg") 33 | assert eex == %{ 34 | :lines => 2, 35 | 1 => [""], 36 | 2 => [""] 37 | } 38 | end 39 | 40 | test "single element" do 41 | eex = build("div") 42 | assert eex == %{ 43 | :lines => 1, 44 | 1 => ["
"] 45 | } 46 | end 47 | 48 | test "single element with attributes" do 49 | eex = build("div(id=foo)") 50 | assert eex == %{ 51 | :lines => 1, 52 | 1 => [">
"] 53 | } 54 | end 55 | 56 | test "value-less attributes" do 57 | eex = build("div(src)") 58 | assert eex == %{ 59 | :lines => 1, 60 | 1 => [">
"] 61 | } 62 | end 63 | 64 | test "with buffered text" do 65 | eex = build("div= hola()") 66 | assert eex == %{ 67 | :lines => 1, 68 | 1 =>["
", "<%= hola() %>", "
"] 69 | } 70 | end 71 | 72 | test "with unescaped text" do 73 | eex = build("div!= hola()") 74 | assert eex == %{ 75 | :lines => 1, 76 | 1 => ["
", "<%= raw(hola()) %>", "
"] 77 | } 78 | end 79 | 80 | test "unescaped text only" do 81 | eex = build("!= hola()") 82 | assert eex == %{ 83 | :lines => 1, 84 | 1 => ["<%= raw(hola()) %>"] 85 | } 86 | end 87 | 88 | test "nesting" do 89 | eex = build(""" 90 | doctype html 91 | div 92 | span= @hello 93 | """) 94 | assert eex == %{ 95 | :lines => 3, 96 | 1 =>[""], 97 | 2 =>["
"], 98 | 3 =>["", "<%= @hello %>", "", "
"] 99 | } 100 | end 101 | 102 | test "line comments" do 103 | eex = build(""" 104 | div 105 | -# hi 106 | div 107 | """) 108 | assert eex == %{ 109 | :lines => 3, 110 | 1 => ["
"], 111 | 3 => ["
"] 112 | } 113 | end 114 | 115 | test "line comments, capturing" do 116 | eex = build(""" 117 | div 118 | -# hi 119 | h1 120 | """) 121 | assert eex == %{ 122 | :lines => 1, 123 | 1 => ["
"] 124 | } 125 | end 126 | 127 | test "line comments, capturing 2" do 128 | eex = build(""" 129 | div 130 | -# hi 131 | h1 132 | span 133 | """) 134 | assert eex == %{ 135 | :lines => 4, 136 | 1 => ["
"], 137 | 4 => [""] 138 | } 139 | end 140 | 141 | test "indentation magic" do 142 | eex = build(""" 143 | div 144 | h1 145 | span 146 | | Hello 147 | """) 148 | assert eex == %{ 149 | :lines => 4, 150 | 1 => ["
"], 151 | 2 => ["

"], 152 | 3 => [""], 153 | 4 => ["Hello", "", "

", "
"] 154 | } 155 | end 156 | 157 | test "indentation magic 2" do 158 | eex = build(""" 159 | div 160 | h1 161 | span 162 | | Hello 163 | div 164 | """) 165 | assert eex == %{ 166 | :lines => 5, 167 | 1 => ["
"], 168 | 2 => ["

"], 169 | 3 => [""], 170 | 4 => ["Hello", "", "

", "
"], 171 | 5 => ["
"] 172 | } 173 | end 174 | 175 | test "attr and =" do 176 | eex = build(""" 177 | div(role="main")= @hello 178 | """) 179 | assert eex == %{ 180 | :lines => 1, 181 | 1 => [ 182 | ">", 183 | "<%= @hello %>", 184 | "" 185 | ] 186 | } 187 | end 188 | 189 | test "lone dot" do 190 | try do 191 | build(".") 192 | flunk "should've thrown something" 193 | catch err -> 194 | assert %{ 195 | expected: _, 196 | position: {1, 1}, 197 | type: :parse_error 198 | } = err 199 | end 200 | end 201 | 202 | test "dash" do 203 | eex = build("-hi") 204 | assert eex == %{ 205 | :lines => 1, 206 | 1 => ["<% hi %>"] 207 | } 208 | end 209 | 210 | test "dash with body" do 211 | eex = build("- for item <- @list do\n div") 212 | assert eex == %{ 213 | :lines => 2, 214 | 1 => ["<% for item <- @list do %>"], 215 | 2 => [:collapse, "
<% end %>"] 216 | } 217 | end 218 | 219 | test "unescaped with body" do 220 | eex = build("!= for item <- @list do\n div") 221 | assert eex == %{ 222 | :lines => 2, 223 | 1 => ["<%= raw(for item <- @list do %>"], 224 | 2 => [:collapse, "
<% end) %>"] 225 | } 226 | end 227 | 228 | @tag :pending 229 | test "dash with body, collapsing" do 230 | eex = build("- for item <- @list do\n div") 231 | assert eex == %{ 232 | :lines => 2, 233 | 1 => ["<% for item <- @list do %>"], 234 | 2 => [:collapse, "
<% end %>"] 235 | } 236 | end 237 | 238 | test "equal with body" do 239 | eex = build("= for item <- @list do\n div") 240 | assert eex == %{ 241 | :lines => 2, 242 | 1 => ["<%= for item <- @list do %>"], 243 | 2 => [:collapse, "
<% end %>"] 244 | } 245 | end 246 | 247 | @tag :pending 248 | test "equal with body with (" do 249 | eex = build("= Enum.map(@list, fn item ->\n div") 250 | assert eex == %{ 251 | :lines => 2, 252 | 1 => [ "<%= Enum.map(@list, fn item -> %>" ], 253 | 2 => [ "
<% end) %>" ] 254 | } 255 | end 256 | 257 | test "if .. else ... end" do 258 | eex = build("= if @x do\n div\n- else\n div") 259 | assert eex == %{ 260 | :lines => 4, 261 | 1 => ["<%= if @x do %>"], 262 | 2 => [:collapse, "
"], 263 | 3 => ["<% else %>"], 264 | 4 => [:collapse, "
<% end %>"] 265 | } 266 | end 267 | 268 | test "if .. else ... if ... end" do 269 | eex = build("= if @x do\n div\n- else\n div\n= if @y do\n span\n- else\n span") 270 | assert eex == %{ 271 | :lines => 8, 272 | 1 => ["<%= if @x do %>"], 273 | 2 => [:collapse, "
"], 274 | 3 => ["<% else %>"], 275 | 4 => [:collapse, "
<% end %>"], 276 | 5 => ["<%= if @y do %>"], 277 | 6 => [:collapse, ""], 278 | 7 => ["<% else %>"], 279 | 8 => [:collapse, "<% end %>"] 280 | } 281 | end 282 | 283 | test "if .. if ... end" do 284 | eex = build("= if @x do\n div\n= if @y do\n div") 285 | assert eex == %{ 286 | :lines => 4, 287 | 1 => ["<%= if @x do %>"], 288 | 2 => [:collapse, "
<% end %>"], 289 | 3 => ["<%= if @y do %>"], 290 | 4 => [:collapse, "
<% end %>"] 291 | } 292 | end 293 | 294 | test "form_for fn -> ... end" do 295 | eex = build("= form_for @foo, fn x ->\n div") 296 | assert eex == %{ 297 | :lines => 2, 298 | 1 => ["<%= form_for @foo, fn x -> %>"], 299 | 2 => [:collapse, "
<% end %>"], 300 | } 301 | end 302 | 303 | test "// comment" do 304 | eex = build("// hi") 305 | assert eex == %{ 306 | :lines => 1, 307 | 1 => [""], 308 | } 309 | end 310 | 311 | test "// comment, multiline" do 312 | eex = build("// hi\n world") 313 | assert eex == %{ 314 | :lines => 1, 315 | 1 => [""], 316 | } 317 | end 318 | 319 | test "// comment, multiline, empty first line" do 320 | eex = build("//\n world") 321 | assert eex == %{ 322 | :lines => 1, 323 | 1 => [""], 324 | } 325 | end 326 | 327 | test "// comment, multiline, with stuff after" do 328 | eex = build("//\n world\ndiv") 329 | assert eex == %{ 330 | :lines => 3, 331 | 1 => [""], 332 | 3 => ["
"] 333 | } 334 | end 335 | 336 | test "cond do ... end" do 337 | eex = build("= cond do\n div") 338 | assert eex == %{ 339 | :lines => 2, 340 | 1 => ["<%= cond do %>"], 341 | 2 => [:collapse, "
<% end %>"] 342 | } 343 | end 344 | 345 | @tag :pending 346 | test "try do ... catch ... rescue ... after ... end" 347 | 348 | test "extra space" do 349 | eex = build("div\n ") 350 | assert eex == %{ 351 | :lines => 1, 352 | 1 => [ "
" ] 353 | } 354 | end 355 | end 356 | -------------------------------------------------------------------------------- /test/compiler_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExpugCompilerTest do 2 | use ExUnit.Case 3 | 4 | import Expug.Tokenizer, only: [tokenize: 1] 5 | import Expug.Compiler, only: [compile: 1] 6 | 7 | doctest Expug.Compiler 8 | 9 | test "doctype only" do 10 | tokens = tokenize("doctype html5") 11 | ast = compile(tokens) 12 | assert %{ 13 | doctype: %{ 14 | type: :doctype, 15 | value: "html5", 16 | token: {{1, 9}, :doctype, "html5"} 17 | }, 18 | type: :document 19 | } = ast 20 | end 21 | 22 | test "tag only" do 23 | tokens = tokenize("div") 24 | ast = compile(tokens) 25 | assert %{ 26 | type: :document, 27 | children: [%{ 28 | name: "div", 29 | type: :element 30 | }] 31 | } = ast 32 | end 33 | 34 | test "doctype and tag" do 35 | tokens = tokenize("doctype html5\ndiv") 36 | ast = compile(tokens) 37 | assert %{ 38 | doctype: %{ 39 | type: :doctype, 40 | value: "html5", 41 | token: _ 42 | }, 43 | type: :document, 44 | children: [%{ 45 | name: "div", 46 | type: :element 47 | }] 48 | } = ast 49 | end 50 | 51 | test "doctype and tag and id" do 52 | tokens = tokenize("doctype html5\ndiv#box") 53 | ast = compile(tokens) 54 | assert %{ 55 | doctype: %{ 56 | type: :doctype, 57 | value: "html5" 58 | }, 59 | type: :document, 60 | children: [%{ 61 | type: :element, 62 | name: "div", 63 | attributes: %{ 64 | "id" => [{:text, "box"}] 65 | } 66 | }] 67 | } = ast 68 | end 69 | 70 | test "tag and classes" do 71 | tokens = tokenize("div.blue.small") 72 | ast = compile(tokens) 73 | assert %{ 74 | type: :document, 75 | children: [%{ 76 | name: "div", 77 | type: :element, 78 | attributes: %{ 79 | "class" => [{:text, "blue"}, {:text, "small"}] 80 | } 81 | }] 82 | } = ast 83 | end 84 | 85 | test "buffered text" do 86 | tokens = tokenize("div= hello") 87 | ast = compile(tokens) 88 | assert %{ 89 | type: :document, 90 | children: [%{ 91 | name: "div", 92 | type: :element, 93 | children: [%{ 94 | type: :buffered_text, 95 | value: "hello" 96 | }] 97 | }] 98 | } = ast 99 | end 100 | 101 | test "doctype and tags" do 102 | tokens = tokenize("doctype html5\ndiv\nspan") 103 | ast = compile(tokens) 104 | assert %{ 105 | doctype: %{ 106 | type: :doctype, 107 | value: "html5", 108 | token: {{1, 9}, :doctype, "html5"} 109 | }, 110 | type: :document, 111 | children: [%{ 112 | name: "div", 113 | type: :element, 114 | token: {{2, 1}, :element_name, "div"} 115 | }, %{ 116 | name: "span", 117 | type: :element, 118 | token: {{3, 1}, :element_name, "span"} 119 | }] 120 | } == ast 121 | end 122 | 123 | test "nesting" do 124 | tokens = tokenize("head\n title") 125 | ast = compile(tokens) 126 | assert %{ 127 | type: :document, 128 | children: [%{ 129 | name: "head", 130 | type: :element, 131 | children: [%{ 132 | name: "title", 133 | type: :element 134 | }] 135 | }] 136 | } = ast 137 | end 138 | 139 | test "nesting deeper" do 140 | tokens = tokenize("head\n title\n span") 141 | ast = compile(tokens) 142 | assert %{ 143 | type: :document, 144 | children: [%{ 145 | name: "head", 146 | type: :element, 147 | children: [%{ 148 | name: "title", 149 | type: :element, 150 | children: [%{ 151 | name: "span", 152 | type: :element 153 | }] 154 | }] 155 | }] 156 | } = ast 157 | end 158 | 159 | test "zigzag nesting" do 160 | tokens = tokenize("head\n title\n span\n meta") 161 | ast = compile(tokens) 162 | assert %{ 163 | type: :document, 164 | children: [%{ 165 | 
type: :element, 166 | name: "head", 167 | token: {{1, 1}, :element_name, "head"}, 168 | children: [%{ 169 | type: :element, 170 | name: "title", 171 | token: {{2, 3}, :element_name, "title"}, 172 | children: [%{ 173 | name: "span", 174 | type: :element, 175 | token: {{3, 5}, :element_name, "span"}, 176 | }] 177 | }, %{ 178 | name: "meta", 179 | type: :element, 180 | token: {{4, 3}, :element_name, "meta"}, 181 | }] 182 | }] 183 | } == ast 184 | end 185 | 186 | # test "zigzag nesting error" do 187 | # tokens = tokenize("head\n title\n span\n meta") 188 | # {:error, params} = compile(tokens) 189 | # assert params == %{ 190 | # type: :ambiguous_indentation, 191 | # position: {4, 2} 192 | # } 193 | # end 194 | 195 | test "attributes" do 196 | tokens = tokenize("div(style='color: blue')") 197 | ast = compile(tokens) 198 | assert %{ 199 | type: :document, 200 | children: [%{ 201 | type: :element, 202 | name: "div", 203 | attributes: %{ 204 | "style" => [{:eval, "'color: blue'"}] 205 | } 206 | }] 207 | } = ast 208 | end 209 | 210 | test "2 attributes" do 211 | tokens = tokenize("div(id='box' style='color: blue')") 212 | ast = compile(tokens) 213 | assert %{ 214 | type: :document, 215 | children: [%{ 216 | type: :element, 217 | name: "div", 218 | attributes: %{ 219 | "id" => [{:eval, "'box'"}], 220 | "style" => [{:eval, "'color: blue'"}] 221 | } 222 | }] 223 | } = ast 224 | end 225 | 226 | test "dupe attributes" do 227 | tokens = tokenize("div(src=1 src=2)") 228 | ast = compile(tokens) 229 | assert %{ 230 | type: :document, 231 | children: [%{ 232 | type: :element, 233 | name: "div", 234 | attributes: %{ 235 | "src" => [{:eval, "1"}, {:eval, "2"}] 236 | } 237 | }] 238 | } = ast 239 | end 240 | 241 | test "value-less attributes" do 242 | tokens = tokenize("div(src)") 243 | ast = compile(tokens) 244 | assert %{ 245 | type: :document, 246 | children: [%{ 247 | type: :element, 248 | name: "div", 249 | attributes: %{ 250 | "src" => [{:eval, true}] 251 | } 252 | }] 253 | } = ast 254 | end 255 | 256 | test "start with class" do 257 | tokens = tokenize(".hello") 258 | ast = compile(tokens) 259 | assert %{ 260 | type: :document, 261 | children: [%{ 262 | type: :element, 263 | name: "div", 264 | attributes: %{ 265 | "class" => [{:text, "hello"}] 266 | } 267 | }] 268 | } = ast 269 | end 270 | 271 | test "start with id" do 272 | tokens = tokenize("#hello") 273 | ast = compile(tokens) 274 | assert %{ 275 | type: :document, 276 | children: [%{ 277 | type: :element, 278 | name: "div", 279 | attributes: %{ 280 | "id" => [{:text, "hello"}] 281 | } 282 | }] 283 | } = ast 284 | end 285 | 286 | test "classes and id" do 287 | tokens = tokenize(".small.blue#box") 288 | ast = compile(tokens) 289 | assert %{ 290 | type: :document, 291 | children: [%{ 292 | type: :element, 293 | name: "div", 294 | attributes: %{ 295 | "class" => [{:text, "small"}, {:text, "blue"}], 296 | "id" => [{:text, "box"}] 297 | } 298 | }] 299 | } = ast 300 | end 301 | 302 | test "raw text only" do 303 | tokens = tokenize("| hi") 304 | ast = compile(tokens) 305 | assert %{ 306 | type: :document, 307 | children: [%{ 308 | type: :raw_text, 309 | value: "hi", 310 | token: {_, _, _} 311 | }] 312 | } = ast 313 | end 314 | 315 | test "double raw text" do 316 | tokens = tokenize("| hi\n| hello") 317 | ast = compile(tokens) 318 | assert %{ 319 | type: :document, 320 | children: [%{ 321 | type: :raw_text, 322 | value: "hi", 323 | token: {_, _, _} 324 | }, %{ 325 | type: :raw_text, 326 | value: "hello", 327 | token: {_, _, _} 328 | }] 329 | } = ast 330 | end 
331 | 332 | test "buffered text only" do 333 | tokens = tokenize("= hi") 334 | ast = compile(tokens) 335 | assert %{ 336 | type: :document, 337 | children: [%{ 338 | type: :buffered_text, 339 | value: "hi", 340 | token: {{1, 3}, :buffered_text, "hi"} 341 | }] 342 | } == ast 343 | end 344 | 345 | test "unescaped text only" do 346 | tokens = tokenize("!= hi") 347 | ast = compile(tokens) 348 | assert %{ 349 | type: :document, 350 | children: [%{ 351 | type: :unescaped_text, 352 | value: "hi", 353 | token: {{1, 4}, :unescaped_text, "hi"} 354 | }] 355 | } == ast 356 | end 357 | 358 | test "unescaped text with element" do 359 | tokens = tokenize("div!= hi") 360 | ast = compile(tokens) 361 | assert ast == %{ 362 | type: :document, 363 | children: [%{ 364 | type: :element, 365 | name: "div", 366 | token: {{1, 1}, :element_name, "div"}, 367 | children: [%{ 368 | type: :unescaped_text, 369 | value: "hi", 370 | token: {{1, 7}, :unescaped_text, "hi"} 371 | }] 372 | }] 373 | } 374 | end 375 | 376 | test "statement with children" do 377 | tokens = tokenize("- hi\n div") 378 | ast = compile(tokens) 379 | assert %{ 380 | type: :document, 381 | children: [%{ 382 | type: :statement, 383 | value: "hi", 384 | children: [%{ 385 | name: "div", 386 | token: {{2, 3}, :element_name, "div"}, 387 | type: :element 388 | }], 389 | token: {{1, 3}, :statement, "hi"} 390 | }] 391 | } == ast 392 | end 393 | 394 | test "if ... end" do 395 | tokens = tokenize("= if @x do\n div") 396 | ast = compile(tokens) 397 | assert %{ 398 | type: :document, 399 | children: [%{ 400 | type: :buffered_text, 401 | value: "if @x do", 402 | open: true, 403 | close: "end", 404 | token: {{1, 3}, :buffered_text, "if @x do"}, 405 | children: [%{ 406 | type: :element, 407 | name: "div", 408 | token: {{2, 3}, :element_name, "div"} 409 | }], 410 | }] 411 | } == ast 412 | end 413 | 414 | test "if ... else ... end" do 415 | tokens = tokenize("= if @x do\n div\n- else\n span") 416 | ast = compile(tokens) 417 | assert %{ 418 | type: :document, 419 | children: [%{ 420 | type: :buffered_text, 421 | value: "if @x do", 422 | open: true, 423 | token: {{1, 3}, :buffered_text, "if @x do"}, 424 | children: [%{ 425 | type: :element, 426 | name: "div", 427 | token: {{2, 3}, :element_name, "div"} 428 | }], 429 | }, %{ 430 | type: :statement, 431 | value: "else", 432 | open: true, 433 | close: "end", 434 | token: {{3, 3}, :statement, "else"}, 435 | children: [%{ 436 | type: :element, 437 | name: "span", 438 | token: {{4, 3}, :element_name, "span"} 439 | }], 440 | }] 441 | } == ast 442 | end 443 | 444 | test "try ... catch ... end" do 445 | tokens = tokenize("= try do\n div\n- catch ->\n span") 446 | ast = compile(tokens) 447 | assert %{ 448 | type: :document, 449 | children: [%{ 450 | type: :buffered_text, 451 | value: "try do", 452 | open: true, 453 | token: {{1, 3}, :buffered_text, "try do"}, 454 | children: [%{ 455 | type: :element, 456 | name: "div", 457 | token: {{2, 3}, :element_name, "div"} 458 | }], 459 | }, %{ 460 | type: :statement, 461 | value: "catch ->", 462 | open: true, 463 | close: "end", 464 | token: {{3, 3}, :statement, "catch ->"}, 465 | children: [%{ 466 | type: :element, 467 | name: "span", 468 | token: {{4, 3}, :element_name, "span"} 469 | }], 470 | }] 471 | } == ast 472 | end 473 | 474 | test "try ... 
end" do 475 | tokens = tokenize("= try do\n div") 476 | ast = compile(tokens) 477 | assert %{ 478 | type: :document, 479 | children: [%{ 480 | type: :buffered_text, 481 | value: "try do", 482 | open: true, 483 | close: "end", 484 | token: {{1, 3}, :buffered_text, "try do"}, 485 | children: [%{ 486 | type: :element, 487 | name: "div", 488 | token: {{2, 3}, :element_name, "div"} 489 | }], 490 | }] 491 | } == ast 492 | end 493 | 494 | test "cond do" do 495 | tokens = tokenize("= cond do\n div") 496 | ast = compile(tokens) 497 | assert %{ 498 | type: :document, 499 | children: [%{ 500 | type: :buffered_text, 501 | value: "cond do", 502 | open: true, 503 | close: "end", 504 | token: {{1, 3}, :buffered_text, "cond do"}, 505 | children: [%{ 506 | type: :element, 507 | name: "div", 508 | token: {{2, 3}, :element_name, "div"} 509 | }], 510 | }] 511 | } == ast 512 | end 513 | 514 | test "script." do 515 | tokens = tokenize("script.\n alert('hello')") 516 | ast = compile(tokens) 517 | assert %{ 518 | type: :document, 519 | children: [%{ 520 | type: :element, 521 | name: "script", 522 | token: {{1, 1}, :element_name, "script"}, 523 | children: [%{ 524 | type: :block_text, 525 | value: "alert('hello')", 526 | token: {{2, 3}, :subindent, "alert('hello')"} 527 | }], 528 | }] 529 | } == ast 530 | end 531 | 532 | test "comment in the middle" do 533 | tokens = tokenize("div\n// hi\nh1") 534 | ast = compile(tokens) 535 | assert ast == %{ 536 | type: :document, 537 | children: [%{ 538 | type: :element, 539 | name: "div", 540 | token: {{1, 1}, :element_name, "div"} 541 | }, %{ 542 | type: :html_comment, 543 | value: "hi", 544 | token: {{2, 4}, :html_comment, "hi"} 545 | }, %{ 546 | type: :element, 547 | name: "h1", 548 | token: {{3, 1}, :element_name, "h1"} 549 | }] 550 | } 551 | end 552 | 553 | test "multiline comment in the middle" do 554 | tokens = tokenize("div\n// hi\n yo\nh1") 555 | ast = compile(tokens) 556 | assert ast == %{ 557 | type: :document, 558 | children: [%{ 559 | type: :element, 560 | name: "div", 561 | token: {{1, 1}, :element_name, "div"} 562 | }, %{ 563 | type: :html_comment, 564 | value: "hi\nyo", 565 | token: {{2, 4}, :html_comment, "hi"} 566 | }, %{ 567 | type: :element, 568 | name: "h1", 569 | token: {{4, 1}, :element_name, "h1"} 570 | }] 571 | } 572 | end 573 | 574 | test "full multiline =" do 575 | tokens = tokenize("=\n ab\n cd") 576 | ast = compile(tokens) 577 | assert %{ 578 | type: :document, 579 | children: [%{ 580 | type: :buffered_text, 581 | value: "\nab\ncd", 582 | token: {{1, 2}, :buffered_text, ""} 583 | }] 584 | } == ast 585 | end 586 | 587 | test "full multiline !=" do 588 | tokens = tokenize("!=\n ab\n cd") 589 | ast = compile(tokens) 590 | assert %{ 591 | type: :document, 592 | children: [%{ 593 | type: :unescaped_text, 594 | value: "\nab\ncd", 595 | token: {{1, 3}, :unescaped_text, ""} 596 | }] 597 | } == ast 598 | end 599 | 600 | test "full multiline -" do 601 | tokens = tokenize("-\n ab\n cd") 602 | ast = compile(tokens) 603 | assert %{ 604 | type: :document, 605 | children: [%{ 606 | type: :statement, 607 | value: "\nab\ncd", 608 | token: {{1, 2}, :statement, ""} 609 | }] 610 | } == ast 611 | end 612 | 613 | test "full multiline = with div" do 614 | tokens = tokenize("div=\n ab\n cd") 615 | ast = compile(tokens) 616 | assert %{ 617 | type: :document, 618 | children: [%{ 619 | type: :element, 620 | name: "div", 621 | token: {{1, 1}, :element_name, "div"}, 622 | children: [%{ 623 | type: :buffered_text, 624 | value: "\nab\ncd", 625 | token: {{1, 5}, :buffered_text, 
""} 626 | }] 627 | }] 628 | } == ast 629 | end 630 | 631 | test "start with a space" do 632 | tokens = tokenize(" div") 633 | assert catch_throw(compile(tokens)) == 634 | {:compile_error, :unexpected_indent, {{1, 2}, :element_name, "div"}} 635 | end 636 | end 637 | -------------------------------------------------------------------------------- /test/eex_eval_test.exs: -------------------------------------------------------------------------------- 1 | defmodule EexEvalTest do 2 | use ExUnit.Case 3 | 4 | def build(source, bindings \\ [], opts \\ []) do 5 | source 6 | |> Expug.to_eex!(raw_helper: "raw.") 7 | |> EEx.eval_string(bindings, opts) 8 | end 9 | 10 | test "basic" do 11 | eex = build(""" 12 | doctype html 13 | div 14 | span= @hello 15 | """, assigns: %{hello: "Sup"}) 16 | 17 | assert eex == ~S""" 18 | 19 |
20 | 21 | Sup 22 | 23 |
24 | """ 25 | end 26 | 27 | test "attributes" do 28 | eex = build(""" 29 | div(id=@id) 30 | """, assigns: %{id: "jabberwocky"}, raw: &(&1)) 31 | 32 | assert eex == ~S""" 33 |
34 | """ 35 | end 36 | 37 | test "true attributes" do 38 | eex = build("div(spellcheck=@spellcheck)", 39 | assigns: %{spellcheck: true}, raw: &(&1)) 40 | 41 | assert eex == "
\n" 42 | end 43 | 44 | test "false attributes" do 45 | eex = build("div(spellcheck=@spellcheck)", 46 | assigns: %{spellcheck: false}, raw: &(&1)) 47 | 48 | assert eex == "
\n" 49 | end 50 | 51 | test "value-less attributes" do 52 | eex = build("div(spellcheck)", 53 | assigns: %{}, raw: &(&1)) 54 | 55 | assert eex == "
\n" 56 | end 57 | end 58 | -------------------------------------------------------------------------------- /test/expug_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExpugTest do 2 | use ExUnit.Case 3 | doctest Expug 4 | 5 | # test "build" do 6 | # {:ok, eex} = Expug.to_eex("doctype html\ndiv Hello") 7 | # assert eex == "\n
\nHello\n
\n" 8 | # end 9 | 10 | test "with class" do 11 | {:ok, eex} = Expug.to_eex("div.hello") 12 | output = run_eex(eex) 13 | assert output == "
\n" 14 | end 15 | 16 | test "with buffered text" do 17 | {:ok, eex} = Expug.to_eex("div.hello.world") 18 | output = run_eex(eex) 19 | assert output == "
\n" 20 | end 21 | 22 | test "with assigns in attribute" do 23 | {:ok, eex} = Expug.to_eex("div(class=@klass)") 24 | output = run_eex(eex, assigns: [klass: "hello"]) 25 | assert output == "
\n" 26 | end 27 | 28 | test "with assigns in text" do 29 | {:ok, eex} = Expug.to_eex("div\n = @msg") 30 | output = run_eex(eex, assigns: [msg: "hello"]) 31 | assert output == "
\nhello\n
\n" 32 | end 33 | 34 | test "parse error" do 35 | {:error, output} = Expug.to_eex("hello\nhuh?") 36 | assert %{ 37 | type: :parse_error, 38 | position: {2, 4}, 39 | expected: [:eq, :bang_eq, :whitespace, :block_text, :attribute_open] 40 | } = output 41 | end 42 | 43 | test "bang, parse error" do 44 | msg = """ 45 | Parse error on line 2 46 | 47 | div 48 | ^ 49 | 50 | Expug encountered a character it didn't expect. 51 | """ 52 | assert_raise Expug.Error, msg, fn -> 53 | Expug.to_eex!("hello\ndiv") 54 | end 55 | end 56 | 57 | test "bang, parse error (2)" do 58 | msg = """ 59 | Parse error on line 1 60 | 61 | div(a!) 62 | ^ 63 | 64 | Expug encountered a character it didn't expect. 65 | """ 66 | assert_raise Expug.Error, msg, fn -> 67 | Expug.to_eex!("div(a!)") 68 | end 69 | end 70 | 71 | test "bang, parse error (3)" do 72 | msg = """ 73 | Parse error on line 1 74 | 75 | div= 76 | ^ 77 | 78 | Expug encountered a character it didn't expect. 79 | """ 80 | assert_raise Expug.Error, msg, fn -> 81 | Expug.to_eex!("div=") 82 | end 83 | end 84 | 85 | # test "bang, compile error" do 86 | # msg = "ambiguous indentation on line 4 col 2" 87 | # assert_raise Expug.Error, msg, fn -> 88 | # Expug.to_eex!("h1\n h2\n h3\n h4") 89 | # end 90 | # end 91 | 92 | @doc """ 93 | A terrible hack, I know, but this means we get to skip on Phoenix.HTML as a 94 | dependency 95 | """ 96 | def run_eex(eex, opts \\ []) do 97 | eex 98 | |> String.replace(~r/raw\(/, "raw.(") 99 | |> EEx.eval_string([{:raw, fn x -> x end} | opts]) 100 | end 101 | end 102 | -------------------------------------------------------------------------------- /test/runtime_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Expug.RuntimeTest do 2 | use ExUnit.Case 3 | doctest Expug.Runtime 4 | 5 | import Expug.Runtime 6 | 7 | test "strings" do 8 | assert attr("value", "hello") == ~S( value="hello") 9 | end 10 | 11 | test "escaping" do 12 | assert attr("value", ~S(
<h1 a="b">
)) == ~S( value="<h1 a="b">") 13 | end 14 | 15 | test "boolean false" do 16 | assert attr("disabled", false) == "" 17 | end 18 | 19 | test "boolean true" do 20 | assert attr("disabled", true) == " disabled" 21 | end 22 | 23 | test "nil" do 24 | assert attr("disabled", nil) == "" 25 | end 26 | end 27 | -------------------------------------------------------------------------------- /test/stringifier_test.exs: -------------------------------------------------------------------------------- 1 | defmodule StringifierTest do 2 | use ExUnit.Case 3 | 4 | def build(source) do 5 | source 6 | |> Expug.Tokenizer.tokenize() 7 | |> Expug.Compiler.compile() 8 | |> Expug.Builder.build() 9 | |> Expug.Stringifier.stringify() 10 | end 11 | 12 | test "nesting" do 13 | eex = build(""" 14 | doctype html 15 | div 16 | span= @hello 17 | """) 18 | 19 | assert eex == ~S""" 20 | 21 |
22 | <%= "\n" %><%= @hello %><%= "\n" %><%= "\n" %>
23 | """ 24 | end 25 | 26 | test "with extra lines" do 27 | eex = build(""" 28 | doctype html 29 | 30 | 31 | div 32 | span= @hello 33 | """) 34 | 35 | assert eex == ~S""" 36 | 37 | <% 38 | 39 | %>
40 | <%= "\n" %><%= @hello %><%= "\n" %><%= "\n" %>
41 | """ 42 | end 43 | 44 | test "with extra lines, 2" do 45 | eex = build(""" 46 | doctype html 47 | 48 | div 49 | 50 | span= @hello 51 | """) 52 | 53 | 54 | assert eex == ~S""" 55 | 56 | <% 57 | %>
58 | <% 59 | %><%= "\n" %><%= @hello %><%= "\n" %><%= "\n" %>
60 | """ 61 | end 62 | 63 | test "indentation magic" do 64 | eex = build(""" 65 | div 66 | h1 67 | span 68 | a.foo 69 | | Hello 70 | """) 71 | assert eex == ~S""" 72 |
73 |

74 | 75 | 76 | Hello<%= "\n" %><%= "\n" %><%= "\n" %>

<%= "\n" %>
77 | """ 78 | end 79 | 80 | test "joining classes" do 81 | eex = build(""" 82 | div.foo(class="bar") 83 | """) 84 | 85 | assert eex == ~S""" 86 | > 87 | """ 88 | end 89 | 90 | test "joining IDs" do 91 | eex = build(""" 92 | div#a#b 93 | """) 94 | 95 | assert eex == ~S""" 96 | > 97 | """ 98 | end 99 | 100 | test "extra depths" do 101 | eex = build(""" 102 | div(role="hi" 103 | ) 104 | 105 | div 106 | """) 107 | 108 | assert eex == ~S""" 109 | > 110 | <% 111 | 112 | %>
113 | """ 114 | end 115 | 116 | test "new line attributes" do 117 | eex = build(""" 118 | div(role="hi" 119 | id="foo") 120 | """) 121 | 122 | assert eex == ~S""" 123 | <%= raw(Expug.Runtime.attr("role", "hi")) %>> 124 | """ 125 | end 126 | 127 | test "colon in attributes" do 128 | eex = build(""" 129 | div(svg:src="hi") 130 | """) 131 | 132 | assert eex == ~S""" 133 | > 134 | """ 135 | end 136 | 137 | test "collapsing" do 138 | eex = build(""" 139 | = if @foo do 140 | div 141 | """) 142 | 143 | assert eex == """ 144 | <%= if @foo do %><% 145 | %>
<% end %> 146 | """ 147 | end 148 | 149 | test "empty strings" do 150 | eex = build("") 151 | 152 | assert eex == "" 153 | end 154 | 155 | test "empty space attributes" do 156 | eex = build("div( )") 157 | 158 | assert eex == "
\n" 159 | end 160 | 161 | @tag :pending 162 | test "illegal nesting inside |" do 163 | eex = build(""" 164 | | hi 165 | foo 166 | """) 167 | 168 | assert eex == "" 169 | end 170 | 171 | @tag :pending 172 | test "-// comment nesting" 173 | 174 | test "script." do 175 | eex = build(""" 176 | script. 177 | alert("hi") 178 | """) 179 | 180 | assert eex == ~S""" 181 | 183 | """ 184 | end 185 | 186 | test "script. multiline" do 187 | eex = build(""" 188 | script. 189 | alert("hi") 190 | alert("hello") 191 | alert("hola") 192 | """) 193 | 194 | assert eex == ~S""" 195 | 199 | """ 200 | end 201 | 202 | @tag :pending 203 | test "ul: li: button Hello" 204 | 205 | test "multiline" do 206 | eex = build(""" 207 | - render( 208 | @conn) 209 | div 210 | """) 211 | 212 | assert eex == ~S""" 213 | <% render( 214 | @conn) %> 215 |
216 | """ 217 | end 218 | 219 | test "multiline =" do 220 | eex = build(""" 221 | = render( 222 | @conn) 223 | div 224 | """) 225 | 226 | assert eex == ~S""" 227 | <%= render( 228 | @conn) %> 229 |
230 | """ 231 | end 232 | 233 | test "newline with dot (#5)" do 234 | eex = build(""" 235 | li 236 | 237 | img(src=x.x) 238 | """) 239 | 240 | assert eex == ~S""" 241 |
  • 242 | <% 243 | %>><%= "\n" %>
  • 244 | """ 245 | end 246 | 247 | test "if-else" do 248 | eex = build(""" 249 | = if @hello 250 | div 251 | - else 252 | div 253 | """) 254 | 255 | assert eex == ~S""" 256 | <%= if @hello %><% 257 | %>
    258 | <% else %><% 259 | %>
    <% end %> 260 | """ 261 | end 262 | 263 | test "data attributes" do 264 | eex = build(""" 265 | a(required a=b) 266 | """) 267 | 268 | assert eex == ~S""" 269 | <%= raw(Expug.Runtime.attr("required", true)) %>> 270 | """ 271 | end 272 | end 273 | -------------------------------------------------------------------------------- /test/test_helper.exs: -------------------------------------------------------------------------------- 1 | ExUnit.start() 2 | -------------------------------------------------------------------------------- /test/todo_test.exs: -------------------------------------------------------------------------------- 1 | defmodule TodoTestd do 2 | use ExUnit.Case 3 | @moduletag :pending 4 | 5 | # test "Most everything" 6 | # test "Track line/column in tokens" 7 | # test "comma-delimited attributes" 8 | # test "Multiline attributes" 9 | # test "HTML escaping" 10 | # test "boolean value (`textarea(spellcheck=@spellcheck)`)" 11 | # test "Auto-end of `cond do` etc" 12 | # test "Nesting HTML comments" 13 | 14 | # Priority: 15 | # test "value-less attributes (`textarea(spellcheck)`)" 16 | # test "`.` raw text (like `script.`)" 17 | # test "multiline" 18 | # test "!= unescaped code" 19 | # test "Showing HTML comments with //" 20 | # test "space at the beginning" 21 | test "Block expansion (li: a)" 22 | test "HTML in Pug templates" 23 | 24 | # Lower priority: 25 | test "Spacing between <%= for %>" 26 | test "Self-closing tag syntax (img/)" 27 | test "Filters" 28 | end 29 | -------------------------------------------------------------------------------- /test/tokenizer_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExpugTokenizerTest do 2 | use ExUnit.Case 3 | 4 | import Expug.Tokenizer, only: [tokenize: 1] 5 | import Enum, only: [reverse: 1] 6 | 7 | doctest Expug.Tokenizer 8 | 9 | test "basic" do 10 | output = tokenize("head") 11 | assert reverse(output) == [ 12 | {{1, 1}, :indent, 0}, 13 | {{1, 1}, :element_name, "head"} 14 | ] 15 | end 16 | 17 | test "h1" do 18 | output = tokenize("h1") 19 | assert reverse(output) == [ 20 | {{1, 1}, :indent, 0}, 21 | {{1, 1}, :element_name, "h1"} 22 | ] 23 | end 24 | 25 | test "extra whitespaces (spaces)" do 26 | output = tokenize("h1 ") 27 | assert reverse(output) == [ 28 | {{1, 1}, :indent, 0}, 29 | {{1, 1}, :element_name, "h1"} 30 | ] 31 | end 32 | 33 | test "extra whitespaces (newline)" do 34 | output = tokenize("h1\n") 35 | assert reverse(output) == [ 36 | {{1, 1}, :indent, 0}, 37 | {{1, 1}, :element_name, "h1"} 38 | ] 39 | end 40 | 41 | test "extra whitespaces (newline and spaces)" do 42 | output = tokenize("h1 \n ") 43 | assert reverse(output) == [ 44 | {{1, 1}, :indent, 0}, 45 | {{1, 1}, :element_name, "h1"} 46 | ] 47 | end 48 | 49 | test "xml namespace" do 50 | output = tokenize("html:h1") 51 | assert reverse(output) == [ 52 | {{1, 1}, :indent, 0}, 53 | {{1, 1}, :element_name, "html:h1"} 54 | ] 55 | end 56 | 57 | test "dashes" do # but why? 
58 | output = tokenize("Todo-app") 59 | assert reverse(output) == [ 60 | {{1, 1}, :indent, 0}, 61 | {{1, 1}, :element_name, "Todo-app"} 62 | ] 63 | end 64 | 65 | test "basic with text" do 66 | output = tokenize("title Hello world") 67 | assert reverse(output) == [ 68 | {{1, 1}, :indent, 0}, 69 | {{1, 1}, :element_name, "title"}, 70 | {{1, 7}, :raw_text, "Hello world"} 71 | ] 72 | end 73 | 74 | test "title= name" do 75 | output = tokenize("title= name") 76 | assert reverse(output) == [ 77 | {{1, 1}, :indent, 0}, 78 | {{1, 1}, :element_name, "title"}, 79 | {{1, 8}, :buffered_text, "name"} 80 | ] 81 | end 82 | 83 | test "title!= name" do 84 | output = tokenize("title!= name") 85 | assert reverse(output) == [ 86 | {{1, 1}, :indent, 0}, 87 | {{1, 1}, :element_name, "title"}, 88 | {{1, 9}, :unescaped_text, "name"} 89 | ] 90 | end 91 | 92 | test "| name $200" do 93 | output = tokenize("| name $200") 94 | assert reverse(output) == [ 95 | {{1, 1}, :indent, 0}, 96 | {{1, 3}, :raw_text, "name $200"} 97 | ] 98 | end 99 | 100 | test "multiline" do 101 | output = tokenize("head\nbody\n") 102 | assert reverse(output) == [ 103 | {{1, 1}, :indent, 0}, 104 | {{1, 1}, :element_name, "head"}, 105 | {{2, 1}, :indent, 0}, 106 | {{2, 1}, :element_name, "body"}, 107 | ] 108 | end 109 | 110 | test "multiline with blank lines" do 111 | output = tokenize("head\n \n \nbody\n") 112 | assert reverse(output) == [ 113 | {{1, 1}, :indent, 0}, 114 | {{1, 1}, :element_name, "head"}, 115 | {{4, 1}, :indent, 0}, 116 | {{4, 1}, :element_name, "body"}, 117 | ] 118 | end 119 | 120 | test "div[]" do 121 | output = tokenize("div[]") 122 | assert reverse(output) == [ 123 | {{1, 1}, :indent, 0}, 124 | {{1, 1}, :element_name, "div"}, 125 | {{1, 4}, :attribute_open, "["}, 126 | {{1, 5}, :attribute_close, "]"} 127 | ] 128 | end 129 | 130 | test "div()" do 131 | output = tokenize("div()") 132 | assert reverse(output) == [ 133 | {{1, 1}, :indent, 0}, 134 | {{1, 1}, :element_name, "div"}, 135 | {{1, 4}, :attribute_open, "("}, 136 | {{1, 5}, :attribute_close, ")"} 137 | ] 138 | end 139 | 140 | test "div(id=\"hi\")" do 141 | output = tokenize("div(id=\"hi\")") 142 | assert reverse(output) == [ 143 | {{1, 1}, :indent, 0}, 144 | {{1, 1}, :element_name, "div"}, 145 | {{1, 4}, :attribute_open, "("}, 146 | {{1, 5}, :attribute_key, "id"}, 147 | {{1, 8}, :attribute_value, "\"hi\""}, 148 | {{1, 12}, :attribute_close, ")"} 149 | ] 150 | end 151 | 152 | test "div(id='hi')" do 153 | output = tokenize("div(id='hi')") 154 | assert reverse(output) == [ 155 | {{1, 1}, :indent, 0}, 156 | {{1, 1}, :element_name, "div"}, 157 | {{1, 4}, :attribute_open, "("}, 158 | {{1, 5}, :attribute_key, "id"}, 159 | {{1, 8}, :attribute_value, "'hi'"}, 160 | {{1, 12}, :attribute_close, ")"} 161 | ] 162 | end 163 | 164 | test ~S[div(id='\'')] do 165 | output = tokenize(~S[div(id='\'')]) 166 | assert reverse(output) == [ 167 | {{1, 1}, :indent, 0}, 168 | {{1, 1}, :element_name, "div"}, 169 | {{1, 4}, :attribute_open, "("}, 170 | {{1, 5}, :attribute_key, "id"}, 171 | {{1, 8}, :attribute_value, ~S['\'']}, 172 | {{1, 12}, :attribute_close, ")"} 173 | ] 174 | end 175 | 176 | test ~S[div(id='hi\'')] do 177 | output = tokenize(~S[div(id='hi\'')]) 178 | assert reverse(output) == [ 179 | {{1, 1}, :indent, 0}, 180 | {{1, 1}, :element_name, "div"}, 181 | {{1, 4}, :attribute_open, "("}, 182 | {{1, 5}, :attribute_key, "id"}, 183 | {{1, 8}, :attribute_value, ~S['hi\'']}, 184 | {{1, 14}, :attribute_close, ")"} 185 | ] 186 | end 187 | 188 | test "div(id=\"hi\" class=\"foo\")" do 189 | 
output = tokenize("div(id=\"hi\" class=\"foo\")") 190 | assert reverse(output) == [ 191 | {{1, 1}, :indent, 0}, 192 | {{1, 1}, :element_name, "div"}, 193 | {{1, 4}, :attribute_open, "("}, 194 | {{1, 5}, :attribute_key, "id"}, 195 | {{1, 8}, :attribute_value, "\"hi\""}, 196 | {{1, 13}, :attribute_key, "class"}, 197 | {{1, 19}, :attribute_value, "\"foo\""}, 198 | {{1, 24}, :attribute_close, ")"} 199 | ] 200 | end 201 | 202 | test "class" do 203 | output = tokenize("div.blue") 204 | assert reverse(output) == [ 205 | {{1, 1}, :indent, 0}, 206 | {{1, 1}, :element_name, "div"}, 207 | {{1, 5}, :element_class, "blue"} 208 | ] 209 | end 210 | 211 | test "classes" do 212 | output = tokenize("div.blue.sm") 213 | assert reverse(output) == [ 214 | {{1, 1}, :indent, 0}, 215 | {{1, 1}, :element_name, "div"}, 216 | {{1, 5}, :element_class, "blue"}, 217 | {{1, 10}, :element_class, "sm"} 218 | ] 219 | end 220 | 221 | test "classes and ID" do 222 | output = tokenize("div.blue.sm#box") 223 | assert reverse(output) == [ 224 | {{1, 1}, :indent, 0}, 225 | {{1, 1}, :element_name, "div"}, 226 | {{1, 5}, :element_class, "blue"}, 227 | {{1, 10}, :element_class, "sm"}, 228 | {{1, 13}, :element_id, "box"} 229 | ] 230 | end 231 | 232 | test "parse error" do 233 | try do 234 | tokenize("hello\nhuh?") 235 | catch output -> 236 | assert %{ 237 | type: :parse_error, 238 | position: {2, 4}, 239 | expected: [:eq, :bang_eq, :whitespace, :block_text, :attribute_open] 240 | } = output 241 | end 242 | end 243 | 244 | test "| raw text" do 245 | output = tokenize("| text") 246 | assert reverse(output) == [ 247 | {{1, 1}, :indent, 0}, 248 | {{1, 3}, :raw_text, "text"} 249 | ] 250 | end 251 | 252 | test "= buffered text" do 253 | output = tokenize("= text") 254 | assert reverse(output) == [ 255 | {{1, 1}, :indent, 0}, 256 | {{1, 3}, :buffered_text, "text"} 257 | ] 258 | end 259 | 260 | test "- statement" do 261 | output = tokenize("- text") 262 | assert reverse(output) == [ 263 | {{1, 1}, :indent, 0}, 264 | {{1, 3}, :statement, "text"} 265 | ] 266 | end 267 | 268 | test "- statement multiline" do 269 | output = tokenize("- text,\n foo") 270 | assert reverse(output) == [ 271 | {{1, 1}, :indent, 0}, 272 | {{1, 3}, :statement, "text,\n foo"} 273 | ] 274 | end 275 | 276 | test "- statement multiline (2)" do 277 | output = tokenize("- text(\n foo)\ndiv") 278 | assert reverse(output) == [ 279 | {{1, 1}, :indent, 0}, 280 | {{1, 3}, :statement, "text(\n foo)"}, 281 | {{3, 1}, :indent, 0}, 282 | {{3, 1}, :element_name, "div"} 283 | ] 284 | end 285 | 286 | test "doctype" do 287 | output = tokenize("doctype html5") 288 | assert reverse(output) == [ 289 | {{1, 9}, :doctype, "html5"} 290 | ] 291 | end 292 | 293 | test "doctype + html" do 294 | output = tokenize("doctype html5\nhtml") 295 | assert reverse(output) == [ 296 | {{1, 9}, :doctype, "html5"}, 297 | {{2, 1}, :indent, 0}, 298 | {{2, 1}, :element_name, "html"} 299 | ] 300 | end 301 | 302 | test "div(id=(hello))" do 303 | output = tokenize("div(id=(hello))") 304 | assert reverse(output) == [ 305 | {{1, 1}, :indent, 0}, 306 | {{1, 1}, :element_name, "div"}, 307 | {{1, 4}, :attribute_open, "("}, 308 | {{1, 5}, :attribute_key, "id"}, 309 | {{1, 8}, :attribute_value, "(hello)"}, 310 | {{1, 15}, :attribute_close, ")"} 311 | ] 312 | end 313 | 314 | test "div(id=(hello(world)))" do 315 | output = tokenize("div(id=(hello(world)))") 316 | assert reverse(output) == [ 317 | {{1, 1}, :indent, 0}, 318 | {{1, 1}, :element_name, "div"}, 319 | {{1, 4}, :attribute_open, "("}, 320 | {{1, 5}, :attribute_key, 
"id"}, 321 | {{1, 8}, :attribute_value, "(hello(world))"}, 322 | {{1, 22}, :attribute_close, ")"} 323 | ] 324 | end 325 | 326 | test "div(id=(hello(worl[]d)))" do 327 | output = tokenize("div(id=(hello(worl[]d)))") 328 | assert reverse(output) == [ 329 | {{1, 1}, :indent, 0}, 330 | {{1, 1}, :element_name, "div"}, 331 | {{1, 4}, :attribute_open, "("}, 332 | {{1, 5}, :attribute_key, "id"}, 333 | {{1, 8}, :attribute_value, "(hello(worl[]d))"}, 334 | {{1, 24}, :attribute_close, ")"} 335 | ] 336 | end 337 | 338 | test ~S[div(id="hello #{world}")] do 339 | output = tokenize(~S[div(id="hello #{world}")]) 340 | assert reverse(output) == [ 341 | {{1, 1}, :indent, 0}, 342 | {{1, 1}, :element_name, "div"}, 343 | {{1, 4}, :attribute_open, "("}, 344 | {{1, 5}, :attribute_key, "id"}, 345 | {{1, 8}, :attribute_value, ~S["hello #{world}"]}, 346 | {{1, 24}, :attribute_close, ")"} 347 | ] 348 | end 349 | 350 | test ~S[div(id=hello)] do 351 | output = tokenize(~S[div(id=hello)]) 352 | assert reverse(output) == [ 353 | {{1, 1}, :indent, 0}, 354 | {{1, 1}, :element_name, "div"}, 355 | {{1, 4}, :attribute_open, "("}, 356 | {{1, 5}, :attribute_key, "id"}, 357 | {{1, 8}, :attribute_value, "hello"}, 358 | {{1, 13}, :attribute_close, ")"} 359 | ] 360 | end 361 | 362 | test ~S[div $100] do 363 | output = tokenize(~S[div $100]) 364 | assert reverse(output) == [ 365 | {{1, 1}, :indent, 0}, 366 | {{1, 1}, :element_name, "div"}, 367 | {{1, 5}, :raw_text, "$100"} 368 | ] 369 | end 370 | 371 | 372 | test "with indent" do 373 | output = tokenize("head\n title") 374 | assert reverse(output) == [ 375 | {{1, 1}, :indent, 0}, 376 | {{1, 1}, :element_name, "head"}, 377 | {{2, 1}, :indent, 2}, 378 | {{2, 3}, :element_name, "title"} 379 | ] 380 | end 381 | 382 | test ~S[div(src=a id=b)] do 383 | output = tokenize(~S[div(src=a id=b)]) 384 | assert reverse(output) == [ 385 | {{1, 1}, :indent, 0}, 386 | {{1, 1}, :element_name, "div"}, 387 | {{1, 4}, :attribute_open, "("}, 388 | {{1, 5}, :attribute_key, "src"}, 389 | {{1, 9}, :attribute_value, "a"}, 390 | {{1, 11}, :attribute_key, "id"}, 391 | {{1, 14}, :attribute_value, "b"}, 392 | {{1, 15}, :attribute_close, ")"} 393 | ] 394 | end 395 | 396 | test ~S[div( src=a id=b )] do 397 | output = tokenize(~S[div( src=a id=b )]) 398 | assert reverse(output) == [ 399 | {{1, 1}, :indent, 0}, 400 | {{1, 1}, :element_name, "div"}, 401 | {{1, 4}, :attribute_open, "("}, 402 | {{1, 6}, :attribute_key, "src"}, 403 | {{1, 10}, :attribute_value, "a"}, 404 | {{1, 12}, :attribute_key, "id"}, 405 | {{1, 15}, :attribute_value, "b"}, 406 | {{1, 17}, :attribute_close, ")"} 407 | ] 408 | end 409 | 410 | test ~S[div(src=a, id=b)] do 411 | output = tokenize(~S[div(src=a, id=b)]) 412 | assert reverse(output) == [ 413 | {{1, 1}, :indent, 0}, 414 | {{1, 1}, :element_name, "div"}, 415 | {{1, 4}, :attribute_open, "("}, 416 | {{1, 5}, :attribute_key, "src"}, 417 | {{1, 9}, :attribute_value, "a"}, 418 | {{1, 12}, :attribute_key, "id"}, 419 | {{1, 15}, :attribute_value, "b"}, 420 | {{1, 16}, :attribute_close, ")"} 421 | ] 422 | end 423 | 424 | test "newline between attributes" do 425 | output = tokenize("div(src=a,\n id=b)") 426 | assert reverse(output) == [ 427 | {{1, 1}, :indent, 0}, 428 | {{1, 1}, :element_name, "div"}, 429 | {{1, 4}, :attribute_open, "("}, 430 | {{1, 5}, :attribute_key, "src"}, 431 | {{1, 9}, :attribute_value, "a"}, 432 | {{2, 3}, :attribute_key, "id"}, 433 | {{2, 6}, :attribute_value, "b"}, 434 | {{2, 7}, :attribute_close, ")"} 435 | ] 436 | end 437 | 438 | test "multiline attribute contents" do 
439 | output = tokenize("div(\n src=a\n )") 440 | assert reverse(output) == [ 441 | {{1, 1}, :indent, 0}, 442 | {{1, 1}, :element_name, "div"}, 443 | {{1, 4}, :attribute_open, "("}, 444 | {{2, 3}, :attribute_key, "src"}, 445 | {{2, 7}, :attribute_value, "a"}, 446 | {{3, 3}, :attribute_close, ")"} 447 | ] 448 | end 449 | 450 | test "multiline expressions" do 451 | output = tokenize("div(src=(a\n b))") 452 | assert reverse(output) == [ 453 | {{1, 1}, :indent, 0}, 454 | {{1, 1}, :element_name, "div"}, 455 | {{1, 4}, :attribute_open, "("}, 456 | {{1, 5}, :attribute_key, "src"}, 457 | {{1, 9}, :attribute_value, "(a\n b)"}, 458 | {{2, 5}, :attribute_close, ")"} 459 | ] 460 | end 461 | 462 | test "empty attributes" do 463 | output = tokenize("div(src=\"\")") 464 | assert reverse(output) == [ 465 | {{1, 1}, :indent, 0}, 466 | {{1, 1}, :element_name, "div"}, 467 | {{1, 4}, :attribute_open, "("}, 468 | {{1, 5}, :attribute_key, "src"}, 469 | {{1, 9}, :attribute_value, "\"\""}, 470 | {{1, 11}, :attribute_close, ")"} 471 | ] 472 | end 473 | 474 | test "-# comments" do 475 | output = tokenize("div\n-# ...") 476 | assert reverse(output) == [ 477 | {{1, 1}, :indent, 0}, 478 | {{1, 1}, :element_name, "div"}, 479 | {{2, 1}, :indent, 0}, 480 | {{2, 4}, :line_comment, "..."} 481 | ] 482 | end 483 | 484 | test "-# comments, blank" do 485 | output = tokenize("div\n-#") 486 | assert reverse(output) == [ 487 | {{1, 1}, :indent, 0}, 488 | {{1, 1}, :element_name, "div"}, 489 | {{2, 1}, :indent, 0}, 490 | {{2, 3}, :line_comment, ""} 491 | ] 492 | end 493 | 494 | test "-# comments, space" do 495 | output = tokenize("div\n-# ") 496 | assert reverse(output) == [ 497 | {{1, 1}, :indent, 0}, 498 | {{1, 1}, :element_name, "div"}, 499 | {{2, 1}, :indent, 0}, 500 | {{2, 3}, :line_comment, ""} 501 | ] 502 | end 503 | 504 | test "-# comments, nesting" do 505 | output = tokenize("-#\n foobar") 506 | assert reverse(output) == [ 507 | {{1, 1}, :indent, 0}, 508 | {{1, 3}, :line_comment, ""}, 509 | {{2, 3}, :subindent, "foobar"} 510 | ] 511 | end 512 | 513 | test "-// comments, nesting" do 514 | output = tokenize("-//\n foobar") 515 | assert reverse(output) == [ 516 | {{1, 1}, :indent, 0}, 517 | {{1, 4}, :line_comment, ""}, 518 | {{2, 3}, :subindent, "foobar"} 519 | ] 520 | end 521 | 522 | test "-# comments, nesting and after" do 523 | output = tokenize("-#\n foobar\ndiv") 524 | assert reverse(output) == [ 525 | {{1, 1}, :indent, 0}, 526 | {{1, 3}, :line_comment, ""}, 527 | {{2, 3}, :subindent, "foobar"}, 528 | {{3, 1}, :indent, 0}, 529 | {{3, 1}, :element_name, "div"} 530 | ] 531 | end 532 | 533 | test "// comments" do 534 | output = tokenize("div\n// ...") 535 | assert reverse(output) == [ 536 | {{1, 1}, :indent, 0}, 537 | {{1, 1}, :element_name, "div"}, 538 | {{2, 1}, :indent, 0}, 539 | {{2, 4}, :html_comment, "..."} 540 | ] 541 | end 542 | 543 | test "// comments, nesting" do 544 | output = tokenize("div\n// ...\n hi") 545 | assert reverse(output) == [ 546 | {{1, 1}, :indent, 0}, 547 | {{1, 1}, :element_name, "div"}, 548 | {{2, 1}, :indent, 0}, 549 | {{2, 4}, :html_comment, "..."}, 550 | {{3, 3}, :subindent, "hi"} 551 | ] 552 | end 553 | 554 | test "// comments, with trailing" do 555 | output = tokenize("div\n// ...\nh1") 556 | assert reverse(output) == [ 557 | {{1, 1}, :indent, 0}, 558 | {{1, 1}, :element_name, "div"}, 559 | {{2, 1}, :indent, 0}, 560 | {{2, 4}, :html_comment, "..."}, 561 | {{3, 1}, :indent, 0}, 562 | {{3, 1}, :element_name, "h1"} 563 | ] 564 | end 565 | 566 | test "- with children" do 567 | output = 
tokenize("- hi\n div") 568 | assert reverse(output) == [ 569 | {{1, 1}, :indent, 0}, 570 | {{1, 3}, :statement, "hi"}, 571 | {{2, 1}, :indent, 2}, 572 | {{2, 3}, :element_name, "div"} 573 | ] 574 | end 575 | 576 | test "= with children" do 577 | output = tokenize("= hi\n div") 578 | assert reverse(output) == [ 579 | {{1, 1}, :indent, 0}, 580 | {{1, 3}, :buffered_text, "hi"}, 581 | {{2, 1}, :indent, 2}, 582 | {{2, 3}, :element_name, "div"} 583 | ] 584 | end 585 | 586 | test "= full multiline" do 587 | output = tokenize("=\n ab\n cd") 588 | assert reverse(output) == [ 589 | {{1, 1}, :indent, 0}, 590 | {{1, 2}, :buffered_text, ""}, 591 | {{2, 3}, :subindent, "ab"}, 592 | {{3, 3}, :subindent, "cd"} 593 | ] 594 | end 595 | 596 | test "separating attributes with newlines" do 597 | output = tokenize("div(a=1\nb=2)") 598 | assert reverse(output) == [ 599 | {{1, 1}, :indent, 0}, 600 | {{1, 1}, :element_name, "div"}, 601 | {{1, 4}, :attribute_open, "("}, 602 | {{1, 5}, :attribute_key, "a"}, 603 | {{1, 7}, :attribute_value, "1"}, 604 | {{2, 1}, :attribute_key, "b"}, 605 | {{2, 3}, :attribute_value, "2"}, 606 | {{2, 4}, :attribute_close, ")"} 607 | ] 608 | end 609 | 610 | test "script." do 611 | output = tokenize("script.\n hello") 612 | assert reverse(output) == [ 613 | {{1, 1}, :indent, 0}, 614 | {{1, 1}, :element_name, "script"}, 615 | {{1, 7}, :block_text, "."}, 616 | {{2, 3}, :subindent, "hello"} 617 | ] 618 | end 619 | 620 | test "script. with class" do 621 | output = tokenize("script.box.\n hello") 622 | assert reverse(output) == [ 623 | {{1, 1}, :indent, 0}, 624 | {{1, 1}, :element_name, "script"}, 625 | {{1, 8}, :element_class, "box"}, 626 | {{1, 11}, :block_text, "."}, 627 | {{2, 3}, :subindent, "hello"} 628 | ] 629 | end 630 | 631 | test "script. with class and attributes" do 632 | output = tokenize("script.box(id=\"foo\").\n hello") 633 | assert reverse(output) == [ 634 | {{1, 1}, :indent, 0}, 635 | {{1, 1}, :element_name, "script"}, 636 | {{1, 8}, :element_class, "box"}, 637 | {{1, 11}, :attribute_open, "("}, 638 | {{1, 12}, :attribute_key, "id"}, 639 | {{1, 15}, :attribute_value, "\"foo\""}, 640 | {{1, 20}, :attribute_close, ")"}, 641 | {{1, 21}, :block_text, "."}, 642 | {{2, 3}, :subindent, "hello"} 643 | ] 644 | end 645 | 646 | test "script. multiline" do 647 | output = tokenize("script.\n hello\n world") 648 | assert reverse(output) == [ 649 | {{1, 1}, :indent, 0}, 650 | {{1, 1}, :element_name, "script"}, 651 | {{1, 7}, :block_text, "."}, 652 | {{2, 3}, :subindent, "hello"}, 653 | {{3, 3}, :subindent, " world"} 654 | ] 655 | end 656 | 657 | test "script. 
multiline with sibling" do 658 | output = tokenize("script.\n hello\n world\ndiv") 659 | assert reverse(output) == [ 660 | {{1, 1}, :indent, 0}, 661 | {{1, 1}, :element_name, "script"}, 662 | {{1, 7}, :block_text, "."}, 663 | {{2, 3}, :subindent, "hello"}, 664 | {{3, 3}, :subindent, " world"}, 665 | {{4, 1}, :indent, 0}, 666 | {{4, 1}, :element_name, "div"} 667 | ] 668 | end 669 | 670 | test "value-less attributes" do 671 | output = tokenize("div(src)") 672 | assert reverse(output) == [ 673 | {{1, 1}, :indent, 0}, 674 | {{1, 1}, :element_name, "div"}, 675 | {{1, 4}, :attribute_open, "("}, 676 | {{1, 5}, :attribute_key, "src"}, 677 | {{1, 8}, :attribute_close, ")"} 678 | ] 679 | end 680 | 681 | test "newline with dot (#5)" do 682 | output = tokenize(""" 683 | li 684 | 685 | img(src=x.x) 686 | """) 687 | 688 | assert reverse(output) == [ 689 | {{1, 1}, :indent, 0}, 690 | {{1, 1}, :element_name, "li"}, 691 | {{3, 1}, :indent, 2}, 692 | {{3, 3}, :element_name, "img"}, 693 | {{3, 6}, :attribute_open, "("}, 694 | {{3, 7}, :attribute_key, "src"}, 695 | {{3, 11}, :attribute_value, "x.x"}, 696 | {{3, 14}, :attribute_close, ")"} 697 | ] 698 | end 699 | 700 | test "boolean attributes" do 701 | output = tokenize(""" 702 | a(required) 703 | """) 704 | 705 | assert reverse(output) == [ 706 | {{1, 1}, :indent, 0}, 707 | {{1, 1}, :element_name, "a"}, 708 | {{1, 2}, :attribute_open, "("}, 709 | {{1, 3}, :attribute_key, "required"}, 710 | {{1, 11}, :attribute_close, ")"} 711 | ] 712 | end 713 | 714 | test "multiple boolean attributes" do 715 | output = tokenize(""" 716 | a(required checked) 717 | """) 718 | 719 | assert reverse(output) == [ 720 | {{1, 1}, :indent, 0}, 721 | {{1, 1}, :element_name, "a"}, 722 | {{1, 2}, :attribute_open, "("}, 723 | {{1, 3}, :attribute_key, "required"}, 724 | {{1, 12}, :attribute_key, "checked"}, 725 | {{1, 19}, :attribute_close, ")"} 726 | ] 727 | end 728 | 729 | test "data attributes mixed with others" do 730 | output = tokenize(""" 731 | a(required a=b) 732 | """) 733 | 734 | assert reverse(output) == [ 735 | {{1, 1}, :indent, 0}, 736 | {{1, 1}, :element_name, "a"}, 737 | {{1, 2}, :attribute_open, "("}, 738 | {{1, 3}, :attribute_key, "required"}, 739 | {{1, 12}, :attribute_key, "a"}, 740 | {{1, 14}, :attribute_value, "b"}, 741 | {{1, 15}, :attribute_close, ")"} 742 | ] 743 | end 744 | 745 | test "an element with text and then without" do 746 | output = tokenize("a hi\nlink") 747 | assert reverse(output) == [ 748 | {{1, 1}, :indent, 0}, 749 | {{1, 1}, :element_name, "a"}, 750 | {{1, 3}, :raw_text, "hi"}, 751 | {{2, 1}, :indent, 0}, 752 | {{2, 1}, :element_name, "link"} 753 | ] 754 | end 755 | 756 | # test "comma delimited attributes" 757 | # test "script." 
758 | # test "comments" 759 | # test "!=" 760 | end 761 | -------------------------------------------------------------------------------- /test/tokenizer_tools_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExpugTokenizerToolsTest do 2 | use ExUnit.Case 3 | 4 | doctest Expug.TokenizerTools 5 | end 6 | -------------------------------------------------------------------------------- /test/transformer_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExpugTransormerTest do 2 | use ExUnit.Case 3 | doctest Expug.Transformer 4 | end 5 | -------------------------------------------------------------------------------- /test/visitor_test.exs: -------------------------------------------------------------------------------- 1 | defmodule VisitorTest do 2 | use ExUnit.Case 3 | doctest Expug.Visitor 4 | end 5 | --------------------------------------------------------------------------------