├── README.md
├── example
│   ├── person-docs.lua
│   ├── person.md
│   └── person.nelua
├── nelua-docs.lua
├── nldoc.lua
└── tests
    └── test.lua
/README.md:
--------------------------------------------------------------------------------
1 | # NLDoc - Nelua Documentation Generator
2 |
3 | This is a tool to generate documentation for
4 | [Nelua](https://github.com/edubart/nelua-lang) source files.
5 |
6 | This library was created mainly to generate documentation for the Nelua
7 | standard libraries,
8 | but it can also be used to generate documentation for other projects,
9 | though you may have to personalize or configure it for your needs.
10 |
11 | This library is theoretically easy to hack in case needed,
12 | it's just a single Lua script with comments, and it does not
13 | depend on anything other than [LPegRex](https://github.com/edubart/lpegrex) and Lua.
14 |
15 | It contains the whole Nelua grammar definition inside,
16 | theoretically this library could also be used to generate documentation for Lua sources,
17 | because the Nelua grammar is a super-set of the Lua grammar.
18 |
19 | ## How it works
20 |
21 | This library parses a source file twice,
22 | the first parse outputs the complete AST for the source file.
23 | A second parse collects all line comments.
24 | After both AST and comments are parsed, all AST nodes are
25 | visited while collecting comments just before the AST node.
26 | Only variable declarations and function definitions are considered for documentation.
27 | Local symbols and variables are ignored in the documentation,
28 | unless you optionally force a name to be included.
29 | Finally documentation is emitted for filtered AST nodes using
30 | a markdown template with gathered code and comment information,
31 | this template can be customized.
32 |
33 | The library design resembles how the Nelua compiler works internally,
34 | but in a very simple manner, because like the Nelua compiler it has
35 | a parser made in LPegRex, a visitor pattern to traverse the AST nodes,
36 | and a generator to emit text, but here we emit documentation
37 | instead of C code.
38 |
39 | ## Complex Example
40 |
41 | The
42 | [nelua-docs.lua](https://github.com/edubart/nldoc/blob/master/nelua-docs.lua) file
43 | is used to generate the documentation in the Nelua website, specifically the
44 | [libraries](https://nelua.io/libraries/)
45 | and
46 | [C libraries](https://nelua.io/clibraries/) pages.
47 |
48 | Here is how to quickly generate that documentation:
49 |
50 | ```bash
51 | git clone https://github.com/edubart/nelua-lang.git
52 | git clone https://github.com/edubart/nldoc.git && cd nldoc
53 | lua nelua-docs.lua ../nelua-lang
54 | ```
55 |
56 | This requires Lua 5.4 and LPegRex to be installed, alternatively
57 | you can run with `nelua --script`, as the Nelua compiler comes with Lua 5.4 and LPegRex bundled:
58 |
59 | ```bash
60 | nelua --script nelua-docs.lua ../nelua-lang
61 | ```
62 |
63 | It will generate documentation according to the rules defined in `nelua-docs.lua`,
64 | read the file to understand how it works.
65 |
66 | ## Small Example
67 |
68 | The [example folder](https://github.com/edubart/nldoc/blob/master/example/)
69 | contains a simple example on how to generate documentation for a small library:
70 |
71 | - [example/person.nelua](https://github.com/edubart/nldoc/blob/master/example/person.nelua) the source file that will be parsed and documented.
72 | - [example/person-docs.lua](https://github.com/edubart/nldoc/blob/master/example/person-docs.lua) a lua script that generates the documentation.
73 | - [example/person.md](https://github.com/edubart/nldoc/blob/master/example/person.md) the generated documentation output in Markdown format.
74 |
75 | To regenerate `example/person.md` run the following:
76 |
77 | ```bash
78 | nelua --script example/person-docs.lua
79 | ```
80 |
--------------------------------------------------------------------------------
/example/person-docs.lua:
--------------------------------------------------------------------------------
1 | local nldoc = require 'nldoc'
2 |
3 | local emitter = nldoc.Emitter.create()
4 |
5 | -- add documentation header
6 | emitter:add[[
7 | # Documentation
8 |
9 | Just an example documentation.
10 | This text goes in the top of the page.
11 | ]]
12 |
13 | nldoc.generate_doc(emitter, 'example/person.nelua')
14 |
15 | -- add documentation footer
16 | emitter:add[[
17 |
18 | You have reached the end of the documentation!
19 | ]]
20 |
21 | nldoc.write_file('example/person.md', emitter:generate())
22 |
--------------------------------------------------------------------------------
/example/person.md:
--------------------------------------------------------------------------------
1 | # Documentation
2 |
3 | Just an example documentation.
4 | This text goes in the top of the page.
5 | ## person
6 |
7 | Person library.
8 |
9 | This library is just an example for documentation.
10 |
11 | ### Person
12 |
13 | ```nelua
14 | global Person = @record{
15 | name: string, -- Full name.
16 | age: integer, -- Years since birth.
17 | }
18 | ```
19 |
20 | Person record.
21 |
22 | ### Person.create
23 |
24 | ```nelua
25 | function Person.create(name: string, age: integer): *Person
26 | ```
27 |
28 | Creates a new person with `name` and `age`.
29 |
30 | ### Person:get_name
31 |
32 | ```nelua
33 | function Person:get_name(): string
34 | ```
35 |
36 | Returns the person name.
37 |
38 | ### Person:get_age
39 |
40 | ```nelua
41 | function Person:get_age(): integer
42 | ```
43 |
44 | Returns the person age.
45 |
46 | ---
47 |
48 | You have reached the end of the documentation!
49 |
--------------------------------------------------------------------------------
/example/person.nelua:
--------------------------------------------------------------------------------
1 | --[[
2 | Person library.
3 |
4 | This library is just an example for documentation.
5 | ]]
6 |
7 | require 'allocators.default'
8 |
9 | -- Person record.
10 | global Person = @record{
11 | name: string, -- Full name.
12 | age: integer, -- Years since birth.
13 | }
14 |
15 | -- Creates a new person with `name` and `age`.
16 | function Person.create(name: string, age: integer): *Person
17 | return new((@Person){name = name, age = age})
18 | end
19 |
20 | -- Returns the person name.
21 | function Person:get_name(): string
22 | return self.name
23 | end
24 |
25 | -- Returns the person age.
26 | function Person:get_age(): integer
27 | return self.age
28 | end
29 |
--------------------------------------------------------------------------------
/nelua-docs.lua:
--------------------------------------------------------------------------------
1 | local nldoc = require 'nldoc'
2 |
3 | -- Generate standard libraries documentation.
4 | local function gen_stdlib(neluadir)
5 | -- emitter used to concatenate documentation text
6 | local emitter = nldoc.Emitter.create()
7 |
8 | -- add documentation heading
9 | emitter:add[[
10 | ---
11 | layout: docs
12 | title: Libraries
13 | permalink: /libraries/
14 | categories: docs toc
15 | toc: true
16 | order: 4
17 | ---
18 |
19 | This is a list of Nelua standard libraries.
20 | {: .lead}
21 |
22 | To use a library, use `require 'libraryname'`{:.language-nelua}.
23 | {: .callout.callout-info}
24 |
25 | ]]
26 |
27 | -- parse and generate documentation for many sources
28 | nldoc.generate_doc(emitter, neluadir..'/lib/builtins.nelua')
29 | nldoc.generate_doc(emitter, neluadir..'/lib/arg.nelua')
30 | nldoc.generate_doc(emitter, neluadir..'/lib/iterators.nelua')
31 | nldoc.generate_doc(emitter, neluadir..'/lib/io.nelua')
32 | nldoc.generate_doc(emitter, neluadir..'/lib/filestream.nelua')
33 | nldoc.generate_doc(emitter, neluadir..'/lib/math.nelua')
34 | nldoc.generate_doc(emitter, neluadir..'/lib/memory.nelua')
35 | nldoc.generate_doc(emitter, neluadir..'/lib/os.nelua')
36 | nldoc.generate_doc(emitter, neluadir..'/lib/span.nelua', {
37 | include_names={spanT=true}
38 | })
39 | nldoc.generate_doc(emitter, neluadir..'/lib/string.nelua', {
40 | include_names={string=true},
41 | top_template = [[
42 | ## $(name)
43 |
44 | $(text)
45 |
46 | ### string
47 |
48 | ```nelua
49 | global string = @record{
50 | data: *[0]byte,
51 | size: usize,
52 | }
53 | ```
54 |
55 | The string record defined in the compiler sources.
56 |
57 | New strings always have the `data` buffer null terminated by default
58 | to have more compatibility with C APIs.
59 | The `data` buffer is 0-indexed (unlike string APIs).
60 | ]]
61 | })
62 | nldoc.generate_doc(emitter, neluadir..'/lib/stringbuilder.nelua', {
63 | include_names={stringbuilderT=true}
64 | })
65 | nldoc.generate_doc(emitter, neluadir..'/lib/traits.nelua')
66 | nldoc.generate_doc(emitter, neluadir..'/lib/utf8.nelua')
67 | nldoc.generate_doc(emitter, neluadir..'/lib/coroutine.nelua')
68 | nldoc.generate_doc(emitter, neluadir..'/lib/hash.nelua')
69 | nldoc.generate_doc(emitter, neluadir..'/lib/vector.nelua', {
70 | include_names={vectorT=true}
71 | })
72 | nldoc.generate_doc(emitter, neluadir..'/lib/sequence.nelua', {
73 | include_names={sequenceT=true}
74 | })
75 | nldoc.generate_doc(emitter, neluadir..'/lib/list.nelua', {
76 | include_names={listnodeT=true, listT=true}
77 | })
78 | nldoc.generate_doc(emitter, neluadir..'/lib/hashmap.nelua', {
79 | include_names={hashnodeT=true, hashmapT=true}
80 | })
81 |
82 | nldoc.generate_doc(emitter, neluadir..'/lib/allocators/default.nelua', {
83 | name='allocators.default',
84 | })
85 | nldoc.generate_doc(emitter, neluadir..'/lib/allocators/allocator.nelua', {
86 | name='allocators.allocator',
87 | include_names={Allocator=true}
88 | })
89 | nldoc.generate_doc(emitter, neluadir..'/lib/allocators/general.nelua', {
90 | name='allocators.general',
91 | })
92 | nldoc.generate_doc(emitter, neluadir..'/lib/allocators/gc.nelua', {
93 | name='allocators.gc',
94 | })
95 | nldoc.generate_doc(emitter, neluadir..'/lib/allocators/arena.nelua', {
96 | name='allocators.arena',
97 | include_names={ArenaAllocatorT=true}
98 | })
99 | nldoc.generate_doc(emitter, neluadir..'/lib/allocators/stack.nelua', {
100 | name='allocators.stack',
101 | include_names={StackAllocatorT=true}
102 | })
103 | nldoc.generate_doc(emitter, neluadir..'/lib/allocators/pool.nelua', {
104 | name='allocators.pool',
105 | include_names={PoolAllocatorT=true}
106 | })
107 | nldoc.generate_doc(emitter, neluadir..'/lib/allocators/heap.nelua', {
108 | name='allocators.heap',
109 | include_names={HeapAllocatorT=true}
110 | })
111 |
112 | -- add documentation footer
113 | emitter:add[[
114 |
115 | C Libraries >>
116 | ]]
117 |
118 | -- generate the documentation file
119 | local docfile = neluadir..'/docs/pages/libraries.md'
120 | nldoc.write_file(docfile, emitter:generate())
121 | print('generated', docfile)
122 | end
123 |
124 | -- Generate C libraries documentation.
125 | local function gen_clib(neluadir)
126 | local emitter = nldoc.Emitter.create()
127 |
128 | -- add documentation heading
129 | emitter:add[[
130 | ---
131 | layout: docs
132 | title: C libraries
133 | permalink: /clibraries/
134 | categories: docs toc
135 | toc: true
136 | order: 5
137 | ---
138 |
139 | Nelua provides bindings for common C functions according to the C11 specification.
140 | This is a list of all imported C libraries.
141 | {: .lead}
142 |
143 | To use a C library, use `require 'C.stdlib'`{:.language-nelua} for example.
144 | {: .callout.callout-info}
145 |
146 | Nelua encourages you to use its standard libraries instead of the C APIs,
147 | these are provided just as convenience for interoperating with C libraries.
148 | {:.alert.alert-info}
149 |
150 | ]]
151 |
152 | -- parse and generate documentation for many sources
153 | nldoc.generate_doc(emitter, neluadir..'/lib/C/arg.nelua', {name='C.arg',include_names={C=true}})
154 | nldoc.generate_doc(emitter, neluadir..'/lib/C/ctype.nelua', {name='C.ctype',include_names={C=true}})
155 | nldoc.generate_doc(emitter, neluadir..'/lib/C/errno.nelua', {name='C.errno',include_names={C=true}})
156 | nldoc.generate_doc(emitter, neluadir..'/lib/C/locale.nelua', {name='C.locale',include_names={C=true}})
157 | nldoc.generate_doc(emitter, neluadir..'/lib/C/math.nelua', {name='C.math',include_names={C=true}})
158 | nldoc.generate_doc(emitter, neluadir..'/lib/C/signal.nelua', {name='C.signal',include_names={C=true}})
159 | nldoc.generate_doc(emitter, neluadir..'/lib/C/stdarg.nelua', {name='C.stdarg',include_names={C=true}})
160 | nldoc.generate_doc(emitter, neluadir..'/lib/C/stdio.nelua', {name='C.stdio',include_names={C=true}})
161 | nldoc.generate_doc(emitter, neluadir..'/lib/C/stdlib.nelua', {name='C.stdlib',include_names={C=true}})
162 | nldoc.generate_doc(emitter, neluadir..'/lib/C/string.nelua', {name='C.string',include_names={C=true}})
163 | nldoc.generate_doc(emitter, neluadir..'/lib/C/time.nelua', {name='C.time',include_names={C=true}})
164 | nldoc.generate_doc(emitter, neluadir..'/lib/C/threads.nelua', {name='C.threads',include_names={C=true}})
165 | nldoc.generate_doc(emitter, neluadir..'/lib/C/stdatomic.nelua', {name='C.stdatomic',include_names={C=true}})
166 |
167 | -- generate the documentation file
168 | local docfile = neluadir..'/docs/pages/clibraries.md'
169 | nldoc.write_file(docfile, emitter:generate())
170 | print('generated', docfile)
171 | end
172 |
173 | if not arg[1] then
174 | print 'Please pass the Nelua source directory as the first argument.'
175 | os.exit(1)
176 | end
177 |
178 | -- Generate documentation for Nelua libraries.
179 | local neluadir = arg[1]
180 | gen_stdlib(neluadir)
181 | gen_clib(neluadir)
182 |
--------------------------------------------------------------------------------
/nldoc.lua:
--------------------------------------------------------------------------------
1 | -- LPegRex is the only external dependency.
2 | local lpegrex = require 'lpegrex'
3 |
4 | --------------------------------------------------------------------------------
5 | -- Utilities.
6 |
7 | -- Walk iterator, used by `walk_nodes`.
8 | local function walk_nodes_iterator(node, parent, parentindex)
9 | if node.tag then
10 | coroutine.yield(node, parent, parentindex)
11 | end
12 | for i=1,#node do
13 | local v = node[i]
14 | if type(v) == 'table' then
15 | walk_nodes_iterator(v, node, i)
16 | end
17 | end
18 | end
19 |
20 | -- Walk all nodes from an AST.
21 | local function walk_nodes(ast)
22 | return coroutine.wrap(walk_nodes_iterator), ast
23 | end
24 |
25 | -- Read a file and return its contents as string.
26 | local function read_file(filename)
27 | local file, err = assert(io.open(filename))
28 | local contents = file:read("*a")
29 | file:close()
30 | return contents
31 | end
32 |
33 | -- Write contents to a file.
34 | local function write_file(filename, contents)
35 | local file, err = assert(io.open(filename,'w'))
36 | assert(file:write(contents))
37 | assert(file:close())
38 | return true
39 | end
40 |
41 | --------------------------------------------------------------------------------
42 | -- Parser class, used to parse source codes.
43 |
44 | local Parser = {}
45 | local Parser_mt = {__index = Parser}
46 |
47 | -- Creates a new Parser.
48 | function Parser.create(grammar, comments_grammar, errors, defs)
49 | local source_patt = lpegrex.compile(grammar, defs)
50 | local comments_patt = lpegrex.compile(comments_grammar, defs)
51 | return setmetatable({
52 | source_patt = source_patt,
53 | comments_patt = comments_patt,
54 | errors = errors,
55 | }, Parser_mt)
56 | end
57 |
58 | -- Pretty print a parsing syntax error.
59 | local function parse_error(self, source, name, errlabel, errpos)
60 | name = name or ''
61 | local lineno, colno, line = lpegrex.calcline(source, errpos)
62 | local colhelp = string.rep(' ', colno-1)..'^'
63 | local errmsg = self.errors[errlabel] or errlabel
64 | error('syntax error: '..name..':'..lineno..':'..colno..': '..errmsg..
65 | '\n'..line..'\n'..colhelp)
66 | end
67 |
68 | -- Parse source into an AST.
69 | function Parser:parse(source, name)
70 | local ast, errlabel, errpos = self.source_patt:match(source)
71 | if not ast then
72 | parse_error(self, source, name, errlabel, errpos)
73 | end
74 | return ast
75 | end
76 |
77 | -- Remove the common left indentation from a text.
78 | local function trim_identantion(text)
79 | local initcol, ss = 0x7fffffff, {}
80 | -- find common indentation
81 | for line in text:gmatch('([^\n]*)\n?') do
82 | if #line > 0 then
83 | local charcol = line:find('[^%s]')
84 | if charcol then
85 | initcol = math.min(initcol, charcol)
86 | end
87 | end
88 | ss[#ss+1] = line
89 | end
90 | -- remove common indentation and trim right
91 | for i=1,#ss do
92 | ss[i] = ss[i]:sub(initcol):gsub('%s*$', '')
93 | end
94 | return table.concat(ss, '\n')
95 | end
96 |
97 | -- Trim spaces from comments.
98 | local function trim_comments(comments)
99 | for i=1,#comments do
100 | local comment = comments[i]
101 | comment.text = trim_identantion(comment.text)
102 | end
103 | -- remove empty comments
104 | local i = 1
105 | while i <= #comments do
106 | local comment = comments[i]
107 | if comment.text == '' then
108 | table.remove(comments, i)
109 | else
110 | i = i + 1
111 | end
112 | end
113 | end
114 |
115 | -- Calculate line and column numbers for each comment.
116 | local function calc_comments(comments, source)
117 | -- gather line number and calc comment texts
118 | for i=1,#comments do
119 | local comment = comments[i]
120 | comment.text, comment[1] = comment[1], nil
121 | -- calculate line numbers
122 | comment.lineno, comment.colno = lpegrex.calcline(source, comment.pos)
123 | comment.endlineno, comment.endcolno = lpegrex.calcline(source, comment.endpos-1)
124 | end
125 | end
126 |
127 | -- Combine neighbor comments.
128 | local function combine_comments(comments, source)
129 | local i = 1
130 | while i < #comments do
131 | local c1 = comments[i]
132 | local c2 = comments[i+1]
133 | local inbetween = source:sub(c1.endpos, c2.pos-1)
134 | if c1.colno == c2.colno and
135 | c1.endlineno+1 == c2.lineno and
136 | c1.tag == c2.tag and
137 | inbetween:match('^%s*$') then
138 | comments[i] = {
139 | tag = c1.tag,
140 | text = c1.text..'\n'..c2.text,
141 | pos = c1.pos,
142 | endpos = c2.endpos,
143 | lineno = c1.lineno,
144 | endlineno = c2.endlineno,
145 | colno = c1.colno,
146 | endcolno = c2.endcolno,
147 | combined = true,
148 | }
149 | table.remove(comments, i+1)
150 | else
151 | i = i + 1
152 | end
153 | end
154 | return comments
155 | end
156 |
157 | -- Convert a list of comments to a map of line number and comment.
158 | local function make_comments_by_line(comments)
159 | local comments_by_line = {}
160 | for i=1,#comments do
161 | local comment = comments[i]
162 | for lineno=comment.lineno,comment.endlineno do
163 | assert(not comments_by_line[lineno])
164 | comments_by_line[lineno] = comment
165 | end
166 | end
167 | return comments_by_line
168 | end
169 |
170 | -- Parse all comments from source into a map and a list of comments.
171 | function Parser:parse_comments(source, name)
172 | local comments, errlabel, errpos = self.comments_patt:match(source)
173 | if not comments then
174 | parse_error(self, source, name, errlabel, errpos)
175 | end
176 | calc_comments(comments, source)
177 | combine_comments(comments, source)
178 | trim_comments(comments)
179 | local comments_by_line = make_comments_by_line(comments)
180 | return comments_by_line, comments
181 | end
182 |
183 | --------------------------------------------------------------------------------
184 | -- Nelua Parser
185 |
186 | -- Complete syntax grammar of Nelua defined in a single PEG.
187 | local syntax_grammar = [==[
188 | chunk <-- SHEBANG? SKIP Block (!.)^UnexpectedSyntax
189 |
190 | Block <==(local / global /
191 | FuncDef / Return / In /
192 | Do / Defer /
193 | If / Switch /
194 | for /
195 | While / Repeat /
196 | Break / Continue /
197 | Goto / Label /
198 | Preprocess /
199 | Assign / call /
200 | `;`)*
201 |
202 | -- Statements
203 | Label <== `::` @name @`::`
204 | Return <== `return` (expr (`,` @expr)*)?
205 | In <== `in` @expr
206 | Break <== `break`
207 | Continue <== `continue`
208 | Goto <== `goto` @name
209 | Do <== `do` Block @`end`
210 | Defer <== `defer` Block @`end`
211 | While <== `while` @expr @`do` Block @`end`
212 | Repeat <== `repeat` Block @`until` @expr
213 | If <== `if` ifs (`else` Block)? @`end`
214 | ifs <-| @expr @`then` Block (`elseif` @expr @`then` Block)*
215 | Switch <== `switch` @expr `do`? @cases (`else` Block)? @`end`
216 | cases <-| (`case` @exprs @`then` Block)+
217 | for <-- `for` (ForNum / ForIn)
218 | ForNum <== iddecl `=` @expr @`,` forcmp~? @expr (`,` @expr)~? @`do` Block @`end`
219 | ForIn <== @iddecls @`in` @exprs @`do` Block @`end`
220 | local <-- `local` (localfunc / localvar)
221 | global <-- `global` (globalfunc / globalvar)
222 | localfunc : FuncDef <== `function` $'local' @namedecl @funcbody
223 | globalfunc : FuncDef <== `function` $'global' @namedecl @funcbody
224 | FuncDef <== `function` $false @funcname @funcbody
225 | funcbody <-- `(` funcargs @`)` (`:` @funcrets)~? annots~? Block @`end`
226 | localvar : VarDecl <== $'local' @suffixeddecls (`=` @exprs)?
227 | globalvar : VarDecl <== $'global' @suffixeddecls (`=` @exprs)?
228 | Assign <== vars `=` @exprs
229 | Preprocess <== PREPROCESS SKIP
230 |
231 | -- Simple expressions
232 | Number <== NUMBER name? SKIP
233 | String <== STRING name? SKIP
234 | Boolean <== `true`->totrue / `false`->tofalse
235 | Nilptr <== `nilptr`
236 | Nil <== `nil`
237 | Varargs <== `...`
238 | Id <== name
239 | IdDecl <== name (`:` @typeexpr)~? annots?
240 | typeddecl : IdDecl <== name `:` @typeexpr annots?
241 | suffixeddecl : IdDecl <== (idsuffixed / name) (`:` @typeexpr)~? annots?
242 | suffixeddeclexpr <-- suffixeddecl / PreprocessExpr
243 | namedecl : IdDecl <== name
244 | Function <== `function` @funcbody
245 | InitList <== `{` (field (fieldsep field)* fieldsep?)? @`}`
246 | field <-- Pair / expr
247 | Paren <== `(` @expr @`)`
248 | DoExpr <== `(` `do` Block @`end` @`)`
249 | Type <== `@` @typeexpr
250 |
251 | Pair <== `[` @expr @`]` @`=` @expr / name `=` @expr / `=` @id -> pair_sugar
252 | Annotation <== name annotargs?
253 |
254 | -- Preprocessor replaceable nodes
255 | PreprocessExpr <== `#[` {@expr->0} @`]#`
256 | PreprocessName <== `#|` {@expr->0} @`|#`
257 | ppcallprim : PreprocessExpr <== {NAME->0} `!` &callsuffix
258 |
259 | -- Suffixes
260 | Call <== callargs
261 | CallMethod <== `:` @name @callargs
262 | DotIndex <== `.` @name
263 | ColonIndex <== `:` @name
264 | KeyIndex <== `[` @expr @`]`
265 |
266 | indexsuffix <-- DotIndex / KeyIndex
267 | callsuffix <-- Call / CallMethod
268 |
269 | var <-- (exprprim (indexsuffix / callsuffix+ indexsuffix)+)~>rfoldright /
270 | id / deref
271 | call <-- (exprprim (callsuffix / indexsuffix+ callsuffix)+)~>rfoldright
272 | exprsuffixed <-- (exprprim (indexsuffix / callsuffix)*)~>rfoldright
273 | idsuffixed <-- (id DotIndex+)~>rfoldright
274 | funcname <-- (id DotIndex* ColonIndex?)~>rfoldright
275 |
276 | -- Lists
277 | callargs <-| `(` (expr (`,` @expr)*)? @`)` / InitList / String
278 | annotargs <-| `(` (expr (`,` @expr)*)? @`)` / InitList / String / PreprocessExpr
279 | iddecls <-| iddecl (`,` @iddecl)*
280 | funcargs <-| (iddecl (`,` iddecl)* (`,` VarargsType)? / VarargsType)?
281 | suffixeddecls <-| suffixeddeclexpr (`,` @suffixeddeclexpr)*
282 | exprs <-| expr (`,` @expr)*
283 | annots <-| `<` @Annotation (`,` @Annotation)* @`>`
284 | funcrets <-| `(` typeexpr (`,` @typeexpr)* @`)` / typeexpr
285 | vars <-| var (`,` @var)*
286 |
287 | -- Expression operators
288 | opor : BinaryOp <== `or`->'or' @exprand
289 | opand : BinaryOp <== `and`->'and' @exprcmp
290 | opcmp : BinaryOp <== cmp @exprbor
291 | opbor : BinaryOp <== `|`->'bor' @exprbxor
292 | opbxor : BinaryOp <== `~`->'bxor' @exprband
293 | opband : BinaryOp <== `&`->'band' @exprbshift
294 | opbshift : BinaryOp <== (`<<`->'shl' / `>>>`->'asr' / `>>`->'shr') @exprconcat
295 | opconcat : BinaryOp <== `..`->'concat' @exprconcat
296 | oparit : BinaryOp <== (`+`->'add' / `-`->'sub') @exprfact
297 | opfact : BinaryOp <== (`*`->'mul' / `///`->'tdiv' / `//`->'idiv' / `/`->'div' /
298 | `%%%`->'tmod' / `%`->'mod') @exprunary
299 | oppow : BinaryOp <== `^`->'pow' @exprunary
300 | opunary : UnaryOp <== (`not`->'not' / `-`->'unm' / `#`->'len' /
301 | `~`->'bnot' / `&`->'ref' / `$`->'deref') @exprunary
302 | deref : UnaryOp <== `$`->'deref' @exprunary
303 |
304 | -- Expressions
305 | expr <-- expror
306 | expror <-- (exprand opor*)~>foldleft
307 | exprand <-- (exprcmp opand*)~>foldleft
308 | exprcmp <-- (exprbor opcmp*)~>foldleft
309 | exprbor <-- (exprbxor opbor*)~>foldleft
310 | exprbxor <-- (exprband opbxor*)~>foldleft
311 | exprband <-- (exprbshift opband*)~>foldleft
312 | exprbshift <-- (exprconcat opbshift*)~>foldleft
313 | exprconcat <-- (exprarit opconcat*)~>foldleft
314 | exprarit <-- (exprfact oparit*)~>foldleft
315 | exprfact <-- (exprunary opfact*)~>foldleft
316 | exprunary <-- opunary / exprpow
317 | exprpow <-- (exprsimple oppow*)~>foldleft
318 | exprsimple <-- Number / String / Type / InitList / Boolean /
319 | Function / Nilptr / Nil / Varargs / exprsuffixed
320 | exprprim <-- ppcallprim / id / DoExpr / Paren
321 |
322 | -- Types
323 | RecordType <== 'record' WORDSKIP @`{` (RecordField (fieldsep RecordField)* fieldsep?)? @`}`
324 | UnionType <== 'union' WORDSKIP @`{` (UnionField (fieldsep UnionField)* fieldsep?)? @`}`
325 | EnumType <== 'enum' WORDSKIP (`(` @typeexpr @`)`)~? @`{` @enumfields @`}`
326 | FuncType <== 'function' WORDSKIP @`(` functypeargs @`)`(`:` @funcrets)?
327 | ArrayType <== 'array' WORDSKIP @`(` @typeexpr (`,` @expr)? @`)`
328 | PointerType <== 'pointer' WORDSKIP (`(` @typeexpr @`)`)?
329 | VariantType <== 'variant' WORDSKIP `(` @typearg (`,` @typearg)* @`)`
330 | VarargsType <== `...` (`:` @name)?
331 |
332 | RecordField <== name @`:` @typeexpr
333 | UnionField <== name `:` @typeexpr / $false typeexpr
334 | EnumField <== name (`=` @expr)?
335 |
336 | -- Type lists
337 | enumfields <-| EnumField (fieldsep EnumField)* fieldsep?
338 | functypeargs <-| (functypearg (`,` functypearg)* (`,` VarargsType)? / VarargsType)?
339 | typeargs <-| typearg (`,` @typearg)*
340 |
341 | functypearg <-- typeddecl / typeexpr
342 | typearg <-- typeexpr / `(` expr @`)` / expr
343 |
344 | -- Type expression operators
345 | typeopptr : PointerType <== `*`
346 | typeopopt : OptionalType <== `?`
347 | typeoparr : ArrayType <== `[` expr? @`]`
348 | typeopvar : VariantType <== typevaris
349 | typeopgen : GenericType <== `(` @typeargs @`)` / &`{` {| InitList |}
350 | typevaris : VariantType <== `|` @typeexprunary (`|` @typeexprunary)*
351 |
352 | typeopunary <-- typeopptr / typeopopt / typeoparr
353 |
354 | -- Type expressions
355 | typeexpr <-- (typeexprunary typevaris?)~>foldleft
356 | typeexprunary <-- (typeopunary* typexprsimple)->rfoldleft
357 | typexprsimple <-- RecordType / UnionType / EnumType / FuncType / ArrayType / PointerType /
358 | VariantType / (typeexprprim typeopgen?)~>foldleft
359 | typeexprprim <-- idsuffixed / id
360 |
361 | -- Common rules
362 | name <-- NAME SKIP / PreprocessName
363 | id <-- Id / PreprocessExpr
364 | iddecl <-- IdDecl / PreprocessExpr
365 | cmp <-- `==`->'eq' / forcmp
366 | forcmp <-- `~=`->'ne' / `<=`->'le' / `<`->'lt' / `>=`->'ge' / `>`->'gt'
367 | fieldsep <-- `,` / `;`
368 |
369 | -- String
370 | STRING <-- STRING_SHRT / STRING_LONG
371 | STRING_LONG <-- {:LONG_OPEN {LONG_CONTENT} @LONG_CLOSE:}
372 | STRING_SHRT <-- {:QUOTE_OPEN {~QUOTE_CONTENT~} @QUOTE_CLOSE:}
373 | QUOTE_OPEN <-- {:qe: ['"] :}
374 | QUOTE_CONTENT <-- (ESCAPE_SEQ / !(QUOTE_CLOSE / LINEBREAK) .)*
375 | QUOTE_CLOSE <-- =qe
376 | ESCAPE_SEQ <-- '\'->'' @ESCAPE
377 | ESCAPE <-- [\'"] /
378 | ('n' $10 / 't' $9 / 'r' $13 / 'a' $7 / 'b' $8 / 'v' $11 / 'f' $12)->tochar /
379 | ('x' {HEX_DIGIT^2} $16)->tochar /
380 | ('u' '{' {HEX_DIGIT^+1} '}' $16)->toutf8char /
381 | ('z' SPACE*)->'' /
382 | (DEC_DIGIT DEC_DIGIT^-1 !DEC_DIGIT / [012] DEC_DIGIT^2)->tochar /
383 | (LINEBREAK $10)->tochar
384 |
385 | -- Number
386 | NUMBER <-- {HEX_NUMBER / BIN_NUMBER / DEC_NUMBER}
387 | HEX_NUMBER <-- '0' [xX] @HEX_PREFIX ([pP] @EXP_DIGITS)?
388 | BIN_NUMBER <-- '0' [bB] @BIN_PREFIX ([pP] @EXP_DIGITS)?
389 | DEC_NUMBER <-- DEC_PREFIX ([eE] @EXP_DIGITS)?
390 | HEX_PREFIX <-- HEX_DIGIT+ ('.' HEX_DIGIT*)? / '.' HEX_DIGIT+
391 | BIN_PREFIX <-- BIN_DIGITS ('.' BIN_DIGITS?)? / '.' BIN_DIGITS
392 | DEC_PREFIX <-- DEC_DIGIT+ ('.' DEC_DIGIT*)? / '.' DEC_DIGIT+
393 | EXP_DIGITS <-- [+-]? DEC_DIGIT+
394 |
395 | -- Comments
396 | COMMENT <-- '--' (COMMENT_LONG / COMMENT_SHRT)
397 | COMMENT_LONG <-- (LONG_OPEN LONG_CONTENT @LONG_CLOSE)->0
398 | COMMENT_SHRT <-- (!LINEBREAK .)*
399 |
400 | -- Preprocess
401 | PREPROCESS <-- '##' (PREPROCESS_LONG / PREPROCESS_SHRT)
402 | PREPROCESS_LONG <-- {:'[' {:eq: '='*:} '[' {LONG_CONTENT} @LONG_CLOSE:}
403 | PREPROCESS_SHRT <-- {(!LINEBREAK .)*} LINEBREAK?
404 |
405 | -- Long (used by string, comment and preprocess)
406 | LONG_CONTENT <-- (!LONG_CLOSE .)*
407 | LONG_OPEN <-- '[' {:eq: '='*:} '[' LINEBREAK?
408 | LONG_CLOSE <-- ']' =eq ']'
409 |
410 | NAME <-- !KEYWORD {NAME_PREFIX NAME_SUFFIX?}
411 | NAME_PREFIX <-- [_a-zA-Z%utf8seq]
412 | NAME_SUFFIX <-- [_a-zA-Z0-9%utf8seq]+
413 |
414 | -- Miscellaneous
415 | SHEBANG <-- '#!' (!LINEBREAK .)* LINEBREAK?
416 | SKIP <-- (SPACE+ / COMMENT)*
417 | WORDSKIP <-- !NAME_SUFFIX SKIP
418 | LINEBREAK <-- %cn %cr / %cr %cn / %cn / %cr
419 | SPACE <-- %sp
420 | HEX_DIGIT <-- [0-9a-fA-F]
421 | BIN_DIGITS <-- [01]+ !DEC_DIGIT
422 | DEC_DIGIT <-- [0-9]
423 | EXTRA_TOKENS <-- `[[` `[=` `--` `##` -- Force defining these tokens.
424 | ]==]
425 |
426 | -- Grammar that parses only comments, ignoring the rest.
427 | local comments_grammar = [==[
428 | comments <-| (LongComment / ShortComment / .)*
429 |
430 | LongComment <== '--' LONG_OPEN LINEBREAK_SKIP {LONG_CONTENT} @LONG_CLOSE
431 | ShortComment <== '--' SHORT_SKIP {(!(SHORT_SKIP LINEBREAK) .)*} SHORT_SKIP
432 |
433 | SHORT_SKIP <-- (SPACE / '-')*
434 |
435 | LONG_CONTENT <-- (!LONG_CLOSE .)*
436 | LONG_OPEN <-- '[' {:eq: '='*:} '['
437 | LONG_CLOSE <-- SKIP ']' =eq ']'
438 |
439 | SKIP <-- %sp*
440 | LINEBREAK_SKIP <-- (SPACE* LINEBREAK)?
441 | SPACE <-- [ %ct%cf%cv]
442 | LINEBREAK <-- %cn %cr / %cr %cn / %cn / %cr
443 | ]==]
444 |
-- List of syntax errors.
-- Maps an error label raised by the grammar to a human readable message
-- shown to the user when parsing fails.
local syntax_errors = {
  -- statement block keywords
  ["Expected_do"] = "expected `do` keyword to begin a statement block",
  ["Expected_then"] = "expected `then` keyword to begin a statement block",
  ["Expected_end"] = "expected `end` keyword to close a statement block",
  ["Expected_until"] = "expected `until` keyword to close a `repeat` statement",
  ["Expected_cases"] = "expected `case` keyword in `switch` statement",
  ["Expected_in"] = "expected `in` keyword in `for` statement",
  -- expressions
  ["Expected_Annotation"] = "expected an annotation expression",
  ["Expected_expr"] = "expected an expression",
  ["Expected_exprand"] = "expected an expression after operator",
  ["Expected_exprcmp"] = "expected an expression after operator",
  ["Expected_exprbor"] = "expected an expression after operator",
  ["Expected_exprbxor"] = "expected an expression after operator",
  ["Expected_exprband"] = "expected an expression after operator",
  ["Expected_exprbshift"] = "expected an expression after operator",
  ["Expected_exprconcat"] = "expected an expression after operator",
  ["Expected_exprfact"] = "expected an expression after operator",
  ["Expected_exprunary"] = "expected an expression after operator",
  -- identifiers and declarations
  ["Expected_name"] = "expected an identifier name",
  ["Expected_namedecl"] = "expected an identifier name",
  ["Expected_Id"] = "expected an identifier name",
  ["Expected_IdDecl"] = "expected an identifier declaration",
  ["Expected_typearg"] = "expected an argument in type expression",
  ["Expected_typeexpr"] = "expected a type expression",
  ["Expected_typeexprunary"] = "expected a type expression",
  ["Expected_funcbody"] = "expected function body",
  ["Expected_funcrets"] = "expected function return types",
  ["Expected_funcname"] = "expected a function name",
  ["Expected_globaldecl"] = "expected a global identifier declaration",
  ["Expected_var"] = "expected a variable",
  ["Expected_enumfields"] = "expected a field in `enum` type",
  ["Expected_typeargs"] = "expected arguments in type expression",
  ["Expected_callargs"] = "expected call arguments",
  ["Expected_exprs"] = "expected expressions",
  ["Expected_globaldecls"] = "expected global identifiers declaration",
  ["Expected_iddecls"] = "expected identifiers declaration",
  -- punctuation
  ["Expected_("] = "expected parenthesis `(`",
  ["Expected_,"] = "expected comma `,`",
  ["Expected_:"] = "expected colon `:`",
  ["Expected_="] = "expected equals `=`",
  ["Expected_{"] = "expected curly brace `{`",
  ["Expected_)"] = "unclosed parenthesis, did you forget a `)`?",
  ["Expected_::"] = "unclosed label, did you forget a `::`?",
  ["Expected_>"] = "unclosed angle bracket, did you forget a `>`?",
  ["Expected_]"] = "unclosed square bracket, did you forget a `]`?",
  ["Expected_}"] = "unclosed curly brace, did you forget a `}`?",
  ["Expected_]#"] = "unclosed preprocess expression, did you forget a `]#`?",
  ["Expected_|#"] = "unclosed preprocess name, did you forget a `|#`?",
  -- lexer level errors
  ["Expected_LONG_CLOSE"] = "unclosed long, did you forget a `]]`?",
  ["Expected_QUOTE_CLOSE"] = "unclosed string, did you forget a quote?",
  ["Expected_ESCAPE"] = "malformed escape sequence",
  ["Expected_BIN_PREFIX"] = "malformed binary number",
  ["Expected_EXP_DIGITS"] = "malformed exponential number",
  ["Expected_HEX_PREFIX"] = "malformed hexadecimal number",
  -- fallback error
  ["UnexpectedSyntax"] = "unexpected syntax",
}
502 |
-- Extra definitions handed to the parser grammars.
local defs = {}

-- Auxiliary function for 'Pair' syntax sugar: expands an Id node into
-- its name followed by the node itself.
function defs.pair_sugar(idnode)
  local name = idnode[1]
  return name, idnode
end
509 |
-- Shared parser instance, built from the grammars, error messages and
-- auxiliary definitions declared above.
local parser = Parser.create(syntax_grammar, comments_grammar, syntax_errors, defs)
511 |
512 | --------------------------------------------------------------------------------
513 | -- Emitter class, used to emit large texts.
514 |
-- The emitter class: accumulates text fragments in its array part and
-- concatenates them on demand, avoiding O(n^2) string concatenation.
local Emitter = {}
Emitter.__index = Emitter

-- Returns a new, empty emitter.
function Emitter.create()
  local emitter = {}
  return setmetatable(emitter, Emitter)
end

-- Appends the text fragment `s`.
function Emitter:add(s)
  table.insert(self, s)
end

-- Joins all appended fragments into a single string.
function Emitter:generate()
  return table.concat(self, '')
end
533 |
534 | --------------------------------------------------------------------------------
535 | -- Generator class, used to generate the documentation.
536 |
-- The generator class.
local Generator = {}
local Generator_mt = {__index = Generator}

-- Default symbol template, rendered once per documented symbol.
-- `$(lang)` selects the code fence language, `$(code)` is the trimmed
-- declaration and `$(text)` its associated comment.
local symbol_template = [[
### $(name)

```$(lang)
$(code)
```

$(text)

]]

-- Default heading template, rendered once from the file's top comment.
local top_template = [[
## $(name)

$(text)

]]

-- Default bottom template, emitted once at the very end of the document.
local bottom_template = [[
---
]]
564 |
-- Create a new generator from visitors.
-- `visitors` maps AST node tags to emitting functions and `lang` is the
-- language name stored on the generator (used for code fences).
function Generator.create(visitors, lang)
  local generator = {
    top_template = top_template,
    symbol_template = symbol_template,
    bottom_template = bottom_template,
    lang = lang,
    visitors = visitors,
  }
  return setmetatable(generator, Generator_mt)
end
575 |
-- Emit documentation for a parsed source into `emitter`.
-- `options` may override `include_names` (extra symbol names to document)
-- and the `top_template`/`symbol_template`/`bottom_template` used for
-- formatting. Returns the emitter (a fresh one is created when omitted).
function Generator:emit(source, filename, ast, comments, options, emitter)
  -- setup options, falling back to the templates stored on the generator
  options = options or {}
  options.include_names = options.include_names or {}
  options.top_template = options.top_template or self.top_template
  options.symbol_template = options.symbol_template or self.symbol_template
  -- fix: honor the generator's stored bottom template (and allow overriding
  -- it via options), instead of always using the module-level default like
  -- the other two templates already do
  options.bottom_template = options.bottom_template or self.bottom_template
  -- create emitter
  if not emitter then
    emitter = Emitter.create()
  end
  -- create the context shared by all visitors
  local context = {
    ast = ast,
    source = source,
    filename = filename,
    options = options,
    comments = comments,
    lang = self.lang,
    symbols = {},
  }
  local visitors = self.visitors
  -- emit top heading from the first comment found in the file
  local topcomment = comments[1]
  if topcomment and visitors.TopComment then
    visitors.TopComment(context, topcomment, emitter)
  end
  -- visit all nodes, dispatching on each node's tag
  for node, parent in walk_nodes(ast) do
    node.parent = parent
    local visit = visitors[node.tag]
    if visit then
      visit(context, node, emitter)
    end
  end
  emitter:add(options.bottom_template)
  return emitter
end
614 |
615 | --------------------------------------------------------------------------------
616 | -- Nelua documentation generator
617 |
-- Proxy table used by the substitution pattern to look up variables;
-- its metatable's __index is re-pointed at the caller's variables table
-- on every `template_substitute` call.
local substitute_vars_mt = {}
local substitute_vars = setmetatable({}, substitute_vars_mt)
local substitute_defs = {}
-- Converts a captured variable name into its textual value ('' when unset).
function substitute_defs.to_var(k)
  local v = substitute_vars[k]
  return v ~= nil and tostring(v) or ''
end
-- Pattern replacing every '$(name)' occurrence via `to_var`.
local substitute_patt = lpegrex.compile([[
pat <- {~ (var / .)* ~}
var <- ('$(' {[_%a]+} ')') -> to_var
]], substitute_defs)

-- Substitute keywords between '$()' from a text using values from a table.
-- NOTE(review): not reentrant — `substitute_vars_mt.__index` is mutated
-- globally for the duration of the match; fine for this single-threaded tool.
local function template_substitute(format, vars)
  substitute_vars_mt.__index = vars
  return substitute_patt:match(format)
end
635 |
-- Strip comments hanging on the right side of `text`.
local function rtrim_comments(text)
  -- drop a trailing long comment opener (`--[[`, `--[=[`, ...) and
  -- everything after it
  text = text:gsub('%s*%-%-%[=*%[.*$', '')
  -- repeatedly drop trailing short comments, one per iteration
  while true do
    local count
    text, count = text:gsub('%s%-%-[^\n]*%s*$', '')
    if count == 0 then break end
  end
  return text
end

-- Clean a declaration chunk: strip comments, trailing blanks and
-- trailing `<...>` annotations.
local function trimdecl(text)
  local cleaned = rtrim_comments(text)
  cleaned = cleaned:gsub('%s*$', '')     -- trailing whitespace
  cleaned = cleaned:gsub('%s*%b<>$', '') -- trailing annotations
  return cleaned
end

-- Clean a definition chunk: strip comments and trailing blanks.
local function trimdef(text)
  local cleaned = rtrim_comments(text)
  return (cleaned:gsub('%s*$', ''))
end
660 |
-- Decide whether `symbol` should be documented and, if so, record it in
-- the context and emit it. Returns true when the symbol was emitted.
local function document_symbol(context, symbol, emitter)
  local name = symbol.name
  if not name then
    return false -- likely a preprocessor name, skip it
  end
  local symbols = context.symbols
  local inclnames = context.options.include_names
  -- a method/field symbol is only documented when its class already is
  local classname = name:match('(.*)[.:][_%w]+$')
  if classname and not (symbols[classname] or inclnames[classname]) then
    return false
  end
  -- skip locals and non top-scope symbols, unless explicitly included
  if not symbol.topscope or (symbol.declscope == 'local' and not inclnames[name]) then
    return false
  end
  symbols[#symbols+1] = symbol
  symbols[name] = true
  emitter:add(template_substitute(context.options.symbol_template, symbol))
  return true
end
682 |
-- Generator visitors, dispatched by AST node tag.
local visitors = {}

-- Visit the top-most comment, emitting the document heading.
function visitors.TopComment(context, comment, emitter)
  local filename = context.filename
  local name = context.options.name
  if not name then
    -- derive the module name from the file name, e.g. 'dir/foo.nelua' -> 'foo'
    name = filename:match('([_%w]+)%.[_%w]+$') or filename
    context.name = name
  end
  emitter:add(template_substitute(context.options.top_template, {
    name = name,
    filename = filename,
    text = comment.text,
  }))
end
701 |
-- Visit function definitions, gathering the declaration text and the
-- comment ending on the line just above it.
function visitors.FuncDef(context, node, emitter)
  local declscope, blocknode = node[1], node[6]
  local lineno = lpegrex.calcline(context.source, node.pos)
  -- source text from the definition start up to (excluding) the body block
  local chunk = context.source:sub(node.pos, blocknode.pos-1)
  local funcname = chunk:match('function%s+([^%(]*)%s*%(')
  local comment = context.comments[lineno-1]
  local decl = trimdecl(chunk)
  local code = decl
  if declscope then
    code = declscope..' '..decl
  end
  document_symbol(context, {
    tag = node.tag,
    name = funcname,
    code = code,
    comment = comment,
    text = comment and comment.text,
    lineno = lineno,
    topscope = node.parent == context.ast,
    declscope = declscope,
    node = node,
    lang = context.lang,
  }, emitter)
end
726 |
-- Visit variable declarations.
-- A declaration may introduce several variables at once; each becomes a
-- candidate symbol. When the assigned value is a type expression (starts
-- with '@', e.g. a record or enum), it is appended to the displayed code.
function visitors.VarDecl(context, node, emitter)
  local declscope = node[1]
  local varnodes = node[2]
  local valnodes = node[3]
  for i,varnode in ipairs(varnodes) do
    local valnode = valnodes and valnodes[i]
    -- locate the declaration in the source to recover its original text
    local lineno = lpegrex.calcline(context.source, varnode.pos)
    local chunk = context.source:sub(varnode.pos, varnode.endpos-1)
    -- the documentation comment is the one ending on the previous line
    local comment = context.comments[lineno-1]
    local decl = trimdecl(chunk)
    local name = decl:match('^[_%.%w]+')
    local code = declscope and declscope..' '..decl or decl
    if valnode then
      -- widen the value span over all child nodes, since the node's own
      -- pos/endpos may not cover the whole expression text
      local valpos, valendpos = valnode.pos, valnode.endpos
      for vnode in walk_nodes(valnode) do
        valpos = math.min(valpos, vnode.pos)
        valendpos = math.max(valendpos, vnode.endpos)
      end
      local def = trimdef(context.source:sub(valpos, valendpos-1))
      -- only show values that are type expressions
      if def:find('^@') then
        code = code..' = '..def
      end
    end
    local symbol = {
      tag = node.tag,
      name = name,
      code = code,
      comment = comment,
      text = comment and comment.text,
      lineno = lineno,
      topscope = node.parent == context.ast, -- true for top-level declarations
      declscope = declscope,
      node = node,
      lang = context.lang,
    }
    document_symbol(context, symbol, emitter)
  end
end
766 |
-- Generator instance configured for Nelua sources.
local generator = Generator.create(visitors, 'nelua')

--[[
Generate documentation from a source file.
All the code resumes to this single function.
]]
local function generate_doc(emitter, filename, options)
  local content = read_file(filename)
  -- parse twice: once for the AST, once for the comments
  local ast = parser:parse(content, filename)
  local comments = parser:parse_comments(content, filename)
  generator:emit(content, filename, ast, comments, options, emitter)
end
779 |
-- Public module table: internals are exposed on purpose so users can
-- customize parsing, templates and generation.
local nldoc = {
  -- Utilities
  walk_nodes = walk_nodes,
  read_file = read_file,
  write_file = write_file,

  -- Parser
  Parser = Parser,
  syntax_grammar = syntax_grammar,
  comments_grammar = comments_grammar,
  syntax_errors = syntax_errors,
  defs = defs,
  parser = parser,

  -- Generator
  Emitter = Emitter,
  Generator = Generator,
  visitors = visitors,
  generator = generator,

  -- The most important function.
  generate_doc = generate_doc,
}

return nldoc
805 |
--------------------------------------------------------------------------------
/tests/test.lua:
--------------------------------------------------------------------------------
-- Unit tests for the nldoc parser, using the lester test framework.
local nelua_parser = require 'nldoc'.parser
local lester = require 'lester'

local describe, it, expect = lester.describe, lester.it, lester.expect

describe("parser", function()
  describe("Nelua", function()
    -- parsing a simple call statement yields the full AST,
    -- including node positions and tags
    it("source", function()
      expect.equal(nelua_parser:parse([[print 'hello world']]),
        { { { { "hello world",
        endpos = 20,
        pos = 7,
        tag = "String"
        } }, { "print",
        endpos = 7,
        pos = 1,
        tag = "Id"
        },
        endpos = 20,
        pos = 7,
        tag = "Call"
        },
        endpos = 20,
        pos = 1,
        tag = "Block"
        })
    end)

    describe("comments", function()
      -- a short comment is captured with its surrounding blanks trimmed
      it("short", function()
        expect.equal(
          select(2, nelua_parser:parse_comments("-- line comment \n")),
          {{text="line comment", tag="ShortComment",
          pos=1, endpos=17, lineno=1, endlineno=1, colno=1, endcolno=16}})
      end)

      -- long comments record their equals-sign level in `eq`
      it("long", function()
        expect.equal(
          select(2, nelua_parser:parse_comments("--[[ \n multi line\ncomment \n ]] \n")),
          {{text=" multi line\ncomment", tag="LongComment",
          pos=1, endpos=35, lineno=1, endlineno=4, colno=1, endcolno=4, eq=""}})
        expect.equal(
          select(2, nelua_parser:parse_comments("--[==[ \n multi line\ncomment \n ]==] \n")),
          {{text=" multi line\ncomment", tag="LongComment",
          pos=1, endpos=39, lineno=1, endlineno=4, colno=1, endcolno=6, eq="=="}})
      end)

      -- common leading indentation is stripped from long comment text
      it("trim indentation", function()
        expect.equal(
          select(2, nelua_parser:parse_comments([==[--[=[
long

comment
]=]]==])),
          {{text="long\n\ncomment", tag="LongComment",
          pos=1, endpos=58, lineno=1, endlineno=5, colno=1, endcolno=13, eq="="}})
        expect.equal(
          select(2, nelua_parser:parse_comments([==[--[=[
long
indented
comment
]=]]==])),
          {{text="long\nindented\ncomment", tag="LongComment",
          pos=1, endpos=78, lineno=1, endlineno=5, colno=1, endcolno=13, eq="="}})
      end)

      -- adjacent short comments are merged into one combined comment
      it("combine", function()
        expect.equal(
          select(2, nelua_parser:parse_comments([==[
-- line1
--
-- line2
]=]]==])),
          {{text="line1\n\nline2", tag="ShortComment",
          pos=1, endpos=21, lineno=1, endlineno=3, colno=1, endcolno=8, combined=true}})
      end)
    end)
  end)
end)

lester.report()
82 |
--------------------------------------------------------------------------------