├── lua
│   ├── clprc.lua
│   ├── lexers
│   │   ├── null.lua
│   │   ├── container.lua
│   │   ├── text.lua
│   │   ├── dsv.lua
│   │   ├── less.lua
│   │   ├── jsp.lua
│   │   ├── rhtml.lua
│   │   ├── typescript.lua
│   │   ├── sass.lua
│   │   ├── json.lua
│   │   ├── litcoffee.lua
│   │   ├── tex.lua
│   │   ├── lilypond.lua
│   │   ├── git-rebase.lua
│   │   ├── gettext.lua
│   │   ├── gemini.lua
│   │   ├── pico8.lua
│   │   ├── man.lua
│   │   ├── diff.lua
│   │   ├── taskpaper.lua
│   │   ├── props.lua
│   │   ├── strace.lua
│   │   ├── asp.lua
│   │   ├── rpmspec.lua
│   │   ├── gherkin.lua
│   │   ├── ini.lua
│   │   ├── smalltalk.lua
│   │   ├── gap.lua
│   │   ├── elm.lua
│   │   ├── dockerfile.lua
│   │   ├── toml.lua
│   │   ├── haskell.lua
│   │   ├── desktop.lua
│   │   ├── faust.lua
│   │   ├── io_lang.lua
│   │   ├── ps.lua
│   │   ├── protobuf.lua
│   │   ├── ledger.lua
│   │   ├── tcl.lua
│   │   ├── apl.lua
│   │   ├── rstats.lua
│   │   ├── bibtex.lua
│   │   ├── rc.lua
│   │   ├── pure.lua
│   │   ├── fennel.lua
│   │   ├── latex.lua
│   │   ├── vbscript.lua
│   │   ├── pike.lua
│   │   ├── ada.lua
│   │   ├── idl.lua
│   │   ├── context.lua
│   │   ├── hare.lua
│   │   ├── coffeescript.lua
│   │   ├── dart.lua
│   │   ├── gtkrc.lua
│   │   ├── batch.lua
│   │   ├── antlr.lua
│   │   ├── myrddin.lua
│   │   ├── mediawiki.lua
│   │   ├── scala.lua
│   │   ├── dot.lua
│   │   ├── rails.lua
│   │   ├── xs.lua
│   │   ├── routeros.lua
│   │   ├── vala.lua
│   │   ├── fish.lua
│   │   ├── actionscript.lua
│   │   ├── eiffel.lua
│   │   ├── powershell.lua
│   │   ├── icon.lua
│   │   ├── vb.lua
│   │   ├── nemerle.lua
│   │   ├── fsharp.lua
│   │   ├── boo.lua
│   │   ├── go.lua
│   │   ├── objective_c.lua
│   │   ├── csharp.lua
│   │   ├── lisp.lua
│   │   ├── snobol4.lua
│   │   ├── django.lua
│   │   ├── forth.lua
│   │   ├── pascal.lua
│   │   ├── groovy.lua
│   │   ├── vcard.lua
│   │   ├── applescript.lua
│   │   ├── xml.lua
│   │   ├── fantom.lua
│   │   ├── xtend.lua
│   │   ├── pkgbuild.lua
│   │   ├── caml.lua
│   │   ├── vhdl.lua
│   │   ├── reason.lua
│   │   ├── gnuplot.lua
│   │   ├── wsf.lua
│   │   ├── sql.lua
│   │   └── sml.lua
│   ├── themes
│   │   ├── ansi-16.lua
│   │   ├── monokai.lua
│   │   ├── dracula.lua
│   │   ├── gruvbox.lua
│   │   └── solarized.lua
│   ├── util.lua
│   ├── color_theme.lua
│   └── ansi_codes.lua
├── tests
│   └── syntax
│       ├── test.c
│       └── test.lua
├── cf.sh
├── luaf.sh
├── .clang-format
├── .gitignore
├── LICENSE
├── clp.h
├── cli.c
├── man
│   └── clp.1
├── Makefile
└── README.md
/lua/clprc.lua:
--------------------------------------------------------------------------------
1 | clprc = {}
2 | clprc.theme = "ansi-16"
3 | return clprc
4 | 
--------------------------------------------------------------------------------
/tests/syntax/test.c:
--------------------------------------------------------------------------------
1 | #include <stdio.h>
2 | 
3 | int
4 | main(void)
5 | {
6 |     printf("hello\n");
7 |     return 0;
8 | }
9 | 
--------------------------------------------------------------------------------
/tests/syntax/test.lua:
--------------------------------------------------------------------------------
1 | function add(a, b)
2 |     return a + b
3 | end
4 | 
5 | result = add(2, 3)
6 | print(result)
--------------------------------------------------------------------------------
/lua/lexers/null.lua:
--------------------------------------------------------------------------------
1 | -- Copyright 2006-2023 Mitchell. See LICENSE.
2 | -- Null LPeg lexer.
3 | return require('lexer').new('null')
4 | 
--------------------------------------------------------------------------------
/cf.sh:
--------------------------------------------------------------------------------
1 | find . -name '*.c' -type f -print0 | while IFS= read -r -d '' file; do
2 |     clang-format "$file" > "$file.tmp" && mv "$file.tmp" "$file"
3 | done
4 | 
--------------------------------------------------------------------------------
/luaf.sh:
--------------------------------------------------------------------------------
1 | find . \
-name '*.lua' -type f -print0 | while IFS= read -r -d '' file; do 2 | lua-format "$file" > "$file.tmp" && mv "$file.tmp" "$file" 3 | done 4 | 5 | -------------------------------------------------------------------------------- /lua/lexers/container.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Container LPeg lexer. 3 | -- This is SciTE's plain text lexer. 4 | return require('lexer').new('container') 5 | -------------------------------------------------------------------------------- /.clang-format: -------------------------------------------------------------------------------- 1 | --- 2 | IndentWidth: 4 3 | AlwaysBreakAfterDefinitionReturnType: TopLevel 4 | BraceWrapping: 5 | AfterClass: true 6 | AfterControlStatement: false 7 | AfterFunction: true 8 | BreakBeforeBraces: Custom -------------------------------------------------------------------------------- /lua/lexers/text.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Text LPeg lexer. 3 | local lexer = require('lexer') 4 | 5 | local lex = lexer.new('text') 6 | 7 | lex:add_rule('whitespace', lexer.token(lexer.WHITESPACE, lexer.space ^ 1)) 8 | 9 | return lex 10 | -------------------------------------------------------------------------------- /lua/themes/ansi-16.lua: -------------------------------------------------------------------------------- 1 | local M = {} 2 | M.theme = { 3 | ['comment'] = {color = "cyan"}, 4 | ['keyword'] = {color = "blue"}, 5 | ['number'] = {color = "red"}, 6 | ['string'] = {color = "green"}, 7 | ['preprocessor'] = {color = "magenta"}, 8 | ['type'] = {color = "yellow"} 9 | } 10 | return M 11 | -------------------------------------------------------------------------------- /lua/lexers/dsv.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2016 Christian Hesse 2 | -- delimiter separated values LPeg lexer. 3 | local l = require('lexer') 4 | local token, word_match = l.token, l.word_match 5 | local P, R, S = lpeg.P, lpeg.R, lpeg.S 6 | 7 | local M = {_NAME = 'dsv'} 8 | 9 | -- Operators. 10 | local operator = token(l.OPERATOR, S(',;:|')) 11 | 12 | M._rules = {{'operator', operator}} 13 | 14 | return M 15 | -------------------------------------------------------------------------------- /lua/util.lua: -------------------------------------------------------------------------------- 1 | function copy_table(t) 2 | local t2 = {} 3 | for k, v in pairs(t) do t2[k] = v end 4 | return t2 5 | end 6 | 7 | function print_r(t, indent) 8 | indent = indent or "" 9 | for k, v in pairs(t) do 10 | if type(v) == "table" then 11 | print(indent .. tostring(k) .. ":") 12 | print_r(v, indent .. " ") 13 | else 14 | print(indent .. tostring(k) .. ": " .. tostring(v)) 15 | end 16 | end 17 | end 18 | -------------------------------------------------------------------------------- /lua/lexers/less.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Robert Gieseke. See LICENSE. 2 | -- Less CSS LPeg lexer. 3 | -- http://lesscss.org 4 | local lexer = require('lexer') 5 | local token = lexer.token 6 | local S = lpeg.S 7 | 8 | local lex = lexer.new('less', {inherit = lexer.load('css')}) 9 | 10 | -- Line comments. 11 | lex:add_rule('line_comment', token(lexer.COMMENT, lexer.to_eol('//'))) 12 | 13 | -- Variables. 
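14 | -- Matches Less variables and interpolation alike, e.g. '@primary-color'
15 | -- and '@{name}' ('{' and '}' are in the allowed character set below).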
16 | lex:add_rule('variable',
17 |              token(lexer.VARIABLE, '@' * (lexer.alnum + S('_-{}')) ^ 1))
18 | 
19 | lexer.property['scintillua.comment'] = '//'
20 | 
21 | return lex
22 | 
--------------------------------------------------------------------------------
/lua/lexers/jsp.lua:
--------------------------------------------------------------------------------
1 | -- Copyright 2006-2023 Mitchell. See LICENSE.
2 | -- JSP LPeg lexer.
3 | local lexer = lexer
4 | local P, S = lpeg.P, lpeg.S
5 | 
6 | local lex = lexer.new(..., {inherit = lexer.load('html')})
7 | 
8 | -- Embedded Java.
9 | local java = lexer.load('java')
10 | local java_start_rule = lex:tag(lexer.PREPROCESSOR, '<%' * P('=') ^ -1)
11 | local java_end_rule = lex:tag(lexer.PREPROCESSOR, '%>')
12 | lex:embed(java, java_start_rule, java_end_rule, true)
13 | 
14 | -- Fold points.
15 | lex:add_fold_point(lexer.PREPROCESSOR, '<%', '%>')
16 | 
17 | lexer.property['scintillua.comment'] = '<!--|-->'
18 | 
19 | return lex
20 | 
--------------------------------------------------------------------------------
/lua/lexers/rhtml.lua:
--------------------------------------------------------------------------------
1 | -- Copyright 2006-2023 Mitchell. See LICENSE.
2 | -- RHTML LPeg lexer.
3 | local lexer = lexer
4 | local P, S = lpeg.P, lpeg.S
5 | 
6 | local lex = lexer.new(..., {inherit = lexer.load('html')})
7 | 
8 | -- Embedded Ruby.
9 | local ruby = lexer.load('rails')
10 | local ruby_start_rule = lex:tag(lexer.PREPROCESSOR, '<%' * P('=') ^ -1)
11 | local ruby_end_rule = lex:tag(lexer.PREPROCESSOR, '%>')
12 | lex:embed(ruby, ruby_start_rule, ruby_end_rule)
13 | 
14 | -- Fold points.
15 | lex:add_fold_point(lexer.PREPROCESSOR, '<%', '%>')
16 | 
17 | lexer.property['scintillua.comment'] = '<!--|-->'
18 | 
19 | return lex
20 | 
--------------------------------------------------------------------------------
/lua/lexers/typescript.lua:
--------------------------------------------------------------------------------
1 | -- Copyright 2021-2023 Mitchell. See LICENSE.
2 | -- TypeScript LPeg lexer.
3 | local lexer = lexer
4 | local P, S = lpeg.P, lpeg.S
5 | 
6 | local lex = lexer.new(..., {inherit = lexer.load('javascript')})
7 | 
8 | -- Word lists.
9 | lex:set_word_list(lexer.KEYWORD,
10 |                   'abstract as constructor declare is module namespace require type',
11 |                   true)
12 | 
13 | lex:set_word_list(lexer.TYPE,
14 |                   'boolean number bigint string unknown any void never symbol object',
15 |                   true)
16 | 
17 | lexer.property['scintillua.comment'] = '//'
18 | 
19 | return lex
20 | 
--------------------------------------------------------------------------------
/lua/lexers/sass.lua:
--------------------------------------------------------------------------------
1 | -- Copyright 2006-2023 Robert Gieseke. See LICENSE.
2 | -- Sass CSS preprocessor LPeg lexer.
3 | -- http://sass-lang.com
4 | local lexer = lexer
5 | local P, S = lpeg.P, lpeg.S
6 | 
7 | local lex = lexer.new(..., {inherit = lexer.load('css')})
8 | 
9 | -- Line comments.
10 | lex:add_rule('line_comment', lex:tag(lexer.COMMENT, lexer.to_eol('//')))
11 | 
12 | -- Variables.
13 | lex:add_rule('variable',
14 |              lex:tag(lexer.VARIABLE, '$' * (lexer.alnum + S('_-')) ^ 1))
15 | 
16 | -- Mixins.
17 | lex:add_rule('mixin', lex:tag(lexer.PREPROCESSOR, '@' * lexer.word)) 18 | 19 | lexer.property['scintillua.comment'] = '//' 20 | 21 | return lex 22 | -------------------------------------------------------------------------------- /lua/themes/monokai.lua: -------------------------------------------------------------------------------- 1 | local M = {} 2 | M.theme = { 3 | ['default'] = "#f8f8f2", 4 | ['nothing'] = '', 5 | ['class'] = "#f92672", 6 | ['comment'] = "#75715e", 7 | ['constant'] = "#f8f8f2", 8 | ['error'] = "#f8f8f0", 9 | ['function'] = "#a6e22e", 10 | ['keyword'] = "#f92672", 11 | ['label'] = "#e6db74", 12 | ['number'] = "#ae81ff", 13 | ['operator'] = "#f8f8f2", 14 | ['regex'] = "#ae81ff", 15 | ['string'] = "#e6db74", 16 | ['preprocessor'] = "#f92672", 17 | ['tag'] = "#f92672", 18 | ['type'] = "#66d9ef", 19 | ['variable'] = "#66d9ef", 20 | ['whitespace'] = '', 21 | ['embedded'] = "#e6db74", 22 | ['identifier'] = "#f8f8f2", 23 | } 24 | return M 25 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | main 2 | # Prerequisites 3 | *.d 4 | 5 | # Object files 6 | *.o 7 | *.ko 8 | *.obj 9 | *.elf 10 | 11 | # Linker output 12 | *.ilk 13 | *.map 14 | *.exp 15 | 16 | # Precompiled Headers 17 | *.gch 18 | *.pch 19 | 20 | # Libraries 21 | *.lib 22 | *.a 23 | *.la 24 | *.lo 25 | 26 | # Shared objects (inc. Windows DLLs) 27 | *.dll 28 | *.so 29 | *.so.* 30 | *.dylib 31 | 32 | # Executables 33 | *.exe 34 | *.out 35 | *.app 36 | *.i*86 37 | *.x86_64 38 | *.hex 39 | 40 | # Debug files 41 | *.dSYM/ 42 | *.su 43 | *.idb 44 | *.pdb 45 | 46 | # Kernel Module Compile Results 47 | *.mod* 48 | *.cmd 49 | .tmp_versions/ 50 | modules.order 51 | Module.symvers 52 | Mkfile.old 53 | dkms.conf 54 | tags 55 | .DS_Store 56 | clp 57 | config.mk 58 | 59 | .cache/* 60 | tests/tests 61 | sqlite3.c 62 | -------------------------------------------------------------------------------- /lua/themes/dracula.lua: -------------------------------------------------------------------------------- 1 | -- https://draculatheme.com/, https://spec.draculatheme.com/ 2 | local M = {} 3 | M.theme = { 4 | ['default'] = "#f8f8f2", 5 | ['nothing'] = '', 6 | ['class'] = "#8be9fd", 7 | ['comment'] = "#6272a4", 8 | ['constant'] = "#bd93f9", 9 | ['error'] = "#ff5555", 10 | ['function'] = "#50fa7b", 11 | ['keyword'] = "#ff79c6", 12 | ['label'] = "#ff79c6", 13 | ['number'] = "#bd93f9", 14 | ['operator'] = "#f8f8f2", 15 | ['regex'] = "#ff5555", 16 | ['string'] = "#f1fa8c", 17 | ['preprocessor'] = "#ff79c6", 18 | ['tag'] = "#ff79c6", 19 | ['type'] = "#8be9fd", 20 | ['variable'] = "#f8f8f2", 21 | ['whitespace'] = '', 22 | ['embedded'] = "#f1fa8c", 23 | ['identifier'] = "#f8f8f2", 24 | } 25 | return M 26 | -------------------------------------------------------------------------------- /lua/lexers/json.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Brian "Sir Alaran" Schott. See LICENSE. 2 | -- JSON LPeg lexer. 3 | -- Based off of lexer code by Mitchell. 4 | local lexer = lexer 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new(...) 8 | 9 | -- Strings. 10 | local sq_str = lexer.range("'", true) 11 | local dq_str = lexer.range('"', true) 12 | lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str)) 13 | 14 | -- Keywords. 15 | lex:add_rule('keyword', 16 | lex:tag(lexer.KEYWORD, lexer.word_match('true false null'))) 17 | 18 | -- Numbers. 
19 | lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) 20 | 21 | -- Operators. 22 | lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('[]{}:,'))) 23 | 24 | -- Fold points. 25 | lex:add_fold_point(lexer.OPERATOR, '[', ']') 26 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 27 | 28 | return lex 29 | -------------------------------------------------------------------------------- /lua/lexers/litcoffee.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Robert Gieseke. See LICENSE. 2 | -- Literate CoffeeScript LPeg lexer. 3 | -- http://coffeescript.org/#literate 4 | local lexer = require('lexer') 5 | local token = lexer.token 6 | local P, S = lpeg.P, lpeg.S 7 | 8 | local lex = lexer.new('litcoffee', {inherit = lexer.load('markdown')}) 9 | 10 | -- Embedded CoffeeScript. 11 | local coffeescript = lexer.load('coffeescript') 12 | local coffee_start_rule = token(lexer.EMBEDDED, (P(' ') ^ 4 + P('\t'))) 13 | local coffee_end_rule = token(lexer.EMBEDDED, lexer.newline) 14 | lex:embed(coffeescript, coffee_start_rule, coffee_end_rule) 15 | 16 | -- Use 'markdown_whitespace' instead of lexer.WHITESPACE since the latter would expand to 17 | -- 'litcoffee_whitespace'. 18 | lex:modify_rule('whitespace', 19 | token('markdown_whitespace', S(' \t') ^ 1 + S('\r\n') ^ 1)) 20 | 21 | return lex 22 | -------------------------------------------------------------------------------- /lua/lexers/tex.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Plain TeX LPeg lexer. 3 | -- Modified by Robert Gieseke. 4 | local lexer = lexer 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new(...) 8 | 9 | -- Comments. 10 | lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('%'))) 11 | 12 | -- TeX environments. 13 | lex:add_rule('environment', 14 | lex:tag('environment', '\\' * (P('begin') + 'end') * lexer.word)) 15 | 16 | -- Commands. 17 | lex:add_rule('command', 18 | lex:tag('command', '\\' * (lexer.alpha ^ 1 + S('#$&~_^%{}')))) 19 | 20 | -- Operators. 21 | lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('$&#{}[]'))) 22 | 23 | -- Fold points. 24 | lex:add_fold_point('environment', '\\begin', '\\end') 25 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 26 | 27 | lexer.property['scintillua.comment'] = '%' 28 | 29 | return lex 30 | -------------------------------------------------------------------------------- /lua/lexers/lilypond.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Robert Gieseke. See LICENSE. 2 | -- Lilypond LPeg lexer. 3 | -- TODO Embed Scheme; Notes?, Numbers? 4 | local lexer = require('lexer') 5 | local token, word_match = lexer.token, lexer.word_match 6 | local P, S = lpeg.P, lpeg.S 7 | 8 | local lex = lexer.new('lilypond') 9 | 10 | -- Whitespace. 11 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 12 | 13 | -- Keywords, commands. 14 | lex:add_rule('keyword', token(lexer.KEYWORD, '\\' * lexer.word)) 15 | 16 | -- Identifiers. 17 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 18 | 19 | -- Strings. 20 | lex:add_rule('string', token(lexer.STRING, lexer.range('"', false, false))) 21 | 22 | -- Comments. 23 | -- TODO: block comment. 24 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('%'))) 25 | 26 | -- Operators. 
27 | lex:add_rule('operator', token(lexer.OPERATOR, S("{}'~<>|"))) 28 | 29 | lexer.property['scintillua.comment'] = '%' 30 | 31 | return lex 32 | -------------------------------------------------------------------------------- /lua/lexers/git-rebase.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2017-2023 Marc André Tanner. See LICENSE. 2 | -- git-rebase(1) LPeg lexer. 3 | local lexer = lexer 4 | local P, R = lpeg.P, lpeg.R 5 | 6 | local lex = lexer.new(..., {lex_by_line = true}) 7 | 8 | -- Comments. 9 | lex:add_rule('comment', 10 | lex:tag(lexer.COMMENT, lexer.to_eol(lexer.starts_line('#')))) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lexer.starts_line( 14 | lex:word_match(lexer.KEYWORD)))) 15 | 16 | -- Commit SHA1. 17 | local function patn(pat, min, max) return -pat ^ (max + 1) * pat ^ min end 18 | 19 | lex:add_rule('commit', lex:tag(lexer.NUMBER, patn(R('09', 'af'), 7, 40))) 20 | 21 | lex:add_rule('message', lex:tag('message', lexer.to_eol())) 22 | 23 | -- Word lists. 24 | lex:set_word_list(lexer.KEYWORD, [[ 25 | p pick 26 | r reword 27 | e edit 28 | s squash 29 | f fixup 30 | x exec 31 | d drop 32 | b break 33 | l label 34 | t reset 35 | m merge 36 | ]]) 37 | 38 | return lex 39 | -------------------------------------------------------------------------------- /lua/lexers/gettext.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Gettext LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('gettext') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match( 14 | 'msgid msgid_plural msgstr fuzzy c-format no-c-format', 15 | true))) 16 | 17 | -- Identifiers. 18 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 19 | 20 | -- Variables. 21 | lex:add_rule('variable', token(lexer.VARIABLE, S('%$@') * lexer.word)) 22 | 23 | -- Strings. 24 | lex:add_rule('string', token(lexer.STRING, lexer.range('"', true))) 25 | 26 | -- Comments. 27 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#' * S(': .~')))) 28 | 29 | lexer.property['scintillua.comment'] = '#' 30 | 31 | return lex 32 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2022 Jon Eskin 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining 4 | a copy of this software and associated documentation files (the 5 | "Software"), to deal in the Software without restriction, including 6 | without limitation the rights to use, copy, modify, merge, publish, 7 | distribute, sublicense, and/or sell copies of the Software, and to 8 | permit persons to whom the Software is furnished to do so, subject to 9 | the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be 12 | included in all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 17 | NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /lua/lexers/gemini.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2020-2023 Haelwenn (lanodan) Monnier . See LICENSE. 2 | -- Gemini / Gemtext LPeg lexer. 3 | -- See https://gemini.circumlunar.space/docs/specification.html 4 | local lexer = lexer 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new(...) 8 | 9 | local header = lex:tag(lexer.HEADING .. '.h3', 10 | lexer.to_eol(lexer.starts_line('###'))) + 11 | lex:tag(lexer.HEADING .. '.h2', 12 | lexer.to_eol(lexer.starts_line('##'))) + 13 | lex:tag(lexer.HEADING .. '.h1', 14 | lexer.to_eol(lexer.starts_line('#'))) 15 | lex:add_rule('header', header) 16 | 17 | lex:add_rule('list', lex:tag(lexer.LIST, lexer.to_eol(lexer.starts_line('*')))) 18 | 19 | lex:add_rule('blockquote', 20 | lex:tag(lexer.STRING, lexer.to_eol(lexer.starts_line('>')))) 21 | 22 | lex:add_rule('pre', 23 | lex:tag(lexer.CODE, lexer.to_eol(lexer.range('```', false, true)))) 24 | 25 | lex:add_rule('link', lex:tag(lexer.LINK, lexer.to_eol(lexer.starts_line('=>')))) 26 | 27 | return lex 28 | -------------------------------------------------------------------------------- /lua/lexers/pico8.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2016-2023 Alejandro Baez (https://keybase.io/baez). See LICENSE. 2 | -- PICO-8 lexer. 3 | -- http://www.lexaloffle.com/pico-8.php 4 | local lexer = lexer 5 | local word_match = lexer.word_match 6 | local P, S = lpeg.P, lpeg.S 7 | 8 | local lex = lexer.new(...) 9 | 10 | -- Keywords 11 | lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lexer.word_match( 12 | '__gff__ __map__ __sfx__ __music__'))) 13 | 14 | -- Identifiers 15 | lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) 16 | 17 | -- Comments 18 | lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('//', true))) 19 | 20 | -- Numbers 21 | lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.integer)) 22 | 23 | -- Operators 24 | lex:add_rule('operator', lex:tag(lexer.OPERATOR, '_')) 25 | 26 | -- Embed Lua into PICO-8. 27 | local lua = lexer.load('lua') 28 | local lua_start_rule = lex:tag(lexer.KEYWORD, word_match('__lua__')) 29 | local lua_end_rule = lex:tag(lexer.KEYWORD, word_match('__gfx__')) 30 | lex:embed(lua, lua_start_rule, lua_end_rule) 31 | 32 | lexer.property['scintillua.comment'] = '//' 33 | 34 | return lex 35 | -------------------------------------------------------------------------------- /lua/lexers/man.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2015-2023 David B. Lamkins . See LICENSE. 2 | -- man/roff LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('man') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Markup. 13 | lex:add_rule('rule1', token(lexer.STRING, '.' * 14 | lexer.to_eol( 15 | 'B' * P('R') ^ -1 + 'I' * P('PR') ^ -1))) 16 | lex:add_rule('rule2', token(lexer.NUMBER, lexer.to_eol('.' 
* S('ST') * 'H')))
17 | lex:add_rule('rule3',
18 |              token(lexer.KEYWORD, P('.br') + '.DS' + '.RS' + '.RE' + '.PD'))
19 | lex:add_rule('rule4', token(lexer.LABEL, '.' * (S('ST') * 'H' + '.TP')))
20 | lex:add_rule('rule5', token(lexer.VARIABLE,
21 |                             '.B' * P('R') ^ -1 + '.I' * S('PR') ^ -1 + '.PP'))
22 | lex:add_rule('rule6', token(lexer.TYPE, '\\f' * S('BIPR')))
23 | lex:add_rule('rule7', token(lexer.PREPROCESSOR,
24 |                             lexer.starts_line('.') * lexer.alpha ^ 1))
25 | 
26 | return lex
27 | 
--------------------------------------------------------------------------------
/lua/themes/gruvbox.lua:
--------------------------------------------------------------------------------
1 | local M = {}
2 | 
3 | local colors = {
4 |     black = '#928374',
5 |     red = '#fb4934',
6 |     green = '#b8bb26',
7 |     yellow = '#fabd2f',
8 |     blue = '#83a598',
9 |     magenta = '#d3869b',
10 |     cyan = '#8ec07c',
11 |     aqua = '#8ec07c',
12 |     orange = '#fe8019',
13 |     white = '#ebdbb2',
14 | }
15 | 
16 | M.theme = {
17 |     ['default'] = colors.white,
18 |     ['nothing'] = '',
19 |     ['class'] = colors.yellow,
20 |     ['comment'] = colors.black, -- '#928374', gruvbox's gray
21 |     ['constant'] = colors.magenta, -- '#d3869b', gruvbox's purple
22 |     ['error'] = colors.red,
23 |     ['function'] = colors.green,
24 |     ['keyword'] = colors.red,
25 |     ['label'] = colors.red,
26 |     ['number'] = colors.magenta,
27 |     ['operator'] = colors.white,
28 |     ['regex'] = colors.aqua,
29 |     ['string'] = colors.green,
30 |     ['preprocessor'] = colors.aqua,
31 |     ['tag'] = colors.blue,
32 |     ['type'] = colors.yellow,
33 |     ['variable'] = colors.blue,
34 |     ['whitespace'] = '',
35 |     ['embedded'] = colors.orange,
36 |     ['identifier'] = colors.white,
37 | }
38 | return M
39 | 
--------------------------------------------------------------------------------
/lua/lexers/diff.lua:
--------------------------------------------------------------------------------
1 | -- Copyright 2006-2023 Mitchell. See LICENSE.
2 | -- Diff LPeg lexer.
3 | local lexer = lexer
4 | local to_eol, starts_line = lexer.to_eol, lexer.starts_line
5 | local P, S = lpeg.P, lpeg.S
6 | 
7 | local lex = lexer.new(..., {lex_by_line = true})
8 | 
9 | -- Text, file headers, and separators.
10 | lex:add_rule('index', lex:tag(lexer.COMMENT, to_eol(starts_line('Index: '))))
11 | lex:add_rule('header', lex:tag(lexer.HEADING,
12 |                                to_eol(starts_line(P('*** ') + '--- ' + '+++ '))))
13 | lex:add_rule('separator', lex:tag(lexer.COMMENT,
14 |                                   to_eol(starts_line(P('---') + '****' + '='))))
15 | 
16 | -- Location.
17 | lex:add_rule('location', lex:tag(lexer.NUMBER, to_eol(
18 |                                      starts_line('@@' + lexer.dec_num + '****'))))
19 | 
20 | -- Additions, deletions, and changes.
21 | lex:add_rule('addition', lex:tag('addition', to_eol(starts_line(S('>+')))))
22 | lex:add_rule('deletion', lex:tag('deletion', to_eol(starts_line(S('<-')))))
23 | lex:add_rule('change', lex:tag('change', to_eol(starts_line('!'))))
24 | 
25 | lex:add_rule('any_line', lex:tag(lexer.DEFAULT, lexer.to_eol()))
26 | 
27 | return lex
28 | 
--------------------------------------------------------------------------------
/lua/lexers/taskpaper.lua:
--------------------------------------------------------------------------------
1 | -- Copyright (c) 2016-2023 Larry Hynes. See LICENSE.
2 | -- Taskpaper LPeg lexer
3 | local lexer = lexer
4 | local P, S = lpeg.P, lpeg.S
5 | 
6 | local lex = lexer.new(..., {lex_by_line = true})
7 | 
8 | -- Notes.
9 | local delimiter = lpeg.B(' ') + lpeg.B('\t')
10 | lex:add_rule('note', delimiter * lex:tag('note', lexer.to_eol(lexer.alnum)))
11 | 
12 | -- Tasks.
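13 | -- A '-' preceded by a space or tab starts a task; only the dash itself is
14 | -- tagged (as LIST), e.g. the leading "-" in "\t- buy milk".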
15 | lex:add_rule('task', delimiter * lex:tag(lexer.LIST, '-'))
16 | 
17 | -- Projects.
18 | lex:add_rule('project', lex:tag(lexer.HEADING, lexer.range(
19 |                                     lexer.starts_line(lexer.alnum), ':') *
20 |                                     lexer.newline))
21 | 
22 | -- Tags.
23 | lex:add_rule('extended_tag', lex:tag(lexer.TAG .. '.extended',
24 |                                      '@' * lexer.word * '(' *
25 |                                      (lexer.word + lexer.digit + '-') ^ 1 *
26 |                                      ')'))
27 | lex:add_rule('day_tag',
28 |              lex:tag(lexer.TAG .. '.day', (P('@today') + '@tomorrow')))
29 | lex:add_rule('overdue_tag', lex:tag(lexer.TAG .. '.overdue', '@overdue'))
30 | lex:add_rule('plain_tag', lex:tag(lexer.TAG .. '.plain', '@' * lexer.word))
31 | 
32 | return lex
33 | 
--------------------------------------------------------------------------------
/lua/lexers/props.lua:
--------------------------------------------------------------------------------
1 | -- Copyright 2006-2023 Mitchell. See LICENSE.
2 | -- Props LPeg lexer.
3 | local lexer = lexer
4 | local P, S = lpeg.P, lpeg.S
5 | 
6 | local lex = lexer.new(..., {lex_by_line = true})
7 | 
8 | -- Identifiers.
9 | lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, (lexer.alpha + S('.-_')) *
10 |                                                      (lexer.alnum + S('.-_')) ^ 0))
11 | 
12 | -- Colors.
13 | local xdigit = lexer.xdigit
14 | lex:add_rule('color', lex:tag(lexer.NUMBER, '#' * xdigit * xdigit * xdigit *
15 |                                                 xdigit * xdigit * xdigit))
16 | 
17 | -- Comments.
18 | lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#')))
19 | 
20 | -- Equals.
21 | lex:add_rule('equals', lex:tag(lexer.OPERATOR, '='))
22 | 
23 | -- Strings.
24 | local sq_str = lexer.range("'")
25 | local dq_str = lexer.range('"')
26 | lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str))
27 | 
28 | -- Variables.
29 | lex:add_rule('variable', lex:tag(lexer.OPERATOR, '$(') *
30 |                              lex:tag(lexer.VARIABLE,
31 |                                      (lexer.nonnewline - lexer.space - ')') ^ 0) *
32 |                              lex:tag(lexer.OPERATOR, ')'))
33 | 
34 | lexer.property['scintillua.comment'] = '#'
35 | 
36 | return lex
37 | 
--------------------------------------------------------------------------------
/lua/lexers/strace.lua:
--------------------------------------------------------------------------------
1 | -- Copyright 2017-2023 Marc André Tanner. See LICENSE.
2 | -- strace(1) output lexer
3 | local lexer = lexer
4 | local S, B = lpeg.S, lpeg.B
5 | 
6 | local lex = lexer.new(..., {lex_by_line = true})
7 | 
8 | -- Syscall
9 | lex:add_rule('syscall', lex:tag(lexer.FUNCTION, lexer.starts_line(lexer.word)))
10 | 
11 | -- Upper case constants
12 | lex:add_rule('constant', lex:tag(lexer.CONSTANT, (lexer.upper + '_') *
13 |                                      (lexer.upper + lexer.digit + '_') ^ 0))
14 | 
15 | -- Single and double quoted strings
16 | local sq_str = lexer.range("'", true)
17 | local dq_str = lexer.range('"', true)
18 | lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str))
19 | 
20 | -- Comments and text in parentheses at the line end
21 | local comment = lexer.range('/*', '*/')
22 | local description = lexer.range('(', ')') * lexer.newline
23 | lex:add_rule('comment', lex:tag(lexer.COMMENT, comment + description))
24 | 
25 | lex:add_rule('result', lex:tag(lexer.TYPE, B(' = ') * lexer.integer))
26 | lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word))
27 | lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.float + lexer.integer))
28 | lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-/*%<>~!=^&|?~:;,.()[]{}')))
29 | 
30 | return lex
31 | 
--------------------------------------------------------------------------------
/lua/lexers/asp.lua:
--------------------------------------------------------------------------------
1 | -- Copyright 2006-2023 Mitchell. See LICENSE.
2 | -- ASP LPeg lexer.
3 | local lexer = lexer
4 | local P, S = lpeg.P, lpeg.S
5 | 
6 | local html = lexer.load('html')
7 | local lex = lexer.new(..., {inherit = html}) -- proxy for HTML
8 | 
9 | -- Embedded VB.
10 | local vb = lexer.load('vb')
11 | local vb_start_rule = lex:tag(lexer.PREPROCESSOR, '<%' * P('=') ^ -1)
12 | local vb_end_rule = lex:tag(lexer.PREPROCESSOR, '%>')
13 | lex:embed(vb, vb_start_rule, vb_end_rule)
14 | 
15 | -- Embedded VBScript.
16 | local vbs = lexer.load('vb', 'vbscript')
17 | local script_element = lexer.word_match('script', true)
18 | local vbs_start_rule = #('<' * script_element * (P(
19 |     function(input, index)
20 |         if input:find('^%s+language%s*=%s*(["\'])vbscript%1', index) or
21 |             input:find('^%s+type%s*=%s*(["\'])text/vbscript%1', index) then
22 |             return true
23 |         end
24 |     end) + '>')) * html.embed_start_tag -- <script language="vbscript">
25 | local vbs_end_rule = #('</' * script_element * lexer.space ^ 0 * '>') * html.embed_end_tag -- </script>
26 | lex:embed(vbs, vbs_start_rule, vbs_end_rule)
27 | 
28 | -- Fold points.
29 | lex:add_fold_point(lexer.PREPROCESSOR, '<%', '%>')
30 | 
31 | lexer.property['scintillua.comment'] = '<!--|-->'
32 | 
33 | return lex
34 | 
--------------------------------------------------------------------------------
/lua/lexers/rpmspec.lua:
--------------------------------------------------------------------------------
1 | -- Copyright 2022-2023 Matej Cepl mcepl.att.cepl.eu. See LICENSE.
2 | local lexer = require('lexer')
3 | local token, word_match = lexer.token, lexer.word_match
4 | local P, S = lpeg.P, lpeg.S
5 | 
6 | local lex = lexer.new('rpmspec')
7 | 
8 | -- Whitespace.
9 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1))
10 | 
11 | -- Comments.
12 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#')))
13 | 
14 | -- Strings.
15 | lex:add_rule('string', token(lexer.STRING, lexer.range('"')))
16 | 
17 | -- Keywords.
18 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 19 | 'Prereq', 'Summary', 'Name', 'Version', 'Packager', 'Requires', 20 | 'Recommends', 'Suggests', 'Supplements', 'Enhances', 'Icon', 'URL', 21 | 'Source', 'Patch', 'Prefix', 'Packager', 'Group', 'License', 'Release', 22 | 'BuildRoot', 'Distribution', 'Vendor', 'Provides', 'ExclusiveArch', 23 | 'ExcludeArch', 'ExclusiveOS', 'Obsoletes', 'BuildArch', 24 | 'BuildArchitectures', 'BuildRequires', 'BuildConflicts', 'BuildPreReq', 25 | 'Conflicts', 'AutoRequires', 'AutoReq', 'AutoReqProv', 'AutoProv', 'Epoch' 26 | })) 27 | 28 | -- Macros 29 | lex:add_rule('command', token(lexer.FUNCTION, '%' * lexer.word)) 30 | 31 | lexer.property['scintillua.comment'] = '#' 32 | 33 | return lex 34 | -------------------------------------------------------------------------------- /lua/lexers/gherkin.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2015-2023 Jason Schindler. See LICENSE. 2 | -- Gherkin (https://github.com/cucumber/cucumber/wiki/Gherkin) LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('gherkin', {fold_by_indentation = true}) 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match( 14 | 'And Background But Examples Feature Given Outline Scenario Scenarios Then When'))) 15 | 16 | -- Strings. 17 | local doc_str = lexer.range('"""') 18 | local dq_str = lexer.range('"') 19 | lex:add_rule('string', token(lexer.STRING, doc_str + dq_str)) 20 | 21 | -- Comments. 22 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) 23 | 24 | -- Numbers. 25 | -- lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 26 | 27 | -- Tags. 28 | lex:add_rule('tag', token(lexer.LABEL, '@' * lexer.word ^ 0)) 29 | 30 | -- Placeholders. 31 | lex:add_rule('placeholder', 32 | token(lexer.VARIABLE, lexer.range('<', '>', false, false, true))) 33 | 34 | -- Examples. 35 | lex:add_rule('example', token(lexer.DEFAULT, lexer.to_eol('|'))) 36 | 37 | lexer.property['scintillua.comment'] = '#' 38 | 39 | return lex 40 | -------------------------------------------------------------------------------- /lua/lexers/ini.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Ini LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('ini') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', 14 | token(lexer.KEYWORD, word_match('true false on off yes no'))) 15 | 16 | -- Identifiers. 17 | lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + '_') * 18 | (lexer.alnum + S('_.')) ^ 0)) 19 | 20 | -- Strings. 21 | local sq_str = lexer.range("'") 22 | local dq_str = lexer.range('"') 23 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 24 | 25 | -- Labels. 26 | lex:add_rule('label', token(lexer.LABEL, lexer.range('[', ']', true))) 27 | 28 | -- Comments. 29 | lex:add_rule('comment', 30 | token(lexer.COMMENT, lexer.to_eol(lexer.starts_line(S(';#'))))) 31 | 32 | -- Numbers. 
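33 | -- The rule below accepts signed hex, octal, and decimal integers with '_'
34 | -- digit separators (e.g. 0xFF, 0777, 1_000, -42), as well as floats like 1.5e3.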
35 | local integer = S('+-') ^ -1 *
36 |                     (lexer.hex_num + lexer.oct_num_('_') + lexer.dec_num_('_'))
37 | lex:add_rule('number', token(lexer.NUMBER, lexer.float + integer))
38 | 
39 | -- Operators.
40 | lex:add_rule('operator', token(lexer.OPERATOR, '='))
41 | 
42 | lexer.property['scintillua.comment'] = '#'
43 | 
44 | return lex
45 | 
--------------------------------------------------------------------------------
/clp.h:
--------------------------------------------------------------------------------
1 | #ifndef CLP_H
2 | #define CLP_H
3 | 
4 | #include <lauxlib.h>
5 | #include <lua.h>
6 | #include <lualib.h>
7 | #include <stdbool.h>
8 | #include <stdio.h>
9 | #include <stdlib.h>
10 | #include <string.h>
11 | #include <sys/stat.h>
12 | #include <unistd.h>
13 | 
14 | #ifndef PATHMAX
15 | #define PATHMAX 4096
16 | #endif
17 | 
18 | #ifndef CLP_PATH
19 | #define CLP_PATH "/usr/local/share/clp"
20 | #endif
21 | 
22 | #ifndef SRC_LUA_PATH
23 | #define SRC_LUA_PATH ""
24 | #endif
25 | 
26 | /* One invocation's state: the embedded Lua interpreter plus CLI overrides. */
27 | struct clp_ctx {
28 |     lua_State *L;
29 |     char path[PATHMAX];
30 |     struct {
31 |         bool print_available_overrides;
32 |         char *filetype_override;
33 |         char *color_theme_override;
34 |         int highlight_line;
35 |     } program_opts;
36 |     char filename[PATHMAX];
37 | };
38 | 
39 | void bail(lua_State *L, char *msg);
40 | 
41 | void usage();
42 | 
43 | int print_lua_path(lua_State *L);
44 | 
45 | bool lua_path_add(lua_State *L, const char *path);
46 | 
47 | bool lua_paths_get(lua_State *L, char **lpath, char **cpath);
48 | 
49 | int lua_init(struct clp_ctx *ctx);
50 | 
51 | int clp_init(struct clp_ctx *ctx);
52 | 
53 | int clp_open_file(struct clp_ctx *ctx, struct stat *buf, char *filename);
54 | 
55 | int clp_run(struct clp_ctx *ctx);
56 | 
57 | void clp_cleanup(struct clp_ctx *ctx);
58 | 
59 | #endif /* CLP_H */
60 | 
--------------------------------------------------------------------------------
/lua/lexers/smalltalk.lua:
--------------------------------------------------------------------------------
1 | -- Copyright 2006-2023 Mitchell. See LICENSE.
2 | -- Smalltalk LPeg lexer.
3 | local lexer = require('lexer')
4 | local token, word_match = lexer.token, lexer.word_match
5 | local P, S = lpeg.P, lpeg.S
6 | 
7 | local lex = lexer.new('smalltalk')
8 | 
9 | -- Whitespace.
10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1))
11 | 
12 | -- Keywords.
13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match(
14 |                                  'true false nil self super isNil not Smalltalk Transcript')))
15 | 
16 | -- Types.
17 | lex:add_rule('type', token(lexer.TYPE, word_match(
18 |                                'Date Time Boolean True False Character String Array Symbol Integer Object')))
19 | 
20 | -- Identifiers.
21 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
22 | 
23 | -- Strings.
24 | local sq_str = lexer.range("'")
25 | local word_str = '$' * lexer.word
26 | lex:add_rule('string', token(lexer.STRING, sq_str + word_str))
27 | 
28 | -- Comments.
29 | lex:add_rule('comment', token(lexer.COMMENT, lexer.range('"', false, false)))
30 | 
31 | -- Numbers.
32 | lex:add_rule('number', token(lexer.NUMBER, lexer.number))
33 | 
34 | -- Operators.
35 | lex:add_rule('operator', token(lexer.OPERATOR, S(':=_<>+-/*!()[]')))
36 | 
37 | -- Labels.
38 | lex:add_rule('label', token(lexer.LABEL, '#' * lexer.word))
39 | 
40 | -- Fold points.
41 | lex:add_fold_point(lexer.OPERATOR, '[', ']') 42 | 43 | lexer.property['scintillua.comment'] = '"|"' 44 | 45 | return lex 46 | -------------------------------------------------------------------------------- /lua/themes/solarized.lua: -------------------------------------------------------------------------------- 1 | -- Solarized color codes Copyright (c) 2011 Ethan Schoonover 2 | local M = {} 3 | 4 | local colors = { 5 | base03 = '#002b36', 6 | base02 = '#073642', 7 | base01 = '#586e75', 8 | base00 = '#657b83', 9 | base0 = '#839496', 10 | base1 = '#93a1a1', 11 | base2 = '#eee8d5', 12 | base3 = '#fdf6e3', 13 | yellow = '#b58900', 14 | orange = '#cb4b16', 15 | red = '#dc322f', 16 | magenta = '#d33682', 17 | violet = '#6c71c4', 18 | blue = '#268bd2', 19 | cyan = '#2aa198', 20 | green = '#859900', 21 | } 22 | 23 | M.theme = { 24 | ['default'] = colors.base0, 25 | ['nothing'] = '', 26 | ['class'] = colors.yellow, 27 | ['comment'] = colors.base01, 28 | ['constant'] = colors.cyan, 29 | ['definition'] = colors.blue, 30 | ['error'] = colors.red, 31 | ['function'] = colors.blue, 32 | ['keyword'] = colors.green, 33 | ['label'] = colors.green, 34 | ['number'] = colors.cyan, 35 | ['operator'] = colors.base01, 36 | ['regex'] = colors.green, 37 | ['string'] = colors.green, 38 | ['preprocessor'] = colors.orange, 39 | ['tag'] = colors.red, 40 | ['type'] = colors.yellow, 41 | ['variable'] = colors.blue, 42 | ['whitespace'] = '', 43 | ['embedded'] = colors.blue, 44 | ['identifier'] = colors.base0, 45 | } 46 | return M 47 | -------------------------------------------------------------------------------- /lua/lexers/gap.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Gap LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('gap') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'and', 'break', 'continue', 'do', 'elif', 'else', 'end', 'fail', 'false', 15 | 'fi', 'for', 'function', 'if', 'in', 'infinity', 'local', 'not', 'od', 'or', 16 | 'rec', 'repeat', 'return', 'then', 'true', 'until', 'while' 17 | })) 18 | 19 | -- Identifiers. 20 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 21 | 22 | -- Strings. 23 | local sq_str = lexer.range("'", true) 24 | local dq_str = lexer.range('"', true) 25 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 26 | 27 | -- Comments. 28 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) 29 | 30 | -- Numbers. 31 | lex:add_rule('number', token(lexer.NUMBER, lexer.dec_num * -lexer.alpha)) 32 | 33 | -- Operators. 34 | lex:add_rule('operator', token(lexer.OPERATOR, S('*+-,./:;<=>~^#()[]{}'))) 35 | 36 | -- Fold points. 
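37 | -- A fold point pairs an opening and closing token so an editor can collapse
38 | -- the region between them, e.g. a 'function' ... 'end' block.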
39 | lex:add_fold_point(lexer.KEYWORD, 'function', 'end')
40 | lex:add_fold_point(lexer.KEYWORD, 'do', 'od')
41 | lex:add_fold_point(lexer.KEYWORD, 'if', 'fi')
42 | lex:add_fold_point(lexer.KEYWORD, 'repeat', 'until')
43 | 
44 | lexer.property['scintillua.comment'] = '#'
45 | 
46 | return lex
47 | 
--------------------------------------------------------------------------------
/lua/color_theme.lua:
--------------------------------------------------------------------------------
1 | local ansi_codes = require('ansi_codes')
2 | local style = {}
3 | local ansi_colors = {}
4 | local theme_escape_codes = {}
5 | 
6 | -- Build the escape-code table for a theme: a table value is treated as a
7 | -- named 4-bit ANSI color, a '#rrggbb' string as a 24-bit truecolor value.
8 | function init_style(theme)
9 |     for token, color in pairs(theme) do
10 |         if type(color) == 'table' then
11 |             theme_escape_codes[token] = ansi_codes.ansi_string_4b(color)
12 |         elseif color:sub(1, 1) == '#' then
13 |             local r, g, b = ansi_codes.hex_to_rgb(color)
14 |             theme_escape_codes[token] = ansi_codes.ansi_string_24b(r, g, b)
15 |         end
16 |     end
17 | end
18 | 
19 | local line_highlight_style = {
20 |     ['default'] = ansi_colors.black,
21 |     ['nothing'] = '',
22 |     ['class'] = ansi_colors.black,
23 |     ['comment'] = ansi_colors.black,
24 |     ['constant'] = ansi_colors.black,
25 |     ['definition'] = ansi_colors.black,
26 |     ['error'] = ansi_colors.black,
27 |     ['function'] = ansi_colors.black,
28 |     ['keyword'] = ansi_colors.black,
29 |     ['label'] = ansi_colors.black,
30 |     ['number'] = ansi_colors.black,
31 |     ['operator'] = ansi_colors.black,
32 |     ['regex'] = ansi_colors.black,
33 |     ['string'] = ansi_colors.black,
34 |     ['preprocessor'] = ansi_colors.black,
35 |     ['tag'] = ansi_colors.black,
36 |     ['type'] = ansi_colors.black,
37 |     ['variable'] = ansi_colors.black,
38 |     ['whitespace'] = '',
39 |     ['embedded'] = ansi_colors.black,
40 |     ['identifier'] = ansi_colors.black
41 | }
42 | 
43 | style.line_highlight_style = line_highlight_style
44 | style.theme = theme_escape_codes
45 | style.init_style = init_style
46 | 
47 | return style
48 | 
--------------------------------------------------------------------------------
/lua/lexers/elm.lua:
--------------------------------------------------------------------------------
1 | -- Copyright 2020-2023 Mitchell. See LICENSE.
2 | -- Elm LPeg lexer
3 | -- Adapted from Haskell LPeg lexer by Karl Schultheisz.
4 | local lexer = require('lexer')
5 | local token, word_match = lexer.token, lexer.word_match
6 | local P, S = lpeg.P, lpeg.S
7 | 
8 | local lex = lexer.new('elm', {fold_by_indentation = true})
9 | 
10 | -- Whitespace.
11 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1))
12 | 
13 | -- Keywords.
14 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match(
15 |                                  'if then else case of let in module import as exposing type alias port')))
16 | 
17 | -- Types & type constructors.
18 | local word = (lexer.alnum + S("._'#")) ^ 0
19 | local op = lexer.punct - S('()[]{}')
20 | lex:add_rule('type',
21 |              token(lexer.TYPE, lexer.upper * word + ':' * (op ^ 1 - ':')))
22 | 
23 | -- Identifiers.
24 | lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + '_') * word))
25 | 
26 | -- Strings.
27 | lex:add_rule('string', token(lexer.STRING, lexer.range('"')))
28 | 
29 | -- Chars.
30 | lex:add_rule('character', token(lexer.STRING, lexer.range("'", true)))
31 | 
32 | -- Comments.
33 | local line_comment = lexer.to_eol('--', true)
34 | local block_comment = lexer.range('{-', '-}', false, false, true)
35 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
36 | 
37 | -- Numbers.
38 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 39 | 40 | -- Operators. 41 | lex:add_rule('operator', token(lexer.OPERATOR, op)) 42 | 43 | lexer.property['scintillua.comment'] = '--' 44 | 45 | return lex 46 | -------------------------------------------------------------------------------- /lua/lexers/dockerfile.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2016-2023 Alejandro Baez (https://keybase.io/baez). See LICENSE. 2 | -- Dockerfile LPeg lexer. 3 | local lexer = lexer 4 | local P, S, B = lpeg.P, lpeg.S, lpeg.B 5 | 6 | local lex = lexer.new(..., {fold_by_indentation = true}) 7 | 8 | -- Keywords. 9 | local keyword = lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD)) 10 | lex:add_rule('keyword', keyword) 11 | 12 | -- Identifiers. 13 | lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) 14 | 15 | -- Variable. 16 | lex:add_rule('variable', -B('\\') * lex:tag(lexer.OPERATOR, '$' * P('{') ^ -1) * 17 | lex:tag(lexer.VARIABLE, lexer.word)) 18 | 19 | -- Strings. 20 | local sq_str = lexer.range("'", false, false) 21 | local dq_str = lexer.range('"') 22 | lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str)) 23 | 24 | -- Comments. 25 | lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#'))) 26 | 27 | -- Numbers. 28 | lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) 29 | 30 | -- Operators. 31 | lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('\\[],=:{}'))) 32 | 33 | local bash = lexer.load('bash') 34 | local start_rule = #P('RUN') * keyword * bash:get_rule('whitespace') 35 | local end_rule = -B('\\') * #lexer.newline * lex:get_rule('whitespace') 36 | lex:embed(bash, start_rule, end_rule) 37 | 38 | -- Word lists. 39 | lex:set_word_list(lexer.KEYWORD, { 40 | 'ADD', 'ARG', 'CMD', 'COPY', 'ENTRYPOINT', 'ENV', 'EXPOSE', 'FROM', 'LABEL', 41 | 'MAINTAINER', 'ONBUILD', 'RUN', 'STOPSIGNAL', 'USER', 'VOLUME', 'WORKDIR' 42 | }) 43 | 44 | lexer.property['scintillua.comment'] = '#' 45 | 46 | return lex 47 | -------------------------------------------------------------------------------- /lua/lexers/toml.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2015-2023 Alejandro Baez (https://keybase.io/baez). See LICENSE. 2 | -- TOML LPeg lexer. 3 | local lexer = lexer 4 | local P, S = lpeg.P, lpeg.S 5 | 6 | local lex = lexer.new(..., {fold_by_indentation = true}) 7 | 8 | -- Keywords. 9 | lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lexer.word_match('true false'))) 10 | 11 | -- Identifiers. 12 | lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) 13 | 14 | -- Strings. 15 | local sq_str = lexer.range("'") 16 | local dq_str = lexer.range('"') 17 | lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str)) 18 | 19 | -- Comments. 20 | lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#'))) 21 | 22 | -- Operators. 23 | lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('=+-,.{}[]()'))) 24 | 25 | -- Datetime. 26 | local year = lexer.digit * lexer.digit * lexer.digit * lexer.digit 27 | local month = lexer.digit * lexer.digit ^ -1 28 | local day = lexer.digit * lexer.digit ^ -1 29 | local date = year * '-' * month * '-' * day 30 | local hours = lexer.digit * lexer.digit ^ -1 31 | local minutes = lexer.digit * lexer.digit 32 | local seconds = lexer.digit * lexer.digit 33 | local fraction = '.' 
* lexer.digit ^ 0 34 | local time = hours * ':' * minutes * ':' * seconds * fraction ^ -1 35 | local zone = 'Z' + S(' \t') ^ 0 * S('-+') * hours * (':' * minutes) ^ -1 36 | lex:add_rule('datetime', lex:tag(lexer.NUMBER .. '.timestamp', 37 | date * (S('tT \t') * time * zone ^ -1))) 38 | 39 | -- Numbers. 40 | lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) 41 | 42 | lexer.property['scintillua.comment'] = '#' 43 | 44 | return lex 45 | -------------------------------------------------------------------------------- /lua/lexers/haskell.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Haskell LPeg lexer. 3 | -- Modified by Alex Suraci. 4 | local lexer = require('lexer') 5 | local token, word_match = lexer.token, lexer.word_match 6 | local P, S = lpeg.P, lpeg.S 7 | 8 | local lex = lexer.new('haskell', {fold_by_indentation = true}) 9 | 10 | -- Whitespace. 11 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 12 | 13 | -- Keywords. 14 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 15 | 'case', 'class', 'data', 'default', 'deriving', 'do', 'else', 'if', 16 | 'import', 'in', 'infix', 'infixl', 'infixr', 'instance', 'let', 'module', 17 | 'newtype', 'of', 'then', 'type', 'where', '_', 'as', 'qualified', 'hiding' 18 | })) 19 | 20 | -- Types & type constructors. 21 | local word = (lexer.alnum + S("._'#")) ^ 0 22 | local op = lexer.punct - S('()[]{}') 23 | lex:add_rule('type', 24 | token(lexer.TYPE, (lexer.upper * word) + (':' * (op ^ 1 - ':')))) 25 | 26 | -- Identifiers. 27 | lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + '_') * word)) 28 | 29 | -- Strings. 30 | local sq_str = lexer.range("'", true) 31 | local dq_str = lexer.range('"') 32 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 33 | 34 | -- Comments. 35 | local line_comment = lexer.to_eol('--', true) 36 | local block_comment = lexer.range('{-', '-}') 37 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 38 | 39 | -- Numbers. 40 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 41 | 42 | -- Operators. 43 | lex:add_rule('operator', token(lexer.OPERATOR, op)) 44 | 45 | lexer.property['scintillua.comment'] = '--' 46 | 47 | return lex 48 | -------------------------------------------------------------------------------- /lua/lexers/desktop.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Desktop Entry LPeg lexer. 3 | local lexer = lexer 4 | local P, S = lpeg.P, lpeg.S 5 | 6 | local lex = lexer.new(...) 7 | 8 | -- Keys. 9 | lex:add_rule('key', lex:tag(lexer.VARIABLE_BUILTIN, 10 | lex:word_match(lexer.VARIABLE_BUILTIN))) 11 | 12 | -- Values. 13 | lex:add_rule('value', 14 | lex:tag(lexer.CONSTANT_BUILTIN, lexer.word_match('true false'))) 15 | 16 | -- Identifiers. 17 | lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, 18 | lexer.alpha * (lexer.alnum + S('_-')) ^ 0)) 19 | 20 | -- Group headers. 21 | local bracketed = lexer.range('[', ']') 22 | lex:add_rule('header', lexer.starts_line(lex:tag(lexer.HEADING, bracketed))) 23 | 24 | -- Locales. 25 | lex:add_rule('locale', lex:tag(lexer.TYPE, bracketed)) 26 | 27 | -- Strings. 28 | lex:add_rule('string', lex:tag(lexer.STRING, lexer.range('"'))) 29 | 30 | -- Comments. 31 | lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#'))) 32 | 33 | -- Numbers. 
34 | lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) 35 | 36 | -- Field codes. 37 | lex:add_rule('code', lex:tag(lexer.CONSTANT_BUILTIN, '%' * S('fFuUdDnNickvm'))) 38 | 39 | -- Operators. 40 | lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('='))) 41 | 42 | -- Word lists. 43 | lex:set_word_list(lexer.VARIABLE_BUILTIN, { 44 | 'Type', 'Version', 'Name', 'GenericName', 'NoDisplay', 'Comment', 'Icon', 45 | 'Hidden', 'OnlyShowIn', 'NotShowIn', 'TryExec', 'Exec', 'Exec', 'Path', 46 | 'Terminal', 'MimeType', 'Categories', 'StartupNotify', 'StartupWMClass', 47 | 'URL' 48 | }) 49 | 50 | lexer.property['scintillua.comment'] = '#' 51 | 52 | return lex 53 | -------------------------------------------------------------------------------- /cli.c: -------------------------------------------------------------------------------- 1 | #ifndef CLP_H 2 | #include "clp.h" 3 | #endif 4 | 5 | #define OPTPARSE_IMPLEMENTATION 6 | #include "optparse.h" 7 | // maybe we should make the opts set some flags in an options struct 8 | // and pass it to a func that sets up the lua accordingly 9 | int 10 | main(int argc, char *argv[]) 11 | { 12 | struct clp_ctx ctx; 13 | clp_init(&ctx); 14 | struct optparse_long longopts[] = { 15 | {"highlight-line", 'h', OPTPARSE_REQUIRED}, 16 | {"override-filetype", 't', OPTPARSE_REQUIRED}, 17 | {"list-overrides", 'l', OPTPARSE_NONE}, 18 | {"override-colortheme", 's', OPTPARSE_REQUIRED}, 19 | {0}}; 20 | 21 | int option = 0; 22 | char *filename = ""; 23 | struct optparse options; 24 | optparse_init(&options, argv); 25 | while ((option = optparse_long(&options, longopts, NULL)) != -1) { 26 | switch (option) { 27 | case 'l': 28 | ctx.program_opts.print_available_overrides = true; 29 | break; 30 | case 't': 31 | ctx.program_opts.filetype_override = options.optarg; 32 | break; 33 | case 'h': 34 | ctx.program_opts.highlight_line = atoi(options.optarg); 35 | break; 36 | case 's': 37 | ctx.program_opts.color_theme_override = options.optarg; 38 | break; 39 | default: 40 | usage(); 41 | exit(1); 42 | } 43 | } 44 | 45 | filename = optparse_arg(&options); 46 | 47 | struct stat buf; 48 | 49 | if (!ctx.program_opts.print_available_overrides && 50 | clp_open_file(&ctx, &buf, filename)) { 51 | return 1; 52 | } 53 | 54 | clp_run(&ctx); 55 | clp_cleanup(&ctx); 56 | return 0; 57 | } 58 | -------------------------------------------------------------------------------- /lua/lexers/faust.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2015-2023 David B. Lamkins . See LICENSE. 2 | -- Faust LPeg lexer, see http://faust.grame.fr/ 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('faust') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'declare', 'import', 'mdoctags', 'dependencies', 'distributed', 'inputs', 15 | 'outputs', 'par', 'seq', 'sum', 'prod', 'xor', 'with', 'environment', 16 | 'library', 'component', 'ffunction', 'fvariable', 'fconstant', 'int', 17 | 'float', 'case', 'waveform', 'h:', 'v:', 't:' 18 | })) 19 | 20 | -- Identifiers. 21 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 22 | 23 | -- Strings. 24 | lex:add_rule('string', token(lexer.STRING, lexer.range('"', true))) 25 | 26 | -- Comments. 
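27 | -- Both C-style comment forms apply here; <mdoc> documentation blocks are
28 | -- not comments but are matched by the 'pragma' rule further down.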
29 | local line_comment = lexer.to_eol('//')
30 | local block_comment = lexer.range('/*', '*/')
31 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
32 | 
33 | -- Numbers.
34 | local int = lexer.digit ^ 1
35 | local rad = P('.')
36 | local exp = (P('e') * S('+-') ^ -1 * int) ^ -1
37 | local flt = int * (rad * int) ^ -1 * exp + int ^ -1 * rad * int * exp
38 | lex:add_rule('number', token(lexer.NUMBER, flt + int))
39 | 
40 | -- Pragmas.
41 | lex:add_rule('pragma',
42 |              token(lexer.PREPROCESSOR, lexer.range('<mdoc>', '</mdoc>')))
43 | 
44 | -- Operators.
45 | lex:add_rule('operator',
46 |              token(lexer.OPERATOR, S('+-/*%<>~!=^&|?~:;,.()[]{}@#$`\\\'')))
47 | 
48 | lexer.property['scintillua.comment'] = '//'
49 | 
50 | return lex
51 | 
--------------------------------------------------------------------------------
/lua/lexers/io_lang.lua:
--------------------------------------------------------------------------------
1 | -- Copyright 2006-2023 Mitchell. See LICENSE.
2 | -- Io LPeg lexer.
3 | local lexer = require('lexer')
4 | local token, word_match = lexer.token, lexer.word_match
5 | local P, S = lpeg.P, lpeg.S
6 | 
7 | local lex = lexer.new('io_lang')
8 | 
9 | -- Whitespace.
10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1))
11 | 
12 | -- Keywords.
13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match {
14 |     'block', 'method', 'while', 'foreach', 'if', 'else', 'do', 'super', 'self',
15 |     'clone', 'proto', 'setSlot', 'hasSlot', 'type', 'write', 'print', 'forward'
16 | }))
17 | 
18 | -- Types.
19 | lex:add_rule('type', token(lexer.TYPE, word_match {
20 |     'Block', 'Buffer', 'CFunction', 'Date', 'Duration', 'File', 'Future',
21 |     'LinkedList', 'List', 'Map', 'Message', 'Nil', 'Nop', 'Number', 'Object',
22 |     'String', 'WeakLink'
23 | }))
24 | 
25 | -- Identifiers.
26 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
27 | 
28 | -- Strings.
29 | local sq_str = lexer.range("'")
30 | local dq_str = lexer.range('"')
31 | local tq_str = lexer.range('"""')
32 | lex:add_rule('string', token(lexer.STRING, tq_str + sq_str + dq_str))
33 | 
34 | -- Comments.
35 | local line_comment = lexer.to_eol(P('#') + '//')
36 | local block_comment = lexer.range('/*', '*/')
37 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
38 | 
39 | -- Numbers.
40 | lex:add_rule('number', token(lexer.NUMBER, lexer.number))
41 | 
42 | -- Operators.
43 | lex:add_rule('operator', token(lexer.OPERATOR, S('`~@$%^&*-+/=\\<>?.,:;()[]{}')))
44 | 
45 | -- Fold points.
46 | lex:add_fold_point(lexer.OPERATOR, '(', ')')
47 | lex:add_fold_point(lexer.COMMENT, '/*', '*/')
48 | 
49 | lexer.property['scintillua.comment'] = '#'
50 | 
51 | return lex
52 | 
--------------------------------------------------------------------------------
/lua/lexers/ps.lua:
--------------------------------------------------------------------------------
1 | -- Copyright 2006-2023 Mitchell. See LICENSE.
2 | -- Postscript LPeg lexer.
3 | local lexer = require('lexer')
4 | local token, word_match = lexer.token, lexer.word_match
5 | local P, S = lpeg.P, lpeg.S
6 | 
7 | local lex = lexer.new('ps')
8 | 
9 | -- Whitespace.
10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1))
11 | 
12 | -- Keywords.
13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'pop', 'exch', 'dup', 'copy', 'roll', 'clear', 'count', 'mark', 15 | 'cleartomark', 'counttomark', 'exec', 'if', 'ifelse', 'for', 'repeat', 16 | 'loop', 'exit', 'stop', 'stopped', 'countexecstack', 'execstack', 'quit', 17 | 'start', 'true', 'false', 'NULL' 18 | })) 19 | 20 | -- Functions. 21 | lex:add_rule('function', token(lexer.FUNCTION, word_match { 22 | 'add', 'div', 'idiv', 'mod', 'mul', 'sub', 'abs', 'neg', 'ceiling', 'floor', 23 | 'round', 'truncate', 'sqrt', 'atan', 'cos', 'sin', 'exp', 'ln', 'log', 24 | 'rand', 'srand', 'rrand' 25 | })) 26 | 27 | -- Identifiers. 28 | local word = (lexer.alpha + '-') * (lexer.alnum + '-') ^ 0 29 | lex:add_rule('identifier', token(lexer.IDENTIFIER, word)) 30 | 31 | -- Strings. 32 | local arrow_string = lexer.range('<', '>') 33 | local nested_string = lexer.range('(', ')', false, false, true) 34 | lex:add_rule('string', token(lexer.STRING, arrow_string + nested_string)) 35 | 36 | -- Comments. 37 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('%'))) 38 | 39 | -- Numbers. 40 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 41 | 42 | -- Labels. 43 | lex:add_rule('label', token(lexer.LABEL, '/' * word)) 44 | 45 | -- Operators. 46 | lex:add_rule('operator', token(lexer.OPERATOR, S('[]{}'))) 47 | 48 | lexer.property['scintillua.comment'] = '%' 49 | 50 | return lex 51 | -------------------------------------------------------------------------------- /lua/lexers/protobuf.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2016-2023 David B. Lamkins. See LICENSE. 2 | -- Protocol Buffer IDL LPeg lexer. 3 | -- 4 | local lexer = require('lexer') 5 | local token, word_match = lexer.token, lexer.word_match 6 | local P, S = lpeg.P, lpeg.S 7 | 8 | local lex = lexer.new('protobuf') 9 | 10 | -- Whitespace. 11 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 12 | 13 | -- Keywords. 14 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 15 | 'contained', 'syntax', 'import', 'option', 'package', 'message', 'group', 16 | 'oneof', 'optional', 'required', 'repeated', 'default', 'extend', 17 | 'extensions', 'to', 'max', 'reserved', 'service', 'rpc', 'returns' 18 | })) 19 | 20 | -- Types. 21 | lex:add_rule('type', token(lexer.TYPE, word_match { 22 | 'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64', 'fixed32', 23 | 'fixed64', 'sfixed32', 'sfixed64', 'float', 'double', 'bool', 'string', 24 | 'bytes', 'enum', 'true', 'false' 25 | })) 26 | 27 | -- Strings. 28 | local sq_str = P('L') ^ -1 * lexer.range("'", true) 29 | local dq_str = P('L') ^ -1 * lexer.range('"', true) 30 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 31 | 32 | -- Identifiers. 33 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 34 | 35 | -- Comments. 36 | local line_comment = lexer.to_eol('//', true) 37 | local block_comment = lexer.range('/*', '*/') 38 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 39 | 40 | -- Numbers. 41 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 42 | 43 | -- Operators.
44 | lex:add_rule('operator', token(lexer.OPERATOR, S('<>=|;,.()[]{}'))) 45 | 46 | lexer.property['scintillua.comment'] = '//' 47 | 48 | return lex 49 | -------------------------------------------------------------------------------- /lua/lexers/ledger.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2015-2023 Charles Lehner. See LICENSE. 2 | -- ledger journal LPeg lexer, see http://www.ledger-cli.org/ 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('ledger', {lex_by_line = true}) 8 | 9 | local delim = P('\t') + P(' ') 10 | 11 | -- Account. 12 | lex:add_rule('account', token(lexer.VARIABLE, 13 | lexer.starts_line(S(' \t') ^ 1 * lexer.graph ^ 1))) 14 | 15 | -- Amount. 16 | lex:add_rule('amount', token(lexer.NUMBER, delim * (1 - S(';\r\n')) ^ 1)) 17 | 18 | -- Comments. 19 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol(S(';#')))) 20 | 21 | -- Whitespace. 22 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 23 | 24 | -- Strings. 25 | local sq_str = lexer.range("'") 26 | local dq_str = lexer.range('"') 27 | local label = lexer.range('[', ']', true) 28 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + label)) 29 | 30 | -- Date. 31 | lex:add_rule('date', token(lexer.CONSTANT, 32 | lexer.starts_line((lexer.digit + S('/-')) ^ 1))) 33 | 34 | -- Automated transactions. 35 | lex:add_rule('auto_tx', token(lexer.PREPROCESSOR, 36 | lexer.to_eol(lexer.starts_line(S('=~'))))) 37 | 38 | -- Directives. 39 | local directive_word = word_match { 40 | 'account', 'alias', 'assert', 'bucket', 'capture', 'check', 'comment', 41 | 'commodity', 'define', 'end', 'fixed', 'endfixed', 'include', 'payee', 42 | 'apply', 'tag', 'test', 'year' 43 | } + S('AYNDCIiOobh') 44 | lex:add_rule('directive', token(lexer.KEYWORD, lexer.starts_line( 45 | S('!@') ^ -1 * directive_word))) 46 | 47 | lexer.property['scintillua.comment'] = '#' 48 | 49 | return lex 50 | -------------------------------------------------------------------------------- /lua/lexers/tcl.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2014-2023 Joshua Krämer. See LICENSE. 2 | -- Tcl LPeg lexer. 3 | -- This lexer follows the TCL dodekalogue (http://wiki.tcl.tk/10259). 4 | -- It is based on the previous lexer by Mitchell. 5 | local lexer = require('lexer') 6 | local token, word_match = lexer.token, lexer.word_match 7 | local P, S = lpeg.P, lpeg.S 8 | 9 | local lex = lexer.new('tcl') 10 | 11 | -- Whitespace. 12 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 13 | 14 | -- Comment. 15 | lex:add_rule('comment', token(lexer.COMMENT, 16 | lexer.to_eol( 17 | '#' * P( 18 | function(input, index) 19 | local i = index - 2 20 | while i > 0 and input:find('^[ \t]', i) do i = i - 1 end 21 | if i < 1 or input:find('^[\r\n;]', i) then return true end 22 | end)))) 23 | 24 | -- Separator (semicolon). 25 | lex:add_rule('separator', token(lexer.CLASS, ';')) 26 | 27 | -- Argument expander. 28 | lex:add_rule('expander', token(lexer.LABEL, '{*}')) 29 | 30 | -- Delimiters. 31 | lex:add_rule('braces', token(lexer.KEYWORD, S('{}'))) 32 | lex:add_rule('quotes', token(lexer.FUNCTION, '"')) 33 | lex:add_rule('brackets', token(lexer.VARIABLE, S('[]'))) 34 | 35 | -- Variable substitution.
36 | lex:add_rule('variable', 37 | token(lexer.STRING, '$' * (lexer.alnum + '_' + P(':') ^ 2) ^ 0)) 38 | 39 | -- Backslash substitution. 40 | local oct = lexer.digit * lexer.digit ^ -2 41 | local hex = 'x' * lexer.xdigit ^ 1 42 | local unicode = 'u' * lexer.xdigit * lexer.xdigit ^ -3 43 | lex:add_rule('backslash', token(lexer.TYPE, '\\' * (oct + hex + unicode + 1))) 44 | 45 | -- Fold points. 46 | lex:add_fold_point(lexer.KEYWORD, '{', '}') 47 | 48 | lexer.property['scintillua.comment'] = '#' 49 | 50 | return lex 51 | -------------------------------------------------------------------------------- /man/clp.1: -------------------------------------------------------------------------------- 1 | .TH CLP 1 2 | .SH NAME 3 | clp \- print files to stdout with syntax highlighting 4 | .SH SYNOPSIS 5 | .B clp 6 | [\fIoptions\fR] 7 | file 8 | .SH DESCRIPTION 9 | .B clp 10 | reads an input file, determines its language from its filetype, and writes it to stdout with syntax highlighting. 11 | .SH OPTIONS 12 | .HP 13 | \fB\-l\fR, \fB\-\-list\-overrides\fR 14 | .IP 15 | List the available overrides for the options below. 16 | .HP 17 | \fB\-t\fR, \fB\-\-override\-filetype\fR {filetype} 18 | .IP 19 | Force a language's syntax for highlighting the file. To see available filetypes, run \fBclp \-\-list\-overrides\fR. 20 | .HP 21 | \fB\-s\fR, \fB\-\-override\-colortheme\fR {colortheme} 22 | .IP 23 | Override the color theme defined in \fBclprc.lua\fR. 24 | .HP 25 | \fB\-h\fR, \fB\-\-highlight\-line\fR {number} 26 | .IP 27 | Highlight the given line, if it is non-blank. 28 | .SH FILES 29 | Upon startup, 30 | \fBclp\fR 31 | will source the first \fBclprc.lua\fR configuration file found in these locations: 32 | .PP 33 | .IP \[bu] 2 34 | \fB$CLP_PATH\fR if set, or \fB/usr/local/share/clp\fR if not 35 | .IP \[bu] 2 36 | The source directory where the project was built (when built from source) 37 | .IP \[bu] 2 38 | \fB$XDG_CONFIG_HOME/clp\fR, or \fB$HOME/.config/clp\fR if \fBXDG_CONFIG_HOME\fR is not set 39 | .HP 40 | When creating a new \fBclprc.lua\fR, be sure to copy the structure of the default file. 41 | .SH CONFIGURATION 42 | \fBclp\fR is configured by the \fBclprc.lua\fR file described in the \fBFILES\fR section. 43 | .HP 44 | To change your colorscheme, set \fBclprc.theme = "{colorscheme name}"\fR in \fBclprc.lua\fR. A complete list of built-in colorschemes is available at \fBhttps://sr.ht/~eskin/clp/#setting-your-colorscheme\fR 45 | .HP 46 | Additional colorschemes can be installed to a "themes" folder in your clp configuration folder. For more information, see the README at \fBhttps://sr.ht/~eskin/clp/#configuration\fR 47 | 48 | 49 | -------------------------------------------------------------------------------- /lua/lexers/apl.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2015-2023 David B. Lamkins. See LICENSE. 2 | -- APL LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('apl') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Comments. 13 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol(P('⍝') + '#'))) 14 | 15 | -- Strings. 16 | local sq_str = lexer.range("'", false, false) 17 | local dq_str = lexer.range('"') 18 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 19 | 20 | -- Numbers.
21 | local dig = lexer.digit 22 | local rad = P('.') 23 | local exp = S('eE') 24 | local img = S('jJ') 25 | local sgn = P('¯') ^ -1 26 | local float = sgn * 27 | (dig ^ 0 * rad * dig ^ 1 + dig ^ 1 * rad * dig ^ 0 + dig ^ 1) * 28 | (exp * sgn * dig ^ 1) ^ -1 29 | lex:add_rule('number', token(lexer.NUMBER, float * img * float + float)) 30 | 31 | -- Keywords. 32 | lex:add_rule('keyword', 33 | token(lexer.KEYWORD, 34 | P('⍞') + 'χ' + '⍺' + '⍶' + '⍵' + '⍹' + '⎕' * 35 | lexer.alpha ^ 0)) 36 | 37 | -- Names. 38 | local n1l = lexer.alpha 39 | local n1b = P('_') + '∆' + '⍙' 40 | local n2l = n1l + lexer.digit 41 | local n2b = n1b + '¯' 42 | local n1 = n1l + n1b 43 | local n2 = n2l + n2b 44 | local name = n1 * n2 ^ 0 45 | 46 | -- Labels. 47 | lex:add_rule('label', token(lexer.LABEL, name * ':')) 48 | 49 | -- Variables. 50 | lex:add_rule('variable', token(lexer.VARIABLE, name)) 51 | 52 | -- Special. 53 | lex:add_rule('special', token(lexer.TYPE, S('{}[]();') + '←' + '→' + '◊')) 54 | 55 | -- Nabla. 56 | lex:add_rule('nabla', token(lexer.PREPROCESSOR, P('∇') + '⍫')) 57 | 58 | lexer.property['scintillua.comment'] = '#' 59 | 60 | return lex 61 | -------------------------------------------------------------------------------- /lua/lexers/rstats.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- R LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('rstats') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'break', 'else', 'for', 'if', 'in', 'next', 'repeat', 'return', 'switch', 15 | 'try', 'while', -- 16 | 'Inf', 'NA', 'NaN', 'NULL', 'FALSE', 'TRUE', 'F', 'T', 17 | -- Frequently used operators. 18 | '|>', '%%', '%*%', '%/%', '%in%', '%o%', '%x%' 19 | })) 20 | 21 | -- Types. 22 | lex:add_rule('type', token(lexer.TYPE, word_match { 23 | 'array', 'character', 'closure', 'complex', 'data.frame', 'double', 24 | 'environment', 'expression', 'externalptr', 'factor', 'function', 'integer', 25 | 'list', 'logical', 'matrix', 'numeric', 'pairlist', 'promise', 'raw', 26 | 'symbol', 'vector' 27 | })) 28 | 29 | -- Identifiers. 30 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 31 | 32 | -- Strings. 33 | local sq_str = lexer.range("'", true) 34 | local dq_str = lexer.range('"', true) 35 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 36 | 37 | -- Comments. 38 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) 39 | 40 | -- Numbers. 41 | lex:add_rule('number', 42 | token(lexer.NUMBER, (lexer.number * P('i') ^ -1) * P('L') ^ -1)) 43 | 44 | -- Operators. 45 | lex:add_rule('operator', token(lexer.OPERATOR, S('<->+*/^=.,:;|$()[]{}'))) 46 | 47 | -- Folding 48 | lex:add_fold_point(lexer.OPERATOR, '(', ')') 49 | lex:add_fold_point(lexer.OPERATOR, '[', ']') 50 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 51 | 52 | lexer.property['scintillua.comment'] = '#' 53 | 54 | return lex 55 | -------------------------------------------------------------------------------- /lua/lexers/bibtex.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Bibtex LPeg lexer. 3 | local lexer = lexer 4 | local P, S = lpeg.P, lpeg.S 5 | 6 | local lex = lexer.new(...) 
7 | 8 | -- Fields. 9 | lex:add_rule('field', lex:tag(lexer.VARIABLE_BUILTIN, 10 | lex:word_match(lexer.VARIABLE_BUILTIN, true))) 11 | 12 | -- Identifiers. 13 | lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) 14 | 15 | -- Strings. 16 | local dq_str = lexer.range('"') 17 | local br_str = lexer.range('{', '}', false, false, true) 18 | lex:add_rule('string', lex:tag(lexer.STRING, dq_str + br_str)) 19 | 20 | -- Operators. 21 | lex:add_rule('operator', lex:tag(lexer.OPERATOR, S(',='))) 22 | 23 | -- Embedded in Latex. 24 | local latex = lexer.load('latex') 25 | 26 | -- Embedded Bibtex. 27 | local entry = lex:tag(lexer.PREPROCESSOR, '@' * lex:word_match('entry', true)) 28 | local bibtex_start_rule = entry * lex:get_rule('whitespace') ^ 0 * 29 | lex:tag(lexer.OPERATOR, '{') 30 | local bibtex_end_rule = lex:tag(lexer.OPERATOR, '}') 31 | latex:embed(lex, bibtex_start_rule, bibtex_end_rule) 32 | 33 | -- Word lists. 34 | lex:set_word_list(lexer.VARIABLE_BUILTIN, { 35 | 'author', 'title', 'journal', 'year', 'volume', 'number', 'pages', 'month', 36 | 'note', 'key', 'publisher', 'editor', 'series', 'address', 'edition', 37 | 'howpublished', 'booktitle', 'organization', 'chapter', 'school', 38 | 'institution', 'type', 'isbn', 'issn', 'affiliation', 'issue', 'keyword', 39 | 'url' 40 | }) 41 | 42 | lex:set_word_list('entry', { 43 | 'string', -- 44 | 'book', 'article', 'booklet', 'conference', 'inbook', 'incollection', 45 | 'inproceedings', 'manual', 'mastersthesis', 'misc', 'phdthesis', 46 | 'proceedings', 'techreport', 'unpublished' 47 | }) 48 | 49 | lexer.property['scintillua.comment'] = '%' 50 | 51 | return lex 52 | -------------------------------------------------------------------------------- /lua/lexers/rc.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2017-2023 Michael Forney. See LICENSE. 2 | -- rc LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('rc') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'for', 'in', 'while', 'if', 'not', 'switch', 'case', 'fn', 'builtin', 'cd', 15 | 'eval', 'exec', 'exit', 'flag', 'rfork', 'shift', 'ulimit', 'umask', 'wait', 16 | 'whatis', '.', '~' 17 | })) 18 | 19 | -- Identifiers. 20 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 21 | 22 | -- Strings. 23 | local str = lexer.range("'", false, false) 24 | local heredoc = '<<' * P(function(input, index) 25 | local s, e, _, delimiter = input:find( 26 | '[ \t]*(["\']?)([%w!"%%+,-./:?@_~]+)%1', 27 | index) 28 | if s == index and delimiter then 29 | delimiter = delimiter:gsub('[%%+-.?]', '%%%1') 30 | e = select(2, input:find('[\n\r]' .. delimiter .. '[\n\r]', e)) 31 | return e and e + 1 or #input + 1 32 | end 33 | end) 34 | lex:add_rule('string', token(lexer.STRING, str + heredoc)) 35 | 36 | -- Comments. 37 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) 38 | 39 | -- Numbers. 40 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 41 | 42 | -- Variables. 43 | lex:add_rule('variable', token(lexer.VARIABLE, '$' * S('"#') ^ -1 * 44 | ('*' + lexer.digit ^ 1 + lexer.word))) 45 | 46 | -- Operators. 47 | lex:add_rule('operator', token(lexer.OPERATOR, S('@`=!<>*&^|;?()[]{}') + '\\\n')) 48 | 49 | -- Fold points.
50 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 51 | 52 | lexer.property['scintillua.comment'] = '#' 53 | 54 | return lex 55 | -------------------------------------------------------------------------------- /lua/lexers/pure.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2015-2023 David B. Lamkins. See LICENSE. 2 | -- pure LPeg lexer, see http://purelang.bitbucket.org/ 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('pure') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'namespace', 'with', 'end', 'using', 'interface', 'extern', 'let', 'const', 15 | 'def', 'type', 'public', 'private', 'nonfix', 'outfix', 'infix', 'infixl', 16 | 'infixr', 'prefix', 'postfix', 'if', 'otherwise', 'when', 'case', 'of', 17 | 'then', 'else' 18 | })) 19 | 20 | -- Identifiers. 21 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 22 | 23 | -- Strings. 24 | lex:add_rule('string', token(lexer.STRING, lexer.range('"', true))) 25 | 26 | -- Comments. 27 | local line_comment = lexer.to_eol('//') 28 | local block_comment = lexer.range('/*', '*/') 29 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 30 | 31 | -- Numbers. 32 | local bin = '0' * S('Bb') * S('01') ^ 1 33 | local hex = lexer.hex_num 34 | local dec = lexer.dec_num 35 | local int = (bin + hex + dec) * P('L') ^ -1 36 | local rad = P('.') - '..' 37 | local exp = (S('Ee') * S('+-') ^ -1 * int) ^ -1 38 | local flt = int * (rad * dec) ^ -1 * exp + int ^ -1 * rad * dec * exp 39 | lex:add_rule('number', token(lexer.NUMBER, flt + int)) 40 | 41 | -- Pragmas. 42 | local hashbang = lexer.starts_line('#!') * (lexer.nonnewline - '//') ^ 0 43 | lex:add_rule('pragma', token(lexer.PREPROCESSOR, hashbang)) 44 | 45 | -- Operators. 46 | lex:add_rule('operator', token(lexer.OPERATOR, 47 | '..' + S('+-/*%<>~!=^&|?~:;,.()[]{}@#$`\\\''))) 48 | 49 | lexer.property['scintillua.comment'] = '//' 50 | 51 | return lex 52 | -------------------------------------------------------------------------------- /lua/lexers/fennel.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Fennel LPeg lexer. 3 | -- Contributed by Momohime Honda. 4 | local lexer = require('lexer') 5 | local token, word_match = lexer.token, lexer.word_match 6 | local P, S = lpeg.P, lpeg.S 7 | 8 | local lex = lexer.new('fennel', {inherit = lexer.load('lua')}) 9 | 10 | -- Whitespace. 11 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 12 | 13 | -- Keywords.
14 | lex:modify_rule('keyword', token(lexer.KEYWORD, word_match { 15 | '#', '%', '*', '+', '-', '->>', '->', '-?>>', '-?>', '..', '.', '//', '/', 16 | ':', '<=', '<', '=', '>=', '>', '?.', '^', '~=', 'λ', 'accumulate', 'and', 17 | 'band', 'bnot', 'bor', 'bxor', 'collect', 'comment', 'do', 'doto', 'each', 18 | 'eval-compiler', 'fn', 'for', 'global', 'hashfn', 'icollect', 'if', 19 | 'import-macros', 'include', 'lambda', 'length', 'let', 'local', 'lshift', 20 | 'lua', 'macro', 'macrodebug', 'macros', 'match', 'not', 'not=', 'or', 21 | 'partial', 'pick-args', 'pick-values', 'quote', 'require-macros', 'rshift', 22 | 'set', 'set-forcibly!', 'tset', 'values', 'var', 'when', 'while', 23 | 'with-open' 24 | })) 25 | 26 | -- Identifiers. 27 | local initial = lexer.alpha + S('|$%&#*+-/<=>?~^_λ!') 28 | local subsequent = initial + lexer.digit 29 | lex:modify_rule('identifier', 30 | token(lexer.IDENTIFIER, initial * subsequent ^ 0 * P('#') ^ -1)) 31 | 32 | -- Strings. 33 | local dq_str = lexer.range('"') 34 | local kw_str = lpeg.B(1 - subsequent) * ':' * subsequent ^ 1 35 | lex:modify_rule('string', token(lexer.STRING, dq_str + kw_str)) 36 | 37 | -- Comments. 38 | lex:modify_rule('comment', token(lexer.COMMENT, lexer.to_eol(';'))) 39 | 40 | -- Ignore these rules. 41 | -- lex:modify_rule('longstring', P(false)) 42 | lex:modify_rule('label', P(false)) 43 | lex:modify_rule('operator', P(false)) 44 | 45 | lexer.property['scintillua.comment'] = ';' 46 | 47 | return lex 48 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | -include config.mk 2 | 3 | # CFLAGS = $(CFLAGS_DEBUG) 4 | CFLAGS += -Wall -pedantic 5 | CFLAGS += -I $(CURDIR)/include 6 | CFLAGS += $(CFLAGS_LUA) 7 | CFLAGS += -DCLP_PATH=\"${SHAREPREFIX}/clp\" 8 | CFLAGS += -DSRC_LUA_PATH=\"${ABS_SRCDIR}/lua\" 9 | LDFLAGS = $(LDFLAGS_LUA) 10 | 11 | SRC = clp.c 12 | ELF = clp 13 | TEST_SRC = tests/tests.c 14 | TEST_ELF = tests/tests 15 | 16 | all: $(ELF) 17 | 18 | config.mk: 19 | @touch $@ 20 | 21 | clp: config.mk clp.o cli.c 22 | $(CC) cli.c $(CFLAGS) $(LDFLAGS) clp.o -o clp 23 | 24 | $(TEST_ELF): clp.o $(TEST_SRC) 25 | $(CC) $(TEST_SRC) $(CFLAGS) $(LDFLAGS) $(CFLAGS_PCRE2) $(LDFLAGS_PCRE2) clp.o -o $(TEST_ELF) 26 | 27 | .PHONY: all tests 28 | 29 | tests: $(TEST_ELF) 30 | ./$(TEST_ELF) 31 | 32 | install: $(ELF) 33 | @echo installing executable files to ${DESTDIR}${PREFIX}/bin 34 | @mkdir -p ${DESTDIR}${PREFIX}/bin 35 | cp -f clp ${DESTDIR}${PREFIX}/bin && \ 36 | chmod 755 ${DESTDIR}${PREFIX}/bin/clp; 37 | echo installing support files to ${DESTDIR}${SHAREPREFIX}/clp; \ 38 | mkdir -p ${DESTDIR}${SHAREPREFIX}/clp; \ 39 | cp -r lua/* ${DESTDIR}${SHAREPREFIX}/clp; 40 | @echo installing manual pages to ${DESTDIR}${MANPREFIX}/man1 41 | @mkdir -p ${DESTDIR}${MANPREFIX}/man1 42 | sed -e "s/VERSION/${VERSION}/" < "man/clp.1" > \ 43 | "${DESTDIR}${MANPREFIX}/man1/clp.1" && \ 44 | chmod 644 "${DESTDIR}${MANPREFIX}/man1/clp.1"; \ 45 | 46 | uninstall: 47 | @echo removing executable file from ${DESTDIR}${PREFIX}/bin 48 | @rm -f ${DESTDIR}${PREFIX}/bin/clp 49 | @echo removing manual pages from ${DESTDIR}${MANPREFIX}/man1 50 | @rm -f ${DESTDIR}${MANPREFIX}/man1/clp.1 51 | @echo removing support files from ${DESTDIR}${SHAREPREFIX}/clp 52 | @rm -rf ${DESTDIR}${SHAREPREFIX}/clp 53 | 54 | clean: 55 | rm -f clp $(TEST_ELF) *.o 56 | 57 | clp.o: clp.c 58 | $(CC) -c clp.c $(CFLAGS) -o clp.o 59 |
-------------------------------------------------------------------------------- /lua/lexers/latex.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Latex LPeg lexer. 3 | -- Modified by Brian Schott. 4 | -- Modified by Robert Gieseke. 5 | local lexer = lexer 6 | local word_match = lexer.word_match 7 | local P, S = lpeg.P, lpeg.S 8 | 9 | local lex = lexer.new(...) 10 | 11 | -- Comments. 12 | local line_comment = lexer.to_eol('%') 13 | local block_comment = lexer.range('\\begin' * P(' ') ^ 0 * '{comment}', 14 | '\\end' * P(' ') ^ 0 * '{comment}') 15 | lex:add_rule('comment', lex:tag(lexer.COMMENT, line_comment + block_comment)) 16 | 17 | -- Math environments. 18 | local math_word = word_match( 19 | 'align displaymath eqnarray equation gather math multline') 20 | local math_begin_end = (P('begin') + P('end')) * P(' ') ^ 0 * '{' * math_word * 21 | P('*') ^ -1 * '}' 22 | lex:add_rule('math', lex:tag('environment.math', 23 | '$' + '\\' * (S('[]()') + math_begin_end))) 24 | 25 | -- LaTeX environments. 26 | lex:add_rule('environment', 27 | lex:tag('environment', 28 | '\\' * (P('begin') + 'end') * P(' ') ^ 0 * '{' * lexer.word * 29 | P('*') ^ -1 * '}')) 30 | 31 | -- Sections. 32 | lex:add_rule('section', lex:tag('command.section', '\\' * word_match( 33 | 'part chapter section subsection subsubsection paragraph subparagraph') * 34 | P('*') ^ -1)) 35 | 36 | -- Commands. 37 | lex:add_rule('command', 38 | lex:tag('command', '\\' * (lexer.alpha ^ 1 + S('#$&~_^%{}\\')))) 39 | 40 | -- Operators. 41 | lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('&#{}[]'))) 42 | 43 | -- Fold points. 44 | lex:add_fold_point(lexer.COMMENT, '\\begin', '\\end') 45 | lex:add_fold_point('environment', '\\begin', '\\end') 46 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 47 | 48 | lexer.property['scintillua.comment'] = '%' 49 | 50 | return lex 51 | -------------------------------------------------------------------------------- /lua/lexers/vbscript.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2017 Mitchell mitchell.att.foicica.com. See LICENSE. 2 | -- VisualBasic LPeg lexer. 3 | local l = require('lexer') 4 | local token, word_match = l.token, l.word_match 5 | local P, R, S = lpeg.P, lpeg.R, lpeg.S 6 | 7 | local M = {_NAME = 'vbscript'} 8 | 9 | -- Whitespace. 10 | local ws = token(l.WHITESPACE, l.space ^ 1) 11 | 12 | -- Comments. 13 | local comment = token(l.COMMENT, (P("'") + word_match({'rem'}, nil, true)) * 14 | l.nonnewline ^ 0) 15 | 16 | -- Strings. 17 | local string = token(l.STRING, l.delimited_range('"', true, true)) 18 | 19 | -- Numbers. 20 | local number = token(l.NUMBER, (l.float + l.integer) * S('LlUuFf') ^ -2) 21 | 22 | -- Keywords. 23 | local keyword = token(l.KEYWORD, word_match({ 24 | -- Control. 25 | 'If', 'Then', 'Else', 'ElseIf', 'While', 'Wend', 'For', 'To', 'Each', 'In', 26 | 'Step', 'Case', 'Select', 'Return', 'Continue', 'Do', 'Until', 'Loop', 27 | 'Next', 'With', 'Exit', -- Operators. 28 | 'Mod', 'And', 'Not', 'Or', 'Xor', 'Is', -- Storage types. 29 | 'Call', 'Class', 'Const', 'Dim', 'ReDim', 'Preserve', 'Function', 'Sub', 30 | 'Property', 'End', 'Set', 'Let', 'Get', 'New', 'Randomize', 'Option', 31 | 'Explicit', 'On', 'Error', 'Execute', -- Storage modifiers. 32 | 'Private', 'Public', 'Default', -- Constants. 33 | 'Empty', 'False', 'Nothing', 'Null', 'True' 34 | }, nil, true)) 35 | 36 | -- Types. 
37 | local type = token(l.TYPE, word_match({ 38 | 'Boolean', 'Byte', 'Char', 'Date', 'Decimal', 'Double', 'Long', 'Object', 39 | 'Short', 'Single', 'String' 40 | }, nil, true)) 41 | 42 | -- Identifiers. 43 | local identifier = token(l.IDENTIFIER, l.word) 44 | 45 | -- Operators. 46 | local operator = token(l.OPERATOR, S('=><+-*^&:.,_()')) 47 | 48 | M._rules = { 49 | {'whitespace', ws}, {'keyword', keyword}, {'type', type}, 50 | {'comment', comment}, {'identifier', identifier}, {'string', string}, 51 | {'number', number}, {'operator', operator} 52 | } 53 | 54 | return M 55 | -------------------------------------------------------------------------------- /lua/lexers/pike.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Pike LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('pike') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'break', 'case', 'catch', 'continue', 'default', 'do', 'else', 'for', 15 | 'foreach', 'gauge', 'if', 'lambda', 'return', 'sscanf', 'switch', 'while', 16 | 'import', 'inherit', -- Type modifiers. 17 | 'constant', 'extern', 'final', 'inline', 'local', 'nomask', 'optional', 18 | 'private', 'protected', 'public', 'static', 'variant' 19 | })) 20 | 21 | -- Types. 22 | lex:add_rule('type', token(lexer.TYPE, word_match( 23 | 'array class float function int mapping mixed multiset object program string void'))) 24 | 25 | -- Identifiers. 26 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 27 | 28 | -- Strings. 29 | local sq_str = lexer.range("'", true) 30 | local dq_str = P('#') ^ -1 * lexer.range('"', true) 31 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 32 | 33 | -- Comments. 34 | local line_comment = lexer.to_eol('//', true) 35 | local block_comment = lexer.range('/*', '*/', false, false, true) 36 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 37 | 38 | -- Numbers. 39 | lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('lLdDfF') ^ -1)) 40 | 41 | -- Preprocessors. 42 | lex:add_rule('preprocessor', 43 | token(lexer.PREPROCESSOR, lexer.to_eol(lexer.starts_line('#')))) 44 | 45 | -- Operators. 46 | lex:add_rule('operator', token(lexer.OPERATOR, S('<>=!+-/*%&|^~@`.,:;()[]{}'))) 47 | 48 | -- Fold points. 49 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 50 | lex:add_fold_point(lexer.COMMENT, '/*', '*/') 51 | 52 | lexer.property['scintillua.comment'] = '//' 53 | 54 | return lex 55 | -------------------------------------------------------------------------------- /lua/lexers/ada.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Ada LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('ada') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 
13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ 14 | 'abort', 'abs', 'abstract', 'accept', 'access', 'aliased', 'all', 'and', 15 | 'array', 'at', 'begin', 'body', 'case', 'constant', 'declare', 'delay', 16 | 'delta', 'digits', 'do', 'else', 'elsif', 'end', 'entry', 'exception', 17 | 'exit', 'for', 'function', 'generic', 'goto', 'if', 'in', 'interface', 'is', 18 | 'limited', 'loop', 'mod', 'new', 'not', 'null', 'of', 'or', 'others', 'out', 19 | 'overriding', 'package', 'parallel', 'pragma', 'private', 'procedure', 20 | 'protected', 'raise', 'range', 'record', 'rem', 'renames', 'requeue', 21 | 'return', 'reverse', 'select', 'separate', 'some', 'subtype', 22 | 'synchronized', 'tagged', 'task', 'terminate', 'then', 'type', 'until', 23 | 'use', 'when', 'while', 'with', 'xor', -- 24 | 'true', 'false' 25 | }, true))) 26 | 27 | -- Types. 28 | lex:add_rule('type', token(lexer.TYPE, word_match({ 29 | 'boolean', 'character', 'count', 'duration', 'float', 'integer', 30 | 'long_float', 'long_integer', 'priority', 'short_float', 'short_integer', 31 | 'string' 32 | }, true))) 33 | 34 | -- Identifiers. 35 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 36 | 37 | -- Strings. 38 | lex:add_rule('string', token(lexer.STRING, lexer.range('"', true, false))) 39 | 40 | -- Comments. 41 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('--'))) 42 | 43 | -- Numbers. 44 | lex:add_rule('number', token(lexer.NUMBER, lexer.number_('_'))) 45 | 46 | -- Operators. 47 | lex:add_rule('operator', token(lexer.OPERATOR, S(':;=<>&+-*/.()'))) 48 | 49 | lexer.property['scintillua.comment'] = '--' 50 | 51 | return lex 52 | -------------------------------------------------------------------------------- /lua/lexers/idl.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- IDL LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('idl') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'abstract', 'attribute', 'case', 'const', 'context', 'custom', 'default', 15 | 'enum', 'exception', 'factory', 'FALSE', 'in', 'inout', 'interface', 16 | 'local', 'module', 'native', 'oneway', 'out', 'private', 'public', 'raises', 17 | 'readonly', 'struct', 'support', 'switch', 'TRUE', 'truncatable', 'typedef', 18 | 'union', 'valuetype' 19 | })) 20 | 21 | -- Types. 22 | lex:add_rule('type', token(lexer.TYPE, word_match { 23 | 'any', 'boolean', 'char', 'double', 'fixed', 'float', 'long', 'Object', 24 | 'octet', 'sequence', 'short', 'string', 'unsigned', 'ValueBase', 'void', 25 | 'wchar', 'wstring' 26 | })) 27 | 28 | -- Identifiers. 29 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 30 | 31 | -- Strings. 32 | local sq_str = lexer.range("'", true) 33 | local dq_str = lexer.range('"', true) 34 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 35 | 36 | -- Comments. 37 | local line_comment = lexer.to_eol('//', true) 38 | local block_comment = lexer.range('/*', '*/') 39 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 40 | 41 | -- Numbers. 42 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 43 | 44 | -- Preprocessor. 
45 | lex:add_rule('preproc', token(lexer.PREPROCESSOR, lexer.starts_line('#') * 46 | word_match( 47 | 'define undef ifdef ifndef if elif else endif include warning pragma'))) 48 | 49 | -- Operators. 50 | lex:add_rule('operator', token(lexer.OPERATOR, S('!<>=+-/*%&|^~.,:;?()[]{}'))) 51 | 52 | lexer.property['scintillua.comment'] = '//' 53 | 54 | return lex 55 | -------------------------------------------------------------------------------- /lua/lexers/context.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Robert Gieseke, Lars Otter. See LICENSE. 2 | -- ConTeXt LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('context') 8 | 9 | -- TeX and ConTeXt mkiv environment definitions. 10 | local beginend = (P('begin') + 'end') 11 | local startstop = (P('start') + 'stop') 12 | 13 | -- Whitespace. 14 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 15 | 16 | -- Comments. 17 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('%'))) 18 | 19 | -- Sections. 20 | local wm_section = word_match { 21 | 'chapter', 'part', 'section', 'subject', 'subsection', 'subsubject', 22 | 'subsubsection', 'subsubsubject', 'subsubsubsection', 'subsubsubsubject', 23 | 'title' 24 | } 25 | local section = token(lexer.CLASS, '\\' * startstop ^ -1 * wm_section) 26 | lex:add_rule('section', section) 27 | 28 | -- TeX and ConTeXt mkiv environments. 29 | local environment = token(lexer.STRING, 30 | '\\' * (beginend + startstop) * lexer.alpha ^ 1) 31 | lex:add_rule('environment', environment) 32 | 33 | -- Commands. 34 | local command = token(lexer.KEYWORD, 35 | '\\' * 36 | (lexer.alpha ^ 1 * P('\\') * lexer.space ^ 1 + 37 | lexer.alpha ^ 1 + S('!"#$%&\',./;=[\\]_{|}~`^-'))) 38 | lex:add_rule('command', command) 39 | 40 | -- Operators. 41 | local operator = token(lexer.OPERATOR, S('#$_[]{}~^')) 42 | lex:add_rule('operator', operator) 43 | 44 | -- Fold points. 45 | lex:add_fold_point('environment', '\\start', '\\stop') 46 | lex:add_fold_point('environment', '\\begin', '\\end') 47 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 48 | 49 | -- Embedded Lua. 50 | local luatex = lexer.load('lua') 51 | local luatex_start_rule = #P('\\startluacode') * environment 52 | local luatex_end_rule = #P('\\stopluacode') * environment 53 | lex:embed(luatex, luatex_start_rule, luatex_end_rule) 54 | 55 | lexer.property['scintillua.comment'] = '%' 56 | 57 | return lex 58 | -------------------------------------------------------------------------------- /lua/lexers/hare.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2021-2023 Mitchell. See LICENSE. 2 | -- Hare LPeg lexer 3 | -- https://harelang.org 4 | -- Contributed by Qiu 5 | local lexer = require('lexer') 6 | local token, word_match = lexer.token, lexer.word_match 7 | local P, S = lpeg.P, lpeg.S 8 | 9 | local lex = lexer.new('hare') 10 | 11 | -- Whitespace. 12 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 13 | 14 | -- Keywords. 15 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 16 | 'as', 'break', 'case', 'const', 'continue', 'def', 'defer', 'else', 17 | 'export', 'false', 'fn', 'for', 'if', 'is', 'let', 'match', 'null', 18 | 'nullable', 'return', 'static', 'struct', 'switch', 'true', 'type', 'use', 19 | 'yield' 20 | })) 21 | 22 | -- Functions. 
23 | lex:add_rule('function', token(lexer.FUNCTION, word_match { 24 | 'len', 'alloc', 'free', 'assert', 'abort', 'size', 'append', 'insert', 25 | 'delete', 'vastart', 'vaarg', 'vaend' 26 | })) 27 | 28 | -- Types. 29 | lex:add_rule('type', token(lexer.TYPE, word_match { 30 | 'bool', 'enum', 'f32', 'f64', 'i16', 'i32', 'i64', 'i8', 'int', 'u16', 31 | 'u32', 'u64', 'u8', 'uint', 'uintptr', 'union', 'void', 'rune', 'str', 32 | 'char' 33 | })) 34 | 35 | -- Identifiers. 36 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 37 | 38 | -- Strings. 39 | local dq_str = lexer.range('"') 40 | local raw_str = lexer.range('`') 41 | lex:add_rule('string', token(lexer.STRING, dq_str + raw_str)) 42 | 43 | -- Comments. 44 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('//'))) 45 | 46 | -- Numbers. 47 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 48 | 49 | -- Operators. 50 | lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%^!=&|?:;,.()[]{}<>'))) 51 | 52 | -- At rule. 53 | lex:add_rule('at_rule', token(lexer.ANNOTATION, '@' * 54 | word_match( 55 | 'noreturn offset init fini test symbol'))) 56 | 57 | -- Fold points. 58 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 59 | 60 | lexer.property['scintillua.comment'] = '//' 61 | 62 | return lex 63 | -------------------------------------------------------------------------------- /lua/lexers/coffeescript.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- CoffeeScript LPeg lexer. 3 | local lexer = require('lexer') 4 | local word_match = lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('coffeescript', {fold_by_indentation = true}) 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', lex:tag(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', lex:tag(lexer.KEYWORD, word_match { 14 | 'all', 'and', 'bind', 'break', 'by', 'case', 'catch', 'class', 'const', 15 | 'continue', 'default', 'delete', 'do', 'each', 'else', 'enum', 'export', 16 | 'extends', 'false', 'finally', 'for', 'function', 'if', 'import', 'in', 17 | 'instanceof', 'is', 'isnt', 'let', 'loop', 'native', 'new', 'no', 'not', 18 | 'of', 'off', 'on', 'or', 'return', 'super', 'switch', 'then', 'this', 19 | 'throw', 'true', 'try', 'typeof', 'unless', 'until', 'var', 'void', 'when', 20 | 'while', 'with', 'yes' 21 | })) 22 | 23 | -- Fields: object properties and methods. 24 | lex:add_rule('field', lex:tag(lexer.FUNCTION, '.' * (S('_$') + lexer.alpha) * 25 | (S('_$') + lexer.alnum) ^ 0)) 26 | 27 | -- Identifiers. 28 | lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) 29 | 30 | -- Strings. 31 | local sq_str = lexer.range("'") 32 | local dq_str = lexer.range('"') 33 | local string = lex:tag(lexer.STRING, sq_str + dq_str) 34 | local regex_str = lexer.after_set('+-*%<>!=^&|?~:;,([{', 35 | lexer.range('/', true) * S('igm') ^ 0) 36 | local regex = lex:tag(lexer.REGEX, regex_str) 37 | lex:add_rule('string', string + regex) 38 | 39 | -- Comments. 40 | local block_comment = lexer.range('###') 41 | local line_comment = lexer.to_eol('#', true) 42 | lex:add_rule('comment', lex:tag(lexer.COMMENT, block_comment + line_comment)) 43 | 44 | -- Numbers. 45 | lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number)) 46 | 47 | -- Operators. 
48 | lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;,.()[]{}'))) 49 | 50 | lexer.property['scintillua.comment'] = '#' 51 | 52 | return lex 53 | -------------------------------------------------------------------------------- /lua/lexers/dart.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2013-2023 Mitchell. See LICENSE. 2 | -- Dart LPeg lexer. 3 | -- Written by Brian Schott (@Hackerpilot on Github). 4 | local lexer = require('lexer') 5 | local token, word_match = lexer.token, lexer.word_match 6 | local P, S = lpeg.P, lpeg.S 7 | 8 | local lex = lexer.new('dart') 9 | 10 | -- Whitespace. 11 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 12 | 13 | -- Keywords. 14 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 15 | 'assert', 'break', 'case', 'catch', 'class', 'const', 'continue', 'default', 16 | 'do', 'else', 'enum', 'extends', 'false', 'final', 'finally', 'for', 'if', 17 | 'in', 'is', 'new', 'null', 'rethrow', 'return', 'super', 'switch', 'this', 18 | 'throw', 'true', 'try', 'var', 'void', 'while', 'with' 19 | })) 20 | 21 | -- Built-ins. 22 | lex:add_rule('builtin', token(lexer.CONSTANT, word_match { 23 | 'abstract', 'as', 'dynamic', 'export', 'external', 'factory', 'get', 24 | 'implements', 'import', 'library', 'operator', 'part', 'set', 'static', 25 | 'typedef' 26 | })) 27 | 28 | -- Strings. 29 | local sq_str = S('r') ^ -1 * lexer.range("'", true) 30 | local dq_str = S('r') ^ -1 * lexer.range('"', true) 31 | local tq_str = S('r') ^ -1 * (lexer.range("'''") + lexer.range('"""')) 32 | lex:add_rule('string', token(lexer.STRING, tq_str + sq_str + dq_str)) 33 | 34 | -- Identifiers. 35 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 36 | 37 | -- Comments. 38 | local line_comment = lexer.to_eol('//', true) 39 | local block_comment = lexer.range('/*', '*/', false, false, true) 40 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 41 | 42 | -- Numbers. 43 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 44 | 45 | -- Operators. 46 | lex:add_rule('operator', token(lexer.OPERATOR, S('#?=!<>+-*$/%&|^~.,;()[]{}'))) 47 | 48 | -- Annotations. 49 | lex:add_rule('annotation', token(lexer.ANNOTATION, '@' * lexer.word ^ 1)) 50 | 51 | -- Fold points. 52 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 53 | lex:add_fold_point(lexer.COMMENT, '/*', '*/') 54 | 55 | lexer.property['scintillua.comment'] = '//' 56 | 57 | return lex 58 | -------------------------------------------------------------------------------- /lua/lexers/gtkrc.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Gtkrc LPeg lexer. 3 | local lexer = lexer 4 | local word_match = lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new(...) 8 | 9 | -- Keywords. 10 | lex:add_rule('keyword', lex:tag(lexer.KEYWORD, word_match( 11 | 'binding class include module_path pixmap_path im_module_file style widget widget_class'))) 12 | 13 | -- Variables. 14 | lex:add_rule('variable', lex:tag(lexer.VARIABLE_BUILTIN, 15 | lex:word_match(lexer.VARIABLE_BUILTIN))) 16 | 17 | -- States. 18 | lex:add_rule('state', lex:tag(lexer.CONSTANT_BUILTIN, 19 | lex:word_match(lexer.CONSTANT_BUILTIN))) 20 | 21 | -- Functions. 22 | lex:add_rule('function', lex:tag(lexer.FUNCTION_BUILTIN, 23 | lex:word_match(lexer.FUNCTION_BUILTIN))) 24 | 25 | -- Identifiers. 
26 | lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, 27 | lexer.alpha * (lexer.alnum + S('_-')) ^ 0)) 28 | 29 | -- Strings. 30 | local sq_str = lexer.range("'", true) 31 | local dq_str = lexer.range('"', true) 32 | lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str)) 33 | 34 | -- Comments. 35 | lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#'))) 36 | 37 | -- Numbers. 38 | lex:add_rule('number', lex:tag(lexer.NUMBER, 39 | lexer.digit ^ 1 * ('.' * lexer.digit ^ 1) ^ -1)) 40 | 41 | -- Operators. 42 | lex:add_rule('operator', lex:tag(lexer.OPERATOR, S(':=,*()[]{}'))) 43 | 44 | -- Fold points. 45 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 46 | 47 | -- Word lists. 48 | lex:set_word_list(lexer.VARIABLE_BUILTIN, { 49 | 'bg', 'fg', 'base', 'text', 'xthickness', 'ythickness', 'bg_pixmap', 'font', 50 | 'fontset', 'font_name', 'stock', 'color', 'engine' 51 | }) 52 | 53 | lex:set_word_list(lexer.CONSTANT_BUILTIN, { 54 | 'ACTIVE', 'SELECTED', 'NORMAL', 'PRELIGHT', 'INSENSITIVE', 'TRUE', 'FALSE' 55 | }) 56 | 57 | lex:set_word_list(lexer.FUNCTION_BUILTIN, {'mix', 'shade', 'lighter', 'darker'}) 58 | 59 | lexer.property['scintillua.comment'] = '#' 60 | 61 | return lex 62 | -------------------------------------------------------------------------------- /lua/lexers/batch.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Batch LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('batch', {case_insensitive_fold_points = true}) 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ 14 | 'cd', 'chdir', 'md', 'mkdir', 'cls', 'for', 'if', 'echo', 'echo.', 'move', 15 | 'copy', 'ren', 'del', 'set', 'call', 'exit', 'setlocal', 'shift', 16 | 'endlocal', 'pause', 'defined', 'exist', 'errorlevel', 'else', 'in', 'do', 17 | 'NUL', 'AUX', 'PRN', 'not', 'goto', 'pushd', 'popd' 18 | }, true))) 19 | 20 | -- Functions. 21 | lex:add_rule('function', token(lexer.FUNCTION, word_match({ 22 | 'APPEND', 'ATTRIB', 'CHKDSK', 'CHOICE', 'DEBUG', 'DEFRAG', 'DELTREE', 23 | 'DISKCOMP', 'DISKCOPY', 'DOSKEY', 'DRVSPACE', 'EMM386', 'EXPAND', 24 | 'FASTOPEN', 'FC', 'FDISK', 'FIND', 'FORMAT', 'GRAPHICS', 'KEYB', 'LABEL', 25 | 'LOADFIX', 'MEM', 'MODE', 'MORE', 'MOVE', 'MSCDEX', 'NLSFUNC', 'POWER', 26 | 'PRINT', 'RD', 'REPLACE', 'RESTORE', 'SETVER', 'SHARE', 'SORT', 'SUBST', 27 | 'SYS', 'TREE', 'UNDELETE', 'UNFORMAT', 'VSAFE', 'XCOPY' 28 | }, true))) 29 | 30 | -- Comments. 31 | local rem = (P('REM') + 'rem') * #lexer.space 32 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol(rem + '::'))) 33 | 34 | -- Identifiers. 35 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 36 | 37 | -- Strings. 38 | lex:add_rule('string', token(lexer.STRING, lexer.range('"', true))) 39 | 40 | -- Variables. 41 | local arg = '%' * lexer.digit + '%~' * lexer.alnum ^ 1 42 | local variable = lexer.range('%', true, false) 43 | lex:add_rule('variable', token(lexer.VARIABLE, arg + variable)) 44 | 45 | -- Labels. 46 | lex:add_rule('label', token(lexer.LABEL, ':' * lexer.word)) 47 | 48 | -- Operators. 49 | lex:add_rule('operator', token(lexer.OPERATOR, S('+|&!<>='))) 50 | 51 | -- Fold points. 
52 | lex:add_fold_point(lexer.KEYWORD, 'setlocal', 'endlocal') 53 | 54 | lexer.property['scintillua.comment'] = 'REM ' 55 | 56 | return lex 57 | -------------------------------------------------------------------------------- /lua/lexers/antlr.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- ANTLR LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('antlr') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'abstract', 'break', 'case', 'catch', 'continue', 'default', 'do', 'else', 15 | 'extends', 'final', 'finally', 'for', 'if', 'implements', 'instanceof', 16 | 'native', 'new', 'private', 'protected', 'public', 'return', 'static', 17 | 'switch', 'synchronized', 'throw', 'throws', 'transient', 'try', 'volatile', 18 | 'while', 'package', 'import', 'header', 'options', 'tokens', 'strictfp', 19 | 'false', 'null', 'super', 'this', 'true' 20 | })) 21 | 22 | -- Types. 23 | lex:add_rule('type', token(lexer.TYPE, word_match( 24 | 'boolean byte char class double float int interface long short void'))) 25 | 26 | -- Functions. 27 | lex:add_rule('func', token(lexer.FUNCTION, 'assert')) 28 | 29 | -- Identifiers. 30 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 31 | 32 | -- Comments. 33 | local line_comment = lexer.to_eol('//') 34 | local block_comment = lexer.range('/*', '*/') 35 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 36 | 37 | -- Actions. 38 | local open_brace = token(lexer.OPERATOR, '{') 39 | local close_brace = token(lexer.OPERATOR, '}') 40 | lex:add_rule('action', 41 | open_brace * token('action', (1 - P('}')) ^ 0) * close_brace ^ -1) 42 | lex:add_style('action', lexer.styles.nothing) 43 | 44 | -- Strings. 45 | lex:add_rule('string', token(lexer.STRING, lexer.range("'", true))) 46 | 47 | -- Operators. 48 | lex:add_rule('operator', token(lexer.OPERATOR, S('$@:;|.=+*?~!^>-()[]{}'))) 49 | 50 | -- Fold points. 51 | lex:add_fold_point(lexer.OPERATOR, ':', ';') 52 | lex:add_fold_point(lexer.OPERATOR, '(', ')') 53 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 54 | lex:add_fold_point(lexer.COMMENT, '/*', '*/') 55 | 56 | lexer.property['scintillua.comment'] = '//' 57 | 58 | return lex 59 | -------------------------------------------------------------------------------- /lua/lexers/myrddin.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2017-2023 Michael Forney. See LICENSE 2 | -- Myrddin LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('myrddin') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'break', 'const', 'continue', 'elif', 'else', 'extern', 'false', 'for', 15 | 'generic', 'goto', 'if', 'impl', 'in', 'match', 'pkg', 'pkglocal', 'sizeof', 16 | 'struct', 'trait', 'true', 'type', 'union', 'use', 'var', 'while' 17 | })) 18 | 19 | -- Types. 
20 | lex:add_rule('type', token(lexer.TYPE, word_match { 21 | 'void', 'bool', 'char', 'byte', 'int', 'uint', 'int8', 'uint8', 'int16', 22 | 'uint16', 'int32', 'uint32', 'int64', 'uint64', 'flt32', 'flt64' 23 | } + '@' * lexer.word)) 24 | 25 | -- Identifiers. 26 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 27 | 28 | -- Comments. 29 | local line_comment = lexer.to_eol('//', true) 30 | local block_comment = lexer.range('/*', '*/', false, false, true) 31 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 32 | 33 | -- Strings. 34 | local sq_str = lexer.range("'", true) 35 | local dq_str = lexer.range('"', true) 36 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 37 | 38 | -- Numbers. 39 | local digit = lexer.digit + '_' 40 | local bdigit = S('01') + '_' 41 | local xdigit = lexer.xdigit + '_' 42 | local odigit = lpeg.R('07') + '_' 43 | local integer = '0x' * xdigit ^ 1 + '0o' * odigit ^ 1 + '0b' * bdigit ^ 1 + 44 | digit ^ 1 45 | local float = digit ^ 1 * 46 | ((('.' * digit ^ 1) * (S('eE') * S('+-') ^ -1 * digit ^ 1) ^ 47 | -1) + 48 | (('.' * digit ^ 1) ^ -1 * S('eE') * S('+-') ^ -1 * digit ^ 49 | 1)) 50 | lex:add_rule('number', token(lexer.NUMBER, float + integer)) 51 | 52 | -- Operators. 53 | lex:add_rule('operator', token(lexer.OPERATOR, S('`#_+-/*%<>~!=^&|~:;,.()[]{}'))) 54 | 55 | lexer.property['scintillua.comment'] = '//' 56 | 57 | return lex 58 | -------------------------------------------------------------------------------- /lua/lexers/mediawiki.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- MediaWiki LPeg lexer. 3 | -- Contributed by Alexander Misel. 4 | local lexer = require('lexer') 5 | local token, word_match = lexer.token, lexer.word_match 6 | local P, S, B = lpeg.P, lpeg.S, lpeg.B 7 | 8 | local lex = lexer.new('mediawiki') 9 | 10 | -- Comments. 11 | lex:add_rule('comment', token(lexer.COMMENT, lexer.range('<!--', '-->'))) 12 | 13 | -- HTML-like tags 14 | local tag_start = token(lexer.TAG, 15 | '<' * P('/') ^ -1 * lexer.alnum ^ 1 * lexer.space ^ 0) 16 | local dq_str = '"' * ((lexer.any - S('>"\\')) + ('\\' * lexer.any)) ^ 0 * '"' 17 | local tag_attr = token(lexer.ATTRIBUTE, lexer.alpha ^ 1 * lexer.space ^ 0 * 18 | ('=' * lexer.space ^ 0 * 19 | (dq_str + (lexer.any - lexer.space - '>') ^ 0) ^ 20 | -1) ^ 0 * lexer.space ^ 0) 21 | local tag_end = token(lexer.TAG, P('/') ^ -1 * '>') 22 | lex:add_rule('tag', tag_start * tag_attr ^ 0 * tag_end) 23 | 24 | -- Link 25 | lex:add_rule('link', token(lexer.STRING, S('[]'))) 26 | lex:add_rule('internal_link', 27 | B('[[') * token(lexer.LINK, (lexer.any - '|' - ']]') ^ 1)) 28 | 29 | -- Templates and parser functions. 30 | lex:add_rule('template', token(lexer.OPERATOR, S('{}'))) 31 | lex:add_rule('parser_func', B('{{') * 32 | token(lexer.FUNCTION, 33 | '#' * lexer.alpha ^ 1 + lexer.upper ^ 1 * ':')) 34 | lex:add_rule('template_name', 35 | B('{{') * token(lexer.LINK, (lexer.any - S('{}|')) ^ 1)) 36 | 37 | -- Operators.
38 | lex:add_rule('operator', token(lexer.OPERATOR, S('-=|#~!'))) 39 | 40 | -- Behavior switches 41 | local start_pat = P(function(_, pos) return pos == 1 end) 42 | lex:add_rule('behavior_switch', 43 | (B(lexer.space) + start_pat) * token('behavior_switch', word_match( 44 | '__TOC__ __FORCETOC__ __NOTOC__ __NOEDITSECTION__ __NOCC__ __NOINDEX__')) * 45 | #lexer.space) 46 | lex:add_style('behavior_switch', lexer.styles.keyword) 47 | 48 | lexer.property['scintillua.comment'] = '<!--|-->' 49 | lexer.property['scintillua.angle.braces'] = '1' 50 | 51 | return lex 52 | -------------------------------------------------------------------------------- /lua/lexers/scala.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 JMS. See LICENSE. 2 | -- Scala LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('scala') 8 | 9 | -- Whitespace. 10 | local ws = token(lexer.WHITESPACE, lexer.space ^ 1) 11 | lex:add_rule('whitespace', ws) 12 | 13 | -- Classes. 14 | lex:add_rule('class', token(lexer.KEYWORD, 'class') * ws ^ 1 * 15 | token(lexer.CLASS, lexer.word)) 16 | 17 | -- Keywords. 18 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 19 | 'abstract', 'case', 'catch', 'class', 'def', 'do', 'else', 'extends', 20 | 'false', 'final', 'finally', 'for', 'forSome', 'if', 'implicit', 'import', 21 | 'lazy', 'match', 'new', 'null', 'object', 'override', 'package', 'private', 22 | 'protected', 'return', 'sealed', 'super', 'this', 'throw', 'trait', 'try', 23 | 'true', 'type', 'val', 'var', 'while', 'with', 'yield' 24 | })) 25 | 26 | -- Types. 27 | lex:add_rule('type', token(lexer.TYPE, word_match { 28 | 'Array', 'Boolean', 'Buffer', 'Byte', 'Char', 'Collection', 'Double', 29 | 'Float', 'Int', 'Iterator', 'LinkedList', 'List', 'Long', 'Map', 'None', 30 | 'Option', 'Set', 'Short', 'SortedMap', 'SortedSet', 'String', 'TreeMap', 31 | 'TreeSet' 32 | })) 33 | 34 | -- Functions. 35 | lex:add_rule('function', token(lexer.FUNCTION, lexer.word) * #P('(')) 36 | 37 | -- Identifiers. 38 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 39 | 40 | -- Strings. 41 | local symbol = "'" * lexer.word 42 | local dq_str = lexer.range('"', true) 43 | local tq_str = lexer.range('"""') 44 | lex:add_rule('string', token(lexer.STRING, tq_str + symbol + dq_str)) 45 | 46 | -- Comments. 47 | local line_comment = lexer.to_eol('//', true) 48 | local block_comment = lexer.range('/*', '*/') 49 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 50 | 51 | -- Numbers. 52 | lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('LlFfDd') ^ -1)) 53 | 54 | -- Operators. 55 | lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}'))) 56 | 57 | -- Fold points. 58 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 59 | lex:add_fold_point(lexer.COMMENT, '/*', '*/') 60 | 61 | lexer.property['scintillua.comment'] = '//' 62 | 63 | return lex 64 | -------------------------------------------------------------------------------- /lua/lexers/dot.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Brian "Sir Alaran" Schott. See LICENSE. 2 | -- Dot LPeg lexer. 3 | -- Based off of lexer code by Mitchell.
4 | local lexer = require('lexer') 5 | local token, word_match = lexer.token, lexer.word_match 6 | local P, S = lpeg.P, lpeg.S 7 | 8 | local lex = lexer.new('dot') 9 | 10 | -- Whitespace. 11 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 12 | 13 | -- Keywords. 14 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 15 | 'graph', 'node', 'edge', 'digraph', 'fontsize', 'rankdir', 'fontname', 16 | 'shape', 'label', 'arrowhead', 'arrowtail', 'arrowsize', 'color', 'comment', 17 | 'constraint', 'decorate', 'dir', 'headlabel', 'headport', 'headURL', 18 | 'labelangle', 'labeldistance', 'labelfloat', 'labelfontcolor', 19 | 'labelfontname', 'labelfontsize', 'layer', 'lhead', 'ltail', 'minlen', 20 | 'samehead', 'sametail', 'style', 'taillabel', 'tailport', 'tailURL', 21 | 'weight', 'subgraph' 22 | })) 23 | 24 | -- Types. 25 | lex:add_rule('type', token(lexer.TYPE, word_match { 26 | 'box', 'polygon', 'ellipse', 'circle', 'point', 'egg', 'triangle', 27 | 'plaintext', 'diamond', 'trapezium', 'parallelogram', 'house', 'pentagon', 28 | 'hexagon', 'septagon', 'octagon', 'doublecircle', 'doubleoctagon', 29 | 'tripleoctagon', 'invtriangle', 'invtrapezium', 'invhouse', 'Mdiamond', 30 | 'Msquare', 'Mcircle', 'rect', 'rectangle', 'none', 'note', 'tab', 'folder', 31 | 'box3d', 'record' 32 | })) 33 | 34 | -- Identifiers. 35 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 36 | 37 | -- Strings. 38 | local sq_str = lexer.range("'") 39 | local dq_str = lexer.range('"') 40 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 41 | 42 | -- Comments. 43 | local line_comment = lexer.to_eol('//', true) 44 | local block_comment = lexer.range('/*', '*/') 45 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 46 | 47 | -- Numbers. 48 | lex:add_rule('number', token(lexer.NUMBER, lexer.float + lexer.dec_num)) 49 | 50 | -- Operators. 51 | lex:add_rule('operator', token(lexer.OPERATOR, S('->()[]{};'))) 52 | 53 | -- Fold points. 54 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 55 | lex:add_fold_point(lexer.COMMENT, '/*', '*/') 56 | 57 | lexer.property['scintillua.comment'] = '//' 58 | 59 | return lex 60 | -------------------------------------------------------------------------------- /lua/lexers/rails.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Ruby on Rails LPeg lexer. 3 | local lexer = lexer 4 | local P, S = lpeg.P, lpeg.S 5 | 6 | local lex = lexer.new(..., {inherit = lexer.load('ruby')}) 7 | 8 | -- Word lists. 9 | lex:set_word_list(lexer.FUNCTION_BUILTIN, { 10 | -- ActionPack. 11 | 'before_filter', 'skip_before_filter', 'skip_after_filter', 'after_filter', 12 | 'around_filter', 'filter', 'filter_parameter_logging', 'layout', 13 | 'require_dependency', 'render', 'render_action', 'render_text', 14 | 'render_file', 'render_template', 'render_nothing', 'render_component', 15 | 'render_without_layout', 'rescue_from', 'url_for', 'redirect_to', 16 | 'redirect_to_path', 'redirect_to_url', 'respond_to', 'helper', 17 | 'helper_method', 'model', 'service', 'observer', 'serialize', 'scaffold', 18 | 'verify', 'hide_action', -- View helpers. 19 | 'check_box', 'content_for', 'error_messages_for', 'form_for', 'fields_for', 20 | 'file_field', 'hidden_field', 'image_submit_tag', 'label', 'link_to', 21 | 'password_field', 'radio_button', 'submit', 'text_field', 'text_area', 22 | -- ActiveRecord.
23 | 'after_create', 'after_destroy', 'after_save', 'after_update', 24 | 'after_validation', 'after_validation_on_create', 25 | 'after_validation_on_update', 'before_create', 'before_destroy', 26 | 'before_save', 'before_update', 'before_validation', 27 | 'before_validation_on_create', 'before_validation_on_update', 'composed_of', 28 | 'belongs_to', 'has_one', 'has_many', 'has_and_belongs_to_many', 'validate', 29 | 'validates', 'validate_on_create', 'validates_numericality_of', 30 | 'validate_on_update', 'validates_acceptance_of', 'validates_associated', 31 | 'validates_confirmation_of', 'validates_each', 'validates_format_of', 32 | 'validates_inclusion_of', 'validates_exclusion_of', 'validates_length_of', 33 | 'validates_presence_of', 'validates_size_of', 'validates_uniqueness_of', -- 34 | 'attr_protected', 'attr_accessible', 'attr_readonly', 35 | 'accepts_nested_attributes_for', 'default_scope', 'scope', 36 | -- ActiveSupport. 37 | 'alias_method_chain', 'alias_attribute', 'delegate', 'cattr_accessor', 38 | 'mattr_accessor', 'returning', 'memoize' 39 | }, true) 40 | 41 | lexer.property['scintillua.comment'] = '#' 42 | 43 | return lex 44 | -------------------------------------------------------------------------------- /lua/lexers/xs.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2017-2023 David B. Lamkins. See LICENSE. 2 | -- xs LPeg lexer. 3 | -- Adapted from rc lexer by Michael Forney. 4 | local lexer = require('lexer') 5 | local token, word_match = lexer.token, lexer.word_match 6 | local P, S = lpeg.P, lpeg.S 7 | 8 | local lex = lexer.new('xs') 9 | 10 | -- Whitespace. 11 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 12 | 13 | -- Keywords. 14 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 15 | 'access', 'alias', 'catch', 'cd', 'dirs', 'echo', 'else', 'escape', 'eval', 16 | 'exec', 'exit', 'false', 'fn-', 'fn', 'for', 'forever', 'fork', 'history', 17 | 'if', 'jobs', 'let', 'limit', 'local', 'map', 'omap', 'popd', 'printf', 18 | 'pushd', 'read', 'result', 'set-', 'switch', 'throw', 'time', 'true', 19 | 'umask', 'until', 'unwind-protect', 'var', 'vars', 'wait', 'whats', 'while', 20 | ':lt', ':le', ':gt', ':ge', ':eq', ':ne', '~', '~~', '...', '.' 21 | })) 22 | 23 | -- Identifiers. 24 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 25 | 26 | -- Strings. 27 | local str = lexer.range("'", false, true) 28 | local herestr = '<<<' * str 29 | local heredoc = '<<' * P(function(input, index) 30 | local s, e, _, delimiter = input:find( 31 | '[ \t]*(["\']?)([%w!"%%+,-./:?@_~]+)%1', 32 | index) 33 | if s == index and delimiter then 34 | delimiter = delimiter:gsub('[%%+-.?]', '%%%1') 35 | e = select(2, input:find('[\n\r]' .. delimiter .. '[\n\r]', e)) 36 | return e and e + 1 or #input + 1 37 | end 38 | end) 39 | lex:add_rule('string', token(lexer.STRING, str + herestr + heredoc)) 40 | 41 | -- Comments. 42 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) 43 | 44 | -- Numbers. 45 | -- lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 46 | 47 | -- Constants. 48 | lex:add_rule('constant', token(lexer.CONSTANT, '$&' * lexer.word)) 49 | 50 | -- Variables. 51 | lex:add_rule('variable', token(lexer.VARIABLE, '$' * S('"#') ^ -1 * 52 | ('*' + lexer.digit ^ 1 + lexer.word))) 53 | 54 | -- Operators. 55 | lex:add_rule('operator', token(lexer.OPERATOR, S('@`=!<>*&^|;?()[]{}') + '\\\n')) 56 | 57 | -- Fold points. 
58 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 59 | 60 | lexer.property['scintillua.comment'] = '#' 61 | 62 | return lex 63 | -------------------------------------------------------------------------------- /lua/lexers/routeros.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2020-2023 Christian Hesse. See LICENSE. 2 | -- Mikrotik RouterOS script LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('routeros') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | -- Control. 15 | ':delay', ':do', 'on-error', 'while', ':error', ':foreach', 'in', 'do', 16 | ':for', 'from', 'to', 'step', ':if', 'do', 'else', ':return', ':while', 17 | 'do', -- Menu specific commands. 18 | 'add', 'disable', 'edit', 'enable', 'export', 'find', 'get', 'info', 19 | 'monitor', 'print', 'append', 'as-value', 'brief', 'count-only', 'detail', 20 | 'file', 'follow', 'follow-only', 'from', 'interval', 'terse', 'value-list', 21 | 'where', 'without-paging', 'remove', 'set', -- Output & string handling. 22 | ':beep', ':blink', ':environment', ':execute', ':find', ':len', ':log', 23 | 'alert', 'critical', 'debug', 'emergency', 'error', 'info', 'notice', 24 | 'warning', ':parse', ':pick', ':put', ':terminal', ':time', ':typeof', 25 | -- Variable declaration. 26 | ':global', ':local', ':set', -- Variable casting. 27 | ':toarray', ':tobool', ':toid', ':toip', ':toip6', ':tonum', ':tostr', 28 | ':totime', -- Boolean values and logical operators. 29 | 'false', 'no', 'true', 'yes', 'and', 'in', 'or', -- Networking. 30 | ':ping', ':resolve' 31 | })) 32 | 33 | -- Identifiers. 34 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 35 | 36 | -- Comments. 37 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) 38 | 39 | -- Numbers. 40 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 41 | 42 | -- Strings. 43 | lex:add_rule('string', token(lexer.STRING, lexer.range('"'))) 44 | 45 | -- Variables. 46 | lex:add_rule('variable', 47 | token(lexer.VARIABLE, 48 | '$' * (S('!#?*@$') + lexer.digit ^ 1 + lexer.word + 49 | lexer.range('{', '}', true, false, true)))) 50 | 51 | -- Operators. 52 | lex:add_rule('operator', token(lexer.OPERATOR, S('=!%<>+-/*&|~.,;()[]{}'))) 53 | 54 | -- Fold points. 55 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 56 | 57 | lexer.property['scintillua.comment'] = '#' 58 | 59 | return lex 60 | -------------------------------------------------------------------------------- /lua/lexers/vala.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Vala LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('vala') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'class', 'delegate', 'enum', 'errordomain', 'interface', 'namespace', 15 | 'signal', 'struct', 'using', -- Modifiers. 
16 | 'abstract', 'const', 'dynamic', 'extern', 'inline', 'out', 'override', 17 | 'private', 'protected', 'public', 'ref', 'static', 'virtual', 'volatile', 18 | 'weak', -- Other. 19 | 'as', 'base', 'break', 'case', 'catch', 'construct', 'continue', 'default', 20 | 'delete', 'do', 'else', 'ensures', 'finally', 'for', 'foreach', 'get', 'if', 21 | 'in', 'is', 'lock', 'new', 'requires', 'return', 'set', 'sizeof', 'switch', 22 | 'this', 'throw', 'throws', 'try', 'typeof', 'value', 'var', 'void', 'while', 23 | -- Etc. 24 | 'null', 'true', 'false' 25 | })) 26 | 27 | -- Types. 28 | lex:add_rule('type', token(lexer.TYPE, word_match { 29 | 'bool', 'char', 'double', 'float', 'int', 'int8', 'int16', 'int32', 'int64', 30 | 'long', 'short', 'size_t', 'ssize_t', 'string', 'uchar', 'uint', 'uint8', 31 | 'uint16', 'uint32', 'uint64', 'ulong', 'unichar', 'ushort' 32 | })) 33 | 34 | -- Identifiers. 35 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 36 | 37 | -- Strings. 38 | local sq_str = lexer.range("'", true) 39 | local dq_str = lexer.range('"', true) 40 | local tq_str = lexer.range('"""') 41 | local ml_str = '@' * lexer.range('"', false, false) 42 | lex:add_rule('string', token(lexer.STRING, tq_str + sq_str + dq_str + ml_str)) 43 | 44 | -- Comments. 45 | local line_comment = lexer.to_eol('//', true) 46 | local block_comment = lexer.range('/*', '*/') 47 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 48 | 49 | -- Numbers. 50 | lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('uUlLfFdDmM') ^ -1)) 51 | 52 | -- Operators. 53 | lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}'))) 54 | 55 | -- Fold points. 56 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 57 | lex:add_fold_point(lexer.COMMENT, '/*', '*/') 58 | 59 | lexer.property['scintillua.comment'] = '//' 60 | 61 | return lex 62 | -------------------------------------------------------------------------------- /lua/lexers/fish.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2015-2023 Jason Schindler. See LICENSE. 2 | -- Fish (http://fishshell.com/) script LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('fish') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'alias', 'and', 'begin', 'bg', 'bind', 'block', 'break', 'breakpoint', 15 | 'builtin', 'case', 'cd', 'command', 'commandline', 'complete', 'contains', 16 | 'continue', 'count', 'dirh', 'dirs', 'echo', 'else', 'emit', 'end', 'eval', 17 | 'exec', 'exit', 'fg', 'fish', 'fish_config', 'fishd', 'fish_indent', 18 | 'fish_pager', 'fish_prompt', 'fish_right_prompt', 'fish_update_completions', 19 | 'for', 'funced', 'funcsave', 'function', 'functions', 'help', 'history', 20 | 'if', 'in', 'isatty', 'jobs', 'math', 'mimedb', 'nextd', 'not', 'open', 21 | 'or', 'popd', 'prevd', 'psub', 'pushd', 'pwd', 'random', 'read', 'return', 22 | 'set', 'set_color', 'source', 'status', 'switch', 'test', 'trap', 'type', 23 | 'ulimit', 'umask', 'vared', 'while' 24 | })) 25 | 26 | -- Identifiers. 27 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 28 | 29 | -- Variables. 30 | lex:add_rule('variable', token(lexer.VARIABLE, 31 | '$' * (lexer.word + lexer.range('{', '}', true)))) 32 | 33 | -- Strings. 
34 | local sq_str = lexer.range("'", false, false) 35 | local dq_str = lexer.range('"') 36 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 37 | 38 | -- Shebang. 39 | lex:add_rule('shebang', token(lexer.COMMENT, lexer.to_eol('#!/'))) 40 | 41 | -- Comments. 42 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) 43 | 44 | -- Numbers. 45 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 46 | 47 | -- Operators. 48 | lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/*^&|~.,:;?()[]{}'))) 49 | 50 | -- Fold points. 51 | lex:add_fold_point(lexer.KEYWORD, 'begin', 'end') 52 | lex:add_fold_point(lexer.KEYWORD, 'for', 'end') 53 | lex:add_fold_point(lexer.KEYWORD, 'function', 'end') 54 | lex:add_fold_point(lexer.KEYWORD, 'if', 'end') 55 | lex:add_fold_point(lexer.KEYWORD, 'switch', 'end') 56 | lex:add_fold_point(lexer.KEYWORD, 'while', 'end') 57 | 58 | lexer.property['scintillua.comment'] = '#' 59 | 60 | return lex 61 | -------------------------------------------------------------------------------- /lua/lexers/actionscript.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Actionscript LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('actionscript') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'break', 'continue', 'delete', 'do', 'else', 'for', 'function', 'if', 'in', 15 | 'new', 'on', 'return', 'this', 'typeof', 'var', 'void', 'while', 'with', 16 | 'NaN', 'Infinity', 'false', 'null', 'true', 'undefined', 17 | -- Reserved for future use. 18 | 'abstract', 'case', 'catch', 'class', 'const', 'debugger', 'default', 19 | 'export', 'extends', 'final', 'finally', 'goto', 'implements', 'import', 20 | 'instanceof', 'interface', 'native', 'package', 'private', 'Void', 21 | 'protected', 'public', 'dynamic', 'static', 'super', 'switch', 22 | 'synchronized', 'throw', 'throws', 'transient', 'try', 'volatile' 23 | })) 24 | 25 | -- Types. 26 | lex:add_rule('type', token(lexer.TYPE, word_match { 27 | 'Array', 'Boolean', 'Color', 'Date', 'Function', 'Key', 'MovieClip', 'Math', 28 | 'Mouse', 'Number', 'Object', 'Selection', 'Sound', 'String', 'XML', 29 | 'XMLNode', 'XMLSocket', -- Reserved for future use. 30 | 'boolean', 'byte', 'char', 'double', 'enum', 'float', 'int', 'long', 'short' 31 | })) 32 | 33 | -- Identifiers. 34 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 35 | 36 | -- Strings. 37 | local sq_str = lexer.range("'", true) 38 | local dq_str = lexer.range('"', true) 39 | local ml_str = lexer.range('<![CDATA[', ']]>') 40 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + ml_str)) 41 | 42 | -- Comments. 43 | local line_comment = lexer.to_eol('//') 44 | local block_comment = lexer.range('/*', '*/') 45 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 46 | 47 | -- Numbers. 48 | lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('LlUuFf') ^ -2)) 49 | 50 | -- Operators. 51 | lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/*%&|^~.,;?()[]{}'))) 52 | 53 | -- Fold points.
54 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 55 | lex:add_fold_point(lexer.COMMENT, '/*', '*/') 56 | lex:add_fold_point(lexer.STRING, '<![CDATA[', ']]>') 57 | 58 | lexer.property['scintillua.comment'] = '//' 59 | 60 | return lex 61 | -------------------------------------------------------------------------------- /lua/lexers/eiffel.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Eiffel LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('eiffel') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'alias', 'all', 'and', 'as', 'check', 'class', 'creation', 'debug', 15 | 'deferred', 'do', 'else', 'elseif', 'end', 'ensure', 'expanded', 'export', 16 | 'external', 'feature', 'from', 'frozen', 'if', 'implies', 'indexing', 17 | 'infix', 'inherit', 'inspect', 'invariant', 'is', 'like', 'local', 'loop', 18 | 'not', 'obsolete', 'old', 'once', 'or', 'prefix', 'redefine', 'rename', 19 | 'require', 'rescue', 'retry', 'select', 'separate', 'then', 'undefine', 20 | 'until', 'variant', 'when', 'xor', -- 21 | 'current', 'false', 'precursor', 'result', 'strip', 'true', 'unique', 'void' 22 | })) 23 | 24 | -- Types. 25 | lex:add_rule('type', token(lexer.TYPE, word_match( 26 | 'character string bit boolean integer real none any'))) 27 | 28 | -- Identifiers. 29 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 30 | 31 | -- Strings. 32 | local sq_str = lexer.range("'", true) 33 | local dq_str = lexer.range('"', true) 34 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 35 | 36 | -- Comments. 37 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('--'))) 38 | 39 | -- Numbers. 40 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 41 | 42 | -- Operators. 43 | lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/*%&|^~.,:;?()[]{}'))) 44 | 45 | -- Fold points. 46 | lex:add_fold_point(lexer.KEYWORD, 'check', 'end') 47 | lex:add_fold_point(lexer.KEYWORD, 'debug', 'end') 48 | lex:add_fold_point(lexer.KEYWORD, 'deferred', function(text, pos, line, s) 49 | return line:find('deferred%s+class') and 0 or 1 50 | end) 51 | lex:add_fold_point(lexer.KEYWORD, 'do', 'end') 52 | lex:add_fold_point(lexer.KEYWORD, 'from', 'end') 53 | lex:add_fold_point(lexer.KEYWORD, 'if', 'end') 54 | lex:add_fold_point(lexer.KEYWORD, 'inspect', 'end') 55 | lex:add_fold_point(lexer.KEYWORD, 'once', 'end') 56 | lex:add_fold_point(lexer.KEYWORD, 'class', function(text, pos, line, s) 57 | return line:find('deferred%s+class') and 0 or 1 58 | end) 59 | 60 | lexer.property['scintillua.comment'] = '--' 61 | 62 | return lex 63 | -------------------------------------------------------------------------------- /lua/lexers/powershell.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2015-2023 Mitchell. See LICENSE. 2 | -- PowerShell LPeg lexer. 3 | -- Contributed by Jeff Stone. 4 | local lexer = require('lexer') 5 | local token, word_match = lexer.token, lexer.word_match 6 | local P, S = lpeg.P, lpeg.S 7 | 8 | local lex = lexer.new('powershell') 9 | 10 | -- Whitespace. 11 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 12 | 13 | -- Comments.
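-- Note: only '#' line comments are matched here; PowerShell's multi-line '<# ... #>' block comments are not handled by this rule.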
14 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) 15 | 16 | -- Keywords. 17 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ 18 | 'Begin', 'Break', 'Continue', 'Do', 'Else', 'End', 'Exit', 'For', 'ForEach', 19 | 'ForEach-Object', 'Get-Date', 'Get-Random', 'If', 'Param', 'Pause', 20 | 'Powershell', 'Process', 'Read-Host', 'Return', 'Switch', 'While', 21 | 'Write-Host' 22 | }, true))) 23 | 24 | -- Comparison Operators. 25 | lex:add_rule('comparison', token(lexer.KEYWORD, '-' * word_match({ 26 | 'and', 'as', 'band', 'bor', 'contains', 'eq', 'ge', 'gt', 'is', 'isnot', 27 | 'le', 'like', 'lt', 'match', 'ne', 'nomatch', 'not', 'notcontains', 28 | 'notlike', 'or', 'replace' 29 | }, true))) 30 | 31 | -- Parameters. 32 | lex:add_rule('parameter', token(lexer.KEYWORD, '-' * word_match( 33 | 'Confirm Debug ErrorAction ErrorVariable OutBuffer OutVariable Verbose WhatIf', 34 | true))) 35 | 36 | -- Properties. 37 | lex:add_rule('property', token(lexer.KEYWORD, '.' * word_match( 38 | 'day dayofweek dayofyear hour millisecond minute month second timeofday year', 39 | true))) 40 | 41 | -- Types. 42 | lex:add_rule('type', token(lexer.KEYWORD, '[' * word_match({ 43 | 'array', 'boolean', 'byte', 'char', 'datetime', 'decimal', 'double', 44 | 'hashtable', 'int', 'long', 'single', 'string', 'xml' 45 | }, true) * ']')) 46 | 47 | -- Variables. 48 | lex:add_rule('variable', token(lexer.VARIABLE, '$' * 49 | (lexer.digit ^ 1 + lexer.word + 50 | lexer.range('{', '}', true)))) 51 | 52 | -- Strings. 53 | lex:add_rule('string', token(lexer.STRING, lexer.range('"', true))) 54 | 55 | -- Numbers. 56 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 57 | 58 | -- Operators. 59 | lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/*^&|~.,:;?()[]{}%`'))) 60 | 61 | -- Fold points. 62 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 63 | 64 | lexer.property['scintillua.comment'] = '#' 65 | 66 | return lex 67 | -------------------------------------------------------------------------------- /lua/lexers/icon.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- LPeg lexer for the Icon programming language. 3 | -- http://www.cs.arizona.edu/icon 4 | -- Contributed by Carl Sturtivant. 5 | local lexer = require('lexer') 6 | local token, word_match = lexer.token, lexer.word_match 7 | local P, S = lpeg.P, lpeg.S 8 | 9 | local lex = lexer.new('icon') 10 | 11 | -- Whitespace. 12 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 13 | 14 | -- Keywords. 15 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 16 | 'break', 'by', 'case', 'create', 'default', 'do', 'else', 'end', 'every', 17 | 'fail', 'global', 'if', 'initial', 'invocable', 'link', 'local', 'next', 18 | 'not', 'of', 'procedure', 'record', 'repeat', 'return', 'static', 'suspend', 19 | 'then', 'to', 'until', 'while' 20 | })) 21 | 22 | -- Icon Keywords: unique to Icon. 
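-- These are Icon's '&'-prefixed keywords, e.g. '&clock' or '&version'; they get their own 'special_keyword' token, styled like a type below.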
23 | lex:add_rule('special_keyword', token('special_keyword', '&' * word_match { 24 | 'allocated', 'ascii', 'clock', 'collections', 'cset', 'current', 'date', 25 | 'dateline', 'digits', 'dump', 'e', 'error', 'errornumber', 'errortext', 26 | 'errorvalue', 'errout', 'fail', 'features', 'file', 'host', 'input', 27 | 'lcase', 'letters', 'level', 'line', 'main', 'null', 'output', 'phi', 'pi', 28 | 'pos', 'progname', 'random', 'regions', 'source', 'storage', 'subject', 29 | 'time', 'trace', 'ucase', 'version' 30 | })) 31 | lex:add_style('special_keyword', lexer.styles.type) 32 | 33 | -- Identifiers. 34 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 35 | 36 | -- Strings. 37 | local sq_str = lexer.range("'") 38 | local dq_str = lexer.range('"') 39 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 40 | 41 | -- Comments. 42 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#', true))) 43 | 44 | -- Numbers. 45 | local radix_literal = P('-') ^ -1 * lexer.dec_num * S('rR') * lexer.alnum ^ 1 46 | lex:add_rule('number', token(lexer.NUMBER, radix_literal + lexer.number)) 47 | 48 | -- Preprocessor. 49 | lex:add_rule('preproc', token(lexer.PREPROCESSOR, '$' * 50 | word_match( 51 | 'define else endif error ifdef ifndef include line undef'))) 52 | 53 | -- Operators. 54 | lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>~!=^&|?~@:;,.()[]{}'))) 55 | 56 | -- Fold points. 57 | lex:add_fold_point(lexer.PREPROCESSOR, 'ifdef', 'endif') 58 | lex:add_fold_point(lexer.PREPROCESSOR, 'ifndef', 'endif') 59 | lex:add_fold_point(lexer.KEYWORD, 'procedure', 'end') 60 | 61 | lexer.property['scintillua.comment'] = '#' 62 | 63 | return lex 64 | -------------------------------------------------------------------------------- /lua/lexers/vb.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- VisualBasic LPeg lexer. 3 | local lexer = lexer 4 | local P, S = lpeg.P, lpeg.S 5 | 6 | local lex = lexer.new(..., {case_insensitive_fold_points = true}) 7 | 8 | -- Keywords. 9 | lex:add_rule('keyword', 10 | lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD, true))) 11 | 12 | -- Types. 13 | lex:add_rule('type', lex:tag(lexer.TYPE, lex:word_match(lexer.TYPE, true))) 14 | 15 | -- Comments. 16 | lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol( 17 | "'" + lexer.word_match('rem', true)))) 18 | 19 | -- Identifiers. 20 | lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) 21 | 22 | -- Strings. 23 | lex:add_rule('string', lex:tag(lexer.STRING, lexer.range('"', true, false))) 24 | 25 | -- Numbers. 26 | lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number * S('LlUuFf') ^ -2)) 27 | 28 | -- Operators. 29 | lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('=><+-*^&:.,_()'))) 30 | 31 | -- Fold points. 
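-- Because the lexer was created with case_insensitive_fold_points = true, these also fold lowercase and uppercase forms such as 'if' ... 'end if'.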
32 | lex:add_fold_point(lexer.KEYWORD, 'If', 'End If') 33 | lex:add_fold_point(lexer.KEYWORD, 'Select', 'End Select') 34 | lex:add_fold_point(lexer.KEYWORD, 'For', 'Next') 35 | lex:add_fold_point(lexer.KEYWORD, 'While', 'End While') 36 | lex:add_fold_point(lexer.KEYWORD, 'While', 'Wend') 37 | lex:add_fold_point(lexer.KEYWORD, 'Do', 'Loop') 38 | lex:add_fold_point(lexer.KEYWORD, 'With', 'End With') 39 | lex:add_fold_point(lexer.KEYWORD, 'Sub', 'End Sub') 40 | lex:add_fold_point(lexer.KEYWORD, 'Function', 'End Function') 41 | lex:add_fold_point(lexer.KEYWORD, 'Property', 'End Property') 42 | lex:add_fold_point(lexer.KEYWORD, 'Module', 'End Module') 43 | lex:add_fold_point(lexer.KEYWORD, 'Class', 'End Class') 44 | lex:add_fold_point(lexer.KEYWORD, 'Try', 'End Try') 45 | 46 | -- Word lists. 47 | lex:set_word_list(lexer.KEYWORD, { 48 | -- Control. 49 | 'If', 'Then', 'Else', 'ElseIf', 'While', 'Wend', 'For', 'To', 'Each', 'In', 50 | 'Step', 'Case', 'Select', 'Return', 'Continue', 'Do', 'Until', 'Loop', 51 | 'Next', 'With', 'Exit', -- Operators. 52 | 'Mod', 'And', 'Not', 'Or', 'Xor', 'Is', -- Storage types. 53 | 'Call', 'Class', 'Const', 'Dim', 'ReDim', 'Preserve', 'Function', 'Sub', 54 | 'Property', 'End', 'Set', 'Let', 'Get', 'New', 'Randomize', 'Option', 55 | 'Explicit', 'On', 'Error', 'Execute', 'Module', -- Storage modifiers. 56 | 'Private', 'Public', 'Default', -- Constants. 57 | 'Empty', 'False', 'Nothing', 'Null', 'True' 58 | }) 59 | 60 | lex:set_word_list(lexer.TYPE, { 61 | 'Boolean', 'Byte', 'Char', 'Date', 'Decimal', 'Double', 'Long', 'Object', 62 | 'Short', 'Single', 'String' 63 | }) 64 | 65 | lexer.property['scintillua.comment'] = "'" 66 | 67 | return lex 68 | -------------------------------------------------------------------------------- /lua/lexers/nemerle.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Nemerle LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('nemerle') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | '_', 'abstract', 'and', 'array', 'as', 'base', 'catch', 'class', 'def', 15 | 'do', 'else', 'extends', 'extern', 'finally', 'foreach', 'for', 'fun', 'if', 16 | 'implements', 'in', 'interface', 'internal', 'lock', 'macro', 'match', 17 | 'module', 'mutable', 'namespace', 'new', 'out', 'override', 'params', 18 | 'private', 'protected', 'public', 'ref', 'repeat', 'sealed', 'static', 19 | 'struct', 'syntax', 'this', 'throw', 'try', 'type', 'typeof', 'unless', 20 | 'until', 'using', 'variant', 'virtual', 'when', 'where', 'while', 21 | -- Values. 22 | 'null', 'true', 'false' 23 | })) 24 | 25 | -- Types. 26 | lex:add_rule('type', token(lexer.TYPE, word_match { 27 | 'bool', 'byte', 'char', 'decimal', 'double', 'float', 'int', 'list', 'long', 28 | 'object', 'sbyte', 'short', 'string', 'uint', 'ulong', 'ushort', 'void' 29 | })) 30 | 31 | -- Strings. 32 | local sq_str = P('L') ^ -1 * lexer.range("'", true) 33 | local dq_str = P('L') ^ -1 * lexer.range('"', true) 34 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 35 | 36 | -- Identifiers. 37 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 38 | 39 | -- Comments. 
40 | local line_comment = lexer.to_eol('//', true) 41 | local block_comment = lexer.range('/*', '*/') 42 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 43 | 44 | -- Numbers. 45 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 46 | 47 | -- Preprocessor. 48 | lex:add_rule('preproc', 49 | token(lexer.PREPROCESSOR, 50 | lexer.starts_line('#') * S('\t ') ^ 0 * word_match { 51 | 'define', 'elif', 'else', 'endif', 'endregion', 'error', 'if', 'ifdef', 52 | 'ifndef', 'line', 'pragma', 'region', 'undef', 'using', 'warning' 53 | })) 54 | 55 | -- Operators. 56 | lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}'))) 57 | 58 | -- Fold points. 59 | lex:add_fold_point(lexer.PREPROCESSOR, 'region', 'endregion') 60 | lex:add_fold_point(lexer.PREPROCESSOR, 'if', 'endif') 61 | lex:add_fold_point(lexer.PREPROCESSOR, 'ifdef', 'endif') 62 | lex:add_fold_point(lexer.PREPROCESSOR, 'ifndef', 'endif') 63 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 64 | lex:add_fold_point(lexer.COMMENT, '/*', '*/') 65 | 66 | lexer.property['scintillua.comment'] = '//' 67 | 68 | return lex 69 | -------------------------------------------------------------------------------- /lua/lexers/fsharp.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- F# LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('fsharp', {fold_by_indentation = true}) 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'abstract', 'and', 'as', 'assert', 'asr', 'begin', 'class', 'default', 15 | 'delegate', 'do', 'done', 'downcast', 'downto', 'else', 'end', 'enum', 16 | 'exception', 'false', 'finally', 'for', 'fun', 'function', 'if', 'in', 17 | 'inherit', 'interface', 'land', 'lazy', 'let', 'lor', 'lsl', 'lsr', 'lxor', 18 | 'match', 'member', 'mod', 'module', 'mutable', 'namespace', 'new', 'null', 19 | 'of', 'open', 'or', 'override', 'sig', 'static', 'struct', 'then', 'to', 20 | 'true', 'try', 'type', 'val', 'when', 'inline', 'upcast', 'while', 'with', 21 | 'async', 'atomic', 'break', 'checked', 'component', 'const', 'constructor', 22 | 'continue', 'eager', 'event', 'external', 'fixed', 'functor', 'include', 23 | 'method', 'mixin', 'process', 'property', 'protected', 'public', 'pure', 24 | 'readonly', 'return', 'sealed', 'switch', 'virtual', 'void', 'volatile', 25 | 'where', -- Booleans. 26 | 'true', 'false' 27 | })) 28 | 29 | -- Types. 30 | lex:add_rule('type', token(lexer.TYPE, word_match { 31 | 'bool', 'byte', 'sbyte', 'int16', 'uint16', 'int', 'uint32', 'int64', 32 | 'uint64', 'nativeint', 'unativeint', 'char', 'string', 'decimal', 'unit', 33 | 'void', 'float32', 'single', 'float', 'double' 34 | })) 35 | 36 | -- Identifiers. 37 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 38 | 39 | -- Strings. 40 | local sq_str = lexer.range("'", true) 41 | local dq_str = lexer.range('"', true) 42 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 43 | 44 | -- Comments. 45 | local line_comment = lexer.to_eol('//') 46 | local block_comment = lexer.range('(*', '*)', false, false, true) 47 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 48 | 49 | -- Numbers.
50 | lex:add_rule('number', 51 | token(lexer.NUMBER, lexer.float + lexer.integer * S('uUlL') ^ -1)) 52 | 53 | -- Preprocessor. 54 | lex:add_rule('preproc', 55 | token(lexer.PREPROCESSOR, lexer.starts_line('#') * S('\t ') ^ 0 * 56 | word_match( 57 | 'else endif endregion if ifdef ifndef light region'))) 58 | 59 | -- Operators. 60 | lex:add_rule('operator', token(lexer.OPERATOR, S('=<>+-*/^.,:;~!@#%^&|?[](){}'))) 61 | 62 | lexer.property['scintillua.comment'] = '//' 63 | 64 | return lex 65 | -------------------------------------------------------------------------------- /lua/lexers/boo.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Boo LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('boo') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'and', 'break', 'cast', 'continue', 'elif', 'else', 'ensure', 'except', 15 | 'for', 'given', 'goto', 'if', 'in', 'isa', 'is', 'not', 'or', 'otherwise', 16 | 'pass', 'raise', 'ref', 'try', 'unless', 'when', 'while', -- Definitions. 17 | 'abstract', 'callable', 'class', 'constructor', 'def', 'destructor', 'do', 18 | 'enum', 'event', 'final', 'get', 'interface', 'internal', 'of', 'override', 19 | 'partial', 'private', 'protected', 'public', 'return', 'set', 'static', 20 | 'struct', 'transient', 'virtual', 'yield', -- Namespaces. 21 | 'as', 'from', 'import', 'namespace', -- Other. 22 | 'self', 'super', 'null', 'true', 'false' 23 | })) 24 | 25 | -- Types. 26 | lex:add_rule('type', token(lexer.TYPE, word_match { 27 | 'bool', 'byte', 'char', 'date', 'decimal', 'double', 'duck', 'float', 'int', 28 | 'long', 'object', 'operator', 'regex', 'sbyte', 'short', 'single', 'string', 29 | 'timespan', 'uint', 'ulong', 'ushort' 30 | })) 31 | 32 | -- Functions. 33 | lex:add_rule('function', token(lexer.FUNCTION, word_match { 34 | 'array', 'assert', 'checked', 'enumerate', '__eval__', 'filter', 'getter', 35 | 'len', 'lock', 'map', 'matrix', 'max', 'min', 'normalArrayIndexing', 36 | 'print', 'property', 'range', 'rawArrayIndexing', 'required', '__switch__', 37 | 'typeof', 'unchecked', 'using', 'yieldAll', 'zip' 38 | })) 39 | 40 | -- Identifiers. 41 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 42 | 43 | -- Strings. 44 | local sq_str = lexer.range("'", true) 45 | local dq_str = lexer.range('"', true) 46 | local tq_str = lexer.range('"""') 47 | local string = token(lexer.STRING, tq_str + sq_str + dq_str) 48 | local regex_str = lexer.after_set('!%^&*([{-=+|:;,?<>~', lexer.range('/', true)) 49 | local regex = token(lexer.REGEX, regex_str) 50 | lex:add_rule('string', string + regex) 51 | 52 | -- Comments. 53 | local line_comment = lexer.to_eol('#', true) 54 | local block_comment = lexer.range('/*', '*/') 55 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 56 | 57 | -- Numbers. 58 | lex:add_rule('number', 59 | token(lexer.NUMBER, lexer.number * (S('msdhsfFlL') + 'ms') ^ -1)) 60 | 61 | -- Operators. 
62 | lex:add_rule('operator', token(lexer.OPERATOR, S('!%^&*()[]{}-=+/|:;.,?<>~`'))) 63 | 64 | lexer.property['scintillua.comment'] = '#' 65 | 66 | return lex 67 | -------------------------------------------------------------------------------- /lua/lexers/go.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Go LPeg lexer. 3 | local lexer = lexer 4 | local P, S = lpeg.P, lpeg.S 5 | 6 | local lex = lexer.new(...) 7 | 8 | -- Keywords. 9 | lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) 10 | 11 | -- Constants. 12 | lex:add_rule('constant', lex:tag(lexer.CONSTANT_BUILTIN, 13 | lex:word_match(lexer.CONSTANT_BUILTIN))) 14 | 15 | -- Types. 16 | lex:add_rule('type', lex:tag(lexer.TYPE, lex:word_match(lexer.TYPE))) 17 | 18 | -- Functions. 19 | local builtin_func = -lpeg.B('.') * 20 | lex:tag(lexer.FUNCTION_BUILTIN, 21 | lex:word_match(lexer.FUNCTION_BUILTIN)) 22 | local func = lex:tag(lexer.FUNCTION, lexer.word) 23 | local method = lpeg.B('.') * lex:tag(lexer.FUNCTION_METHOD, lexer.word) 24 | lex:add_rule('function', 25 | (builtin_func + method + func) * #(lexer.space ^ 0 * '(')) 26 | 27 | -- Identifiers. 28 | lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) 29 | 30 | -- Strings. 31 | local sq_str = lexer.range("'", true) 32 | local dq_str = lexer.range('"', true) 33 | local raw_str = lexer.range('`', false, false) 34 | lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str + raw_str)) 35 | 36 | -- Comments. 37 | local line_comment = lexer.to_eol('//') 38 | local block_comment = lexer.range('/*', '*/') 39 | lex:add_rule('comment', lex:tag(lexer.COMMENT, line_comment + block_comment)) 40 | 41 | -- Numbers. 42 | lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number * P('i') ^ -1)) 43 | 44 | -- Operators. 45 | lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('+-*/%&|^<>=!~:;.,()[]{}'))) 46 | 47 | -- Fold points. 48 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 49 | lex:add_fold_point(lexer.COMMENT, '/*', '*/') 50 | 51 | -- Word lists. 52 | lex:set_word_list(lexer.KEYWORD, { 53 | 'break', 'case', 'chan', 'const', 'continue', 'default', 'defer', 'else', 54 | 'fallthrough', 'for', 'func', 'go', 'goto', 'if', 'import', 'interface', 55 | 'map', 'package', 'range', 'return', 'select', 'struct', 'switch', 'type', 56 | 'var' 57 | }) 58 | 59 | lex:set_word_list(lexer.CONSTANT_BUILTIN, 'true false iota nil') 60 | 61 | lex:set_word_list(lexer.TYPE, { 62 | 'any', 'bool', 'byte', 'comparable', 'complex64', 'complex128', 'error', 63 | 'float32', 'float64', 'int', 'int8', 'int16', 'int32', 'int64', 'rune', 64 | 'string', 'uint', 'uint8', 'uint16', 'uint32', 'uint64', 'uintptr' 65 | }) 66 | 67 | lex:set_word_list(lexer.FUNCTION_BUILTIN, { 68 | 'append', 'cap', 'close', 'complex', 'copy', 'delete', 'imag', 'len', 69 | 'make', 'new', 'panic', 'print', 'println', 'real', 'recover' 70 | }) 71 | 72 | lexer.property['scintillua.comment'] = '//' 73 | 74 | return lex 75 | -------------------------------------------------------------------------------- /lua/lexers/objective_c.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Objective C LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('objective_c') 8 | 9 | -- Whitespace. 
10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | -- From C. 15 | 'asm', 'auto', 'break', 'case', 'const', 'continue', 'default', 'do', 16 | 'else', 'extern', 'false', 'for', 'goto', 'if', 'inline', 'register', 17 | 'return', 'sizeof', 'static', 'switch', 'true', 'typedef', 'void', 18 | 'volatile', 'while', 'restrict', '_Bool', '_Complex', '_Pragma', 19 | '_Imaginary', -- Objective C. 20 | 'oneway', 'in', 'out', 'inout', 'bycopy', 'byref', 'self', 'super', 21 | -- Preprocessor directives. 22 | '@interface', '@implementation', '@protocol', '@end', '@private', 23 | '@protected', '@public', '@class', '@selector', '@encode', '@defs', 24 | '@synchronized', '@try', '@throw', '@catch', '@finally', -- Constants. 25 | 'TRUE', 'FALSE', 'YES', 'NO', 'NULL', 'nil', 'Nil', 'METHOD_NULL' 26 | })) 27 | 28 | -- Types. 29 | lex:add_rule('type', token(lexer.TYPE, word_match( 30 | 'apply_t id Class MetaClass Object Protocol retval_t SEL STR IMP BOOL TypedStream'))) 31 | 32 | -- Strings. 33 | local sq_str = P('L') ^ -1 * lexer.range("'", true) 34 | local dq_str = P('L') ^ -1 * lexer.range('"', true) 35 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 36 | 37 | -- Identifiers. 38 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 39 | 40 | -- Comments. 41 | local line_comment = lexer.to_eol('//', true) 42 | local block_comment = lexer.range('/*', '*/') 43 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 44 | 45 | -- Numbers. 46 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 47 | 48 | -- Preprocessor. 49 | lex:add_rule('preprocessor', #lexer.starts_line('#') * 50 | token(lexer.PREPROCESSOR, '#' * S('\t ') ^ 0 * word_match { 51 | 'define', 'elif', 'else', 'endif', 'error', 'if', 'ifdef', 'ifndef', 52 | 'import', 'include', 'line', 'pragma', 'undef', 'warning' 53 | })) 54 | 55 | -- Operators. 56 | lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}'))) 57 | 58 | -- Fold symbols. 59 | lex:add_fold_point(lexer.PREPROCESSOR, 'region', 'endregion') 60 | lex:add_fold_point(lexer.PREPROCESSOR, 'if', 'endif') 61 | lex:add_fold_point(lexer.PREPROCESSOR, 'ifdef', 'endif') 62 | lex:add_fold_point(lexer.PREPROCESSOR, 'ifndef', 'endif') 63 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 64 | lex:add_fold_point(lexer.COMMENT, '/*', '*/') 65 | 66 | lexer.property['scintillua.comment'] = '//' 67 | 68 | return lex 69 | -------------------------------------------------------------------------------- /lua/lexers/csharp.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- C# LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('csharp') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 
13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'class', 'delegate', 'enum', 'event', 'interface', 'namespace', 'struct', 15 | 'using', 'abstract', 'const', 'explicit', 'extern', 'fixed', 'implicit', 16 | 'internal', 'lock', 'out', 'override', 'params', 'partial', 'private', 17 | 'protected', 'public', 'ref', 'sealed', 'static', 'readonly', 'unsafe', 18 | 'virtual', 'volatile', 'add', 'as', 'assembly', 'base', 'break', 'case', 19 | 'catch', 'checked', 'continue', 'default', 'do', 'else', 'finally', 'for', 20 | 'foreach', 'get', 'goto', 'if', 'in', 'is', 'new', 'remove', 'return', 21 | 'set', 'sizeof', 'stackalloc', 'super', 'switch', 'this', 'throw', 'try', 22 | 'typeof', 'unchecked', 'value', 'var', 'void', 'while', 'yield', 'null', 23 | 'true', 'false' 24 | })) 25 | 26 | -- Types. 27 | lex:add_rule('type', token(lexer.TYPE, word_match { 28 | 'bool', 'byte', 'char', 'decimal', 'double', 'float', 'int', 'long', 29 | 'object', 'operator', 'sbyte', 'short', 'string', 'uint', 'ulong', 'ushort' 30 | })) 31 | 32 | -- Identifiers. 33 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 34 | 35 | -- Comments. 36 | local line_comment = lexer.to_eol('//', true) 37 | local block_comment = lexer.range('/*', '*/') 38 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 39 | 40 | -- Strings. 41 | local sq_str = lexer.range("'", true) 42 | local dq_str = lexer.range('"', true) 43 | local ml_str = P('@') ^ -1 * lexer.range('"', false, false) 44 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + ml_str)) 45 | 46 | -- Numbers. 47 | lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('lLdDfFmM') ^ -1)) 48 | 49 | -- Preprocessor. 50 | lex:add_rule('preprocessor', token(lexer.PREPROCESSOR, '#' * S('\t ') ^ 0 * 51 | word_match( 52 | 'define elif else endif error if line undef warning region endregion'))) 53 | 54 | -- Operators. 55 | lex:add_rule('operator', token(lexer.OPERATOR, S('~!.,:;+-*/<>=\\^|&%?()[]{}'))) 56 | 57 | -- Fold points. 58 | lex:add_fold_point(lexer.PREPROCESSOR, 'if', 'endif') 59 | lex:add_fold_point(lexer.PREPROCESSOR, 'ifdef', 'endif') 60 | lex:add_fold_point(lexer.PREPROCESSOR, 'ifndef', 'endif') 61 | lex:add_fold_point(lexer.PREPROCESSOR, 'region', 'endregion') 62 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 63 | lex:add_fold_point(lexer.COMMENT, '/*', '*/') 64 | 65 | lexer.property['scintillua.comment'] = '//' 66 | 67 | return lex 68 | -------------------------------------------------------------------------------- /lua/lexers/lisp.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Lisp LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('lisp') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 
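-- Note: word_match entries are matched literally, so names containing punctuation, such as 'do*', 'let*' and 'multiple-value-bind', work as written.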
13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'defclass', 'defconstant', 'defgeneric', 'define-compiler-macro', 15 | 'define-condition', 'define-method-combination', 'define-modify-macro', 16 | 'define-setf-expander', 'define-symbol-macro', 'defmacro', 'defmethod', 17 | 'defpackage', 'defparameter', 'defsetf', 'defstruct', 'deftype', 'defun', 18 | 'defvar', -- 19 | 'abort', 'assert', 'block', 'break', 'case', 'catch', 'ccase', 'cerror', 20 | 'cond', 'ctypecase', 'declaim', 'declare', 'do', 'do*', 'do-all-symbols', 21 | 'do-external-symbols', 'do-symbols', 'dolist', 'dotimes', 'ecase', 'error', 22 | 'etypecase', 'eval-when', 'flet', 'handler-bind', 'handler-case', 'if', 23 | 'ignore-errors', 'in-package', 'labels', 'lambda', 'let', 'let*', 'locally', 24 | 'loop', 'macrolet', 'multiple-value-bind', 'proclaim', 'prog', 'prog*', 25 | 'prog1', 'prog2', 'progn', 'progv', 'provide', 'require', 'restart-bind', 26 | 'restart-case', 'restart-name', 'return', 'return-from', 'signal', 27 | 'symbol-macrolet', 'tagbody', 'the', 'throw', 'typecase', 'unless', 28 | 'unwind-protect', 'when', 'with-accessors', 'with-compilation-unit', 29 | 'with-condition-restarts', 'with-hash-table-iterator', 30 | 'with-input-from-string', 'with-open-file', 'with-open-stream', 31 | 'with-output-to-string', 'with-package-iterator', 'with-simple-restart', 32 | 'with-slots', 'with-standard-io-syntax', -- 33 | 't', 'nil' 34 | })) 35 | 36 | -- Identifiers. 37 | local word = lexer.alpha * (lexer.alnum + S('_-')) ^ 0 38 | lex:add_rule('identifier', token(lexer.IDENTIFIER, word)) 39 | 40 | -- Strings. 41 | lex:add_rule('string', token(lexer.STRING, 42 | "'" * word + lexer.range('"') + '#\\' * lexer.any)) 43 | 44 | -- Comments. 45 | local line_comment = lexer.to_eol(';') 46 | local block_comment = lexer.range('#|', '|#') 47 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 48 | 49 | -- Numbers. 50 | lex:add_rule('number', token(lexer.NUMBER, P('-') ^ -1 * lexer.digit ^ 1 * 51 | (S('./') * lexer.digit ^ 1) ^ -1)) 52 | 53 | -- Operators. 54 | lex:add_rule('operator', token(lexer.OPERATOR, S('<>=*/+-`@%()'))) 55 | 56 | -- Fold points. 57 | lex:add_fold_point(lexer.OPERATOR, '(', ')') 58 | lex:add_fold_point(lexer.OPERATOR, '[', ']') 59 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 60 | lex:add_fold_point(lexer.COMMENT, '#|', '|#') 61 | 62 | lexer.property['scintillua.comment'] = ';' 63 | 64 | return lex 65 | -------------------------------------------------------------------------------- /lua/lexers/snobol4.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2013-2023 Michael T. Richter. See LICENSE. 2 | -- SNOBOL4 lexer. 3 | -- This lexer works with classic SNOBOL4 as well as the CSNOBOL4 extensions. 4 | local lexer = require('lexer') 5 | local token, word_match = lexer.token, lexer.word_match 6 | local B, P, S = lpeg.B, lpeg.P, lpeg.S 7 | 8 | local lex = lexer.new('snobol4') 9 | 10 | -- Whitespace. 11 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 12 | 13 | -- Keywords. 14 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ 15 | 'ABORT', 'ARRAY', 'CONTINUE', 'DEFINE', 'END', 'FRETURN', 'INPUT', 16 | 'NRETURN', 'OUTPUT', 'PUNCH', 'RETURN', 'SCONTINUE', 'TABLE' 17 | }, true) + '&' * lexer.word)) 18 | 19 | -- Helper patterns. 20 | local dotted_id = lexer.word * ('.' * lexer.word) ^ 0 21 | 22 | -- Labels. 23 | lex:add_rule('label', token(lexer.LABEL, lexer.starts_line(dotted_id))) 24 | 25 | -- Targets. 
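-- Branch targets appear in goto fields such as ':(LABEL)', ':S(LABEL)' or ':F(LABEL)'; the lpeg.B() look-behind assertions below tag only the label name itself.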
26 | local branch = B(lexer.space * ':(') * dotted_id * #P(')') 27 | local sbranch = B(lexer.space * ':' * S('SsFf') * '(') * dotted_id * #P(')') 28 | local sbranchx = B(')' * S('SsFf') * '(') * dotted_id * #P(')') 29 | lex:add_rule('target', token(lexer.LABEL, branch + sbranch + sbranchx)) 30 | 31 | -- Patterns. 32 | lex:add_rule('pattern', lexer.token(lexer.CLASS, word_match({ 33 | -- Keep distinct. 34 | 'ABORT', 'ANY', 'ARB', 'ARBNO', 'BAL', 'BREAK', 'BREAKX', 'FAIL', 'FENCE', 35 | 'LEN', 'NOTANY', 'POS', 'REM', 'RPOS', 'RTAB', 'SPAN', 'SUCCEED', 'TAB' 36 | }, true) * #P('('))) 37 | 38 | -- Token definitions. 39 | lex:add_rule('built-in', token(lexer.FUNCTION, word_match({ 40 | 'APPLY', 'ARRAY', 'CHAR', 'CONVERT', 'COPY', 'DATA', 'DATE', 'DIFFER', 41 | 'DUPL', 'EQ', 'EVAL', 'FILE_ABSPATH', 'FILE_ISDIR', 'FREEZE', 'FUNCTION', 42 | 'GE', 'GT', 'HOST', 'IDENT', 'INTEGER', 'IO_FINDUNIT', 'ITEM', 'LABEL', 43 | 'LOAD', 'LPAD', 'LE', 'LGT', 'LT', 'NE', 'OPSYN', 'ORD', 'PROTOTYPE', 44 | 'REMDR', 'REPLACE', 'REVERSE', 'RPAD', 'RSORT', 'SERV_LISTEN', 'SET', 45 | 'SETEXIT', 'SIZE', 'SORT', 'SQRT', 'SSET', 'SUBSTR', 'TABLE', 'THAW', 46 | 'TIME', 'TRACE', 'TRIM', 'UNLOAD', 'VALUE', 'VDIFFER' 47 | }, true) * #P('('))) 48 | 49 | -- Identifiers. 50 | lex:add_rule('identifier', token(lexer.DEFAULT, dotted_id)) 51 | 52 | -- Strings. 53 | local dq_str = lexer.range('"', true, false) 54 | local sq_str = lexer.range("'", true, false) 55 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 56 | 57 | -- Comments. 58 | lex:add_rule('comment', 59 | token(lexer.COMMENT, lexer.starts_line(lexer.to_eol(S('*#|;!'))))) 60 | 61 | -- Numbers. 62 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 63 | 64 | -- Control. 65 | lex:add_rule('control', 66 | token(lexer.PREPROCESSOR, lexer.starts_line('-' * lexer.word))) 67 | 68 | -- Operators. 69 | lex:add_rule('operator', token(lexer.OPERATOR, S '¬?$.!%*/#+-@⊥&^~\\=')) 70 | 71 | lexer.property['scintillua.comment'] = '#' 72 | 73 | return lex 74 | -------------------------------------------------------------------------------- /lua/lexers/django.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Django LPeg lexer. 3 | local lexer = lexer 4 | local P, S = lpeg.P, lpeg.S 5 | 6 | local lex = lexer.new(...) 7 | 8 | -- Keywords. 9 | lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) 10 | 11 | -- Functions. 12 | lex:add_rule('function', lpeg.B('|') * 13 | lex:tag(lexer.FUNCTION_BUILTIN, 14 | lex:word_match(lexer.FUNCTION_BUILTIN))) 15 | 16 | -- Identifiers. 17 | lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) 18 | 19 | -- Strings. 20 | lex:add_rule('string', lex:tag(lexer.STRING, lexer.range('"', false, false))) 21 | 22 | -- Operators. 23 | lex:add_rule('operator', lex:tag(lexer.OPERATOR, S(':,.|'))) 24 | 25 | -- Embed Django in HTML. 26 | local html = lexer.load('html') 27 | html:add_rule('django_comment', 28 | lex:tag(lexer.COMMENT, lexer.range('{#', '#}', true))) 29 | local django_start_rule = lex:tag(lexer.PREPROCESSOR, '{' * S('{%')) 30 | local django_end_rule = lex:tag(lexer.PREPROCESSOR, S('%}') * '}') 31 | html:embed(lex, django_start_rule, django_end_rule) 32 | 33 | -- Fold points. 34 | lex:add_fold_point(lexer.PREPROCESSOR, '{{', '}}') 35 | lex:add_fold_point(lexer.PREPROCESSOR, '{%', '%}') 36 | 37 | -- Word lists. 
38 | lex:set_word_list(lexer.KEYWORD, { 39 | 'autoescape', 'endautoescape', 'block', 'endblock', 'comment', 'endcomment', 40 | 'csrf_token', 'cycle', 'as', 'debug', 'extends', 'filter', 'endfilter', 41 | 'firstof', 'for', 'in', 'endfor', 'empty', 'if', 'elif', 'else', 'endif', 42 | 'and', 'or', 'not', 'is', 'ifchanged', 'endifchanged', 'include', 'load', 43 | 'lorem', 'now', 'regroup', 'resetcycle', 'spaceless', 'endspaceless', 44 | 'templatetag', 'url', 'verbatim', 'endverbatim', 'widthratio', 'with', 45 | 'endwith', -- 46 | 'blocktranslate', 'endblocktranslate', 'translate', 'language', 47 | 'get_available_languages', 'get_current_language', 48 | 'get_current_language_bidi', 'get_language_info', 'get_language_info_list', -- 49 | 'get_static_prefix', 'get_media_prefix' 50 | }) 51 | 52 | lex:set_word_list(lexer.FUNCTION_BUILTIN, { 53 | 'add', 'addslashes', 'capfirst', 'center', 'cut', 'date', 'default', 54 | 'default_if_none', 'dictsort', 'dictsortreversed', 'divisibleby', 'escape', 55 | 'escapejs', 'filesizeformat', 'first', 'floatformat', 'force_escape', 56 | 'get_digit', 'iriencode', 'join', 'json_script', 'last', 'length', 57 | 'length_is', 'linebreaks', 'linebreaksbr', 'linenumbers', 'ljust', 'lower', 58 | 'make_list', 'phone2numeric', 'pluralize', 'pprint', 'random', 'rjust', 59 | 'safe', 'safeseq', 'slice', 'slugify', 'stringformat', 'striptags', 'time', 60 | 'timesince', 'timeuntil', 'title', 'truncatechars_html', 'truncatewords', 61 | 'truncatewords_html', 'unordered_list', 'upper', 'urlencode', 'urlize', 62 | 'urlizetrunc', 'wordcount', 'wordwrap', 'yesno', -- 63 | 'language_name', 'language_name_local', 'language_bidi', 64 | 'language_name_translated' 65 | }) 66 | 67 | lexer.property['scintillua.comment'] = '{#|#}' 68 | 69 | return lex 70 | -------------------------------------------------------------------------------- /lua/lexers/forth.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Forth LPeg lexer. 3 | -- Contributions from Joseph Eib. 4 | local lexer = require('lexer') 5 | local token, word_match = lexer.token, lexer.word_match 6 | local P, S = lpeg.P, lpeg.S 7 | 8 | local lex = lexer.new('forth') 9 | 10 | -- Whitespace. 11 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 12 | 13 | -- Strings. 14 | local c_str = 'c' * lexer.range('"', true, false) 15 | local s_str = 's' * lexer.range('"', true, false) 16 | local s_bs_str = 's\\' * lexer.range('"', true) 17 | local dot_str = '.' * lexer.range('"', true, false) 18 | local dot_paren_str = '.' * lexer.range('(', ')', true) 19 | local abort_str = 'abort' * lexer.range('"', true, false) 20 | lex:add_rule('string', token(lexer.STRING, c_str + s_str + s_bs_str + dot_str + 21 | dot_paren_str + abort_str)) 22 | 23 | -- Keywords. 
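-- The 'true' argument to word_match below makes matching case-insensitive, so e.g. DUP, Dup and dup are all recognized as keywords.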
24 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ 25 | '#>', '#s', '*/', '*/mod', '+loop', ',', '.', '.r', '/mod', '0<', '0<>', 26 | '0>', '0=', '1+', '1-', '2!', '2*', '2/', '2>r', '2@', '2drop', '2dup', 27 | '2over', '2r>', '2r@', '2swap', ':noname', '<#', '<>', '>body', '>in', 28 | '>number', '>r', '?do', '?dup', '@', 'abort', 'abs', 'accept', 'action-of', 29 | 'again', 'align', 'aligned', 'allot', 'and', 'base', 'begin', 'bl', 30 | 'buffer:', 'c!', 'c,', 'c@', 'case', 'cell+', 'cells', 'char', 'char+', 31 | 'chars', 'compile,', 'constant', 'count', 'cr', 'create', 'decimal', 32 | 'defer', 'defer!', 'defer@', 'depth', 'do', 'does>', 'drop', 'dup', 'else', 33 | 'emit', 'endcase', 'endof', 'environment?', 'erase', 'evaluate', 'execute', 34 | 'exit', 'false', 'fill', 'find', 'fm/mod', 'here', 'hex', 'hold', 'holds', 35 | 'i', 'if', 'immediate', 'invert', 'is', 'j', 'key', 'leave', 'literal', 36 | 'loop', 'lshift', 'm*', 'marker', 'max', 'min', 'mod', 'move', 'negate', 37 | 'nip', 'of', 'or', 'over', 'pad', 'parse', 'parse-name', 'pick', 'postpone', 38 | 'quit', 'r>', 'r@', 'recurse', 'refill', 'restore-input', 'roll', 'rot', 39 | 'rshift', 's>d', 'save-input', 'sign', 'sm/rem', 'source', 'source-id', 40 | 'space', 'spaces', 'state', 'swap', 'to', 'then', 'true', 'tuck', 'type', 41 | 'u.', 'u.r', 'u>', 'u<', 'um*', 'um/mod', 'unloop', 'until', 'unused', 42 | 'value', 'variable', 'while', 'within', 'word', 'xor', "[']", '[char]', 43 | '[compile]' 44 | }, true))) 45 | 46 | -- Identifiers. 47 | lex:add_rule('identifier', 48 | token(lexer.IDENTIFIER, (lexer.alnum + S('+-*=<>.?/\'%,_$#')) ^ 1)) 49 | 50 | -- Comments. 51 | local line_comment = lexer.to_eol(S('|\\')) 52 | local block_comment = lexer.range('(', ')') 53 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 54 | 55 | -- Numbers. 56 | lex:add_rule('number', token(lexer.NUMBER, P('-') ^ -1 * lexer.digit ^ 1 * 57 | (S('./') * lexer.digit ^ 1) ^ -1)) 58 | 59 | -- Operators. 60 | lex:add_rule('operator', token(lexer.OPERATOR, S(':;<>+*-/[]#'))) 61 | 62 | lexer.property['scintillua.comment'] = '\\' 63 | 64 | return lex 65 | -------------------------------------------------------------------------------- /lua/lexers/pascal.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Pascal LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('pascal') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords.
13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ 14 | 'and', 'array', 'as', 'at', 'asm', 'begin', 'case', 'class', 'const', 15 | 'constructor', 'destructor', 'dispinterface', 'div', 'do', 'downto', 'else', 16 | 'end', 'except', 'exports', 'file', 'final', 'finalization', 'finally', 17 | 'for', 'function', 'goto', 'if', 'implementation', 'in', 'inherited', 18 | 'initialization', 'inline', 'interface', 'is', 'label', 'mod', 'not', 19 | 'object', 'of', 'on', 'or', 'out', 'packed', 'procedure', 'program', 20 | 'property', 'raise', 'record', 'repeat', 'resourcestring', 'set', 'sealed', 21 | 'shl', 'shr', 'static', 'string', 'then', 'threadvar', 'to', 'try', 'type', 22 | 'unit', 'unsafe', 'until', 'uses', 'var', 'while', 'with', 'xor', 23 | 'absolute', 'abstract', 'assembler', 'automated', 'cdecl', 'contains', 24 | 'default', 'deprecated', 'dispid', 'dynamic', 'export', 'external', 'far', 25 | 'forward', 'implements', 'index', 'library', 'local', 'message', 'name', 26 | 'namespaces', 'near', 'nodefault', 'overload', 'override', 'package', 27 | 'pascal', 'platform', 'private', 'protected', 'public', 'published', 'read', 28 | 'readonly', 'register', 'reintroduce', 'requires', 'resident', 'safecall', 29 | 'stdcall', 'stored', 'varargs', 'virtual', 'write', 'writeln', 'writeonly', -- 30 | 'false', 'nil', 'self', 'true' 31 | }, true))) 32 | 33 | -- Functions. 34 | lex:add_rule('function', token(lexer.FUNCTION, word_match({ 35 | 'chr', 'ord', 'succ', 'pred', 'abs', 'round', 'trunc', 'sqr', 'sqrt', 36 | 'arctan', 'cos', 'sin', 'exp', 'ln', 'odd', 'eof', 'eoln' 37 | }, true))) 38 | 39 | -- Types. 40 | lex:add_rule('type', token(lexer.TYPE, word_match({ 41 | 'shortint', 'byte', 'char', 'smallint', 'integer', 'word', 'longint', 42 | 'cardinal', 'boolean', 'bytebool', 'wordbool', 'longbool', 'real', 'single', 43 | 'double', 'extended', 'comp', 'currency', 'pointer' 44 | }, true))) 45 | 46 | -- Strings. 47 | lex:add_rule('string', token(lexer.STRING, 48 | S('uUrR') ^ -1 * lexer.range("'", true, false))) 49 | 50 | -- Identifiers. 51 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 52 | 53 | -- Comments. 54 | local line_comment = lexer.to_eol('//', true) 55 | local bblock_comment = lexer.range('{', '}') 56 | local pblock_comment = lexer.range('(*', '*)') 57 | lex:add_rule('comment', token(lexer.COMMENT, 58 | line_comment + bblock_comment + pblock_comment)) 59 | 60 | -- Numbers. 61 | lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('LlDdFf') ^ -1)) 62 | 63 | -- Operators. 64 | lex:add_rule('operator', token(lexer.OPERATOR, S('.,;^@:=<>+-/*()[]'))) 65 | 66 | lexer.property['scintillua.comment'] = '//' 67 | 68 | return lex 69 | -------------------------------------------------------------------------------- /lua/lexers/groovy.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Groovy LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('groovy') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 
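-- Rules are tried in the order they are added, so the keyword rule must come
-- before the more general identifier rule; otherwise every keyword would lex
-- as a plain identifier.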
13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'abstract', 'break', 'case', 'catch', 'continue', 'default', 'do', 'else', 15 | 'extends', 'final', 'finally', 'for', 'if', 'implements', 'instanceof', 16 | 'native', 'new', 'private', 'protected', 'public', 'return', 'static', 17 | 'switch', 'synchronized', 'throw', 'throws', 'transient', 'try', 'volatile', 18 | 'while', 'strictfp', 'package', 'import', 'as', 'assert', 'def', 'mixin', 19 | 'property', 'test', 'using', 'in', 'false', 'null', 'super', 'this', 'true', 20 | 'it' 21 | })) 22 | 23 | -- Functions. 24 | lex:add_rule('function', token(lexer.FUNCTION, word_match { 25 | 'abs', 'any', 'append', 'asList', 'asWritable', 'call', 'collect', 26 | 'compareTo', 'count', 'div', 'dump', 'each', 'eachByte', 'eachFile', 27 | 'eachLine', 'every', 'find', 'findAll', 'flatten', 'getAt', 'getErr', 28 | 'getIn', 'getOut', 'getText', 'grep', 'immutable', 'inject', 'inspect', 29 | 'intersect', 'invokeMethods', 'isCase', 'join', 'leftShift', 'minus', 30 | 'multiply', 'newInputStream', 'newOutputStream', 'newPrintWriter', 31 | 'newReader', 'newWriter', 'next', 'plus', 'pop', 'power', 'previous', 32 | 'print', 'println', 'push', 'putAt', 'read', 'readBytes', 'readLines', 33 | 'reverse', 'reverseEach', 'round', 'size', 'sort', 'splitEachLine', 'step', 34 | 'subMap', 'times', 'toInteger', 'toList', 'tokenize', 'upto', 35 | 'waitForOrKill', 'withPrintWriter', 'withReader', 'withStream', 36 | 'withWriter', 'withWriterAppend', 'write', 'writeLine' 37 | })) 38 | 39 | -- Types. 40 | lex:add_rule('type', token(lexer.TYPE, word_match( 41 | 'boolean byte char class double float int interface long short void'))) 42 | 43 | -- Identifiers. 44 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 45 | 46 | -- Comments. 47 | local line_comment = lexer.to_eol('//', true) 48 | local block_comment = lexer.range('/*', '*/') 49 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 50 | 51 | -- Strings. 52 | local sq_str = lexer.range("'") 53 | local dq_str = lexer.range('"') 54 | local tq_str = lexer.range("'''") + lexer.range('"""') 55 | local string = token(lexer.STRING, tq_str + sq_str + dq_str) 56 | local regex_str = lexer.after_set('=~|!<>+-*?&,:;([{', lexer.range('/', true)) 57 | local regex = token(lexer.REGEX, regex_str) 58 | lex:add_rule('string', string + regex) 59 | 60 | -- Numbers. 61 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 62 | 63 | -- Operators. 64 | lex:add_rule('operator', token(lexer.OPERATOR, S('=~|!<>+-/*?&.,:;()[]{}'))) 65 | 66 | -- Fold points. 67 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 68 | lex:add_fold_point(lexer.COMMENT, '/*', '*/') 69 | 70 | lexer.property['scintillua.comment'] = '//' 71 | 72 | return lex 73 | -------------------------------------------------------------------------------- /lua/lexers/vcard.lua: -------------------------------------------------------------------------------- 1 | -- Copyright (c) 2015-2023 Piotr Orzechowski [drzewo.org]. See LICENSE. 2 | -- vCard 2.1, 3.0 and 4.0 LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('vcard') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Begin vCard, end vCard. 
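-- Each sequence below matches a literal BEGIN:VCARD / END:VCARD line as three
-- consecutive tokens: the keyword, the ':' operator, and the VCARD marker.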
13 | lex:add_rule('begin_sequence', 14 | token(lexer.KEYWORD, 'BEGIN') * token(lexer.OPERATOR, ':') * 15 | token(lexer.COMMENT, 'VCARD')) 16 | lex:add_rule('end_sequence', 17 | token(lexer.KEYWORD, 'END') * token(lexer.OPERATOR, ':') * 18 | token(lexer.COMMENT, 'VCARD')) 19 | 20 | -- vCard version (in v3.0 and v4.0 must appear immediately after BEGIN:VCARD). 21 | lex:add_rule('version_sequence', 22 | token(lexer.KEYWORD, 'VERSION') * token(lexer.OPERATOR, ':') * 23 | token(lexer.CONSTANT, 24 | lexer.digit ^ 1 * ('.' * lexer.digit ^ 1) ^ -1)) 25 | 26 | -- Required properties. 27 | local required_property = token(lexer.KEYWORD, word_match({ 28 | 'BEGIN', 'END', 'FN', 'VERSION', -- 29 | 'N' -- Not required in v4.0. 30 | }, true)) * #P(':') 31 | lex:add_rule('required_property', required_property) 32 | 33 | -- Supported properties. 34 | local supported_property = token(lexer.TYPE, word_match({ 35 | 'ADR', 'BDAY', 'CATEGORIES', 'EMAIL', 'END', 'GEO', 'KEY', 'LOGO', 'NOTE', 36 | 'ORG', 'PHOTO', 'REV', 'ROLE', 'SOUND', 'SOURCE', 'TEL', 'TITLE', 'TZ', 37 | 'UID', 'URL', -- Supported in v4.0 only. 38 | 'ANNIVERSARY', 'CALADRURI', 'CALURI', 'CLIENTPIDMAP', 'FBURL', 'GENDER', 39 | 'KIND', 'LANG', 'MEMBER', 'RELATED', 'XML', -- Not supported in v4.0. 40 | 'AGENT', 'LABEL', 'MAILER', 'PROFILE', 'SORT-STRING', 41 | -- Supported in v3.0 only. 42 | 'CLASS', 'NAME', -- Not supported in v2.1. 43 | 'IMPP', 'NICKNAME', 'PRODID' 44 | }, true)) * #S(':;') 45 | lex:add_rule('supported_property', supported_property) 46 | 47 | -- Group and property. 48 | local identifier = lexer.alpha ^ 1 * lexer.digit ^ 0 * ('-' * lexer.alnum ^ 1) ^ 49 | 0 50 | local property = required_property + supported_property + 51 | lexer.token(lexer.TYPE, S('xX') * '-' * identifier) * 52 | #S(':;') 53 | lex:add_rule('group_sequence', 54 | token(lexer.CONSTANT, lexer.starts_line(identifier)) * 55 | token(lexer.OPERATOR, '.') * property) 56 | 57 | -- Extension. 58 | lex:add_rule('extension', token(lexer.TYPE, lexer.starts_line( 59 | S('xX') * '-' * identifier * #S(':;')))) 60 | 61 | -- Parameter. 62 | local parameter = (token(lexer.IDENTIFIER, lexer.starts_line(identifier)) + 63 | token(lexer.STRING, identifier)) * #S(':=') 64 | lex:add_rule('parameter', parameter) 65 | 66 | -- Operators. 67 | lex:add_rule('operator', token(lexer.OPERATOR, S('.:;='))) 68 | 69 | -- Data. 70 | lex:add_rule('data', token(lexer.IDENTIFIER, lexer.any)) 71 | 72 | -- Fold points. 73 | lex:add_fold_point(lexer.KEYWORD, 'BEGIN', 'END') 74 | 75 | return lex 76 | -------------------------------------------------------------------------------- /lua/lexers/applescript.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Applescript LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('applescript') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ 14 | 'script', 'property', 'prop', 'end', 'copy', 'to', 'set', 'global', 'local', 15 | 'on', 'to', 'of', 'in', 'given', 'with', 'without', 'return', 'continue', 16 | 'tell', 'if', 'then', 'else', 'repeat', 'times', 'while', 'until', 'from', 17 | 'exit', 'try', 'error', 'considering', 'ignoring', 'timeout', 'transaction', 18 | 'my', 'get', 'put', 'into', 'is', -- References. 
19 | 'each', 'some', 'every', 'whose', 'where', 'id', 'index', 'first', 'second',
20 | 'third', 'fourth', 'fifth', 'sixth', 'seventh', 'eighth', 'ninth', 'tenth',
21 | 'last', 'front', 'back', 'st', 'nd', 'rd', 'th', 'middle', 'named',
22 | 'through', 'thru', 'before', 'after', 'beginning', 'the', -- Commands.
23 | 'close', 'copy', 'count', 'delete', 'duplicate', 'exists', 'launch', 'make',
24 | 'move', 'open', 'print', 'quit', 'reopen', 'run', 'save', 'saving',
25 | -- Operators.
26 | 'div', 'mod', 'and', 'not', 'or', 'as', 'contains', 'equal', 'equals',
27 | 'isn\'t'
28 | }, true)))
29 | 
30 | -- Constants.
31 | lex:add_rule('constant', token(lexer.CONSTANT, word_match({
32 | 'case', 'diacriticals', 'expansion', 'hyphens', 'punctuation',
33 | -- Predefined variables.
34 | 'it', 'me', 'version', 'pi', 'result', 'space', 'tab', 'anything',
35 | -- Text styles.
36 | 'bold', 'condensed', 'expanded', 'hidden', 'italic', 'outline', 'plain',
37 | 'shadow', 'strikethrough', 'subscript', 'superscript', 'underline',
38 | -- Save options.
39 | 'ask', 'no', 'yes', -- Booleans.
40 | 'false', 'true', -- Date and time.
41 | 'weekday', 'monday', 'mon', 'tuesday', 'tue', 'wednesday', 'wed',
42 | 'thursday', 'thu', 'friday', 'fri', 'saturday', 'sat', 'sunday', 'sun',
43 | 'month', 'january', 'jan', 'february', 'feb', 'march', 'mar', 'april',
44 | 'apr', 'may', 'june', 'jun', 'july', 'jul', 'august', 'aug', 'september',
45 | 'sep', 'october', 'oct', 'november', 'nov', 'december', 'dec', 'minutes',
46 | 'hours', 'days', 'weeks'
47 | }, true)))
48 | 
49 | -- Identifiers.
50 | lex:add_rule('identifier',
51 |             token(lexer.IDENTIFIER, lexer.alpha * (lexer.alnum + '_') ^ 0))
52 | 
53 | -- Strings.
54 | lex:add_rule('string', token(lexer.STRING, lexer.range('"', true)))
55 | 
56 | -- Comments.
57 | local line_comment = lexer.to_eol('--')
58 | local block_comment = lexer.range('(*', '*)')
59 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment))
60 | 
61 | -- Numbers.
62 | lex:add_rule('number', token(lexer.NUMBER, lexer.number))
63 | 
64 | -- Operators.
65 | lex:add_rule('operator', token(lexer.OPERATOR, S('+-^*/&<>=:,(){}')))
66 | 
67 | -- Fold points.
68 | lex:add_fold_point(lexer.COMMENT, '(*', '*)')
69 | 
70 | lexer.property['scintillua.comment'] = '--'
71 | 
72 | return lex
73 | 
--------------------------------------------------------------------------------
/lua/lexers/xml.lua:
--------------------------------------------------------------------------------
 1 | -- Copyright 2006-2023 Mitchell. See LICENSE.
 2 | -- XML LPeg lexer.
 3 | local lexer = lexer
 4 | local P, S = lpeg.P, lpeg.S
 5 | 
 6 | local lex = lexer.new(...)
 7 | 
 8 | -- Comments and CDATA.
 9 | lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.range('<!--', '-->')))
10 | lex:add_rule('cdata', lex:tag('cdata', lexer.range('<![CDATA[', ']]>')))
11 | 
12 | -- Doctype.
13 | local ws = lex:get_rule('whitespace')
14 | local identifier = (lexer.alpha + S('_-')) * (lexer.alnum + S('_-')) ^ 0
15 | local doctype = lex:tag(lexer.TAG .. '.doctype', '<!DOCTYPE') * ws *
16 |                     lex:tag(lexer.TAG .. '.doctype', identifier) *
17 |                     (lexer.any - '>') ^ 0 *
18 |                     lex:tag(lexer.TAG .. '.doctype', '>')
19 | lex:add_rule('doctype', doctype)
20 | 
21 | -- Processing instructions.
22 | lex:add_rule('proc_insn', lex:tag(lexer.TAG .. '.pi', '<?' * (lexer.any - '?>') ^ 0 * '?>'))
23 | 
24 | -- Tags.
25 | local namespace = lex:tag(lexer.OPERATOR, ':') *
26 |                       lex:tag(lexer.LABEL, identifier)
27 | lex:add_rule('element', lex:tag(lexer.TAG, '<' * P('/') ^ -1 * identifier) *
28 |                             namespace ^ -1)
29 | 
30 | -- Closing tags.
31 | lex:add_rule('close_tag', lex:tag(lexer.TAG, P('/') ^ -1 * '>'))
32 | 
33 | -- Equals.
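-- `in_tag` below is a match-time predicate intended to restrict '=' to
-- attribute positions inside a tag; the rule stays disabled (note the
-- commented-out add_rule call) because the backward scan it performs on
-- every candidate position is slow.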
34 | -- TODO: performance is terrible on large files.
35 | local in_tag = P(function(input, index)
36 |     local before = input:sub(1, index - 1)
37 |     local s, e = before:find('<[^>]-$'), before:find('>[^<]-$')
38 |     if s and e then return s > e end
39 |     if s then return true end
40 |     return input:find('^[^<]->', index) ~= nil
41 | end)
42 | 
43 | local equals = lex:tag(lexer.OPERATOR, '=') -- * in_tag
44 | -- lex:add_rule('equal', equals)
45 | 
46 | -- Attributes.
47 | local attribute_eq =
48 |     lex:tag(lexer.ATTRIBUTE, identifier) * namespace ^ -1 * ws ^ -1 * equals
49 | lex:add_rule('attribute', attribute_eq)
50 | 
51 | -- Strings.
52 | local sq_str = lexer.range("'", false, false)
53 | local dq_str = lexer.range('"', false, false)
54 | lex:add_rule('string',
55 |             lex:tag(lexer.STRING, lexer.after_set('=', sq_str + dq_str)))
56 | 
57 | -- Numbers.
58 | local number = lex:tag(lexer.NUMBER, lexer.dec_num * P('%') ^ -1)
59 | lex:add_rule('number', lexer.after_set('=', number)) -- *in_tag)
60 | 
61 | -- Entities.
62 | local predefined = lex:tag(lexer.CONSTANT_BUILTIN .. '.entity',
63 |                           '&' * lexer.word_match('lt gt amp apos quot') * ';')
64 | local general = lex:tag(lexer.CONSTANT .. '.entity', '&' * identifier * ';')
65 | lex:add_rule('entity', predefined + general)
66 | 
67 | -- Fold Points.
68 | local function disambiguate_lt(text, pos, line, s)
69 |     return not line:find('^</', s) and 1 or -1
70 | end
71 | lex:add_fold_point(lexer.TAG, '<', disambiguate_lt)
72 | lex:add_fold_point(lexer.TAG, '/>', -1)
73 | lex:add_fold_point(lexer.COMMENT, '<!--', '-->')
74 | lex:add_fold_point('cdata', '<![CDATA[', ']]>')
75 | 
76 | lexer.property['scintillua.comment'] = '<!--|-->'
77 | lexer.property['scintillua.angle.braces'] = '1'
78 | lexer.property['scintillua.word.chars'] =
79 |     'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-'
80 | 
81 | return lex
82 | 
--------------------------------------------------------------------------------
/lua/lexers/fantom.lua:
--------------------------------------------------------------------------------
 1 | -- Copyright 2018-2023 Simeon Maryasin (MarSoft). See LICENSE.
 2 | -- Fantom LPeg lexer.
 3 | -- Based on Java LPeg lexer by Mitchell and Vim's Fantom syntax.
 4 | local lexer = require('lexer')
 5 | local token, word_match = lexer.token, lexer.word_match
 6 | local P, S = lpeg.P, lpeg.S
 7 | 
 8 | local lex = lexer.new('fantom')
 9 | 
10 | -- Whitespace.
11 | local ws = token(lexer.WHITESPACE, lexer.space ^ 1)
12 | lex:add_rule('whitespace', ws)
13 | 
14 | -- Classes.
15 | local type = token(lexer.TYPE, lexer.word)
16 | lex:add_rule('class_sequence',
17 |             token(lexer.KEYWORD, 'class') * ws * type *
18 |                 ( -- at most one inheritance spec
19 |                     ws * token(lexer.OPERATOR, ':') * ws * type *
20 |                     ( -- at least 0 (i.e. any number) of additional classes
21 |                         ws ^ -1 * token(lexer.OPERATOR, ',') * ws ^ -1 * type) ^ 0) ^ -1)
22 | 
23 | -- Keywords.
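-- The keyword groups below mirror the categories of Vim's Fantom syntax file
-- (see the trailing comment on each line).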
24 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 25 | 'using', 'native', -- external 26 | 'goto', 'void', 'serializable', 'volatile', -- error 27 | 'if', 'else', 'switch', -- conditional 28 | 'do', 'while', 'for', 'foreach', 'each', -- repeat 29 | 'true', 'false', -- boolean 30 | 'null', -- constant 31 | 'this', 'super', -- typedef 32 | 'new', 'is', 'isnot', 'as', -- operator 33 | 'plus', 'minus', 'mult', 'div', 'mod', 'get', 'set', 'slice', 'lshift', 34 | 'rshift', 'and', 'or', 'xor', 'inverse', 'negate', -- 35 | 'increment', 'decrement', 'equals', 'compare', -- long operator 36 | 'return', -- stmt 37 | 'static', 'const', 'final', -- storage class 38 | 'virtual', 'override', 'once', -- slot 39 | 'readonly', -- field 40 | 'throw', 'try', 'catch', 'finally', -- exceptions 41 | 'assert', -- assert 42 | 'class', 'enum', 'mixin', -- typedef 43 | 'break', 'continue', -- branch 44 | 'default', 'case', -- labels 45 | 'public', 'internal', 'protected', 'private', 'abstract' -- scope decl 46 | })) 47 | 48 | -- Types. 49 | lex:add_rule('type', token(lexer.TYPE, word_match( 50 | 'Void Bool Int Float Decimal Str Duration Uri Type Range List Map Obj Err Env'))) 51 | 52 | -- Functions. 53 | -- lex:add_rule('function', token(lexer.FUNCTION, lexer.word) * #P('(')) 54 | 55 | -- Identifiers. 56 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 57 | 58 | -- Strings. 59 | local sq_str = lexer.range("'", true) 60 | local dq_str = lexer.range('"', true) 61 | local bq_str = lexer.range('`', true) 62 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + bq_str)) 63 | 64 | -- Comments. 65 | local line_comment = lexer.to_eol('//', true) 66 | local block_comment = lexer.range('/*', '*/') 67 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 68 | 69 | -- Numbers. 70 | lex:add_rule('number', token(lexer.NUMBER, lexer.number * S('LlFfDd') ^ -1)) 71 | 72 | -- Operators. 73 | lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}#'))) 74 | 75 | -- Annotations. 76 | lex:add_rule('facet', token(lexer.ANNOTATION, '@' * lexer.word)) 77 | 78 | -- Fold points. 
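-- Fold points register token pairs that open and close a foldable region;
-- here, braces and block comments.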
79 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 80 | lex:add_fold_point(lexer.COMMENT, '/*', '*/') 81 | 82 | lexer.property['scintillua.comment'] = '//' 83 | 84 | return lex 85 | -------------------------------------------------------------------------------- /lua/ansi_codes.lua: -------------------------------------------------------------------------------- 1 | local M = {} 2 | 3 | local rgb_to_ansi = {} 4 | 5 | local sgr_params = { 6 | reset = 0, 7 | clear = 0, 8 | default = 0, 9 | bright = 1, 10 | dim = 2, 11 | italic = 3, 12 | underscore = 4, 13 | blink = 5, 14 | reverse = 7, 15 | hidden = 8, 16 | 17 | -- foreground 18 | black = 30, 19 | red = 31, 20 | green = 32, 21 | yellow = 33, 22 | blue = 34, 23 | magenta = 35, 24 | cyan = 36, 25 | white = 37, 26 | 27 | brblack = 90, 28 | brred = 91, 29 | brgreen = 92, 30 | bryellow = 93, 31 | brblue = 94, 32 | brmagenta = 95, 33 | brcyan = 96, 34 | brwhite = 97, 35 | 36 | -- background 37 | onblack = 40, 38 | onred = 41, 39 | ongreen = 42, 40 | onyellow = 43, 41 | onblue = 44, 42 | onmagenta = 45, 43 | oncyan = 46, 44 | onwhite = 47, 45 | 46 | onbrblack = 100, 47 | onbrred = 101, 48 | onbrgreen = 102, 49 | onbryellow = 103, 50 | onbrblue = 104, 51 | onbrmagenta = 105, 52 | onbrcyan = 106, 53 | onbrwhite = 107 54 | } 55 | 56 | local function hex_to_rgb(hex) 57 | hex = hex:sub(2) 58 | local r = tonumber(hex:sub(1, 2), 16) 59 | local g = tonumber(hex:sub(3, 4), 16) 60 | local b = tonumber(hex:sub(5, 6), 16) 61 | return r, g, b 62 | end 63 | 64 | -- converts a SGR parameter to an ANSI escape string 65 | -- https://en.wikipedia.org/wiki/ANSI_escape_code#Colors 66 | local function ansi_string(sgr_number) 67 | return string.char(27) .. '[' .. tostring(sgr_number) .. 'm' 68 | end 69 | 70 | -- converts a SGR parameter to an ANSI escape string 71 | -- https://en.wikipedia.org/wiki/ANSI_escape_code#Colors 72 | -- not currently used 73 | -- local function ansi_string_265(sgr_number) 74 | -- return string.char(27) .."[38;5;" .. tostring(sgr_number) .. 'm' 75 | -- end 76 | 77 | -- converts a SGR parameter to an ANSI escape string 78 | -- https://en.wikipedia.org/wiki/ANSI_escape_code#Colors 79 | local function ansi_string_4b(color) 80 | local ansi_code = "" 81 | for attr, value in pairs(color) do 82 | if attr == "color" then 83 | for param in string.gmatch(value, "[^%s]+") do 84 | local code = sgr_params[param] 85 | if code ~= nil then 86 | ansi_code = ansi_code .. ansi_string(code) 87 | end 88 | end 89 | else 90 | print('error in 4 bit color value') 91 | end 92 | end 93 | return ansi_code 94 | end 95 | 96 | -- converts a SGR parameter to an ANSI escape string 97 | -- https://en.wikipedia.org/wiki/ANSI_escape_code#Colors 98 | local function ansi_string_24b(r, g, b) 99 | return string.char(27) .. "[38;2;" .. tostring(r) .. ';' .. tostring(g) .. 100 | ';' .. tostring(b) .. 'm' 101 | end 102 | 103 | M.begin_line_hl_ansi = string.char(27) .. "[47m" .. string.char(27) .. "[30m" 104 | M.end_line_hl_ansi = string.char(27) .. "[K" .. string.char(27) .. "[0m" 105 | 106 | M.ansi_string = ansi_string 107 | M.rgb_to_ansi = rgb_to_ansi 108 | M.reset_sequence = ansi_string(0) 109 | M.hex_to_rgb = hex_to_rgb 110 | M.ansi_string_24b = ansi_string_24b 111 | M.ansi_string_4b = ansi_string_4b 112 | 113 | return M 114 | -------------------------------------------------------------------------------- /lua/lexers/xtend.lua: -------------------------------------------------------------------------------- 1 | -- Copyright (c) 2014-2023 Piotr Orzechowski [drzewo.org]. See LICENSE. 
2 | -- Xtend LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('xtend') 8 | 9 | -- Whitespace. 10 | local ws = token(lexer.WHITESPACE, lexer.space ^ 1) 11 | lex:add_rule('whitespace', ws) 12 | 13 | -- Classes. 14 | lex:add_rule('class', token(lexer.KEYWORD, 'class') * ws ^ 1 * 15 | token(lexer.CLASS, lexer.word)) 16 | 17 | -- Keywords. 18 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 19 | -- General. 20 | 'abstract', 'annotation', 'as', 'case', 'catch', 'class', 'create', 'def', 21 | 'default', 'dispatch', 'do', 'else', 'enum', 'extends', 'extension', 22 | 'final', 'finally', 'for', 'if', 'implements', 'import', 'interface', 23 | 'instanceof', 'it', 'new', 'override', 'package', 'private', 'protected', 24 | 'public', 'return', 'self', 'static', 'super', 'switch', 'synchronized', 25 | 'this', 'throw', 'throws', 'try', 'typeof', 'val', 'var', 'while', 26 | -- Templates. 27 | 'AFTER', 'BEFORE', 'ENDFOR', 'ENDIF', 'FOR', 'IF', 'SEPARATOR', 28 | -- Literals. 29 | 'true', 'false', 'null' 30 | })) 31 | 32 | -- Types. 33 | lex:add_rule('type', token(lexer.TYPE, word_match { 34 | 'boolean', 'byte', 'char', 'double', 'float', 'int', 'long', 'short', 35 | 'void', 'Boolean', 'Byte', 'Character', 'Double', 'Float', 'Integer', 36 | 'Long', 'Short', 'String' 37 | })) 38 | 39 | -- Functions. 40 | lex:add_rule('function', token(lexer.FUNCTION, lexer.word) * #P('(')) 41 | 42 | -- Identifiers. 43 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 44 | 45 | -- Templates. 46 | lex:add_rule('template', token(lexer.EMBEDDED, lexer.range("'''"))) 47 | 48 | -- Strings. 49 | local sq_str = lexer.range("'", true) 50 | local dq_str = lexer.range('"', true) 51 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 52 | 53 | -- Comments. 54 | local line_comment = lexer.to_eol('//', true) 55 | local block_comment = lexer.range('/*', '*/') 56 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 57 | 58 | -- Numbers. 59 | local small_suff = S('lL') 60 | local med_suff = S('bB') * S('iI') 61 | local large_suff = S('dD') + S('fF') + S('bB') * S('dD') 62 | local exp = S('eE') * lexer.digit ^ 1 63 | 64 | local dec_inf = ('_' * lexer.digit ^ 1) ^ 0 65 | local hex_inf = ('_' * lexer.xdigit ^ 1) ^ 0 66 | local float_pref = lexer.digit ^ 1 * '.' * lexer.digit ^ 1 67 | local float_suff = exp ^ -1 * med_suff ^ -1 * large_suff ^ -1 68 | 69 | local dec = lexer.digit * dec_inf * (small_suff ^ -1 + float_suff) 70 | local hex = lexer.hex_num * hex_inf * P('#' * (small_suff + med_suff)) ^ -1 71 | local float = float_pref * dec_inf * float_suff 72 | 73 | lex:add_rule('number', token(lexer.NUMBER, float + hex + dec)) 74 | 75 | -- Annotations. 76 | lex:add_rule('annotation', token(lexer.ANNOTATION, '@' * lexer.word)) 77 | 78 | -- Operators. 79 | lex:add_rule('operator', token(lexer.OPERATOR, S('+-/*%<>!=^&|?~:;.()[]{}#'))) 80 | 81 | -- Error. 82 | lex:add_rule('error', token(lexer.ERROR, lexer.any)) 83 | 84 | -- Fold points. 85 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 86 | lex:add_fold_point(lexer.COMMENT, '/*', '*/') 87 | 88 | lexer.property['scintillua.comment'] = '//' 89 | 90 | return lex 91 | -------------------------------------------------------------------------------- /lua/lexers/pkgbuild.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 gwash. See LICENSE. 
2 | -- Archlinux PKGBUILD LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('pkgbuild') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Comments. 13 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#'))) 14 | 15 | -- Strings. 16 | local sq_str = lexer.range("'", false, false) 17 | local dq_str = lexer.range('"') 18 | local ex_str = lexer.range('`') 19 | local heredoc = '<<' * P(function(input, index) 20 | local s, e, _, delimiter = input:find('(["\']?)([%a_][%w_]*)%1[\n\r\f;]+', 21 | index) 22 | if s == index and delimiter then 23 | e = select(2, input:find('[\n\r\f]+' .. delimiter, e)) 24 | return e and e + 1 or #input + 1 25 | end 26 | end) 27 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + ex_str + heredoc)) 28 | 29 | -- Numbers. 30 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 31 | 32 | -- Keywords. 33 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 34 | 'patch', 'cd', 'make', 'patch', 'mkdir', 'cp', 'sed', 'install', 'rm', 'if', 35 | 'then', 'elif', 'else', 'fi', 'case', 'in', 'esac', 'while', 'for', 'do', 36 | 'done', 'continue', 'local', 'return', 'git', 'svn', 'co', 'clone', 37 | 'gconf-merge-schema', 'msg', 'echo', 'ln', -- Operators. 38 | '-a', '-b', '-c', '-d', '-e', '-f', '-g', '-h', '-k', '-p', '-r', '-s', 39 | '-t', '-u', '-w', '-x', '-O', '-G', '-L', '-S', '-N', '-nt', '-ot', '-ef', 40 | '-o', '-z', '-n', '-eq', '-ne', '-lt', '-le', '-gt', '-ge', '-Np', '-i' 41 | })) 42 | 43 | -- Functions. 44 | lex:add_rule('function', token(lexer.FUNCTION, word_match( 45 | 'build check package pkgver prepare') * '()')) 46 | 47 | -- Constants. 48 | lex:add_rule('constant', token(lexer.CONSTANT, word_match { 49 | -- We do *not* list pkgver srcdir and startdir here. 50 | -- These are defined by makepkg but user should not alter them. 51 | 'arch', 'backup', 'changelog', 'checkdepends', 'conflicts', 'depends', 52 | 'epoch', 'groups', 'install', 'license', 'makedepends', 'md5sums', 53 | 'noextract', 'optdepends', 'options', 'pkgbase', 'pkgdesc', 'pkgname', 54 | 'pkgrel', 'pkgver', 'provides', 'replaces', 'sha1sums', 'sha256sums', 55 | 'sha384sums', 'sha512sums', 'source', 'url', 'validpgpkeys' 56 | })) 57 | 58 | -- Identifiers. 59 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 60 | 61 | -- Variables. 62 | local symbol = S('!#?*@$') 63 | local parens = lexer.range('(', ')', true) 64 | local brackets = lexer.range('[', ']', true) 65 | local braces = lexer.range('{', '}', true) 66 | local backticks = lexer.range('`', true, false) 67 | local number = lexer.dec_num 68 | lex:add_rule('variable', token(lexer.VARIABLE, '$' * 69 | (symbol + parens + brackets + braces + 70 | backticks + number + lexer.word))) 71 | 72 | -- Operators. 73 | lex:add_rule('operator', token(lexer.OPERATOR, S('=!<>+-/*^~.,:;?()[]{}'))) 74 | 75 | -- Fold points. 76 | lex:add_fold_point(lexer.OPERATOR, '(', ')') 77 | lex:add_fold_point(lexer.OPERATOR, '{', '}') 78 | 79 | lexer.property['scintillua.comment'] = '#' 80 | 81 | return lex 82 | -------------------------------------------------------------------------------- /lua/lexers/caml.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- OCaml LPeg lexer. 
3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('caml') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'and', 'as', 'asr', 'begin', 'class', 'closed', 'constraint', 'do', 'done', 15 | 'downto', 'else', 'end', 'exception', 'external', 'failwith', 'false', 16 | 'flush', 'for', 'fun', 'function', 'functor', 'if', 'in', 'include', 'incr', 17 | 'inherit', 'land', 'let', 'load', 'los', 'lsl', 'lsr', 'lxor', 'match', 18 | 'method', 'mod', 'module', 'mutable', 'new', 'not', 'of', 'open', 'option', 19 | 'or', 'parser', 'private', 'raise', 'rec', 'ref', 'regexp', 'sig', 'stderr', 20 | 'stdin', 'stdout', 'struct', 'then', 'to', 'true', 'try', 'type', 'val', 21 | 'virtual', 'when', 'while', 'with' 22 | })) 23 | 24 | -- Types. 25 | lex:add_rule('type', 26 | token(lexer.TYPE, word_match('bool char float int string unit'))) 27 | 28 | -- Functions. 29 | lex:add_rule('function', token(lexer.FUNCTION, word_match { 30 | 'abs', 'abs_float', 'acos', 'asin', 'atan', 'atan2', 'at_exit', 31 | 'bool_of_string', 'ceil', 'char_of_int', 'classify_float', 'close_in', 32 | 'close_in_noerr', 'close_out', 'close_out_noerr', 'compare', 'cos', 'cosh', 33 | 'decr', 'epsilon_float', 'exit', 'exp', 'failwith', 'float', 'float_of_int', 34 | 'float_of_string', 'floor', 'flush', 'flush_all', 'format_of_string', 35 | 'frexp', 'fst', 'ignore', 'in_channel_length', 'incr', 'infinity', 'input', 36 | 'input_binary_int', 'input_byte', 'input_char', 'input_line', 'input_value', 37 | 'int_of_char', 'int_of_float', 'int_of_string', 'invalid_arg', 'ldexp', 38 | 'log', 'log10', 'max', 'max_float', 'max_int', 'min', 'min_float', 39 | 'min_int', 'mod', 'modf', 'mod_float', 'nan', 'open_in', 'open_in_bin', 40 | 'open_in_gen', 'open_out', 'open_out_bin', 'open_out_gen', 41 | 'out_channel_length', 'output', 'output_binary_int', 'output_byte', 42 | 'output_char', 'output_string', 'output_value', 'pos_in', 'pos_out', 'pred', 43 | 'prerr_char', 'prerr_endline', 'prerr_float', 'prerr_int', 'prerr_newline', 44 | 'prerr_string', 'print_char', 'print_endline', 'print_float', 'print_int', 45 | 'print_newline', 'print_string', 'raise', 'read_float', 'read_int', 46 | 'read_line', 'really_input', 'seek_in', 'seek_out', 'set_binary_mode_in', 47 | 'set_binary_mode_out', 'sin', 'sinh', 'snd', 'sqrt', 'stderr', 'stdin', 48 | 'stdout', 'string_of_bool', 'string_of_float', 'string_of_format', 49 | 'string_of_int', 'succ', 'tan', 'tanh', 'truncate' 50 | })) 51 | 52 | -- Identifiers. 53 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 54 | 55 | -- Strings. 56 | local sq_str = lexer.range("'", true) 57 | local dq_str = lexer.range('"', true) 58 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 59 | 60 | -- Comments. 61 | lex:add_rule('comment', 62 | token(lexer.COMMENT, lexer.range('(*', '*)', false, false, true))) 63 | 64 | -- Numbers. 65 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 66 | 67 | -- Operators. 
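-- S(...) is a single-character set, so multi-character operators are
-- highlighted as runs of adjacent one-character operator tokens.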
68 | lex:add_rule('operator', token(lexer.OPERATOR, S('=<>+-*/.,:;~!#%^&|?[](){}')))
69 | 
70 | lexer.property['scintillua.comment'] = '(*|*)'
71 | 
72 | return lex
73 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
 1 | # clp
 2 | 
 3 | clp writes input files to stdout with syntax highlighting. It aims to be relatively fast, provide wide language support, and be easy to extend with new languages. It currently supports 150 languages.
 4 | 
 5 | ![clpm](https://github.com/jpe90/images/raw/master/clpm.png)
 6 | 
 7 | Language support is implemented with LPeg, a pattern-matching library developed at PUC-Rio that uses parsing expression grammars to improve on traditional regex-based parsers (described in depth in [this article](http://www.inf.puc-rio.br/~roberto/docs/peg.pdf)).
 8 | 
 9 | More information is available [here](https://jeskin.net/blog/clp/), along with a blog post showing [how I use it with fzf](https://jeskin.net/blog/grep-fzf-clp/).
10 | 
11 | ## Installation
12 | 
13 | ### macOS
14 | 
15 | ```
16 | brew tap jpe90/clp
17 | brew install jpe90/clp/clp
18 | ```
19 | 
20 | ### Linux
21 | 
22 | [AUR](https://aur.archlinux.org/packages/clp-git)
23 | 
24 | 
25 | ### Building from source
26 | Requirements:
27 | 
28 | - a POSIX-compliant operating system
29 | - a C99 compiler
30 | - Lua >= 5.1 or LuaJIT (the latter for better performance)
31 | - [LPEG](http://www.inf.puc-rio.br/~roberto/lpeg/)
32 | - [luautf8](https://github.com/starwing/luautf8)
33 | ```
34 | $ ./configure
35 | $ make
36 | # make install
37 | ```
38 | 
39 | #### Distro-Specific Dependencies
40 | 
41 | ##### Ubuntu
42 | ```
43 | # apt-get install -y build-essential pkg-config libluajit-5.1-dev lua-lpeg
44 | ```
45 | 
46 | ##### Arch
47 | ```
48 | # pacman -S --needed --noconfirm base-devel pkg-config luajit lua51-lpeg lua51-luautf8
49 | ```
50 | 
51 | ## Usage
52 | 
53 | ```
54 | $ clp [options] filename
55 | ```
56 | 
57 | #### -t, --override-filetype {filetype}
58 | 
59 | Force a language's syntax for highlighting the file. To see the available filetypes, run `clp --list-overrides`.
60 | 
61 | #### -h, --highlight-line {number}
62 | 
63 | Highlight the given line (the line must be non-blank).
64 | 
65 | ## Motivation
66 | 
67 | Existing syntax highlighting programs are either relatively slow or support a small number of languages. `clp` aims to be faster without compromising on language support.
68 | 
69 | Here are some quick benchmarks comparing similar programs running on my machine (clp
70 | installed with LuaJIT, highlighting
71 | [sqlite3.c](https://fossies.org/linux/sqlite/sqlite3.c)):
72 | 
73 | | Command | Mean [ms] | Min [ms] | Max [ms] | Relative |
74 | |:---|---:|---:|---:|---:|
75 | | `clp sqlite3.c` | 216.6 ± 2.4 | 212.2 | 220.8 | 1.00 |
76 | | `bat --color=always sqlite3.c` | 3161.0 ± 12.3 | 3149.7 | 3182.7 | 14.59 ± 0.17 |
77 | | `source-highlight sqlite3.c` | 4313.6 ± 25.5 | 4277.7 | 4355.9 | 19.91 ± 0.25 |
78 | 
79 | More benchmarks are available [here](https://github.com/sharkdp/bat/blob/master/doc/alternatives.md).
80 | 
81 | Parsers come from the
82 | [Scintillua](https://orbitalquark.github.io/scintillua/) project, which is actively
83 | maintained, has great support even for niche languages, and is easy to use
84 | relative to other syntax-definition mechanisms.
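
Since parsers are plain Lua files, adding a language is mostly a matter of dropping a Scintillua-style lexer into `lua/lexers/`. As a rough sketch (the filename `example.lua` and its word list are made up for illustration, not a lexer that ships with clp), a minimal lexer looks something like this:

```lua
-- lua/lexers/example.lua (hypothetical): a minimal Scintillua-style lexer.
local lexer = require('lexer')
local token = lexer.token
local S = lpeg.S

local lex = lexer.new('example')

-- Rules are tried in the order they are added.
lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1))
lex:add_rule('keyword', token(lexer.KEYWORD, lexer.word_match('if else while return')))
lex:add_rule('string', token(lexer.STRING, lexer.range('"')))
lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('#')))
lex:add_rule('number', token(lexer.NUMBER, lexer.number))
lex:add_rule('operator', token(lexer.OPERATOR, S('+-*/=')))

return lex
```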
85 | 86 | ## Configuration 87 | 88 | `clp` can be configured in `~/.config/clp/clprc.lua`: 89 | 90 | ```lua 91 | clprc = {} 92 | clprc.theme = "ansi-16" 93 | return clprc 94 | ``` 95 | 96 | ### Changing your colorscheme 97 | 98 | Instructions [available here](https://jeskin.net/blog/clp/#adding-color-themes). 99 | 100 | ## Contributing 101 | 102 | Contributions are welcome! Feel free to send a pull request on [Github](https://github.com/jpe90/clp) 103 | or a patch on [Sourcehut](https://git.sr.ht/~eskin/clp). 104 | -------------------------------------------------------------------------------- /lua/lexers/vhdl.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- VHDL LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('vhdl') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'access', 'after', 'alias', 'all', 'architecture', 'array', 'assert', 15 | 'attribute', 'begin', 'block', 'body', 'buffer', 'bus', 'case', 'component', 16 | 'configuration', 'constant', 'disconnect', 'downto', 'else', 'elsif', 'end', 17 | 'entity', 'exit', 'file', 'for', 'function', 'generate', 'generic', 'group', 18 | 'guarded', 'if', 'impure', 'in', 'inertial', 'inout', 'is', 'label', 19 | 'library', 'linkage', 'literal', 'loop', 'map', 'new', 'next', 'null', 'of', 20 | 'on', 'open', 'others', 'out', 'package', 'port', 'postponed', 'procedure', 21 | 'process', 'pure', 'range', 'record', 'register', 'reject', 'report', 22 | 'return', 'select', 'severity', 'signal', 'shared', 'subtype', 'then', 'to', 23 | 'transport', 'type', 'unaffected', 'units', 'until', 'use', 'variable', 24 | 'wait', 'when', 'while', 'with', -- 25 | 'note', 'warning', 'error', 'failure', -- 26 | 'and', 'nand', 'or', 'nor', 'xor', 'xnor', 'rol', 'ror', 'sla', 'sll', 27 | 'sra', 'srl', 'mod', 'rem', -- 28 | 'abs', 'not', 'false', 'true' 29 | })) 30 | 31 | -- Functions. 32 | lex:add_rule('function', token(lexer.FUNCTION, word_match { 33 | 'rising_edge', 'shift_left', 'shift_right', 'rotate_left', 'rotate_right', 34 | 'resize', 'std_match', 'to_integer', 'to_unsigned', 'to_signed', 'unsigned', 35 | 'signed', 'to_bit', 'to_bitvector', 'to_stdulogic', 'to_stdlogicvector', 36 | 'to_stdulogicvector' 37 | })) 38 | 39 | -- Types. 40 | lex:add_rule('type', token(lexer.TYPE, word_match { 41 | 'bit', 'bit_vector', 'character', 'boolean', 'integer', 'real', 'time', 42 | 'string', 'severity_level', 'positive', 'natural', 'signed', 'unsigned', 43 | 'line', 'text', 'std_logic', 'std_logic_vector', 'std_ulogic', 44 | 'std_ulogic_vector', 'qsim_state', 'qsim_state_vector', 'qsim_12state', 45 | 'qsim_12state_vector', 'qsim_strength', 'mux_bit', 'mux_vectory', 'reg_bit', 46 | 'reg_vector', 'wor_bit', 'wor_vector' 47 | })) 48 | 49 | -- Constants. 
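-- These are VHDL's predefined attribute names (written with a tick in
-- source, e.g. clk'EVENT, arr'RANGE); the duplicates in the list are
-- redundant but harmless.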
50 | lex:add_rule('constant', token(lexer.CONSTANT, word_match { 51 | 'EVENT', 'BASE', 'LEFT', 'RIGHT', 'LOW', 'HIGH', 'ASCENDING', 'IMAGE', 52 | 'VALUE', 'POS', 'VAL', 'SUCC', 'VAL', 'POS', 'PRED', 'VAL', 'POS', 'LEFTOF', 53 | 'RIGHTOF', 'LEFT', 'RIGHT', 'LOW', 'HIGH', 'RANGE', 'REVERSE', 'LENGTH', 54 | 'ASCENDING', 'DELAYED', 'STABLE', 'QUIET', 'TRANSACTION', 'EVENT', 'ACTIVE', 55 | 'LAST', 'LAST', 'LAST', 'DRIVING', 'DRIVING', 'SIMPLE', 'INSTANCE', 'PATH' 56 | })) 57 | 58 | -- Identifiers. 59 | lex:add_rule('identifier', token(lexer.IDENTIFIER, (lexer.alpha + "'") * 60 | (lexer.alnum + S("_'")) ^ 1)) 61 | 62 | -- Strings. 63 | local sq_str = lexer.range("'", true, false) 64 | local dq_str = lexer.range('"', true) 65 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 66 | 67 | -- Comments. 68 | lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('--'))) 69 | 70 | -- Numbers. 71 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 72 | 73 | -- Operators. 74 | lex:add_rule('operator', token(lexer.OPERATOR, S('=/!:;<>+-/*%&|^~()'))) 75 | 76 | lexer.property['scintillua.comment'] = '--' 77 | 78 | return lex 79 | -------------------------------------------------------------------------------- /lua/lexers/reason.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2018-2023 Hugo O. Rivera. See LICENSE. 2 | -- Reason (https://reasonml.github.io/) LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('reason') 8 | 9 | -- Whitespace. 10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1)) 11 | 12 | -- Keywords. 13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 14 | 'and', 'as', 'asr', 'begin', 'class', 'closed', 'constraint', 'do', 'done', 15 | 'downto', 'else', 'end', 'exception', 'external', 'failwith', 'false', 16 | 'flush', 'for', 'fun', 'function', 'functor', 'if', 'in', 'include', 17 | 'inherit', 'incr', 'land', 'let', 'load', 'los', 'lsl', 'lsr', 'lxor', 18 | 'method', 'mod', 'module', 'mutable', 'new', 'not', 'of', 'open', 'option', 19 | 'or', 'parser', 'private', 'ref', 'rec', 'raise', 'regexp', 'sig', 'struct', 20 | 'stdout', 'stdin', 'stderr', 'switch', 'then', 'to', 'true', 'try', 'type', 21 | 'val', 'virtual', 'when', 'while', 'with' 22 | })) 23 | 24 | -- Types. 25 | lex:add_rule('type', 26 | token(lexer.TYPE, word_match('int float bool char string unit'))) 27 | 28 | -- Functions. 
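-- Built-in functions from the standard prelude (OCaml's Pervasives), which
-- Reason shares.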
29 | lex:add_rule('function', token(lexer.FUNCTION, word_match { 30 | 'raise', 'invalid_arg', 'failwith', 'compare', 'min', 'max', 'succ', 'pred', 31 | 'mod', 'abs', 'max_int', 'min_int', 'sqrt', 'exp', 'log', 'log10', 'cos', 32 | 'sin', 'tan', 'acos', 'asin', 'atan', 'atan2', 'cosh', 'sinh', 'tanh', 33 | 'ceil', 'floor', 'abs_float', 'mod_float', 'frexp', 'ldexp', 'modf', 34 | 'float', 'float_of_int', 'truncate', 'int_of_float', 'infinity', 'nan', 35 | 'max_float', 'min_float', 'epsilon_float', 'classify_float', 'int_of_char', 36 | 'char_of_int', 'ignore', 'string_of_bool', 'bool_of_string', 37 | 'string_of_int', 'int_of_string', 'string_of_float', 'float_of_string', 38 | 'fst', 'snd', 'stdin', 'stdout', 'stderr', 'print_char', 'print_string', 39 | 'print_int', 'print_float', 'print_endline', 'print_newline', 'prerr_char', 40 | 'prerr_string', 'prerr_int', 'prerr_float', 'prerr_endline', 41 | 'prerr_newline', 'read_line', 'read_int', 'read_float', 'open_out', 42 | 'open_out_bin', 'open_out_gen', 'flush', 'flush_all', 'output_char', 43 | 'output_string', 'output', 'output_byte', 'output_binary_int', 44 | 'output_value', 'seek_out', 'pos_out', 'out_channel_length', 'close_out', 45 | 'close_out_noerr', 'set_binary_mode_out', 'open_in', 'open_in_bin', 46 | 'open_in_gen', 'input_char', 'input_line', 'input', 'really_input', 47 | 'input_byte', 'input_binary_int', 'input_value', 'seek_in', 'pos_in', 48 | 'in_channel_length', 'close_in', 'close_in_noerr', 'set_binary_mode_in', 49 | 'incr', 'decr', 'string_of_format', 'format_of_string', 'exit', 'at_exit' 50 | })) 51 | 52 | -- Identifiers. 53 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 54 | 55 | -- Strings. 56 | local sq_str = lexer.range("'", true) 57 | local dq_str = lexer.range('"', true) 58 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str)) 59 | 60 | -- Comments. 61 | local line_comment = lexer.to_eol('//') 62 | local block_comment = lexer.range('/*', '*/', false, false, true) 63 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 64 | 65 | -- Numbers. 66 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 67 | 68 | -- Operators. 69 | lex:add_rule('operator', token(lexer.OPERATOR, S('=<>+-*/.,:;~!#%^&|?[](){}'))) 70 | 71 | lexer.property['scintillua.comment'] = '//' 72 | 73 | return lex 74 | -------------------------------------------------------------------------------- /lua/lexers/gnuplot.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2006-2023 Mitchell. See LICENSE. 2 | -- Gnuplot LPeg lexer. 3 | local lexer = lexer 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new(...) 8 | 9 | -- Keywords. 10 | lex:add_rule('keyword', lex:tag(lexer.KEYWORD, lex:word_match(lexer.KEYWORD))) 11 | 12 | -- Functions. 13 | lex:add_rule('function', lex:tag(lexer.FUNCTION_BUILTIN, 14 | lex:word_match(lexer.FUNCTION_BUILTIN))) 15 | 16 | -- Variables. 17 | lex:add_rule('variable', lex:tag(lexer.VARIABLE_BUILTIN, 18 | lex:word_match(lexer.VARIABLE_BUILTIN))) 19 | 20 | -- Identifiers. 21 | lex:add_rule('identifier', lex:tag(lexer.IDENTIFIER, lexer.word)) 22 | 23 | -- Strings. 24 | local sq_str = lexer.range("'") 25 | local dq_str = lexer.range('"') 26 | local br_str = lexer.range('[', ']', true) + lexer.range('{', '}', true) 27 | lex:add_rule('string', lex:tag(lexer.STRING, sq_str + dq_str + br_str)) 28 | 29 | -- Comments. 
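-- to_eol('#') consumes everything from a '#' through the end of the line.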
30 | lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.to_eol('#')))
31 | 
32 | -- Numbers.
33 | lex:add_rule('number', lex:tag(lexer.NUMBER, lexer.number))
34 | 
35 | -- Operators.
36 | lex:add_rule('operator', lex:tag(lexer.OPERATOR, S('-+~!$*%=<>&|^?:;()')))
37 | 
38 | -- Word lists.
39 | lex:set_word_list(lexer.KEYWORD, {
40 |     'cd', 'call', 'clear', 'exit', 'fit', 'help', 'history', 'if', 'load',
41 |     'pause', 'plot', 'using', 'with', 'index', 'every', 'smooth', 'thru',
42 |     'print', 'pwd', 'quit', 'replot', 'reread', 'reset', 'save', 'set', 'show',
43 |     'unset', 'shell', 'splot', 'system', 'test', 'unset', 'update'
44 | })
45 | 
46 | lex:set_word_list(lexer.FUNCTION_BUILTIN, {
47 |     'abs', 'acos', 'acosh', 'arg', 'asin', 'asinh', 'atan', 'atan2', 'atanh',
48 |     'besj0', 'besj1', 'besy0', 'besy1', 'ceil', 'cos', 'cosh', 'erf', 'erfc',
49 |     'exp', 'floor', 'gamma', 'ibeta', 'inverf', 'igamma', 'imag', 'invnorm',
50 |     'int', 'lambertw', 'lgamma', 'log', 'log10', 'norm', 'rand', 'real', 'sgn',
51 |     'sin', 'sinh', 'sqrt', 'tan', 'tanh', 'column', 'defined', 'tm_hour',
52 |     'tm_mday', 'tm_min', 'tm_mon', 'tm_sec', 'tm_wday', 'tm_yday', 'tm_year',
53 |     'valid'
54 | })
55 | 
56 | lex:set_word_list(lexer.VARIABLE_BUILTIN, {
57 |     'angles', 'arrow', 'autoscale', 'bars', 'bmargin', 'border', 'boxwidth',
58 |     'clabel', 'clip', 'cntrparam', 'colorbox', 'contour', 'datafile',
59 |     'decimalsign', 'dgrid3d', 'dummy', 'encoding', 'fit', 'fontpath', 'format',
60 |     'functions', 'function', 'grid', 'hidden3d', 'historysize', 'isosamples',
61 |     'key', 'label', 'lmargin', 'loadpath', 'locale', 'logscale', 'mapping',
62 |     'margin', 'mouse', 'multiplot', 'mx2tics', 'mxtics', 'my2tics', 'mytics',
63 |     'mztics', 'offsets', 'origin', 'output', 'parametric', 'plot', 'pm3d',
64 |     'palette', 'pointsize', 'polar', 'print', 'rmargin', 'rrange', 'samples',
65 |     'size', 'style', 'surface', 'terminal', 'tics', 'ticslevel', 'ticscale',
66 |     'timestamp', 'timefmt', 'title', 'tmargin', 'trange', 'urange', 'variables',
67 |     'version', 'view', 'vrange', 'x2data', 'x2dtics', 'x2label', 'x2mtics',
68 |     'x2range', 'x2tics', 'x2zeroaxis', 'xdata', 'xdtics', 'xlabel', 'xmtics',
69 |     'xrange', 'xtics', 'xzeroaxis', 'y2data', 'y2dtics', 'y2label', 'y2mtics',
70 |     'y2range', 'y2tics', 'y2zeroaxis', 'ydata', 'ydtics', 'ylabel', 'ymtics',
71 |     'yrange', 'ytics', 'yzeroaxis', 'zdata', 'zdtics', 'cbdata', 'cbdtics',
72 |     'zero', 'zeroaxis', 'zlabel', 'zmtics', 'zrange', 'ztics', 'cblabel',
73 |     'cbmtics', 'cbrange', 'cbtics'
74 | })
75 | 
76 | lexer.property['scintillua.comment'] = '#'
77 | 
78 | return lex
79 | 
--------------------------------------------------------------------------------
/lua/lexers/wsf.lua:
--------------------------------------------------------------------------------
 1 | -- Copyright 2006-2023 Mitchell. See LICENSE.
 2 | -- WSF LPeg lexer (based on XML).
 3 | -- Contributed by Jeff Stone.
 4 | local lexer = lexer
 5 | local P, S = lpeg.P, lpeg.S
 6 | 
 7 | local lex = lexer.new(...)
 8 | 
 9 | -- Comments.
10 | lex:add_rule('comment', lex:tag(lexer.COMMENT, lexer.range('<!--', '-->')))
11 | 
12 | -- Elements.
13 | local identifier = (lexer.alpha + S('_-')) * (lexer.alnum + S('_-')) ^ 0
14 | local tag = lex:tag(lexer.TAG, '<' * P('/') ^ -1 * identifier)
15 | lex:add_rule('tag', tag)
16 | 
17 | -- Closing tags.
18 | local tag_close = lex:tag(lexer.TAG, P('/') ^ -1 * '>')
19 | lex:add_rule('tag_close', tag_close)
20 | 
21 | -- Equals.
22 | -- TODO: performance is terrible on large files.
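-- The same match-time `in_tag` guard as in xml.lua follows, and it is
-- likewise left disabled for speed.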
23 | local in_tag = P(function(input, index)
24 |     local before = input:sub(1, index - 1)
25 |     local s, e = before:find('<[^>]-$'), before:find('>[^<]-$')
26 |     if s and e then return s > e end
27 |     if s then return true end
28 |     return input:find('^[^<]->', index) ~= nil
29 | end)
30 | 
31 | local equals = lex:tag(lexer.OPERATOR, '=') -- * in_tag
32 | -- lex:add_rule('equals', equals)
33 | 
34 | -- Attributes.
35 | local ws = lex:get_rule('whitespace')
36 | local attribute_eq = lex:tag(lexer.ATTRIBUTE, identifier) * ws ^ -1 * equals
37 | lex:add_rule('attribute', attribute_eq)
38 | 
39 | -- Strings.
40 | local sq_str = lexer.range("'", false, false)
41 | local dq_str = lexer.range('"', false, false)
42 | local string = lex:tag(lexer.STRING, lexer.after_set('=', sq_str + dq_str))
43 | lex:add_rule('string', string)
44 | 
45 | -- Numbers.
46 | local number = lex:tag(lexer.NUMBER, lexer.dec_num * P('%') ^ -1)
47 | lex:add_rule('number', lexer.after_set('=', number)) -- * in_tag)
48 | 
49 | -- Entities.
50 | local predefined = lex:tag(lexer.CONSTANT_BUILTIN .. '.entity',
51 |                           '&' * lexer.word_match('lt gt amp apos quot') * ';')
52 | local general = lex:tag(lexer.CONSTANT .. '.entity', '&' * identifier * ';')
53 | lex:add_rule('entity', predefined + general)
54 | 
55 | -- Fold points.
56 | local function disambiguate_lt(text, pos, line, s)
57 |     return not line:find('^</', s) and 1 or -1
58 | end
59 | lex:add_fold_point(lexer.TAG, '<', disambiguate_lt)
60 | lex:add_fold_point(lexer.TAG, '/>', -1)
61 | lex:add_fold_point(lexer.COMMENT, '<!--', '-->')
62 | 
63 | -- Finally, add JavaScript and VBScript as embedded languages
64 | 
65 | -- Tags that start embedded languages.
66 | local embed_start_tag = tag * (ws * attribute_eq * ws ^ -1 * string) ^ 0 * ws ^
67 |                             -1 * tag_close
68 | local embed_end_tag = tag * tag_close
69 | 
70 | -- Embedded JavaScript.
71 | local js = lexer.load('javascript')
72 | local js_start_rule = #(P('<script>')) * embed_start_tag -- <script>
78 | local js_end_rule = #('</script' * ws ^ -1 * '>') * embed_end_tag -- </script>
79 | lex:embed(js, js_start_rule, js_end_rule)
80 | 
81 | -- Embedded VBScript.
82 | local vbs = lexer.load('vb', 'vbscript')
83 | local vbs_start_rule = #(P('<script language="vbscript">')) * embed_start_tag -- <script language="vbscript">
89 | local vbs_end_rule = #('</script' * ws ^ -1 * '>') * embed_end_tag -- </script>
90 | lex:embed(vbs, vbs_start_rule, vbs_end_rule)
91 | 
92 | lexer.property['scintillua.comment'] = '<!--|-->'
93 | lexer.property['scintillua.angle.braces'] = '1'
94 | 
95 | return lex
96 | 
--------------------------------------------------------------------------------
/lua/lexers/sql.lua:
--------------------------------------------------------------------------------
 1 | -- Copyright 2006-2023 Mitchell. See LICENSE.
 2 | -- SQL LPeg lexer.
 3 | local lexer = require('lexer')
 4 | local token, word_match = lexer.token, lexer.word_match
 5 | local P, S = lpeg.P, lpeg.S
 6 | 
 7 | local lex = lexer.new('sql')
 8 | 
 9 | -- Whitespace.
10 | lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space ^ 1))
11 | 
12 | -- Keywords.
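-- SQL keywords are case-insensitive; the `true` flag to word_match below
-- handles that.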
13 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match({ 14 | 'add', 'all', 'alter', 'analyze', 'and', 'as', 'asc', 'asensitive', 15 | 'before', 'between', 'bigint', 'binary', 'blob', 'both', 'by', 'call', 16 | 'cascade', 'case', 'change', 'char', 'character', 'check', 'collate', 17 | 'column', 'condition', 'connection', 'constraint', 'continue', 'convert', 18 | 'create', 'cross', 'current_date', 'current_time', 'current_timestamp', 19 | 'current_user', 'cursor', 'database', 'databases', 'day_hour', 20 | 'day_microsecond', 'day_minute', 'day_second', 'dec', 'decimal', 'declare', 21 | 'default', 'delayed', 'delete', 'desc', 'describe', 'deterministic', 22 | 'distinct', 'distinctrow', 'div', 'double', 'drop', 'dual', 'each', 'else', 23 | 'elseif', 'enclosed', 'escaped', 'exists', 'exit', 'explain', 'false', 24 | 'fetch', 'float', 'for', 'force', 'foreign', 'from', 'fulltext', 'goto', 25 | 'grant', 'group', 'having', 'high_priority', 'hour_microsecond', 26 | 'hour_minute', 'hour_second', 'if', 'ignore', 'in', 'index', 'infile', 27 | 'inner', 'inout', 'insensitive', 'insert', 'int', 'integer', 'interval', 28 | 'into', 'is', 'iterate', 'join', 'key', 'keys', 'kill', 'leading', 'leave', 29 | 'left', 'like', 'limit', 'lines', 'load', 'localtime', 'localtimestamp', 30 | 'lock', 'long', 'longblob', 'longtext', 'loop', 'low_priority', 'match', 31 | 'mediumblob', 'mediumint', 'mediumtext', 'middleint', 'minute_microsecond', 32 | 'minute_second', 'mod', 'modifies', 'natural', 'not', 'no_write_to_binlog', 33 | 'null', 'numeric', 'on', 'optimize', 'option', 'optionally', 'or', 'order', 34 | 'out', 'outer', 'outfile', 'precision', 'primary', 'procedure', 'purge', 35 | 'read', 'reads', 'real', 'references', 'regexp', 'rename', 'repeat', 36 | 'replace', 'require', 'restrict', 'return', 'revoke', 'right', 'rlike', 37 | 'schema', 'schemas', 'second_microsecond', 'select', 'sensitive', 38 | 'separator', 'set', 'show', 'smallint', 'soname', 'spatial', 'specific', 39 | 'sql', 'sqlexception', 'sqlstate', 'sqlwarning', 'sql_big_result', 40 | 'sql_calc_found_rows', 'sql_small_result', 'ssl', 'starting', 41 | 'straight_join', 'table', 'terminated', 'text', 'then', 'tinyblob', 42 | 'tinyint', 'tinytext', 'to', 'trailing', 'trigger', 'true', 'undo', 'union', 43 | 'unique', 'unlock', 'unsigned', 'update', 'usage', 'use', 'using', 44 | 'utc_date', 'utc_time', 'utc_timestamp', 'values', 'varbinary', 'varchar', 45 | 'varcharacter', 'varying', 'when', 'where', 'while', 'with', 'write', 'xor', 46 | 'year_month', 'zerofill' 47 | }, true))) 48 | 49 | -- Identifiers. 50 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word)) 51 | 52 | -- Strings. 53 | local sq_str = lexer.range("'") 54 | local dq_str = lexer.range('"') 55 | local bq_str = lexer.range('`') 56 | lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + bq_str)) 57 | 58 | -- Comments. 59 | local line_comment = lexer.to_eol(P('--') + '#') 60 | local block_comment = lexer.range('/*', '*/') 61 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 62 | 63 | -- Numbers. 64 | lex:add_rule('number', token(lexer.NUMBER, lexer.number)) 65 | 66 | -- Operators. 67 | lex:add_rule('operator', token(lexer.OPERATOR, S(',()'))) 68 | 69 | lexer.property['scintillua.comment'] = '--' 70 | 71 | return lex 72 | -------------------------------------------------------------------------------- /lua/lexers/sml.lua: -------------------------------------------------------------------------------- 1 | -- Copyright 2017-2023 Murray Calavera. 
See LICENSE. 2 | -- Standard ML LPeg lexer. 3 | local lexer = require('lexer') 4 | local token, word_match = lexer.token, lexer.word_match 5 | local P, S = lpeg.P, lpeg.S 6 | 7 | local lex = lexer.new('sml') 8 | 9 | -- Whitespace. 10 | local ws = token(lexer.WHITESPACE, lexer.space ^ 1) 11 | lex:add_rule('whitespace', ws) 12 | 13 | -- Structures. 14 | local id = (lexer.alnum + "'" + '_') ^ 0 15 | local aid = lexer.alpha * id 16 | local longid = (aid * '.') ^ 0 * aid 17 | local struct_dec = token(lexer.KEYWORD, 'structure') * ws * 18 | token(lexer.CLASS, aid) * ws * token(lexer.OPERATOR, '=') * 19 | ws 20 | lex:add_rule('struct_new', struct_dec * token(lexer.KEYWORD, 'struct')) 21 | lex:add_rule('struct_alias', struct_dec * token(lexer.CLASS, longid)) 22 | lex:add_rule('structure', token(lexer.CLASS, aid * '.')) 23 | 24 | -- Open. 25 | lex:add_rule('open', 26 | token(lexer.KEYWORD, word_match('open structure functor')) * ws * 27 | token(lexer.CLASS, longid)) 28 | 29 | -- Keywords. 30 | lex:add_rule('keyword', token(lexer.KEYWORD, word_match { 31 | 'abstype', 'and', 'andalso', 'as', 'case', 'do', 'datatype', 'else', 'end', 32 | 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix', 'infixr', 'let', 33 | 'local', 'nonfix', 'of', 'op', 'orelse', 'raise', 'rec', 'then', 'type', 34 | 'val', 'with', 'withtype', 'while', -- 35 | 'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature', 'struct', 36 | 'structure' 37 | })) 38 | 39 | -- Types. 40 | lex:add_rule('type', token(lexer.TYPE, word_match { 41 | 'int', 'real', 'word', 'bool', 'char', 'string', 'unit', 'array', 'exn', 42 | 'list', 'option', 'order', 'ref', 'substring', 'vector' 43 | })) 44 | 45 | -- Functions. 46 | -- `real`, `vector` and `substring` are a problem. 47 | lex:add_rule('function', token(lexer.FUNCTION, word_match { 48 | 'app', 'before', 'ceil', 'chr', 'concat', 'exnMessage', 'exnName', 49 | 'explode', 'floor', 'foldl', 'foldr', 'getOpt', 'hd', 'ignore', 'implode', 50 | 'isSome', 'length', 'map', 'not', 'null', 'ord', 'print', 'real', 'rev', 51 | 'round', 'size', 'str', 'substring', 'tl', 'trunc', 'valOf', 'vector', 'o', 52 | 'abs', 'mod', 'div' 53 | })) 54 | 55 | -- Constants. 56 | lex:add_rule('constant', token(lexer.CONSTANT, 57 | word_match('true false nil') + lexer.upper * id)) 58 | 59 | -- Indentifiers (non-symbolic). 60 | lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.lower * id)) 61 | 62 | -- Strings. 63 | lex:add_rule('string', token(lexer.STRING, P('#') ^ -1 * lexer.range('"', true))) 64 | 65 | -- Comments. 66 | local line_comment = lexer.to_eol('(*)') 67 | local block_comment = lexer.range('(*', '*)', false, false, true) 68 | lex:add_rule('comment', token(lexer.COMMENT, line_comment + block_comment)) 69 | 70 | -- Numbers. 71 | local function num(digit) 72 | return digit * (digit ^ 0 * '_') ^ 0 * digit ^ 1 + digit 73 | end 74 | local int = num(lexer.digit) 75 | local frac = '.' * int 76 | local minus = lpeg.P('~') ^ -1 77 | local exp = lpeg.S('eE') * minus * int 78 | local real = int * frac ^ -1 * exp + int * frac * exp ^ -1 79 | local hex = num(lexer.xdigit) 80 | local bin = num(lpeg.S('01')) 81 | -- LuaFormatter off 82 | lex:add_rule('number', token(lexer.NUMBER, 83 | '0w' * int + 84 | (P('0wx') + '0xw') * hex + 85 | (P('0wb') + '0bw') * bin + 86 | minus * '0x' * hex + 87 | minus * '0b' * bin + 88 | minus * real + 89 | minus * int)) 90 | -- LuaFormatter on 91 | 92 | -- Type variables. 93 | lex:add_rule('typevar', token(lexer.VARIABLE, "'" * id)) 94 | 95 | -- Operators. 
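-- SML permits symbolic identifiers; this set highlights operator characters
-- one at a time rather than as whole multi-character tokens.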
96 | lex:add_rule('operator', 97 | token(lexer.OPERATOR, S('!*/+-^:@=<>()[]{},;._|#%&$?~`\\'))) 98 | 99 | lexer.property['scintillua.comment'] = '(*)' 100 | 101 | return lex 102 | --------------------------------------------------------------------------------