├── .gitignore ├── internal ├── soak │ ├── soak.go │ ├── Vfs.go │ ├── Mirror.go │ └── VirtualAsset.go ├── wirtokenizer │ ├── wirtokenizer.go │ ├── TokenFile.go │ ├── Token.go │ └── Tokenizer.go ├── mood │ ├── Arg.go │ ├── Cmd.go │ ├── mood.go │ └── Cli.go ├── cli │ ├── main.go │ └── cmd │ │ ├── CmdDefault.go │ │ └── CmdTokenize.go ├── wirparser │ └── wirparser.go ├── glam │ └── glam.go ├── wherr │ └── wherr.go └── runelexer │ ├── AbstractLexer.go │ └── runelexer.go ├── go.mod ├── examples ├── raw │ ├── button.wir │ ├── h1.wir │ └── user_list.wir └── toks │ ├── button.tok │ ├── h1.tok │ └── user_list.tok ├── Makefile ├── README.md └── wir_test.go /.gitignore: -------------------------------------------------------------------------------- 1 | tmp -------------------------------------------------------------------------------- /internal/soak/soak.go: -------------------------------------------------------------------------------- 1 | package soak 2 | 3 | -------------------------------------------------------------------------------- /go.mod: -------------------------------------------------------------------------------- 1 | module github.com/phillip-england/wir 2 | 3 | go 1.25.3 4 | -------------------------------------------------------------------------------- /examples/raw/button.wir: -------------------------------------------------------------------------------- 1 | button -------------------------------------------------------------------------------- /examples/raw/h1.wir: -------------------------------------------------------------------------------- 1 | h1 { 2 | "Hello, World!" 
3 | } -------------------------------------------------------------------------------- /internal/wirtokenizer/wirtokenizer.go: -------------------------------------------------------------------------------- 1 | package wirtokenizer 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | tokenize: 2 | go run ./internal/cli/main.go tokenize ./examples/raw ./examples/toks -o -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # WebIR 2 | A langauge for expressing reactive web UI's across multiple platforms. 3 | -------------------------------------------------------------------------------- /internal/mood/Arg.go: -------------------------------------------------------------------------------- 1 | package mood 2 | 3 | type Arg struct { 4 | Position int 5 | Value string 6 | } -------------------------------------------------------------------------------- /internal/mood/Cmd.go: -------------------------------------------------------------------------------- 1 | package mood 2 | 3 | type Cmd interface { 4 | Execute(cli *Cli) error 5 | } 6 | 7 | type CommandFactory func(cli *Cli) (Cmd, error) 8 | -------------------------------------------------------------------------------- /examples/raw/user_list.wir: -------------------------------------------------------------------------------- 1 | ul { 2 | li 3 | 'Name: ${listName: string}' 4 | @for(user: User) { 5 | li { 'name: ${user.name: string}' } 6 | } 7 | } -------------------------------------------------------------------------------- /examples/toks/button.tok: -------------------------------------------------------------------------------- 1 | HTML_TAG_NAME:button 2 | HTML_TAG_INFO_START:< 3 | HTML_ATTR_KEY:class 4 | HTML_ATTR_EQUAL_SIGN:= 5 | 
HTML_ATTR_VALUE:'p-4 text-sm bg-black rounded-lg' 6 | HTML_TAG_INFO_END:> 7 | END_OF_FILE:EOF 8 | -------------------------------------------------------------------------------- /examples/toks/h1.tok: -------------------------------------------------------------------------------- 1 | HTML_TAG_NAME:h1 2 | HTML_TAG_INFO_START:< 3 | HTML_ATTR_KEY:class 4 | HTML_ATTR_EQUAL_SIGN:= 5 | HTML_ATTR_VALUE:'text-3xl font-bold' 6 | HTML_TAG_INFO_END:> 7 | HTML_CURLY_BRACE_OPEN:{ 8 | STRING_START:" 9 | STRING_CONTENT:Hello, World! 10 | STRING_END:" 11 | HTML_CURLY_BRACE_CLOSE:} 12 | END_OF_FILE:EOF 13 | -------------------------------------------------------------------------------- /internal/cli/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | 6 | "github.com/phillip-england/wir/internal/cli/cmd" 7 | "github.com/phillip-england/wir/internal/mood" 8 | ) 9 | 10 | func main() { 11 | 12 | 13 | cli, err := mood.New(cmd.NewCmdDefault) 14 | if err != nil { 15 | fmt.Println(err.Error()) 16 | return 17 | } 18 | 19 | cli.At("tokenize", cmd.NewCmdTokenize) 20 | 21 | err = cli.Run() 22 | if err != nil { 23 | fmt.Println(err.Error()) 24 | return 25 | } 26 | } 27 | 28 | -------------------------------------------------------------------------------- /internal/cli/cmd/CmdDefault.go: -------------------------------------------------------------------------------- 1 | package cmd 2 | 3 | import ( 4 | "fmt" 5 | 6 | "github.com/phillip-england/wir/internal/mood" 7 | ) 8 | 9 | type CmdDefault struct{} 10 | 11 | func NewCmdDefault(cli *mood.Cli) (mood.Cmd, error) { 12 | return CmdDefault{}, nil 13 | } 14 | 15 | func (cmd CmdDefault) Execute(cli *mood.Cli) error { 16 | fmt.Println(`[webIR]: a language for expressing reactive web UI's across multiple platforms 17 | [tokenize example/usage]: 18 | -wir tokenize 19 | -wir tokenize ./input.wir ./output.txt`) 20 | return nil 21 | } 
// FileExists reports whether something (a file or a directory) exists
// at path on disk.
func FileExists(path string) bool {
	_, err := os.Stat(path)
	return err == nil
}

// DirExists reports whether path exists on disk and is a directory.
func DirExists(path string) bool {
	info, err := os.Stat(path)
	if err != nil {
		return false
	}
	return info.IsDir()
}

// IsFile guesses from the path's shape alone (no disk access) whether
// it names a file: it must not end in a path separator and must carry
// an extension.
//
// Fix: the trailing-separator check previously ran on the *cleaned*
// path, which never ends in a separator, so "a/b.txt/" was reported as
// both a file (here) and a directory (IsDir). Checking the raw path
// makes the two helpers agree.
func IsFile(path string) bool {
	if strings.HasSuffix(path, string(filepath.Separator)) {
		return false
	}
	return filepath.Ext(filepath.Clean(path)) != ""
}

// IsDir guesses from the path's shape alone (no disk access) whether it
// names a directory: a trailing separator, or the absence of an
// extension.
func IsDir(path string) bool {
	if strings.HasSuffix(path, string(filepath.Separator)) {
		return true
	}
	return filepath.Ext(filepath.Clean(path)) == ""
}
l.Iter(func(item wirtokenizer.Token, pos int) bool { 37 | fmt.Println(item.Str()) 38 | return true 39 | }) 40 | return nil 41 | } 42 | 43 | -------------------------------------------------------------------------------- /internal/wirtokenizer/TokenFile.go: -------------------------------------------------------------------------------- 1 | package wirtokenizer 2 | 3 | import ( 4 | "os" 5 | 6 | "github.com/phillip-england/wir/internal/wherr" 7 | ) 8 | 9 | type TokenFile struct { 10 | Path string 11 | Text string 12 | Tokens []Token 13 | TokenStr string 14 | } 15 | 16 | func TokenFileNewFromTokenizer(path string, tk *Tokenizer) (TokenFile, error) { 17 | s := tk.Str() 18 | toks, err := TokenManyFromStr(s) 19 | if err != nil { 20 | return TokenFile{}, wherr.Consume(wherr.Here(), err, "") 21 | } 22 | tokenStr := "" 23 | for _, tok := range toks { 24 | tokenStr += tok.text 25 | } 26 | return TokenFile{ 27 | Path: path, 28 | Text: s, 29 | Tokens: toks, 30 | TokenStr: tokenStr, 31 | }, nil 32 | } 33 | 34 | func TokenFileLoad(path string) (TokenFile, error) { 35 | fBytes, err := os.ReadFile(path) 36 | if err != nil { 37 | return TokenFile{}, wherr.Consume(wherr.Here(), err, "") 38 | } 39 | fStr := string(fBytes) 40 | toks, err := TokenManyFromStr(fStr) 41 | if err != nil { 42 | return TokenFile{}, wherr.Consume(wherr.Here(), err, "") 43 | } 44 | tokenStr := "" 45 | for _, tok := range toks { 46 | tokenStr += tok.text 47 | } 48 | return TokenFile{ 49 | Path: path, 50 | Text: fStr, 51 | Tokens: toks, 52 | TokenStr: tokenStr, 53 | }, nil 54 | } 55 | 56 | 57 | -------------------------------------------------------------------------------- /internal/glam/glam.go: -------------------------------------------------------------------------------- 1 | package glam 2 | 3 | import "fmt" 4 | 5 | type ColorCode = string 6 | 7 | const ( 8 | ColorCodeRed = "31" 9 | ColorCodeGreen = "32" 10 | ColorCodeYellow = "33" 11 | ColorCodeBlue = "34" 12 | ColorCodeMagenta = "35" 13 | ColorCodeCyan 
// ColorCode is an ANSI SGR color code, e.g. "31" for red.
type ColorCode = string

const (
	ColorCodeRed     = "31"
	ColorCodeGreen   = "32"
	ColorCodeYellow  = "33"
	ColorCodeBlue    = "34"
	ColorCodeMagenta = "35"
	ColorCodeCyan    = "36"
	ColorCodeWhite   = "37"
)

// Wrap surrounds text with the ANSI escape sequence selecting the given
// color code, followed by a reset sequence.
func Wrap(code ColorCode, text string) string {
	return "\033[" + code + "m" + text + "\033[0m"
}

// colored formats like fmt.Sprintf and wraps the result in the color.
func colored(code ColorCode, format string, args ...any) string {
	return Wrap(code, fmt.Sprintf(format, args...))
}

// Red formats like fmt.Sprintf and returns the text colored red.
func Red(format string, args ...any) string { return colored(ColorCodeRed, format, args...) }

// Green formats like fmt.Sprintf and returns the text colored green.
func Green(format string, args ...any) string { return colored(ColorCodeGreen, format, args...) }

// Yellow formats like fmt.Sprintf and returns the text colored yellow.
func Yellow(format string, args ...any) string { return colored(ColorCodeYellow, format, args...) }

// Blue formats like fmt.Sprintf and returns the text colored blue.
func Blue(format string, args ...any) string { return colored(ColorCodeBlue, format, args...) }

// Magenta formats like fmt.Sprintf and returns the text colored magenta.
func Magenta(format string, args ...any) string { return colored(ColorCodeMagenta, format, args...) }

// Cyan formats like fmt.Sprintf and returns the text colored cyan.
func Cyan(format string, args ...any) string { return colored(ColorCodeCyan, format, args...) }

// White formats like fmt.Sprintf and returns the text colored white.
func White(format string, args ...any) string { return colored(ColorCodeWhite, format, args...) }
DOLLAR_SIGN_INTERPOLATION_OPEN:${ 21 | DOLLAR_SIGN_INTERPOLATION_VALUE:listName 22 | DOLLAR_SIGN_INTERPOLATION_SEMICOLON:: 23 | DOLLAR_SIGN_INTERPOLATION_TYPE:string 24 | DOLLAR_SIGN_INTERPOLATION_CLOSE:} 25 | STRING_END:' 26 | AT_DIRECTIVE_START:@ 27 | AT_DIRECTIVE_NAME:for 28 | AT_DIRECTIVE_PARENTHESIS_OPEN:( 29 | AT_DIRECTIVE_PARAM_VALUE:user 30 | AT_DIRECTIVE_SEMICOLON:: 31 | AT_DIRECTIVE_PARAM_TYPE:User 32 | AT_DIRECTIVE_PARENTHESIS_CLOSE:) 33 | HTML_CURLY_BRACE_OPEN:{ 34 | HTML_TAG_NAME:li 35 | HTML_CURLY_BRACE_OPEN:{ 36 | STRING_START:' 37 | STRING_CONTENT:name: 38 | DOLLAR_SIGN_INTERPOLATION_OPEN:${ 39 | DOLLAR_SIGN_INTERPOLATION_VALUE:user.name 40 | DOLLAR_SIGN_INTERPOLATION_SEMICOLON:: 41 | DOLLAR_SIGN_INTERPOLATION_TYPE:string 42 | DOLLAR_SIGN_INTERPOLATION_CLOSE:} 43 | STRING_END:' 44 | HTML_CURLY_BRACE_CLOSE:} 45 | HTML_CURLY_BRACE_CLOSE:} 46 | HTML_CURLY_BRACE_CLOSE:} 47 | END_OF_FILE:EOF 48 | -------------------------------------------------------------------------------- /internal/wherr/wherr.go: -------------------------------------------------------------------------------- 1 | package wherr 2 | 3 | import ( 4 | "errors" 5 | "fmt" 6 | "os" 7 | "path/filepath" 8 | "runtime" 9 | ) 10 | 11 | type Wherr struct { 12 | File string 13 | RelPath string 14 | Line int 15 | Message string 16 | Err error 17 | } 18 | 19 | func Err(location *Location, format string, args ...any) *Wherr { 20 | message := fmt.Sprintf(format, args...) 21 | return &Wherr{ 22 | File: location.File, 23 | RelPath: location.RelPath, 24 | Line: location.Line, 25 | Message: message, 26 | Err: errors.New(message), 27 | } 28 | } 29 | 30 | func Consume(location *Location, err error, prepend string, args ...any) *Wherr { 31 | if err == nil { 32 | return nil 33 | } 34 | message := fmt.Sprintf(prepend, args...) 
+ "\n" + err.Error() 35 | return &Wherr{ 36 | File: location.File, 37 | RelPath: location.RelPath, 38 | Line: location.Line, 39 | Message: message, 40 | Err: err, 41 | } 42 | } 43 | 44 | func (e *Wherr) Error() string { 45 | return fmt.Sprintf("\033[31m[WHERR]\033[0m\033[33m[%s:%d]:\033[0m %s", e.RelPath, e.Line, e.Message) 46 | } 47 | 48 | func (e *Wherr) Unwrap() error { 49 | return e.Err 50 | } 51 | 52 | func (e *Wherr) Print() { 53 | fmt.Fprintf(os.Stderr, "🚨 %s:%d — %s\n", e.RelPath, e.Line, e.Message) 54 | } 55 | 56 | func (e *Wherr) Fail() { 57 | e.Print() 58 | os.Exit(1) 59 | } 60 | 61 | type Location struct { 62 | RelPath string 63 | File string 64 | Line int 65 | Cwd string 66 | } 67 | 68 | func (l Location) Str() string { 69 | return fmt.Sprintf("Line: %d: File: %s", l.Line, l.RelPath) 70 | } 71 | 72 | func Here() *Location { 73 | _, file, line, _ := runtime.Caller(1) 74 | cwd, _ := os.Getwd() 75 | fpath, _ := filepath.Rel(cwd, file) 76 | return &Location{File: file, RelPath: fpath, Line: line, Cwd: cwd} 77 | } 78 | -------------------------------------------------------------------------------- /internal/soak/Vfs.go: -------------------------------------------------------------------------------- 1 | package soak 2 | 3 | import ( 4 | "os" 5 | "path" 6 | 7 | "github.com/phillip-england/wir/internal/wherr" 8 | ) 9 | 10 | type Vfs struct { 11 | isLocked bool 12 | Path string 13 | Assets []*VirtualAsset 14 | } 15 | 16 | func LoadVfs(isLocked bool, relPath ...string) (*Vfs, error) { 17 | cwd, err := os.Getwd() 18 | if err != nil { 19 | return nil, wherr.Consume(wherr.Here(), err, "") 20 | } 21 | outParts := append([]string{}, cwd) 22 | outParts = append(outParts, relPath...) 23 | outPath := path.Join(outParts...) 
24 | assets, err := LoadVirtualAssets(isLocked, cwd, outPath) 25 | if err != nil { 26 | return nil, wherr.Consume(wherr.Here(), err, "") 27 | } 28 | return &Vfs{ 29 | isLocked: isLocked, 30 | Path: outPath, 31 | Assets: assets, 32 | }, nil 33 | } 34 | 35 | func LoadVfsAbsolute(isLocked bool, absPath string) (*Vfs, error) { 36 | cwd, err := os.Getwd() 37 | if err != nil { 38 | return nil, wherr.Consume(wherr.Here(), err, "") 39 | } 40 | assets, err := LoadVirtualAssets(isLocked, cwd, absPath) 41 | if err != nil { 42 | return nil, wherr.Consume(wherr.Here(), err, "") 43 | } 44 | return &Vfs{ 45 | isLocked: isLocked, 46 | Path: absPath, 47 | Assets: assets, 48 | }, nil 49 | } 50 | 51 | func (v *Vfs) IterAssets(fn func(a *VirtualAsset) bool) { 52 | for _, asset := range v.Assets { 53 | shouldContinue := fn(asset) 54 | if shouldContinue { 55 | continue 56 | } 57 | break 58 | } 59 | } 60 | 61 | func (v *Vfs) Sync() error { 62 | if v.isLocked { 63 | return wherr.Err(wherr.Here(), "attempted to sync a locked virtual file system") 64 | } 65 | var potErr error 66 | v.IterAssets(func(a *VirtualAsset) bool { 67 | err := a.Save() 68 | if err != nil { 69 | potErr = wherr.Consume(wherr.Here(), err, "") 70 | return false 71 | } 72 | return true 73 | }) 74 | if potErr != nil { 75 | return potErr 76 | } 77 | return nil 78 | } 79 | -------------------------------------------------------------------------------- /wir_test.go: -------------------------------------------------------------------------------- 1 | package webir_test 2 | 3 | import ( 4 | "fmt" 5 | "os" 6 | "path" 7 | "testing" 8 | 9 | "github.com/phillip-england/wir/internal/soak" 10 | "github.com/phillip-england/wir/internal/wherr" 11 | "github.com/phillip-england/wir/internal/wirparser" 12 | "github.com/phillip-england/wir/internal/wirtokenizer" 13 | ) 14 | 15 | func fail(t *testing.T, err error) { 16 | fmt.Println(err.Error()) 17 | t.Fail() 18 | } 19 | 20 | func TestWirParser(t *testing.T) { 21 | cwd, _ := os.Getwd() 22 
| tk, err := wirtokenizer.TokenizerNewFromFile(path.Join(cwd, "examples", "raw", "h1.wir")) 23 | if err != nil { 24 | fail(t, wherr.Consume(wherr.Here(), err, "")) 25 | } 26 | _, err = wirparser.ParserNew(tk.Lexer.Tokens()) 27 | if err != nil { 28 | fail(t, wherr.Consume(wherr.Here(), err, "")) 29 | } 30 | } 31 | 32 | func TestLoadTokenFile(t *testing.T) { 33 | cwd, _ := os.Getwd() 34 | _, err := wirtokenizer.TokenFileLoad(path.Join(cwd, "examples", "toks", "user_list.tok")) 35 | if err != nil { 36 | fail(t, wherr.Consume(wherr.Here(), err, "")) 37 | } 38 | } 39 | 40 | func TestExamplesTokenized(t *testing.T) { 41 | d, err := soak.NewMirror([]string{"examples", "raw"}, []string{"examples", "toks"}) 42 | if err != nil { 43 | fail(t, wherr.Consume(wherr.Here(), err, "")) 44 | } 45 | d.Iter(func(target *soak.VirtualAsset, compare *soak.VirtualAsset) bool { 46 | compareTokFile, err := wirtokenizer.TokenFileLoad(compare.Path) 47 | if err != nil { 48 | fail(t, wherr.Consume(wherr.Here(), err, "")) 49 | } 50 | tk, err := wirtokenizer.TokenizerNewFromString(target.Text) 51 | if err != nil { 52 | fail(t, wherr.Consume(wherr.Here(), err, "")) 53 | } 54 | targetTokFile, err := wirtokenizer.TokenFileNewFromTokenizer("", tk) 55 | if err != nil { 56 | fail(t, wherr.Consume(wherr.Here(), err, "")) 57 | } 58 | targetStr := targetTokFile.TokenStr 59 | compareStr := compareTokFile.TokenStr 60 | if len(targetStr) != len(compareStr) { 61 | fail(t, wherr.Err(wherr.Here(), "target file: [%s] does not mirror comparison file: [%s]", target.Path, compare.Path)) 62 | } 63 | return true 64 | }) 65 | if err != nil { 66 | fail(t, wherr.Consume(wherr.Here(), err, "")) 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /internal/soak/Mirror.go: -------------------------------------------------------------------------------- 1 | package soak 2 | 3 | import ( 4 | "os" 5 | "path" 6 | 7 | "github.com/phillip-england/wir/internal/wherr" 8 | ) 9 | 10 | 
type Mirror struct { 11 | targetVFS *Vfs 12 | compareVFS *Vfs 13 | } 14 | 15 | func NewMirror(targetDirRelPath []string, compareToDirRelPath []string) (Mirror, error) { 16 | cwd, err := os.Getwd() 17 | if err != nil { 18 | return Mirror{}, wherr.Consume(wherr.Here(), err, "") 19 | } 20 | targetDirPath := path.Join(append([]string{cwd}, targetDirRelPath...)...) 21 | targetVFS, err := LoadVfsAbsolute(true, targetDirPath) 22 | if err != nil { 23 | return Mirror{}, wherr.Consume(wherr.Here(), err, "") 24 | } 25 | compareToDirPath := path.Join(append([]string{cwd}, compareToDirRelPath...)...) 26 | compareVFS, err := LoadVfsAbsolute(true, compareToDirPath) 27 | if err != nil { 28 | return Mirror{}, wherr.Consume(wherr.Here(), err, "") 29 | } 30 | err = ensureDirsMirror(targetVFS, compareVFS) 31 | if err != nil { 32 | return Mirror{}, wherr.Consume(wherr.Here(), err, "") 33 | } 34 | return Mirror{ 35 | targetVFS: targetVFS, 36 | compareVFS: compareVFS, 37 | }, nil 38 | } 39 | 40 | func ensureDirsMirror(targetVFS *Vfs, compareVFS *Vfs) error { 41 | var potErr error 42 | targetVFS.IterAssets(func(targetAsset *VirtualAsset) bool { 43 | foundCompareAsset := false 44 | shouldBreak := false 45 | compareVFS.IterAssets(func(compareAsset *VirtualAsset) bool { 46 | if targetAsset.FileNameNoExt != compareAsset.FileNameNoExt { 47 | return true 48 | } 49 | foundCompareAsset = true 50 | return false 51 | }) 52 | if !foundCompareAsset { 53 | potErr = wherr.Err(wherr.Here(), "failed to locate comparison asset for %s", targetAsset.Path) 54 | return false 55 | } 56 | if shouldBreak { 57 | return false 58 | } 59 | return true 60 | }) 61 | if potErr != nil { 62 | return potErr 63 | } 64 | return nil 65 | } 66 | 67 | func (d Mirror) Iter(fn func(target *VirtualAsset, compare *VirtualAsset) bool) { 68 | d.targetVFS.IterAssets(func(targetAsset *VirtualAsset) bool { 69 | d.compareVFS.IterAssets(func(compareAsset *VirtualAsset) bool { 70 | if targetAsset.FileNameNoExt == 
compareAsset.FileNameNoExt { 71 | shouldContinue := fn(targetAsset, compareAsset) 72 | if shouldContinue { 73 | return true 74 | } 75 | return false 76 | } 77 | return true 78 | }) 79 | return true 80 | }) 81 | } 82 | -------------------------------------------------------------------------------- /internal/soak/VirtualAsset.go: -------------------------------------------------------------------------------- 1 | package soak 2 | 3 | import ( 4 | "io/fs" 5 | "os" 6 | "path" 7 | "path/filepath" 8 | "strings" 9 | 10 | "github.com/phillip-england/wir/internal/wherr" 11 | ) 12 | 13 | type VirtualAsset struct { 14 | Path string 15 | Dirname string 16 | Text string 17 | Ext string 18 | RelPath string 19 | FileName string 20 | isLocked bool 21 | FileNameNoExt string 22 | } 23 | 24 | func UnixRelPath(basePath, targetPath string) (string, error) { 25 | relPath, err := filepath.Rel(basePath, targetPath) 26 | if err != nil { 27 | return "", wherr.Consume(wherr.Here(), err, "") 28 | } 29 | unixPath := filepath.ToSlash(relPath) 30 | if !strings.HasPrefix(unixPath, "./") && !strings.HasPrefix(unixPath, "../") { 31 | unixPath = "./" + unixPath 32 | } 33 | return unixPath, nil 34 | } 35 | 36 | func LoadVirtualAssets(isLocked bool, cwd string, pth string) ([]*VirtualAsset, error) { 37 | var assets []*VirtualAsset 38 | err := filepath.WalkDir(pth, func(p string, d fs.DirEntry, err error) error { 39 | if err != nil { 40 | return wherr.Consume(wherr.Here(), err, "") 41 | } 42 | if d.IsDir() { 43 | return nil 44 | } 45 | dirname := path.Dir(p) 46 | fBytes, err := os.ReadFile(p) 47 | if err != nil { 48 | return wherr.Consume(wherr.Here(), err, "") 49 | } 50 | relPath, err := UnixRelPath(cwd, p) 51 | if err != nil { 52 | return wherr.Consume(wherr.Here(), err, "") 53 | } 54 | ext := path.Ext(p) 55 | filename := path.Base(p) 56 | asset := &VirtualAsset{ 57 | Path: p, 58 | Dirname: dirname, 59 | Text: string(fBytes), 60 | Ext: ext, 61 | RelPath: relPath, 62 | FileName: filename, 63 | 
isLocked: isLocked, 64 | FileNameNoExt: strings.TrimSuffix(filename, ext), 65 | } 66 | assets = append(assets, asset) 67 | return nil 68 | }) 69 | if err != nil { 70 | return assets, wherr.Consume(wherr.Here(), err, "") 71 | } 72 | return assets, nil 73 | } 74 | 75 | func (a *VirtualAsset) Save() error { 76 | if a.isLocked { 77 | return wherr.Err(wherr.Here(), "attemped to save a locked virtual asset") 78 | } 79 | err := os.WriteFile(a.Path, []byte(a.Text), 0644) 80 | if err != nil { 81 | return wherr.Consume(wherr.Here(), err, "") 82 | } 83 | return nil 84 | } 85 | 86 | func (a *VirtualAsset) OverWrite(s string) error { 87 | if a.isLocked { 88 | return wherr.Err(wherr.Here(), "attemped to overwrite a locked virtual asset") 89 | } 90 | a.Text = s 91 | return nil 92 | } 93 | 94 | func (a *VirtualAsset) Append(s string) error { 95 | if a.isLocked { 96 | return wherr.Err(wherr.Here(), "attemped to append to a locked virtual asset") 97 | } 98 | a.Text = a.Text + s 99 | return nil 100 | } 101 | 102 | func (a *VirtualAsset) Prepend(s string) error { 103 | if a.isLocked { 104 | return wherr.Err(wherr.Here(), "attemped to prepend to a locked virtual asset") 105 | } 106 | a.Text = s + a.Text 107 | return nil 108 | } 109 | -------------------------------------------------------------------------------- /internal/wirtokenizer/Token.go: -------------------------------------------------------------------------------- 1 | package wirtokenizer 2 | 3 | import ( 4 | "fmt" 5 | "strings" 6 | 7 | "github.com/phillip-england/wir/internal/runelexer" 8 | "github.com/phillip-england/wir/internal/wherr" 9 | ) 10 | 11 | type TokenType string 12 | 13 | const ( 14 | TokenTypeRawText = "RAW_TEXT" 15 | TokenTypeString = "STRING" 16 | TokenTypeTagInfo = "TAG_INFO" 17 | TokenTypeAtDirective = "AT_DIRECTIVE" 18 | TokenTypeCurlyBraceOpen = "CURLY_BRACE_OPEN" 19 | TokenTypeCurlyBraceClose = "CURLY_BRACE_CLOSE" 20 | TokenTypeDollarSignInterpolation = "DOLLAR_SIGN_INTERPOLATION" 21 | 22 | 
TokenTypeHTMLTagInfoStart = "HTML_TAG_INFO_START" 23 | TokenTypeHTMLTagInfoEnd = "HTML_TAG_INFO_END" 24 | TokenTypeHTMLTagName = "HTML_TAG_NAME" 25 | TokenTypeHTMLCurlyBraceOpen = "HTML_CURLY_BRACE_OPEN" 26 | TokenTypeHTMLCurlyBraceClose = "HTML_CURLY_BRACE_CLOSE" 27 | 28 | TokenTypeHTMLAttrKey = "HTML_ATTR_KEY" 29 | TokenTypeHTMLAttrEqualSign = "HTML_ATTR_EQUAL_SIGN" 30 | TokenTypeHTMLAttrValue = "HTML_ATTR_VALUE" 31 | TokenTypeHTMLAttrValuePartial = "HTML_ATTR_VALUE_PARTIAL" 32 | 33 | TokenTypeStringStart = "STRING_START" 34 | TokenTypeStringEnd = "STRING_END" 35 | TokenTypeStringContent = "STRING_CONTENT" 36 | 37 | TokenTypeDollarSignInterpolationOpen = "DOLLAR_SIGN_INTERPOLATION_OPEN" 38 | TokenTypeDollarSignInterpolationClose = "DOLLAR_SIGN_INTERPOLATION_CLOSE" 39 | TokenTypeDollarSignInterpolationValue = "DOLLAR_SIGN_INTERPOLATION_VALUE" 40 | TokenTypeDollarSignInterpolationSemiColon = "DOLLAR_SIGN_INTERPOLATION_SEMICOLON" 41 | TokenTypeDollarSignInterpolationType = "DOLLAR_SIGN_INTERPOLATION_TYPE" 42 | 43 | TokenTypeAtDirectiveStart = "AT_DIRECTIVE_START" 44 | TokenTypeAtDirectiveName = "AT_DIRECTIVE_NAME" 45 | TokenTypeAtDirectiveParenthesisOpen = "AT_DIRECTIVE_PARENTHESIS_OPEN" 46 | TokenTypeAtDirectiveParenthesisClose = "AT_DIRECTIVE_PARENTHESIS_CLOSE" 47 | TokenTypeAtDirectiveParamValue = "AT_DIRECTIVE_PARAM_VALUE" 48 | TokenTypeAtDirectiveSemiColon = "AT_DIRECTIVE_SEMICOLON" 49 | TokenTypeAtDirectiveParamType = "AT_DIRECTIVE_PARAM_TYPE" 50 | 51 | 52 | TokenTypeEndOfFile = "END_OF_FILE" 53 | ) 54 | 55 | type Token struct { 56 | t TokenType 57 | text string 58 | } 59 | 60 | func (t Token) Str() string { 61 | return fmt.Sprintf("%s:%s", t.t, t.text) 62 | } 63 | 64 | func TokenManyFromStr(s string) ([]Token, error) { 65 | var toks []Token 66 | lines := strings.Split(s, "\n") 67 | for i, line := range lines { 68 | if strings.TrimSpace(line) == "" { 69 | continue 70 | } 71 | l := runelexer.NewRuneLexer[Token](line) 72 | foundColon := false 73 | l.Iter(func(ch 
string, pos int) bool { 74 | if ch == ":" { 75 | foundColon = true 76 | return false 77 | } 78 | return true 79 | }) 80 | if !foundColon { 81 | return toks, wherr.Err(wherr.Here(), "token string is malformed on line %d: %s", i+1, line) 82 | } 83 | t := l.PullFromStart() 84 | l.Next() 85 | text := l.PullFromEnd() 86 | tok := Token{ 87 | t: TokenType(t), 88 | text: text, 89 | } 90 | toks = append(toks, tok) 91 | } 92 | return toks, nil 93 | } -------------------------------------------------------------------------------- /internal/mood/Cli.go: -------------------------------------------------------------------------------- 1 | package mood 2 | 3 | import ( 4 | "os" 5 | 6 | "github.com/phillip-england/wir/internal/wherr" 7 | ) 8 | 9 | type Cli struct { 10 | Source string 11 | Args map[string]*Arg 12 | Flags map[string]*Arg 13 | Commands map[string]CommandFactory 14 | DefaultFactory CommandFactory 15 | DefaultCmd Cmd 16 | Store map[string]any 17 | Cwd string 18 | } 19 | 20 | func New(factory CommandFactory) (Cli, error) { 21 | cwd, err := os.Getwd() 22 | if err != nil { 23 | return Cli{}, wherr.Consume(wherr.Here(), err, "") 24 | } 25 | osArgs := os.Args 26 | flags := make(map[string]*Arg) 27 | args := make(map[string]*Arg) 28 | source := "" 29 | if len(osArgs) > 0 { 30 | source = osArgs[0] 31 | } 32 | for i, arg := range osArgs { 33 | if len(arg) > 1 && i > 0 && (arg[0] == '-' || (len(arg) > 2 && arg[:2] == "--")) { 34 | flags[arg] = &Arg{ 35 | Position: i, 36 | Value: arg, 37 | } 38 | continue 39 | } 40 | args[arg] = &Arg{ 41 | Position: i, 42 | Value: arg, 43 | } 44 | } 45 | cli := Cli{ 46 | Source: source, 47 | Args: args, 48 | Flags: flags, 49 | Commands: make(map[string]CommandFactory), 50 | Cwd: cwd, 51 | } 52 | err = cli.setDefault(factory) 53 | if err != nil { 54 | return cli, wherr.Consume(wherr.Here(), err, "") 55 | } 56 | return cli, nil 57 | } 58 | 59 | func (cli *Cli) At(commandName string, factory CommandFactory) { 60 | cli.Commands[commandName] = 
factory 61 | } 62 | 63 | func (cli *Cli) setDefault(factory CommandFactory) error { 64 | cli.DefaultFactory = factory 65 | cmd, err := factory(cli) 66 | if err != nil { 67 | return wherr.Consume(wherr.Here(), err, "") 68 | } 69 | cli.DefaultCmd = cmd 70 | return nil 71 | } 72 | 73 | func (cli *Cli) Run() error { 74 | 75 | 76 | firstArgPosition := 1 77 | var firstArg string 78 | 79 | for _, arg := range cli.Args { 80 | if arg.Position == firstArgPosition { 81 | firstArg = arg.Value 82 | break 83 | } 84 | } 85 | 86 | if firstArg == "" { 87 | return cli.DefaultCmd.Execute(cli) 88 | } 89 | 90 | if factory, exists := cli.Commands[firstArg]; exists { 91 | cmd, err := factory(cli) 92 | if err != nil { 93 | return wherr.Consume(wherr.Here(), err, "") 94 | } 95 | if err != nil { 96 | return wherr.Consume(wherr.Here(), err, "") 97 | } 98 | return cmd.Execute(cli) 99 | } 100 | 101 | return cli.DefaultCmd.Execute(cli) 102 | } 103 | 104 | 105 | func (cli *Cli) FlagExists(flag string) bool { 106 | _, exists := cli.Flags[flag] 107 | return exists 108 | } 109 | 110 | func (cli *Cli) ArgExists(arg string) bool { 111 | _, exists := cli.Args[arg] 112 | return exists 113 | } 114 | 115 | func (cli *Cli) ArgGetByStr(arg string) (string, bool) { 116 | val, exists := cli.Args[arg] 117 | return val.Value, exists 118 | } 119 | 120 | func (cli *Cli) ArgForceByStr(arg string) (string, error) { 121 | val, exists := cli.Args[arg] 122 | if !exists { 123 | return "", wherr.Err(wherr.Here(), "arg %s not found", arg) 124 | } 125 | return val.Value, nil 126 | } 127 | 128 | func (cli *Cli) ArgGetOrDefaultValue(arg string, defaultValue string) string { 129 | if val, exists := cli.Args[arg]; exists { 130 | return val.Value 131 | } 132 | return defaultValue 133 | } 134 | 135 | func (cli *Cli) ArgGetByPosition(position int) (string, bool) { 136 | for _, arg := range cli.Args { 137 | if arg.Position == position { 138 | return arg.Value, true 139 | } 140 | } 141 | return "", false 142 | } 143 | 144 | func 
(cli *Cli) ArgGetByPositionForce(position int, errMsg string) (string, error) { 145 | arg, exists := cli.ArgGetByPosition(position) 146 | if !exists { 147 | return "", wherr.Err(wherr.Here(), "%s", errMsg) 148 | } 149 | return arg, nil 150 | } 151 | 152 | func (cli *Cli) ArgMorphAtPosition(position int, value string) { 153 | for _, arg := range cli.Args { 154 | if arg.Position == position { 155 | arg.Value = value 156 | } 157 | } 158 | } 159 | 160 | -------------------------------------------------------------------------------- /internal/cli/cmd/CmdTokenize.go: -------------------------------------------------------------------------------- 1 | package cmd 2 | 3 | import ( 4 | "fmt" 5 | "os" 6 | "path" 7 | 8 | "github.com/phillip-england/wir/internal/mood" 9 | "github.com/phillip-england/wir/internal/soak" 10 | "github.com/phillip-england/wir/internal/wherr" 11 | "github.com/phillip-england/wir/internal/wirtokenizer" 12 | ) 13 | 14 | type CmdTokenize struct{ 15 | argInPath string 16 | argOutPath string 17 | inPathAbs string 18 | outPathAbs string 19 | shouldOverwrite bool 20 | isTargetingDir bool 21 | } 22 | 23 | func NewCmdTokenize(cli *mood.Cli) (mood.Cmd, error) { 24 | argInPath, err := cli.ArgGetByPositionForce(2, "missing for wir tokenize") 25 | if err != nil { 26 | return CmdTokenize{}, wherr.Consume(wherr.Here(), err, "") 27 | } 28 | argOutPath, err := cli.ArgGetByPositionForce(3, "missing for wir tokenize") 29 | if err != nil { 30 | return CmdTokenize{}, wherr.Consume(wherr.Here(), err, "") 31 | } 32 | inPathAbs := path.Join(cli.Cwd, argInPath) 33 | if !mood.FileExists(inPathAbs) { 34 | return CmdTokenize{}, wherr.Err(wherr.Here(), " does not exist in wir tokenize") 35 | } 36 | isTargetingDir := false 37 | if mood.IsDir(inPathAbs) { 38 | isTargetingDir = true 39 | } 40 | if isTargetingDir { 41 | if !mood.IsDir(argOutPath) { 42 | return CmdTokenize{}, wherr.Err(wherr.Here(), " must be a directory if is a directory") 43 | } 44 | } else { 45 | if 
!mood.IsFile(argOutPath) { 46 | return CmdTokenize{}, wherr.Err(wherr.Here(), " must be a file path if is a file") 47 | } 48 | } 49 | return CmdTokenize{ 50 | argInPath: argInPath, 51 | argOutPath: argOutPath, 52 | inPathAbs: inPathAbs, 53 | outPathAbs: path.Join(cli.Cwd, argOutPath), 54 | shouldOverwrite: cli.FlagExists("-o"), 55 | isTargetingDir: isTargetingDir, 56 | }, nil 57 | } 58 | 59 | func (cmd CmdTokenize) Execute(cli *mood.Cli) error { 60 | if cmd.isTargetingDir { 61 | err := tokenizeDir(cmd) 62 | if err != nil { 63 | return wherr.Consume(wherr.Here(), err, "") 64 | } 65 | } else { 66 | err := tokenizeFile(cmd) 67 | if err != nil { 68 | return wherr.Consume(wherr.Here(), err, "") 69 | } 70 | } 71 | return nil 72 | } 73 | 74 | func tokenizeFile(cmd CmdTokenize) error { 75 | if cmd.shouldOverwrite { 76 | os.RemoveAll(cmd.argOutPath) 77 | } 78 | if mood.FileExists(cmd.argOutPath) { 79 | return wherr.Err(wherr.Here(), "file already exists at %s", cmd.outPathAbs) 80 | } 81 | fBytes, err := os.ReadFile(cmd.inPathAbs) 82 | if err != nil { 83 | return wherr.Consume(wherr.Here(), err, "") 84 | } 85 | fStr := string(fBytes) 86 | tk, err := wirtokenizer.TokenizerNewFromString(fStr) 87 | if err != nil { 88 | return wherr.Consume(wherr.Here(), err, "") 89 | } 90 | err = os.WriteFile(cmd.outPathAbs, []byte(tk.Str()), 0775) 91 | if err != nil { 92 | return wherr.Consume(wherr.Here(), err, "") 93 | } 94 | return nil 95 | } 96 | 97 | func tokenizeDir(cmd CmdTokenize) error { 98 | if cmd.shouldOverwrite { 99 | os.RemoveAll(cmd.outPathAbs) 100 | } 101 | if mood.FileExists(cmd.outPathAbs) { 102 | return wherr.Err(wherr.Here(), "dir already exists at %s", cmd.outPathAbs) 103 | } 104 | vfs, err := soak.LoadVfsAbsolute(true, cmd.inPathAbs) 105 | if err != nil { 106 | return wherr.Consume(wherr.Here(), err, "") 107 | } 108 | var potErr error 109 | vfs.IterAssets(func(a *soak.VirtualAsset) bool { 110 | outDirPath := path.Join(cmd.outPathAbs, a.FileNameNoExt+".tok") 111 | tk, err 
:= wirtokenizer.TokenizerNewFromString(a.Text) 112 | if err != nil { 113 | potErr = wherr.Consume(wherr.Here(), err, "") 114 | } 115 | err = os.MkdirAll(path.Dir(outDirPath), 0755) 116 | if err != nil { 117 | potErr = wherr.Consume(wherr.Here(), err, "") 118 | } 119 | err = os.WriteFile(outDirPath, []byte(tk.Str()), 0644) 120 | if err != nil { 121 | fmt.Println(err.Error()) 122 | potErr = wherr.Consume(wherr.Here(), err, "") 123 | } 124 | return true 125 | }) 126 | if potErr != nil { 127 | return potErr 128 | } 129 | return nil 130 | } -------------------------------------------------------------------------------- /internal/runelexer/AbstractLexer.go: -------------------------------------------------------------------------------- 1 | package runelexer 2 | 3 | import ( 4 | "math" 5 | ) 6 | 7 | type AbstractLexer[T any] struct { 8 | position int 9 | endPosition int 10 | markedPosition int 11 | items []T 12 | store []T 13 | } 14 | 15 | func AbstractLexerNew[T any](items []T) *AbstractLexer[T] { 16 | return &AbstractLexer[T]{ 17 | position: 0, 18 | endPosition: len(items) - 1, 19 | markedPosition: 0, 20 | items: items, 21 | store: []T{}, 22 | } 23 | } 24 | 25 | func (l *AbstractLexer[T]) Items() []T { 26 | return l.items 27 | } 28 | 29 | func (l *AbstractLexer[T]) Item() T { 30 | var zero T 31 | if len(l.items) == 0 { 32 | return zero 33 | } 34 | if l.position < 0 || l.position >= len(l.items) { 35 | return zero 36 | } 37 | return l.items[l.position] 38 | } 39 | 40 | func (l *AbstractLexer[T]) AtEnd() bool { 41 | return l.position >= l.endPosition 42 | } 43 | 44 | func (l *AbstractLexer[T]) AtStart() bool { 45 | return l.position == 0 46 | } 47 | 48 | func (l *AbstractLexer[T]) Len() int { 49 | return len(l.items) 50 | } 51 | 52 | func (l *AbstractLexer[T]) Pos() int { 53 | return l.position 54 | } 55 | 56 | func (l *AbstractLexer[T]) Next() { 57 | l.position++ 58 | if l.AtEnd() { 59 | l.position = l.endPosition 60 | } 61 | } 62 | 63 | func (l *AbstractLexer[T]) 
Prev() { 64 | l.position-- 65 | if l.position < 0 { 66 | l.position = 0 67 | } 68 | } 69 | 70 | func (l *AbstractLexer[T]) Iter(fn func(item T, pos int) bool) { 71 | if len(l.items) == 0 { 72 | return 73 | } 74 | for { 75 | shouldContinue := fn(l.Item(), l.Pos()) 76 | if !shouldContinue { 77 | break 78 | } 79 | if l.AtEnd() { 80 | break 81 | } 82 | l.Next() 83 | } 84 | } 85 | 86 | func (l *AbstractLexer[T]) Mark() { 87 | l.markedPosition = l.Pos() 88 | } 89 | 90 | func (l *AbstractLexer[T]) GoToEnd() { 91 | l.position = l.endPosition 92 | } 93 | 94 | func (l *AbstractLexer[T]) GoToStart() { 95 | l.position = 0 96 | } 97 | 98 | func (l *AbstractLexer[T]) GoToMark() { 99 | l.position = l.markedPosition 100 | } 101 | 102 | func (l *AbstractLexer[T]) PullFromStart() []T { 103 | start := 0 104 | end := l.position + 1 105 | if end > len(l.items) { 106 | end = len(l.items) 107 | } 108 | if start >= end { 109 | return []T{} 110 | } 111 | return l.items[start:end] 112 | } 113 | 114 | func (l *AbstractLexer[T]) PullFromEnd() []T { 115 | start := l.position 116 | if start < 0 { 117 | start = 0 118 | } 119 | if start >= len(l.items) { 120 | return []T{} 121 | } 122 | return l.items[start:] 123 | } 124 | 125 | func (l *AbstractLexer[T]) PullFromMark() []T { 126 | start := l.markedPosition 127 | end := l.position 128 | if start > end { 129 | start, end = end, start 130 | } 131 | end++ 132 | if end > len(l.items) { 133 | end = len(l.items) 134 | } 135 | if start < 0 { 136 | start = 0 137 | } 138 | if start >= end { 139 | return []T{} 140 | } 141 | return l.items[start:end] 142 | } 143 | 144 | func (l *AbstractLexer[T]) NextBy(n int) { 145 | for count := 0; count < n; count++ { 146 | l.Next() 147 | } 148 | } 149 | 150 | func (l *AbstractLexer[T]) PrevBy(n int) { 151 | for count := 0; count < n; count++ { 152 | l.Prev() 153 | } 154 | } 155 | 156 | func (l *AbstractLexer[T]) Peek(n int) T { 157 | start := l.position 158 | if n >= 0 { 159 | l.NextBy(n) 160 | } else { 161 | 
l.PrevBy(int(math.Abs(float64(n)))) 162 | } 163 | item := l.Item() 164 | l.position = start 165 | return item 166 | } 167 | 168 | func (l *AbstractLexer[T]) PullFrom(start, end int) []T { 169 | if start < 0 { 170 | start = 0 171 | } 172 | if end < 0 { 173 | end = 0 174 | } 175 | if start >= len(l.items) { 176 | start = len(l.items) - 1 177 | } 178 | if end >= len(l.items) { 179 | end = len(l.items) - 1 180 | } 181 | if start < 0 { 182 | start = 0 183 | } 184 | if end < 0 { 185 | end = 0 186 | } 187 | if start > end { 188 | start, end = end, start 189 | } 190 | end++ 191 | if end > len(l.items) { 192 | end = len(l.items) 193 | } 194 | if start >= end { 195 | return []T{} 196 | } 197 | return l.items[start:end] 198 | } 199 | 200 | func (l *AbstractLexer[T]) MarkedPos() int { 201 | return l.markedPosition 202 | } 203 | 204 | func (l *AbstractLexer[T]) Store() { 205 | if l.position >= 0 && l.position < len(l.items) { 206 | l.store = append(l.store, l.items[l.position]) 207 | } 208 | } 209 | 210 | func (l *AbstractLexer[T]) StoreItems() []T { 211 | return l.store 212 | } 213 | 214 | func (l *AbstractLexer[T]) StoreClear() { 215 | l.store = []T{} 216 | } 217 | 218 | func (l *AbstractLexer[T]) StoreLen() int { 219 | return len(l.store) 220 | } 221 | 222 | func (l *AbstractLexer[T]) StoreFlush() []T { 223 | s := l.StoreItems() 224 | l.StoreClear() 225 | return s 226 | } 227 | 228 | func (l *AbstractLexer[T]) Pull(n int) []T { 229 | if len(l.items) == 0 { 230 | return []T{} 231 | } 232 | 233 | if n == 0 { 234 | return []T{l.Item()} 235 | } 236 | 237 | var start, end int 238 | 239 | if n > 0 { 240 | start = l.position 241 | end = l.position + n + 1 242 | if end > len(l.items) { 243 | end = len(l.items) 244 | } 245 | } else { 246 | start = l.position + n 247 | end = l.position + 1 248 | if start < 0 { 249 | start = 0 250 | } 251 | if end > len(l.items) { 252 | end = len(l.items) 253 | } 254 | } 255 | if start < 0 { 256 | start = 0 257 | } 258 | if end > len(l.items) { 259 | 
end = len(l.items) 260 | } 261 | if start >= len(l.items) { 262 | return []T{} 263 | } 264 | if start >= end { 265 | return []T{} 266 | } 267 | 268 | return l.items[start:end] 269 | } -------------------------------------------------------------------------------- /internal/runelexer/runelexer.go: -------------------------------------------------------------------------------- 1 | package runelexer 2 | 3 | import ( 4 | "math" 5 | ) 6 | 7 | type RuneLexer[T any] struct { 8 | position int 9 | endPosition int 10 | runes []rune 11 | markedPos int 12 | tokens []T 13 | state int 14 | store []rune 15 | } 16 | 17 | func NewRuneLexer[T any](s string) *RuneLexer[T] { 18 | runes := []rune(s) 19 | return &RuneLexer[T]{ 20 | position: 0, 21 | endPosition: len(runes) - 1, 22 | runes: runes, 23 | markedPos: 0, 24 | tokens: []T{}, 25 | state: 0, 26 | } 27 | } 28 | 29 | func (l *RuneLexer[T]) State() int { 30 | return l.state 31 | } 32 | 33 | func (l *RuneLexer[T]) SetState(n int) { 34 | l.state = n 35 | } 36 | 37 | func (l *RuneLexer[T]) Runes() []rune { 38 | return l.runes 39 | } 40 | 41 | func (l *RuneLexer[T]) Char() string { 42 | if len(l.runes) == 0 { 43 | return "" 44 | } 45 | return string(l.runes[l.position]) 46 | } 47 | 48 | func (l *RuneLexer[T]) AtEnd() bool { 49 | return l.position >= l.endPosition 50 | } 51 | 52 | func (l *RuneLexer[T]) AtStart() bool { 53 | return l.position == 0 54 | } 55 | 56 | func (l *RuneLexer[T]) Len() int { 57 | return len(l.runes) 58 | } 59 | 60 | func (l *RuneLexer[T]) Pos() int { 61 | return l.position 62 | } 63 | 64 | func (l *RuneLexer[T]) Next() { 65 | l.position++ 66 | if l.AtEnd() { 67 | l.position = l.endPosition 68 | } 69 | } 70 | 71 | func (l *RuneLexer[T]) Prev() { 72 | l.position-- 73 | if l.position < 0 { 74 | l.position = 0 75 | } 76 | } 77 | 78 | func (l *RuneLexer[T]) Iter(fn func(ch string, pos int) bool) { 79 | for { 80 | shouldContinue := fn(l.Char(), l.Pos()) 81 | if !shouldContinue { 82 | break 83 | } 84 | if l.AtEnd() { 
85 | break 86 | } 87 | l.Next() 88 | } 89 | } 90 | 91 | func (l *RuneLexer[T]) Mark() { 92 | l.markedPos = l.Pos() 93 | } 94 | 95 | func (l *RuneLexer[T]) GoToEnd() { 96 | l.position = l.endPosition 97 | } 98 | 99 | func (l *RuneLexer[T]) GoToStart() { 100 | l.position = 0 101 | } 102 | 103 | func (l *RuneLexer[T]) GoToMark() { 104 | l.position = l.markedPos 105 | } 106 | 107 | func (l *RuneLexer[T]) PullFromStart() string { 108 | start := 0 109 | end := l.position + 1 110 | if end > len(l.runes) { 111 | end = len(l.runes) 112 | } 113 | return string(l.runes[start:end]) 114 | } 115 | 116 | func (l *RuneLexer[T]) PullFromEnd() string { 117 | start := l.position 118 | if start < 0 { 119 | start = 0 120 | } 121 | return string(l.runes[start:]) 122 | } 123 | 124 | func (l *RuneLexer[T]) PullFromMark() string { 125 | start := l.markedPos 126 | end := l.position 127 | if start > end { 128 | start, end = end, start 129 | } 130 | end++ 131 | if end > len(l.runes) { 132 | end = len(l.runes) 133 | } 134 | return string(l.runes[start:end]) 135 | } 136 | 137 | func (l *RuneLexer[T]) NextBy(n int) { 138 | for count := 0; count < n; count++ { 139 | l.Next() 140 | } 141 | } 142 | 143 | func (l *RuneLexer[T]) PrevBy(n int) { 144 | for count := 0; count < n; count++ { 145 | l.Prev() 146 | } 147 | } 148 | 149 | func (l *RuneLexer[T]) Peek(n int) string { 150 | start := l.position 151 | if n >= 0 { 152 | l.NextBy(n) 153 | } else { 154 | l.PrevBy(int(math.Abs(float64(n)))) 155 | } 156 | c := l.Char() 157 | l.position = start 158 | return c 159 | } 160 | 161 | func (l *RuneLexer[T]) Str() string { 162 | return string(l.runes) 163 | } 164 | 165 | func (l *RuneLexer[T]) NextUntil(ch string) bool { 166 | l.Next() 167 | for !l.AtEnd() { 168 | if l.Char() == ch { 169 | return true 170 | } 171 | l.Next() 172 | } 173 | return l.Char() == ch 174 | } 175 | 176 | func (l *RuneLexer[T]) PrevUntil(ch string) bool { 177 | l.Prev() 178 | for l.position > 0 { 179 | if l.Char() == ch { 180 | return 
true 181 | } 182 | l.Prev() 183 | } 184 | return l.Char() == ch 185 | } 186 | 187 | func (l *RuneLexer[T]) NextUntilNot(ch string) bool { 188 | if l.Char() != ch { 189 | return true 190 | } 191 | for !l.AtEnd() { 192 | l.Next() 193 | if l.Char() != ch { 194 | return true 195 | } 196 | } 197 | return l.Char() != ch 198 | } 199 | 200 | func (l *RuneLexer[T]) PrevUntilNot(ch string) bool { 201 | if l.Char() != ch { 202 | return true 203 | } 204 | for l.position > 0 { 205 | l.Prev() 206 | if l.Char() != ch { 207 | return true 208 | } 209 | } 210 | return l.Char() != ch 211 | } 212 | 213 | func (l *RuneLexer[T]) PullFrom(start, end int) string { 214 | if start < 0 { 215 | start = 0 216 | } 217 | if end < 0 { 218 | end = 0 219 | } 220 | if start >= len(l.runes) { 221 | start = len(l.runes) - 1 222 | } 223 | if end >= len(l.runes) { 224 | end = len(l.runes) - 1 225 | } 226 | if start > end { 227 | start, end = end, start 228 | } 229 | end++ 230 | if end > len(l.runes) { 231 | end = len(l.runes) 232 | } 233 | return string(l.runes[start:end]) 234 | } 235 | 236 | func (l *RuneLexer[T]) MarkedPos() int { 237 | return l.markedPos 238 | } 239 | 240 | func (l *RuneLexer[T]) NextUntilAny(chars ...string) string { 241 | start := l.position 242 | for !l.AtEnd() { 243 | ch := string(l.runes[l.position]) 244 | if contains(chars, ch) { 245 | break 246 | } 247 | l.position++ 248 | } 249 | return string(l.runes[start:l.position]) 250 | } 251 | 252 | func (l *RuneLexer[T]) PrevUntilAny(chars ...string) string { 253 | start := l.position 254 | for !l.AtStart() { 255 | ch := string(l.runes[l.position]) 256 | if contains(chars, ch) { 257 | break 258 | } 259 | l.position-- 260 | } 261 | return string(l.runes[l.position+1 : start+1]) 262 | } 263 | 264 | func (l *RuneLexer[T]) NextUntilNotAny(chars ...string) string { 265 | start := l.position 266 | for !l.AtEnd() { 267 | ch := string(l.runes[l.position]) 268 | if !contains(chars, ch) { 269 | break 270 | } 271 | l.position++ 272 | } 273 | 
return string(l.runes[start:l.position]) 274 | } 275 | 276 | func (l *RuneLexer[T]) PrevUntilNotAny(chars ...string) string { 277 | start := l.position 278 | for !l.AtStart() { 279 | ch := string(l.runes[l.position]) 280 | if !contains(chars, ch) { 281 | break 282 | } 283 | l.position-- 284 | } 285 | return string(l.runes[l.position+1 : start+1]) 286 | } 287 | 288 | func (l *RuneLexer[T]) InQuote() bool { 289 | return isInQuote(l.runes, l.position) 290 | } 291 | 292 | func (l *RuneLexer[T]) TokenAppend(token T) { 293 | l.tokens = append(l.tokens, token) 294 | } 295 | 296 | func (l *RuneLexer[T]) TokenIter(fn func(tk T, index int) bool) { 297 | for i, token := range l.tokens { 298 | shouldContinue := fn(token, i) 299 | if !shouldContinue { 300 | break 301 | } 302 | } 303 | } 304 | 305 | func (l *RuneLexer[T]) TokenLast() T { 306 | var zero T 307 | if len(l.tokens) == 0 { 308 | return zero 309 | } 310 | return l.tokens[len(l.tokens)-1] 311 | } 312 | 313 | func (l *RuneLexer[T]) TokenLen() int { 314 | return len(l.tokens) 315 | } 316 | 317 | func (l *RuneLexer[T]) Tokens() []T { 318 | return l.tokens 319 | } 320 | 321 | func (l *RuneLexer[T]) TokenClean() { 322 | l.tokens = []T{} 323 | } 324 | 325 | func (l *RuneLexer[T]) TokenOverwrite(toks []T) { 326 | l.tokens = toks 327 | } 328 | 329 | func contains(arr []string, target string) bool { 330 | for _, a := range arr { 331 | if a == target { 332 | return true 333 | } 334 | } 335 | return false 336 | } 337 | 338 | func (l *RuneLexer[T]) Store() { 339 | if l.position >= 0 && l.position < len(l.runes) { 340 | l.store = append(l.store, l.runes[l.position]) 341 | } 342 | } 343 | 344 | func (l *RuneLexer[T]) StoreStr() string { 345 | return string(l.store) 346 | } 347 | 348 | func (l *RuneLexer[T]) StoreClear() { 349 | l.store = []rune{} 350 | } 351 | 352 | func (l *RuneLexer[T]) StoreLen() int { 353 | return len(l.store) 354 | } 355 | 356 | func (l *RuneLexer[T]) StoreFlush() string { 357 | s := l.StoreStr() 358 | 
l.StoreClear() 359 | return s 360 | } 361 | 362 | func (l *RuneLexer[T]) Pull(n int) string { 363 | if n == 0 { 364 | return l.Char() 365 | } 366 | 367 | var start, end int 368 | 369 | if n > 0 { 370 | start = l.position 371 | end = l.position + n + 1 372 | if end > len(l.runes) { 373 | end = len(l.runes) 374 | } 375 | } else { 376 | start = l.position + n 377 | end = l.position + 1 378 | if start < 0 { 379 | start = 0 380 | } 381 | if end > len(l.runes) { 382 | end = len(l.runes) 383 | } 384 | } 385 | 386 | return string(l.runes[start:end]) 387 | } 388 | 389 | func isInQuote(r []rune, pos int) bool { 390 | inDouble := false 391 | inSingle := false 392 | inQuote := false 393 | for i, rn := range r { 394 | 395 | prevChar := "" 396 | if i > 0 { 397 | prevChar = string(r[i-1]) 398 | } 399 | if i > pos { 400 | break 401 | } 402 | ch := string(rn) 403 | switch ch { 404 | default: 405 | { 406 | continue 407 | } 408 | case "\"": 409 | { 410 | if prevChar == "\\" { 411 | continue 412 | } 413 | if !inDouble && !inSingle { 414 | inDouble = true 415 | inQuote = true 416 | continue 417 | } 418 | if inDouble && !inSingle { 419 | inDouble = false 420 | inQuote = false 421 | continue 422 | } 423 | if inDouble && inSingle { 424 | inDouble = false 425 | continue 426 | } 427 | if !inDouble && inSingle { 428 | inQuote = true 429 | continue 430 | } 431 | } 432 | case "'": 433 | { 434 | if prevChar == "\\" { 435 | continue 436 | } 437 | if !inSingle && !inDouble { 438 | inSingle = true 439 | inQuote = true 440 | continue 441 | } 442 | if inSingle && !inDouble { 443 | inSingle = false 444 | inQuote = false 445 | continue 446 | } 447 | if inSingle && inDouble { 448 | inSingle = false 449 | continue 450 | } 451 | if !inSingle && inDouble { 452 | inQuote = true 453 | continue 454 | } 455 | } 456 | } 457 | } 458 | return inQuote 459 | } 460 | -------------------------------------------------------------------------------- /internal/wirtokenizer/Tokenizer.go: 
-------------------------------------------------------------------------------- 1 | package wirtokenizer 2 | 3 | import ( 4 | "os" 5 | "strings" 6 | 7 | "github.com/phillip-england/wir/internal/runelexer" 8 | "github.com/phillip-england/wir/internal/wherr" 9 | ) 10 | 11 | type Tokenizer struct { 12 | Lexer *runelexer.RuneLexer[Token] 13 | } 14 | 15 | func TokenizerNewFromFile(path string) (*Tokenizer, error) { 16 | fBytes, err := os.ReadFile(path) 17 | if err != nil { 18 | return &Tokenizer{}, wherr.Consume(wherr.Here(), err, "") 19 | } 20 | fStr := string(fBytes) 21 | tk, err := TokenizerNewFromString(fStr) 22 | if err != nil { 23 | return &Tokenizer{}, wherr.Consume(wherr.Here(), err, "") 24 | } 25 | return tk, nil 26 | } 27 | 28 | func TokenizerNewFromString(s string) (*Tokenizer, error) { 29 | s = strings.TrimSpace(s) 30 | l := runelexer.NewRuneLexer[Token](s) 31 | err := tokenizeWir(l) 32 | if err != nil { 33 | return &Tokenizer{}, err 34 | } 35 | return &Tokenizer{ 36 | Lexer: l, 37 | }, nil 38 | } 39 | 40 | func (t *Tokenizer) Str() string { 41 | s := "" 42 | if t.Lexer.TokenLen() == 0 { 43 | return "" 44 | } 45 | t.Lexer.TokenIter(func(token Token, index int) bool { 46 | s += string(token.t) + ":" + token.text + "\n" 47 | return true 48 | }) 49 | return s 50 | } 51 | 52 | type TokenState = int 53 | 54 | const ( 55 | TokenStateInit = iota 56 | ) 57 | 58 | func tokenizeWir(l *runelexer.RuneLexer[Token]) error { 59 | err := phase1(l) 60 | if err != nil { 61 | return err 62 | } 63 | err = phase2(l) 64 | if err != nil { 65 | return err 66 | } 67 | err = phase3(l) 68 | if err != nil { 69 | return err 70 | } 71 | return nil 72 | } 73 | 74 | func phase3(l *runelexer.RuneLexer[Token]) error { 75 | var toks []Token 76 | l.TokenIter(func(tk Token, index int) bool { 77 | switch tk.t { 78 | default: 79 | { 80 | toks = append(toks, tk) 81 | } 82 | case TokenTypeCurlyBraceOpen: 83 | { 84 | toks = append(toks, Token{ 85 | t: TokenTypeHTMLCurlyBraceOpen, 86 | text: 
tk.text, 87 | }) 88 | } 89 | case TokenTypeCurlyBraceClose: 90 | { 91 | toks = append(toks, Token{ 92 | t: TokenTypeHTMLCurlyBraceClose, 93 | text: tk.text, 94 | }) 95 | } 96 | case TokenTypeRawText: 97 | { 98 | toks = append(toks, Token{ 99 | t: TokenTypeHTMLTagName, 100 | text: tk.text, 101 | }) 102 | } 103 | case TokenTypeDollarSignInterpolation: 104 | { 105 | toks = append(toks, Token{ 106 | t: TokenTypeDollarSignInterpolationOpen, 107 | text: "${", 108 | }) 109 | s := tk.text 110 | s = strings.Replace(s, "${", "", 1) 111 | s = s[0 : len(s)-1] 112 | innerParts := strings.Split(s, ":") 113 | for i, v := range innerParts { 114 | if i%2 == 0 { 115 | toks = append(toks, Token{ 116 | t: TokenTypeDollarSignInterpolationValue, 117 | text: strings.TrimSpace(v), 118 | }) 119 | toks = append(toks, Token{ 120 | t: TokenTypeDollarSignInterpolationSemiColon, 121 | text: ":", 122 | }) 123 | } else { 124 | toks = append(toks, Token{ 125 | t: TokenTypeDollarSignInterpolationType, 126 | text: strings.TrimSpace(v), 127 | }) 128 | } 129 | } 130 | } 131 | toks = append(toks, Token{ 132 | t: TokenTypeDollarSignInterpolationClose, 133 | text: "}", 134 | }) 135 | } 136 | return true 137 | }) 138 | l.TokenOverwrite(toks) 139 | return nil 140 | } 141 | 142 | func phase2(l *runelexer.RuneLexer[Token]) error { 143 | var toks []Token 144 | l.TokenIter(func(tk Token, index int) bool { 145 | switch tk.t { 146 | default: 147 | { 148 | toks = append(toks, tk) 149 | } 150 | case TokenTypeTagInfo: 151 | { 152 | l2 := runelexer.NewRuneLexer[Token](tk.text) 153 | l2.Iter(func(ch string, pos int) bool { 154 | switch ch { 155 | default: 156 | { 157 | l2.Store() 158 | } 159 | case "=": 160 | { 161 | attrKey := strings.TrimSpace(l2.StoreFlush()) 162 | toks = append(toks, Token{ 163 | t: TokenTypeHTMLAttrKey, 164 | text: attrKey, 165 | }) 166 | toks = append(toks, Token{ 167 | t: TokenTypeHTMLAttrEqualSign, 168 | text: "=", 169 | }) 170 | } 171 | case "<": 172 | { 173 | toks = append(toks, Token{ 174 
| t: TokenTypeHTMLTagInfoStart, 175 | text: "<", 176 | }) 177 | } 178 | case ">": 179 | { 180 | attrKey := strings.TrimSpace(l2.StoreFlush()) 181 | if attrKey != "" { 182 | toks = append(toks, Token{ 183 | t: TokenTypeHTMLAttrKey, 184 | text: attrKey, 185 | }) 186 | } 187 | toks = append(toks, Token{ 188 | t: TokenTypeHTMLTagInfoEnd, 189 | text: ">", 190 | }) 191 | } 192 | case "'": 193 | { 194 | l2.Mark() 195 | l2.Next() 196 | l2.Iter(func(ch2 string, pos int) bool { 197 | if ch2 == "'" && l2.Peek(-1) != "\\" { 198 | htmlAttr := l2.PullFromMark() 199 | brokeAttr := false 200 | l3 := runelexer.NewRuneLexer[Token](htmlAttr) 201 | l3.Iter(func(ch3 string, pos int) bool { 202 | switch ch3 { 203 | default: 204 | { 205 | l3.Store() 206 | } 207 | case "$": 208 | if l3.Peek(1) != "{" { 209 | l3.Store() 210 | return true 211 | } 212 | brokeAttr = true 213 | attrBit := l3.StoreFlush() 214 | toks = append(toks, Token{ 215 | t: TokenTypeHTMLAttrValuePartial, 216 | text: attrBit, 217 | }) 218 | l3.Mark() 219 | l3.NextUntil("}") 220 | dollarSignInterpolation := l3.PullFromMark() 221 | toks = append(toks, Token{ 222 | t: TokenTypeDollarSignInterpolation, 223 | text: dollarSignInterpolation, 224 | }) 225 | } 226 | return true 227 | }) 228 | if brokeAttr { 229 | toks = append(toks, Token{ 230 | t: TokenTypeHTMLAttrValuePartial, 231 | text: l3.StoreFlush(), 232 | }) 233 | } else { 234 | toks = append(toks, Token{ 235 | t: TokenTypeHTMLAttrValue, 236 | text: l3.PullFromMark(), 237 | }) 238 | } 239 | return false 240 | } 241 | return true 242 | }) 243 | } 244 | case "\"": 245 | { 246 | l2.Mark() 247 | l2.Next() 248 | l2.Iter(func(ch2 string, pos int) bool { 249 | if ch2 == "\"" && l2.Peek(-1) != "\\" { 250 | toks = append(toks, Token{ 251 | t: TokenTypeHTMLAttrValue, 252 | text: l2.PullFromMark(), 253 | }) 254 | return false 255 | } 256 | return true 257 | }) 258 | } 259 | } 260 | return true 261 | }) 262 | } 263 | case TokenTypeString: 264 | { 265 | l2 := 
runelexer.NewRuneLexer[Token](tk.text) 266 | l2.Iter(func(ch string, pos int) bool { 267 | switch ch { 268 | default: 269 | { 270 | l2.Store() 271 | } 272 | case "$": 273 | { 274 | if l2.Peek(1) != "{" { 275 | l2.Store() 276 | return true 277 | } 278 | toks = append(toks, Token{ 279 | t: TokenTypeStringContent, 280 | text: l2.StoreFlush(), 281 | }) 282 | l2.Mark() 283 | l2.NextUntil("}") 284 | toks = append(toks, Token{ 285 | t: TokenTypeDollarSignInterpolation, 286 | text: l2.PullFromMark(), 287 | }) 288 | } 289 | case "'": 290 | { 291 | if pos == 0 { 292 | toks = append(toks, Token{ 293 | t: TokenTypeStringStart, 294 | text: "'", 295 | }) 296 | return true 297 | } 298 | if l2.AtEnd() && ch == "'" { 299 | flush := l2.StoreFlush() 300 | if flush != "" { 301 | toks = append(toks, Token{ 302 | t: TokenTypeStringContent, 303 | text: flush, 304 | }) 305 | } 306 | toks = append(toks, Token{ 307 | t: TokenTypeStringEnd, 308 | text: "'", 309 | }) 310 | return true 311 | } 312 | } 313 | case "\"": 314 | { 315 | if pos == 0 { 316 | toks = append(toks, Token{ 317 | t: TokenTypeStringStart, 318 | text: "\"", 319 | }) 320 | return true 321 | } 322 | if l2.AtEnd() && ch == "\"" { 323 | toks = append(toks, Token{ 324 | t: TokenTypeStringContent, 325 | text: l2.StoreFlush(), 326 | }) 327 | toks = append(toks, Token{ 328 | t: TokenTypeStringEnd, 329 | text: "\"", 330 | }) 331 | return true 332 | } 333 | } 334 | } 335 | return true 336 | }) 337 | } 338 | case TokenTypeAtDirective: 339 | { 340 | l2 := runelexer.NewRuneLexer[Token](tk.text) 341 | l2.Iter(func(ch string, pos int) bool { 342 | switch ch { 343 | default: 344 | { 345 | l2.Store() 346 | } 347 | case ")": 348 | { 349 | toks = append(toks, Token{ 350 | t: TokenTypeAtDirectiveParenthesisClose, 351 | text: ")", 352 | }) 353 | } 354 | case "(": 355 | { 356 | directiveName := l2.StoreFlush() 357 | toks = append(toks, Token{ 358 | t: TokenTypeAtDirectiveName, 359 | text: directiveName, 360 | }) 361 | toks = append(toks, Token{ 
362 | t: TokenTypeAtDirectiveParenthesisOpen, 363 | text: "(", 364 | }) 365 | l2.Next() 366 | l2.Mark() 367 | l2.GoToEnd() 368 | l2.Prev() 369 | l2.PullFromMark() 370 | directiveInputParams := l2.PullFromMark() 371 | directiveInputParts := strings.Split(directiveInputParams, ":") 372 | for i, v := range directiveInputParts { 373 | v = strings.TrimSpace(v) 374 | if i%2 == 0 { 375 | toks = append(toks, Token{ 376 | t: TokenTypeAtDirectiveParamValue, 377 | text: v, 378 | }) 379 | toks = append(toks, Token{ 380 | t: TokenTypeAtDirectiveSemiColon, 381 | text: ":", 382 | }) 383 | } else { 384 | toks = append(toks, Token{ 385 | t: TokenTypeAtDirectiveParamType, 386 | text: v, 387 | }) 388 | } 389 | } 390 | } 391 | case "@": 392 | { 393 | toks = append(toks, Token{ 394 | t: TokenTypeAtDirectiveStart, 395 | text: "@", 396 | }) 397 | } 398 | } 399 | return true 400 | }) 401 | } 402 | } 403 | return true 404 | }) 405 | l.TokenOverwrite(toks) 406 | return nil 407 | } 408 | 409 | func phase1(l *runelexer.RuneLexer[Token]) error { 410 | collectStore := func(l *runelexer.RuneLexer[Token]) { 411 | flush := l.StoreFlush() 412 | s := strings.TrimSpace(flush) 413 | if s != "" { 414 | l.TokenAppend(Token{ 415 | t: TokenTypeRawText, 416 | text: strings.TrimSpace(s), 417 | }) 418 | } 419 | } 420 | ranFinal := false 421 | for { 422 | ch := l.Char() 423 | switch ch { 424 | default: 425 | { 426 | l.Store() 427 | break 428 | } 429 | case "@": 430 | { 431 | if l.InQuote() { 432 | l.Store() 433 | break 434 | } 435 | collectStore(l) 436 | if l.Pull(4) == "@for(" { 437 | l.Mark() 438 | l.NextUntil(")") 439 | l.TokenAppend(Token{ 440 | t: TokenTypeAtDirective, 441 | text: l.PullFromMark(), 442 | }) 443 | } else { 444 | l.Store() 445 | break 446 | } 447 | } 448 | case "'": 449 | { 450 | collectStore(l) 451 | l.Mark() 452 | l.Next() 453 | l.Iter(func(ch2 string, pos int) bool { 454 | if ch2 == "'" && l.Peek(-1) != "\\" { 455 | l.TokenAppend(Token{ 456 | t: TokenTypeString, 457 | text: 
l.PullFromMark(), 458 | }) 459 | return false 460 | } 461 | return true 462 | }) 463 | } 464 | case "\"": 465 | { 466 | collectStore(l) 467 | l.Mark() 468 | l.Next() 469 | l.Iter(func(ch2 string, pos int) bool { 470 | if ch2 == "\"" && l.Peek(-1) != "\\" { 471 | l.TokenAppend(Token{ 472 | t: TokenTypeString, 473 | text: l.PullFromMark(), 474 | }) 475 | return false 476 | } 477 | return true 478 | }) 479 | } 480 | case "{": 481 | { 482 | if l.InQuote() { 483 | l.Store() 484 | break 485 | } 486 | collectStore(l) 487 | l.TokenAppend(Token{ 488 | t: TokenTypeCurlyBraceOpen, 489 | text: "{", 490 | }) 491 | } 492 | case "}": 493 | { 494 | if l.InQuote() { 495 | l.Store() 496 | break 497 | } 498 | collectStore(l) 499 | l.TokenAppend(Token{ 500 | t: TokenTypeCurlyBraceClose, 501 | text: "}", 502 | }) 503 | } 504 | case "<": 505 | { 506 | if l.InQuote() { 507 | l.Store() 508 | break 509 | } 510 | collectStore(l) 511 | if l.StoreLen() > 0 { 512 | l.TokenAppend(Token{ 513 | t: TokenTypeRawText, 514 | text: l.StoreFlush(), 515 | }) 516 | } 517 | l.Mark() 518 | l.Iter(func(ch2 string, pos int) bool { 519 | if l.InQuote() { 520 | return true 521 | } 522 | if ch2 != ">" && ch2 != "}" { 523 | return true 524 | } 525 | if ch2 == "}" { 526 | l.TokenAppend(Token{ 527 | t: TokenTypeRawText, 528 | text: l.PullFromMark(), 529 | }) 530 | } 531 | if ch2 == ">" { 532 | l.TokenAppend(Token{ 533 | t: TokenTypeTagInfo, 534 | text: l.PullFromMark(), 535 | }) 536 | } 537 | return false 538 | }) 539 | } 540 | } 541 | l.Next() 542 | if l.AtEnd() { 543 | if ranFinal { 544 | break 545 | } else { 546 | ranFinal = true 547 | } 548 | } 549 | } 550 | l.TokenAppend(Token{ 551 | t: TokenTypeEndOfFile, 552 | text: "EOF", 553 | }) 554 | return nil 555 | } --------------------------------------------------------------------------------