├── LICENSE
├── README.md
├── go.mod
├── internal
│   ├── parser
│   │   ├── evaluator.go
│   │   ├── evaluator_test.go
│   │   ├── lexer.go
│   │   ├── parser.go
│   │   └── token.go
│   └── testdata
│       ├── files
│       │   └── initial.sql
│       └── testdata.go
└── sql2.go
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 Pedro Henrique Pessoa
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # sql2go
2 |
3 | sql2go is a lightweight [Go](https://go.dev/) library that parses named queries from SQL files into the string fields of corresponding Go structs.
4 |
5 | ## Installation
6 |
7 | ```bash
8 | go get -u github.com/phenpessoa/sql2go
9 | ```
10 |
11 | ## Usage
12 |
13 | ### API
14 |
15 | The library exposes a single function:
16 |
17 | ```go
18 | func Parse[T any](dst *T, r io.Reader) error
19 | ```
20 |
21 | - `dst`: A pointer to a struct whose string fields are named after the queries in the SQL file.
22 | - `r`: An `io.Reader` that supplies the SQL file to be parsed.
23 |
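   | For reference, here is a minimal, self-contained sketch of calling `Parse` on an in-memory SQL string (the `GetUser` query and struct field are purely illustrative):
   |
   | ```go
   | package main
   |
   | import (
   | 	"fmt"
   | 	"strings"
   |
   | 	"github.com/phenpessoa/sql2go"
   | )
   |
   | // queries holds one string field per named query in the SQL input.
   | type queries struct {
   | 	GetUser string
   | }
   |
   | func main() {
   | 	const sql = "-- name: GetUser\nSELECT * FROM users WHERE id = $1;"
   |
   | 	var q queries
   | 	if err := sql2go.Parse(&q, strings.NewReader(sql)); err != nil {
   | 		panic(err)
   | 	}
   |
   | 	fmt.Println(q.GetUser) // SELECT * FROM users WHERE id = $1;
   | }
   | ```
   |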
24 | ### Syntax
25 |
26 | The expected syntax in the SQL file is as follows:
27 |
28 | ```sql
29 | -- name: QueryName
30 | SELECT * FROM table_name;
31 | ```
32 |
33 | This lib does not validate the query. It simply passes the query as is (minus extraneous whitespace) to the corresponding field in the struct.
34 |
35 | Check the [evaluator_test](https://github.com/phenpessoa/sql2go/blob/main/internal/parser/evaluator_test.go) file and the files in the [files](https://github.com/phenpessoa/sql2go/tree/main/internal/testdata/files) directory to better understand how a file will be parsed.
36 |
37 | The tests cover a lot of edge cases that you can check.
38 |
39 | ### Options
40 |
41 | You can pass options to the parser to shape the output.
42 |
43 | The syntax to pass an option is as follows:
44 |
45 | ```sql
46 | -- name: QueryName
47 | -- options: option-1 option-2
48 | SELECT * FROM table_name;
49 | ```
50 |
51 | Note that options must be passed on a new line immediately after the `-- name` line.
52 |
53 | #### Available Options:
54 |
55 | - `remove-trailing-semicolon`
56 |   - This option strips the trailing semicolon from the parsed query.
57 |
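   | As a quick, hypothetical illustration of the option's effect (the `Cleanup` query exists only for this snippet; the full example in the next section shows the same behavior with `UpdateUsers`):
   |
   | ```go
   | package main
   |
   | import (
   | 	"fmt"
   | 	"strings"
   |
   | 	"github.com/phenpessoa/sql2go"
   | )
   |
   | func main() {
   | 	// Hypothetical query used only to illustrate remove-trailing-semicolon.
   | 	const sql = "-- name: Cleanup\n" +
   | 		"-- options: remove-trailing-semicolon\n" +
   | 		"DELETE FROM sessions;"
   |
   | 	var q struct{ Cleanup string }
   | 	if err := sql2go.Parse(&q, strings.NewReader(sql)); err != nil {
   | 		panic(err)
   | 	}
   |
   | 	fmt.Println(q.Cleanup) // DELETE FROM sessions (no trailing semicolon)
   | }
   | ```
   |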
58 | ### Examples
59 |
60 | queries.sql:
61 |
62 | ```sql
63 | -- name: GetUsers
64 | SELECT * FROM users;
65 |
66 | -- name: DeleteUsers
67 | -- Warning! This will erase all users in the system
68 | DELETE FROM users;
69 |
70 | -- name: DeleteUser
71 | -- Deletes a specific user from the system
72 | DELETE FROM users
73 | -- Just to demonstrate whitespace and in-query comments
74 | WHERE id = 123;
75 |
76 | -- name: UpdateUsers
77 | -- options: remove-trailing-semicolon
78 | UPDATE users SET foo = 'bar';
79 | ```
80 |
81 | main.go:
82 |
83 | ```go
84 | package main
85 |
86 | import (
87 | _ "embed"
88 | "fmt"
89 | "strings"
90 |
91 | "github.com/phenpessoa/sql2go"
92 | )
93 |
94 | //go:embed queries.sql
95 | var queryFile string
96 |
97 | type Queries struct {
98 | GetUsers string
99 | DeleteUsers string
100 | DeleteUser string
101 | UpdateUsers string
102 | }
103 |
104 | var queries Queries
105 |
106 | func main() {
107 | if err := sql2go.Parse(&queries, strings.NewReader(queryFile)); err != nil {
108 | panic(err)
109 | }
110 |
111 | // Now, queries will be populated with SQL queries.
112 |
113 | fmt.Println(
114 | queries.GetUsers == "SELECT * FROM users;",
115 | ) // true
116 |
117 | fmt.Println(
118 | queries.DeleteUsers == "DELETE FROM users;",
119 | ) // true
120 |
121 | fmt.Println(
122 | queries.DeleteUser == "DELETE FROM users\nWHERE id = 123;",
123 | ) // true
124 |
125 | fmt.Println(
126 | queries.UpdateUsers == "UPDATE users SET foo = 'bar'",
127 | ) // true (notice the missing semicolon)
128 | }
129 | ```
130 |
131 | ## Why not sqlc?
132 |
133 | [sqlc](https://github.com/sqlc-dev/sqlc) is amazing, but in my opinion it comes with two drawbacks:
134 |
135 | - It is not 100% compatible with Windows
136 |   - This can be solved with Docker
137 | - It doesn't work flawlessly for complex queries and schemas
138 |   - The fact that it checks every query and has to parse the schema, while great, becomes a problem when your schema or queries are complex. For example, it does not handle PostgreSQL enums properly and parses the type as an `interface{}`.
139 |
140 | On the other hand, it writes a lot of boilerplate for you, which is amazing. With sql2go you will still have to write the database code manually. The main goal is to keep syntax highlighting by using a .sql file and to avoid the headaches of having to use backticks in your query strings. Go's string literals are to blame here. There is [a proposal](https://github.com/golang/go/issues/32590) to improve the experience, but it has not been accepted yet.
141 |
--------------------------------------------------------------------------------
/go.mod:
--------------------------------------------------------------------------------
1 | module github.com/phenpessoa/sql2go
2 |
3 | go 1.21.4
4 |
--------------------------------------------------------------------------------
/internal/parser/evaluator.go:
--------------------------------------------------------------------------------
1 | package parser
2 |
3 | import (
4 | "errors"
5 | "fmt"
6 | "io"
7 | "reflect"
8 | "strings"
9 | )
10 |
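   | // Parse reads SQL from r, splits it into named queries, and assigns each
   | // query to the string field of dst whose name matches the query name.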
11 | func Parse[T any](dst *T, r io.Reader) error {
12 | v := reflect.ValueOf(dst).Elem()
13 |
14 | data, err := io.ReadAll(r)
   | if err != nil {
   | 	return fmt.Errorf("sql2go: failed to read input: %w", err)
   | }
15 | input := string(data)
16 | l := newLexer(input)
17 | p := newParser(l)
18 | tree := p.parse()
19 |
20 | i := 0
21 | for i < len(tree.nodes) {
22 | n := tree.nodes[i]
23 | switch t := n.(type) {
24 | case nodeName:
25 | if !t.valid {
26 | return errors.New("sql2go: found an empty name")
27 | }
28 |
29 | field := v.FieldByName(t.val)
30 | if !field.IsValid() || !field.CanSet() || !field.CanInterface() {
31 | return fmt.Errorf(
32 | "sql2go: field not found or invalid in dst struct: %s",
33 | t.val,
34 | )
35 | }
36 |
37 | if _, ok := field.Interface().(string); !ok {
38 | return fmt.Errorf(
39 | "sql2go: field %s is not of type string", t.val,
40 | )
41 | }
42 |
43 | var (
44 | query strings.Builder
45 | lastByte byte
46 | opts options
47 | lastNode node = n
48 | secondToLastNode node = nil
49 | )
50 | i++
51 | for _, nn := range tree.nodes[i:] {
52 | switch t := nn.(type) {
53 | case nodeOptions:
54 | _, ok := lastNode.(nodeNewLine)
55 | if !ok || secondToLastNode != n {
56 | continue
57 | }
58 | opts = parseOptions(t.val)
59 | case nodeEndOfQuery:
60 | if opts.RemoveTrailingSemicolon {
61 | continue
62 | }
63 | query.Grow(1)
64 | query.WriteByte(';')
65 | lastByte = ';'
66 | case nodeName:
67 | goto out
68 | case nodeQuery:
69 | val := strings.TrimSpace(t.val)
70 | if lastByte == '\'' || lastByte == '"' || lastByte == '`' {
71 | query.Grow(len(val) + 1)
72 | query.WriteRune(' ')
73 | } else {
74 | query.Grow(len(val))
75 | }
76 | query.WriteString(val)
77 | lastByte = val[len(val)-1]
78 | case nodeStringLiteral:
79 | if lastByte != '\n' && lastByte != ' ' {
80 | query.Grow(len(t.val) + 3)
81 | query.WriteByte(' ')
82 | } else {
83 | query.Grow(len(t.val) + 2)
84 | }
85 | query.WriteByte('\'')
86 | query.WriteString(t.val)
87 | query.WriteByte('\'')
88 | lastByte = '\''
89 | case nodeIdentifier:
90 | if lastByte != '\n' && lastByte != ' ' {
91 | query.Grow(len(t.val) + 3)
92 | query.WriteByte(' ')
93 | } else {
94 | query.Grow(len(t.val) + 2)
95 | }
96 | query.WriteString(t.tok.literal)
97 | query.WriteString(t.val)
98 | query.WriteString(t.tok.literal)
99 | lastByte = t.tok.literal[0]
100 | case nodeNewLine:
101 | if lastByte != '\n' {
102 | query.Grow(1)
103 | query.WriteByte('\n')
104 | }
105 | lastByte = '\n'
106 | }
107 |
108 | secondToLastNode = lastNode
109 | lastNode = nn
110 | i++
111 | }
112 |
113 | out:
114 | field.Set(reflect.ValueOf(strings.TrimSpace(query.String())))
115 | default:
116 | i++
117 | }
118 | }
119 |
120 | return nil
121 | }
122 |
123 | type options struct {
124 | RemoveTrailingSemicolon bool
125 | }
126 |
127 | func parseOptions(str string) options {
128 | opts := options{}
129 | str = strings.ToLower(str)
130 |
131 | if strings.Contains(str, "remove-trailing-semicolon") {
132 | opts.RemoveTrailingSemicolon = true
133 | }
134 |
135 | return opts
136 | }
137 |
--------------------------------------------------------------------------------
/internal/parser/evaluator_test.go:
--------------------------------------------------------------------------------
1 | package parser
2 |
3 | import (
4 | "reflect"
5 | "testing"
6 |
7 | "github.com/phenpessoa/sql2go/internal/testdata"
8 | )
9 |
10 | func TestParser(t *testing.T) {
11 | type queries struct {
12 | Foo string
13 | Bar string
14 | Baz string
15 | Qux string
16 | Quux string
17 | Corge string
18 | Grault string
19 | HardToLex string
20 | Empty string
21 | Garply string
22 | Waldo string
23 | Fred string
24 | Whatif string
25 | WhatAboutThis string
26 | WhatAboutThis2 string
27 | Plugh string
28 | Xyzzy string
29 | Thud string
30 | AnotherMultiline string
31 | RemoveTrailingSemicolon string
32 | ShouldNotRemoveTrailingSemicolon string
33 | InvalidOptions string
34 | RemoveTrailingSemicolon2 string
35 | RemoveTrailingSemicolon3 string
36 | }
37 |
38 | want := queries{
39 | Foo: "SELECT * FROM foo;",
40 | Bar: "SELECT * FROM bar\nWHERE id = 123;",
41 | Baz: "SELECT\n*\nFROM\nbaz\nWHERE\nbaz = 123 AND\nbaz = baz;",
42 | Qux: "SELECT * FROM qux;",
43 | Quux: "SELECT * FROM quux\nWHERE quux = 123;",
44 | Corge: "SELECT '--' FROM corge;",
45 | Grault: "SELECT '\n-- name: Grault\n' FROM grault;",
46 | HardToLex: "SELECT;",
47 | Empty: "",
48 | Garply: "SELECT 'garply-string-literal' FROM garply;",
49 | Waldo: "SELECT \"waldo_identifier_1\" FROM waldo;",
50 | Fred: "SELECT `fred_identifier_2` FROM fred;",
51 | Whatif: "SELECT * FROM whatif;",
52 | WhatAboutThis: "SELECT 'foo--hard--string--literal' FROM whatAboutThis; `foo\"_identifier_3'`",
53 | WhatAboutThis2: "SELECT\n`foo\"_identifier_4`",
54 | Plugh: "SELECT * FROM plugh",
55 | Xyzzy: "SELECT * FROM xyzzy",
56 | Thud: "SELECT * FROM thud;\nSELECT * FROM thud2;",
57 | AnotherMultiline: "SELECT '\n-- name: AnotherMultiline\n' FROM AnotherMultiline\nWHERE id = 123;",
58 | RemoveTrailingSemicolon: "SELECT * FROM semicolon",
59 | ShouldNotRemoveTrailingSemicolon: "SELECT * FROM semicolon;",
60 | InvalidOptions: "SELECT * FROM options\nWHERE 1 = 1;",
61 | RemoveTrailingSemicolon2: "SELECT * FROM semicolon",
62 | RemoveTrailingSemicolon3: "SELECT * FROM semicolon",
63 | }
64 |
65 | f, err := testdata.TestFS.Open("files/initial.sql")
66 | if err != nil {
67 | t.Errorf("failed to open initial.sql: %s", err)
68 | t.FailNow()
69 | return
70 | }
71 |
72 | var got queries
73 | if err := Parse(&got, f); err != nil {
74 | t.Errorf("failed to parse initial.sql: %s", err)
75 | t.FailNow()
76 | return
77 | }
78 |
79 | if got != want {
80 | t.Error("initial.sql not parsed properly\n")
81 | typ := reflect.TypeOf(got)
82 | for i := 0; i < typ.NumField(); i++ {
83 | f := typ.Field(i)
84 | fv1 := reflect.ValueOf(got).Field(i).Interface()
85 | fv2 := reflect.ValueOf(want).Field(i).Interface()
86 |
87 | if fv1 == fv2 {
88 | continue
89 | }
90 |
91 | t.Errorf(
92 | "field: %s\nwanted: %#+v\ngot: %#+v\n",
93 | f.Name, fv2, fv1,
94 | )
95 | }
96 |
97 | t.FailNow()
98 | return
99 | }
100 | }
101 |
--------------------------------------------------------------------------------
/internal/parser/lexer.go:
--------------------------------------------------------------------------------
1 | package parser
2 |
3 | import (
4 | "strings"
5 | )
6 |
7 | type lexer struct {
8 | input string
9 | pos int
10 | readPos int
11 | ch byte
12 | }
13 |
14 | func newLexer(input string) *lexer {
15 | l := &lexer{input: strings.ReplaceAll(input, "\r\n", "\n")}
16 | l.readChar()
17 | return l
18 | }
19 |
20 | func (l *lexer) readChar() {
21 | if l.readPos >= len(l.input) {
22 | l.ch = 0
23 | } else {
24 | l.ch = l.input[l.readPos]
25 | }
26 | l.pos = l.readPos
27 | l.readPos++
28 | }
29 |
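   | // moveBack rewinds the lexer one position so that the byte that terminated
   | // readRawInput is not skipped by the readChar call at the end of nextToken.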
30 | func (l *lexer) moveBack() {
31 | if l.pos > 0 && l.pos < len(l.input) {
32 | l.readPos = l.pos
33 | l.pos--
34 | l.ch = l.input[l.readPos]
35 | } else {
36 | l.ch = 0
37 | }
38 | }
39 |
40 | func (l *lexer) peekChar() byte {
41 | if l.readPos >= len(l.input) {
42 | return 0
43 | }
44 | return l.input[l.readPos]
45 | }
46 |
47 | const (
48 | nameBytes = " name: "
49 | )
50 |
51 | // isName reports whether the lexer is positioned at a name token;
52 | // if so, it consumes the matching bytes.
53 | func (l *lexer) isName() bool {
54 | counter := 0
55 | for counter < len(nameBytes) && l.readPos+counter < len(l.input) &&
56 | l.input[l.readPos+counter] == nameBytes[counter] {
57 | counter++
58 | }
59 | if counter == len(nameBytes) {
60 | for i := 0; i < len(nameBytes); i++ {
61 | l.readChar()
62 | }
63 | return true
64 | }
65 | return false
66 | }
67 |
68 | const (
69 | optionsBytes = " options: "
70 | )
71 |
72 | // isOptions reports whether the lexer is positioned at an options token;
73 | // if so, it consumes the matching bytes.
74 | func (l *lexer) isOptions() bool {
75 | counter := 0
76 | for counter < len(optionsBytes) && l.readPos+counter < len(l.input) &&
77 | l.input[l.readPos+counter] == optionsBytes[counter] {
78 | counter++
79 | }
80 | if counter == len(optionsBytes) {
81 | for i := 0; i < len(optionsBytes); i++ {
82 | l.readChar()
83 | }
84 | return true
85 | }
86 | return false
87 | }
88 |
89 | func (l *lexer) skipWhitespace() {
90 | for l.ch == ' ' || l.ch == '\t' || l.ch == '\r' {
91 | l.readChar()
92 | }
93 | }
94 |
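   | // readRawInput consumes bytes until it reaches a comment start ("--"), a
   | // quote, a semicolon, a newline, or EOF, and returns the bytes read so far.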
95 | func (l *lexer) readRawInput() string {
96 | pos := l.pos
97 | outer:
98 | for {
99 | switch l.ch {
100 | case '-':
101 | if l.peekChar() == '-' {
102 | break outer
103 | }
104 | case ';', '\'', '"', '`':
105 | break outer
106 | case '\n', 0:
107 | break outer
108 | }
109 | l.readChar()
110 | }
111 | data := l.input[pos:l.pos]
112 | l.moveBack()
113 | return data
114 | }
115 |
116 | func (l *lexer) nextToken() token {
117 | var t token
118 |
119 | l.skipWhitespace()
120 |
121 | switch l.ch {
122 | case '-':
123 | if l.peekChar() == '-' {
124 | l.readChar()
125 | switch {
126 | case l.isName():
127 | t.literal = "-- name: "
128 | t.typ = tokenTypeName
129 | case l.isOptions():
130 | t.literal = "-- options: "
131 | t.typ = tokenTypeOptions
132 | default:
133 | t.literal = "--"
134 | t.typ = tokenTypeComment
135 | }
136 | } else {
137 | t.typ = tokenTypeUndefined
138 | }
139 | case '\n':
140 | t.typ = tokenTypeNewLine
141 | t.literal = "\n"
142 | case '"', '`':
143 | t.typ = tokenTypeIdentifier
144 | t.literal = string(l.ch)
145 | case ';':
146 | t.typ = tokenTypeSemicolon
147 | t.literal = ";"
148 | case 0:
149 | t.typ = tokenTypeEOF
150 | case '\'':
151 | t.typ = tokenTypeStringLiteral
152 | t.literal = "'"
153 | default:
154 | t.typ = tokenTypeRawInput
155 | t.literal = l.readRawInput()
156 | }
157 |
158 | l.readChar()
159 | return t
160 | }
161 |
--------------------------------------------------------------------------------
/internal/parser/parser.go:
--------------------------------------------------------------------------------
1 | package parser
2 |
3 | import (
4 | "strings"
5 | )
6 |
7 | type node interface{}
8 |
9 | type nodeComment struct {
10 | valid bool
11 | val string
12 | }
13 |
14 | type nodeName struct {
15 | valid bool
16 | val string
17 | }
18 |
19 | type nodeOptions struct {
20 | valid bool
21 | val string
22 | }
23 |
24 | type nodeStringLiteral struct {
25 | valid bool
26 | val string
27 | }
28 |
29 | type nodeIdentifier struct {
30 | valid bool
31 | val string
32 | tok token
33 | }
34 |
35 | type nodeQuery struct {
36 | valid bool
37 | val string
38 | }
39 |
40 | type nodeEndOfQuery struct{}
41 |
42 | type nodeNewLine struct{}
43 |
44 | type ast struct {
45 | nodes []node
46 | }
47 |
48 | type parser struct {
49 | l *lexer
50 | lastTok token
51 | curTok token
52 | peekTok token
53 | }
54 |
55 | func newParser(l *lexer) *parser {
56 | p := &parser{l: l}
57 | return p
58 | }
59 |
60 | func (p *parser) nextToken() {
61 | p.lastTok = p.curTok
62 | p.curTok = p.peekTok
63 | p.peekTok = p.l.nextToken()
64 | }
65 |
66 | func (p *parser) parse() *ast {
67 | ast := new(ast)
68 | p.findFirstName()
69 | for p.curTok.typ != tokenTypeEOF {
70 | stmt := p.parseStatement()
71 | if stmt != nil {
72 | ast.nodes = append(ast.nodes, stmt)
73 | }
74 | }
75 | return ast
76 | }
77 |
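   | // findFirstName advances the parser until the current token is a "-- name:"
   | // token followed by the query name, skipping anything before it (including
   | // name markers that appear right after string literals or identifiers).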
78 | func (p *parser) findFirstName() {
79 | for {
80 | if p.lastTok.typ != tokenTypeStringLiteral &&
81 | p.lastTok.typ != tokenTypeIdentifier &&
82 | p.curTok.typ == tokenTypeName &&
83 | p.peekTok.typ == tokenTypeRawInput {
84 | return
85 | }
86 | p.nextToken()
87 | if p.curTok.typ == tokenTypeEOF {
88 | return
89 | }
90 | }
91 | }
92 |
93 | func (p *parser) parseStatement() node {
94 | _switch:
95 | switch p.curTok.typ {
96 | case tokenTypeName:
97 | return p.parseNameStatement()
98 | case tokenTypeOptions:
99 | return p.parseOptionsStatement()
100 | case tokenTypeComment:
101 | return p.parseCommentStatement()
102 | case tokenTypeStringLiteral:
103 | return p.parseStringLiteralStatement()
104 | case tokenTypeIdentifier:
105 | return p.parseIdentifierStatement()
106 | case tokenTypeRawInput:
107 | return p.parseQueryStatement()
108 | case tokenTypeEOF:
109 | return nil
110 | case tokenTypeSemicolon:
111 | return p.parseEndOfQueryStatement()
112 | case tokenTypeNewLine:
113 | return p.parseNewLineStatement()
114 | default:
115 | p.nextToken()
116 | goto _switch
117 | }
118 | }
119 |
120 | func (p *parser) parseNameStatement() nodeName {
121 | if p.peekTok.typ == tokenTypeRawInput {
122 | node := nodeName{
123 | valid: true,
124 | val: strings.TrimSpace(p.peekTok.literal),
125 | }
126 | p.nextToken()
127 | p.nextToken()
128 | return node
129 | }
130 | return nodeName{}
131 | }
132 |
133 | func (p *parser) parseOptionsStatement() nodeOptions {
134 | if p.peekTok.typ == tokenTypeRawInput {
135 | node := nodeOptions{
136 | valid: true,
137 | val: strings.TrimSpace(p.peekTok.literal),
138 | }
139 | p.nextToken()
140 | p.nextToken()
141 | return node
142 | }
143 | return nodeOptions{}
144 | }
145 |
146 | func (p *parser) parseCommentStatement() nodeComment {
147 | var b strings.Builder
148 |
149 | p.nextToken()
150 |
151 | for {
152 | if p.curTok.typ == tokenTypeNewLine || p.curTok.typ == tokenTypeEOF {
153 | break
154 | }
155 | b.Grow(len(p.curTok.literal))
156 | b.WriteString(p.curTok.literal)
157 | p.nextToken()
158 | }
159 |
160 | return nodeComment{
161 | valid: true,
162 | val: b.String(),
163 | }
164 | }
165 |
166 | func (p *parser) parseStringLiteralStatement() nodeStringLiteral {
167 | var b strings.Builder
168 |
169 | p.nextToken()
170 |
171 | for {
172 | if p.curTok.typ == tokenTypeStringLiteral {
173 | p.nextToken()
174 | break
175 | }
176 |
177 | if p.curTok.typ == tokenTypeEOF {
178 | break
179 | }
180 |
181 | b.Grow(len(p.curTok.literal))
182 | b.WriteString(p.curTok.literal)
183 | p.nextToken()
184 | }
185 |
186 | return nodeStringLiteral{
187 | valid: true,
188 | val: b.String(),
189 | }
190 | }
191 |
192 | func (p *parser) parseIdentifierStatement() nodeIdentifier {
193 | seeking := p.curTok.literal
194 | tok := p.curTok
195 |
196 | var b strings.Builder
197 |
198 | p.nextToken()
199 |
200 | for {
201 | if p.curTok.typ == tokenTypeIdentifier && p.curTok.literal == seeking {
202 | p.nextToken()
203 | break
204 | }
205 |
206 | if p.curTok.typ == tokenTypeEOF {
207 | break
208 | }
209 |
210 | b.Grow(len(p.curTok.literal))
211 | b.WriteString(p.curTok.literal)
212 | p.nextToken()
213 | }
214 |
215 | return nodeIdentifier{
216 | valid: true,
217 | val: b.String(),
218 | tok: tok,
219 | }
220 | }
221 |
222 | func (p *parser) parseQueryStatement() nodeQuery {
223 | var b strings.Builder
224 |
225 | for {
226 | if p.curTok.typ == tokenTypeEOF {
227 | break
228 | }
229 |
230 | if p.curTok.typ != tokenTypeRawInput {
231 | break
232 | }
233 |
234 | b.Grow(len(p.curTok.literal))
235 | b.WriteString(p.curTok.literal)
236 | p.nextToken()
237 | }
238 |
239 | return nodeQuery{
240 | valid: true,
241 | val: b.String(),
242 | }
243 | }
244 |
245 | func (p *parser) parseEndOfQueryStatement() nodeEndOfQuery {
246 | p.nextToken()
247 | return nodeEndOfQuery{}
248 | }
249 |
250 | func (p *parser) parseNewLineStatement() nodeNewLine {
251 | p.nextToken()
252 | return nodeNewLine{}
253 | }
254 |
--------------------------------------------------------------------------------
/internal/parser/token.go:
--------------------------------------------------------------------------------
1 | package parser
2 |
3 | type tokenType uint8
4 |
5 | const (
6 | tokenTypeUndefined tokenType = iota
7 | tokenTypeEOF
8 |
9 | // -- comment
10 | tokenTypeComment
11 |
12 | // ;
13 | tokenTypeSemicolon
14 |
15 | // -- name:
16 | tokenTypeName
17 |
18 | // -- options:
19 | tokenTypeOptions
20 |
21 | // '
22 | tokenTypeStringLiteral
23 |
24 | // " or `
25 | tokenTypeIdentifier
26 |
27 | // \n
28 | tokenTypeNewLine
29 |
30 | // anything else
31 | tokenTypeRawInput
32 | )
33 |
34 | type token struct {
35 | typ tokenType
36 | literal string
37 | }
38 |
--------------------------------------------------------------------------------
/internal/testdata/files/initial.sql:
--------------------------------------------------------------------------------
1 | -- name: Foo
2 | -- This is a comment that must be ignored
3 | SELECT * FROM foo;
4 |
5 | -- name: Bar
6 | SELECT * FROM bar
7 | -- hard coded id for testing
8 | WHERE id = 123;
9 |
10 | -- name: Baz
11 | SELECT
12 | *
13 | FROM
14 | -- test trimming of whitespaces and comments
15 | baz
16 | WHERE
17 | baz = 123 AND
18 | baz = baz;
19 |
20 | -- name: Qux
21 | -- break;
22 | SELECT * FROM qux;
23 |
24 | -- name: Quux
25 | SELECT * FROM quux -- this is a comment;
26 | WHERE quux = 123;
27 |
28 | -- Random comments
29 | -- will the parser handle these properly?
30 |
31 | -- name: Corge
32 | SELECT '--' FROM corge; -- Last comment;
33 |
34 | -- name: Grault
35 | SELECT '
36 | -- name: Grault
37 | ' FROM grault;
38 |
39 | -- name: HardToLex -- this-is-a-comment;
40 | SELECT;
41 |
42 | -- name: Empty
43 |
44 | -- name: Garply
45 | SELECT 'garply-string-literal' FROM garply;
46 |
47 | -- name: Waldo
48 | SELECT "waldo_identifier_1" FROM waldo;
49 |
50 | -- name: Fred
51 | SELECT `fred_identifier_2` FROM fred;
52 |
53 | -- name: Whatif -- let's see
54 | SELECT * FROM whatif;
55 |
56 | -- name: WhatAboutThis
57 | SELECT 'foo--hard--string--literal' FROM whatAboutThis; `foo"_identifier_3'`
58 |
59 | -- name: WhatAboutThis2
60 | SELECT
61 |
62 | `foo"_identifier_4`
63 |
64 | -- name: Plugh
65 | SELECT * FROM plugh
66 |
67 | -- name: Xyzzy
68 | SELECT * FROM xyzzy
69 |
70 | -- name: Thud
71 | SELECT * FROM thud;
72 | SELECT * FROM thud2;
73 |
74 | -- name: AnotherMultiline
75 | SELECT '
76 | -- name: AnotherMultiline
77 | ' FROM AnotherMultiline -- this is a comment
78 | -- yet another comment
79 | WHERE id = 123;
80 |
81 | -- options: foobar
82 |
83 | -- name: RemoveTrailingSemicolon
84 | -- options: remove-trailing-semicolon
85 | SELECT * FROM semicolon;
86 |
87 | -- name: ShouldNotRemoveTrailingSemicolon
88 | SELECT * FROM semicolon;
89 |
90 | -- name: InvalidOptions
91 | SELECT * FROM options
92 | -- options: remove-trailing-semicolon
93 | WHERE 1 = 1;
94 |
95 | -- name: RemoveTrailingSemicolon2
96 | -- options: remove-trailing-semicolon
97 | SELECT * FROM semicolon
98 |
99 | -- name: RemoveTrailingSemicolon3
100 | -- options: remove-trailing-semicolon
101 | SELECT * FROM semicolon;
--------------------------------------------------------------------------------
/internal/testdata/testdata.go:
--------------------------------------------------------------------------------
1 | package testdata
2 |
3 | import "embed"
4 |
5 | //go:embed files/*.sql
6 | var TestFS embed.FS
7 |
--------------------------------------------------------------------------------
/sql2.go:
--------------------------------------------------------------------------------
1 | package sql2go
2 |
3 | import (
4 | "io"
5 |
6 | "github.com/phenpessoa/sql2go/internal/parser"
7 | )
8 |
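   | // Parse reads named SQL queries from r and assigns each query to the string
   | // field of dst with the same name as the query.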
9 | func Parse[T any](dst *T, r io.Reader) error {
10 | return parser.Parse(dst, r)
11 | }
12 |
--------------------------------------------------------------------------------