├── .deepsource.toml ├── .github ├── ISSUE_TEMPLATE.md ├── PULL_REQUEST_TEMPLATE.md ├── dependabot.yml └── workflows │ ├── codeql-analysis.yml │ └── test.yml ├── CHANGELOGS.md ├── LICENSE ├── README.md ├── _images └── logo.png ├── example ├── main.go └── vcl.vcl ├── go.mod ├── go.sum ├── internal ├── ast │ └── ast.go ├── decoder │ ├── decoder.go │ └── decoder_test.go ├── lexer │ ├── lexer.go │ └── lexer_test.go ├── parser │ ├── parser.go │ └── parser_test.go ├── schema │ └── schema.go ├── token │ └── token.go └── traversal │ ├── traversal.go │ └── traversal_test.go └── vcl ├── vcl.go └── vcl_test.go /.deepsource.toml: -------------------------------------------------------------------------------- 1 | version = 1 2 | 3 | test_patterns = [ 4 | '**/*_test.go' 5 | ] 6 | 7 | [[analyzers]] 8 | name = 'go' 9 | enabled = true 10 | 11 | [analyzers.meta] 12 | import_path = 'github.com/KeisukeYamashita/i' 13 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## What 2 | 3 | 4 | ## Why 5 | 6 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## What 2 | 3 | 4 | ## Why 5 | 6 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # Maintain dependencies for Go 4 | - package-ecosystem: gomod 5 | directory: / 6 | schedule: 7 | interval: monthly 8 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL" 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | 
pull_request: 7 | branches: [ master ] 8 | schedule: 9 | - cron: '16 14 * * 4' 10 | 11 | jobs: 12 | analyze: 13 | name: Analyze 14 | runs-on: ubuntu-latest 15 | permissions: 16 | actions: read 17 | contents: read 18 | security-events: write 19 | 20 | strategy: 21 | fail-fast: false 22 | matrix: 23 | language: [ 'go' ] 24 | 25 | steps: 26 | - name: Checkout repository 27 | uses: actions/checkout@v2 28 | 29 | - name: Initialize CodeQL 30 | uses: github/codeql-action/init@v1 31 | with: 32 | languages: ${{ matrix.language }} 33 | 34 | - name: Autobuild 35 | uses: github/codeql-action/autobuild@v1 36 | 37 | - name: Perform CodeQL Analysis 38 | uses: github/codeql-action/analyze@v1 39 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | 9 | jobs: 10 | test: 11 | name: Test 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Set up Go 15 | uses: actions/setup-go@v2 16 | with: 17 | go-version: 1.17.x 18 | id: go 19 | 20 | - name: Check out code into the Go module directory 21 | uses: actions/checkout@v1 22 | 23 | - name: Download modules 24 | run: go get -d -v ./... 25 | 26 | - name: Test 27 | run: go test -coverpkg=./... -coverprofile=coverage.txt -v ./... 28 | 29 | - name: Send test coverage to Codecov 30 | uses: codecov/codecov-action@v1 31 | with: 32 | token: ${{ secrets.CODECOV_TOKEN }} 33 | -------------------------------------------------------------------------------- /CHANGELOGS.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
7 | 8 | ## Released 9 | 10 | ## 0.3.0 - 2019-12-19 11 | 12 | ### Add 13 | 14 | * `map[string]interface{}` output 15 | 16 | ## 0.2.0 - 2019-12-16 17 | 18 | ### Add 19 | 20 | * `director` block support 21 | * `table` block support 22 | * `comment` flag 23 | 24 | ## 0.1.0 - 2019-12-11 25 | 26 | ### Add 27 | 28 | * Initial commit 29 | * Add fundamental decode features 30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # VCL 2 | 3 | ``` 4 | :::::::: :::::::: ::: ::: :::::::: ::: 5 | :+: :+: :+: :+: :+: :+: :+: :+: :+: 6 | +:+ +:+ +:+ +:+ +:+ +:+ +:+ 7 | :#: +#+ +:+ +#+ +:+ +#+ +#+ 8 | +#+ +#+# +#+ +#+ +#+ +#+ +#+ +#+ 9 | #+# #+# #+# #+# #+#+#+# #+# #+# #+# 10 | ######## ######## ### ######## ########## 11 | ``` 12 | 13 | > VCL parser written in Go 14 | 15 | 16 | [![GitHub Actions][github-actions-badge]][github-actions] 17 | [![GoDoc][godoc-badge]][godoc] 18 | 19 | [![codecov](https://codecov.io/gh/KeisukeYamashita/go-vcl/branch/master/graph/badge.svg)](https://codecov.io/gh/KeisukeYamashita/go-vcl) 20 | [![Go Report Card][go-report-card-badge]][go-report-card] 21 | [![GolangCI][golangci-badge]][golangci] 22 | 23 | [![License][license-badge]][license] 24 | [![Dependabot][dependabot-badge]][dependabot] 25 | 26 | [![DeepSource][deepsource-badge]][deepsource] 27 | 28 | ## Usage 29 | 30 | ### Decode 31 | 32 | Let's say you have a VCL file. 
33 | 34 | ```vcl 35 | acl purge_ip { 36 | "localhost"; 37 | "127.0.0.1"; 38 | } 39 | ``` 40 | 41 | Define a Go struct describing how you want to retrieve the VCL expressions and attributes. 42 | 43 | ```golang 44 | type Root struct { 45 | ACLs []*ACL `vcl:"acl,block"` 46 | } 47 | 48 | type ACL struct { 49 | Type string `vcl:"type,label"` 50 | Endpoints []string `vcl:",flat"` 51 | } 52 | ``` 53 | 54 | Then decode your input like the following. 55 | 56 | ```golang 57 | var r Root 58 | err := vcl.Decode(b, &r) 59 | fmt.Println(r.ACLs[0].Type) 60 | fmt.Println(r.ACLs[0].Endpoints) 61 | ``` 62 | 63 | ```console 64 | $ go run main.go 65 | => "purge_ip" 66 | => []string{"localhost","127.0.0.1"} 67 | ``` 68 | 69 | ## Supported tags 70 | 71 | I am not a VCL master, so there may be unsupported features. 72 | 73 | These are the struct tags you can use for your input. 74 | 75 | * `block`: Represents a unit of your block like `acl`, `sub`, etc... 76 | * `label`: The label of your block. 77 | * `flat`: Represents an expression field 78 | * `comment`: Get comments 79 | * `attr`: (Default) Attribute of your block 80 | 81 | ## Releases 82 | 83 | Release tags will be based on [Semantic Versioning 2.0.0](https://semver.org/). 84 | See [CHANGELOGS.md](./CHANGELOGS.md). 85 | 86 | ## How to Contribute 87 | 88 | Any contributions are always welcome. 89 | 90 | * Raise an issue. 91 | * Create a PR. 92 | 93 | Simple:) 94 | 95 | ## License 96 | 97 | go-vcl is released under the Apache 2.0 license. 98 | © 2019 KeisukeYamashita. 
99 | 100 | ## Author 101 | 102 | * [KeisukeYamashita](https://github.com/KeisukeYamashita) 103 | 104 | 105 | 106 | 107 | [dependabot]: https://dependabot.com 108 | [dependabot-badge]: https://badgen.net/badge/icon/Dependabot?icon=dependabot&label&color=blue 109 | 110 | [license]: LICENSE 111 | [license-badge]: https://img.shields.io/badge/license-Apache%202.0-%23E93424 112 | 113 | [godoc]: https://godoc.org/github.com/KeisukeYamashita/go-vcl 114 | [godoc-badge]: https://img.shields.io/badge/godoc.org-reference-blue.svg 115 | 116 | [go-report-card]: https://goreportcard.com/report/github.com/KeisukeYamashita/go-vcl 117 | [go-report-card-badge]: https://goreportcard.com/badge/github.com/KeisukeYamashita/go-vcl 118 | 119 | [deepsource]: https://deepsource.io/gh/KeisukeYamashita/go-vcl/?ref=repository-badge 120 | [deepsource-badge]: https://static.deepsource.io/deepsource-badge-light.svg 121 | 122 | [github-actions]: https://github.com/KeisukeYamashita/go-vcl/actions 123 | [github-actions-badge]: https://github.com/KeisukeYamashita/go-vcl/workflows/Test/badge.svg 124 | 125 | [golangci]: https://golangci.com/r/github.com/KeisukeYamashita/go-vcl 126 | [golangci-badge]: https://golangci.com/badges/github.com/KeisukeYamashita/go-vcl.svg 127 | -------------------------------------------------------------------------------- /_images/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KeisukeYamashita/go-vcl/361a32ad21707d2b7e59a37a88c952aaf884a41b/_images/logo.png -------------------------------------------------------------------------------- /example/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "io/ioutil" 6 | "log" 7 | 8 | "github.com/KeisukeYamashita/go-vcl/vcl" 9 | ) 10 | 11 | // Root represents the top level object which is a file 12 | type Root struct { 13 | ACls []*ACL `vcl:"acl,block"` 14 | } 15 | 16 | 
// ACL are acl blocks 17 | type ACL struct { 18 | Type string `vcl:"type,label"` 19 | Endpoints []string `vcl:",flat"` 20 | } 21 | 22 | func main() { 23 | dat, err := ioutil.ReadFile("./example/vcl.vcl") 24 | if err != nil { 25 | log.Fatal(err) 26 | } 27 | 28 | r := &Root{} 29 | if errs := vcl.Decode(dat, r); len(errs) > 0 { 30 | log.Fatal(errs) 31 | } 32 | 33 | fmt.Println(r.ACls) 34 | fmt.Println(r.ACls[0].Endpoints) 35 | } 36 | -------------------------------------------------------------------------------- /example/vcl.vcl: -------------------------------------------------------------------------------- 1 | acl purge_ip { 2 | "localhost"; 3 | "127.0.0.1"; 4 | } -------------------------------------------------------------------------------- /go.mod: -------------------------------------------------------------------------------- 1 | module github.com/KeisukeYamashita/go-vcl 2 | 3 | go 1.17 4 | -------------------------------------------------------------------------------- /go.sum: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KeisukeYamashita/go-vcl/361a32ad21707d2b7e59a37a88c952aaf884a41b/go.sum -------------------------------------------------------------------------------- /internal/ast/ast.go: -------------------------------------------------------------------------------- 1 | package ast 2 | 3 | import "github.com/KeisukeYamashita/go-vcl/internal/token" 4 | 5 | // Program represents a single program file 6 | type Program struct { 7 | Statements []Statement 8 | } 9 | 10 | // Node ... 11 | type Node interface { 12 | TokenLiteral() string 13 | } 14 | 15 | // Statement ... 16 | type Statement interface { 17 | Node 18 | statementNode() 19 | } 20 | 21 | // Expression ... 22 | type Expression interface { 23 | Node 24 | expressionNode() 25 | } 26 | 27 | // PrefixExpression ... 
28 | type PrefixExpression struct { 29 | Token token.Token 30 | Operator string 31 | Right Expression 32 | } 33 | 34 | func (exp *PrefixExpression) expressionNode() {} 35 | func (exp *PrefixExpression) TokenLiteral() string { 36 | return exp.Token.Literal 37 | } 38 | 39 | // InfixExpression is an expression with Left and Right operands joined by an Operator, e.g. `a == b`. 40 | type InfixExpression struct { 41 | Token token.Token 42 | Operator string 43 | Left Expression 44 | Right Expression 45 | } 46 | 47 | func (exp *InfixExpression) expressionNode() {} 48 | func (exp *InfixExpression) TokenLiteral() string { 49 | return exp.Token.Literal 50 | } 51 | 52 | // IfExpression is a conditional expression: its Condition, the Consequence block, and an optional Alternative (else) block. 53 | type IfExpression struct { 54 | Token token.Token 55 | Condition Expression 56 | Consequence *BlockStatement 57 | Alternative *BlockStatement 58 | } 59 | 60 | func (exp *IfExpression) expressionNode() {} 61 | func (exp *IfExpression) TokenLiteral() string { 62 | return exp.Token.Literal 63 | } 64 | 65 | // BlockExpression is a labeled block expression: its Labels plus the Blocks body statements. 66 | type BlockExpression struct { 67 | Token token.Token 68 | Labels []string 69 | Blocks *BlockStatement 70 | } 71 | 72 | func (exp *BlockExpression) expressionNode() {} 73 | func (exp *BlockExpression) TokenLiteral() string { 74 | return exp.Token.Literal 75 | } 76 | 77 | // BlockStatement is a `{ ... }` block holding a list of statements. 
78 | type BlockStatement struct { 79 | Token token.Token // token.LBRACE 80 | Statements []Statement 81 | } 82 | 83 | func (bs *BlockStatement) statementNode() {} 84 | func (bs *BlockStatement) TokenLiteral() string { 85 | return bs.Token.Literal 86 | } 87 | 88 | // AssignStatement holds the Name for the Identifier and its value 89 | type AssignStatement struct { 90 | Token token.Token // token.ASSIGN 91 | Name *Identifier 92 | Value Expression 93 | } 94 | 95 | // AssignFieldStatement holds the Name for the Identifier and its value 96 | type AssignFieldStatement struct { 97 | Token token.Token // token.ASSIGN_FIELD 98 | Name *Identifier 99 | Value Expression 100 | } 101 | 102 | func (as *AssignFieldStatement) statementNode() {} 103 | func (as *AssignFieldStatement) TokenLiteral() string { 104 | return as.Token.Literal 105 | } 106 | 107 | func (as *AssignStatement) statementNode() {} 108 | func (as *AssignStatement) TokenLiteral() string { 109 | return as.Token.Literal 110 | } 111 | 112 | // ReturnStatement represents a `return` statement and holds the returned expression. 113 | type ReturnStatement struct { 114 | Token token.Token // token.RETURN 115 | ReturnValue Expression 116 | } 117 | 118 | func (as *ReturnStatement) statementNode() {} 119 | func (as *ReturnStatement) TokenLiteral() string { 120 | return as.Token.Literal 121 | } 122 | 123 | type CommentStatement struct { 124 | Token token.Token 125 | Value string 126 | } 127 | 128 | func (as *CommentStatement) statementNode() {} 129 | func (as *CommentStatement) TokenLiteral() string { 130 | return as.Token.Literal 131 | } 132 | 133 | // CallStatement represents a `call` statement and holds the called expression. 134 | type CallStatement struct { 135 | Token token.Token // NOTE(review): comment said token.ASSIGN — likely token.CALL, confirm against token package 136 | CallValue Expression 137 | } 138 | 139 | func (as *CallStatement) statementNode() {} 140 | func (as *CallStatement) TokenLiteral() string { 141 | return as.Token.Literal 142 | } 143 | 144 | // ExpressionStatement wraps a bare expression so it can appear in statement position. 145 | type ExpressionStatement struct { 146 | Token token.Token // NOTE(review): comment said token.ASSIGN — presumably the expression's first token, confirm 147 | Expression Expression 148 | } 149 | 150 | func (as *ExpressionStatement) statementNode() {} 151 | func (as *ExpressionStatement) TokenLiteral() string { 152 | return as.Token.Literal 153 | } 154 | 155 | // Identifier is a name expression node holding the identifier's string value. 156 | type Identifier struct { 157 | Token token.Token // token.IDENT 158 | Value string 159 | } 160 | 161 | func (i *Identifier) expressionNode() {} 162 | func (i *Identifier) TokenLiteral() string { 163 | return i.Token.Literal 164 | } 165 | 166 | // IntegerLiteral is an integer literal expression holding its parsed int64 value. 167 | type IntegerLiteral struct { 168 | Token token.Token 169 | Value int64 170 | } 171 | 172 | func (i *IntegerLiteral) expressionNode() {} 173 | func (i *IntegerLiteral) TokenLiteral() string { 174 | return i.Token.Literal 175 | } 176 | 177 | // BooleanLiteral is a boolean literal expression (true/false). 178 | type BooleanLiteral struct { 179 | Token token.Token 180 | Value bool 181 | } 182 | 183 | func (i *BooleanLiteral) expressionNode() {} 184 | func (i *BooleanLiteral) TokenLiteral() string { 185 | return i.Token.Literal 186 | } 187 | 188 | // StringLiteral is a string literal expression holding its string value. 189 | type StringLiteral struct { 190 | Token token.Token 191 | Value string 192 | } 193 | 194 | func (i *StringLiteral) expressionNode() {} 195 | func (i *StringLiteral) TokenLiteral() string { 196 | return i.Token.Literal 197 | } 198 | 199 | type CIDRLiteral struct { 200 | Token token.Token 201 | Value string 202 | } 203 | 204 | func (i *CIDRLiteral) expressionNode() {} 205 | func (i *CIDRLiteral) TokenLiteral() string { 206 | return i.Token.Literal 207 | } 208 | 209 | // PercentageLiteral is a percentage literal expression; its Value is kept as a string. 
210 | type PercentageLiteral struct { 211 | Token token.Token 212 | Value string 213 | } 214 | 215 | func (i *PercentageLiteral) expressionNode() {} 216 | func (i *PercentageLiteral) TokenLiteral() string { 217 | return i.Token.Literal 218 | } 219 | -------------------------------------------------------------------------------- /internal/decoder/decoder.go: -------------------------------------------------------------------------------- 1 | package decoder 2 | 3 | import ( 4 | "errors" 5 | "fmt" 6 | "reflect" 7 | "sort" 8 | "strings" 9 | 10 | "github.com/KeisukeYamashita/go-vcl/internal/ast" 11 | "github.com/KeisukeYamashita/go-vcl/internal/schema" 12 | "github.com/KeisukeYamashita/go-vcl/internal/traversal" 13 | ) 14 | 15 | var attrType = reflect.TypeOf((*schema.Attribute)(nil)) 16 | 17 | // Decode is a function for mapping the program of parser output to your custom struct. 18 | func Decode(program *ast.Program, val interface{}) []error { 19 | rv := reflect.ValueOf(val) 20 | if rv.Kind() != reflect.Ptr { 21 | return []error{fmt.Errorf("target value must be a pointer, not: %s", rv.Type().String())} 22 | } 23 | 24 | return decodeProgramToValue(program, rv.Elem()) 25 | } 26 | 27 | func decodeProgramToValue(program *ast.Program, val reflect.Value) []error { 28 | et := val.Type() 29 | switch et.Kind() { 30 | case reflect.Struct: 31 | return decodeProgramToStruct(program, val) 32 | case reflect.Map: 33 | return decodeProgramToMap(program, val) 34 | default: 35 | panic(fmt.Sprintf("target value must be a pointer to struct, not: %s", et.String())) 36 | } 37 | } 38 | 39 | func decodeProgramToStruct(program *ast.Program, val reflect.Value) []error { 40 | content := traversal.Content(program) 41 | return decodeContentToStruct(content, val) 42 | } 43 | 44 | func decodeContentToStruct(content *schema.BodyContent, val reflect.Value) []error { 45 | tags := getFieldTags(val.Type()) 46 | decodeAttr(content, tags, val) 47 | decodeFlats(content.Flats, tags, val) 48 | 
decodeComments(content.Comments, tags, val) 49 | return decodeBlocks(content.Blocks, tags, val) 50 | } 51 | 52 | func decodeAttr(content *schema.BodyContent, tags *fieldTags, val reflect.Value) { 53 | for name, fieldIdx := range tags.Attributes { 54 | attr := content.Attributes[name] 55 | field := val.Type().Field(fieldIdx) 56 | fieldTy := field.Type 57 | fieldV := val.Field(fieldIdx) 58 | 59 | if attr == nil { 60 | fieldV.Set(reflect.Zero(field.Type)) 61 | continue 62 | } 63 | 64 | switch { 65 | case attrType.AssignableTo(field.Type): 66 | fieldV.Set(reflect.ValueOf(attr)) 67 | case fieldTy.AssignableTo(reflect.ValueOf(attr.Value).Type()): 68 | fieldV.Set(reflect.ValueOf(attr.Value)) 69 | } 70 | } 71 | } 72 | 73 | func decodeBlocks(blocks schema.Blocks, tags *fieldTags, val reflect.Value) []error { 74 | errs := []error{} 75 | blocksByType := blocks.ByType() 76 | 77 | for typeName, fieldIdx := range tags.Blocks { 78 | blocks := blocksByType[typeName] 79 | field := val.Type().Field(fieldIdx) 80 | ty := field.Type 81 | 82 | var isSlice bool 83 | var isPtr bool 84 | if ty.Kind() == reflect.Slice { 85 | isSlice = true 86 | ty = ty.Elem() 87 | } 88 | 89 | if ty.Kind() == reflect.Ptr { 90 | isPtr = true 91 | ty = ty.Elem() 92 | } 93 | 94 | if len(blocks) > 1 && !isSlice { 95 | errs = append(errs, errors.New("more than one block but the field type is not slice")) 96 | } 97 | 98 | if len(blocks) == 0 { 99 | if isSlice || isPtr { 100 | val.Field(fieldIdx).Set(reflect.Zero(field.Type)) 101 | } else { 102 | errs = append(errs, errors.New("no block")) 103 | } 104 | } 105 | 106 | switch { 107 | case isSlice: 108 | elemType := ty 109 | if isPtr { 110 | elemType = reflect.PtrTo(ty) 111 | } 112 | 113 | sli := reflect.MakeSlice(reflect.SliceOf(elemType), len(blocks), len(blocks)) 114 | 115 | for i, block := range blocks { 116 | if isPtr { 117 | v := reflect.New(ty) 118 | decodeBlockToStruct(block, v.Elem()) 119 | sli.Index(i).Set(v) 120 | } else { 121 | errs = append(errs, 
errors.New("block is not a pointer")) 122 | } 123 | } 124 | 125 | val.Field(fieldIdx).Set(sli) 126 | default: 127 | if isPtr { 128 | v := reflect.New(ty) 129 | decodeBlockToStruct(blocks[0], v.Elem()) 130 | val.Field(fieldIdx).Set(v) 131 | } else { 132 | errs = append(errs, errors.New("block is not a pointer")) 133 | } 134 | } 135 | } 136 | 137 | return errs 138 | } 139 | 140 | // decodeBlockToStruct decodes a block into a struct passed by val 141 | func decodeBlockToStruct(block *schema.Block, val reflect.Value) []error { 142 | tags := getFieldTags(val.Type()) 143 | 144 | for i, n := range tags.Labels { 145 | if i+1 > len(block.Labels) { 146 | continue 147 | } 148 | label := block.Labels[i] 149 | fieldV := val.Field(n.FieldIndex) 150 | fieldV.Set(reflect.ValueOf(label)) 151 | } 152 | 153 | content := traversal.BodyContent(block.Body) 154 | return decodeContentToStruct(content, val) 155 | } 156 | 157 | func decodeFlats(flats schema.Flats, tags *fieldTags, val reflect.Value) { 158 | for _, n := range tags.Flats { 159 | field := val.Type().Field(n.FieldIndex) 160 | ty := field.Type 161 | 162 | var isSlice bool 163 | var isPtr bool 164 | if ty.Kind() == reflect.Slice { 165 | isSlice = true 166 | ty = ty.Elem() 167 | } 168 | 169 | if ty.Kind() == reflect.Ptr { 170 | isPtr = true 171 | ty = ty.Elem() 172 | } 173 | 174 | switch { 175 | case isSlice: 176 | elemType := ty 177 | if isPtr { 178 | elemType = reflect.PtrTo(ty) 179 | } 180 | 181 | sli := reflect.MakeSlice(reflect.SliceOf(elemType), len(flats), len(flats)) 182 | 183 | for i, flat := range flats { 184 | if isPtr { 185 | v := reflect.New(ty) 186 | decodeBlockToStruct(flat.(*schema.Block), v.Elem()) 187 | sli.Index(i).Set(v) 188 | } else { 189 | sli.Index(i).Set(reflect.ValueOf(flat)) 190 | } 191 | } 192 | 193 | val.Field(n.FieldIndex).Set(sli) 194 | } 195 | } 196 | } 197 | 198 | func decodeComments(comments schema.Comments, tags *fieldTags, val reflect.Value) { 199 | for _, n := range tags.Comments { 200 | field 
:= val.Type().Field(n.FieldIndex) 201 | fieldTy := field.Type 202 | 203 | var isSlice bool 204 | if fieldTy.Kind() == reflect.Slice { 205 | isSlice = true 206 | fieldTy = fieldTy.Elem() 207 | } 208 | 209 | switch { 210 | case isSlice: 211 | sli := reflect.MakeSlice(reflect.SliceOf(fieldTy), len(comments), len(comments)) 212 | 213 | for i, comment := range comments { 214 | sli.Index(i).Set(reflect.ValueOf(comment)) 215 | } 216 | 217 | val.Field(n.FieldIndex).Set(sli) 218 | } 219 | } 220 | } 221 | 222 | func decodeProgramToMap(program *ast.Program, val reflect.Value) []error { 223 | var errs []error 224 | content := traversal.Content(program) 225 | if content.Attributes == nil { 226 | return nil 227 | } 228 | 229 | var mv reflect.Value 230 | if len(content.Attributes) > 0 || len(content.Blocks) > 0 { 231 | mv = reflect.MakeMap(val.Type()) 232 | 233 | for k, attr := range content.Attributes { 234 | mv.SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(attr.Value)) 235 | } 236 | 237 | blocksByType := content.Blocks.ByType() 238 | 239 | for tyName, blocks := range blocksByType { 240 | mp := reflect.MakeMap(val.Type()) 241 | for _, block := range blocks { 242 | content := traversal.BodyContent(block.Body) 243 | var v reflect.Value 244 | var blockType string 245 | if len(block.Labels) > 0 { 246 | blockType = block.Labels[0] 247 | } 248 | 249 | if len(content.Attributes) > 0 || len(content.Blocks) > 0 { 250 | v = reflect.New(val.Type()).Elem() 251 | decodeBlockToMap(block, v) 252 | 253 | for _, label := range block.Labels[1:] { 254 | tmpMap := reflect.MakeMap(val.Type()) 255 | tmpMap.SetMapIndex(reflect.ValueOf(label), v) 256 | v = tmpMap 257 | } 258 | } else { 259 | v = reflect.MakeSlice(reflect.TypeOf([]interface{}{}), len(content.Flats), len(content.Flats)) 260 | for i, flat := range content.Flats { 261 | v.Index(i).Set(reflect.ValueOf(flat)) 262 | } 263 | } 264 | mp.SetMapIndex(reflect.ValueOf(blockType), v) 265 | } 266 | 267 | mv.SetMapIndex(reflect.ValueOf(tyName), mp) 
268 | } 269 | } 270 | 271 | val.Set(mv) 272 | return errs 273 | } 274 | 275 | func decodeBlockToMap(block *schema.Block, val reflect.Value) { 276 | content := traversal.BodyContent(block.Body) 277 | mv := reflect.MakeMap(val.Type()) 278 | 279 | for k, attr := range content.Attributes { 280 | key := removeAttrDot(k) 281 | mv.SetMapIndex(reflect.ValueOf(key), reflect.ValueOf(attr.Value)) 282 | } 283 | 284 | blocksByType := content.Blocks.ByType() 285 | 286 | for tyName, blocks := range blocksByType { 287 | var isSlice bool 288 | if len(blocks) != 1 { 289 | isSlice = true 290 | } 291 | 292 | switch { 293 | case isSlice: 294 | sli := reflect.MakeSlice(reflect.SliceOf(val.Type()), len(blocks), len(blocks)) 295 | for i, block := range blocks { 296 | v := reflect.New(val.Type()).Elem() 297 | decodeBlockToMap(block, v) 298 | 299 | for _, label := range block.Labels { 300 | tmpMap := reflect.MakeMap(val.Type()) 301 | tmpMap.SetMapIndex(reflect.ValueOf(label), v) 302 | v = tmpMap 303 | } 304 | 305 | sli.Index(i).Set(v) 306 | } 307 | 308 | mv.SetMapIndex(reflect.ValueOf(tyName), sli) 309 | default: 310 | block := blocks[0] 311 | v := reflect.New(val.Type()).Elem() 312 | decodeBlockToMap(block, v) 313 | mv.SetMapIndex(reflect.ValueOf(tyName), v) 314 | } 315 | } 316 | 317 | val.Set(mv) 318 | } 319 | 320 | // imipliedBodySchema will retrieves the root body schema from the given val. 321 | // For Varnish & Fastly usecases, there will be only blocks in the root. But as a configuration language, 322 | // the root schema can contain attribute as HCL. Therefore, I left the attributes slice for that. 
323 | func impliedBodySchema(val interface{}) *schema.File { 324 | ty := reflect.TypeOf(val) 325 | if ty.Kind() == reflect.Ptr { 326 | ty = ty.Elem() 327 | } 328 | 329 | if ty.Kind() != reflect.Struct { 330 | panic(fmt.Sprintf("target value must be a struct, not: %T", val)) 331 | } 332 | 333 | var attrSchemas []schema.AttributeSchema 334 | var blockSchemas []schema.BlockHeaderSchema 335 | 336 | tags := getFieldTags(ty) 337 | attrNames := make([]string, 0, len(tags.Attributes)) 338 | for n := range tags.Attributes { 339 | attrNames = append(attrNames, n) 340 | } 341 | 342 | sort.Strings(attrNames) 343 | for _, n := range attrNames { 344 | attr := tags.Attributes[n] 345 | field := ty.Field(attr) 346 | var required bool 347 | 348 | switch { 349 | case field.Type.Kind() != reflect.Ptr: 350 | required = true 351 | } 352 | 353 | attrSchemas = append(attrSchemas, schema.AttributeSchema{ 354 | Name: n, 355 | Required: required, 356 | }) 357 | } 358 | 359 | blockNames := make([]string, 0, len(tags.Blocks)) 360 | for n := range tags.Blocks { 361 | blockNames = append(blockNames, n) 362 | } 363 | 364 | sort.Strings(blockNames) 365 | for _, n := range blockNames { 366 | idx := tags.Blocks[n] 367 | field := ty.Field(idx) 368 | fty := field.Type 369 | if fty.Kind() == reflect.Ptr { 370 | fty = fty.Elem() 371 | } 372 | 373 | if fty.Kind() != reflect.Struct { 374 | panic(fmt.Sprintf("hcl 'block' tag kind cannot be applied to %s field %s: struct required", field.Type.String(), field.Name)) 375 | } 376 | 377 | ftags := getFieldTags(fty) 378 | var labelNames []string 379 | if len(ftags.Labels) > 0 { 380 | labelNames = make([]string, len(ftags.Labels)) 381 | for i, l := range ftags.Labels { 382 | labelNames[i] = l.Name 383 | } 384 | } 385 | 386 | blockSchemas = append(blockSchemas, schema.BlockHeaderSchema{ 387 | Type: n, 388 | LabelNames: labelNames, 389 | }) 390 | } 391 | 392 | file := &schema.File{ 393 | Body: &schema.BodySchema{ 394 | Attributes: attrSchemas, 395 | Blocks: 
blockSchemas, 396 | }, 397 | } 398 | 399 | return file 400 | } 401 | 402 | // fieldTags is a struct that represents info about the field of the passed val. 403 | type fieldTags struct { 404 | Attributes map[string]int 405 | Blocks map[string]int 406 | Labels []labelField 407 | Flats []flatField 408 | Comments []commentField 409 | } 410 | 411 | // labelField is a struct that represents info about the struct tags of "vcl". 412 | type labelField struct { 413 | FieldIndex int 414 | Name string 415 | } 416 | type flatField struct { 417 | FieldIndex int 418 | Name string 419 | } 420 | 421 | type commentField struct { 422 | FieldIndex int 423 | Name string 424 | } 425 | 426 | // getFieldTags retrieves the "vcl" tags of the given struct type. 427 | func getFieldTags(ty reflect.Type) *fieldTags { 428 | ret := &fieldTags{ 429 | Attributes: map[string]int{}, 430 | Blocks: map[string]int{}, 431 | Labels: []labelField{}, 432 | Flats: []flatField{}, 433 | Comments: []commentField{}, 434 | } 435 | 436 | ct := ty.NumField() 437 | for i := 0; i < ct; i++ { 438 | field := ty.Field(i) 439 | tag := field.Tag.Get("vcl") 440 | if tag == "" { 441 | continue 442 | } 443 | 444 | comma := strings.Index(tag, ",") 445 | var name, kind string 446 | if comma != -1 { 447 | name = tag[:comma] 448 | kind = tag[comma+1:] 449 | } else { 450 | name = tag 451 | kind = "attr" 452 | } 453 | 454 | switch kind { 455 | case "attr": 456 | ret.Attributes[name] = i 457 | case "block": 458 | ret.Blocks[name] = i 459 | case "label": 460 | ret.Labels = append(ret.Labels, labelField{ 461 | FieldIndex: i, 462 | Name: name, 463 | }) 464 | case "flat": 465 | ret.Flats = append(ret.Flats, flatField{ 466 | FieldIndex: i, 467 | Name: name, 468 | }) 469 | case "comment": 470 | ret.Comments = append(ret.Comments, commentField{ 471 | FieldIndex: i, 472 | Name: name, 473 | }) 474 | default: 475 | panic(fmt.Sprintf("invalid vcl field tag kind %q on %s %q", kind, field.Type.String(), field.Name)) 476 | } 477 | } 478 | 479 | 
return ret 480 | } 481 | 482 | func removeAttrDot(v interface{}) interface{} { 483 | str, ok := v.(string) 484 | if !ok { 485 | return v 486 | } 487 | 488 | return strings.Trim(str, ".") 489 | } 490 | -------------------------------------------------------------------------------- /internal/decoder/decoder_test.go: -------------------------------------------------------------------------------- 1 | package decoder 2 | 3 | import ( 4 | "reflect" 5 | "testing" 6 | 7 | "github.com/KeisukeYamashita/go-vcl/internal/ast" 8 | "github.com/KeisukeYamashita/go-vcl/internal/lexer" 9 | "github.com/KeisukeYamashita/go-vcl/internal/parser" 10 | "github.com/KeisukeYamashita/go-vcl/internal/schema" 11 | ) 12 | 13 | func TestDecode(t *testing.T) { 14 | type TestStruct struct { 15 | Name string 16 | } 17 | 18 | testStruct := &TestStruct{} 19 | prog := &ast.Program{} 20 | 21 | testCases := map[string]struct { 22 | input interface{} 23 | program *ast.Program 24 | shouldError bool 25 | }{ 26 | "with pointer": {testStruct, prog, false}, 27 | "with not-pointer": {*testStruct, prog, true}, 28 | } 29 | 30 | for n, tc := range testCases { 31 | t.Run(n, func(t *testing.T) { 32 | if errs := Decode(tc.program, tc.input); len(errs) > 0 { 33 | if tc.shouldError { 34 | return 35 | } 36 | 37 | t.Fatalf("decode failed with error: %v", errs) 38 | } 39 | 40 | if tc.shouldError { 41 | t.Fatalf("decode should failed but successed") 42 | } 43 | }) 44 | } 45 | } 46 | 47 | func TestDecodeProgramToStruct_Attribute(t *testing.T) { 48 | type Root struct { 49 | X int64 `vcl:"x"` 50 | API string `vcl:"api"` 51 | } 52 | 53 | testCases := []struct { 54 | input string 55 | val interface{} 56 | expected interface{} 57 | }{ 58 | {`x = 1`, &Root{}, &Root{X: 1}}, 59 | {`api = "localhost"`, &Root{}, &Root{API: "localhost"}}, 60 | } 61 | 62 | for n, tc := range testCases { 63 | l := lexer.NewLexer(tc.input) 64 | p := parser.NewParser(l) 65 | program := p.ParseProgram() 66 | root := tc.val 67 | val := 
reflect.ValueOf(root).Elem() 68 | errs := decodeProgramToStruct(program, val) 69 | 70 | if len(errs) > 0 { 71 | t.Fatalf("decodeProgramToStruct has errors[testCase:%d], err:%v", n, errs) 72 | } 73 | 74 | if !reflect.DeepEqual(tc.val, tc.expected) { 75 | t.Fatalf("decodeProgramToStruct got wrong result[testCase:%d]", n) 76 | } 77 | } 78 | } 79 | 80 | func TestDecodeProgramToStruct_Block(t *testing.T) { 81 | type ACL struct { 82 | Type string `vcl:"type,label"` 83 | Name string `vcl:"name,label"` 84 | Endpoints []string `vcl:",flat"` 85 | } 86 | 87 | type Sub struct { 88 | Type string `vcl:"type,label"` 89 | Endpoints []string `vcl:",flat"` // Memo(KeisukeYamashita): Wont test inside of the block 90 | } 91 | 92 | type SubObj struct { 93 | Type string `vcl:"type,label"` 94 | Name string `vcl:"name,label"` 95 | Host string `vcl:".host"` 96 | IP string `vcl:".ip"` 97 | } 98 | 99 | type RootSub struct { 100 | Subs []*SubObj `vcl:"sub,block"` 101 | } 102 | 103 | type Root struct { 104 | ACLs []*ACL `vcl:"acl,block"` 105 | Subs []*Sub `vcl:"sub,block"` 106 | } 107 | 108 | testCases := map[string]struct { 109 | input string 110 | val interface{} 111 | expected interface{} 112 | }{ 113 | "with single block": { 114 | `acl local { 115 | "local"; 116 | "localhost"; 117 | }`, &Root{}, &Root{Subs: []*Sub{}, ACLs: []*ACL{&ACL{Type: "local", Endpoints: []string{"local", "localhost"}}}}, 118 | }, 119 | "with two same block": { 120 | `acl local { 121 | "local"; 122 | "localhost"; 123 | } 124 | 125 | acl remote { 126 | "remote"; 127 | } 128 | `, &Root{}, &Root{Subs: []*Sub{}, ACLs: []*ACL{&ACL{Type: "local", Endpoints: []string{"local", "localhost"}}, &ACL{Type: "remote", Endpoints: []string{"remote"}}}}, 129 | }, 130 | "with two mixed block type": { 131 | `acl local { 132 | "local"; 133 | "localhost"; 134 | } 135 | 136 | sub pipe_something { 137 | "inside_sub"; 138 | "34.100.0.0"/23; 139 | } 140 | `, &Root{}, &Root{ACLs: []*ACL{&ACL{Type: "local", Endpoints: []string{"local", 
"localhost"}}}, Subs: []*Sub{&Sub{Type: "pipe_something", Endpoints: []string{"inside_sub", "\"34.100.0.0\"/23"}}}}, 141 | }, 142 | "with sub block": { 143 | `sub pipe_something { 144 | .host = "host"; 145 | .ip = "ip"; 146 | } 147 | `, &RootSub{}, &RootSub{Subs: []*SubObj{&SubObj{Type: "pipe_something", Host: "host", IP: "ip"}}}, 148 | }, 149 | "with multi label": { 150 | `sub pipe_something pipe_keke { 151 | .host = "host"; 152 | .ip = "ip"; 153 | } 154 | `, &RootSub{}, &RootSub{Subs: []*SubObj{&SubObj{Type: "pipe_something", Name: "pipe_keke", Host: "host", IP: "ip"}}}, 155 | }, 156 | } 157 | 158 | for n, tc := range testCases { 159 | t.Run(n, func(t *testing.T) { 160 | l := lexer.NewLexer(tc.input) 161 | p := parser.NewParser(l) 162 | program := p.ParseProgram() 163 | root := tc.val 164 | val := reflect.ValueOf(root).Elem() 165 | errs := decodeProgramToStruct(program, val) 166 | 167 | if len(errs) > 0 { 168 | t.Fatalf("decodeProgramToStruct_Block has errorr, err:%v", errs) 169 | } 170 | 171 | if !reflect.DeepEqual(tc.val, tc.expected) { 172 | t.Fatalf("decodeProgramToStruct_Block got wrong result, got:%#v, want:%#v", tc.val, tc.expected) 173 | } 174 | }) 175 | } 176 | } 177 | 178 | func TestDecodeProgramToStruct_DirectorBlock(t *testing.T) { 179 | type Backend struct { 180 | Backend string `vcl:".backend"` 181 | Weight int64 `vcl:".weight"` 182 | } 183 | 184 | type Director struct { 185 | Type string `vcl:"type,label"` 186 | Name string `vcl:"name,label"` 187 | Quorum string `vcl:".quorum"` 188 | Retries int64 `vcl:".retries"` 189 | Backends []*Backend `vcl:",flat"` 190 | } 191 | 192 | type Root struct { 193 | Directors []*Director `vcl:"director,block"` 194 | } 195 | 196 | testCases := map[string]struct { 197 | input string 198 | val interface{} 199 | expected interface{} 200 | }{ 201 | "with single director block": { 202 | `director my_dir random { 203 | .quorum = 50%; 204 | .retries = 3; 205 | }`, &Root{}, &Root{Directors: []*Director{&Director{Type: 
"my_dir", Name: "random", Quorum: "50%", Retries: 3, Backends: []*Backend{}}}}, 206 | }, 207 | "with deep director block": { 208 | `director my_dir random { 209 | .quorum = 50%; 210 | .retries = 3; 211 | { .backend = K_backend1; .weight = 1; } 212 | }`, &Root{}, &Root{Directors: []*Director{&Director{Type: "my_dir", Name: "random", Quorum: "50%", Retries: 3, Backends: []*Backend{&Backend{Backend: "K_backend1", Weight: 1}}}}}, 213 | }, 214 | "with multiple deep director block": { 215 | `director my_dir random { 216 | .quorum = 50%; 217 | .retries = 3; 218 | { .backend = K_backend1; .weight = 1; } 219 | { .backend = E_backend1; .weight = 3; } 220 | }`, &Root{}, &Root{Directors: []*Director{&Director{Type: "my_dir", Name: "random", Quorum: "50%", Retries: 3, Backends: []*Backend{&Backend{Backend: "K_backend1", Weight: 1}, &Backend{Backend: "E_backend1", Weight: 3}}}}}, 221 | }, 222 | } 223 | 224 | for n, tc := range testCases { 225 | t.Run(n, func(t *testing.T) { 226 | l := lexer.NewLexer(tc.input) 227 | p := parser.NewParser(l) 228 | program := p.ParseProgram() 229 | root := tc.val 230 | val := reflect.ValueOf(root).Elem() 231 | errs := decodeProgramToStruct(program, val) 232 | 233 | if len(errs) > 0 { 234 | t.Fatalf("decodeProgramToStruct_Block has errorr, err:%v", errs) 235 | } 236 | 237 | if !reflect.DeepEqual(tc.val, tc.expected) { 238 | t.Fatalf("decodeProgramToStruct_Block got wrong result, got:%#v, want:%#v", tc.val, tc.expected) 239 | } 240 | }) 241 | } 242 | } 243 | 244 | func TestDecodeProgramToStruct_TableBlock(t *testing.T) { 245 | type Table struct { 246 | Type string `vcl:"type,label"` 247 | Username string `vcl:"username"` 248 | } 249 | 250 | type Root struct { 251 | Tables []*Table `vcl:"table,block"` 252 | } 253 | 254 | testCases := map[string]struct { 255 | input string 256 | val interface{} 257 | expected interface{} 258 | }{ 259 | "with single table block": { 260 | `table my_id { 261 | "username": "keke" 262 | }`, &Root{}, 
&Root{[]*Table{&Table{Type: "my_id", Username: "keke"}}}, 263 | }, 264 | "with multiple table block": { 265 | `table my_id { 266 | "username": "keke" 267 | } 268 | 269 | table my_keke { 270 | "username": "kekekun", 271 | }`, &Root{}, &Root{[]*Table{&Table{Type: "my_id", Username: "keke"}, &Table{Type: "my_keke", Username: "kekekun"}}}, 272 | }, 273 | } 274 | 275 | for n, tc := range testCases { 276 | t.Run(n, func(t *testing.T) { 277 | l := lexer.NewLexer(tc.input) 278 | p := parser.NewParser(l) 279 | program := p.ParseProgram() 280 | root := tc.val 281 | val := reflect.ValueOf(root).Elem() 282 | errs := decodeProgramToStruct(program, val) 283 | 284 | if len(errs) > 0 { 285 | t.Fatalf("decodeProgramToStruct_Block has errorr, err:%v", errs) 286 | } 287 | 288 | if !reflect.DeepEqual(tc.val, tc.expected) { 289 | t.Fatalf("decodeProgramToStruct_Block got wrong result, got:%#v, want:%#v", tc.val, tc.expected) 290 | } 291 | }) 292 | } 293 | } 294 | 295 | func TestDecodeProgramToStruct_NestedBlock(t *testing.T) { 296 | type Probe struct { 297 | X int64 `vcl:"x"` 298 | } 299 | 300 | type Backend struct { 301 | Type string `vcl:"type,label"` 302 | IP string `vcl:".ip"` 303 | Probe *Probe `vcl:".probe,block"` 304 | } 305 | 306 | type Root struct { 307 | Backends []*Backend `vcl:"backend,block"` 308 | } 309 | 310 | testCases := map[string]struct { 311 | input string 312 | val interface{} 313 | expected interface{} 314 | }{ 315 | "with nested simple block": { 316 | `backend remote { 317 | .ip = "localhost"; 318 | .probe = { 319 | x = 10; 320 | }; 321 | }`, &Root{}, &Root{Backends: []*Backend{&Backend{Type: "remote", IP: "localhost", Probe: &Probe{X: 10}}}}, 322 | }, 323 | } 324 | 325 | for n, tc := range testCases { 326 | t.Run(n, func(t *testing.T) { 327 | l := lexer.NewLexer(tc.input) 328 | p := parser.NewParser(l) 329 | program := p.ParseProgram() 330 | root := tc.val 331 | val := reflect.ValueOf(root).Elem() 332 | errs := decodeProgramToStruct(program, val) 333 | 334 | if 
len(errs) > 0 { 335 | t.Fatalf("decodeProgramToStruct_Block has errorr, err:%v", errs) 336 | } 337 | 338 | if !reflect.DeepEqual(tc.val, tc.expected) { 339 | t.Fatalf("decodeProgramToStruct_Block got wrong result, got:%#v", tc.val) 340 | } 341 | }) 342 | } 343 | } 344 | 345 | func TestDecodeProgramToStruct_Comments(t *testing.T) { 346 | type ACL struct { 347 | Type string `vcl:"type,label"` 348 | Comments []string `vcl:",comment"` 349 | } 350 | 351 | type Root struct { 352 | ACLs []*ACL `vcl:"acl,block"` 353 | Comments []string `vcl:",comment"` 354 | } 355 | 356 | testCases := map[string]struct { 357 | input string 358 | val interface{} 359 | expected interface{} 360 | }{ 361 | "with root comment by hash": { 362 | `# keke`, &Root{}, &Root{Comments: []string{"keke"}, ACLs: []*ACL{}}, 363 | }, 364 | "with root comment by double slash": { 365 | `// keke`, &Root{}, &Root{Comments: []string{"keke"}, ACLs: []*ACL{}}, 366 | }, 367 | "with root by double slash with block": { 368 | `// keke 369 | acl "tag" {} 370 | `, &Root{}, &Root{Comments: []string{"keke"}, ACLs: []*ACL{&ACL{Type: "tag", Comments: []string{}}}}, 371 | }, 372 | "with nested block": { 373 | `// keke 374 | acl "tag" { 375 | // internal-keke 376 | "localhost"; 377 | } 378 | `, &Root{}, &Root{Comments: []string{"keke"}, ACLs: []*ACL{&ACL{Type: "tag", Comments: []string{"internal-keke"}}}}, 379 | }, 380 | } 381 | 382 | for n, tc := range testCases { 383 | t.Run(n, func(t *testing.T) { 384 | l := lexer.NewLexer(tc.input) 385 | p := parser.NewParser(l) 386 | program := p.ParseProgram() 387 | root := tc.val 388 | val := reflect.ValueOf(root).Elem() 389 | errs := decodeProgramToStruct(program, val) 390 | 391 | if len(errs) > 0 { 392 | t.Fatalf("decodeProgramToStruct_Block has errorr, err:%v", errs) 393 | } 394 | 395 | if !reflect.DeepEqual(tc.val, tc.expected) { 396 | t.Fatalf("decodeProgramToStruct_Block got wrong result, got:%#v", tc.val) 397 | } 398 | }) 399 | } 400 | } 401 | 402 | func TestDecodeProgramToMap(t 
*testing.T) { 403 | testCases := map[string]struct { 404 | input string 405 | val map[string]interface{} 406 | expected map[string]interface{} 407 | }{ 408 | "with single attr": {`x = hello`, map[string]interface{}{}, map[string]interface{}{"x": "hello"}}, 409 | "with multiple attr": {`x = hello; 410 | y = bye`, map[string]interface{}{}, map[string]interface{}{"x": "hello", "y": "bye"}}, 411 | "with single block": {`acl hello {x = "test"}`, map[string]interface{}{}, map[string]interface{}{"acl": map[string]interface{}{"hello": map[string]interface{}{"x": "test"}}}}, 412 | "with multiple block": {`acl hello { 413 | x = "test"; 414 | } 415 | 416 | acl bye { 417 | y = "keke"; 418 | } 419 | `, map[string]interface{}{}, map[string]interface{}{"acl": map[string]interface{}{"hello": map[string]interface{}{"x": "test"}, "bye": map[string]interface{}{"y": "keke"}}}}, 420 | "with flat block": {`acl hello { 421 | "localhost"; 422 | "local"; 423 | }`, map[string]interface{}{}, map[string]interface{}{"acl": map[string]interface{}{"hello": []interface{}{"localhost", "local"}}}}, 424 | "with dot attribute block": {`backend default { 425 | .port = "8080"; 426 | }`, map[string]interface{}{}, map[string]interface{}{"backend": map[string]interface{}{"default": map[string]interface{}{"port": "8080"}}}}} 427 | 428 | for n, tc := range testCases { 429 | t.Run(n, func(t *testing.T) { 430 | l := lexer.NewLexer(tc.input) 431 | p := parser.NewParser(l) 432 | program := p.ParseProgram() 433 | val := reflect.ValueOf(&tc.val).Elem() 434 | errs := decodeProgramToMap(program, val) 435 | 436 | if len(errs) > 0 { 437 | t.Fatalf("decodeProgramToStruct has errors, err:%v", errs) 438 | } 439 | 440 | if !reflect.DeepEqual(&tc.val, &tc.expected) { 441 | t.Fatalf("decodeProgramToStruct got wrong result got:%v want:%v", tc.val, tc.expected) 442 | } 443 | }) 444 | } 445 | } 446 | 447 | func TestImpliedBodySchema(t *testing.T) { 448 | type testBlock struct { 449 | Type string `vcl:"type,label"` 450 | 
MiddelName string `vcl:"middelname"` 451 | } 452 | 453 | type testStruct struct { 454 | Type string `vcl:"type,label"` 455 | Name string `vcl:"name"` 456 | Resource *testBlock `vcl:"resource,block"` 457 | } 458 | 459 | input := &testStruct{ 460 | Type: "my-type", 461 | Name: "keke", 462 | Resource: &testBlock{ 463 | MiddelName: "middelName", 464 | }, 465 | } 466 | 467 | testCases := []struct { 468 | input interface{} 469 | }{ 470 | {input}, 471 | } 472 | 473 | for n, tc := range testCases { 474 | file := impliedBodySchema(tc.input) 475 | bs := file.Body.(*schema.BodySchema) 476 | if len(bs.Attributes) != 1 { 477 | t.Fatalf("Attribute length wrong[testCase:%d], got:%d, want:%d", n, len(bs.Attributes), 1) 478 | } 479 | 480 | if len(bs.Blocks) != 1 { 481 | t.Fatalf("Block length wrong[testCase:%d], got:%d, want:%d", n, len(bs.Blocks), 1) 482 | } 483 | 484 | if len(bs.Blocks[0].LabelNames) != 1 { 485 | t.Fatalf("Block label are not expected[testCase:%d], got:%d, want:%d", n, len(bs.Blocks[0].LabelNames), 1) 486 | } 487 | } 488 | } 489 | 490 | func TestGetFieldTags(t *testing.T) { 491 | type testStruct struct { 492 | Type string `vcl:"type,label"` 493 | Name string `vcl:"name"` // implied attribute 494 | Resource interface{} `vcl:"resource,block"` 495 | Flats interface{} `vcl:",flat"` 496 | Comments interface{} `vcl:",comment"` 497 | } 498 | 499 | input := &testStruct{ 500 | Type: "my-type", 501 | Name: "keke", 502 | Resource: "", 503 | } 504 | 505 | testCases := []struct { 506 | input *testStruct 507 | }{ 508 | {input}, 509 | } 510 | 511 | for n, tc := range testCases { 512 | ty := reflect.TypeOf(*tc.input) 513 | tags := getFieldTags(ty) 514 | 515 | if len(tags.Attributes) != 1 { 516 | t.Fatalf("Attribute length wrong[testCase:%d], got:%d, want:%d", n, len(tags.Attributes), 1) 517 | } 518 | 519 | if len(tags.Labels) != 1 { 520 | t.Fatalf("Labels length wrong[testCase:%d], got:%d, want:%d", n, len(tags.Labels), 1) 521 | } 522 | 523 | if len(tags.Blocks) != 1 { 524 | 
t.Fatalf("Blocks length wrong[testCase:%d], got:%d, want:%d", n, len(tags.Blocks), 1) 525 | } 526 | 527 | if len(tags.Flats) != 1 { 528 | t.Fatalf("Flats length wrong[testCase:%d], got:%d, want:%d", n, len(tags.Flats), 1) 529 | } 530 | 531 | if len(tags.Comments) != 1 { 532 | t.Fatalf("Comments length wrong[testCase:%d], got:%d, want:%d", n, len(tags.Comments), 1) 533 | } 534 | } 535 | } 536 | -------------------------------------------------------------------------------- /internal/lexer/lexer.go: -------------------------------------------------------------------------------- 1 | package lexer 2 | 3 | import ( 4 | "strings" 5 | 6 | "github.com/KeisukeYamashita/go-vcl/internal/token" 7 | ) 8 | 9 | // Lexer is a struct for tokenization 10 | type Lexer struct { 11 | input string 12 | pos int 13 | readPos int 14 | char byte 15 | } 16 | 17 | // NewLexer returns the lexer with givin string input 18 | func NewLexer(input string) *Lexer { 19 | l := &Lexer{ 20 | input: input, 21 | } 22 | l.init() 23 | return l 24 | } 25 | 26 | func (l *Lexer) init() { 27 | l.readChar() 28 | } 29 | 30 | // readChar retrieves the byte from readPos 31 | func (l *Lexer) readChar() { 32 | if l.readPos >= len(l.input) { 33 | l.char = 0 34 | } else { 35 | l.char = l.input[l.readPos] 36 | } 37 | l.pos = l.readPos 38 | l.readPos++ 39 | } 40 | 41 | // readIndentifier reads the indentifier 42 | func (l *Lexer) readIndentifier() string { 43 | pos := l.pos 44 | for isLetter(l.char) || isDigit(l.char) { 45 | l.readChar() 46 | } 47 | return l.input[pos:l.pos] 48 | } 49 | 50 | func (l *Lexer) readNumber() string { 51 | pos := l.pos 52 | for isDigit(l.char) { 53 | l.readChar() 54 | } 55 | return l.input[pos:l.pos] 56 | } 57 | 58 | func (l *Lexer) readString() string { 59 | pos := l.pos + 1 60 | for l.char != 0 && l.char != ';' { 61 | l.readChar() 62 | if l.char == '"' { 63 | if l.peekChar() != '/' { 64 | break 65 | } 66 | } 67 | } 68 | 69 | return l.input[pos:l.pos] 70 | } 71 | 72 | func (l *Lexer) 
readPercentage(number string) string { 73 | l.readChar() 74 | return number + "%" 75 | } 76 | 77 | func (l *Lexer) readCommentLine() string { 78 | l.readChar() 79 | pos := l.pos + 1 // Memo(KeisukeYamashita): Remove the first white space 80 | for !isNewLine(l.char) { 81 | l.readChar() 82 | if l.char == 0 { 83 | break 84 | } 85 | } 86 | 87 | return l.input[pos:l.pos] 88 | } 89 | 90 | func (l *Lexer) peekChar() byte { 91 | if l.readPos >= len(l.input) { 92 | return 0 93 | } 94 | 95 | return l.input[l.readPos] 96 | } 97 | 98 | func (l *Lexer) curCharIs(b byte) bool { 99 | return l.char == b 100 | } 101 | 102 | func (l *Lexer) peekCharIs(b byte) bool { 103 | return l.peekChar() == b 104 | } 105 | 106 | func (l *Lexer) eatWhiteSpace() { 107 | for l.char == ' ' || l.char == '\t' || l.char == '\n' || l.char == '\r' { 108 | l.readChar() 109 | } 110 | } 111 | 112 | // NextToken ... 113 | func (l *Lexer) NextToken() token.Token { 114 | l.eatWhiteSpace() 115 | 116 | tok := token.Token{} 117 | switch l.char { 118 | case '=': 119 | if l.peekCharIs('=') { 120 | char := l.char 121 | l.readChar() 122 | literal := string(char) + string(char) 123 | tok = token.Token{Type: token.EQUAL, Literal: literal} 124 | } else { 125 | tok = token.NewToken(token.ASSIGN, l.char) 126 | } 127 | case ':': 128 | tok = token.NewToken(token.COLON, l.char) 129 | case '~': 130 | tok = token.NewToken(token.MATCH, l.char) 131 | case ',': 132 | tok = token.NewToken(token.COMMA, l.char) 133 | case ';': 134 | tok = token.NewToken(token.SEMICOLON, l.char) 135 | case '#': 136 | literal := l.readCommentLine() 137 | tok = token.Token{Type: token.HASH, Literal: literal} 138 | case '/': 139 | if l.peekCharIs('/') { 140 | l.readChar() 141 | literal := l.readCommentLine() 142 | tok = token.Token{Type: token.COMMENTLINE, Literal: literal} 143 | } else if l.peekCharIs('*') { 144 | char := l.char 145 | l.readChar() 146 | literal := string(char) + string(l.char) 147 | tok = token.Token{Type: token.LMULTICOMMENTLINE, 
Literal: literal} 148 | } 149 | case '*': 150 | if l.peekCharIs('/') { 151 | char := l.char 152 | l.readChar() 153 | literal := string(char) + string(l.char) 154 | tok = token.Token{Type: token.RMULTICOMMENTLINE, Literal: literal} 155 | } 156 | case '(': 157 | tok = token.NewToken(token.LPAREN, l.char) 158 | case ')': 159 | tok = token.NewToken(token.RPAREN, l.char) 160 | case '{': 161 | tok = token.NewToken(token.LBRACE, l.char) 162 | case '}': 163 | tok = token.NewToken(token.RBRACE, l.char) 164 | case '!': 165 | tok = token.NewToken(token.BANG, l.char) 166 | case '+': 167 | tok = token.NewToken(token.PLUS, l.char) 168 | case '"': 169 | s := l.readString() 170 | if strings.Contains(s, "/") { 171 | tok.Type = token.CIDR 172 | s = "\"" + s // CIDR format is "35.0.0.0"/24 which we have to wrap by ". 173 | } else { 174 | tok.Type = token.STRING 175 | } 176 | tok.Literal = s 177 | case '|': 178 | // it will be always || 179 | if l.peekChar() == '|' { 180 | char := l.char 181 | l.readChar() 182 | literal := string(char) + string(char) 183 | tok = token.Token{Type: token.OR, Literal: literal} 184 | } 185 | case '&': 186 | // it will be always && 187 | if l.peekChar() == '&' { 188 | char := l.char 189 | l.readChar() 190 | literal := string(char) + string(char) 191 | tok = token.Token{Type: token.AND, Literal: literal} 192 | } 193 | case 0: 194 | tok.Type = token.EOF 195 | tok.Literal = "" 196 | default: 197 | if isLetter(l.char) { 198 | tok.Literal = l.readIndentifier() 199 | tok.Type = token.LookupIndent(tok.Literal) 200 | return tok // early return not to walk step 201 | } else if isDigit(l.char) { 202 | number := l.readNumber() 203 | if l.curCharIs('%') { 204 | tok.Type = token.PERCENTAGE 205 | tok.Literal = l.readPercentage(number) 206 | return tok 207 | } 208 | 209 | tok.Literal = number 210 | tok.Type = token.INT 211 | return tok // early return not to walk step 212 | } else { 213 | tok = token.NewToken(token.ILLEGAL, l.char) 214 | } 215 | } 216 | 217 | 
l.readChar() 218 | return tok 219 | } 220 | 221 | func isLetter(char byte) bool { 222 | return 'a' <= char && char <= 'z' || 'A' <= char && char <= 'Z' || char == '_' || char == '.' 223 | } 224 | 225 | func isDigit(char byte) bool { 226 | return '0' <= char && char <= '9' 227 | } 228 | 229 | func isNewLine(char byte) bool { 230 | return char == '\n' 231 | } 232 | -------------------------------------------------------------------------------- /internal/lexer/lexer_test.go: -------------------------------------------------------------------------------- 1 | package lexer 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/KeisukeYamashita/go-vcl/internal/token" 7 | ) 8 | 9 | func TestNextToken(t *testing.T) { 10 | testCases := []struct { 11 | input string 12 | expectedTokens []struct { 13 | expectedType token.Type 14 | expectedLiteral string 15 | } 16 | }{ 17 | { 18 | `=~,; call == && || 10 "keke" false ! "35.0.0.0"/23; server1 K_backend1 50% table`, 19 | []struct { 20 | expectedType token.Type 21 | expectedLiteral string 22 | }{ 23 | {token.ASSIGN, "="}, 24 | {token.MATCH, "~"}, 25 | {token.COMMA, ","}, 26 | {token.SEMICOLON, ";"}, 27 | {token.CALL, "call"}, 28 | {token.EQUAL, "=="}, 29 | {token.AND, "&&"}, 30 | {token.OR, "||"}, 31 | {token.INT, "10"}, 32 | {token.STRING, "keke"}, 33 | {token.FALSE, "false"}, 34 | {token.BANG, "!"}, 35 | {token.CIDR, "\"35.0.0.0\"/23"}, 36 | {token.IDENT, "server1"}, 37 | {token.IDENT, "K_backend1"}, 38 | {token.PERCENTAGE, "50%"}, 39 | {token.TABLE, "table"}, 40 | }, 41 | }, 42 | { 43 | `sub pipe_if_local { 44 | if (client.ip ~ local) { 45 | return (pipe); 46 | } 47 | } 48 | `, 49 | []struct { 50 | expectedType token.Type 51 | expectedLiteral string 52 | }{ 53 | {token.SUBROUTINE, "sub"}, 54 | {token.IDENT, "pipe_if_local"}, 55 | {token.LBRACE, "{"}, 56 | {token.IF, "if"}, 57 | {token.LPAREN, "("}, 58 | {token.IDENT, "client.ip"}, 59 | {token.MATCH, "~"}, 60 | {token.IDENT, "local"}, 61 | {token.RPAREN, ")"}, 62 | {token.LBRACE, 
"{"}, 63 | {token.RETURN, "return"}, 64 | {token.LPAREN, "("}, 65 | {token.IDENT, "pipe"}, 66 | {token.RPAREN, ")"}, 67 | {token.SEMICOLON, ";"}, 68 | {token.RBRACE, "}"}, 69 | {token.RBRACE, "}"}, 70 | }, 71 | }, 72 | { 73 | `director my_dir random { 74 | // keke 75 | /* Hello */ 76 | # 3 hi 77 | .retries = 3; 78 | }`, 79 | []struct { 80 | expectedType token.Type 81 | expectedLiteral string 82 | }{ 83 | {token.DIRECTOR, "director"}, 84 | {token.IDENT, "my_dir"}, 85 | {token.IDENT, "random"}, 86 | {token.LBRACE, "{"}, 87 | {token.COMMENTLINE, "keke"}, 88 | {token.LMULTICOMMENTLINE, "/*"}, 89 | {token.IDENT, "Hello"}, 90 | {token.RMULTICOMMENTLINE, "*/"}, 91 | {token.HASH, "3 hi"}, 92 | {token.IDENT, ".retries"}, 93 | {token.ASSIGN, "="}, 94 | {token.INT, "3"}, 95 | {token.SEMICOLON, ";"}, 96 | {token.RBRACE, "}"}, 97 | }, 98 | }, 99 | { 100 | `table my_id { 101 | "key1": "value 1", 102 | }`, 103 | []struct { 104 | expectedType token.Type 105 | expectedLiteral string 106 | }{ 107 | {token.TABLE, "table"}, 108 | {token.IDENT, "my_id"}, 109 | {token.LBRACE, "{"}, 110 | {token.STRING, "key1"}, 111 | {token.COLON, ":"}, 112 | {token.STRING, "value 1"}, 113 | {token.COMMA, ","}, 114 | {token.RBRACE, "}"}, 115 | }, 116 | }, 117 | } 118 | 119 | for i, tc := range testCases { 120 | l := NewLexer(tc.input) 121 | 122 | for j, expectedToken := range tc.expectedTokens { 123 | tok := l.NextToken() 124 | if tok.Type != expectedToken.expectedType { 125 | t.Fatalf("failed[testCase:%d:%d] - wrong tokenType, want: %s(literal:%s), got: %s(literal:%s)", i+1, j+1, expectedToken.expectedType, expectedToken.expectedLiteral, tok.Type, tok.Literal) 126 | } 127 | 128 | if tok.Literal != expectedToken.expectedLiteral { 129 | t.Fatalf("failed[testCase:%d:%d] - wrong literal, want: %s, got: %s", i+1, j+1, expectedToken.expectedLiteral, tok.Literal) 130 | } 131 | } 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /internal/parser/parser.go: 
-------------------------------------------------------------------------------- 1 | package parser 2 | 3 | import ( 4 | "fmt" 5 | "strconv" 6 | 7 | "github.com/KeisukeYamashita/go-vcl/internal/ast" 8 | "github.com/KeisukeYamashita/go-vcl/internal/lexer" 9 | "github.com/KeisukeYamashita/go-vcl/internal/token" 10 | ) 11 | 12 | var precedences = map[token.Type]int{ 13 | token.EQUAL: EQUALS, 14 | token.MATCH: EQUALS, 15 | token.PLUS: SUM, 16 | token.AND: EQUALS, 17 | token.OR: EQUALS, 18 | } 19 | 20 | const ( 21 | _ int = iota 22 | LOWEST 23 | EQUALS 24 | LESSGREATER 25 | SUM 26 | PRODUCT 27 | PREFIX 28 | CALL 29 | ) 30 | 31 | type ( 32 | prefixParseFn func() ast.Expression 33 | infixParseFn func(ast.Expression) ast.Expression 34 | ) 35 | 36 | // Parser is a struct that contains a lexer and parse spec 37 | type Parser struct { 38 | l *lexer.Lexer 39 | curToken token.Token 40 | peekToken token.Token 41 | 42 | errors []error 43 | prefixParseFn map[token.Type]prefixParseFn 44 | infixParseFn map[token.Type]infixParseFn 45 | } 46 | 47 | // NewParser returns a parser by lexer 48 | func NewParser(l *lexer.Lexer) *Parser { 49 | p := &Parser{ 50 | l: l, 51 | errors: []error{}, 52 | } 53 | p.init() 54 | return p 55 | } 56 | 57 | func (p *Parser) init() { 58 | p.nextToken() 59 | p.nextToken() 60 | 61 | p.prefixParseFn = make(map[token.Type]prefixParseFn) 62 | p.registerPrefix(token.IDENT, p.parseIdentifier) 63 | p.registerPrefix(token.INT, p.parseIntegerLiteral) 64 | p.registerPrefix(token.PERCENTAGE, p.parsePercentageLiteral) 65 | p.registerPrefix(token.STRING, p.parseStringLiteral) 66 | p.registerPrefix(token.CIDR, p.parseCIDRLiteral) 67 | p.registerPrefix(token.TRUE, p.parseBoolean) 68 | p.registerPrefix(token.FALSE, p.parseBoolean) 69 | p.registerPrefix(token.BANG, p.parsePrefixExpression) 70 | p.registerPrefix(token.LPAREN, p.parseGroupedExpression) 71 | p.registerPrefix(token.IF, p.parseIfExpression) 72 | p.registerPrefix(token.SUBROUTINE, p.parseBlockExpression) 73 | 
p.registerPrefix(token.ACL, p.parseBlockExpression) 74 | p.registerPrefix(token.BACKEND, p.parseBlockExpression) 75 | p.registerPrefix(token.DIRECTOR, p.parseBlockExpression) 76 | p.registerPrefix(token.LBRACE, p.parseObjectExpression) 77 | p.registerPrefix(token.TABLE, p.parseBlockExpression) 78 | 79 | p.infixParseFn = make(map[token.Type]infixParseFn) 80 | p.registerInfix(token.MATCH, p.parseInfixExpression) 81 | p.registerInfix(token.PLUS, p.parseInfixExpression) 82 | } 83 | 84 | func (p *Parser) parseIdentifier() ast.Expression { 85 | return &ast.Identifier{ 86 | Token: p.curToken, 87 | Value: p.curToken.Literal, 88 | } 89 | } 90 | 91 | func (p *Parser) parseIntegerLiteral() ast.Expression { 92 | lit := &ast.IntegerLiteral{ 93 | Token: p.curToken, 94 | } 95 | 96 | value, err := strconv.ParseInt(p.curToken.Literal, 0, 64) 97 | if err != nil { 98 | p.errors = append(p.errors, err) 99 | return nil 100 | } 101 | 102 | lit.Value = value 103 | return lit 104 | } 105 | 106 | func (p *Parser) parseStringLiteral() ast.Expression { 107 | lit := &ast.StringLiteral{ 108 | Token: p.curToken, 109 | Value: p.curToken.Literal, 110 | } 111 | 112 | return lit 113 | } 114 | 115 | func (p *Parser) parsePercentageLiteral() ast.Expression { 116 | lit := &ast.PercentageLiteral{ 117 | Token: p.curToken, 118 | Value: p.curToken.Literal, 119 | } 120 | 121 | return lit 122 | } 123 | 124 | func (p *Parser) parseCIDRLiteral() ast.Expression { 125 | lit := &ast.CIDRLiteral{ 126 | Token: p.curToken, 127 | Value: p.curToken.Literal, 128 | } 129 | 130 | return lit 131 | } 132 | 133 | func (p *Parser) parseBoolean() ast.Expression { 134 | return &ast.BooleanLiteral{ 135 | Token: p.curToken, 136 | Value: p.curTokenIs(token.TRUE), 137 | } 138 | } 139 | 140 | func (p *Parser) parseGroupedExpression() ast.Expression { 141 | p.nextToken() 142 | expr := p.parseExpression(LOWEST) 143 | 144 | if !p.expectPeek(token.RPAREN) { 145 | return nil 146 | } 147 | return expr 148 | } 149 | 150 | func (p 
*Parser) parseInfixExpression(left ast.Expression) ast.Expression { 151 | expr := &ast.InfixExpression{ 152 | Token: p.curToken, 153 | Operator: p.curToken.Literal, 154 | Left: left, 155 | } 156 | 157 | precedence := p.curPrecedence() 158 | p.nextToken() 159 | expr.Right = p.parseExpression(precedence) 160 | return expr 161 | } 162 | 163 | func (p *Parser) parsePrefixExpression() ast.Expression { 164 | expr := &ast.PrefixExpression{ 165 | Token: p.curToken, 166 | Operator: p.curToken.Literal, 167 | } 168 | 169 | p.nextToken() 170 | expr.Right = p.parseExpression(PREFIX) 171 | return expr 172 | } 173 | 174 | func (p *Parser) parseIfExpression() ast.Expression { 175 | expr := &ast.IfExpression{ 176 | Token: p.curToken, 177 | } 178 | 179 | if !p.expectPeek(token.LPAREN) { 180 | return nil 181 | } 182 | 183 | p.nextToken() 184 | expr.Condition = p.parseExpression(LOWEST) 185 | 186 | if !p.expectPeek(token.RPAREN) { 187 | return nil 188 | } 189 | 190 | if !p.expectPeek(token.LBRACE) { 191 | return nil 192 | } 193 | 194 | expr.Consequence = p.parseBlockStatement() 195 | 196 | if p.peekTokenIs(token.ELSE) { 197 | p.nextToken() 198 | 199 | if !p.expectPeek(token.LBRACE) { 200 | return nil 201 | } 202 | 203 | expr.Alternative = p.parseBlockStatement() 204 | } 205 | 206 | return expr 207 | } 208 | 209 | func (p *Parser) parseBlockExpression() ast.Expression { 210 | expr := &ast.BlockExpression{ 211 | Token: p.curToken, 212 | } 213 | 214 | labels := []string{} 215 | for !p.peekTokenIs(token.LBRACE) && !p.peekTokenIs(token.SEMICOLON) { 216 | p.nextToken() 217 | labels = append(labels, p.curToken.Literal) 218 | } 219 | 220 | expr.Labels = labels 221 | 222 | if p.peekTokenIs(token.SEMICOLON) { 223 | return expr 224 | } 225 | 226 | if !p.peekTokenIs(token.LBRACE) { 227 | return nil 228 | } 229 | 230 | p.nextToken() 231 | 232 | expr.Blocks = p.parseBlockStatement() 233 | return expr 234 | } 235 | 236 | func (p *Parser) parseBlockStatement() *ast.BlockStatement { 237 | block := 
&ast.BlockStatement{ 238 | Token: p.curToken, 239 | } 240 | block.Statements = []ast.Statement{} 241 | 242 | p.nextToken() 243 | for !p.curTokenIs(token.RBRACE) && !p.curTokenIs(token.EOF) { 244 | stmt := p.parseStatement() 245 | if stmt != nil { 246 | block.Statements = append(block.Statements, stmt) 247 | } 248 | p.nextToken() 249 | } 250 | 251 | return block 252 | } 253 | 254 | func (p *Parser) parseObjectExpression() ast.Expression { 255 | expr := &ast.BlockExpression{ 256 | Token: p.curToken, 257 | } 258 | 259 | expr.Labels = []string{} 260 | 261 | if p.peekTokenIs(token.SEMICOLON) { 262 | return expr 263 | } 264 | 265 | expr.Blocks = p.parseBlockStatement() 266 | return expr 267 | } 268 | 269 | func (p *Parser) registerPrefix(tokenType token.Type, fn prefixParseFn) { 270 | p.prefixParseFn[tokenType] = fn 271 | } 272 | 273 | func (p *Parser) registerInfix(tokenType token.Type, fn infixParseFn) { 274 | p.infixParseFn[tokenType] = fn 275 | } 276 | 277 | // Errors return the parse errors 278 | func (p *Parser) Errors() []error { 279 | return p.errors 280 | } 281 | 282 | func (p *Parser) nextToken() { 283 | p.curToken = p.peekToken 284 | p.peekToken = p.l.NextToken() 285 | } 286 | 287 | // ParseProgram parses the program from the lexers input 288 | func (p *Parser) ParseProgram() *ast.Program { 289 | program := new(ast.Program) 290 | program.Statements = []ast.Statement{} 291 | 292 | for p.curToken.Type != token.EOF { 293 | stmt := p.parseStatement() 294 | if stmt != nil { 295 | program.Statements = append(program.Statements, stmt) 296 | } 297 | p.nextToken() 298 | } 299 | return program 300 | } 301 | 302 | func (p *Parser) parseStatement() ast.Statement { 303 | switch p.curToken.Type { 304 | case token.IDENT: 305 | switch p.peekToken.Type { 306 | case token.ASSIGN: 307 | return p.parseAssignStatement() 308 | default: 309 | return p.parseExpressionStatement() 310 | } 311 | case token.LMULTICOMMENTLINE: 312 | return p.parseMultiCommentStatement() 313 | case 
token.HASH: 314 | return p.parseCommentStatement() 315 | case token.COMMENTLINE: 316 | return p.parseCommentStatement() 317 | case token.RETURN: 318 | return p.parseReturnStatement() 319 | case token.CALL: 320 | return p.parseCallStatement() 321 | case token.STRING: 322 | switch p.peekToken.Type { 323 | case token.COLON: 324 | return p.parseAssignFieldStatement() 325 | } 326 | fallthrough 327 | default: 328 | return p.parseExpressionStatement() 329 | } 330 | } 331 | 332 | func (p *Parser) parseAssignStatement() ast.Statement { 333 | stmt := &ast.AssignStatement{ 334 | Token: p.curToken, 335 | } 336 | 337 | stmt.Name = &ast.Identifier{ 338 | Token: p.curToken, 339 | Value: p.curToken.Literal, 340 | } 341 | 342 | if !p.expectPeek(token.ASSIGN) { 343 | p.peekError(token.ASSIGN) 344 | return nil 345 | } 346 | 347 | p.nextToken() 348 | stmt.Value = p.parseExpression(LOWEST) 349 | 350 | if p.peekTokenIs(token.SEMICOLON) { 351 | p.nextToken() 352 | } 353 | 354 | return stmt 355 | } 356 | 357 | func (p *Parser) parseAssignFieldStatement() ast.Statement { 358 | stmt := &ast.AssignFieldStatement{ 359 | Token: p.curToken, 360 | } 361 | 362 | stmt.Name = &ast.Identifier{ 363 | Token: p.curToken, 364 | Value: p.curToken.Literal, 365 | } 366 | 367 | if !p.expectPeek(token.COLON) { 368 | p.peekError(token.COLON) 369 | return nil 370 | } 371 | 372 | p.nextToken() 373 | stmt.Value = p.parseExpression(LOWEST) 374 | 375 | if p.peekTokenIs(token.COMMA) { 376 | p.nextToken() 377 | } 378 | 379 | return stmt 380 | } 381 | 382 | func (p *Parser) parseReturnStatement() ast.Statement { 383 | stmt := &ast.ReturnStatement{ 384 | Token: p.curToken, 385 | } 386 | 387 | if !p.expectPeek(token.LPAREN) { 388 | p.peekError(token.ASSIGN) 389 | return nil 390 | } 391 | 392 | p.nextToken() 393 | 394 | stmt.ReturnValue = p.parseExpression(LOWEST) 395 | 396 | if !p.expectPeek(token.RPAREN) { 397 | p.peekError(token.ASSIGN) 398 | return nil 399 | } 400 | 401 | if p.peekTokenIs(token.SEMICOLON) { 402 | 
p.nextToken() 403 | } 404 | 405 | return stmt 406 | } 407 | 408 | func (p *Parser) parseCommentStatement() ast.Statement { 409 | stmt := &ast.CommentStatement{ 410 | Token: p.curToken, 411 | Value: p.curToken.Literal, 412 | } 413 | 414 | return stmt 415 | } 416 | 417 | func (p *Parser) parseMultiCommentStatement() ast.Statement { 418 | stmt := &ast.CommentStatement{ 419 | Token: p.curToken, 420 | } 421 | 422 | var value string 423 | p.nextToken() 424 | for !p.curTokenIs(token.RMULTICOMMENTLINE) { 425 | if p.curTokenIs(token.EOF) { 426 | return nil 427 | } 428 | 429 | if value == "" { 430 | value += p.curToken.Literal 431 | } else { 432 | value += " " + p.curToken.Literal 433 | } 434 | 435 | p.nextToken() 436 | } 437 | 438 | stmt.Value = value 439 | return stmt 440 | } 441 | 442 | func (p *Parser) parseCallStatement() ast.Statement { 443 | stmt := &ast.CallStatement{ 444 | Token: p.curToken, 445 | } 446 | 447 | p.nextToken() 448 | 449 | stmt.CallValue = p.parseExpression(LOWEST) 450 | 451 | if p.peekTokenIs(token.SEMICOLON) { 452 | p.nextToken() 453 | } 454 | 455 | return stmt 456 | } 457 | 458 | func (p *Parser) parseExpressionStatement() ast.Statement { 459 | stmt := &ast.ExpressionStatement{ 460 | Token: p.curToken, 461 | } 462 | stmt.Expression = p.parseExpression(LOWEST) 463 | 464 | if p.peekTokenIs(token.SEMICOLON) { 465 | p.nextToken() 466 | } 467 | 468 | return stmt 469 | } 470 | 471 | func (p *Parser) parseExpression(precedentce int) ast.Expression { 472 | prefix := p.prefixParseFn[p.curToken.Type] 473 | if prefix == nil { 474 | return nil 475 | } 476 | 477 | leftExp := prefix() 478 | 479 | for !p.peekTokenIs(token.SEMICOLON) && precedentce < p.peekPrecedence() { 480 | infix := p.infixParseFn[p.peekToken.Type] 481 | if infix == nil { 482 | return leftExp 483 | } 484 | p.nextToken() 485 | leftExp = infix(leftExp) 486 | } 487 | return leftExp 488 | } 489 | 490 | func (p *Parser) peekError(t token.Type) { 491 | err := fmt.Errorf("expected to be token to be %s, 
got %s instead", t, p.peekToken.Type) 492 | p.errors = append(p.errors, err) 493 | } 494 | 495 | func (p *Parser) expectPeek(t token.Type) bool { 496 | if p.peekTokenIs(t) { 497 | p.nextToken() 498 | return true 499 | } 500 | 501 | p.peekError(t) 502 | return false 503 | } 504 | 505 | func (p *Parser) curTokenIs(t token.Type) bool { 506 | return p.curToken.Type == t 507 | } 508 | 509 | func (p *Parser) peekTokenIs(t token.Type) bool { 510 | return p.peekToken.Type == t 511 | } 512 | 513 | func (p *Parser) peekPrecedence() int { 514 | if p, ok := precedences[p.peekToken.Type]; ok { 515 | return p 516 | } 517 | 518 | return LOWEST 519 | } 520 | 521 | func (p *Parser) curPrecedence() int { 522 | if p, ok := precedences[p.curToken.Type]; ok { 523 | return p 524 | } 525 | 526 | return LOWEST 527 | } 528 | -------------------------------------------------------------------------------- /internal/parser/parser_test.go: -------------------------------------------------------------------------------- 1 | package parser 2 | 3 | import ( 4 | "fmt" 5 | "testing" 6 | 7 | "github.com/KeisukeYamashita/go-vcl/internal/ast" 8 | "github.com/KeisukeYamashita/go-vcl/internal/lexer" 9 | ) 10 | 11 | func TestAssignStatement(t *testing.T) { 12 | testCases := []struct { 13 | input string 14 | expectedIdentifiers []struct { 15 | expectedIdentifier string 16 | expectedValue interface{} 17 | } 18 | }{ 19 | { 20 | `x = 10; 21 | y = "kekesan"; 22 | keke = true; 23 | `, 24 | []struct { 25 | expectedIdentifier string 26 | expectedValue interface{} 27 | }{ 28 | {"x", 10}, 29 | {"y", "kekesan"}, 30 | {"keke", true}, 31 | }, 32 | }, 33 | } 34 | 35 | for n, tc := range testCases { 36 | l := lexer.NewLexer(tc.input) 37 | p := NewParser(l) 38 | 39 | program := p.ParseProgram() 40 | if program == nil { 41 | t.Fatalf("ParseProgram() failed testCase[%d] got nil program", n) 42 | } 43 | 44 | if len(program.Statements) != len(tc.expectedIdentifiers) { 45 | t.Fatalf("program.Statements wrong number 
returned, got:%d, want%d", len(program.Statements), len(tc.expectedIdentifiers)) 46 | } 47 | 48 | for i, expectedIdentifiers := range tc.expectedIdentifiers { 49 | stmt := program.Statements[i] 50 | if !testAssignStatement(t, stmt, expectedIdentifiers.expectedIdentifier) { 51 | t.Fatalf("parse assigntStatement failed") 52 | } 53 | } 54 | } 55 | } 56 | 57 | func TestAssignStatement_Object(t *testing.T) { 58 | testCases := []struct { 59 | input string 60 | expectedIdentifiers []struct { 61 | expectedIdentifier string 62 | expectedStmtName string 63 | expectedStmtValue int 64 | } 65 | }{ 66 | { 67 | `x = { 68 | y = 10; 69 | };`, 70 | []struct { 71 | expectedIdentifier string 72 | expectedStmtName string 73 | expectedStmtValue int 74 | }{ 75 | {"x", "y", 10}, 76 | }, 77 | }, 78 | } 79 | 80 | for n, tc := range testCases { 81 | l := lexer.NewLexer(tc.input) 82 | p := NewParser(l) 83 | 84 | program := p.ParseProgram() 85 | if program == nil { 86 | t.Fatalf("ParseProgram() failed testCase[%d] got nil program", n) 87 | } 88 | 89 | if len(program.Statements) != len(tc.expectedIdentifiers) { 90 | t.Fatalf("program.Statements wrong number returned, got:%d, want%d", len(program.Statements), len(tc.expectedIdentifiers)) 91 | } 92 | 93 | for i, expectedIdentifiers := range tc.expectedIdentifiers { 94 | stmt := program.Statements[i] 95 | 96 | if !testAssignStatement(t, stmt, expectedIdentifiers.expectedIdentifier) { 97 | t.Fatalf("parse assigntStatement failed") 98 | } 99 | 100 | asStmt, ok := stmt.(*ast.AssignStatement) 101 | if !ok { 102 | t.Fatalf("stmt was not ast.AssignStatement, got:%T", stmt) 103 | } 104 | 105 | expr, ok := asStmt.Value.(*ast.BlockExpression) 106 | if !ok { 107 | t.Fatalf("stmt.Value was not ast.BlockExpression, got:%T", asStmt.Value) 108 | } 109 | 110 | if len(expr.Blocks.Statements) != 1 { 111 | t.Fatalf("expr.Blocks.Statements wrong number returned, got:%d, want%d", len(expr.Blocks.Statements), 1) 112 | } 113 | 114 | asStmt, ok = 
expr.Blocks.Statements[0].(*ast.AssignStatement) 115 | if !ok { 116 | t.Fatalf("stmt of block statement was not ast.AssignStatement, got:%T", stmt) 117 | } 118 | 119 | if !testAssignStatement(t, asStmt, expectedIdentifiers.expectedStmtName) { 120 | t.Fatalf("parse assigntStatement failed") 121 | } 122 | } 123 | } 124 | } 125 | 126 | func testAssignStatement(t *testing.T, s ast.Statement, name string) bool { 127 | asStmt, ok := s.(*ast.AssignStatement) 128 | if !ok { 129 | t.Errorf("s not *ast.AssignStatement, got:%T", s) 130 | return false 131 | } 132 | if asStmt.Name.Value != name { 133 | t.Errorf("asStmt.Name.Value(=Identifier) wrong, got: '%s', want: %s", asStmt.Name.Value, name) 134 | return false 135 | } 136 | 137 | return true 138 | } 139 | 140 | func TestCommentStatement(t *testing.T) { 141 | testCases := map[string]struct { 142 | input string 143 | expectedComment string 144 | }{ 145 | "with comment line by hash": {`# keke`, "keke"}, 146 | "with comment line by double slash": {"// keke", "keke"}, 147 | "with single comment by multi line": {"/* keke */", "keke"}, 148 | "with long comment by multi line": {"/* keke is happy */", "keke is happy"}, 149 | } 150 | 151 | for n, tc := range testCases { 152 | t.Run(n, func(t *testing.T) { 153 | l := lexer.NewLexer(tc.input) 154 | p := NewParser(l) 155 | 156 | program := p.ParseProgram() 157 | if program == nil { 158 | t.Fatalf("ParseProgram() failed got nil program") 159 | } 160 | 161 | if len(program.Statements) != 1 { 162 | t.Fatalf("program.Statements wrong number returned, got:%d, want:%d", len(program.Statements), 1) 163 | } 164 | 165 | stmt, ok := program.Statements[0].(*ast.CommentStatement) 166 | if !ok { 167 | t.Fatalf("stmt was not ast.CommentStatement, got:%T", program.Statements[0]) 168 | } 169 | 170 | if stmt.Value != tc.expectedComment { 171 | t.Fatalf("stmt.Value got wrong value got:%s, want:%s", stmt.Value, tc.expectedComment) 172 | } 173 | }) 174 | } 175 | } 176 | 177 | func 
TestAssignFieldStatement(t *testing.T) { 178 | testCases := []struct { 179 | input string 180 | expectedIdentifiers []struct { 181 | expectedStmtName string 182 | expectedStmtValue string 183 | } 184 | }{ 185 | { 186 | `"key": "value"`, 187 | []struct { 188 | expectedStmtName string 189 | expectedStmtValue string 190 | }{ 191 | {"key", "value"}, 192 | }, 193 | }, 194 | } 195 | 196 | for n, tc := range testCases { 197 | l := lexer.NewLexer(tc.input) 198 | p := NewParser(l) 199 | 200 | program := p.ParseProgram() 201 | if program == nil { 202 | t.Fatalf("ParseProgram() failed testCase[%d] got nil program", n) 203 | } 204 | 205 | if len(program.Statements) != len(tc.expectedIdentifiers) { 206 | t.Fatalf("program.Statements wrong number returned, got:%d, want%d", len(program.Statements), len(tc.expectedIdentifiers)) 207 | } 208 | 209 | for i, expectedIdentifiers := range tc.expectedIdentifiers { 210 | stmt := program.Statements[i] 211 | 212 | if !testAssignFieldStatement(t, stmt, expectedIdentifiers.expectedStmtName) { 213 | t.Fatalf("parse assigntStatement failed") 214 | } 215 | 216 | asStmt, ok := stmt.(*ast.AssignFieldStatement) 217 | if !ok { 218 | t.Fatalf("stmt was not ast.AssignStatement, got:%T", stmt) 219 | } 220 | 221 | expr, ok := asStmt.Value.(*ast.StringLiteral) 222 | if !ok { 223 | t.Fatalf("stmt.Value was not ast.StringLiteral, got:%T", expr.Value) 224 | } 225 | 226 | if expr.Value != expectedIdentifiers.expectedStmtValue { 227 | t.Fatalf("stmt.Value was not currect, got:%s, want:%s", expr.Value, expectedIdentifiers.expectedStmtValue) 228 | } 229 | } 230 | } 231 | } 232 | 233 | func testAssignFieldStatement(t *testing.T, s ast.Statement, name string) bool { 234 | asStmt, ok := s.(*ast.AssignFieldStatement) 235 | if !ok { 236 | t.Errorf("s not *ast.AssignFieldStatement, got:%T", s) 237 | return false 238 | } 239 | if asStmt.Name.Value != name { 240 | t.Errorf("asStmt.Name.Value(=Identifier) wrong, got: '%s', want: %s", asStmt.Name.Value, name) 241 | 
return false 242 | } 243 | 244 | return true 245 | } 246 | 247 | func TestReturnStatement(t *testing.T) { 248 | testCases := []struct { 249 | input string 250 | expectedIdentifiers []struct { 251 | expectedIdentifier string 252 | } 253 | }{ 254 | { 255 | `return (pass); 256 | return (pipe); 257 | return (cache); 258 | `, 259 | []struct { 260 | expectedIdentifier string 261 | }{ 262 | {"x"}, 263 | {"y"}, 264 | {"keke"}, 265 | }, 266 | }, 267 | } 268 | 269 | for n, tc := range testCases { 270 | l := lexer.NewLexer(tc.input) 271 | p := NewParser(l) 272 | 273 | program := p.ParseProgram() 274 | if len(program.Statements) != 3 { 275 | t.Fatalf("program.Statements wrong number returned testCase[%d], got:%d, want%d", n, len(program.Statements), 3) 276 | } 277 | 278 | for _, stmt := range program.Statements { 279 | returnStmt, ok := stmt.(*ast.ReturnStatement) 280 | if !ok { 281 | t.Fatalf("stmt not *ast.ReturnStatement testCase[%d], got:%T", n, stmt) 282 | } 283 | 284 | if returnStmt.TokenLiteral() != "return" { 285 | t.Fatalf("returnStmt.TokenLiteral not 'return' testCase[%d], got:%q", n, returnStmt.TokenLiteral()) 286 | } 287 | } 288 | } 289 | } 290 | 291 | func TestCallStatement(t *testing.T) { 292 | testCases := []struct { 293 | input string 294 | 295 | expectedIdentifier string 296 | }{ 297 | {`call pipe_if_local;`, "pipe_if_local"}, 298 | } 299 | 300 | for n, tc := range testCases { 301 | l := lexer.NewLexer(tc.input) 302 | p := NewParser(l) 303 | 304 | program := p.ParseProgram() 305 | if len(program.Statements) != 1 { 306 | t.Fatalf("program.Statements wrong number returned testCase[%d], got:%d, want%d", n, len(program.Statements), 3) 307 | } 308 | 309 | for _, stmt := range program.Statements { 310 | callStmt, ok := stmt.(*ast.CallStatement) 311 | if !ok { 312 | t.Fatalf("stmt not *ast.CallStatement testCase[%d], got:%T", n, stmt) 313 | } 314 | 315 | if callStmt.TokenLiteral() != "call" { 316 | t.Fatalf("returnStmt.TokenLiteral not 'return' testCase[%d], got:%q", 
n, callStmt.TokenLiteral()) 317 | } 318 | 319 | if callStmt.CallValue.(*ast.Identifier).Value != tc.expectedIdentifier { 320 | t.Fatalf("callStmt callValue wrong in testCase[%d], got:%s, want:%s", n, callStmt.CallValue, tc.expectedIdentifier) 321 | } 322 | } 323 | } 324 | } 325 | 326 | func TestIdentifierExpression(t *testing.T) { 327 | input := "keke;" 328 | 329 | l := lexer.NewLexer(input) 330 | p := NewParser(l) 331 | program := p.ParseProgram() 332 | 333 | if len(program.Statements) != 1 { 334 | t.Fatalf("program.Statements length is not expected, got:%d, want:%d", len(program.Statements), 1) 335 | } 336 | 337 | stmt, ok := program.Statements[0].(*ast.ExpressionStatement) 338 | if !ok { 339 | t.Fatalf("program.Statements[0] is not ast.ExpressionStatement, got:%T", program.Statements[0]) 340 | } 341 | 342 | ident, ok := stmt.Expression.(*ast.Identifier) 343 | if !ok { 344 | t.Fatalf("exp not *ast.Identifier, got:%T", stmt.Expression) 345 | } 346 | 347 | if ident.Value != "keke" { 348 | t.Errorf("ident.Value wrong, got:%s, want:%s", ident.Value, "keke") 349 | } 350 | 351 | if ident.TokenLiteral() != "keke" { 352 | t.Errorf("ident.TokenLiteral wrong, got:%s, want:%s", ident.TokenLiteral(), "keke") 353 | } 354 | } 355 | 356 | func TestIntegerLiteralExpression(t *testing.T) { 357 | testCases := map[string]struct { 358 | input string 359 | expected int64 360 | shouldError bool 361 | }{ 362 | "with single integer": {"5;", 5, false}, 363 | "with invalid float integer": {"5.0;", 5, true}, 364 | } 365 | 366 | for n, tc := range testCases { 367 | t.Run(n, func(t *testing.T) { 368 | l := lexer.NewLexer(tc.input) 369 | p := NewParser(l) 370 | program := p.ParseProgram() 371 | 372 | if len(program.Statements) != 1 { 373 | if tc.shouldError { 374 | return 375 | } 376 | t.Fatalf("program.Statements length is not expected, got:%d, want:%d", len(program.Statements), 1) 377 | } 378 | 379 | if tc.shouldError { 380 | t.Fatalf("test should fail but successed") 381 | } 382 | 383 | 
stmt, ok := program.Statements[0].(*ast.ExpressionStatement) 384 | if !ok { 385 | t.Fatalf("program.Statements[0] is not ast.ExpressionStatement, got:%T", program.Statements[0]) 386 | } 387 | 388 | ident, ok := stmt.Expression.(*ast.IntegerLiteral) 389 | if !ok { 390 | t.Fatalf("exp not *ast.Identifier, got:%T", stmt.Expression) 391 | } 392 | 393 | if ident.Value != tc.expected { 394 | t.Fatalf("ident.Value wrong, got:%d, want:%d", ident.Value, 5) 395 | } 396 | 397 | if ident.TokenLiteral() != "5" { 398 | t.Errorf("ident.TokenLiteral wrong, got:%s, want:%s", ident.TokenLiteral(), "5") 399 | } 400 | }) 401 | } 402 | } 403 | 404 | func TestPercentageLiteralExpression(t *testing.T) { 405 | input := "5%;" 406 | 407 | l := lexer.NewLexer(input) 408 | p := NewParser(l) 409 | program := p.ParseProgram() 410 | 411 | if len(program.Statements) != 1 { 412 | t.Fatalf("program.Statements length is not expected, got:%d, want:%d", len(program.Statements), 1) 413 | } 414 | 415 | stmt, ok := program.Statements[0].(*ast.ExpressionStatement) 416 | if !ok { 417 | t.Fatalf("program.Statements[0] is not ast.ExpressionStatement, got:%T", program.Statements[0]) 418 | } 419 | 420 | ident, ok := stmt.Expression.(*ast.PercentageLiteral) 421 | if !ok { 422 | t.Fatalf("exp not *ast.Identifier, got:%T", stmt.Expression) 423 | } 424 | 425 | if ident.Value != "5%" { 426 | t.Fatalf("ident.Value wrong, got:%s, want:%s", ident.Value, "5%") 427 | } 428 | 429 | if ident.TokenLiteral() != "5%" { 430 | t.Errorf("ident.TokenLiteral wrong, got:%s, want:%s", ident.TokenLiteral(), "5%") 431 | } 432 | } 433 | 434 | func TestBooleanExpression(t *testing.T) { 435 | input := `true; 436 | false;` 437 | 438 | l := lexer.NewLexer(input) 439 | p := NewParser(l) 440 | program := p.ParseProgram() 441 | 442 | if len(program.Statements) != 2 { 443 | t.Fatalf("program.Statements length is not expected, got:%d, want:%d", len(program.Statements), 2) 444 | } 445 | 446 | stmt, ok := 
program.Statements[0].(*ast.ExpressionStatement) 447 | if !ok { 448 | t.Fatalf("program.Statements[0] is not ast.ExpressionStatement, got:%T", program.Statements[0]) 449 | } 450 | 451 | ident, ok := stmt.Expression.(*ast.BooleanLiteral) 452 | if !ok { 453 | t.Fatalf("exp not *ast.Identifier, got:%T", stmt.Expression) 454 | } 455 | 456 | if ident.Value != true { 457 | t.Fatalf("ident.Value wrong, got:%t, want:%t", ident.Value, true) 458 | } 459 | 460 | if ident.TokenLiteral() != "true" { 461 | t.Errorf("ident.TokenLiteral wrong, got:%s, want:%s", ident.TokenLiteral(), "true") 462 | } 463 | } 464 | 465 | func TestParsingInfixExpressions(t *testing.T) { 466 | testCases := []struct { 467 | input string 468 | leftValue int64 469 | operator string 470 | rightValue int64 471 | }{ 472 | {"5 ~ 5", 5, "~", 5}, 473 | } 474 | 475 | for n, tc := range testCases { 476 | l := lexer.NewLexer(tc.input) 477 | p := NewParser(l) 478 | program := p.ParseProgram() 479 | 480 | if len(program.Statements) != 1 { 481 | t.Fatalf("program.Statements length is not expected[testCase:%d], got:%d, want:%d", n, len(program.Statements), 1) 482 | } 483 | 484 | stmt, ok := program.Statements[0].(*ast.ExpressionStatement) 485 | if !ok { 486 | t.Fatalf("program.Statement[0] is not ast.ExpressionStatement[testCase:%d], got:%T", n, program.Statements[0]) 487 | } 488 | 489 | exp, ok := stmt.Expression.(*ast.InfixExpression) 490 | if !ok { 491 | t.Fatalf("program.Statement[0]'s Expression is not ast.InfixExpression[testCase:%d], got:%T", n, program.Statements[0]) 492 | } 493 | 494 | if exp.Operator != tc.operator { 495 | t.Fatalf("parsingInfixExpression failed to get Operator[testCase:%d], got:%s, want:%s", n, exp.Operator, tc.operator) 496 | } 497 | 498 | if !testIntegerLiter(exp.Right, tc.rightValue) { 499 | t.Fatalf("parsingInfixExpression failed to get right value[testCase:%d], want:%d", n, tc.rightValue) 500 | } 501 | 502 | if !testIntegerLiter(exp.Right, tc.leftValue) { 503 | 
t.Fatalf("parsingInfixExpression failed to getleft value[testCase:%d], want:%d", n, tc.leftValue) 504 | } 505 | } 506 | } 507 | 508 | func TestParsingPrefixExpressions(t *testing.T) { 509 | testCases := []struct { 510 | input string 511 | operator string 512 | integerValue int64 513 | }{ 514 | {"!5", "!", 5}, 515 | } 516 | 517 | for n, tc := range testCases { 518 | l := lexer.NewLexer(tc.input) 519 | p := NewParser(l) 520 | program := p.ParseProgram() 521 | 522 | if len(program.Statements) != 1 { 523 | t.Fatalf("program.Statements wrong length got:%d, want:%d", len(program.Statements), 1) 524 | } 525 | 526 | stmt, ok := program.Statements[0].(*ast.ExpressionStatement) 527 | if !ok { 528 | t.Fatalf("program.Statement[0] is not ast.ExpressionStatement[testCase:%d], got:%T", n, program.Statements[0]) 529 | } 530 | 531 | exp, ok := stmt.Expression.(*ast.PrefixExpression) 532 | if !ok { 533 | t.Fatalf("stmt is not ast.PrefixExpression, got:%T", stmt.Expression) 534 | } 535 | 536 | if exp.Operator != tc.operator { 537 | t.Fatalf("prefixExpression operator does not match, got:%s, want:%s", exp.Operator, tc.operator) 538 | } 539 | 540 | if !testIntegerLiter(exp.Right, tc.integerValue) { 541 | t.Fatalf("prefixExpression integerValue does not match want:%d", tc.integerValue) 542 | } 543 | } 544 | } 545 | 546 | func testIntegerLiter(il ast.Expression, value int64) bool { 547 | integ, ok := il.(*ast.IntegerLiteral) 548 | if !ok { 549 | return false 550 | } 551 | 552 | if integ.Value != value { 553 | return false 554 | } 555 | 556 | if integ.TokenLiteral() != fmt.Sprintf("%d", value) { 557 | return false 558 | } 559 | 560 | return true 561 | } 562 | 563 | func TestOperatorPrecedenceParsing(t *testing.T) { 564 | testCases := []struct { 565 | input string 566 | expected string 567 | }{ 568 | {"1+(2+3)+4", "((1+(2+3)) + 4)"}, 569 | } 570 | 571 | for n, tc := range testCases { 572 | l := lexer.NewLexer(tc.input) 573 | p := NewParser(l) 574 | program := p.ParseProgram() 575 | 576 | 
if len(program.Statements) != 1 { 577 | t.Fatalf("program.Statements wrong length got:%d, want:%d", len(program.Statements), 1) 578 | } 579 | 580 | stmt, ok := program.Statements[0].(*ast.ExpressionStatement) 581 | if !ok { 582 | t.Fatalf("program.Statement[0] is not ast.ExpressionStatement[testCase:%d], got:%T", n, program.Statements[0]) 583 | } 584 | _ = stmt 585 | } 586 | } 587 | 588 | func TestIfStatement(t *testing.T) { 589 | testCases := []struct { 590 | input string 591 | }{ 592 | {"if (x ~ y) { x }"}, 593 | } 594 | 595 | for n, tc := range testCases { 596 | l := lexer.NewLexer(tc.input) 597 | p := NewParser(l) 598 | program := p.ParseProgram() 599 | 600 | if len(program.Statements) != 1 { 601 | t.Fatalf("program.Statements wrong length got:%d, want:%d", len(program.Statements), 1) 602 | } 603 | 604 | stmt, ok := program.Statements[0].(*ast.ExpressionStatement) 605 | if !ok { 606 | t.Fatalf("program.Statement[0] is not ast.ExpressionStatement[testCase:%d], got:%T", n, program.Statements[0]) 607 | } 608 | 609 | expr, ok := stmt.Expression.(*ast.IfExpression) 610 | if !ok { 611 | t.Fatalf("program.Statement[0] is not ast.IfExpression[testCase:%d], got:%T", n, stmt.Expression) 612 | } 613 | 614 | if !testInfixExpression(t, expr.Condition, "x", "~", "y") { 615 | t.Fatalf("ifStatement test failed to parse condition[testCase:%d]", n) 616 | } 617 | 618 | if len(expr.Consequence.Statements) != 1 { 619 | t.Fatalf("consequence is not 1 statements[testCase:%d], got:%d", n, len(expr.Consequence.Statements)) 620 | } 621 | 622 | consequence, ok := expr.Consequence.Statements[0].(*ast.ExpressionStatement) 623 | if !ok { 624 | t.Fatalf("statement[0] in if consequence is not ast.ExpressionStatement[testCase:%d], got:%T", n, expr.Consequence.Statements[0]) 625 | } 626 | 627 | if !testIdentifier(t, consequence.Expression, "x") { 628 | t.Fatalf("ifStatement failed to test identifier[testCase:%d]", n) 629 | } 630 | } 631 | } 632 | 633 | func TestIfElseStatement(t *testing.T) { 
634 | testCases := []struct { 635 | input string 636 | }{ 637 | {`if (x ~ y) { 638 | x 639 | } else { 640 | y 641 | }`}, 642 | } 643 | 644 | for n, tc := range testCases { 645 | l := lexer.NewLexer(tc.input) 646 | p := NewParser(l) 647 | program := p.ParseProgram() 648 | 649 | if len(program.Statements) != 1 { 650 | t.Fatalf("program.Statements wrong length got:%d, want:%d", len(program.Statements), 1) 651 | } 652 | 653 | stmt, ok := program.Statements[0].(*ast.ExpressionStatement) 654 | if !ok { 655 | t.Fatalf("program.Statement[0] is not ast.ExpressionStatement[testCase:%d], got:%T", n, program.Statements[0]) 656 | } 657 | 658 | expr, ok := stmt.Expression.(*ast.IfExpression) 659 | if !ok { 660 | t.Fatalf("program.Statement[0] is not ast.IfExpression[testCase:%d], got:%T", n, stmt.Expression) 661 | } 662 | 663 | if !testInfixExpression(t, expr.Condition, "x", "~", "y") { 664 | t.Fatalf("ifStatement test failed to parse condition[testCase:%d]", n) 665 | } 666 | 667 | if len(expr.Consequence.Statements) != 1 { 668 | t.Fatalf("consequence is not 1 statements[testCase:%d], got:%d", n, len(expr.Consequence.Statements)) 669 | } 670 | 671 | consequence, ok := expr.Consequence.Statements[0].(*ast.ExpressionStatement) 672 | if !ok { 673 | t.Fatalf("statement[0] in if consequence is not ast.ExpressionStatement[testCase:%d], got:%T", n, expr.Consequence.Statements[0]) 674 | } 675 | 676 | if !testIdentifier(t, consequence.Expression, "x") { 677 | t.Fatalf("ifStatement failed to test identifier[testCase:%d]", n) 678 | } 679 | } 680 | } 681 | 682 | func testIdentifier(t *testing.T, expr ast.Expression, value string) bool { 683 | ident, ok := expr.(*ast.Identifier) 684 | if !ok { 685 | t.Errorf("expr is not ast.Identifier, got:%T", expr) 686 | return false 687 | } 688 | 689 | if ident.Value != value { 690 | t.Errorf("identifier value wrong, want:%s, got:%s", ident.Value, value) 691 | return false 692 | } 693 | 694 | if ident.TokenLiteral() != value { 695 | t.Errorf("identifier 
token literal wrong, want:%s, got:%s", ident.TokenLiteral(), value) 696 | return false 697 | } 698 | 699 | return true 700 | } 701 | 702 | func testLiteralExpression(t *testing.T, expr ast.Expression, expected interface{}) bool { 703 | switch v := expected.(type) { 704 | case int: 705 | return testIntegerLiter(expr, int64(v)) 706 | case int64: 707 | return testIntegerLiter(expr, v) 708 | case string: 709 | return testIdentifier(t, expr, v) 710 | } 711 | 712 | return false 713 | } 714 | 715 | func testInfixExpression(t *testing.T, expr ast.Expression, left interface{}, operator string, right interface{}) bool { 716 | opExp, ok := expr.(*ast.InfixExpression) 717 | if !ok { 718 | t.Errorf("expr is not ast.InfixExpression, got:%T", expr) 719 | return false 720 | } 721 | 722 | if !testLiteralExpression(t, opExp.Left, left) { 723 | t.Errorf("operationExpression.Right is not expected literal expression, right:%s, got:%s", opExp.Left, left) 724 | return false 725 | } 726 | 727 | if opExp.Operator != operator { 728 | t.Errorf("expr.Operator is wrong, got:%s, want:%s", opExp.Operator, operator) 729 | return false 730 | } 731 | 732 | if !testLiteralExpression(t, opExp.Right, right) { 733 | t.Errorf("operationExpression.Right is not expected literal expression, right:%s, got:%s", opExp.Right, right) 734 | return false 735 | } 736 | 737 | return true 738 | } 739 | 740 | func TestBlockStatement(t *testing.T) { 741 | testCases := map[string]struct { 742 | input string 743 | expectedLabels []string 744 | blockType string 745 | blockIdentifier []string 746 | }{ 747 | "with single block sub": {"sub pipe_if_local { x }", []string{"pipe_if_local"}, "sub", []string{"x"}}, 748 | "with single block acl": {"acl local { \"localhost\"; }", []string{"local"}, "acl", []string{"localhost"}}, 749 | "with two statement acl": {"acl local { \"local\"; \"localhost\"}", []string{"local"}, "acl", []string{"local", "localhost"}}, 750 | "with backend statement": {"backend server1 { .host = 
\"localhost\"}", []string{"server"}, "backend", []string{}}, 751 | "with none backend": {"backend default none;", []string{"default", "none"}, "backend", []string{}}, 752 | } 753 | 754 | for n, tc := range testCases { 755 | t.Run(n, func(t *testing.T) { 756 | l := lexer.NewLexer(tc.input) 757 | p := NewParser(l) 758 | program := p.ParseProgram() 759 | 760 | if len(program.Statements) != 1 { 761 | t.Fatalf("program.Statements wrong length got:%d, want:%d", len(program.Statements), 1) 762 | } 763 | 764 | stmt, ok := program.Statements[0].(*ast.ExpressionStatement) 765 | if !ok { 766 | t.Fatalf("program.Statement[0] is not ast.ExpressionStatement, got:%T", program.Statements[0]) 767 | } 768 | 769 | expr, ok := stmt.Expression.(*ast.BlockExpression) 770 | if !ok { 771 | t.Fatalf("program.Statement[0] is not ast.BlockExpression, got:%T", stmt.Expression) 772 | } 773 | 774 | if len(expr.Labels) != len(tc.expectedLabels) { 775 | t.Fatalf("blockExpression labels length does not match, got:%d, want:%d", len(expr.Labels), len(tc.expectedLabels)) 776 | } 777 | 778 | for idx, identifier := range tc.blockIdentifier { 779 | block, ok := expr.Blocks.Statements[idx].(*ast.ExpressionStatement) 780 | if !ok { 781 | t.Fatalf("statement[%d] in if consequence is not ast.ExpressionStatement, got:%T", idx, expr.Blocks.Statements[0]) 782 | } 783 | 784 | switch block.Expression.(type) { 785 | case *ast.Identifier: 786 | if !testIdentifier(t, block.Expression, identifier) { 787 | t.Fatalf("blockExpression failed to test identifier") 788 | } 789 | case *ast.StringLiteral: 790 | if !testStringLiteral(t, block.Expression, identifier) { 791 | t.Fatalf("blockExpression failed to test stringLiteral") 792 | } 793 | } 794 | } 795 | }) 796 | } 797 | } 798 | 799 | func testStringLiteral(t *testing.T, expr ast.Expression, value string) bool { 800 | opExp, ok := expr.(*ast.StringLiteral) 801 | if !ok { 802 | t.Errorf("expr is not ast.InfixExpression, got:%T", expr) 803 | return false 804 | } 805 | 806 
| if opExp.Value != value { 807 | t.Errorf("value of string literal wrong, got:%s, want:%s", opExp.Value, value) 808 | return false 809 | } 810 | 811 | return true 812 | } 813 | -------------------------------------------------------------------------------- /internal/schema/schema.go: -------------------------------------------------------------------------------- 1 | package schema 2 | 3 | // File is the root of the data structure 4 | type File struct { 5 | Body Body 6 | } 7 | 8 | // Body contains multiple attributes and blocks 9 | type Body interface{} 10 | 11 | // Attributes are like field of objects 12 | type Attributes map[string]*Attribute 13 | 14 | // Blocks are block types containing other block 15 | type Blocks []*Block 16 | 17 | // Flats are attributes that are not key-value 18 | type Flats []interface{} 19 | 20 | // Comments are comment lines 21 | type Comments []string 22 | 23 | // Block ais a structure which contains block header, labels and body 24 | type Block struct { 25 | Type string 26 | Labels []string 27 | Body Body 28 | } 29 | 30 | // BodySchema represents the desired structure of a body. 31 | type BodySchema struct { 32 | Attributes []AttributeSchema 33 | Blocks []BlockHeaderSchema 34 | } 35 | 36 | // Attribute are field of the object 37 | type Attribute struct { 38 | Name string 39 | Value interface{} 40 | } 41 | 42 | // BodyContent is a content from body 43 | type BodyContent struct { 44 | Attributes Attributes 45 | Blocks Blocks 46 | Flats Flats 47 | Comments Comments 48 | } 49 | 50 | // AttributeSchema is the desired attribute 51 | type AttributeSchema struct { 52 | Name string 53 | Required bool 54 | } 55 | 56 | // BlockHeaderSchema is the desired block 57 | type BlockHeaderSchema struct { 58 | Type string 59 | LabelNames []string 60 | Body Body 61 | } 62 | 63 | // ByType transforms the receiving block sequence into a map from type 64 | // name to block sequences of only that type. 
65 | func (bs Blocks) ByType() map[string]Blocks { 66 | ret := make(map[string]Blocks) 67 | for _, b := range bs { 68 | ty := b.Type 69 | if ret[ty] == nil { 70 | ret[ty] = make(Blocks, 0, 1) 71 | } 72 | 73 | ret[ty] = append(ret[ty], b) 74 | } 75 | 76 | return ret 77 | } 78 | -------------------------------------------------------------------------------- /internal/token/token.go: -------------------------------------------------------------------------------- 1 | package token 2 | 3 | // Token defineds a single VCL token 4 | type Token struct { 5 | Type Type 6 | Literal string 7 | } 8 | 9 | // Type is a set of lexical tokens of the VCL 10 | type Type string 11 | 12 | const ( 13 | ILLEGAL = "ILLEGAL" 14 | EOF = "EOF" 15 | 16 | IDENT = "IDENT" 17 | INT = "INT" 18 | PERCENTAGE = "PERCENTAGE" 19 | STRING = "STRING" 20 | CIDR = "CIDR" 21 | TRUE = "TRUE" 22 | FALSE = "FALSE" 23 | 24 | ASSIGN = "=" 25 | MATCH = "~" 26 | PLUS = "+" 27 | BANG = "!" 28 | EQUAL = "==" 29 | AND = "&&" 30 | OR = "||" 31 | 32 | COMMA = "," 33 | SEMICOLON = ";" 34 | COLON = ":" 35 | PERCENT = "%" 36 | HASH = "#" 37 | 38 | COMMENTLINE = "//" 39 | LMULTICOMMENTLINE = "/*" 40 | RMULTICOMMENTLINE = "*/" 41 | LPAREN = "(" 42 | RPAREN = ")" 43 | LBRACE = "{" 44 | RBRACE = "}" 45 | 46 | IF = "IF" 47 | ELSE = "ELSE" 48 | 49 | RETURN = "RETURN" 50 | IMPORT = "IMPORT" 51 | TABLE = "TABLE" 52 | ACL = "ACL" 53 | BACKEND = "BACKEND" 54 | SUBROUTINE = "SUBROUTINE" 55 | CALL = "CALL" 56 | DIRECTOR = "DIRECTOR" 57 | ) 58 | 59 | // NewToken returns a token from token type and current char input 60 | func NewToken(tokenType Type, char byte) Token { 61 | return Token{ 62 | Type: tokenType, 63 | Literal: string(char), 64 | } 65 | } 66 | 67 | var keywords = map[string]Type{ 68 | "sub": SUBROUTINE, 69 | "call": CALL, 70 | "true": TRUE, 71 | "false": FALSE, 72 | "if": IF, 73 | "else": ELSE, 74 | "return": RETURN, 75 | "table": TABLE, 76 | "import": IMPORT, 77 | "acl": ACL, 78 | "backend": BACKEND, 79 | "director": 
DIRECTOR, 80 | } 81 | 82 | // LookupIndent returns keywork if hit from the identifier. 83 | func LookupIndent(indent string) Type { 84 | if tokenType, ok := keywords[indent]; ok { 85 | return tokenType 86 | } 87 | 88 | return IDENT 89 | } 90 | -------------------------------------------------------------------------------- /internal/traversal/traversal.go: -------------------------------------------------------------------------------- 1 | package traversal 2 | 3 | import ( 4 | "github.com/KeisukeYamashita/go-vcl/internal/ast" 5 | "github.com/KeisukeYamashita/go-vcl/internal/schema" 6 | ) 7 | 8 | // Content retrives from ast.Program 9 | func Content(prog *ast.Program) *schema.BodyContent { 10 | b := convertBody(prog.Statements) 11 | return b 12 | } 13 | 14 | // BodyContent retrives body content from body 15 | func BodyContent(body schema.Body) *schema.BodyContent { 16 | return body.(*schema.BodyContent) 17 | } 18 | 19 | // Contents will ast.Program to schema 20 | func convertBody(stmts []ast.Statement) *schema.BodyContent { 21 | attrs := make(map[string]*schema.Attribute) 22 | var blocks schema.Blocks 23 | flats := []interface{}{} 24 | comments := []string{} 25 | 26 | for _, stmt := range stmts { 27 | switch v := stmt.(type) { 28 | case *ast.AssignStatement: 29 | var isBlock bool 30 | var value interface{} 31 | switch lit := v.Value.(type) { 32 | case *ast.StringLiteral: 33 | value = lit.Value 34 | case *ast.CIDRLiteral: 35 | value = lit.Value 36 | case *ast.PercentageLiteral: 37 | value = lit.Value 38 | case *ast.BooleanLiteral: 39 | value = lit.Value 40 | case *ast.IntegerLiteral: 41 | value = lit.Value 42 | case *ast.BlockExpression: 43 | isBlock = true 44 | body := convertBody(lit.Blocks.Statements) 45 | block := &schema.Block{ 46 | Body: body, 47 | } 48 | block.Type = v.TokenLiteral() 49 | blocks = append(blocks, block) 50 | case *ast.Identifier: 51 | value = lit.Value 52 | default: 53 | panic("cannot pass invalid argument which is no a literal") 54 | } 55 | 
56 | if isBlock == false { 57 | attrs[v.Name.Value] = &schema.Attribute{ 58 | Name: v.Name.Value, 59 | Value: value, 60 | } 61 | } 62 | case *ast.AssignFieldStatement: 63 | var value interface{} 64 | switch lit := v.Value.(type) { 65 | case *ast.StringLiteral: 66 | value = lit.Value 67 | } 68 | 69 | attrs[v.Name.Value] = &schema.Attribute{ 70 | Name: v.Name.Value, 71 | Value: value, 72 | } 73 | case *ast.ExpressionStatement: 74 | switch expr := v.Expression.(type) { 75 | case *ast.BlockExpression: 76 | body := convertBody(expr.Blocks.Statements) 77 | block := &schema.Block{ 78 | Body: body, 79 | } 80 | 81 | block.Type = expr.TokenLiteral() 82 | if len(expr.Labels) > 0 { 83 | block.Labels = expr.Labels 84 | } 85 | 86 | if block.Type == "{" { 87 | // this is flatten block 88 | flats = append(flats, block) 89 | } else { 90 | blocks = append(blocks, block) 91 | } 92 | case *ast.StringLiteral: 93 | flats = append(flats, expr.Value) 94 | case *ast.CIDRLiteral: 95 | flats = append(flats, expr.Value) 96 | case *ast.BooleanLiteral: 97 | flats = append(flats, expr.Value) 98 | case *ast.IntegerLiteral: 99 | flats = append(flats, expr.Value) 100 | } 101 | case *ast.CommentStatement: 102 | comments = append(comments, v.TokenLiteral()) 103 | } 104 | } 105 | 106 | body := &schema.BodyContent{ 107 | Attributes: attrs, 108 | Blocks: blocks, 109 | Flats: flats, 110 | Comments: comments, 111 | } 112 | 113 | return body 114 | } 115 | -------------------------------------------------------------------------------- /internal/traversal/traversal_test.go: -------------------------------------------------------------------------------- 1 | package traversal 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/KeisukeYamashita/go-vcl/internal/lexer" 7 | "github.com/KeisukeYamashita/go-vcl/internal/parser" 8 | ) 9 | 10 | func TestContents(t *testing.T) { 11 | testCases := []struct { 12 | input string 13 | expectedAttrCount int 14 | expectedBlockCount int 15 | }{ 16 | { 17 | `x = 10`, 18 | 1, 
19 | 0, 20 | }, 21 | { 22 | `acl type name { 23 | "local" 24 | }`, 25 | 0, 26 | 1, 27 | }, { 28 | `sub pipe_if_local { x }`, 29 | 0, 30 | 1, 31 | }, 32 | } 33 | 34 | for n, tc := range testCases { 35 | l := lexer.NewLexer(tc.input) 36 | p := parser.NewParser(l) 37 | 38 | program := p.ParseProgram() 39 | content := Content(program) 40 | if len(content.Attributes) != tc.expectedAttrCount { 41 | t.Fatalf("contents.Attributes length failed[testcase:%d], got:%d, want:%d", n, len(content.Attributes), tc.expectedAttrCount) 42 | } 43 | 44 | if len(content.Blocks) != tc.expectedBlockCount { 45 | t.Fatalf("contents.Blocks length failed[testcase:%d], got:%d, want:%d", n, len(content.Blocks), tc.expectedBlockCount) 46 | } 47 | } 48 | } 49 | 50 | func TestConvertBody(t *testing.T) { 51 | testCases := []struct { 52 | input string 53 | expectedAttrCount int 54 | expectedBlockCount int 55 | }{ 56 | { 57 | `x = 10`, 58 | 1, 59 | 0, 60 | }, 61 | { 62 | `acl type name { 63 | "local" 64 | }`, 65 | 0, 66 | 1, 67 | }, { 68 | `sub pipe_if_local { x }`, 69 | 0, 70 | 1, 71 | }, 72 | } 73 | 74 | for n, tc := range testCases { 75 | l := lexer.NewLexer(tc.input) 76 | p := parser.NewParser(l) 77 | program := p.ParseProgram() 78 | content := convertBody(program.Statements) 79 | if len(content.Attributes) != tc.expectedAttrCount { 80 | t.Fatalf("contents.Attributes length failed[testcase:%d], got:%d, want:%d", n, len(content.Attributes), tc.expectedAttrCount) 81 | } 82 | 83 | if len(content.Blocks) != tc.expectedBlockCount { 84 | t.Fatalf("contents.Blocks length failed[testcase:%d], got:%d, want:%d", n, len(content.Blocks), tc.expectedBlockCount) 85 | } 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /vcl/vcl.go: -------------------------------------------------------------------------------- 1 | package vcl 2 | 3 | import ( 4 | "github.com/KeisukeYamashita/go-vcl/internal/decoder" 5 | "github.com/KeisukeYamashita/go-vcl/internal/lexer" 6 | 
"github.com/KeisukeYamashita/go-vcl/internal/parser" 7 | ) 8 | 9 | // Decode ... 10 | func Decode(bs []byte, val interface{}) []error { 11 | p := getParser(bs) 12 | prog := p.ParseProgram() 13 | return decoder.Decode(prog, val) 14 | } 15 | 16 | func getParser(bs []byte) *parser.Parser { 17 | l := lexer.NewLexer(string(bs)) 18 | return parser.NewParser(l) 19 | } 20 | -------------------------------------------------------------------------------- /vcl/vcl_test.go: -------------------------------------------------------------------------------- 1 | package vcl 2 | 3 | import ( 4 | "reflect" 5 | "testing" 6 | ) 7 | 8 | func TestDecode(t *testing.T) { 9 | type ACL struct { 10 | Type string `vcl:"type,label"` 11 | Endpoints []string `vcl:"endpoints,flat"` 12 | } 13 | 14 | type Root struct { 15 | ACLs []*ACL `vcl:"acl,block"` 16 | } 17 | 18 | testCases := []struct { 19 | input []byte 20 | val interface{} 21 | expectedVal interface{} 22 | }{ 23 | { 24 | []byte(`acl local { 25 | "localhost" 26 | }`), 27 | &Root{}, 28 | &Root{ACLs: []*ACL{&ACL{Type: "local", Endpoints: []string{"localhost"}}}}, 29 | }, 30 | } 31 | 32 | for n, tc := range testCases { 33 | errs := Decode(tc.input, tc.val) 34 | if len(errs) > 0 { 35 | t.Fatalf("decode failed with error[testcase:%d], error:%v", n, errs) 36 | } 37 | 38 | if !reflect.DeepEqual(tc.val, tc.expectedVal) { 39 | t.Fatalf("decode got wrong value, got:%v, want:%v", tc.val, tc.expectedVal) 40 | } 41 | } 42 | } 43 | --------------------------------------------------------------------------------