├── .gazelcfg.json ├── .gitignore ├── .travis.yml ├── BUILD ├── LICENSE ├── README.rst ├── WORKSPACE ├── gazel ├── BUILD ├── config.go ├── diff.go ├── gazel.go ├── generator.go └── sourcerer.go └── vendor ├── BUILD ├── github.com ├── bazelbuild │ ├── LICENSE │ └── buildifier │ │ └── core │ │ ├── build_defs.bzl │ │ ├── lex.go │ │ ├── parse.y │ │ ├── parse.y.go │ │ ├── print.go │ │ ├── quote.go │ │ ├── rewrite.go │ │ ├── rule.go │ │ ├── syntax.go │ │ ├── tables.go │ │ ├── walk.go │ │ └── y.output └── golang │ └── glog │ ├── LICENSE │ ├── README │ ├── glog.go │ └── glog_file.go └── go ├── LICENSE └── path └── filepath └── path.go /.gazelcfg.json: -------------------------------------------------------------------------------- 1 | { 2 | "GoPrefix": "github.com/mikedanese/gazel" 3 | } 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | bazel-* 2 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | dist: trusty 2 | sudo: required 3 | 4 | # Install Bazel and set up GOPATH. 5 | before_install: 6 | - wget https://github.com/bazelbuild/bazel/releases/download/0.4.4/bazel_0.4.4-linux-x86_64.deb 7 | - sudo dpkg -i bazel_0.4.4-linux-x86_64.deb 8 | - mkdir -p $GOPATH/src/github.com/mikedanese 9 | - mv $TRAVIS_BUILD_DIR $GOPATH/src/github.com/mikedanese 10 | - cd $GOPATH/src/github.com/mikedanese/gazel 11 | 12 | install: 13 | - go install ./... 14 | 15 | script: 16 | - gazel --print-diff --validate 17 | - bazel build //... 
18 | -------------------------------------------------------------------------------- /BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) 4 | 5 | load("@io_bazel_rules_go//go:def.bzl", "go_prefix") 6 | 7 | go_prefix("github.com/mikedanese/gazel") 8 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 
30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 
62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. 
In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. 
We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright 2014 The Kubernetes Authors. 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | gazel - a BUILD file generator for go and bazel 2 | =============================================== 3 | 4 | Requirements: 5 | ############# 6 | 7 | * Your project must be somewhat compatible with go tool because 8 | gazel uses go tool to parse your import tree. 9 | * You must have a **GOPATH** and **GOROOT** setup and your project must 10 | be in the correct location in your **GOPATH**. 11 | * Your ``./vendor`` directory may not contain ``BUILD`` files. 12 | 13 | Usage: 14 | ###### 15 | 16 | 1. Get gazel by running ``go get github.com/mikedanese/gazel/gazel``. 17 | 18 | 2. Create a ``.gazelcfg.json`` in the root of the repository. For the 19 | gazel repository, the ``.gazelcfg.json`` would look like: 20 | 21 | .. code-block:: json 22 | 23 | { 24 | "GoPrefix": "github.com/mikedanese/gazel", 25 | "SrcDirs": [ 26 | "./gazel" 27 | ], 28 | "SkippedPaths": [ 29 | ".*foobar(baz)?.*$" 30 | ] 31 | } 32 | 33 | 3. Run gazel: 34 | 35 | .. 
code-block:: bash 36 | 37 | $ gazel -root=$GOPATH/src/github.com/mikedanese/gazel 38 | 39 | Defaults: 40 | ######### 41 | 42 | * **SrcDirs** in ``.gazelcfg.json`` defaults to ``["./"]`` 43 | * ``-root`` option defaults to the current working directory 44 | 45 | Automanagement: 46 | ############### 47 | 48 | gazel reconciles rules that have the "**automanaged**" tag. If 49 | you no longer want gazel to manage a rule, you can remove the 50 | **automanaged** tag and gazel will no longer manage that rule. 51 | 52 | gazel only manages srcs, deps, and library attributes of a 53 | rule after initial creation so you can add and manage other 54 | attributes like data and copts and gazel will respect your 55 | changes. 56 | 57 | gazel automatically formats all ``BUILD`` files in your repository 58 | except for those matching **SkippedPaths**. 59 | 60 | Adding "sources" rules: 61 | ####################### 62 | 63 | If you set "**AddSourcesRules**": ``true`` in your ``.gazelcfg.json``, 64 | gazel will create "**package-srcs**" and "**all-srcs**" rules in every 65 | package. 66 | 67 | The "**package-srcs**" rule is a glob matching all files in the 68 | package recursively, but not any files owned by packages in 69 | subdirectories. 70 | 71 | The "**all-srcs**" rule includes both the "**package-srcs**" rule and 72 | the "**all-srcs**" rules of all subpackages; i.e. **//:all-srcs** will 73 | include all files in your repository. 74 | 75 | The "**package-srcs**" rule defaults to private visibility, 76 | since it is safer to depend on the "**all-srcs**" rule: if a 77 | subpackage is added, the "**package-srcs**" rule will no longer 78 | include those files. 79 | 80 | You can remove the "**automanaged**" tag from the "**package-srcs**" 81 | rule if you need to modify the glob (such as adding excludes). 82 | It's recommended that you leave the "**all-srcs**" rule 83 | automanaged. 
84 | 85 | Getting latest stable version: 86 | ############################## 87 | 88 | The latest tagged release of gazel is v14. To get the latest 89 | stable version of gazel run: 90 | 91 | .. code-block:: bash 92 | 93 | $ go get -u gopkg.in/mikedanese/gazel.v14/gazel 94 | 95 | Validating BUILD files in CI: 96 | ############################# 97 | 98 | If you run gazel with ``--validate``, it will not update any ``BUILD`` files, but it 99 | will exit nonzero if any ``BUILD`` files are out-of-date. You can add ``--print-diff`` 100 | to print out the changes needed. 101 | -------------------------------------------------------------------------------- /WORKSPACE: -------------------------------------------------------------------------------- 1 | workspace(name = "com_github_mikedanese_gazel") 2 | 3 | git_repository( 4 | name = "io_bazel_rules_go", 5 | commit = "adfad77dabd529ed9d90a4e7b823323628e908d9", 6 | remote = "https://github.com/bazelbuild/rules_go.git", 7 | ) 8 | 9 | load("@io_bazel_rules_go//go:def.bzl", "go_repositories") 10 | 11 | go_repositories() 12 | -------------------------------------------------------------------------------- /gazel/BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) 4 | 5 | load( 6 | "@io_bazel_rules_go//go:def.bzl", 7 | "go_binary", 8 | "go_library", 9 | ) 10 | 11 | go_binary( 12 | name = "gazel", 13 | library = ":go_default_library", 14 | tags = ["automanaged"], 15 | ) 16 | 17 | go_library( 18 | name = "go_default_library", 19 | srcs = [ 20 | "config.go", 21 | "diff.go", 22 | "gazel.go", 23 | "generator.go", 24 | "sourcerer.go", 25 | ], 26 | tags = ["automanaged"], 27 | deps = [ 28 | "//vendor:github.com/bazelbuild/buildifier/core", 29 | "//vendor:github.com/golang/glog", 30 | "//vendor:go/path/filepath", 31 | ], 32 | ) 33 | -------------------------------------------------------------------------------- 
/gazel/config.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "encoding/json" 5 | "io/ioutil" 6 | ) 7 | 8 | type Cfg struct { 9 | GoPrefix string 10 | // evaluated recursively, defaults to ["."] 11 | SrcDirs []string 12 | // regexps that match packages to skip 13 | SkippedPaths []string 14 | // whether to add "pkg-srcs" and "all-srcs" filegroups 15 | // note that this operates on the entire tree (not just SrcsDirs) but skips anything matching SkippedPaths 16 | AddSourcesRules bool 17 | // whether to have multiple build files in vendor/ or just one. 18 | VendorMultipleBuildFiles bool 19 | // whether to manage kubernetes' pkg/generated/openapi. 20 | K8sOpenAPIGen bool 21 | } 22 | 23 | func ReadCfg(cfgPath string) (*Cfg, error) { 24 | b, err := ioutil.ReadFile(cfgPath) 25 | if err != nil { 26 | return nil, err 27 | } 28 | var cfg Cfg 29 | if err := json.Unmarshal(b, &cfg); err != nil { 30 | return nil, err 31 | } 32 | defaultCfg(&cfg) 33 | return &cfg, nil 34 | } 35 | 36 | func defaultCfg(c *Cfg) { 37 | if len(c.SrcDirs) == 0 { 38 | c.SrcDirs = []string{"."} 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /gazel/diff.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "io/ioutil" 5 | "os" 6 | "os/exec" 7 | ) 8 | 9 | func Diff(left, right []byte) error { 10 | lf, err := ioutil.TempFile("/tmp", "actual-file-") 11 | if err != nil { 12 | return err 13 | } 14 | defer lf.Close() 15 | defer os.Remove(lf.Name()) 16 | 17 | rf, err := ioutil.TempFile("/tmp", "expected-file-") 18 | if err != nil { 19 | return err 20 | } 21 | defer rf.Close() 22 | defer os.Remove(rf.Name()) 23 | 24 | _, err = lf.Write(left) 25 | if err != nil { 26 | return err 27 | } 28 | lf.Close() 29 | 30 | _, err = rf.Write(right) 31 | if err != nil { 32 | return err 33 | } 34 | rf.Close() 35 | 36 | cmd := 
exec.Command("/usr/bin/diff", "-u", lf.Name(), rf.Name()) 37 | cmd.Stdout = os.Stdout 38 | cmd.Stderr = os.Stderr 39 | cmd.Run() 40 | 41 | return nil 42 | } 43 | -------------------------------------------------------------------------------- /gazel/gazel.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bytes" 5 | "flag" 6 | "fmt" 7 | "go/build" 8 | "io/ioutil" 9 | "os" 10 | "path/filepath" 11 | "reflect" 12 | "regexp" 13 | "runtime" 14 | "sort" 15 | "strings" 16 | 17 | "go/path/filepath" 18 | 19 | bzl "github.com/bazelbuild/buildifier/core" 20 | "github.com/golang/glog" 21 | ) 22 | 23 | const ( 24 | vendorPath = "vendor/" 25 | automanagedTag = "automanaged" 26 | ) 27 | 28 | var ( 29 | root = flag.String("root", ".", "root of go source") 30 | dryRun = flag.Bool("dry-run", false, "run in dry mode") 31 | printDiff = flag.Bool("print-diff", false, "print diff to stdout") 32 | validate = flag.Bool("validate", false, "run in dry mode and exit nonzero if any BUILD files need to be updated") 33 | cfgPath = flag.String("cfg-path", ".gazelcfg.json", "path to gazel config (relative paths interpreted relative to -repo.") 34 | ) 35 | 36 | func main() { 37 | flag.Parse() 38 | flag.Set("alsologtostderr", "true") 39 | if *root == "" { 40 | glog.Fatalf("-root argument is required") 41 | } 42 | if *validate { 43 | *dryRun = true 44 | } 45 | v, err := NewVendorer(*root, *cfgPath, *dryRun) 46 | if err != nil { 47 | glog.Fatalf("unable to build vendorer: %v", err) 48 | } 49 | if err := os.Chdir(v.root); err != nil { 50 | glog.Fatalf("cannot chdir into root %q: %v", v.root, err) 51 | } 52 | 53 | if err := v.walkVendor(); err != nil { 54 | glog.Fatalf("err walking vendor: %v", err) 55 | } 56 | if err := v.walkRepo(); err != nil { 57 | glog.Fatalf("err walking repo: %v", err) 58 | } 59 | if err := v.walkGenerated(); err != nil { 60 | glog.Fatalf("err walking generated: %v", err) 61 | } 62 | if _, err := 
v.walkSource("."); err != nil { 63 | glog.Fatalf("err walking source: %v", err) 64 | } 65 | written := 0 66 | if written, err = v.reconcileAllRules(); err != nil { 67 | glog.Fatalf("err reconciling rules: %v", err) 68 | } 69 | if *validate && written > 0 { 70 | fmt.Fprintf(os.Stderr, "\n%d BUILD files not up-to-date.\n", written) 71 | os.Exit(1) 72 | } 73 | } 74 | 75 | type Vendorer struct { 76 | ctx *build.Context 77 | icache map[icacheKey]icacheVal 78 | skippedPaths []*regexp.Regexp 79 | dryRun bool 80 | root string 81 | cfg *Cfg 82 | newRules map[string][]*bzl.Rule // package path -> list of rules to add or update 83 | managedAttrs []string 84 | } 85 | 86 | func NewVendorer(root, cfgPath string, dryRun bool) (*Vendorer, error) { 87 | absRoot, err := filepath.Abs(root) 88 | if err != nil { 89 | return nil, fmt.Errorf("could not get absolute path: %v", err) 90 | } 91 | if !filepath.IsAbs(cfgPath) { 92 | cfgPath = filepath.Join(absRoot, cfgPath) 93 | } 94 | cfg, err := ReadCfg(cfgPath) 95 | if err != nil { 96 | return nil, err 97 | } 98 | 99 | v := Vendorer{ 100 | ctx: context(), 101 | dryRun: dryRun, 102 | root: absRoot, 103 | icache: map[icacheKey]icacheVal{}, 104 | cfg: cfg, 105 | newRules: make(map[string][]*bzl.Rule), 106 | managedAttrs: []string{"srcs", "deps", "library"}, 107 | } 108 | 109 | for _, sp := range cfg.SkippedPaths { 110 | r, err := regexp.Compile(sp) 111 | if err != nil { 112 | return nil, err 113 | } 114 | v.skippedPaths = append(v.skippedPaths, r) 115 | } 116 | for _, builtinSkip := range []string{ 117 | "^\\.git", 118 | "^bazel-*", 119 | } { 120 | v.skippedPaths = append(v.skippedPaths, regexp.MustCompile(builtinSkip)) 121 | } 122 | 123 | return &v, nil 124 | 125 | } 126 | 127 | type icacheKey struct { 128 | path, srcDir string 129 | } 130 | 131 | type icacheVal struct { 132 | pkg *build.Package 133 | err error 134 | } 135 | 136 | func (v *Vendorer) importPkg(path string, srcDir string) (*build.Package, error) { 137 | k := icacheKey{path: 
path, srcDir: srcDir} 138 | if val, ok := v.icache[k]; ok { 139 | return val.pkg, val.err 140 | } 141 | 142 | // cache miss 143 | pkg, err := v.ctx.Import(path, srcDir, build.ImportComment) 144 | v.icache[k] = icacheVal{pkg: pkg, err: err} 145 | return pkg, err 146 | } 147 | 148 | func writeHeaders(file *bzl.File) { 149 | pkgRule := bzl.Rule{ 150 | &bzl.CallExpr{ 151 | X: &bzl.LiteralExpr{Token: "package"}, 152 | }, 153 | } 154 | pkgRule.SetAttr("default_visibility", asExpr([]string{"//visibility:public"})) 155 | 156 | file.Stmt = append(file.Stmt, 157 | []bzl.Expr{ 158 | pkgRule.Call, 159 | &bzl.CallExpr{ 160 | X: &bzl.LiteralExpr{Token: "licenses"}, 161 | List: []bzl.Expr{asExpr([]string{"notice"})}, 162 | }, 163 | &bzl.CallExpr{ 164 | X: &bzl.LiteralExpr{Token: "load"}, 165 | List: asExpr([]string{ 166 | "@io_bazel_rules_go//go:def.bzl", 167 | }).(*bzl.ListExpr).List, 168 | }, 169 | }..., 170 | ) 171 | } 172 | 173 | func writeRules(file *bzl.File, rules []*bzl.Rule) { 174 | for _, rule := range rules { 175 | file.Stmt = append(file.Stmt, rule.Call) 176 | } 177 | } 178 | 179 | func (v *Vendorer) resolve(ipath string) Label { 180 | if ipath == v.cfg.GoPrefix { 181 | return Label{ 182 | tag: "go_default_library", 183 | } 184 | } else if strings.HasPrefix(ipath, v.cfg.GoPrefix) { 185 | return Label{ 186 | pkg: strings.TrimPrefix(ipath, v.cfg.GoPrefix+"/"), 187 | tag: "go_default_library", 188 | } 189 | } 190 | if v.cfg.VendorMultipleBuildFiles { 191 | return Label{ 192 | pkg: "vendor/" + ipath, 193 | tag: "go_default_library", 194 | } 195 | } else { 196 | return Label{ 197 | pkg: "vendor", 198 | tag: ipath, 199 | } 200 | } 201 | } 202 | 203 | func (v *Vendorer) walk(root string, f func(path, ipath string, pkg *build.Package) error) error { 204 | skipVendor := true 205 | if root == vendorPath { 206 | skipVendor = false 207 | } 208 | return sfilepath.Walk(root, func(path string, info os.FileInfo, err error) error { 209 | if err != nil { 210 | return err 211 | } 212 | 
if !info.IsDir() { 213 | return nil 214 | } 215 | if skipVendor && strings.HasPrefix(path, vendorPath) { 216 | return filepath.SkipDir 217 | } 218 | for _, r := range v.skippedPaths { 219 | if r.Match([]byte(path)) { 220 | return filepath.SkipDir 221 | } 222 | } 223 | ipath, err := filepath.Rel(root, path) 224 | if err != nil { 225 | return err 226 | } 227 | pkg, err := v.importPkg(".", filepath.Join(v.root, path)) 228 | if err != nil { 229 | if _, ok := err.(*build.NoGoError); err != nil && ok { 230 | return nil 231 | } else { 232 | return err 233 | } 234 | } 235 | 236 | return f(path, ipath, pkg) 237 | }) 238 | } 239 | 240 | func (v *Vendorer) walkRepo() error { 241 | for _, root := range v.cfg.SrcDirs { 242 | if err := v.walk(root, v.updatePkg); err != nil { 243 | return err 244 | } 245 | } 246 | return nil 247 | } 248 | 249 | func (v *Vendorer) updateSinglePkg(path string) error { 250 | pkg, err := v.importPkg(".", "./"+path) 251 | if err != nil { 252 | if _, ok := err.(*build.NoGoError); err != nil && ok { 253 | return nil 254 | } else { 255 | return err 256 | } 257 | } 258 | return v.updatePkg(path, "", pkg) 259 | } 260 | 261 | type RuleType int 262 | 263 | const ( 264 | RuleTypeGoBinary RuleType = iota 265 | RuleTypeGoLibrary 266 | RuleTypeGoTest 267 | RuleTypeGoXTest 268 | RuleTypeCGoGenrule 269 | RuleTypeFileGroup 270 | RuleTypeOpenAPILibrary 271 | ) 272 | 273 | func (rt RuleType) RuleKind() string { 274 | switch rt { 275 | case RuleTypeGoBinary: 276 | return "go_binary" 277 | case RuleTypeGoLibrary: 278 | return "go_library" 279 | case RuleTypeGoTest: 280 | return "go_test" 281 | case RuleTypeGoXTest: 282 | return "go_test" 283 | case RuleTypeCGoGenrule: 284 | return "cgo_genrule" 285 | case RuleTypeFileGroup: 286 | return "filegroup" 287 | case RuleTypeOpenAPILibrary: 288 | return "openapi_library" 289 | } 290 | panic("unreachable") 291 | } 292 | 293 | type NamerFunc func(RuleType) string 294 | 295 | func (v *Vendorer) updatePkg(path, _ string, pkg 
*build.Package) error { 296 | 297 | srcNameMap := func(srcs ...[]string) *bzl.ListExpr { 298 | return asExpr(merge(srcs...)).(*bzl.ListExpr) 299 | } 300 | 301 | srcs := srcNameMap(pkg.GoFiles, pkg.SFiles) 302 | cgoSrcs := srcNameMap(pkg.CgoFiles, pkg.CFiles, pkg.CXXFiles, pkg.HFiles) 303 | testSrcs := srcNameMap(pkg.TestGoFiles) 304 | xtestSrcs := srcNameMap(pkg.XTestGoFiles) 305 | 306 | v.addRules(path, v.emit(srcs, cgoSrcs, testSrcs, xtestSrcs, pkg, func(rt RuleType) string { 307 | switch rt { 308 | case RuleTypeGoBinary: 309 | return filepath.Base(pkg.Dir) 310 | case RuleTypeGoLibrary: 311 | return "go_default_library" 312 | case RuleTypeGoTest: 313 | return "go_default_test" 314 | case RuleTypeGoXTest: 315 | return "go_default_xtest" 316 | case RuleTypeCGoGenrule: 317 | return "cgo_codegen" 318 | } 319 | panic("unreachable") 320 | })) 321 | 322 | return nil 323 | } 324 | 325 | func (v *Vendorer) emit(srcs, cgoSrcs, testSrcs, xtestSrcs *bzl.ListExpr, pkg *build.Package, namer NamerFunc) []*bzl.Rule { 326 | var goLibAttrs Attrs = make(Attrs) 327 | var rules []*bzl.Rule 328 | 329 | deps := v.extractDeps(pkg.Imports) 330 | 331 | if len(srcs.List) >= 0 { 332 | goLibAttrs.Set("srcs", srcs) 333 | } else if len(cgoSrcs.List) == 0 { 334 | return nil 335 | } 336 | 337 | if len(deps.List) > 0 { 338 | goLibAttrs.SetList("deps", deps) 339 | } 340 | 341 | if pkg.IsCommand() { 342 | rules = append(rules, newRule(RuleTypeGoBinary, namer, map[string]bzl.Expr{ 343 | "library": asExpr(":" + namer(RuleTypeGoLibrary)), 344 | })) 345 | } 346 | 347 | addGoDefaultLibrary := len(cgoSrcs.List) > 0 || len(srcs.List) > 0 348 | if len(cgoSrcs.List) != 0 { 349 | cgoRuleAttrs := make(Attrs) 350 | 351 | cgoRuleAttrs.SetList("srcs", cgoSrcs) 352 | cgoRuleAttrs.SetList("clinkopts", asExpr([]string{"-lz", "-lm", "-lpthread", "-ldl"}).(*bzl.ListExpr)) 353 | 354 | rules = append(rules, newRule(RuleTypeCGoGenrule, namer, cgoRuleAttrs)) 355 | 356 | goLibAttrs.Set("library", 
asExpr(":"+namer(RuleTypeCGoGenrule))) 357 | } 358 | 359 | if len(testSrcs.List) != 0 { 360 | testRuleAttrs := make(Attrs) 361 | 362 | testRuleAttrs.SetList("srcs", testSrcs) 363 | testRuleAttrs.SetList("deps", v.extractDeps(pkg.TestImports)) 364 | 365 | if addGoDefaultLibrary { 366 | testRuleAttrs.Set("library", asExpr(":"+namer(RuleTypeGoLibrary))) 367 | } 368 | rules = append(rules, newRule(RuleTypeGoTest, namer, testRuleAttrs)) 369 | } 370 | 371 | if addGoDefaultLibrary { 372 | rules = append(rules, newRule(RuleTypeGoLibrary, namer, goLibAttrs)) 373 | } 374 | 375 | if len(xtestSrcs.List) != 0 { 376 | xtestRuleAttrs := make(Attrs) 377 | 378 | xtestRuleAttrs.SetList("srcs", xtestSrcs) 379 | xtestRuleAttrs.SetList("deps", v.extractDeps(pkg.XTestImports)) 380 | 381 | rules = append(rules, newRule(RuleTypeGoXTest, namer, xtestRuleAttrs)) 382 | } 383 | 384 | return rules 385 | } 386 | 387 | func (v *Vendorer) addRules(pkgPath string, rules []*bzl.Rule) { 388 | cleanPath := filepath.Clean(pkgPath) 389 | v.newRules[cleanPath] = append(v.newRules[cleanPath], rules...) 
390 | } 391 | 392 | func (v *Vendorer) walkVendor() error { 393 | var rules []*bzl.Rule 394 | updateFunc := func(path, ipath string, pkg *build.Package) error { 395 | srcNameMap := func(srcs ...[]string) *bzl.ListExpr { 396 | return asExpr( 397 | apply( 398 | merge(srcs...), 399 | mapper(func(s string) string { 400 | return strings.TrimPrefix(filepath.Join(path, s), "vendor/") 401 | }), 402 | ), 403 | ).(*bzl.ListExpr) 404 | } 405 | 406 | srcs := srcNameMap(pkg.GoFiles, pkg.SFiles) 407 | cgoSrcs := srcNameMap(pkg.CgoFiles, pkg.CFiles, pkg.CXXFiles, pkg.HFiles) 408 | testSrcs := srcNameMap(pkg.TestGoFiles) 409 | xtestSrcs := srcNameMap(pkg.XTestGoFiles) 410 | 411 | tagBase := v.resolve(ipath).tag 412 | 413 | rules = append(rules, v.emit(srcs, cgoSrcs, testSrcs, xtestSrcs, pkg, func(rt RuleType) string { 414 | switch rt { 415 | case RuleTypeGoBinary: 416 | return tagBase + "_bin" 417 | case RuleTypeGoLibrary: 418 | return tagBase 419 | case RuleTypeGoTest: 420 | return tagBase + "_test" 421 | case RuleTypeGoXTest: 422 | return tagBase + "_xtest" 423 | case RuleTypeCGoGenrule: 424 | return tagBase + "_cgo" 425 | } 426 | panic("unreachable") 427 | })...) 
428 | 429 | return nil 430 | } 431 | if v.cfg.VendorMultipleBuildFiles { 432 | updateFunc = v.updatePkg 433 | } 434 | if err := v.walk(vendorPath, updateFunc); err != nil { 435 | return err 436 | } 437 | v.addRules(vendorPath, rules) 438 | 439 | return nil 440 | } 441 | 442 | func (v *Vendorer) extractDeps(deps []string) *bzl.ListExpr { 443 | return asExpr( 444 | apply( 445 | merge(deps), 446 | filterer(func(s string) bool { 447 | pkg, err := v.importPkg(s, v.root) 448 | if err != nil { 449 | if strings.Contains(err.Error(), `cannot find package "C"`) || 450 | // added in go1.7 451 | strings.Contains(err.Error(), `cannot find package "context"`) || 452 | strings.Contains(err.Error(), `cannot find package "net/http/httptrace"`) { 453 | return false 454 | } 455 | fmt.Fprintf(os.Stderr, "extract err: %v\n", err) 456 | return false 457 | } 458 | if pkg.Goroot { 459 | return false 460 | } 461 | return true 462 | }), 463 | mapper(func(s string) string { 464 | return v.resolve(s).String() 465 | }), 466 | ), 467 | ).(*bzl.ListExpr) 468 | } 469 | 470 | func (v *Vendorer) reconcileAllRules() (int, error) { 471 | var paths []string 472 | for path, _ := range v.newRules { 473 | paths = append(paths, path) 474 | } 475 | sort.Strings(paths) 476 | written := 0 477 | for _, path := range paths { 478 | w, err := ReconcileRules(path, v.newRules[path], v.managedAttrs, v.dryRun) 479 | if w { 480 | written++ 481 | } 482 | if err != nil { 483 | return written, err 484 | } 485 | } 486 | return written, nil 487 | } 488 | 489 | type Attrs map[string]bzl.Expr 490 | 491 | func (a Attrs) Set(name string, expr bzl.Expr) { 492 | a[name] = expr 493 | } 494 | 495 | func (a Attrs) SetList(name string, expr *bzl.ListExpr) { 496 | if len(expr.List) == 0 { 497 | return 498 | } 499 | a[name] = expr 500 | } 501 | 502 | type Label struct { 503 | pkg, tag string 504 | } 505 | 506 | func (l Label) String() string { 507 | return fmt.Sprintf("//%v:%v", l.pkg, l.tag) 508 | } 509 | 510 | func asExpr(e 
interface{}) bzl.Expr { 511 | rv := reflect.ValueOf(e) 512 | switch rv.Kind() { 513 | case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, 514 | reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: 515 | return &bzl.LiteralExpr{Token: fmt.Sprintf("%d", e)} 516 | case reflect.Float32, reflect.Float64: 517 | return &bzl.LiteralExpr{Token: fmt.Sprintf("%f", e)} 518 | case reflect.String: 519 | return &bzl.StringExpr{Value: e.(string)} 520 | case reflect.Slice, reflect.Array: 521 | var list []bzl.Expr 522 | for i := 0; i < rv.Len(); i++ { 523 | list = append(list, asExpr(rv.Index(i).Interface())) 524 | } 525 | return &bzl.ListExpr{List: list} 526 | default: 527 | glog.Fatalf("Uh oh") 528 | return nil 529 | } 530 | } 531 | 532 | type Sed func(s []string) []string 533 | 534 | func mapString(in []string, f func(string) string) []string { 535 | var out []string 536 | for _, s := range in { 537 | out = append(out, f(s)) 538 | } 539 | return out 540 | } 541 | 542 | func mapper(f func(string) string) Sed { 543 | return func(in []string) []string { 544 | return mapString(in, f) 545 | } 546 | } 547 | 548 | func filterString(in []string, f func(string) bool) []string { 549 | var out []string 550 | for _, s := range in { 551 | if f(s) { 552 | out = append(out, s) 553 | } 554 | } 555 | return out 556 | } 557 | 558 | func filterer(f func(string) bool) Sed { 559 | return func(in []string) []string { 560 | return filterString(in, f) 561 | } 562 | } 563 | 564 | func apply(stream []string, seds ...Sed) []string { 565 | for _, sed := range seds { 566 | stream = sed(stream) 567 | } 568 | return stream 569 | } 570 | 571 | func merge(streams ...[]string) []string { 572 | var out []string 573 | for _, stream := range streams { 574 | out = append(out, stream...) 
575 | } 576 | return out 577 | } 578 | 579 | func newRule(rt RuleType, namer NamerFunc, attrs map[string]bzl.Expr) *bzl.Rule { 580 | rule := &bzl.Rule{ 581 | Call: &bzl.CallExpr{ 582 | X: &bzl.LiteralExpr{Token: rt.RuleKind()}, 583 | }, 584 | } 585 | rule.SetAttr("name", asExpr(namer(rt))) 586 | for k, v := range attrs { 587 | rule.SetAttr(k, v) 588 | } 589 | rule.SetAttr("tags", asExpr([]string{automanagedTag})) 590 | return rule 591 | } 592 | 593 | // findBuildFile determines the name of a preexisting BUILD file, returning 594 | // a default if no such file exists. 595 | func findBuildFile(pkgPath string) (bool, string) { 596 | options := []string{"BUILD.bazel", "BUILD"} 597 | for _, b := range options { 598 | path := filepath.Join(pkgPath, b) 599 | info, err := os.Stat(path) 600 | if err == nil && !info.IsDir() { 601 | return true, path 602 | } 603 | } 604 | return false, filepath.Join(pkgPath, "BUILD") 605 | } 606 | 607 | func ReconcileRules(pkgPath string, rules []*bzl.Rule, managedAttrs []string, dryRun bool) (bool, error) { 608 | _, path := findBuildFile(pkgPath) 609 | info, err := os.Stat(path) 610 | if err != nil && os.IsNotExist(err) { 611 | f := &bzl.File{} 612 | writeHeaders(f) 613 | reconcileLoad(f, rules) 614 | writeRules(f, rules) 615 | return writeFile(path, f, false, dryRun) 616 | } else if err != nil { 617 | return false, err 618 | } 619 | if info.IsDir() { 620 | return false, fmt.Errorf("%q cannot be a directory", path) 621 | } 622 | b, err := ioutil.ReadFile(path) 623 | if err != nil { 624 | return false, err 625 | } 626 | f, err := bzl.Parse(path, b) 627 | if err != nil { 628 | return false, err 629 | } 630 | oldRules := make(map[string]*bzl.Rule) 631 | for _, r := range f.Rules("") { 632 | oldRules[r.Name()] = r 633 | } 634 | for _, r := range rules { 635 | o, ok := oldRules[r.Name()] 636 | if !ok { 637 | f.Stmt = append(f.Stmt, r.Call) 638 | continue 639 | } 640 | if !RuleIsManaged(o) { 641 | continue 642 | } 643 | reconcileAttr := func(o, n 
*bzl.Rule, name string) { 644 | if e := n.Attr(name); e != nil { 645 | o.SetAttr(name, e) 646 | } else { 647 | o.DelAttr(name) 648 | } 649 | } 650 | for _, attr := range managedAttrs { 651 | reconcileAttr(o, r, attr) 652 | } 653 | delete(oldRules, r.Name()) 654 | } 655 | 656 | for _, r := range oldRules { 657 | if !RuleIsManaged(r) { 658 | continue 659 | } 660 | f.DelRules(r.Kind(), r.Name()) 661 | } 662 | reconcileLoad(f, f.Rules("")) 663 | 664 | return writeFile(path, f, true, dryRun) 665 | } 666 | 667 | func reconcileLoad(f *bzl.File, rules []*bzl.Rule) { 668 | usedRuleKindsMap := map[string]bool{} 669 | for _, r := range rules { 670 | // Select only the Go rules we need to import, excluding builtins like filegroup. 671 | // TODO: make less fragile 672 | switch r.Kind() { 673 | case "go_prefix", "go_library", "go_binary", "go_test", "go_proto_library", "cgo_genrule", "cgo_library": 674 | usedRuleKindsMap[r.Kind()] = true 675 | } 676 | } 677 | 678 | usedRuleKindsList := []string{} 679 | for k, _ := range usedRuleKindsMap { 680 | usedRuleKindsList = append(usedRuleKindsList, k) 681 | } 682 | sort.Strings(usedRuleKindsList) 683 | 684 | for _, r := range f.Rules("load") { 685 | const goRulesLabel = "@io_bazel_rules_go//go:def.bzl" 686 | args := bzl.Strings(&bzl.ListExpr{List: r.Call.List}) 687 | if len(args) == 0 { 688 | continue 689 | } 690 | if args[0] != goRulesLabel { 691 | continue 692 | } 693 | if len(usedRuleKindsList) == 0 { 694 | f.DelRules(r.Kind(), r.Name()) 695 | continue 696 | } 697 | r.Call.List = asExpr(append( 698 | []string{goRulesLabel}, usedRuleKindsList..., 699 | )).(*bzl.ListExpr).List 700 | break 701 | } 702 | } 703 | 704 | func RuleIsManaged(r *bzl.Rule) bool { 705 | var automanaged bool 706 | for _, tag := range r.AttrStrings("tags") { 707 | if tag == automanagedTag { 708 | automanaged = true 709 | break 710 | } 711 | } 712 | return automanaged 713 | } 714 | 715 | func writeFile(path string, f *bzl.File, exists, dryRun bool) (bool, error) { 
716 | var info bzl.RewriteInfo 717 | bzl.Rewrite(f, &info) 718 | out := bzl.Format(f) 719 | if exists { 720 | orig, err := ioutil.ReadFile(path) 721 | if err != nil { 722 | return false, err 723 | } 724 | if bytes.Compare(orig, out) == 0 { 725 | return false, nil 726 | } 727 | if *printDiff { 728 | Diff(orig, out) 729 | } 730 | } 731 | if dryRun { 732 | fmt.Fprintf(os.Stderr, "DRY-RUN: wrote %q\n", path) 733 | return true, nil 734 | } 735 | werr := ioutil.WriteFile(path, out, 0644) 736 | if werr == nil { 737 | fmt.Fprintf(os.Stderr, "wrote %q\n", path) 738 | } 739 | return werr == nil, werr 740 | } 741 | 742 | func context() *build.Context { 743 | return &build.Context{ 744 | GOARCH: "amd64", 745 | GOOS: "linux", 746 | GOROOT: build.Default.GOROOT, 747 | GOPATH: build.Default.GOPATH, 748 | ReleaseTags: []string{"go1.1", "go1.2", "go1.3", "go1.4", "go1.5", "go1.6", "go1.7", "go1.8"}, 749 | Compiler: runtime.Compiler, 750 | CgoEnabled: true, 751 | } 752 | } 753 | 754 | func walk(root string, walkFn filepath.WalkFunc) error { 755 | return nil 756 | } 757 | -------------------------------------------------------------------------------- /gazel/generator.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bytes" 5 | "fmt" 6 | "io/ioutil" 7 | "os" 8 | "path/filepath" 9 | "sort" 10 | "strings" 11 | ) 12 | 13 | const ( 14 | openAPIGenTag = "// +k8s:openapi-gen" 15 | 16 | baseImport = "k8s.io/kubernetes/" 17 | staging = "staging/src/" 18 | ) 19 | 20 | // walkGenerated updates the rule for kubernetes' OpenAPI generated file. 21 | // This involves reading all go files in the source tree and looking for the 22 | // "+k8s:openapi-gen" tag. If present, then that package must be supplied to 23 | // the genrule. 
24 | func (v *Vendorer) walkGenerated() error { 25 | if !v.cfg.K8sOpenAPIGen { 26 | return nil 27 | } 28 | v.managedAttrs = append(v.managedAttrs, "openapi_targets", "vendor_targets") 29 | paths, err := v.findOpenAPI(v.root) 30 | if err != nil { 31 | return err 32 | } 33 | return v.addGeneratedOpenAPIRule(paths) 34 | } 35 | 36 | // findOpenAPI searches for all packages under root that request OpenAPI. It 37 | // returns the go import paths. It does not follow symlinks. 38 | func (v *Vendorer) findOpenAPI(root string) ([]string, error) { 39 | finfos, err := ioutil.ReadDir(root) 40 | if err != nil { 41 | return nil, err 42 | } 43 | var res []string 44 | var includeMe bool 45 | for _, finfo := range finfos { 46 | path := filepath.Join(root, finfo.Name()) 47 | if finfo.IsDir() && (finfo.Mode()&os.ModeSymlink == 0) { 48 | children, err := v.findOpenAPI(path) 49 | if err != nil { 50 | return nil, err 51 | } 52 | res = append(res, children...) 53 | } else if strings.HasSuffix(path, ".go") && !strings.HasSuffix(path, "_test.go") { 54 | b, err := ioutil.ReadFile(path) 55 | if err != nil { 56 | return nil, err 57 | } 58 | if bytes.Contains(b, []byte(openAPIGenTag)) { 59 | includeMe = true 60 | } 61 | } 62 | } 63 | if includeMe { 64 | pkg, err := v.ctx.ImportDir(root, 0) 65 | if err != nil { 66 | return nil, err 67 | } 68 | res = append(res, pkg.ImportPath) 69 | } 70 | return res, nil 71 | } 72 | 73 | // addGeneratedOpenAPIRule updates the pkg/generated/openapi go_default_library 74 | // rule with the automanaged openapi_targets and vendor_targets. 
75 | func (v *Vendorer) addGeneratedOpenAPIRule(paths []string) error { 76 | var openAPITargets []string 77 | var vendorTargets []string 78 | for _, p := range paths { 79 | if !strings.HasPrefix(p, baseImport) { 80 | return fmt.Errorf("openapi-gen path outside of kubernetes: %s", p) 81 | } 82 | np := p[len(baseImport):] 83 | if strings.HasPrefix(np, staging) { 84 | vendorTargets = append(vendorTargets, np[len(staging):]) 85 | } else { 86 | openAPITargets = append(openAPITargets, np) 87 | } 88 | } 89 | sort.Strings(openAPITargets) 90 | sort.Strings(vendorTargets) 91 | 92 | pkgPath := filepath.Join("pkg", "generated", "openapi") 93 | for _, r := range v.newRules[pkgPath] { 94 | if r.Name() == "go_default_library" { 95 | r.SetAttr("openapi_targets", asExpr(openAPITargets)) 96 | r.SetAttr("vendor_targets", asExpr(vendorTargets)) 97 | break 98 | } 99 | } 100 | return nil 101 | } 102 | -------------------------------------------------------------------------------- /gazel/sourcerer.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "io/ioutil" 6 | "path/filepath" 7 | 8 | bzl "github.com/bazelbuild/buildifier/core" 9 | ) 10 | 11 | const ( 12 | pkgSrcsTarget = "package-srcs" 13 | allSrcsTarget = "all-srcs" 14 | ) 15 | 16 | // walkSource walks the source tree recursively from pkgPath, adding 17 | // any BUILD files to v.newRules to be formatted. 18 | // 19 | // If AddSourcesRules is enabled in the Gazel config, then we additionally add 20 | // package-sources and recursive all-srcs filegroups rules to every BUILD file. 21 | // 22 | // Returns the list of children all-srcs targets that should be added to the 23 | // all-srcs rule of the enclosing package. 
24 | func (v *Vendorer) walkSource(pkgPath string) ([]string, error) { 25 | // clean pkgPath since we access v.newRules directly 26 | pkgPath = filepath.Clean(pkgPath) 27 | for _, r := range v.skippedPaths { 28 | if r.Match([]byte(pkgPath)) { 29 | return nil, nil 30 | } 31 | } 32 | files, err := ioutil.ReadDir(pkgPath) 33 | if err != nil { 34 | return nil, err 35 | } 36 | 37 | // Find any children packages we need to include in an all-srcs rule. 38 | var children []string = nil 39 | for _, f := range files { 40 | if f.IsDir() { 41 | c, err := v.walkSource(filepath.Join(pkgPath, f.Name())) 42 | if err != nil { 43 | return nil, err 44 | } 45 | children = append(children, c...) 46 | } 47 | } 48 | 49 | // This path is a package either if we've added rules or if a BUILD file already exists. 50 | _, hasRules := v.newRules[pkgPath] 51 | isPkg := hasRules 52 | if !isPkg { 53 | isPkg, _ = findBuildFile(pkgPath) 54 | } 55 | 56 | if !isPkg { 57 | // This directory isn't a package (doesn't contain a BUILD file), 58 | // but there might be subdirectories that are packages, 59 | // so pass that up to our parent. 60 | return children, nil 61 | } 62 | 63 | // Enforce formatting the BUILD file, even if we're not adding srcs rules 64 | if !hasRules { 65 | v.addRules(pkgPath, nil) 66 | } 67 | 68 | if !v.cfg.AddSourcesRules { 69 | return nil, nil 70 | } 71 | 72 | pkgSrcsExpr := &bzl.LiteralExpr{Token: `glob(["**"])`} 73 | if pkgPath == "." 
{ 74 | pkgSrcsExpr = &bzl.LiteralExpr{Token: `glob(["**"], exclude=["bazel-*/**", ".git/**"])`} 75 | } 76 | 77 | v.addRules(pkgPath, []*bzl.Rule{ 78 | newRule(RuleTypeFileGroup, 79 | func(_ RuleType) string { return pkgSrcsTarget }, 80 | map[string]bzl.Expr{ 81 | "srcs": pkgSrcsExpr, 82 | "visibility": asExpr([]string{"//visibility:private"}), 83 | }), 84 | newRule(RuleTypeFileGroup, 85 | func(_ RuleType) string { return allSrcsTarget }, 86 | map[string]bzl.Expr{ 87 | "srcs": asExpr(append(children, fmt.Sprintf(":%s", pkgSrcsTarget))), 88 | }), 89 | }) 90 | return []string{fmt.Sprintf("//%s:%s", pkgPath, allSrcsTarget)}, nil 91 | } 92 | -------------------------------------------------------------------------------- /vendor/BUILD: -------------------------------------------------------------------------------- 1 | package(default_visibility = ["//visibility:public"]) 2 | 3 | licenses(["notice"]) 4 | 5 | load( 6 | "@io_bazel_rules_go//go:def.bzl", 7 | "go_library", 8 | ) 9 | 10 | go_library( 11 | name = "github.com/bazelbuild/buildifier/core", 12 | srcs = [ 13 | "github.com/bazelbuild/buildifier/core/lex.go", 14 | "github.com/bazelbuild/buildifier/core/parse.y.go", 15 | "github.com/bazelbuild/buildifier/core/print.go", 16 | "github.com/bazelbuild/buildifier/core/quote.go", 17 | "github.com/bazelbuild/buildifier/core/rewrite.go", 18 | "github.com/bazelbuild/buildifier/core/rule.go", 19 | "github.com/bazelbuild/buildifier/core/syntax.go", 20 | "github.com/bazelbuild/buildifier/core/tables.go", 21 | "github.com/bazelbuild/buildifier/core/walk.go", 22 | ], 23 | tags = ["automanaged"], 24 | ) 25 | 26 | go_library( 27 | name = "github.com/golang/glog", 28 | srcs = [ 29 | "github.com/golang/glog/glog.go", 30 | "github.com/golang/glog/glog_file.go", 31 | ], 32 | tags = ["automanaged"], 33 | ) 34 | 35 | go_library( 36 | name = "go/path/filepath", 37 | srcs = ["go/path/filepath/path.go"], 38 | tags = ["automanaged"], 39 | ) 40 | 
-------------------------------------------------------------------------------- /vendor/github.com/bazelbuild/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | -------------------------------------------------------------------------------- /vendor/github.com/bazelbuild/buildifier/core/build_defs.bzl: -------------------------------------------------------------------------------- 1 | _GO_TOOL = "@io_bazel_rules_go//go/toolchain:go_tool" 2 | 3 | def go_yacc(src, out, visibility=None): 4 | native.genrule( 5 | name = src + ".go_yacc", 6 | srcs = [src], 7 | outs = [out], 8 | tools = [_GO_TOOL], 9 | cmd = ("export GOROOT=$$(dirname $(location " + _GO_TOOL + "))/..;" + 10 | " $(location " + _GO_TOOL + ") tool yacc " + 11 | " -o $(location " + out + ") $(SRCS)"), 12 | visibility = visibility, 13 | local = 1, 14 | ) 15 | -------------------------------------------------------------------------------- /vendor/github.com/bazelbuild/buildifier/core/lex.go: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright 2016 Google Inc. All Rights Reserved. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 
6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | */ 16 | // Lexical scanning for BUILD file parser. 17 | 18 | package build 19 | 20 | //go:generate go tool yacc -o parse.y.go parse.y 21 | 22 | import ( 23 | "bytes" 24 | "fmt" 25 | "strings" 26 | "unicode/utf8" 27 | ) 28 | 29 | // Parse parses the input data and returns the corresponding parse tree. 30 | // 31 | // The filename is used only for generating error messages. 32 | func Parse(filename string, data []byte) (*File, error) { 33 | in := newInput(filename, data) 34 | return in.parse() 35 | } 36 | 37 | // An input represents a single input file being parsed. 38 | type input struct { 39 | // Lexing state. 40 | filename string // name of input file, for errors 41 | complete []byte // entire input 42 | remaining []byte // remaining input 43 | token []byte // token being scanned 44 | lastToken string // most recently returned token, for error messages 45 | pos Position // current input position 46 | comments []Comment // accumulated comments 47 | endRule int // position of end of current rule 48 | depth int // nesting of [ ] { } ( ) 49 | 50 | // Parser state. 51 | file *File // returned top-level syntax tree 52 | parseError error // error encountered during parsing 53 | 54 | // Comment assignment state. 
55 | pre []Expr // all expressions, in preorder traversal 56 | post []Expr // all expressions, in postorder traversal 57 | } 58 | 59 | func newInput(filename string, data []byte) *input { 60 | return &input{ 61 | filename: filename, 62 | complete: data, 63 | remaining: data, 64 | pos: Position{Line: 1, LineRune: 1, Byte: 0}, 65 | } 66 | } 67 | 68 | // parse parses the input file. 69 | func (in *input) parse() (f *File, err error) { 70 | // The parser panics for both routine errors like syntax errors 71 | // and for programmer bugs like array index errors. 72 | // Turn both into error returns. Catching bug panics is 73 | // especially important when processing many files. 74 | defer func() { 75 | if e := recover(); e != nil { 76 | if e == in.parseError { 77 | err = in.parseError 78 | } else { 79 | err = fmt.Errorf("%s:%d:%d: internal error: %v", in.filename, in.pos.Line, in.pos.LineRune, e) 80 | } 81 | } 82 | }() 83 | 84 | // Invoke the parser generated from parse.y. 85 | yyParse(in) 86 | if in.parseError != nil { 87 | return nil, in.parseError 88 | } 89 | in.file.Path = in.filename 90 | 91 | // Assign comments to nearby syntax. 92 | in.assignComments() 93 | 94 | return in.file, nil 95 | } 96 | 97 | // Error is called to report an error. 98 | // When called by the generated code s is always "syntax error". 99 | // Error does not return: it panics. 100 | func (in *input) Error(s string) { 101 | if s == "syntax error" && in.lastToken != "" { 102 | s += " near " + in.lastToken 103 | } 104 | in.parseError = fmt.Errorf("%s:%d:%d: %v", in.filename, in.pos.Line, in.pos.LineRune, s) 105 | panic(in.parseError) 106 | } 107 | 108 | // eof reports whether the input has reached end of file. 109 | func (in *input) eof() bool { 110 | return len(in.remaining) == 0 111 | } 112 | 113 | // peekRune returns the next rune in the input without consuming it. 
114 | func (in *input) peekRune() int { 115 | if len(in.remaining) == 0 { 116 | return 0 117 | } 118 | r, _ := utf8.DecodeRune(in.remaining) 119 | return int(r) 120 | } 121 | 122 | // readRune consumes and returns the next rune in the input. 123 | func (in *input) readRune() int { 124 | if len(in.remaining) == 0 { 125 | in.Error("internal lexer error: readRune at EOF") 126 | } 127 | r, size := utf8.DecodeRune(in.remaining) 128 | in.remaining = in.remaining[size:] 129 | if r == '\n' { 130 | in.pos.Line++ 131 | in.pos.LineRune = 1 132 | } else { 133 | in.pos.LineRune++ 134 | } 135 | in.pos.Byte += size 136 | return int(r) 137 | } 138 | 139 | // startToken marks the beginning of the next input token. 140 | // It must be followed by a call to endToken, once the token has 141 | // been consumed using readRune. 142 | func (in *input) startToken(val *yySymType) { 143 | in.token = in.remaining 144 | val.tok = "" 145 | val.pos = in.pos 146 | } 147 | 148 | // yySymType (used in the next few functions) is defined by the 149 | // generated parser. It is a struct containing all the fields listed 150 | // in parse.y's %union [sic] section. 151 | 152 | // endToken marks the end of an input token. 153 | // It records the actual token string in val.tok if the caller 154 | // has not done that already. 155 | func (in *input) endToken(val *yySymType) { 156 | if val.tok == "" { 157 | tok := string(in.token[:len(in.token)-len(in.remaining)]) 158 | val.tok = tok 159 | in.lastToken = val.tok 160 | } 161 | } 162 | 163 | // Lex is called from the generated parser to obtain the next input token. 164 | // It returns the token value (either a rune like '+' or a symbolic token _FOR) 165 | // and sets val to the data associated with the token. 166 | // 167 | // For all our input tokens, the associated data is 168 | // val.Pos (the position where the token begins) 169 | // and val.Token (the input string corresponding to the token). 
170 | func (in *input) Lex(val *yySymType) int { 171 | // Skip past spaces, stopping at non-space or EOF. 172 | countNL := 0 // number of newlines we've skipped past 173 | for !in.eof() { 174 | // The parser does not track indentation, because for the most part 175 | // BUILD expressions don't care about how they are indented. 176 | // However, we do need to be able to distinguish 177 | // 178 | // x = y[0] 179 | // 180 | // from the occasional 181 | // 182 | // x = y 183 | // [0] 184 | // 185 | // To handle this one case, when we reach the beginning of a 186 | // top-level BUILD expression, we scan forward to see where 187 | // it should end and record the number of input bytes remaining 188 | // at that endpoint. When we reach that point in the input, we 189 | // insert an implicit semicolon to force the two expressions 190 | // to stay separate. 191 | // 192 | if in.endRule != 0 && len(in.remaining) == in.endRule { 193 | in.endRule = 0 194 | in.lastToken = "implicit ;" 195 | val.tok = ";" 196 | return ';' 197 | } 198 | 199 | // Skip over spaces. Count newlines so we can give the parser 200 | // information about where top-level blank lines are, 201 | // for top-level comment assignment. 202 | c := in.peekRune() 203 | if c == ' ' || c == '\t' || c == '\r' || c == '\n' { 204 | if c == '\n' && in.endRule == 0 { 205 | // Not in a rule. Tell parser about top-level blank line. 206 | in.startToken(val) 207 | in.readRune() 208 | in.endToken(val) 209 | return '\n' 210 | } 211 | if c == '\n' { 212 | countNL++ 213 | } 214 | in.readRune() 215 | continue 216 | } 217 | 218 | // Comment runs to end of line. 219 | if c == '#' { 220 | // Is this comment the only thing on its line? 221 | // Find the last \n before this # and see if it's all 222 | // spaces from there to here. 223 | i := bytes.LastIndex(in.complete[:in.pos.Byte], []byte("\n")) 224 | suffix := len(bytes.TrimSpace(in.complete[i+1:in.pos.Byte])) > 0 225 | 226 | // Consume comment. 
227 | in.startToken(val) 228 | for len(in.remaining) > 0 && in.readRune() != '\n' { 229 | } 230 | in.endToken(val) 231 | 232 | val.tok = strings.TrimRight(val.tok, "\n") 233 | in.lastToken = "comment" 234 | 235 | // If we are at top level (not in a rule), hand the comment to 236 | // the parser as a _COMMENT token. The grammar is written 237 | // to handle top-level comments itself. 238 | if in.endRule == 0 { 239 | // Not in a rule. Tell parser about top-level comment. 240 | return _COMMENT 241 | } 242 | 243 | // Otherwise, save comment for later attachment to syntax tree. 244 | if countNL > 1 { 245 | in.comments = append(in.comments, Comment{val.pos, "", false}) 246 | } 247 | in.comments = append(in.comments, Comment{val.pos, val.tok, suffix}) 248 | countNL = 1 249 | continue 250 | } 251 | 252 | if c == '\\' && len(in.remaining) >= 2 && in.remaining[1] == '\n' { 253 | // We can ignore a trailing \ at end of line. 254 | in.readRune() 255 | continue 256 | } 257 | 258 | // Found non-space non-comment. 259 | break 260 | } 261 | 262 | // Found the beginning of the next token. 263 | in.startToken(val) 264 | defer in.endToken(val) 265 | 266 | // End of file. 267 | if in.eof() { 268 | in.lastToken = "EOF" 269 | return _EOF 270 | } 271 | 272 | // If endRule is 0, we need to recompute where the end 273 | // of the next rule (Python expression) is, so that we can 274 | // generate a virtual end-of-rule semicolon (see above). 275 | if in.endRule == 0 { 276 | in.endRule = len(in.skipPython(in.remaining)) 277 | if in.endRule == 0 { 278 | // skipPython got confused. 279 | // No more virtual semicolons. 280 | in.endRule = -1 281 | } 282 | } 283 | 284 | // Punctuation tokens. 
285 | switch c := in.peekRune(); c { 286 | case '[', '(', '{': 287 | in.depth++ 288 | in.readRune() 289 | return c 290 | 291 | case ']', ')', '}': 292 | in.depth-- 293 | in.readRune() 294 | return c 295 | 296 | case '.', '-', '%', ':', ';', ',', '/', '*': // single-char tokens 297 | in.readRune() 298 | return c 299 | 300 | case '<', '>', '=', '!', '+': // possibly followed by = 301 | in.readRune() 302 | if in.peekRune() == '=' { 303 | in.readRune() 304 | switch c { 305 | case '<': 306 | return _LE 307 | case '>': 308 | return _GE 309 | case '=': 310 | return _EQ 311 | case '!': 312 | return _NE 313 | case '+': 314 | return _ADDEQ 315 | } 316 | } 317 | return c 318 | 319 | case 'r': // possible beginning of raw quoted string 320 | if len(in.remaining) < 2 || in.remaining[1] != '"' && in.remaining[1] != '\'' { 321 | break 322 | } 323 | in.readRune() 324 | c = in.peekRune() 325 | fallthrough 326 | 327 | case '"', '\'': // quoted string 328 | quote := c 329 | if len(in.remaining) >= 3 && in.remaining[0] == byte(quote) && in.remaining[1] == byte(quote) && in.remaining[2] == byte(quote) { 330 | // Triple-quoted string. 
331 | in.readRune() 332 | in.readRune() 333 | in.readRune() 334 | var c1, c2, c3 int 335 | for { 336 | if in.eof() { 337 | in.pos = val.pos 338 | in.Error("unexpected EOF in string") 339 | } 340 | c1, c2, c3 = c2, c3, in.readRune() 341 | if c1 == quote && c2 == quote && c3 == quote { 342 | break 343 | } 344 | if c3 == '\\' { 345 | if in.eof() { 346 | in.pos = val.pos 347 | in.Error("unexpected EOF in string") 348 | } 349 | in.readRune() 350 | } 351 | } 352 | } else { 353 | in.readRune() 354 | for { 355 | if in.eof() { 356 | in.pos = val.pos 357 | in.Error("unexpected EOF in string") 358 | } 359 | if in.peekRune() == '\n' { 360 | in.Error("unexpected newline in string") 361 | } 362 | c := in.readRune() 363 | if c == quote { 364 | break 365 | } 366 | if c == '\\' { 367 | if in.eof() { 368 | in.pos = val.pos 369 | in.Error("unexpected EOF in string") 370 | } 371 | in.readRune() 372 | } 373 | } 374 | } 375 | in.endToken(val) 376 | s, triple, err := unquote(val.tok) 377 | if err != nil { 378 | in.Error(fmt.Sprint(err)) 379 | } 380 | val.str = s 381 | val.triple = triple 382 | return _STRING 383 | } 384 | 385 | // Checked all punctuation. Must be identifier token. 386 | if c := in.peekRune(); !isIdent(c) { 387 | in.Error(fmt.Sprintf("unexpected input character %#q", c)) 388 | } 389 | 390 | // Look for raw Python block (class, def, if, etc at beginning of line) and pass through. 391 | if in.depth == 0 && in.pos.LineRune == 1 && hasPythonPrefix(in.remaining) { 392 | // Find end of Python block and advance input beyond it. 393 | // Have to loop calling readRune in order to maintain line number info. 394 | rest := in.skipPython(in.remaining) 395 | for len(in.remaining) > len(rest) { 396 | in.readRune() 397 | } 398 | return _PYTHON 399 | } 400 | 401 | // Scan over alphanumeric identifier. 
402 | for { 403 | c := in.peekRune() 404 | if !isIdent(c) { 405 | break 406 | } 407 | in.readRune() 408 | } 409 | 410 | // Call endToken to set val.tok to identifier we just scanned, 411 | // so we can look to see if val.tok is a keyword. 412 | in.endToken(val) 413 | if k := keywordToken[val.tok]; k != 0 { 414 | return k 415 | } 416 | 417 | return _IDENT 418 | } 419 | 420 | // isIdent reports whether c is an identifier rune. 421 | // We treat all non-ASCII runes as identifier runes. 422 | func isIdent(c int) bool { 423 | return '0' <= c && c <= '9' || 424 | 'A' <= c && c <= 'Z' || 425 | 'a' <= c && c <= 'z' || 426 | c == '_' || 427 | c >= 0x80 428 | } 429 | 430 | // keywordToken records the special tokens for 431 | // strings that should not be treated as ordinary identifiers. 432 | var keywordToken = map[string]int{ 433 | "and": _AND, 434 | "for": _FOR, 435 | "if": _IF, 436 | "else": _ELSE, 437 | "in": _IN, 438 | "is": _IS, 439 | "lambda": _LAMBDA, 440 | "not": _NOT, 441 | "or": _OR, 442 | } 443 | 444 | // Python scanning. 445 | // About 1% of BUILD files embed arbitrary Python into the file. 446 | // We do not attempt to parse it. Instead, we lex just enough to scan 447 | // beyond it, treating the Python block as an unintepreted blob. 448 | 449 | // hasPythonPrefix reports whether p begins with a keyword that would 450 | // introduce an uninterpreted Python block. 451 | func hasPythonPrefix(p []byte) bool { 452 | for _, pre := range prefixes { 453 | if hasPrefixSpace(p, pre) { 454 | return true 455 | } 456 | } 457 | return false 458 | } 459 | 460 | // These keywords introduce uninterpreted Python blocks. 461 | var prefixes = []string{ 462 | "assert", 463 | "class", 464 | "def", 465 | "del", 466 | "for", 467 | "if", 468 | "try", 469 | } 470 | 471 | // hasPrefixSpace reports whether p begins with pre followed by a space or colon. 
func hasPrefixSpace(p []byte, pre string) bool {
	// && binds tighter than ||, so this reads: p is too short, OR the byte
	// after the prefix is none of space/tab/colon. The cheap length+delimiter
	// check runs before the byte-by-byte prefix comparison below.
	if len(p) <= len(pre) || p[len(pre)] != ' ' && p[len(pre)] != '\t' && p[len(pre)] != ':' {
		return false
	}
	for i := range pre {
		if p[i] != pre[i] {
			return false
		}
	}
	return true
}

// isBlankOrComment reports whether the line at the start of b is blank
// or a comment: it is true when a '#' or newline is reached before any
// byte other than space, tab, or carriage return (including when b runs
// out entirely).
func isBlankOrComment(b []byte) bool {
	for _, c := range b {
		if c == '#' || c == '\n' {
			return true
		}
		if c != ' ' && c != '\t' && c != '\r' {
			return false
		}
	}
	return true
}

// hasPythonContinuation reports whether p begins with a keyword that
// continues an uninterpreted Python block.
func hasPythonContinuation(p []byte) bool {
	for _, pre := range continuations {
		if hasPrefixSpace(p, pre) {
			return true
		}
	}
	return false
}

// These keywords continue uninterpreted Python blocks.
// ("except" and "else" can begin a line that is still part of the
// block introduced by a prefix keyword such as "try" or "if".)
var continuations = []string{
	"except",
	"else",
}

// skipPython returns the data remaining after the uninterpreted
// Python block beginning at p. It does not advance the input position.
// (The only reason for the input receiver is to be able to call in.Error.)
func (in *input) skipPython(p []byte) []byte {
	quote := byte(0)     // if non-zero, the kind of quote we're in
	tripleQuote := false // if true, the quote is a triple quote
	depth := 0           // nesting depth for ( ) [ ] { }
	var rest []byte      // data after the Python block

	// Scan over input one byte at a time until we find
	// an unindented, non-blank, non-comment line
	// outside quoted strings and brackets.
	// (Body of skipPython, continued: the byte-at-a-time scan loop.)
	for i := 0; i < len(p); i++ {
		c := p[i]
		// Close a single-quoted string.
		if quote != 0 && c == quote && !tripleQuote {
			quote = 0
			continue
		}
		// Close a triple-quoted string (needs two more matching quotes).
		if quote != 0 && c == quote && tripleQuote && i+2 < len(p) && p[i+1] == quote && p[i+2] == quote {
			i += 2
			quote = 0
			tripleQuote = false
			continue
		}
		if quote != 0 {
			if c == '\\' {
				i++ // skip escaped char
			}
			continue
		}
		// Open a quoted string; three identical quote bytes start a triple quote.
		if c == '\'' || c == '"' {
			if i+2 < len(p) && p[i+1] == c && p[i+2] == c {
				quote = c
				tripleQuote = true
				i += 2
				continue
			}
			quote = c
			continue
		}

		// At the start of a line, outside brackets, and not after a
		// backslash continuation: candidate end of the Python block.
		if depth == 0 && i > 0 && p[i-1] == '\n' && (i < 2 || p[i-2] != '\\') {
			// Possible stopping point. Save the earliest one we find.
			if rest == nil {
				rest = p[i:]
			}

			if !isBlankOrComment(p[i:]) {
				if !hasPythonContinuation(p[i:]) && c != ' ' && c != '\t' {
					// Yes, stop here.
					break
				}
				// Not a stopping point after all.
				rest = nil
			}
		}

		switch c {
		case '#':
			// Skip comment.
			for i < len(p) && p[i] != '\n' {
				i++
			}

		case '(', '[', '{':
			depth++

		case ')', ']', '}':
			depth--
		}
	}
	if quote != 0 {
		in.Error("EOF scanning Python quoted string")
	}
	return rest
}

// Comment assignment.
// We build two lists of all subexpressions, preorder and postorder.
// The preorder list is ordered by start location, with outer expressions first.
// The postorder list is ordered by end location, with outer expressions last.
// We use the preorder list to assign each whole-line comment to the syntax
// immediately following it, and we use the postorder list to assign each
// end-of-line comment to the syntax immediately preceding it.
// order walks the expression adding it and its subexpressions to the
// preorder and postorder lists.
// Each node is appended to in.pre before its children and to in.post
// after them; the switch enumerates every Expr variant, panicking on
// any type it does not know about.
func (in *input) order(v Expr) {
	if v != nil {
		in.pre = append(in.pre, v)
	}
	switch v := v.(type) {
	default:
		panic(fmt.Errorf("order: unexpected type %T", v))
	case nil:
		// nothing
	case *End:
		// nothing
	case *File:
		for _, stmt := range v.Stmt {
			in.order(stmt)
		}
	case *CommentBlock:
		// nothing
	case *CallExpr:
		in.order(v.X)
		for _, x := range v.List {
			in.order(x)
		}
		in.order(&v.End)
	case *PythonBlock:
		// nothing
	case *LiteralExpr:
		// nothing
	case *StringExpr:
		// nothing
	case *DotExpr:
		in.order(v.X)
	case *ListExpr:
		for _, x := range v.List {
			in.order(x)
		}
		in.order(&v.End)
	case *ListForExpr:
		in.order(v.X)
		for _, c := range v.For {
			in.order(c)
		}
		for _, c := range v.If {
			in.order(c)
		}
		in.order(&v.End)
	case *ForClause:
		for _, name := range v.Var {
			in.order(name)
		}
		in.order(v.Expr)
	case *IfClause:
		in.order(v.Cond)
	case *KeyValueExpr:
		in.order(v.Key)
		in.order(v.Value)
	case *DictExpr:
		for _, x := range v.List {
			in.order(x)
		}
		in.order(&v.End)
	case *TupleExpr:
		for _, x := range v.List {
			in.order(x)
		}
		in.order(&v.End)
	case *UnaryExpr:
		in.order(v.X)
	case *BinaryExpr:
		in.order(v.X)
		in.order(v.Y)
	case *ConditionalExpr:
		in.order(v.Then)
		in.order(v.Test)
		in.order(v.Else)
	case *ParenExpr:
		in.order(v.X)
		in.order(&v.End)
	case *SliceExpr:
		in.order(v.X)
		in.order(v.Y)
		in.order(v.Z)
	case *IndexExpr:
		in.order(v.X)
		in.order(v.Y)
	case *LambdaExpr:
		for _, name := range
v.Var {
			in.order(name)
		}
		in.order(v.Expr)
	}
	// Postorder: append after all children have been visited.
	if v != nil {
		in.post = append(in.post, v)
	}
}

// assignComments attaches comments to nearby syntax.
func (in *input) assignComments() {
	// Generate preorder and postorder lists.
	in.order(in.file)

	// Split into whole-line comments and suffix comments.
	var line, suffix []Comment
	for _, com := range in.comments {
		if com.Suffix {
			suffix = append(suffix, com)
		} else {
			line = append(line, com)
		}
	}

	// Assign line comments to syntax immediately following.
	// in.pre is ordered by start byte, so each comment attaches to the
	// first node that begins at or after it.
	for _, x := range in.pre {
		start, _ := x.Span()
		xcom := x.Comment()
		for len(line) > 0 && start.Byte >= line[0].Start.Byte {
			xcom.Before = append(xcom.Before, line[0])
			line = line[1:]
		}
	}

	// Remaining line comments go at end of file.
	in.file.After = append(in.file.After, line...)

	// Assign suffix comments to syntax immediately before.
	for i := len(in.post) - 1; i >= 0; i-- {
		x := in.post[i]

		// Do not assign suffix comments to call, list, end-of-list,
		// whole file, or conditional expression.
		// Instead assign them to the last argument, element, or rule.
		switch x.(type) {
		case *CallExpr, *ListExpr, *End, *File, *ConditionalExpr:
			continue
		}

		// Do not assign suffix comments to something that starts
		// on an earlier line, so that in
		//
		//	tags = [ "a",
		//		"b" ], # comment
		//
		// we assign the comment to "b" and not to tags = [ ... ].
		// (assignComments, continued.)
		start, end := x.Span()
		if start.Line != end.Line {
			continue
		}
		xcom := x.Comment()
		// Consume suffix comments (from the back of the list) whose
		// start byte is at or after this node's end.
		for len(suffix) > 0 && end.Byte <= suffix[len(suffix)-1].Start.Byte {
			xcom.Suffix = append(xcom.Suffix, suffix[len(suffix)-1])
			suffix = suffix[:len(suffix)-1]
		}
	}

	// We assigned suffix comments in reverse.
	// If multiple suffix comments were appended to the same
	// expression node, they are now in reverse. Fix that.
	for _, x := range in.post {
		reverseComments(x.Comment().Suffix)
	}

	// Remaining suffix comments go at beginning of file.
	in.file.Before = append(in.file.Before, suffix...)
}

// reverseComments reverses the []Comment list in place.
func reverseComments(list []Comment) {
	for i, j := 0, len(list)-1; i < j; i, j = i+1, j-1 {
		list[i], list[j] = list[j], list[i]
	}
}
-------------------------------------------------------------------------------- /vendor/github.com/bazelbuild/buildifier/core/parse.y: --------------------------------------------------------------------------------
// BUILD file parser.

// This is a yacc grammar. Its lexer is in lex.go.
//
// For a good introduction to writing yacc grammars, see
// Kernighan and Pike's book The Unix Programming Environment.
//
// The definitive yacc manual is
// Stephen C. Johnson and Ravi Sethi, "Yacc: A Parser Generator",
// online at http://plan9.bell-labs.com/sys/doc/yacc.pdf.

%{
package build
%}

// The generated parser puts these fields in a struct named yySymType.
// (The name %union is historical, but it is inaccurate for Go.)
%union {
	// input tokens
	tok    string   // raw input syntax
	str    string   // decoding of quoted string
	pos    Position // position of token
	triple bool     // was string triple quoted?
24 | 25 | // partial syntax trees 26 | expr Expr 27 | exprs []Expr 28 | forc *ForClause 29 | fors []*ForClause 30 | ifs []*IfClause 31 | string *StringExpr 32 | strings []*StringExpr 33 | 34 | // supporting information 35 | comma Position // position of trailing comma in list, if present 36 | lastRule Expr // most recent rule, to attach line comments to 37 | } 38 | 39 | // These declarations set the type for a $ reference ($$, $1, $2, ...) 40 | // based on the kind of symbol it refers to. Other fields can be referred 41 | // to explicitly, as in $1. 42 | // 43 | // %token is for input tokens generated by the lexer. 44 | // %type is for higher-level grammar rules defined here. 45 | // 46 | // It is possible to put multiple tokens per line, but it is easier to 47 | // keep ordered using a sparser one-per-line list. 48 | 49 | %token '%' 50 | %token '(' 51 | %token ')' 52 | %token '*' 53 | %token '+' 54 | %token ',' 55 | %token '-' 56 | %token '.' 57 | %token '/' 58 | %token ':' 59 | %token '<' 60 | %token '=' 61 | %token '>' 62 | %token '[' 63 | %token ']' 64 | %token '{' 65 | %token '}' 66 | 67 | // By convention, yacc token names are all caps. 68 | // However, we do not want to export them from the Go package 69 | // we are creating, so prefix them all with underscores. 
// Token declarations for tokens produced by the lexer in lex.go.
%token	_ADDEQ   // operator +=
%token	_AND     // keyword and
%token	_COMMENT // top-level # comment
%token	_EOF     // end of file
%token	_EQ      // operator ==
%token	_FOR     // keyword for
%token	_GE      // operator >=
%token	_IDENT   // non-keyword identifier or number
%token	_IF      // keyword if
%token	_ELSE    // keyword else
%token	_IN      // keyword in
%token	_IS      // keyword is
%token	_LAMBDA  // keyword lambda
%token	_LE      // operator <=
%token	_NE      // operator !=
%token	_NOT     // keyword not
%token	_OR      // keyword or
%token	_PYTHON  // uninterpreted Python block
%token	_STRING  // quoted string

// NOTE(review): the <field> tags on these %type declarations had been
// stripped from this copy of the file (angle-bracketed text lost, most
// likely to HTML sanitization), which makes the grammar invalid for
// goyacc. They are restored here from the %union field names above and
// from the value each rule's action builds ($$ = ...).
%type	<pos>     comma_opt
%type	<expr>    expr
%type	<expr>    expr_opt
%type	<exprs>   exprs
%type	<exprs>   exprs_opt
%type	<forc>    for_clause
%type	<fors>    for_clauses
%type	<expr>    ident
%type	<exprs>   idents
%type	<ifs>     if_clauses_opt
%type	<exprs>   stmts
%type	<expr>    stmt
%type	<expr>    keyvalue
%type	<exprs>   keyvalues
%type	<exprs>   keyvalues_opt
%type	<string>  string
%type	<strings> strings

// Operator precedence.
// Operators listed lower in the table bind tighter.

// We tag rules with this fake, low precedence to indicate
// that when the rule is involved in a shift/reduce
// conflict, we prefer that the parser shift (try for a longer parse).
// Shifting is the default resolution anyway, but stating it explicitly
// silences yacc's warning for that specific case.
%left ShiftInstead

%left '\n'
%left _ASSERT
// '=' and '+=' have the lowest precedence
// e.g. "x = a if c > 0 else 'bar'"
// followed by
// 'if' and 'else' which have lower precedence than all other operators.
// e.g. "a, b if c > 0 else 'foo'" is either a tuple of (a,b) or 'foo'
// and not a tuple of "(a, (b if ...
))" 127 | %left '=' _ADDEQ 128 | %left _IF _ELSE 129 | %left ',' 130 | %left ':' 131 | %left _IN _NOT _IS 132 | %left _OR 133 | %left _AND 134 | %left '<' '>' _EQ _NE _LE _GE 135 | %left '+' '-' 136 | %left '*' '/' '%' 137 | %left '.' '[' '(' 138 | %right _UNARY 139 | %left _STRING 140 | 141 | %% 142 | 143 | // Grammar rules. 144 | // 145 | // A note on names: if foo is a rule, then foos is a sequence of foos 146 | // (with interleaved commas or other syntax as appropriate) 147 | // and foo_opt is an optional foo. 148 | 149 | file: 150 | stmts _EOF 151 | { 152 | yylex.(*input).file = &File{Stmt: $1} 153 | return 0 154 | } 155 | 156 | stmts: 157 | { 158 | $$ = nil 159 | $$ = nil 160 | } 161 | | stmts stmt comma_opt semi_opt 162 | { 163 | // If this statement follows a comment block, 164 | // attach the comments to the statement. 165 | if cb, ok := $1.(*CommentBlock); ok { 166 | $$ = $1 167 | $$[len($1)-1] = $2 168 | $2.Comment().Before = cb.After 169 | $$ = $2 170 | break 171 | } 172 | 173 | // Otherwise add to list. 174 | $$ = append($1, $2) 175 | $$ = $2 176 | 177 | // Consider this input: 178 | // 179 | // foo() 180 | // # bar 181 | // baz() 182 | // 183 | // If we've just parsed baz(), the # bar is attached to 184 | // foo() as an After comment. Make it a Before comment 185 | // for baz() instead. 186 | if x := $1; x != nil { 187 | com := x.Comment() 188 | $2.Comment().Before = com.After 189 | com.After = nil 190 | } 191 | } 192 | | stmts '\n' 193 | { 194 | // Blank line; sever last rule from future comments. 
195 | $$ = $1 196 | $$ = nil 197 | } 198 | | stmts _COMMENT 199 | { 200 | $$ = $1 201 | $$ = $1 202 | if $$ == nil { 203 | cb := &CommentBlock{Start: $2} 204 | $$ = append($$, cb) 205 | $$ = cb 206 | } 207 | com := $$.Comment() 208 | com.After = append(com.After, Comment{Start: $2, Token: $2}) 209 | } 210 | 211 | stmt: 212 | expr %prec ShiftInstead 213 | | _PYTHON 214 | { 215 | $$ = &PythonBlock{Start: $1, Token: $1} 216 | } 217 | 218 | semi_opt: 219 | | semi_opt ';' 220 | 221 | expr: 222 | ident 223 | | strings %prec ShiftInstead 224 | { 225 | if len($1) == 1 { 226 | $$ = $1[0] 227 | break 228 | } 229 | 230 | $$ = $1[0] 231 | for _, x := range $1[1:] { 232 | _, end := $$.Span() 233 | $$ = binary($$, end, "+", x) 234 | } 235 | } 236 | | '[' exprs_opt ']' 237 | { 238 | $$ = &ListExpr{ 239 | Start: $1, 240 | List: $2, 241 | Comma: $2, 242 | End: End{Pos: $3}, 243 | ForceMultiLine: forceMultiLine($1, $2, $3), 244 | } 245 | } 246 | | '[' expr for_clauses if_clauses_opt ']' 247 | { 248 | exprStart, _ := $2.Span() 249 | $$ = &ListForExpr{ 250 | Brack: "[]", 251 | Start: $1, 252 | X: $2, 253 | For: $3, 254 | If: $4, 255 | End: End{Pos: $5}, 256 | ForceMultiLine: $1.Line != exprStart.Line, 257 | } 258 | } 259 | | '(' expr for_clauses if_clauses_opt ')' 260 | { 261 | exprStart, _ := $2.Span() 262 | $$ = &ListForExpr{ 263 | Brack: "()", 264 | Start: $1, 265 | X: $2, 266 | For: $3, 267 | If: $4, 268 | End: End{Pos: $5}, 269 | ForceMultiLine: $1.Line != exprStart.Line, 270 | } 271 | } 272 | | '{' keyvalue for_clauses if_clauses_opt '}' 273 | { 274 | exprStart, _ := $2.Span() 275 | $$ = &ListForExpr{ 276 | Brack: "{}", 277 | Start: $1, 278 | X: $2, 279 | For: $3, 280 | If: $4, 281 | End: End{Pos: $5}, 282 | ForceMultiLine: $1.Line != exprStart.Line, 283 | } 284 | } 285 | | '{' keyvalues_opt '}' 286 | { 287 | $$ = &DictExpr{ 288 | Start: $1, 289 | List: $2, 290 | Comma: $2, 291 | End: End{Pos: $3}, 292 | ForceMultiLine: forceMultiLine($1, $2, $3), 293 | } 294 | } 295 | | '(' 
exprs_opt ')' 296 | { 297 | if len($2) == 1 && $2.Line == 0 { 298 | // Just a parenthesized expression, not a tuple. 299 | $$ = &ParenExpr{ 300 | Start: $1, 301 | X: $2[0], 302 | End: End{Pos: $3}, 303 | ForceMultiLine: forceMultiLine($1, $2, $3), 304 | } 305 | } else { 306 | $$ = &TupleExpr{ 307 | Start: $1, 308 | List: $2, 309 | Comma: $2, 310 | End: End{Pos: $3}, 311 | ForceCompact: forceCompact($1, $2, $3), 312 | ForceMultiLine: forceMultiLine($1, $2, $3), 313 | } 314 | } 315 | } 316 | | expr '.' _IDENT 317 | { 318 | $$ = &DotExpr{ 319 | X: $1, 320 | Dot: $2, 321 | NamePos: $3, 322 | Name: $3, 323 | } 324 | } 325 | | expr '(' exprs_opt ')' 326 | { 327 | $$ = &CallExpr{ 328 | X: $1, 329 | ListStart: $2, 330 | List: $3, 331 | End: End{Pos: $4}, 332 | ForceCompact: forceCompact($2, $3, $4), 333 | ForceMultiLine: forceMultiLine($2, $3, $4), 334 | } 335 | } 336 | | expr '(' expr for_clauses if_clauses_opt ')' 337 | { 338 | $$ = &CallExpr{ 339 | X: $1, 340 | ListStart: $2, 341 | List: []Expr{ 342 | &ListForExpr{ 343 | Brack: "", 344 | Start: $2, 345 | X: $3, 346 | For: $4, 347 | If: $5, 348 | End: End{Pos: $6}, 349 | }, 350 | }, 351 | End: End{Pos: $6}, 352 | } 353 | } 354 | | expr '[' expr ']' 355 | { 356 | $$ = &IndexExpr{ 357 | X: $1, 358 | IndexStart: $2, 359 | Y: $3, 360 | End: $4, 361 | } 362 | } 363 | | expr '[' expr_opt ':' expr_opt ']' 364 | { 365 | $$ = &SliceExpr{ 366 | X: $1, 367 | SliceStart: $2, 368 | Y: $3, 369 | Colon: $4, 370 | Z: $5, 371 | End: $6, 372 | } 373 | } 374 | | _LAMBDA exprs ':' expr 375 | { 376 | $$ = &LambdaExpr{ 377 | Lambda: $1, 378 | Var: $2, 379 | Colon: $3, 380 | Expr: $4, 381 | } 382 | } 383 | | '-' expr %prec _UNARY { $$ = unary($1, $1, $2) } 384 | | _NOT expr %prec _UNARY { $$ = unary($1, $1, $2) } 385 | | '*' expr %prec _UNARY { $$ = unary($1, $1, $2) } 386 | | expr '*' expr { $$ = binary($1, $2, $2, $3) } 387 | | expr '%' expr { $$ = binary($1, $2, $2, $3) } 388 | | expr '/' expr { $$ = binary($1, $2, $2, $3) } 389 | | expr 
'+' expr { $$ = binary($1, $2, $2, $3) } 390 | | expr '-' expr { $$ = binary($1, $2, $2, $3) } 391 | | expr '<' expr { $$ = binary($1, $2, $2, $3) } 392 | | expr '>' expr { $$ = binary($1, $2, $2, $3) } 393 | | expr _EQ expr { $$ = binary($1, $2, $2, $3) } 394 | | expr _LE expr { $$ = binary($1, $2, $2, $3) } 395 | | expr _NE expr { $$ = binary($1, $2, $2, $3) } 396 | | expr _GE expr { $$ = binary($1, $2, $2, $3) } 397 | | expr '=' expr { $$ = binary($1, $2, $2, $3) } 398 | | expr _ADDEQ expr { $$ = binary($1, $2, $2, $3) } 399 | | expr _IN expr { $$ = binary($1, $2, $2, $3) } 400 | | expr _NOT _IN expr { $$ = binary($1, $2, "not in", $4) } 401 | | expr _OR expr { $$ = binary($1, $2, $2, $3) } 402 | | expr _AND expr { $$ = binary($1, $2, $2, $3) } 403 | | expr _IS expr 404 | { 405 | if b, ok := $3.(*UnaryExpr); ok && b.Op == "not" { 406 | $$ = binary($1, $2, "is not", b.X) 407 | } else { 408 | $$ = binary($1, $2, $2, $3) 409 | } 410 | } 411 | | expr _IF expr _ELSE expr 412 | { 413 | $$ = &ConditionalExpr{ 414 | Then: $1, 415 | IfStart: $2, 416 | Test: $3, 417 | ElseStart: $4, 418 | Else: $5, 419 | } 420 | } 421 | 422 | expr_opt: 423 | { 424 | $$ = nil 425 | } 426 | | expr 427 | 428 | // comma_opt is an optional comma. If the comma is present, 429 | // the rule's value is the position of the comma. Otherwise 430 | // the rule's value is the zero position. Tracking this 431 | // lets us distinguish (x) and (x,). 
432 | comma_opt: 433 | { 434 | $$ = Position{} 435 | } 436 | | ',' 437 | 438 | keyvalue: 439 | expr ':' expr { 440 | $$ = &KeyValueExpr{ 441 | Key: $1, 442 | Colon: $2, 443 | Value: $3, 444 | } 445 | } 446 | 447 | keyvalues: 448 | keyvalue 449 | { 450 | $$ = []Expr{$1} 451 | } 452 | | keyvalues ',' keyvalue 453 | { 454 | $$ = append($1, $3) 455 | } 456 | 457 | keyvalues_opt: 458 | { 459 | $$, $$ = nil, Position{} 460 | } 461 | | keyvalues comma_opt 462 | { 463 | $$, $$ = $1, $2 464 | } 465 | 466 | exprs: 467 | expr 468 | { 469 | $$ = []Expr{$1} 470 | } 471 | | exprs ',' expr 472 | { 473 | $$ = append($1, $3) 474 | } 475 | 476 | exprs_opt: 477 | { 478 | $$, $$ = nil, Position{} 479 | } 480 | | exprs comma_opt 481 | { 482 | $$, $$ = $1, $2 483 | } 484 | 485 | string: 486 | _STRING 487 | { 488 | $$ = &StringExpr{ 489 | Start: $1, 490 | Value: $1, 491 | TripleQuote: $1, 492 | End: $1.add($1), 493 | Token: $1, 494 | } 495 | } 496 | 497 | strings: 498 | string 499 | { 500 | $$ = []*StringExpr{$1} 501 | } 502 | | strings string 503 | { 504 | $$ = append($1, $2) 505 | } 506 | 507 | ident: 508 | _IDENT 509 | { 510 | $$ = &LiteralExpr{Start: $1, Token: $1} 511 | } 512 | 513 | idents: 514 | ident 515 | { 516 | $$ = []Expr{$1} 517 | } 518 | | idents ',' ident 519 | { 520 | $$ = append($1, $3) 521 | } 522 | 523 | for_clause: 524 | _FOR idents _IN expr 525 | { 526 | $$ = &ForClause{ 527 | For: $1, 528 | Var: $2, 529 | In: $3, 530 | Expr: $4, 531 | } 532 | } 533 | | _FOR '(' idents ')' _IN expr 534 | { 535 | $$ = &ForClause{ 536 | For: $1, 537 | Var: $3, 538 | In: $5, 539 | Expr: $6, 540 | } 541 | } 542 | 543 | for_clauses: 544 | for_clause 545 | { 546 | $$ = []*ForClause{$1} 547 | } 548 | | for_clauses for_clause { 549 | $$ = append($1, $2) 550 | } 551 | 552 | if_clauses_opt: 553 | { 554 | $$ = nil 555 | } 556 | | if_clauses_opt _IF expr 557 | { 558 | $$ = append($1, &IfClause{ 559 | If: $2, 560 | Cond: $3, 561 | }) 562 | } 563 | 564 | %% 565 | 566 | // Go helper code. 
567 | 568 | // unary returns a unary expression with the given 569 | // position, operator, and subexpression. 570 | func unary(pos Position, op string, x Expr) Expr { 571 | return &UnaryExpr{ 572 | OpStart: pos, 573 | Op: op, 574 | X: x, 575 | } 576 | } 577 | 578 | // binary returns a binary expression with the given 579 | // operands, position, and operator. 580 | func binary(x Expr, pos Position, op string, y Expr) Expr { 581 | _, xend := x.Span() 582 | ystart, _ := y.Span() 583 | return &BinaryExpr{ 584 | X: x, 585 | OpStart: pos, 586 | Op: op, 587 | LineBreak: xend.Line < ystart.Line, 588 | Y: y, 589 | } 590 | } 591 | 592 | // forceCompact returns the setting for the ForceCompact field for a call or tuple. 593 | // 594 | // NOTE 1: The field is called ForceCompact, not ForceSingleLine, 595 | // because it only affects the formatting associated with the call or tuple syntax, 596 | // not the formatting of the arguments. For example: 597 | // 598 | // call([ 599 | // 1, 600 | // 2, 601 | // 3, 602 | // ]) 603 | // 604 | // is still a compact call even though it runs on multiple lines. 605 | // 606 | // In contrast the multiline form puts a linebreak after the (. 607 | // 608 | // call( 609 | // [ 610 | // 1, 611 | // 2, 612 | // 3, 613 | // ], 614 | // ) 615 | // 616 | // NOTE 2: Because of NOTE 1, we cannot use start and end on the 617 | // same line as a signal for compact mode: the formatting of an 618 | // embedded list might move the end to a different line, which would 619 | // then look different on rereading and cause buildifier not to be 620 | // idempotent. Instead, we have to look at properties guaranteed 621 | // to be preserved by the reformatting, namely that the opening 622 | // paren and the first expression are on the same line and that 623 | // each subsequent expression begins on the same line as the last 624 | // one ended (no line breaks after comma). 
625 | func forceCompact(start Position, list []Expr, end Position) bool { 626 | if len(list) <= 1 { 627 | // The call or tuple will probably be compact anyway; don't force it. 628 | return false 629 | } 630 | 631 | // If there are any named arguments or non-string, non-literal 632 | // arguments, cannot force compact mode. 633 | line := start.Line 634 | for _, x := range list { 635 | start, end := x.Span() 636 | if start.Line != line { 637 | return false 638 | } 639 | line = end.Line 640 | switch x.(type) { 641 | case *LiteralExpr, *StringExpr: 642 | // ok 643 | default: 644 | return false 645 | } 646 | } 647 | return end.Line == line 648 | } 649 | 650 | // forceMultiLine returns the setting for the ForceMultiLine field. 651 | func forceMultiLine(start Position, list []Expr, end Position) bool { 652 | if len(list) > 1 { 653 | // The call will be multiline anyway, because it has multiple elements. Don't force it. 654 | return false 655 | } 656 | 657 | if len(list) == 0 { 658 | // Empty list: use position of brackets. 659 | return start.Line != end.Line 660 | } 661 | 662 | // Single-element list. 663 | // Check whether opening bracket is on different line than beginning of 664 | // element, or closing bracket is on different line than end of element. 665 | elemStart, elemEnd := list[0].Span() 666 | return start.Line != elemStart.Line || end.Line != elemEnd.Line 667 | } 668 | -------------------------------------------------------------------------------- /vendor/github.com/bazelbuild/buildifier/core/parse.y.go: -------------------------------------------------------------------------------- 1 | //line parse.y:13 2 | package build 3 | 4 | import __yyfmt__ "fmt" 5 | 6 | //line parse.y:13 7 | //line parse.y:18 8 | type yySymType struct { 9 | yys int 10 | // input tokens 11 | tok string // raw input syntax 12 | str string // decoding of quoted string 13 | pos Position // position of token 14 | triple bool // was string triple quoted? 
15 | 16 | // partial syntax trees 17 | expr Expr 18 | exprs []Expr 19 | forc *ForClause 20 | fors []*ForClause 21 | ifs []*IfClause 22 | string *StringExpr 23 | strings []*StringExpr 24 | 25 | // supporting information 26 | comma Position // position of trailing comma in list, if present 27 | lastRule Expr // most recent rule, to attach line comments to 28 | } 29 | 30 | const _ADDEQ = 57346 31 | const _AND = 57347 32 | const _COMMENT = 57348 33 | const _EOF = 57349 34 | const _EQ = 57350 35 | const _FOR = 57351 36 | const _GE = 57352 37 | const _IDENT = 57353 38 | const _IF = 57354 39 | const _ELSE = 57355 40 | const _IN = 57356 41 | const _IS = 57357 42 | const _LAMBDA = 57358 43 | const _LE = 57359 44 | const _NE = 57360 45 | const _NOT = 57361 46 | const _OR = 57362 47 | const _PYTHON = 57363 48 | const _STRING = 57364 49 | const ShiftInstead = 57365 50 | const _ASSERT = 57366 51 | const _UNARY = 57367 52 | 53 | var yyToknames = [...]string{ 54 | "$end", 55 | "error", 56 | "$unk", 57 | "'%'", 58 | "'('", 59 | "')'", 60 | "'*'", 61 | "'+'", 62 | "','", 63 | "'-'", 64 | "'.'", 65 | "'/'", 66 | "':'", 67 | "'<'", 68 | "'='", 69 | "'>'", 70 | "'['", 71 | "']'", 72 | "'{'", 73 | "'}'", 74 | "_ADDEQ", 75 | "_AND", 76 | "_COMMENT", 77 | "_EOF", 78 | "_EQ", 79 | "_FOR", 80 | "_GE", 81 | "_IDENT", 82 | "_IF", 83 | "_ELSE", 84 | "_IN", 85 | "_IS", 86 | "_LAMBDA", 87 | "_LE", 88 | "_NE", 89 | "_NOT", 90 | "_OR", 91 | "_PYTHON", 92 | "_STRING", 93 | "ShiftInstead", 94 | "'\\n'", 95 | "_ASSERT", 96 | "_UNARY", 97 | "';'", 98 | } 99 | var yyStatenames = [...]string{} 100 | 101 | const yyEofCode = 1 102 | const yyErrCode = 2 103 | const yyInitialStackSize = 16 104 | 105 | //line parse.y:564 106 | 107 | // Go helper code. 108 | 109 | // unary returns a unary expression with the given 110 | // position, operator, and subexpression. 
111 | func unary(pos Position, op string, x Expr) Expr { 112 | return &UnaryExpr{ 113 | OpStart: pos, 114 | Op: op, 115 | X: x, 116 | } 117 | } 118 | 119 | // binary returns a binary expression with the given 120 | // operands, position, and operator. 121 | func binary(x Expr, pos Position, op string, y Expr) Expr { 122 | _, xend := x.Span() 123 | ystart, _ := y.Span() 124 | return &BinaryExpr{ 125 | X: x, 126 | OpStart: pos, 127 | Op: op, 128 | LineBreak: xend.Line < ystart.Line, 129 | Y: y, 130 | } 131 | } 132 | 133 | // forceCompact returns the setting for the ForceCompact field for a call or tuple. 134 | // 135 | // NOTE 1: The field is called ForceCompact, not ForceSingleLine, 136 | // because it only affects the formatting associated with the call or tuple syntax, 137 | // not the formatting of the arguments. For example: 138 | // 139 | // call([ 140 | // 1, 141 | // 2, 142 | // 3, 143 | // ]) 144 | // 145 | // is still a compact call even though it runs on multiple lines. 146 | // 147 | // In contrast the multiline form puts a linebreak after the (. 148 | // 149 | // call( 150 | // [ 151 | // 1, 152 | // 2, 153 | // 3, 154 | // ], 155 | // ) 156 | // 157 | // NOTE 2: Because of NOTE 1, we cannot use start and end on the 158 | // same line as a signal for compact mode: the formatting of an 159 | // embedded list might move the end to a different line, which would 160 | // then look different on rereading and cause buildifier not to be 161 | // idempotent. Instead, we have to look at properties guaranteed 162 | // to be preserved by the reformatting, namely that the opening 163 | // paren and the first expression are on the same line and that 164 | // each subsequent expression begins on the same line as the last 165 | // one ended (no line breaks after comma). 166 | func forceCompact(start Position, list []Expr, end Position) bool { 167 | if len(list) <= 1 { 168 | // The call or tuple will probably be compact anyway; don't force it. 
169 | return false 170 | } 171 | 172 | // If there are any named arguments or non-string, non-literal 173 | // arguments, cannot force compact mode. 174 | line := start.Line 175 | for _, x := range list { 176 | start, end := x.Span() 177 | if start.Line != line { 178 | return false 179 | } 180 | line = end.Line 181 | switch x.(type) { 182 | case *LiteralExpr, *StringExpr: 183 | // ok 184 | default: 185 | return false 186 | } 187 | } 188 | return end.Line == line 189 | } 190 | 191 | // forceMultiLine returns the setting for the ForceMultiLine field. 192 | func forceMultiLine(start Position, list []Expr, end Position) bool { 193 | if len(list) > 1 { 194 | // The call will be multiline anyway, because it has multiple elements. Don't force it. 195 | return false 196 | } 197 | 198 | if len(list) == 0 { 199 | // Empty list: use position of brackets. 200 | return start.Line != end.Line 201 | } 202 | 203 | // Single-element list. 204 | // Check whether opening bracket is on different line than beginning of 205 | // element, or closing bracket is on different line than end of element. 
206 | elemStart, elemEnd := list[0].Span() 207 | return start.Line != elemStart.Line || end.Line != elemEnd.Line 208 | } 209 | 210 | //line yacctab:1 211 | var yyExca = [...]int{ 212 | -1, 1, 213 | 1, -1, 214 | -2, 0, 215 | } 216 | 217 | const yyNprod = 71 218 | const yyPrivate = 57344 219 | 220 | var yyTokenNames []string 221 | var yyStates []string 222 | 223 | const yyLast = 542 224 | 225 | var yyAct = [...]int{ 226 | 227 | 53, 109, 9, 7, 65, 51, 86, 100, 21, 20, 228 | 135, 80, 47, 49, 87, 56, 57, 58, 59, 124, 229 | 18, 61, 107, 129, 127, 63, 64, 66, 67, 68, 230 | 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 231 | 79, 125, 81, 82, 83, 84, 123, 123, 12, 110, 232 | 17, 88, 128, 15, 122, 46, 91, 90, 93, 94, 233 | 11, 123, 13, 97, 130, 123, 85, 104, 50, 48, 234 | 102, 18, 18, 96, 99, 89, 14, 24, 98, 16, 235 | 62, 105, 20, 23, 55, 27, 24, 22, 26, 25, 236 | 112, 111, 23, 28, 134, 101, 115, 124, 25, 117, 237 | 112, 108, 116, 92, 19, 120, 108, 121, 108, 119, 238 | 60, 1, 126, 111, 113, 45, 114, 108, 10, 52, 239 | 54, 4, 2, 0, 131, 118, 133, 132, 0, 0, 240 | 0, 27, 24, 0, 26, 29, 136, 30, 23, 28, 241 | 0, 31, 37, 32, 25, 0, 0, 0, 38, 42, 242 | 0, 0, 33, 0, 36, 0, 44, 106, 39, 43, 243 | 0, 34, 35, 40, 41, 27, 24, 0, 26, 29, 244 | 0, 30, 23, 28, 0, 31, 37, 32, 25, 103, 245 | 0, 0, 38, 42, 0, 0, 33, 0, 36, 0, 246 | 44, 0, 39, 43, 0, 34, 35, 40, 41, 27, 247 | 24, 0, 26, 29, 0, 30, 23, 28, 0, 31, 248 | 37, 32, 25, 0, 0, 0, 38, 42, 0, 0, 249 | 33, 88, 36, 0, 44, 0, 39, 43, 0, 34, 250 | 35, 40, 41, 27, 24, 0, 26, 29, 0, 30, 251 | 23, 28, 95, 31, 37, 32, 25, 0, 0, 0, 252 | 38, 42, 0, 0, 33, 0, 36, 0, 44, 0, 253 | 39, 43, 0, 34, 35, 40, 41, 27, 24, 0, 254 | 26, 29, 0, 30, 23, 28, 0, 31, 37, 32, 255 | 25, 0, 0, 0, 38, 42, 0, 0, 33, 0, 256 | 36, 0, 44, 0, 39, 43, 0, 34, 35, 40, 257 | 41, 27, 24, 0, 26, 29, 0, 30, 23, 28, 258 | 0, 31, 37, 32, 25, 0, 0, 0, 38, 42, 259 | 0, 0, 33, 0, 36, 0, 0, 0, 39, 43, 260 | 0, 34, 35, 40, 41, 27, 24, 0, 26, 29, 261 | 0, 30, 23, 28, 0, 31, 0, 32, 25, 0, 
262 | 0, 0, 0, 42, 0, 0, 33, 0, 36, 0, 263 | 44, 0, 39, 43, 0, 34, 35, 40, 41, 27, 264 | 24, 0, 26, 29, 0, 30, 23, 28, 0, 31, 265 | 0, 32, 25, 0, 0, 0, 0, 42, 0, 0, 266 | 33, 0, 36, 12, 0, 17, 39, 43, 15, 34, 267 | 35, 40, 41, 0, 0, 11, 0, 13, 0, 0, 268 | 0, 6, 3, 0, 0, 0, 18, 0, 0, 0, 269 | 0, 14, 0, 0, 16, 0, 8, 20, 0, 5, 270 | 27, 24, 0, 26, 29, 0, 30, 23, 28, 0, 271 | 31, 0, 32, 25, 0, 0, 0, 0, 42, 0, 272 | 0, 33, 0, 36, 0, 0, 0, 0, 0, 0, 273 | 34, 35, 0, 41, 27, 24, 0, 26, 29, 0, 274 | 30, 23, 28, 0, 31, 0, 32, 25, 0, 0, 275 | 0, 0, 42, 0, 0, 33, 0, 36, 0, 0, 276 | 0, 0, 0, 0, 34, 35, 27, 24, 0, 26, 277 | 29, 0, 30, 23, 28, 0, 31, 0, 32, 25, 278 | 0, 0, 0, 0, 0, 0, 0, 33, 0, 36, 279 | 0, 0, 0, 0, 0, 0, 34, 35, 27, 24, 280 | 0, 26, 29, 0, 30, 23, 28, 0, 0, 0, 281 | 0, 25, 282 | } 283 | var yyPact = [...]int{ 284 | 285 | -1000, -1000, 388, -1000, 78, -1000, -1000, 263, -1000, -1000, 286 | -30, 43, 43, 43, 43, 43, 43, 43, -1000, -1000, 287 | -1000, -1000, -1000, -7, 43, 43, 43, 43, 43, 43, 288 | 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 289 | -20, 43, 43, 43, 43, -1000, 48, 195, 66, 195, 290 | 97, 25, 39, 229, 64, 65, 263, -1000, -1000, -1000, 291 | -37, -1000, 89, 195, 161, 54, 72, 72, 72, 81, 292 | 81, 524, 524, 524, 524, 524, 524, 331, 331, 426, 293 | 43, 460, 492, 426, 127, -1000, 25, -1000, 44, 43, 294 | -1000, 25, -1000, 25, -1000, 43, 43, -1000, 43, 43, 295 | -1000, -1000, 25, -1000, 43, 426, 43, 36, -1000, 10, 296 | -8, -1000, 263, 18, 32, 263, -1000, 365, 17, 46, 297 | 263, 365, -1000, 43, -8, 43, 88, -1000, -1000, -1000, 298 | -1000, 297, -1000, 297, -21, 43, 297, 299 | } 300 | var yyPgo = [...]int{ 301 | 302 | 0, 8, 0, 4, 69, 55, 14, 6, 2, 1, 303 | 22, 122, 121, 5, 120, 119, 104, 118, 111, 110, 304 | } 305 | var yyR1 = [...]int{ 306 | 307 | 0, 18, 11, 11, 11, 11, 12, 12, 19, 19, 308 | 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 309 | 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 310 | 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 311 | 2, 2, 2, 2, 2, 2, 3, 3, 1, 1, 312 | 13, 14, 14, 15, 15, 4, 4, 5, 
5, 16, 313 | 17, 17, 8, 9, 9, 6, 6, 7, 7, 10, 314 | 10, 315 | } 316 | var yyR2 = [...]int{ 317 | 318 | 0, 2, 0, 4, 2, 2, 1, 1, 0, 2, 319 | 1, 1, 3, 5, 5, 5, 3, 3, 3, 4, 320 | 6, 4, 6, 4, 2, 2, 2, 3, 3, 3, 321 | 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 322 | 3, 4, 3, 3, 3, 5, 0, 1, 0, 1, 323 | 3, 1, 3, 0, 2, 1, 3, 0, 2, 1, 324 | 1, 2, 1, 1, 3, 4, 6, 1, 2, 0, 325 | 3, 326 | } 327 | var yyChk = [...]int{ 328 | 329 | -1000, -18, -11, 24, -12, 41, 23, -2, 38, -8, 330 | -17, 17, 5, 19, 33, 10, 36, 7, 28, -16, 331 | 39, -1, 9, 11, 5, 17, 7, 4, 12, 8, 332 | 10, 14, 16, 25, 34, 35, 27, 15, 21, 31, 333 | 36, 37, 22, 32, 29, -16, -5, -2, -4, -2, 334 | -5, -13, -15, -2, -14, -4, -2, -2, -2, -2, 335 | -19, 28, -5, -2, -2, -3, -2, -2, -2, -2, 336 | -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, 337 | 31, -2, -2, -2, -2, 18, -7, -6, 26, 9, 338 | -1, -7, 6, -7, 20, 13, 9, -1, 13, 9, 339 | 44, 6, -7, 18, 13, -2, 30, -10, -6, -9, 340 | 5, -8, -2, -10, -10, -2, -13, -2, -10, -3, 341 | -2, -2, 18, 29, 9, 31, -9, 6, 20, 6, 342 | 18, -2, -8, -2, 6, 31, -2, 343 | } 344 | var yyDef = [...]int{ 345 | 346 | 2, -2, 0, 1, 48, 4, 5, 6, 7, 10, 347 | 11, 57, 57, 53, 0, 0, 0, 0, 62, 60, 348 | 59, 8, 49, 0, 57, 46, 0, 0, 0, 0, 349 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 350 | 0, 0, 0, 0, 0, 61, 0, 55, 48, 55, 351 | 0, 51, 0, 0, 48, 0, 55, 24, 25, 26, 352 | 3, 18, 0, 55, 47, 0, 27, 28, 29, 30, 353 | 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 354 | 0, 42, 43, 44, 0, 12, 69, 67, 0, 49, 355 | 58, 69, 17, 69, 16, 0, 49, 54, 0, 0, 356 | 9, 19, 69, 21, 46, 41, 0, 0, 68, 0, 357 | 0, 63, 56, 0, 0, 50, 52, 23, 0, 0, 358 | 47, 45, 13, 0, 0, 0, 0, 14, 15, 20, 359 | 22, 70, 64, 65, 0, 0, 66, 360 | } 361 | var yyTok1 = [...]int{ 362 | 363 | 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 364 | 41, 3, 3, 3, 3, 3, 3, 3, 3, 3, 365 | 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 366 | 3, 3, 3, 3, 3, 3, 3, 4, 3, 3, 367 | 5, 6, 7, 8, 9, 10, 11, 12, 3, 3, 368 | 3, 3, 3, 3, 3, 3, 3, 3, 13, 44, 369 | 14, 15, 16, 3, 3, 3, 3, 3, 3, 3, 370 | 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 371 | 3, 3, 
3, 3, 3, 3, 3, 3, 3, 3, 372 | 3, 17, 3, 18, 3, 3, 3, 3, 3, 3, 373 | 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 374 | 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 375 | 3, 3, 3, 19, 3, 20, 376 | } 377 | var yyTok2 = [...]int{ 378 | 379 | 2, 3, 21, 22, 23, 24, 25, 26, 27, 28, 380 | 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 381 | 39, 40, 42, 43, 382 | } 383 | var yyTok3 = [...]int{ 384 | 0, 385 | } 386 | 387 | var yyErrorMessages = [...]struct { 388 | state int 389 | token int 390 | msg string 391 | }{} 392 | 393 | //line yaccpar:1 394 | 395 | /* parser for yacc output */ 396 | 397 | var ( 398 | yyDebug = 0 399 | yyErrorVerbose = false 400 | ) 401 | 402 | type yyLexer interface { 403 | Lex(lval *yySymType) int 404 | Error(s string) 405 | } 406 | 407 | type yyParser interface { 408 | Parse(yyLexer) int 409 | Lookahead() int 410 | } 411 | 412 | type yyParserImpl struct { 413 | lval yySymType 414 | stack [yyInitialStackSize]yySymType 415 | char int 416 | } 417 | 418 | func (p *yyParserImpl) Lookahead() int { 419 | return p.char 420 | } 421 | 422 | func yyNewParser() yyParser { 423 | return &yyParserImpl{} 424 | } 425 | 426 | const yyFlag = -1000 427 | 428 | func yyTokname(c int) string { 429 | if c >= 1 && c-1 < len(yyToknames) { 430 | if yyToknames[c-1] != "" { 431 | return yyToknames[c-1] 432 | } 433 | } 434 | return __yyfmt__.Sprintf("tok-%v", c) 435 | } 436 | 437 | func yyStatname(s int) string { 438 | if s >= 0 && s < len(yyStatenames) { 439 | if yyStatenames[s] != "" { 440 | return yyStatenames[s] 441 | } 442 | } 443 | return __yyfmt__.Sprintf("state-%v", s) 444 | } 445 | 446 | func yyErrorMessage(state, lookAhead int) string { 447 | const TOKSTART = 4 448 | 449 | if !yyErrorVerbose { 450 | return "syntax error" 451 | } 452 | 453 | for _, e := range yyErrorMessages { 454 | if e.state == state && e.token == lookAhead { 455 | return "syntax error: " + e.msg 456 | } 457 | } 458 | 459 | res := "syntax error: unexpected " + yyTokname(lookAhead) 460 | 461 | // To match Bison, suggest at most four 
expected tokens. 462 | expected := make([]int, 0, 4) 463 | 464 | // Look for shiftable tokens. 465 | base := yyPact[state] 466 | for tok := TOKSTART; tok-1 < len(yyToknames); tok++ { 467 | if n := base + tok; n >= 0 && n < yyLast && yyChk[yyAct[n]] == tok { 468 | if len(expected) == cap(expected) { 469 | return res 470 | } 471 | expected = append(expected, tok) 472 | } 473 | } 474 | 475 | if yyDef[state] == -2 { 476 | i := 0 477 | for yyExca[i] != -1 || yyExca[i+1] != state { 478 | i += 2 479 | } 480 | 481 | // Look for tokens that we accept or reduce. 482 | for i += 2; yyExca[i] >= 0; i += 2 { 483 | tok := yyExca[i] 484 | if tok < TOKSTART || yyExca[i+1] == 0 { 485 | continue 486 | } 487 | if len(expected) == cap(expected) { 488 | return res 489 | } 490 | expected = append(expected, tok) 491 | } 492 | 493 | // If the default action is to accept or reduce, give up. 494 | if yyExca[i+1] != 0 { 495 | return res 496 | } 497 | } 498 | 499 | for i, tok := range expected { 500 | if i == 0 { 501 | res += ", expecting " 502 | } else { 503 | res += " or " 504 | } 505 | res += yyTokname(tok) 506 | } 507 | return res 508 | } 509 | 510 | func yylex1(lex yyLexer, lval *yySymType) (char, token int) { 511 | token = 0 512 | char = lex.Lex(lval) 513 | if char <= 0 { 514 | token = yyTok1[0] 515 | goto out 516 | } 517 | if char < len(yyTok1) { 518 | token = yyTok1[char] 519 | goto out 520 | } 521 | if char >= yyPrivate { 522 | if char < yyPrivate+len(yyTok2) { 523 | token = yyTok2[char-yyPrivate] 524 | goto out 525 | } 526 | } 527 | for i := 0; i < len(yyTok3); i += 2 { 528 | token = yyTok3[i+0] 529 | if token == char { 530 | token = yyTok3[i+1] 531 | goto out 532 | } 533 | } 534 | 535 | out: 536 | if token == 0 { 537 | token = yyTok2[1] /* unknown char */ 538 | } 539 | if yyDebug >= 3 { 540 | __yyfmt__.Printf("lex %s(%d)\n", yyTokname(token), uint(char)) 541 | } 542 | return char, token 543 | } 544 | 545 | func yyParse(yylex yyLexer) int { 546 | return yyNewParser().Parse(yylex) 547 
| } 548 | 549 | func (yyrcvr *yyParserImpl) Parse(yylex yyLexer) int { 550 | var yyn int 551 | var yyVAL yySymType 552 | var yyDollar []yySymType 553 | _ = yyDollar // silence set and not used 554 | yyS := yyrcvr.stack[:] 555 | 556 | Nerrs := 0 /* number of errors */ 557 | Errflag := 0 /* error recovery flag */ 558 | yystate := 0 559 | yyrcvr.char = -1 560 | yytoken := -1 // yyrcvr.char translated into internal numbering 561 | defer func() { 562 | // Make sure we report no lookahead when not parsing. 563 | yystate = -1 564 | yyrcvr.char = -1 565 | yytoken = -1 566 | }() 567 | yyp := -1 568 | goto yystack 569 | 570 | ret0: 571 | return 0 572 | 573 | ret1: 574 | return 1 575 | 576 | yystack: 577 | /* put a state and value onto the stack */ 578 | if yyDebug >= 4 { 579 | __yyfmt__.Printf("char %v in %v\n", yyTokname(yytoken), yyStatname(yystate)) 580 | } 581 | 582 | yyp++ 583 | if yyp >= len(yyS) { 584 | nyys := make([]yySymType, len(yyS)*2) 585 | copy(nyys, yyS) 586 | yyS = nyys 587 | } 588 | yyS[yyp] = yyVAL 589 | yyS[yyp].yys = yystate 590 | 591 | yynewstate: 592 | yyn = yyPact[yystate] 593 | if yyn <= yyFlag { 594 | goto yydefault /* simple state */ 595 | } 596 | if yyrcvr.char < 0 { 597 | yyrcvr.char, yytoken = yylex1(yylex, &yyrcvr.lval) 598 | } 599 | yyn += yytoken 600 | if yyn < 0 || yyn >= yyLast { 601 | goto yydefault 602 | } 603 | yyn = yyAct[yyn] 604 | if yyChk[yyn] == yytoken { /* valid shift */ 605 | yyrcvr.char = -1 606 | yytoken = -1 607 | yyVAL = yyrcvr.lval 608 | yystate = yyn 609 | if Errflag > 0 { 610 | Errflag-- 611 | } 612 | goto yystack 613 | } 614 | 615 | yydefault: 616 | /* default state action */ 617 | yyn = yyDef[yystate] 618 | if yyn == -2 { 619 | if yyrcvr.char < 0 { 620 | yyrcvr.char, yytoken = yylex1(yylex, &yyrcvr.lval) 621 | } 622 | 623 | /* look through exception table */ 624 | xi := 0 625 | for { 626 | if yyExca[xi+0] == -1 && yyExca[xi+1] == yystate { 627 | break 628 | } 629 | xi += 2 630 | } 631 | for xi += 2; ; xi += 2 { 632 | yyn 
= yyExca[xi+0] 633 | if yyn < 0 || yyn == yytoken { 634 | break 635 | } 636 | } 637 | yyn = yyExca[xi+1] 638 | if yyn < 0 { 639 | goto ret0 640 | } 641 | } 642 | if yyn == 0 { 643 | /* error ... attempt to resume parsing */ 644 | switch Errflag { 645 | case 0: /* brand new error */ 646 | yylex.Error(yyErrorMessage(yystate, yytoken)) 647 | Nerrs++ 648 | if yyDebug >= 1 { 649 | __yyfmt__.Printf("%s", yyStatname(yystate)) 650 | __yyfmt__.Printf(" saw %s\n", yyTokname(yytoken)) 651 | } 652 | fallthrough 653 | 654 | case 1, 2: /* incompletely recovered error ... try again */ 655 | Errflag = 3 656 | 657 | /* find a state where "error" is a legal shift action */ 658 | for yyp >= 0 { 659 | yyn = yyPact[yyS[yyp].yys] + yyErrCode 660 | if yyn >= 0 && yyn < yyLast { 661 | yystate = yyAct[yyn] /* simulate a shift of "error" */ 662 | if yyChk[yystate] == yyErrCode { 663 | goto yystack 664 | } 665 | } 666 | 667 | /* the current p has no shift on "error", pop stack */ 668 | if yyDebug >= 2 { 669 | __yyfmt__.Printf("error recovery pops state %d\n", yyS[yyp].yys) 670 | } 671 | yyp-- 672 | } 673 | /* there is no state on the stack with an error shift ... abort */ 674 | goto ret1 675 | 676 | case 3: /* no shift yet; clobber input char */ 677 | if yyDebug >= 2 { 678 | __yyfmt__.Printf("error recovery discards %s\n", yyTokname(yytoken)) 679 | } 680 | if yytoken == yyEofCode { 681 | goto ret1 682 | } 683 | yyrcvr.char = -1 684 | yytoken = -1 685 | goto yynewstate /* try again in the same state */ 686 | } 687 | } 688 | 689 | /* reduction by production yyn */ 690 | if yyDebug >= 2 { 691 | __yyfmt__.Printf("reduce %v in:\n\t%v\n", yyn, yyStatname(yystate)) 692 | } 693 | 694 | yynt := yyn 695 | yypt := yyp 696 | _ = yypt // guard against "declared and not used" 697 | 698 | yyp -= yyR2[yyn] 699 | // yyp is now the index of $0. Perform the default action. Iff the 700 | // reduced production is ε, $1 is possibly out of range. 
701 | if yyp+1 >= len(yyS) { 702 | nyys := make([]yySymType, len(yyS)*2) 703 | copy(nyys, yyS) 704 | yyS = nyys 705 | } 706 | yyVAL = yyS[yyp+1] 707 | 708 | /* consult goto table to find next state */ 709 | yyn = yyR1[yyn] 710 | yyg := yyPgo[yyn] 711 | yyj := yyg + yyS[yyp].yys + 1 712 | 713 | if yyj >= yyLast { 714 | yystate = yyAct[yyg] 715 | } else { 716 | yystate = yyAct[yyj] 717 | if yyChk[yystate] != -yyn { 718 | yystate = yyAct[yyg] 719 | } 720 | } 721 | // dummy call; replaced with literal code 722 | switch yynt { 723 | 724 | case 1: 725 | yyDollar = yyS[yypt-2 : yypt+1] 726 | //line parse.y:151 727 | { 728 | yylex.(*input).file = &File{Stmt: yyDollar[1].exprs} 729 | return 0 730 | } 731 | case 2: 732 | yyDollar = yyS[yypt-0 : yypt+1] 733 | //line parse.y:157 734 | { 735 | yyVAL.exprs = nil 736 | yyVAL.lastRule = nil 737 | } 738 | case 3: 739 | yyDollar = yyS[yypt-4 : yypt+1] 740 | //line parse.y:162 741 | { 742 | // If this statement follows a comment block, 743 | // attach the comments to the statement. 744 | if cb, ok := yyDollar[1].lastRule.(*CommentBlock); ok { 745 | yyVAL.exprs = yyDollar[1].exprs 746 | yyVAL.exprs[len(yyDollar[1].exprs)-1] = yyDollar[2].expr 747 | yyDollar[2].expr.Comment().Before = cb.After 748 | yyVAL.lastRule = yyDollar[2].expr 749 | break 750 | } 751 | 752 | // Otherwise add to list. 753 | yyVAL.exprs = append(yyDollar[1].exprs, yyDollar[2].expr) 754 | yyVAL.lastRule = yyDollar[2].expr 755 | 756 | // Consider this input: 757 | // 758 | // foo() 759 | // # bar 760 | // baz() 761 | // 762 | // If we've just parsed baz(), the # bar is attached to 763 | // foo() as an After comment. Make it a Before comment 764 | // for baz() instead. 
765 | if x := yyDollar[1].lastRule; x != nil { 766 | com := x.Comment() 767 | yyDollar[2].expr.Comment().Before = com.After 768 | com.After = nil 769 | } 770 | } 771 | case 4: 772 | yyDollar = yyS[yypt-2 : yypt+1] 773 | //line parse.y:193 774 | { 775 | // Blank line; sever last rule from future comments. 776 | yyVAL.exprs = yyDollar[1].exprs 777 | yyVAL.lastRule = nil 778 | } 779 | case 5: 780 | yyDollar = yyS[yypt-2 : yypt+1] 781 | //line parse.y:199 782 | { 783 | yyVAL.exprs = yyDollar[1].exprs 784 | yyVAL.lastRule = yyDollar[1].lastRule 785 | if yyVAL.lastRule == nil { 786 | cb := &CommentBlock{Start: yyDollar[2].pos} 787 | yyVAL.exprs = append(yyVAL.exprs, cb) 788 | yyVAL.lastRule = cb 789 | } 790 | com := yyVAL.lastRule.Comment() 791 | com.After = append(com.After, Comment{Start: yyDollar[2].pos, Token: yyDollar[2].tok}) 792 | } 793 | case 7: 794 | yyDollar = yyS[yypt-1 : yypt+1] 795 | //line parse.y:214 796 | { 797 | yyVAL.expr = &PythonBlock{Start: yyDollar[1].pos, Token: yyDollar[1].tok} 798 | } 799 | case 11: 800 | yyDollar = yyS[yypt-1 : yypt+1] 801 | //line parse.y:224 802 | { 803 | if len(yyDollar[1].strings) == 1 { 804 | yyVAL.expr = yyDollar[1].strings[0] 805 | break 806 | } 807 | 808 | yyVAL.expr = yyDollar[1].strings[0] 809 | for _, x := range yyDollar[1].strings[1:] { 810 | _, end := yyVAL.expr.Span() 811 | yyVAL.expr = binary(yyVAL.expr, end, "+", x) 812 | } 813 | } 814 | case 12: 815 | yyDollar = yyS[yypt-3 : yypt+1] 816 | //line parse.y:237 817 | { 818 | yyVAL.expr = &ListExpr{ 819 | Start: yyDollar[1].pos, 820 | List: yyDollar[2].exprs, 821 | Comma: yyDollar[2].comma, 822 | End: End{Pos: yyDollar[3].pos}, 823 | ForceMultiLine: forceMultiLine(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), 824 | } 825 | } 826 | case 13: 827 | yyDollar = yyS[yypt-5 : yypt+1] 828 | //line parse.y:247 829 | { 830 | exprStart, _ := yyDollar[2].expr.Span() 831 | yyVAL.expr = &ListForExpr{ 832 | Brack: "[]", 833 | Start: yyDollar[1].pos, 834 | X: 
yyDollar[2].expr, 835 | For: yyDollar[3].fors, 836 | If: yyDollar[4].ifs, 837 | End: End{Pos: yyDollar[5].pos}, 838 | ForceMultiLine: yyDollar[1].pos.Line != exprStart.Line, 839 | } 840 | } 841 | case 14: 842 | yyDollar = yyS[yypt-5 : yypt+1] 843 | //line parse.y:260 844 | { 845 | exprStart, _ := yyDollar[2].expr.Span() 846 | yyVAL.expr = &ListForExpr{ 847 | Brack: "()", 848 | Start: yyDollar[1].pos, 849 | X: yyDollar[2].expr, 850 | For: yyDollar[3].fors, 851 | If: yyDollar[4].ifs, 852 | End: End{Pos: yyDollar[5].pos}, 853 | ForceMultiLine: yyDollar[1].pos.Line != exprStart.Line, 854 | } 855 | } 856 | case 15: 857 | yyDollar = yyS[yypt-5 : yypt+1] 858 | //line parse.y:273 859 | { 860 | exprStart, _ := yyDollar[2].expr.Span() 861 | yyVAL.expr = &ListForExpr{ 862 | Brack: "{}", 863 | Start: yyDollar[1].pos, 864 | X: yyDollar[2].expr, 865 | For: yyDollar[3].fors, 866 | If: yyDollar[4].ifs, 867 | End: End{Pos: yyDollar[5].pos}, 868 | ForceMultiLine: yyDollar[1].pos.Line != exprStart.Line, 869 | } 870 | } 871 | case 16: 872 | yyDollar = yyS[yypt-3 : yypt+1] 873 | //line parse.y:286 874 | { 875 | yyVAL.expr = &DictExpr{ 876 | Start: yyDollar[1].pos, 877 | List: yyDollar[2].exprs, 878 | Comma: yyDollar[2].comma, 879 | End: End{Pos: yyDollar[3].pos}, 880 | ForceMultiLine: forceMultiLine(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), 881 | } 882 | } 883 | case 17: 884 | yyDollar = yyS[yypt-3 : yypt+1] 885 | //line parse.y:296 886 | { 887 | if len(yyDollar[2].exprs) == 1 && yyDollar[2].comma.Line == 0 { 888 | // Just a parenthesized expression, not a tuple. 
889 | yyVAL.expr = &ParenExpr{ 890 | Start: yyDollar[1].pos, 891 | X: yyDollar[2].exprs[0], 892 | End: End{Pos: yyDollar[3].pos}, 893 | ForceMultiLine: forceMultiLine(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), 894 | } 895 | } else { 896 | yyVAL.expr = &TupleExpr{ 897 | Start: yyDollar[1].pos, 898 | List: yyDollar[2].exprs, 899 | Comma: yyDollar[2].comma, 900 | End: End{Pos: yyDollar[3].pos}, 901 | ForceCompact: forceCompact(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), 902 | ForceMultiLine: forceMultiLine(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), 903 | } 904 | } 905 | } 906 | case 18: 907 | yyDollar = yyS[yypt-3 : yypt+1] 908 | //line parse.y:317 909 | { 910 | yyVAL.expr = &DotExpr{ 911 | X: yyDollar[1].expr, 912 | Dot: yyDollar[2].pos, 913 | NamePos: yyDollar[3].pos, 914 | Name: yyDollar[3].tok, 915 | } 916 | } 917 | case 19: 918 | yyDollar = yyS[yypt-4 : yypt+1] 919 | //line parse.y:326 920 | { 921 | yyVAL.expr = &CallExpr{ 922 | X: yyDollar[1].expr, 923 | ListStart: yyDollar[2].pos, 924 | List: yyDollar[3].exprs, 925 | End: End{Pos: yyDollar[4].pos}, 926 | ForceCompact: forceCompact(yyDollar[2].pos, yyDollar[3].exprs, yyDollar[4].pos), 927 | ForceMultiLine: forceMultiLine(yyDollar[2].pos, yyDollar[3].exprs, yyDollar[4].pos), 928 | } 929 | } 930 | case 20: 931 | yyDollar = yyS[yypt-6 : yypt+1] 932 | //line parse.y:337 933 | { 934 | yyVAL.expr = &CallExpr{ 935 | X: yyDollar[1].expr, 936 | ListStart: yyDollar[2].pos, 937 | List: []Expr{ 938 | &ListForExpr{ 939 | Brack: "", 940 | Start: yyDollar[2].pos, 941 | X: yyDollar[3].expr, 942 | For: yyDollar[4].fors, 943 | If: yyDollar[5].ifs, 944 | End: End{Pos: yyDollar[6].pos}, 945 | }, 946 | }, 947 | End: End{Pos: yyDollar[6].pos}, 948 | } 949 | } 950 | case 21: 951 | yyDollar = yyS[yypt-4 : yypt+1] 952 | //line parse.y:355 953 | { 954 | yyVAL.expr = &IndexExpr{ 955 | X: yyDollar[1].expr, 956 | IndexStart: yyDollar[2].pos, 957 | Y: yyDollar[3].expr, 958 | End: yyDollar[4].pos, 959 | } 960 | 
} 961 | case 22: 962 | yyDollar = yyS[yypt-6 : yypt+1] 963 | //line parse.y:364 964 | { 965 | yyVAL.expr = &SliceExpr{ 966 | X: yyDollar[1].expr, 967 | SliceStart: yyDollar[2].pos, 968 | Y: yyDollar[3].expr, 969 | Colon: yyDollar[4].pos, 970 | Z: yyDollar[5].expr, 971 | End: yyDollar[6].pos, 972 | } 973 | } 974 | case 23: 975 | yyDollar = yyS[yypt-4 : yypt+1] 976 | //line parse.y:375 977 | { 978 | yyVAL.expr = &LambdaExpr{ 979 | Lambda: yyDollar[1].pos, 980 | Var: yyDollar[2].exprs, 981 | Colon: yyDollar[3].pos, 982 | Expr: yyDollar[4].expr, 983 | } 984 | } 985 | case 24: 986 | yyDollar = yyS[yypt-2 : yypt+1] 987 | //line parse.y:383 988 | { 989 | yyVAL.expr = unary(yyDollar[1].pos, yyDollar[1].tok, yyDollar[2].expr) 990 | } 991 | case 25: 992 | yyDollar = yyS[yypt-2 : yypt+1] 993 | //line parse.y:384 994 | { 995 | yyVAL.expr = unary(yyDollar[1].pos, yyDollar[1].tok, yyDollar[2].expr) 996 | } 997 | case 26: 998 | yyDollar = yyS[yypt-2 : yypt+1] 999 | //line parse.y:385 1000 | { 1001 | yyVAL.expr = unary(yyDollar[1].pos, yyDollar[1].tok, yyDollar[2].expr) 1002 | } 1003 | case 27: 1004 | yyDollar = yyS[yypt-3 : yypt+1] 1005 | //line parse.y:386 1006 | { 1007 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1008 | } 1009 | case 28: 1010 | yyDollar = yyS[yypt-3 : yypt+1] 1011 | //line parse.y:387 1012 | { 1013 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1014 | } 1015 | case 29: 1016 | yyDollar = yyS[yypt-3 : yypt+1] 1017 | //line parse.y:388 1018 | { 1019 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1020 | } 1021 | case 30: 1022 | yyDollar = yyS[yypt-3 : yypt+1] 1023 | //line parse.y:389 1024 | { 1025 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1026 | } 1027 | case 31: 1028 | yyDollar = yyS[yypt-3 : yypt+1] 1029 | //line parse.y:390 1030 | { 1031 | yyVAL.expr = binary(yyDollar[1].expr, 
yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1032 | } 1033 | case 32: 1034 | yyDollar = yyS[yypt-3 : yypt+1] 1035 | //line parse.y:391 1036 | { 1037 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1038 | } 1039 | case 33: 1040 | yyDollar = yyS[yypt-3 : yypt+1] 1041 | //line parse.y:392 1042 | { 1043 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1044 | } 1045 | case 34: 1046 | yyDollar = yyS[yypt-3 : yypt+1] 1047 | //line parse.y:393 1048 | { 1049 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1050 | } 1051 | case 35: 1052 | yyDollar = yyS[yypt-3 : yypt+1] 1053 | //line parse.y:394 1054 | { 1055 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1056 | } 1057 | case 36: 1058 | yyDollar = yyS[yypt-3 : yypt+1] 1059 | //line parse.y:395 1060 | { 1061 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1062 | } 1063 | case 37: 1064 | yyDollar = yyS[yypt-3 : yypt+1] 1065 | //line parse.y:396 1066 | { 1067 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1068 | } 1069 | case 38: 1070 | yyDollar = yyS[yypt-3 : yypt+1] 1071 | //line parse.y:397 1072 | { 1073 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1074 | } 1075 | case 39: 1076 | yyDollar = yyS[yypt-3 : yypt+1] 1077 | //line parse.y:398 1078 | { 1079 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1080 | } 1081 | case 40: 1082 | yyDollar = yyS[yypt-3 : yypt+1] 1083 | //line parse.y:399 1084 | { 1085 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1086 | } 1087 | case 41: 1088 | yyDollar = yyS[yypt-4 : yypt+1] 1089 | //line parse.y:400 1090 | { 1091 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, "not in", 
yyDollar[4].expr) 1092 | } 1093 | case 42: 1094 | yyDollar = yyS[yypt-3 : yypt+1] 1095 | //line parse.y:401 1096 | { 1097 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1098 | } 1099 | case 43: 1100 | yyDollar = yyS[yypt-3 : yypt+1] 1101 | //line parse.y:402 1102 | { 1103 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1104 | } 1105 | case 44: 1106 | yyDollar = yyS[yypt-3 : yypt+1] 1107 | //line parse.y:404 1108 | { 1109 | if b, ok := yyDollar[3].expr.(*UnaryExpr); ok && b.Op == "not" { 1110 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, "is not", b.X) 1111 | } else { 1112 | yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) 1113 | } 1114 | } 1115 | case 45: 1116 | yyDollar = yyS[yypt-5 : yypt+1] 1117 | //line parse.y:412 1118 | { 1119 | yyVAL.expr = &ConditionalExpr{ 1120 | Then: yyDollar[1].expr, 1121 | IfStart: yyDollar[2].pos, 1122 | Test: yyDollar[3].expr, 1123 | ElseStart: yyDollar[4].pos, 1124 | Else: yyDollar[5].expr, 1125 | } 1126 | } 1127 | case 46: 1128 | yyDollar = yyS[yypt-0 : yypt+1] 1129 | //line parse.y:423 1130 | { 1131 | yyVAL.expr = nil 1132 | } 1133 | case 48: 1134 | yyDollar = yyS[yypt-0 : yypt+1] 1135 | //line parse.y:433 1136 | { 1137 | yyVAL.pos = Position{} 1138 | } 1139 | case 50: 1140 | yyDollar = yyS[yypt-3 : yypt+1] 1141 | //line parse.y:439 1142 | { 1143 | yyVAL.expr = &KeyValueExpr{ 1144 | Key: yyDollar[1].expr, 1145 | Colon: yyDollar[2].pos, 1146 | Value: yyDollar[3].expr, 1147 | } 1148 | } 1149 | case 51: 1150 | yyDollar = yyS[yypt-1 : yypt+1] 1151 | //line parse.y:449 1152 | { 1153 | yyVAL.exprs = []Expr{yyDollar[1].expr} 1154 | } 1155 | case 52: 1156 | yyDollar = yyS[yypt-3 : yypt+1] 1157 | //line parse.y:453 1158 | { 1159 | yyVAL.exprs = append(yyDollar[1].exprs, yyDollar[3].expr) 1160 | } 1161 | case 53: 1162 | yyDollar = yyS[yypt-0 : yypt+1] 1163 | //line parse.y:458 1164 | { 1165 | 
yyVAL.exprs, yyVAL.comma = nil, Position{} 1166 | } 1167 | case 54: 1168 | yyDollar = yyS[yypt-2 : yypt+1] 1169 | //line parse.y:462 1170 | { 1171 | yyVAL.exprs, yyVAL.comma = yyDollar[1].exprs, yyDollar[2].pos 1172 | } 1173 | case 55: 1174 | yyDollar = yyS[yypt-1 : yypt+1] 1175 | //line parse.y:468 1176 | { 1177 | yyVAL.exprs = []Expr{yyDollar[1].expr} 1178 | } 1179 | case 56: 1180 | yyDollar = yyS[yypt-3 : yypt+1] 1181 | //line parse.y:472 1182 | { 1183 | yyVAL.exprs = append(yyDollar[1].exprs, yyDollar[3].expr) 1184 | } 1185 | case 57: 1186 | yyDollar = yyS[yypt-0 : yypt+1] 1187 | //line parse.y:477 1188 | { 1189 | yyVAL.exprs, yyVAL.comma = nil, Position{} 1190 | } 1191 | case 58: 1192 | yyDollar = yyS[yypt-2 : yypt+1] 1193 | //line parse.y:481 1194 | { 1195 | yyVAL.exprs, yyVAL.comma = yyDollar[1].exprs, yyDollar[2].pos 1196 | } 1197 | case 59: 1198 | yyDollar = yyS[yypt-1 : yypt+1] 1199 | //line parse.y:487 1200 | { 1201 | yyVAL.string = &StringExpr{ 1202 | Start: yyDollar[1].pos, 1203 | Value: yyDollar[1].str, 1204 | TripleQuote: yyDollar[1].triple, 1205 | End: yyDollar[1].pos.add(yyDollar[1].tok), 1206 | Token: yyDollar[1].tok, 1207 | } 1208 | } 1209 | case 60: 1210 | yyDollar = yyS[yypt-1 : yypt+1] 1211 | //line parse.y:499 1212 | { 1213 | yyVAL.strings = []*StringExpr{yyDollar[1].string} 1214 | } 1215 | case 61: 1216 | yyDollar = yyS[yypt-2 : yypt+1] 1217 | //line parse.y:503 1218 | { 1219 | yyVAL.strings = append(yyDollar[1].strings, yyDollar[2].string) 1220 | } 1221 | case 62: 1222 | yyDollar = yyS[yypt-1 : yypt+1] 1223 | //line parse.y:509 1224 | { 1225 | yyVAL.expr = &LiteralExpr{Start: yyDollar[1].pos, Token: yyDollar[1].tok} 1226 | } 1227 | case 63: 1228 | yyDollar = yyS[yypt-1 : yypt+1] 1229 | //line parse.y:515 1230 | { 1231 | yyVAL.exprs = []Expr{yyDollar[1].expr} 1232 | } 1233 | case 64: 1234 | yyDollar = yyS[yypt-3 : yypt+1] 1235 | //line parse.y:519 1236 | { 1237 | yyVAL.exprs = append(yyDollar[1].exprs, yyDollar[3].expr) 1238 | } 1239 | case 
65: 1240 | yyDollar = yyS[yypt-4 : yypt+1] 1241 | //line parse.y:525 1242 | { 1243 | yyVAL.forc = &ForClause{ 1244 | For: yyDollar[1].pos, 1245 | Var: yyDollar[2].exprs, 1246 | In: yyDollar[3].pos, 1247 | Expr: yyDollar[4].expr, 1248 | } 1249 | } 1250 | case 66: 1251 | yyDollar = yyS[yypt-6 : yypt+1] 1252 | //line parse.y:534 1253 | { 1254 | yyVAL.forc = &ForClause{ 1255 | For: yyDollar[1].pos, 1256 | Var: yyDollar[3].exprs, 1257 | In: yyDollar[5].pos, 1258 | Expr: yyDollar[6].expr, 1259 | } 1260 | } 1261 | case 67: 1262 | yyDollar = yyS[yypt-1 : yypt+1] 1263 | //line parse.y:545 1264 | { 1265 | yyVAL.fors = []*ForClause{yyDollar[1].forc} 1266 | } 1267 | case 68: 1268 | yyDollar = yyS[yypt-2 : yypt+1] 1269 | //line parse.y:548 1270 | { 1271 | yyVAL.fors = append(yyDollar[1].fors, yyDollar[2].forc) 1272 | } 1273 | case 69: 1274 | yyDollar = yyS[yypt-0 : yypt+1] 1275 | //line parse.y:553 1276 | { 1277 | yyVAL.ifs = nil 1278 | } 1279 | case 70: 1280 | yyDollar = yyS[yypt-3 : yypt+1] 1281 | //line parse.y:557 1282 | { 1283 | yyVAL.ifs = append(yyDollar[1].ifs, &IfClause{ 1284 | If: yyDollar[2].pos, 1285 | Cond: yyDollar[3].expr, 1286 | }) 1287 | } 1288 | } 1289 | goto yystack /* stack new state and value */ 1290 | } 1291 | -------------------------------------------------------------------------------- /vendor/github.com/bazelbuild/buildifier/core/print.go: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright 2016 Google Inc. All Rights Reserved. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and
limitations under the License.
*/
// Printing of syntax trees.

package build

import (
	"bytes"
	"fmt"
	"strings"
)

// Format returns the formatted form of the given BUILD file.
func Format(f *File) []byte {
	pr := &printer{}
	pr.file(f)
	return pr.Bytes()
}

// FormatString returns the string form of the given expression.
// A *File is printed as a whole file; any other expression is printed
// standalone at the lowest precedence level.
func FormatString(x Expr) string {
	pr := &printer{}
	switch x := x.(type) {
	case *File:
		pr.file(x)
	default:
		pr.expr(x, precLow)
	}
	return pr.String()
}

// A printer collects the state during printing of a file or expression.
type printer struct {
	bytes.Buffer           // output buffer; the embedding makes printer an io.Writer
	comment      []Comment // pending end-of-line comments
	margin       int       // left margin (indent), a number of spaces
	depth        int       // nesting depth inside ( ) [ ] { }
}

// printf prints to the buffer.
// It writes through p itself, using the Write method supplied by the
// embedded bytes.Buffer.
func (p *printer) printf(format string, args ...interface{}) {
	fmt.Fprintf(p, format, args...)
}

// indent returns the position on the current line, in bytes, 0-indexed.
// It scans backward from the end of the buffer to the most recent '\n'.
func (p *printer) indent() int {
	b := p.Bytes()
	n := 0
	for n < len(b) && b[len(b)-1-n] != '\n' {
		n++
	}
	return n
}

// newline ends the current line, flushing end-of-line comments.
// It must only be called when printing a newline is known to be safe:
// when not inside an expression or when p.depth > 0.
// To break a line inside an expression that might not be enclosed
// in brackets of some kind, use breakline instead.
73 | func (p *printer) newline() { 74 | if len(p.comment) > 0 { 75 | p.printf(" ") 76 | for i, com := range p.comment { 77 | if i > 0 { 78 | p.trim() 79 | p.printf("\n%*s", p.margin, "") 80 | } 81 | p.printf("%s", strings.TrimSpace(com.Token)) 82 | } 83 | p.comment = p.comment[:0] 84 | } 85 | 86 | p.trim() 87 | p.printf("\n%*s", p.margin, "") 88 | } 89 | 90 | // breakline breaks the current line, inserting a continuation \ if needed. 91 | // If no continuation \ is needed, breakline flushes end-of-line comments. 92 | func (p *printer) breakline() { 93 | if p.depth == 0 { 94 | // Cannot have both final \ and comments. 95 | p.printf(" \\\n%*s", p.margin, "") 96 | return 97 | } 98 | 99 | // Safe to use newline. 100 | p.newline() 101 | } 102 | 103 | // trim removes trailing spaces from the current line. 104 | func (p *printer) trim() { 105 | // Remove trailing space from line we're about to end. 106 | b := p.Bytes() 107 | n := len(b) 108 | for n > 0 && b[n-1] == ' ' { 109 | n-- 110 | } 111 | p.Truncate(n) 112 | } 113 | 114 | // file formats the given file into the print buffer. 
func (p *printer) file(f *File) {
	// Comments attached before the first statement.
	for _, com := range f.Before {
		p.printf("%s", strings.TrimSpace(com.Token))
		p.newline()
	}

	for i, stmt := range f.Stmt {
		switch stmt := stmt.(type) {
		case *CommentBlock:
			// comments already handled

		case *PythonBlock:
			// Leading comments of the block, then its raw token text.
			for _, com := range stmt.Before {
				p.printf("%s", strings.TrimSpace(com.Token))
				p.newline()
			}
			p.printf("%s", stmt.Token) // includes trailing newline

		default:
			p.expr(stmt, precLow)
			p.newline()
		}

		// Trailing comments attached to this statement.
		for _, com := range stmt.Comment().After {
			p.printf("%s", strings.TrimSpace(com.Token))
			p.newline()
		}

		// Blank line between statements unless the pair prints compactly.
		if i+1 < len(f.Stmt) && !compactStmt(stmt, f.Stmt[i+1]) {
			p.newline()
		}
	}

	// Comments attached after the last statement.
	for _, com := range f.After {
		p.printf("%s", strings.TrimSpace(com.Token))
		p.newline()
	}
}

// compactStmt reports whether the pair of statements s1, s2
// should be printed without an intervening blank line.
// We omit the blank line when both are subinclude statements
// and the second one has no leading comments.
func compactStmt(s1, s2 Expr) bool {
	if len(s2.Comment().Before) > 0 {
		return false
	}

	// Both must be subinclude/load calls.
	return (isCall(s1, "subinclude") || isCall(s1, "load")) &&
		(isCall(s2, "subinclude") || isCall(s2, "load"))
}

// isCall reports whether x is a call to a function with the given name.
func isCall(x Expr, name string) bool {
	c, ok := x.(*CallExpr)
	if !ok {
		return false
	}
	nam, ok := c.X.(*LiteralExpr)
	if !ok {
		return false
	}
	return nam.Token == name
}

// Expression formatting.

// The expression formatter must introduce parentheses to force the
// meaning described by the parse tree.
// We preserve parentheses in the
// input, so extra parentheses are only needed if we have edited the tree.
//
// For example consider these expressions:
//	(1) "x" "y" % foo
//	(2) "x" + "y" % foo
//	(3) "x" + ("y" % foo)
//	(4) ("x" + "y") % foo
// When we parse (1), we represent the concatenation as an addition.
// However, if we print the addition back out without additional parens,
// as in (2), it has the same meaning as (3), which is not the original
// meaning. To preserve the original meaning we must add parens as in (4).
//
// To allow arbitrary rewrites to be formatted properly, we track full
// operator precedence while printing instead of just handling this one
// case of string concatenation.
//
// The precedences are assigned values low to high. A larger number
// binds tighter than a smaller number. All binary operators bind
// left-to-right.
const (
	precLow = iota
	precAssign
	precComma
	precColon
	precIn
	precOr
	precAnd
	precCmp
	precAdd
	precMultiply
	precSuffix
	precUnary
	precConcat // highest: implicit string concatenation
)

// opPrec gives the precedence for operators found in a BinaryExpr.
var opPrec = map[string]int{
	"=":   precAssign,
	"+=":  precAssign,
	"or":  precOr,
	"and": precAnd,
	"<":   precCmp,
	">":   precCmp,
	"==":  precCmp,
	"!=":  precCmp,
	"<=":  precCmp,
	">=":  precCmp,
	"+":   precAdd,
	"-":   precAdd,
	"*":   precMultiply,
	"/":   precMultiply,
	"%":   precMultiply,
}

// expr prints the expression v to the print buffer.
// The value outerPrec gives the precedence of the operator
// outside expr. If that operator binds tighter than v's operator,
// expr must introduce parentheses to preserve the meaning
// of the parse tree (see above).
func (p *printer) expr(v Expr, outerPrec int) {
	// Emit line-comments preceding this expression.
	// If we are in the middle of an expression but not inside ( ) [ ] { }
	// then we cannot just break the line: we'd have to end it with a \.
	// However, even then we can't emit line comments since that would
	// end the expression. This is only a concern if we have rewritten
	// the parse tree. If comments were okay before this expression in
	// the original input they're still okay now, in the absence of rewrites.
	//
	// TODO(bazel-team): Check whether it is valid to emit comments right now,
	// and if not, insert them earlier in the output instead, at the most
	// recent \n not following a \ line.
	if before := v.Comment().Before; len(before) > 0 {
		// Want to print a line comment.
		// Line comments must be at the current margin.
		p.trim()
		if p.indent() > 0 {
			// There's other text on the line. Start a new line.
			p.printf("\n")
		}
		// Re-indent to margin.
		p.printf("%*s", p.margin, "")
		for _, com := range before {
			p.printf("%s", strings.TrimSpace(com.Token))
			p.newline()
		}
	}

	// Do we introduce parentheses?
	// The result depends on the kind of expression.
	// Each expression type that might need parentheses
	// calls addParen with its own precedence.
	// If parentheses are necessary, addParen prints the
	// opening parenthesis and sets parenthesized so that
	// the code after the switch can print the closing one.
	parenthesized := false
	addParen := func(prec int) {
		if prec < outerPrec {
			p.printf("(")
			p.depth++
			parenthesized = true
		}
	}

	switch v := v.(type) {
	default:
		panic(fmt.Errorf("printer: unexpected type %T", v))

	case *LiteralExpr:
		p.printf("%s", v.Token)

	case *StringExpr:
		// If the Token is a correct quoting of Value, use it.
		// This preserves the specific escaping choices that
		// BUILD authors have made, and it also works around
		// b/7272572.
		if strings.HasPrefix(v.Token, `"`) {
			s, triple, err := unquote(v.Token)
			if s == v.Value && triple == v.TripleQuote && err == nil {
				p.printf("%s", v.Token)
				break
			}
		}

		// Otherwise re-quote the value from scratch.
		p.printf("%s", quote(v.Value, v.TripleQuote))

	case *DotExpr:
		addParen(precSuffix)
		p.expr(v.X, precSuffix)
		p.printf(".%s", v.Name)

	case *IndexExpr:
		addParen(precSuffix)
		p.expr(v.X, precSuffix)
		p.printf("[")
		p.expr(v.Y, precLow)
		p.printf("]")

	case *KeyValueExpr:
		p.expr(v.Key, precLow)
		p.printf(": ")
		p.expr(v.Value, precLow)

	case *SliceExpr:
		// Either bound of the slice may be absent.
		addParen(precSuffix)
		p.expr(v.X, precSuffix)
		p.printf("[")
		if v.Y != nil {
			p.expr(v.Y, precLow)
		}
		p.printf(":")
		if v.Z != nil {
			p.expr(v.Z, precLow)
		}
		p.printf("]")

	case *UnaryExpr:
		addParen(precUnary)
		if v.Op == "not" {
			p.printf("not ") // Requires a space after it.
		} else {
			p.printf("%s", v.Op)
		}
		p.expr(v.X, precUnary)

	case *LambdaExpr:
		addParen(precColon)
		p.printf("lambda ")
		for i, name := range v.Var {
			if i > 0 {
				p.printf(", ")
			}
			p.expr(name, precLow)
		}
		p.printf(": ")
		p.expr(v.Expr, precColon)

	case *BinaryExpr:
		// Precedence: use the precedence of the operator.
		// Since all binary expressions format left-to-right,
		// it is okay for the left side to reuse the same operator
		// without parentheses, so we use prec for v.X.
		// For the same reason, the right side cannot reuse the same
		// operator, or else a parse tree for a + (b + c), where the ( ) are
		// not present in the source, will format as a + b + c, which
		// means (a + b) + c. Treat the right expression as appearing
		// in a context one precedence level higher: use prec+1 for v.Y.
		//
		// Line breaks: if we are to break the line immediately after
		// the operator, introduce a margin at the current column,
		// so that the second operand lines up with the first one and
		// also so that neither operand can use space to the left.
		// If the operator is an =, indent the right side another 4 spaces.
		prec := opPrec[v.Op]
		addParen(prec)
		m := p.margin
		if v.LineBreak {
			p.margin = p.indent()
			if v.Op == "=" {
				p.margin += 4
			}
		}

		p.expr(v.X, prec)
		p.printf(" %s", v.Op)
		if v.LineBreak {
			p.breakline()
		} else {
			p.printf(" ")
		}
		p.expr(v.Y, prec+1)
		// Restore the caller's margin.
		p.margin = m

	case *ParenExpr:
		p.seq("()", []Expr{v.X}, &v.End, modeParen, false, v.ForceMultiLine)

	case *CallExpr:
		addParen(precSuffix)
		p.expr(v.X, precSuffix)
		p.seq("()", v.List, &v.End, modeCall, v.ForceCompact, v.ForceMultiLine)

	case *ListExpr:
		p.seq("[]", v.List, &v.End, modeList, false, v.ForceMultiLine)

	case *TupleExpr:
		p.seq("()", v.List, &v.End, modeTuple, v.ForceCompact, v.ForceMultiLine)

	case *DictExpr:
		// Convert the key-value entries to a plain []Expr for seq.
		var list []Expr
		for _, x := range v.List {
			list = append(list, x)
		}
		p.seq("{}", list, &v.End, modeDict, false, v.ForceMultiLine)

	case *ListForExpr:
		p.listFor(v)

	case *ConditionalExpr:
		addParen(precSuffix)
		p.expr(v.Then, precSuffix)
		p.printf(" if ")
		p.expr(v.Test, precSuffix)
		p.printf(" else ")
		p.expr(v.Else, precSuffix)
	}

	// Add closing parenthesis if needed.
	if parenthesized {
		p.depth--
		p.printf(")")
	}

	// Queue end-of-line comments for printing when we
	// reach the end of the line.
	p.comment = append(p.comment, v.Comment().Suffix...)
}

// A seqMode describes a formatting mode for a sequence of values,
// like a list or call arguments.
type seqMode int

const (
	_ seqMode = iota

	modeCall  // f(x)
	modeList  // [x]
	modeTuple // (x,)
	modeParen // (x)
	modeDict  // {x:y}
)

// seq formats a list of values inside a given bracket pair (brack = "()", "[]", "{}").
// The end node holds any trailing comments to be printed just before the
// closing bracket.
// The mode parameter specifies the sequence mode (see above).
// If multiLine is true, seq avoids the compact form even
// for 0- and 1-element sequences.
func (p *printer) seq(brack string, list []Expr, end *End, mode seqMode, forceCompact, forceMultiLine bool) {
	p.printf("%s", brack[:1])
	p.depth++

	// If there are line comments, force multiline
	// so we can print the comments before the closing bracket.
	for _, x := range list {
		if len(x.Comment().Before) > 0 {
			forceMultiLine = true
		}
	}
	if len(end.Before) > 0 {
		forceMultiLine = true
	}

	// Resolve possibly ambiguous call arguments explicitly
	// instead of depending on implicit resolution in logic below.
	if forceMultiLine {
		forceCompact = false
	}

	switch {
	case len(list) == 0 && !forceMultiLine:
		// Compact form: print nothing.

	case len(list) == 1 && !forceMultiLine:
		// Compact form.
		p.expr(list[0], precLow)
		// Tuple must end with comma, to mark it as a tuple.
		if mode == modeTuple {
			p.printf(",")
		}

	case forceCompact:
		// Compact form but multiple elements.
		for i, x := range list {
			if i > 0 {
				p.printf(", ")
			}
			p.expr(x, precLow)
		}

	default:
		// Multi-line form: one element per line, indented 4 spaces.
		p.margin += 4
		for i, x := range list {
			// If we are about to break the line before the first
			// element and there are trailing end-of-line comments
			// waiting to be printed, delay them and print them as
			// whole-line comments preceding that element.
			// Do this by printing a newline ourselves and positioning
			// so that the end-of-line comment, with the two spaces added,
			// will line up with the current margin.
			if i == 0 && len(p.comment) > 0 {
				p.printf("\n%*s", p.margin-2, "")
			}

			p.newline()
			p.expr(x, precLow)
			// Trailing comma on every element except the sole element
			// of a parenthesized (non-tuple) expression.
			if mode != modeParen || i+1 < len(list) {
				p.printf(",")
			}
		}
		// Final comments.
		for _, com := range end.Before {
			p.newline()
			p.printf("%s", strings.TrimSpace(com.Token))
		}
		p.margin -= 4
		p.newline()
	}
	p.depth--
	p.printf("%s", brack[1:])
}

// listFor formats a ListForExpr (list comprehension).
// The single-line form is:
//	[x for y in z if c]
//
// and the multi-line form is:
//	[
//	    x
//	    for y in z
//	    if c
//	]
//
func (p *printer) listFor(v *ListForExpr) {
	multiLine := v.ForceMultiLine || len(v.End.Before) > 0

	// space breaks the line in multiline mode
	// or else prints a space.
	space := func() {
		if multiLine {
			p.breakline()
		} else {
			p.printf(" ")
		}
	}

	if v.Brack != "" {
		p.depth++
		p.printf("%s", v.Brack[:1])
	}

	if multiLine {
		if v.Brack != "" {
			p.margin += 4
		}
		p.newline()
	}

	// The element expression.
	p.expr(v.X, precLow)

	// One "for name, ... in expr" clause per entry.
	for _, c := range v.For {
		space()
		p.printf("for ")
		for i, name := range c.Var {
			if i > 0 {
				p.printf(", ")
			}
			p.expr(name, precLow)
		}
		p.printf(" in ")
		p.expr(c.Expr, precLow)
	}

	// One "if cond" clause per entry.
	for _, c := range v.If {
		space()
		p.printf("if ")
		p.expr(c.Cond, precLow)
	}

	if multiLine {
		// Comments just before the closing bracket.
		for _, com := range v.End.Before {
			p.newline()
			p.printf("%s", strings.TrimSpace(com.Token))
		}
		if v.Brack != "" {
			p.margin -= 4
		}
		p.newline()
	}

	if v.Brack != "" {
		p.printf("%s", v.Brack[1:])
		p.depth--
	}
}
--------------------------------------------------------------------------------
/vendor/github.com/bazelbuild/buildifier/core/quote.go:
--------------------------------------------------------------------------------
/*
Copyright 2016 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Python quoted strings.

package build

import (
	"bytes"
	"fmt"
	"strconv"
	"strings"
)

// unesc maps single-letter chars following \ to their actual values.
// Unlisted entries are zero, meaning "not a one-char escape".
var unesc = [256]byte{
	'a':  '\a',
	'b':  '\b',
	'f':  '\f',
	'n':  '\n',
	'r':  '\r',
	't':  '\t',
	'v':  '\v',
	'\\': '\\',
	'\'': '\'',
	'"':  '"',
}

// esc maps escape-worthy bytes to the char that should follow \.
// It is the inverse of unesc; a zero entry means "no escape needed".
var esc = [256]byte{
	'\a': 'a',
	'\b': 'b',
	'\f': 'f',
	'\n': 'n',
	'\r': 'r',
	'\t': 't',
	'\v': 'v',
	'\\': '\\',
	'\'': '\'',
	'"':  '"',
}

// notEsc is a list of characters that can follow a \ in a string value
// without having to escape the \. That is, since ( is in this list, we
// quote the Go string "foo\\(bar" as the Python literal "foo\(bar".
// This really does happen in BUILD files, especially in strings
// being used as shell arguments containing regular expressions.
60 | const notEsc = " !#$%&()*+,-./:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ{|}~" 61 | 62 | // unquote unquotes the quoted string, returning the actual 63 | // string value, whether the original was triple-quoted, and 64 | // an error describing invalid input. 65 | func unquote(quoted string) (s string, triple bool, err error) { 66 | // Check for raw prefix: means don't interpret the inner \. 67 | raw := false 68 | if strings.HasPrefix(quoted, "r") { 69 | raw = true 70 | quoted = quoted[1:] 71 | } 72 | 73 | if len(quoted) < 2 { 74 | err = fmt.Errorf("string literal too short") 75 | return 76 | } 77 | 78 | if quoted[0] != '"' && quoted[0] != '\'' || quoted[0] != quoted[len(quoted)-1] { 79 | err = fmt.Errorf("string literal has invalid quotes") 80 | } 81 | 82 | // Check for triple quoted string. 83 | quote := quoted[0] 84 | if len(quoted) >= 6 && quoted[1] == quote && quoted[2] == quote && quoted[:3] == quoted[len(quoted)-3:] { 85 | triple = true 86 | quoted = quoted[3 : len(quoted)-3] 87 | } else { 88 | quoted = quoted[1 : len(quoted)-1] 89 | } 90 | 91 | // Now quoted is the quoted data, but no quotes. 92 | // If we're in raw mode or there are no escapes, we're done. 93 | if raw || !strings.Contains(quoted, `\`) { 94 | s = quoted 95 | return 96 | } 97 | 98 | // Otherwise process quoted string. 99 | // Each iteration processes one escape sequence along with the 100 | // plain text leading up to it. 101 | var buf bytes.Buffer 102 | for { 103 | // Remove prefix before escape sequence. 104 | i := strings.Index(quoted, `\`) 105 | if i < 0 { 106 | i = len(quoted) 107 | } 108 | buf.WriteString(quoted[:i]) 109 | quoted = quoted[i:] 110 | 111 | if len(quoted) == 0 { 112 | break 113 | } 114 | 115 | // Process escape sequence. 
116 | if len(quoted) == 1 { 117 | err = fmt.Errorf(`truncated escape sequence \`) 118 | return 119 | } 120 | 121 | switch quoted[1] { 122 | default: 123 | // In Python, if \z (for some byte z) is not a known escape sequence 124 | // then it appears as literal text in the string. 125 | buf.WriteString(quoted[:2]) 126 | quoted = quoted[2:] 127 | 128 | case '\n': 129 | // Ignore the escape and the line break. 130 | quoted = quoted[2:] 131 | 132 | case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', '\'', '"': 133 | // One-char escape 134 | buf.WriteByte(unesc[quoted[1]]) 135 | quoted = quoted[2:] 136 | 137 | case '0', '1', '2', '3', '4', '5', '6', '7': 138 | // Octal escape, up to 3 digits. 139 | n := int(quoted[1] - '0') 140 | quoted = quoted[2:] 141 | for i := 1; i < 3; i++ { 142 | if len(quoted) == 0 || quoted[0] < '0' || '7' < quoted[0] { 143 | break 144 | } 145 | n = n*8 + int(quoted[0]-'0') 146 | quoted = quoted[1:] 147 | } 148 | if n >= 256 { 149 | // NOTE: Python silently discards the high bit, 150 | // so that '\541' == '\141' == 'a'. 151 | // Let's see if we can avoid doing that in BUILD files. 152 | err = fmt.Errorf(`invalid escape sequence \%03o`, n) 153 | return 154 | } 155 | buf.WriteByte(byte(n)) 156 | 157 | case 'x': 158 | // Hexadecimal escape, exactly 2 digits. 159 | if len(quoted) < 4 { 160 | err = fmt.Errorf(`truncated escape sequence %s`, quoted) 161 | return 162 | } 163 | n, err1 := strconv.ParseInt(quoted[2:4], 16, 0) 164 | if err1 != nil { 165 | err = fmt.Errorf(`invalid escape sequence %s`, quoted[:4]) 166 | return 167 | } 168 | buf.WriteByte(byte(n)) 169 | quoted = quoted[4:] 170 | } 171 | } 172 | 173 | s = buf.String() 174 | return 175 | } 176 | 177 | // indexByte returns the index of the first instance of b in s, or else -1. 
// indexByte returns the index of the first instance of b in s, or else -1.
// NOTE(review): this duplicates strings.IndexByte — presumably kept for
// compatibility with older Go releases; confirm before replacing.
func indexByte(s string, b byte) int {
	for i := 0; i < len(s); i++ {
		if s[i] == b {
			return i
		}
	}
	return -1
}

// hex is a list of the hexadecimal digits, for use in quoting.
// We always print lower-case hexadecimal.
const hex = "0123456789abcdef"

// quote returns the quoted form of the string value "x".
// If triple is true, quote uses the triple-quoted form """x""".
func quote(unquoted string, triple bool) string {
	q := `"`
	if triple {
		q = `"""`
	}

	var buf bytes.Buffer
	buf.WriteString(q)

	for i := 0; i < len(unquoted); i++ {
		c := unquoted[i]
		if c == '"' && triple && (i+1 < len(unquoted) && unquoted[i+1] != '"' || i+2 < len(unquoted) && unquoted[i+2] != '"') {
			// Can pass up to two quotes through, because they are followed by a non-quote byte.
			buf.WriteByte(c)
			if i+1 < len(unquoted) && unquoted[i+1] == '"' {
				buf.WriteByte(c)
				i++
			}
			continue
		}
		if triple && c == '\n' {
			// Can allow newline in triple-quoted string.
			buf.WriteByte(c)
			continue
		}
		if c == '\'' {
			// Can allow ' since we always use ".
			buf.WriteByte(c)
			continue
		}
		if c == '\\' {
			if i+1 < len(unquoted) && indexByte(notEsc, unquoted[i+1]) >= 0 {
				// Can pass \ through when followed by a byte that
				// known not to be a valid escape sequence and also
				// that does not trigger an escape sequence of its own.
				// Use this, because various BUILD files do.
				buf.WriteByte('\\')
				buf.WriteByte(unquoted[i+1])
				i++
				continue
			}
		}
		if esc[c] != 0 {
			// Standard one-char escape.
			buf.WriteByte('\\')
			buf.WriteByte(esc[c])
			continue
		}
		if c < 0x20 || c >= 0x80 {
			// BUILD files are supposed to be Latin-1, so escape all control and high bytes.
			// I'd prefer to use \x here, but Blaze does not implement
			// \x in quoted strings (b/7272572).
			buf.WriteByte('\\')
			buf.WriteByte(hex[c>>6]) // actually octal but reusing hex digits 0-7.
			buf.WriteByte(hex[(c>>3)&7])
			buf.WriteByte(hex[c&7])
			/*
				buf.WriteByte('\\')
				buf.WriteByte('x')
				buf.WriteByte(hex[c>>4])
				buf.WriteByte(hex[c&0xF])
			*/
			continue
		}
		// Plain printable ASCII byte.
		buf.WriteByte(c)
		continue
	}

	buf.WriteString(q)
	return buf.String()
}
--------------------------------------------------------------------------------
/vendor/github.com/bazelbuild/buildifier/core/rewrite.go:
--------------------------------------------------------------------------------
/*
Copyright 2016 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Rewriting of high-level (not purely syntactic) BUILD constructs.

package build

import (
	"regexp"
	"sort"
	"strings"
)

// For debugging: flag to disable certain rewrites.
var DisableRewrites []string

// disabled reports whether the named rewrite is disabled.
func disabled(name string) bool {
	for _, x := range DisableRewrites {
		if name == x {
			return true
		}
	}
	return false
}

// For debugging: allow sorting of these lists even with sorting otherwise disabled.
var AllowSort []string

// allowedSort reports whether sorting is allowed in the named context.
func allowedSort(name string) bool {
	for _, x := range AllowSort {
		if name == x {
			return true
		}
	}
	return false
}

// Rewrite applies the high-level Buildifier rewrites to f, modifying it in place.
// If info is non-nil, Rewrite updates it with information about the rewrite.
func Rewrite(f *File, info *RewriteInfo) {
	// Allocate an info so that helpers can assume it's there.
	if info == nil {
		info = new(RewriteInfo)
	}

	// Run each enabled rewrite pass in declared order.
	for _, r := range rewrites {
		if !disabled(r.name) {
			r.fn(f, info)
		}
	}
}

// RewriteInfo collects information about what Rewrite did.
type RewriteInfo struct {
	EditLabel      int      // number of label strings edited
	NameCall       int      // number of calls with argument names added
	SortCall       int      // number of call argument lists sorted
	SortStringList int      // number of string lists sorted
	UnsafeSort     int      // number of unsafe string lists sorted
	Log            []string // log entries - may change
}

// String returns a space-separated list of the rewrite categories
// that fired at least once (e.g. "label listsort").
func (info *RewriteInfo) String() string {
	s := ""
	if info.EditLabel > 0 {
		s += " label"
	}
	if info.NameCall > 0 {
		s += " callname"
	}
	if info.SortCall > 0 {
		s += " callsort"
	}
	if info.SortStringList > 0 {
		s += " listsort"
	}
	if info.UnsafeSort > 0 {
		s += " unsafesort"
	}
	if s != "" {
		// Drop the leading space.
		s = s[1:]
	}
	return s
}

// rewrites is the list of all Buildifier rewrites, in the order in which they are applied.
// The order here matters: for example, label canonicalization must happen
// before sorting lists of strings.
var rewrites = []struct {
	name string
	fn   func(*File, *RewriteInfo)
}{
	{"callsort", sortCallArgs},
	{"label", fixLabels},
	{"listsort", sortStringLists},
	{"multiplus", fixMultilinePlus},
}

// leaveAlone reports whether any of the nodes on the stack are marked
// with a comment containing "buildifier: leave-alone".
func leaveAlone(stk []Expr, final Expr) bool {
	for _, x := range stk {
		if leaveAlone1(x) {
			return true
		}
	}
	if final != nil && leaveAlone1(final) {
		return true
	}
	return false
}

// hasComment reports whether x is marked with a comment that
// after being converted to lower case, contains the specified text.
func hasComment(x Expr, text string) bool {
	for _, com := range x.Comment().Before {
		if strings.Contains(strings.ToLower(com.Token), text) {
			return true
		}
	}
	return false
}

// leaveAlone1 reports whether x is marked with a comment containing
// "buildifier: leave-alone", case-insensitive.
func leaveAlone1(x Expr) bool {
	return hasComment(x, "buildifier: leave-alone")
}

// doNotSort reports whether x is marked with a comment containing
// "do not sort", case-insensitive.
func doNotSort(x Expr) bool {
	return hasComment(x, "do not sort")
}

// keepSorted reports whether x is marked with a comment containing
// "keep sorted", case-insensitive.
func keepSorted(x Expr) bool {
	return hasComment(x, "keep sorted")
}

// fixLabels rewrites labels into a canonical form.
//
// First, it joins labels written as string addition, turning
// "//x" + ":y" (usually split across multiple lines) into "//x:y".
//
// Second, it removes redundant target qualifiers, turning
// "//third_party/m4:m4" into "//third_party/m4".
//
func fixLabels(f *File, info *RewriteInfo) {
	// joinLabel collapses a "//x" + ":y" string addition at *p into a
	// single string literal, merging attached comments into the survivor.
	joinLabel := func(p *Expr) {
		add, ok := (*p).(*BinaryExpr)
		if !ok || add.Op != "+" {
			return
		}
		str1, ok := add.X.(*StringExpr)
		if !ok || !strings.HasPrefix(str1.Value, "//") || strings.Contains(str1.Value, " ") {
			return
		}
		str2, ok := add.Y.(*StringExpr)
		if !ok || strings.Contains(str2.Value, " ") {
			return
		}
		info.EditLabel++
		str1.Value += str2.Value

		// Deleting nodes add and str2.
		// Merge comments from add, str1, and str2 and save in str1.
		com1 := add.Comment()
		com2 := str1.Comment()
		com3 := str2.Comment()
		com1.Before = append(com1.Before, com2.Before...)
		com1.Before = append(com1.Before, com3.Before...)
		com1.Suffix = append(com1.Suffix, com2.Suffix...)
		com1.Suffix = append(com1.Suffix, com3.Suffix...)
		*str1.Comment() = *com1

		*p = str1
	}

	// labelRE matches label strings //x/y/z:abc.
	// $1 is //x/y/z, $2 is x/y/, $3 is z, $4 is :abc, and $5 is abc.
	labelRE := regexp.MustCompile(`^(//(.*/)?([^:]+))(:([^:]+))?$`)

	// shortenLabel rewrites "//a/b:b" to "//a/b" (redundant target name).
	shortenLabel := func(v Expr) {
		str, ok := v.(*StringExpr)
		if !ok {
			return
		}
		m := labelRE.FindStringSubmatch(str.Value)
		if m == nil {
			return
		}
		if m[3] == m[5] {
			info.EditLabel++
			str.Value = m[1]
		}
	}

	// Apply both rewrites to every label-valued argument of every call,
	// honoring the various leave-alone markers.
	Walk(f, func(v Expr, stk []Expr) {
		switch v := v.(type) {
		case *CallExpr:
			if leaveAlone(stk, v) {
				return
			}
			for i := range v.List {
				if leaveAlone1(v.List[i]) {
					continue
				}
				as, ok := v.List[i].(*BinaryExpr)
				if !ok || as.Op != "=" {
					continue
				}
				key, ok := as.X.(*LiteralExpr)
				if !ok || !isLabelArg[key.Token] || labelBlacklist[callName(v)+"."+key.Token] {
					continue
				}
				if leaveAlone1(as.Y) {
					continue
				}
				if list, ok := as.Y.(*ListExpr); ok {
					// A list of labels: rewrite each element.
					for i := range list.List {
						if leaveAlone1(list.List[i]) {
							continue
						}
						joinLabel(&list.List[i])
						shortenLabel(list.List[i])
					}
				} else {
					// A single label value.
					joinLabel(&as.Y)
					shortenLabel(as.Y)
				}
			}
		}
	})
}

// callName returns the name of the rule being called by call.
// If the call is not to a literal rule name, callName returns "".
func callName(call *CallExpr) string {
	rule, ok := call.X.(*LiteralExpr)
	if !ok {
		return ""
	}
	return rule.Token
}

// sortCallArgs sorts lists of named arguments to a call.
func sortCallArgs(f *File, info *RewriteInfo) {
	Walk(f, func(v Expr, stk []Expr) {
		call, ok := v.(*CallExpr)
		if !ok {
			return
		}
		if leaveAlone(stk, call) {
			return
		}
		rule := callName(call)
		if rule == "" {
			return
		}

		// Find the tail of the argument list with named arguments.
		start := len(call.List)
		for start > 0 && argName(call.List[start-1]) != "" {
			start--
		}

		// Record information about each arg into a sortable list.
		var args namedArgs
		for i, x := range call.List[start:] {
			name := argName(x)
			args = append(args, namedArg{ruleNamePriority(rule, name), name, i, x})
		}

		// Sort the list and put the args back in the new order.
		if sort.IsSorted(args) {
			return
		}
		info.SortCall++
		sort.Sort(args)
		for i, x := range args {
			call.List[start+i] = x.expr
		}
	})
}

// ruleNamePriority maps a rule argument name to its sorting priority.
// It could use the auto-generated per-rule tables but for now it just
// falls back to the original list.
func ruleNamePriority(rule, arg string) int {
	return namePriority[arg]
	/*
		list := ruleArgOrder[rule]
		if len(list) == 0 {
			return namePriority[arg]
		}
		for i, x := range list {
			if x == arg {
				return i
			}
		}
		return len(list)
	*/
}

// namePriority maps an argument name to its sorting priority.
//
// NOTE(bazel-team): These are the old buildifier rules. It is likely that this table
// will change, perhaps swapping in a separate table for each call,
// derived from the order used in the Build Encyclopedia.
var namePriority = map[string]int{
	"name":              -99,
	"gwt_name":          -98,
	"package_name":      -97,
	"visible_node_name": -96, // for boq_initial_css_modules and boq_jswire_test_suite
	"size":              -95,
	"timeout":           -94,
	"testonly":          -93,
	"src":               -92,
	"srcdir":            -91,
	"srcs":              -90,
	"out":               -89,
	"outs":              -88,
	"hdrs":              -87,
	"has_services":      -86, // before api versions, for proto
	"include":           -85, // before exclude, for glob
	"of":                -84, // for check_dependencies
	"baseline":          -83, // for searchbox_library
	// All others sort here, at 0.
	"destdir":        1,
	"exports":        2,
	"runtime_deps":   3,
	"deps":           4,
	"implementation": 5,
	"implements":     6,
	"alwayslink":     7,
}

// If x is of the form key=value, argName returns the string key.
// Otherwise argName returns "".
func argName(x Expr) string {
	if as, ok := x.(*BinaryExpr); ok && as.Op == "=" {
		if id, ok := as.X.(*LiteralExpr); ok {
			return id.Token
		}
	}
	return ""
}

// A namedArg records information needed for sorting
// a named call argument into its proper position.
type namedArg struct {
	priority int    // kind of name; first sort key
	name     string // name; second sort key
	index    int    // original index; final sort key
	expr     Expr   // name=value argument
}

// namedArgs is a slice of namedArg that implements sort.Interface.
type namedArgs []namedArg

func (x namedArgs) Len() int      { return len(x) }
func (x namedArgs) Swap(i, j int) { x[i], x[j] = x[j], x[i] }

// Less orders by priority, then name, then original index, so arguments
// with equal priority and name keep their relative order.
func (x namedArgs) Less(i, j int) bool {
	p := x[i]
	q := x[j]
	if p.priority != q.priority {
		return p.priority < q.priority
	}
	if p.name != q.name {
		return p.name < q.name
	}
	return p.index < q.index
}

// sortStringLists sorts lists of string literals used as specific rule arguments.
func sortStringLists(f *File, info *RewriteInfo) {
	Walk(f, func(v Expr, stk []Expr) {
		switch v := v.(type) {
		case *CallExpr:
			// Sort whitelisted/allowed list-valued named arguments of calls.
			if leaveAlone(stk, v) {
				return
			}
			rule := callName(v)
			for _, arg := range v.List {
				if leaveAlone1(arg) {
					continue
				}
				as, ok := arg.(*BinaryExpr)
				if !ok || as.Op != "=" || leaveAlone1(as) || doNotSort(as) {
					continue
				}
				key, ok := as.X.(*LiteralExpr)
				if !ok {
					continue
				}
				context := rule + "." + key.Token
				if !isSortableListArg[key.Token] || sortableBlacklist[context] {
					continue
				}
				if disabled("unsafesort") && !sortableWhitelist[context] && !allowedSort(context) {
					continue
				}
				sortStringList(as.Y, info, context)
			}
		case *BinaryExpr:
			if disabled("unsafesort") {
				return
			}
			// "keep sorted" comment on x = list forces sorting of list.
			as := v
			if as.Op == "=" && keepSorted(as) {
				sortStringList(as.Y, info, "?")
			}
		case *KeyValueExpr:
			if disabled("unsafesort") {
				return
			}
			// "keep sorted" before key: list also forces sorting of list.
			if keepSorted(v) {
				sortStringList(v.Value, info, "?")
			}
		case *ListExpr:
			if disabled("unsafesort") {
				return
			}
			// "keep sorted" comment above first list element also forces sorting of list.
			if len(v.List) > 0 && keepSorted(v.List[0]) {
				sortStringList(v, info, "?")
			}
		}
	})
}

// SortStringList sorts x, a list of strings.
func SortStringList(x Expr) {
	sortStringList(x, nil, "")
}

// sortStringList sorts x, a list of strings.
// The list is broken by non-strings and by blank lines and comments into chunks.
// Each chunk is sorted in place.
func sortStringList(x Expr, info *RewriteInfo, context string) {
	list, ok := x.(*ListExpr)
	if !ok || len(list.List) < 2 || doNotSort(list.List[0]) {
		return
	}

	forceSort := keepSorted(list.List[0])

	// TODO(bazel-team): Decide how to recognize lists that cannot
	// be sorted. Avoiding all lists with comments avoids sorting
	// lists that say explicitly, in some form or another, why they
	// cannot be sorted. For example, many cc_test rules require
	// certain order in their deps attributes.
	if !forceSort {
		if line, _ := hasComments(list); line {
			return
		}
	}

	// Sort chunks of the list with no intervening blank lines or comments.
	for i := 0; i < len(list.List); {
		if _, ok := list.List[i].(*StringExpr); !ok {
			// Non-string element: skip it; it ends any chunk.
			i++
			continue
		}

		// Extend the chunk [i, j) while elements are strings with no
		// leading comments (a leading comment starts a new chunk).
		j := i + 1
		for ; j < len(list.List); j++ {
			if str, ok := list.List[j].(*StringExpr); !ok || len(str.Before) > 0 {
				break
			}
		}

		var chunk []stringSortKey
		for index, x := range list.List[i:j] {
			chunk = append(chunk, makeSortKey(index, x.(*StringExpr)))
		}
		if !sort.IsSorted(byStringExpr(chunk)) || !isUniq(chunk) {
			if info != nil {
				info.SortStringList++
				if !sortableWhitelist[context] {
					info.UnsafeSort++
					info.Log = append(info.Log, "sort:"+context)
				}
			}
			// Detach the comment block from the chunk's first element so it
			// stays at the top of the chunk rather than following its element.
			before := chunk[0].x.Comment().Before
			chunk[0].x.Comment().Before = nil

			sort.Sort(byStringExpr(chunk))
			chunk = uniq(chunk)

			chunk[0].x.Comment().Before = before
			for offset, key := range chunk {
				list.List[i+offset] = key.x
			}
			// Splice out slots freed by duplicate removal.
			list.List = append(list.List[:(i+len(chunk))], list.List[j:]...)
		}

		// NOTE(review): when uniq shrank the chunk, the tail has shifted
		// left but i = j still uses the pre-splice index, so the elements
		// immediately after a deduplicated chunk appear to be skipped on
		// this pass — verify against upstream buildifier.
		i = j
	}
}

// uniq removes duplicates from a list, which must already be sorted.
// It edits the list in place.
func uniq(sortedList []stringSortKey) []stringSortKey {
	out := sortedList[:0]
	for _, sk := range sortedList {
		if len(out) == 0 || sk.value != out[len(out)-1].value {
			out = append(out, sk)
		}
	}
	return out
}

// isUniq reports whether the sorted list only contains unique elements.
func isUniq(list []stringSortKey) bool {
	for i := range list {
		if i+1 < len(list) && list[i].value == list[i+1].value {
			return false
		}
	}
	return true
}

// If stk describes a call argument like rule(arg=...), callArgName
// returns the name of that argument, formatted as "rule.arg".
545 | func callArgName(stk []Expr) string { 546 | n := len(stk) 547 | if n < 2 { 548 | return "" 549 | } 550 | arg := argName(stk[n-1]) 551 | if arg == "" { 552 | return "" 553 | } 554 | call, ok := stk[n-2].(*CallExpr) 555 | if !ok { 556 | return "" 557 | } 558 | rule, ok := call.X.(*LiteralExpr) 559 | if !ok { 560 | return "" 561 | } 562 | return rule.Token + "." + arg 563 | } 564 | 565 | // A stringSortKey records information about a single string literal to be 566 | // sorted. The strings are first grouped into four phases: most strings, 567 | // strings beginning with ":", strings beginning with "//", and strings 568 | // beginning with "@". The next significant part of the comparison is the list 569 | // of elements in the value, where elements are split at `.' and `:'. Finally 570 | // we compare by value and break ties by original index. 571 | type stringSortKey struct { 572 | phase int 573 | split []string 574 | value string 575 | original int 576 | x Expr 577 | } 578 | 579 | func makeSortKey(index int, x *StringExpr) stringSortKey { 580 | key := stringSortKey{ 581 | value: x.Value, 582 | original: index, 583 | x: x, 584 | } 585 | 586 | switch { 587 | case strings.HasPrefix(x.Value, ":"): 588 | key.phase = 1 589 | case strings.HasPrefix(x.Value, "//"): 590 | key.phase = 2 591 | case strings.HasPrefix(x.Value, "@"): 592 | key.phase = 3 593 | } 594 | 595 | key.split = strings.Split(strings.Replace(x.Value, ":", ".", -1), ".") 596 | return key 597 | } 598 | 599 | // byStringExpr implements sort.Interface for a list of stringSortKey. 
600 | type byStringExpr []stringSortKey 601 | 602 | func (x byStringExpr) Len() int { return len(x) } 603 | func (x byStringExpr) Swap(i, j int) { x[i], x[j] = x[j], x[i] } 604 | 605 | func (x byStringExpr) Less(i, j int) bool { 606 | xi := x[i] 607 | xj := x[j] 608 | 609 | if xi.phase != xj.phase { 610 | return xi.phase < xj.phase 611 | } 612 | for k := 0; k < len(xi.split) && k < len(xj.split); k++ { 613 | if xi.split[k] != xj.split[k] { 614 | return xi.split[k] < xj.split[k] 615 | } 616 | } 617 | if len(xi.split) != len(xj.split) { 618 | return len(xi.split) < len(xj.split) 619 | } 620 | if xi.value != xj.value { 621 | return xi.value < xj.value 622 | } 623 | return xi.original < xj.original 624 | } 625 | 626 | // fixMultilinePlus turns 627 | // 628 | // ... + 629 | // [ ... ] 630 | // 631 | // ... + 632 | // call(...) 633 | // 634 | // into 635 | // ... + [ 636 | // ... 637 | // ] 638 | // 639 | // ... + call( 640 | // ... 641 | // ) 642 | // 643 | // which typically works better with our aggressively compact formatting. 644 | func fixMultilinePlus(f *File, info *RewriteInfo) { 645 | 646 | // List manipulation helpers. 647 | // As a special case, we treat f([...]) as a list, mainly 648 | // for glob. 649 | 650 | // isList reports whether x is a list. 651 | var isList func(x Expr) bool 652 | isList = func(x Expr) bool { 653 | switch x := x.(type) { 654 | case *ListExpr: 655 | return true 656 | case *CallExpr: 657 | if len(x.List) == 1 { 658 | return isList(x.List[0]) 659 | } 660 | } 661 | return false 662 | } 663 | 664 | // isMultiLine reports whether x is a multiline list. 
665 | var isMultiLine func(Expr) bool 666 | isMultiLine = func(x Expr) bool { 667 | switch x := x.(type) { 668 | case *ListExpr: 669 | return x.ForceMultiLine || len(x.List) > 1 670 | case *CallExpr: 671 | if x.ForceMultiLine || len(x.List) > 1 && !x.ForceCompact { 672 | return true 673 | } 674 | if len(x.List) == 1 { 675 | return isMultiLine(x.List[0]) 676 | } 677 | } 678 | return false 679 | } 680 | 681 | // forceMultiLine tries to force the list x to use a multiline form. 682 | // It reports whether it was successful. 683 | var forceMultiLine func(Expr) bool 684 | forceMultiLine = func(x Expr) bool { 685 | switch x := x.(type) { 686 | case *ListExpr: 687 | // Already multi line? 688 | if x.ForceMultiLine { 689 | return true 690 | } 691 | // If this is a list containing a list, force the 692 | // inner list to be multiline instead. 693 | if len(x.List) == 1 && forceMultiLine(x.List[0]) { 694 | return true 695 | } 696 | x.ForceMultiLine = true 697 | return true 698 | 699 | case *CallExpr: 700 | if len(x.List) == 1 { 701 | return forceMultiLine(x.List[0]) 702 | } 703 | } 704 | return false 705 | } 706 | 707 | skip := map[Expr]bool{} 708 | Walk(f, func(v Expr, stk []Expr) { 709 | if skip[v] { 710 | return 711 | } 712 | bin, ok := v.(*BinaryExpr) 713 | if !ok || bin.Op != "+" { 714 | return 715 | } 716 | 717 | // Found a +. 718 | // w + x + y + z parses as ((w + x) + y) + z, 719 | // so chase down the left side to make a list of 720 | // all the things being added together, separated 721 | // by the BinaryExprs that join them. 722 | // Mark them as "skip" so that when Walk recurses 723 | // into the subexpressions, we won't reprocess them. 
724 | var all []Expr 725 | for { 726 | all = append(all, bin.Y, bin) 727 | bin1, ok := bin.X.(*BinaryExpr) 728 | if !ok || bin1.Op != "+" { 729 | break 730 | } 731 | bin = bin1 732 | skip[bin] = true 733 | } 734 | all = append(all, bin.X) 735 | 736 | // Because the outermost expression was the 737 | // rightmost one, the list is backward. Reverse it. 738 | for i, j := 0, len(all)-1; i < j; i, j = i+1, j-1 { 739 | all[i], all[j] = all[j], all[i] 740 | } 741 | 742 | // The 'all' slice is alternating addends and BinaryExpr +'s: 743 | // w, +, x, +, y, +, z 744 | // If there are no lists involved, don't rewrite anything. 745 | haveList := false 746 | for i := 0; i < len(all); i += 2 { 747 | if isList(all[i]) { 748 | haveList = true 749 | break 750 | } 751 | } 752 | if !haveList { 753 | return 754 | } 755 | 756 | // Okay, there are lists. 757 | // Consider each + next to a line break. 758 | for i := 1; i < len(all); i += 2 { 759 | bin := all[i].(*BinaryExpr) 760 | if !bin.LineBreak { 761 | continue 762 | } 763 | 764 | // We're going to break the line after the +. 765 | // If it is followed by a list, force that to be 766 | // multiline instead. 767 | if forceMultiLine(all[i+1]) { 768 | bin.LineBreak = false 769 | continue 770 | } 771 | 772 | // If the previous list was multiline already, 773 | // don't bother with the line break after 774 | // the +. 775 | if isMultiLine(all[i-1]) { 776 | bin.LineBreak = false 777 | continue 778 | } 779 | } 780 | }) 781 | } 782 | 783 | // hasComments reports whether any comments are associated with 784 | // the list or its elements. 
785 | func hasComments(list *ListExpr) (line, suffix bool) { 786 | com := list.Comment() 787 | if len(com.Before) > 0 || len(com.After) > 0 || len(list.End.Before) > 0 { 788 | line = true 789 | } 790 | if len(com.Suffix) > 0 { 791 | suffix = true 792 | } 793 | for _, elem := range list.List { 794 | com := elem.Comment() 795 | if len(com.Before) > 0 { 796 | line = true 797 | } 798 | if len(com.Suffix) > 0 { 799 | suffix = true 800 | } 801 | } 802 | return 803 | } 804 | -------------------------------------------------------------------------------- /vendor/github.com/bazelbuild/buildifier/core/rule.go: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright 2016 Google Inc. All Rights Reserved. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | */ 16 | 17 | // Rule-level API for inspecting and modifying a build.File syntax tree. 18 | 19 | package build 20 | 21 | // A Rule represents a single BUILD rule. 22 | type Rule struct { 23 | Call *CallExpr 24 | } 25 | 26 | // Rules returns the rules in the file of the given kind (such as "go_library"). 27 | // If kind == "", Rules returns all rules in the file. 
28 | func (f *File) Rules(kind string) []*Rule { 29 | var all []*Rule 30 | for _, stmt := range f.Stmt { 31 | call, ok := stmt.(*CallExpr) 32 | if !ok { 33 | continue 34 | } 35 | k, ok := call.X.(*LiteralExpr) 36 | if !ok { 37 | continue 38 | } 39 | if kind != "" && k.Token != kind { 40 | continue 41 | } 42 | all = append(all, &Rule{call}) 43 | } 44 | return all 45 | } 46 | 47 | // RuleAt returns the rule in the file that starts at the specified line, or null if no such rule. 48 | func (f *File) RuleAt(linenum int) *Rule { 49 | for _, stmt := range f.Stmt { 50 | call, ok := stmt.(*CallExpr) 51 | if !ok { 52 | continue 53 | } 54 | start, end := call.X.Span() 55 | if start.Line <= linenum && linenum <= end.Line { 56 | return &Rule{call} 57 | } 58 | } 59 | return nil 60 | } 61 | 62 | // DelRules removes rules with the given kind and name from the file. 63 | // An empty kind matches all kinds; an empty name matches all names. 64 | // It returns the number of rules that were deleted. 65 | func (f *File) DelRules(kind, name string) int { 66 | var i int 67 | for _, stmt := range f.Stmt { 68 | if call, ok := stmt.(*CallExpr); ok { 69 | if k, ok := call.X.(*LiteralExpr); ok { 70 | if kind == "" || k.Token == kind { 71 | r := &Rule{call} 72 | if name == "" || r.AttrString("name") == name { 73 | continue 74 | } 75 | } 76 | } 77 | } 78 | f.Stmt[i] = stmt 79 | i++ 80 | } 81 | n := len(f.Stmt) - i 82 | f.Stmt = f.Stmt[:i] 83 | return n 84 | } 85 | 86 | // Kind returns the rule's kind (such as "go_library"). 87 | func (r *Rule) Kind() string { 88 | return r.Call.X.(*LiteralExpr).Token 89 | } 90 | 91 | // SetKind changes rule's kind (such as "go_library"). 92 | func (r *Rule) SetKind(kind string) { 93 | r.Call.X.(*LiteralExpr).Token = kind 94 | } 95 | 96 | // Name returns the rule's target name. 97 | // If the rule has no target name, Name returns the empty string. 
98 | func (r *Rule) Name() string { 99 | return r.AttrString("name") 100 | } 101 | 102 | // AttrKeys returns the keys of all the rule's attributes. 103 | func (r *Rule) AttrKeys() []string { 104 | var keys []string 105 | for _, expr := range r.Call.List { 106 | if binExpr, ok := expr.(*BinaryExpr); ok && binExpr.Op == "=" { 107 | if keyExpr, ok := binExpr.X.(*LiteralExpr); ok { 108 | keys = append(keys, keyExpr.Token) 109 | } 110 | } 111 | } 112 | return keys 113 | } 114 | 115 | // AttrDefn returns the BinaryExpr defining the rule's attribute with the given key. 116 | // That is, the result is a *BinaryExpr with Op == "=". 117 | // If the rule has no such attribute, AttrDefn returns nil. 118 | func (r *Rule) AttrDefn(key string) *BinaryExpr { 119 | for _, kv := range r.Call.List { 120 | as, ok := kv.(*BinaryExpr) 121 | if !ok || as.Op != "=" { 122 | continue 123 | } 124 | k, ok := as.X.(*LiteralExpr) 125 | if !ok || k.Token != key { 126 | continue 127 | } 128 | return as 129 | } 130 | return nil 131 | } 132 | 133 | // Attr returns the value of the rule's attribute with the given key 134 | // (such as "name" or "deps"). 135 | // If the rule has no such attribute, Attr returns nil. 136 | func (r *Rule) Attr(key string) Expr { 137 | as := r.AttrDefn(key) 138 | if as == nil { 139 | return nil 140 | } 141 | return as.Y 142 | } 143 | 144 | // DelAttr deletes the rule's attribute with the named key. 145 | // It returns the old value of the attribute, or nil if the attribute was not found. 146 | func (r *Rule) DelAttr(key string) Expr { 147 | list := r.Call.List 148 | for i, kv := range list { 149 | as, ok := kv.(*BinaryExpr) 150 | if !ok || as.Op != "=" { 151 | continue 152 | } 153 | k, ok := as.X.(*LiteralExpr) 154 | if !ok || k.Token != key { 155 | continue 156 | } 157 | copy(list[i:], list[i+1:]) 158 | r.Call.List = list[:len(list)-1] 159 | return as.Y 160 | } 161 | return nil 162 | } 163 | 164 | // SetAttr sets the rule's attribute with the given key to value. 
165 | // If the rule has no attribute with the key, SetAttr appends 166 | // one to the end of the rule's attribute list. 167 | func (r *Rule) SetAttr(key string, val Expr) { 168 | as := r.AttrDefn(key) 169 | if as != nil { 170 | as.Y = val 171 | return 172 | } 173 | 174 | r.Call.List = append(r.Call.List, 175 | &BinaryExpr{ 176 | X: &LiteralExpr{Token: key}, 177 | Op: "=", 178 | Y: val, 179 | }, 180 | ) 181 | } 182 | 183 | // AttrLiteral returns the literal form of the rule's attribute 184 | // with the given key (such as "cc_api_version"), only when 185 | // that value is an identifier or number. 186 | // If the rule has no such attribute or the attribute is not an identifier or number, 187 | // AttrLiteral returns "". 188 | func (r *Rule) AttrLiteral(key string) string { 189 | lit, ok := r.Attr(key).(*LiteralExpr) 190 | if !ok { 191 | return "" 192 | } 193 | return lit.Token 194 | } 195 | 196 | // AttrString returns the value of the rule's attribute 197 | // with the given key (such as "name"), as a string. 198 | // If the rule has no such attribute or the attribute has a non-string value, 199 | // Attr returns the empty string. 200 | func (r *Rule) AttrString(key string) string { 201 | str, ok := r.Attr(key).(*StringExpr) 202 | if !ok { 203 | return "" 204 | } 205 | return str.Value 206 | } 207 | 208 | // AttrStrings returns the value of the rule's attribute 209 | // with the given key (such as "srcs"), as a []string. 210 | // If the rule has no such attribute or the attribute is not 211 | // a list of strings, AttrStrings returns a nil slice. 212 | func (r *Rule) AttrStrings(key string) []string { 213 | return Strings(r.Attr(key)) 214 | } 215 | 216 | // Strings returns expr as a []string. 217 | // If expr is not a list of string literals, 218 | // Strings returns a nil slice instead. 219 | // If expr is an empty list of string literals, 220 | // returns a non-nil empty slice. 
221 | // (this allows differentiating between these two cases) 222 | func Strings(expr Expr) []string { 223 | list, ok := expr.(*ListExpr) 224 | if !ok { 225 | return nil 226 | } 227 | all := []string{} // not nil 228 | for _, l := range list.List { 229 | str, ok := l.(*StringExpr) 230 | if !ok { 231 | return nil 232 | } 233 | all = append(all, str.Value) 234 | } 235 | return all 236 | } 237 | -------------------------------------------------------------------------------- /vendor/github.com/bazelbuild/buildifier/core/syntax.go: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright 2016 Google Inc. All Rights Reserved. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | */ 16 | 17 | // Package build implements parsing and printing of BUILD files. 18 | package build 19 | 20 | // Syntax data structure definitions. 21 | 22 | import ( 23 | "strings" 24 | "unicode/utf8" 25 | ) 26 | 27 | // A Position describes the position between two bytes of input. 28 | type Position struct { 29 | Line int // line in input (starting at 1) 30 | LineRune int // rune in line (starting at 1) 31 | Byte int // byte in input (starting at 0) 32 | } 33 | 34 | // add returns the position at the end of s, assuming it starts at p. 
35 | func (p Position) add(s string) Position { 36 | p.Byte += len(s) 37 | if n := strings.Count(s, "\n"); n > 0 { 38 | p.Line += n 39 | s = s[strings.LastIndex(s, "\n")+1:] 40 | p.LineRune = 1 41 | } 42 | p.LineRune += utf8.RuneCountInString(s) 43 | return p 44 | } 45 | 46 | // An Expr represents an input element. 47 | type Expr interface { 48 | // Span returns the start and end position of the expression, 49 | // excluding leading or trailing comments. 50 | Span() (start, end Position) 51 | 52 | // Comment returns the comments attached to the expression. 53 | // This method would normally be named 'Comments' but that 54 | // would interfere with embedding a type of the same name. 55 | Comment() *Comments 56 | } 57 | 58 | // A Comment represents a single # comment. 59 | type Comment struct { 60 | Start Position 61 | Token string // without trailing newline 62 | Suffix bool // an end of line (not whole line) comment 63 | } 64 | 65 | // Comments collects the comments associated with an expression. 66 | type Comments struct { 67 | Before []Comment // whole-line comments before this expression 68 | Suffix []Comment // end-of-line comments after this expression 69 | 70 | // For top-level expressions only, After lists whole-line 71 | // comments following the expression. 72 | After []Comment 73 | } 74 | 75 | // Comment returns the receiver. This isn't useful by itself, but 76 | // a Comments struct is embedded into all the expression 77 | // implementation types, and this gives each of those a Comment 78 | // method to satisfy the Expr interface. 79 | func (c *Comments) Comment() *Comments { 80 | return c 81 | } 82 | 83 | // A File represents an entire BUILD file. 
84 | type File struct { 85 | Path string // file path, relative to workspace directory 86 | Comments 87 | Stmt []Expr 88 | } 89 | 90 | func (x *File) Span() (start, end Position) { 91 | if len(x.Stmt) == 0 { 92 | return 93 | } 94 | start, _ = x.Stmt[0].Span() 95 | _, end = x.Stmt[len(x.Stmt)-1].Span() 96 | return start, end 97 | } 98 | 99 | // A CommentBlock represents a top-level block of comments separate 100 | // from any rule. 101 | type CommentBlock struct { 102 | Comments 103 | Start Position 104 | } 105 | 106 | func (x *CommentBlock) Span() (start, end Position) { 107 | return x.Start, x.Start 108 | } 109 | 110 | // A PythonBlock represents a blob of Python code, typically a def or for loop. 111 | type PythonBlock struct { 112 | Comments 113 | Start Position 114 | Token string // raw Python code, including final newline 115 | } 116 | 117 | func (x *PythonBlock) Span() (start, end Position) { 118 | return x.Start, x.Start.add(x.Token) 119 | } 120 | 121 | // A LiteralExpr represents a literal identifier or number. 122 | type LiteralExpr struct { 123 | Comments 124 | Start Position 125 | Token string // identifier token 126 | } 127 | 128 | func (x *LiteralExpr) Span() (start, end Position) { 129 | return x.Start, x.Start.add(x.Token) 130 | } 131 | 132 | // A StringExpr represents a single literal string. 133 | type StringExpr struct { 134 | Comments 135 | Start Position 136 | Value string // string value (decoded) 137 | TripleQuote bool // triple quote output 138 | End Position 139 | 140 | // To allow specific formatting of string literals, 141 | // at least within our requirements, record the 142 | // preferred form of Value. This field is a hint: 143 | // it is only used if it is a valid quoted form for Value. 144 | Token string 145 | } 146 | 147 | func (x *StringExpr) Span() (start, end Position) { 148 | return x.Start, x.End 149 | } 150 | 151 | // An End represents the end of a parenthesized or bracketed expression. 152 | // It is a place to hang comments. 
153 | type End struct { 154 | Comments 155 | Pos Position 156 | } 157 | 158 | func (x *End) Span() (start, end Position) { 159 | return x.Pos, x.Pos.add(")") 160 | } 161 | 162 | // A CallExpr represents a function call expression: X(List). 163 | type CallExpr struct { 164 | Comments 165 | X Expr 166 | ListStart Position // position of ( 167 | List []Expr 168 | End // position of ) 169 | ForceCompact bool // force compact (non-multiline) form when printing 170 | ForceMultiLine bool // force multiline form when printing 171 | } 172 | 173 | func (x *CallExpr) Span() (start, end Position) { 174 | start, _ = x.X.Span() 175 | return start, x.End.Pos.add(")") 176 | } 177 | 178 | // A DotExpr represents a field selector: X.Name. 179 | type DotExpr struct { 180 | Comments 181 | X Expr 182 | Dot Position 183 | NamePos Position 184 | Name string 185 | } 186 | 187 | func (x *DotExpr) Span() (start, end Position) { 188 | start, _ = x.X.Span() 189 | return start, x.NamePos.add(x.Name) 190 | } 191 | 192 | // A ListForExpr represents a list comprehension expression: [X for ... if ...]. 193 | type ListForExpr struct { 194 | Comments 195 | ForceMultiLine bool // split expression across multiple lines 196 | Brack string // "", "()", or "[]" 197 | Start Position 198 | X Expr 199 | For []*ForClause 200 | If []*IfClause 201 | End 202 | } 203 | 204 | func (x *ListForExpr) Span() (start, end Position) { 205 | return x.Start, x.End.Pos.add("]") 206 | } 207 | 208 | // A ForClause represents a for clause in a list comprehension: for Var in Expr. 209 | type ForClause struct { 210 | Comments 211 | For Position 212 | Var []Expr 213 | In Position 214 | Expr Expr 215 | } 216 | 217 | func (x *ForClause) Span() (start, end Position) { 218 | _, end = x.Expr.Span() 219 | return x.For, end 220 | } 221 | 222 | // An IfClause represents an if clause in a list comprehension: if Cond. 
223 | type IfClause struct { 224 | Comments 225 | If Position 226 | Cond Expr 227 | } 228 | 229 | func (x *IfClause) Span() (start, end Position) { 230 | _, end = x.Cond.Span() 231 | return x.If, end 232 | } 233 | 234 | // A KeyValueExpr represents a dictionary entry: Key: Value. 235 | type KeyValueExpr struct { 236 | Comments 237 | Key Expr 238 | Colon Position 239 | Value Expr 240 | } 241 | 242 | func (x *KeyValueExpr) Span() (start, end Position) { 243 | start, _ = x.Key.Span() 244 | _, end = x.Value.Span() 245 | return start, end 246 | } 247 | 248 | // A DictExpr represents a dictionary literal: { List }. 249 | type DictExpr struct { 250 | Comments 251 | Start Position 252 | List []Expr // all *KeyValueExprs 253 | Comma Position // position of trailing comma, if any 254 | End 255 | ForceMultiLine bool // force multiline form when printing 256 | } 257 | 258 | func (x *DictExpr) Span() (start, end Position) { 259 | return x.Start, x.End.Pos.add("}") 260 | } 261 | 262 | // A ListExpr represents a list literal: [ List ]. 263 | type ListExpr struct { 264 | Comments 265 | Start Position 266 | List []Expr 267 | Comma Position // position of trailing comma, if any 268 | End 269 | ForceMultiLine bool // force multiline form when printing 270 | } 271 | 272 | func (x *ListExpr) Span() (start, end Position) { 273 | return x.Start, x.End.Pos.add("]") 274 | } 275 | 276 | // A TupleExpr represents a tuple literal: (List) 277 | type TupleExpr struct { 278 | Comments 279 | Start Position 280 | List []Expr 281 | Comma Position // position of trailing comma, if any 282 | End 283 | ForceCompact bool // force compact (non-multiline) form when printing 284 | ForceMultiLine bool // force multiline form when printing 285 | } 286 | 287 | func (x *TupleExpr) Span() (start, end Position) { 288 | return x.Start, x.End.Pos.add(")") 289 | } 290 | 291 | // A UnaryExpr represents a unary expression: Op X. 
292 | type UnaryExpr struct { 293 | Comments 294 | OpStart Position 295 | Op string 296 | X Expr 297 | } 298 | 299 | func (x *UnaryExpr) Span() (start, end Position) { 300 | _, end = x.X.Span() 301 | return x.OpStart, end 302 | } 303 | 304 | // A BinaryExpr represents a binary expression: X Op Y. 305 | type BinaryExpr struct { 306 | Comments 307 | X Expr 308 | OpStart Position 309 | Op string 310 | LineBreak bool // insert line break between Op and Y 311 | Y Expr 312 | } 313 | 314 | func (x *BinaryExpr) Span() (start, end Position) { 315 | start, _ = x.X.Span() 316 | _, end = x.Y.Span() 317 | return start, end 318 | } 319 | 320 | // A ParenExpr represents a parenthesized expression: (X). 321 | type ParenExpr struct { 322 | Comments 323 | Start Position 324 | X Expr 325 | End 326 | ForceMultiLine bool // insert line break after opening ( and before closing ) 327 | } 328 | 329 | func (x *ParenExpr) Span() (start, end Position) { 330 | return x.Start, x.End.Pos.add(")") 331 | } 332 | 333 | // A SliceExpr represents a slice expression: X[Y:Z]. 334 | type SliceExpr struct { 335 | Comments 336 | X Expr 337 | SliceStart Position 338 | Y Expr 339 | Colon Position 340 | Z Expr 341 | End Position 342 | } 343 | 344 | func (x *SliceExpr) Span() (start, end Position) { 345 | start, _ = x.X.Span() 346 | return start, x.End 347 | } 348 | 349 | // An IndexExpr represents an index expression: X[Y]. 350 | type IndexExpr struct { 351 | Comments 352 | X Expr 353 | IndexStart Position 354 | Y Expr 355 | End Position 356 | } 357 | 358 | func (x *IndexExpr) Span() (start, end Position) { 359 | start, _ = x.X.Span() 360 | return start, x.End 361 | } 362 | 363 | // A LambdaExpr represents a lambda expression: lambda Var: Expr. 
364 | type LambdaExpr struct { 365 | Comments 366 | Lambda Position 367 | Var []Expr 368 | Colon Position 369 | Expr Expr 370 | } 371 | 372 | func (x *LambdaExpr) Span() (start, end Position) { 373 | _, end = x.Expr.Span() 374 | return x.Lambda, end 375 | } 376 | 377 | // ConditionalExpr represents the conditional: X if TEST else ELSE. 378 | type ConditionalExpr struct { 379 | Comments 380 | Then Expr 381 | IfStart Position 382 | Test Expr 383 | ElseStart Position 384 | Else Expr 385 | } 386 | 387 | // Span returns the start and end position of the expression, 388 | // excluding leading or trailing comments. 389 | func (x *ConditionalExpr) Span() (start, end Position) { 390 | start, _ = x.Then.Span() 391 | _, end = x.Else.Span() 392 | return start, end 393 | } 394 | -------------------------------------------------------------------------------- /vendor/github.com/bazelbuild/buildifier/core/tables.go: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright 2016 Google Inc. All Rights Reserved. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | */ 16 | // Tables about what Buildifier can and cannot edit. 17 | // Perhaps eventually this will be 18 | // derived from the BUILD encyclopedia. 19 | 20 | package build 21 | 22 | // A named argument to a rule call is considered to have a value 23 | // that can be treated as a label or list of labels if the name 24 | // is one of these names. 
There is a separate blacklist for 25 | // rule-specific exceptions. 26 | var isLabelArg = map[string]bool{ 27 | "app_target": true, 28 | "appdir": true, 29 | "base_package": true, 30 | "build_deps": true, 31 | "cc_deps": true, 32 | "ccdeps": true, 33 | "common_deps": true, 34 | "compile_deps": true, 35 | "compiler": true, 36 | "data": true, 37 | "default_visibility": true, 38 | "dep": true, 39 | "deps": true, 40 | "deps_java": true, 41 | "dont_depend_on": true, 42 | "env_deps": true, 43 | "envscripts": true, 44 | "exported_deps": true, 45 | "exports": true, 46 | "externs_list": true, 47 | "files": true, 48 | "globals": true, 49 | "implementation": true, 50 | "implements": true, 51 | "includes": true, 52 | "interface": true, 53 | "jar": true, 54 | "jars": true, 55 | "javadeps": true, 56 | "lib_deps": true, 57 | "library": true, 58 | "malloc": true, 59 | "model": true, 60 | "mods": true, 61 | "module_deps": true, 62 | "module_target": true, 63 | "of": true, 64 | "plugins": true, 65 | "proto_deps": true, 66 | "proto_target": true, 67 | "protos": true, 68 | "resource": true, 69 | "resources": true, 70 | "runtime_deps": true, 71 | "scope": true, 72 | "shared_deps": true, 73 | "similar_deps": true, 74 | "source_jar": true, 75 | "src": true, 76 | "srcs": true, 77 | "stripped_targets": true, 78 | "suites": true, 79 | "swigdeps": true, 80 | "target": true, 81 | "target_devices": true, 82 | "target_platforms": true, 83 | "template": true, 84 | "test": true, 85 | "tests": true, 86 | "tests_deps": true, 87 | "tool": true, 88 | "tools": true, 89 | "visibility": true, 90 | } 91 | 92 | // labelBlacklist is the list of call arguments that cannot be 93 | // shortened, because they are not interpreted using the same 94 | // rules as for other labels. 95 | var labelBlacklist = map[string]bool{ 96 | // Shortening this can cause visibility checks to fail. 
97 | "package_group.includes": true, 98 | } 99 | 100 | // A named argument to a rule call is considered to be a sortable list 101 | // if the name is one of these names. There is a separate blacklist for 102 | // rule-specific exceptions. 103 | var isSortableListArg = map[string]bool{ 104 | "cc_deps": true, 105 | "common_deps": true, 106 | "compile_deps": true, 107 | "configs": true, 108 | "constraints": true, 109 | "data": true, 110 | "default_visibility": true, 111 | "deps": true, 112 | "deps_java": true, 113 | "exported_deps": true, 114 | "exports": true, 115 | "filegroups": true, 116 | "files": true, 117 | "hdrs": true, 118 | "imports": true, 119 | "includes": true, 120 | "inherits": true, 121 | "javadeps": true, 122 | "lib_deps": true, 123 | "module_deps": true, 124 | "out": true, 125 | "outs": true, 126 | "packages": true, 127 | "plugin_modules": true, 128 | "proto_deps": true, 129 | "protos": true, 130 | "pubs": true, 131 | "resources": true, 132 | "runtime_deps": true, 133 | "shared_deps": true, 134 | "similar_deps": true, 135 | "srcs": true, 136 | "swigdeps": true, 137 | "swig_includes": true, 138 | "tags": true, 139 | "tests": true, 140 | "tools": true, 141 | "to_start_extensions": true, 142 | "visibility": true, 143 | } 144 | 145 | // sortableBlacklist records specific rule arguments that must not be reordered. 146 | var sortableBlacklist = map[string]bool{ 147 | "genrule.outs": true, 148 | "genrule.srcs": true, 149 | } 150 | 151 | // sortableWhitelist records specific rule arguments that are guaranteed 152 | // to be reorderable, because bazel re-sorts the list itself after reading the BUILD file. 
153 | var sortableWhitelist = map[string]bool{ 154 | "cc_inc_library.hdrs": true, 155 | "cc_library.hdrs": true, 156 | "java_library.srcs": true, 157 | "java_library.resources": true, 158 | "java_binary.srcs": true, 159 | "java_binary.resources": true, 160 | "java_test.srcs": true, 161 | "java_test.resources": true, 162 | "java_library.constraints": true, 163 | "java_import.constraints": true, 164 | } 165 | 166 | // OverrideTables allows a user of the build package to override the special-case rules. 167 | func OverrideTables(labelArg, blacklist, sortableListArg, sortBlacklist, sortWhitelist map[string]bool) { 168 | isLabelArg = labelArg 169 | labelBlacklist = blacklist 170 | isSortableListArg = sortableListArg 171 | sortableBlacklist = sortBlacklist 172 | sortableWhitelist = sortWhitelist 173 | } 174 | -------------------------------------------------------------------------------- /vendor/github.com/bazelbuild/buildifier/core/walk.go: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright 2016 Google Inc. All Rights Reserved. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | */ 16 | 17 | package build 18 | 19 | // Walk walks the expression tree v, calling f on all subexpressions 20 | // in a preorder traversal. 21 | // 22 | // The stk argument is the stack of expressions in the recursion above x, 23 | // from outermost to innermost. 
24 | // 25 | func Walk(v Expr, f func(x Expr, stk []Expr)) { 26 | var stack []Expr 27 | walk1(&v, &stack, func(x Expr, stk []Expr) Expr { 28 | f(x, stk) 29 | return nil 30 | }) 31 | } 32 | 33 | // WalkAndUpdate walks the expression tree v, calling f on all subexpressions 34 | // in a preorder traversal. If f returns a non-nil value, the tree is mutated. 35 | // The new value replaces the old one. 36 | // 37 | // The stk argument is the stack of expressions in the recursion above x, 38 | // from outermost to innermost. 39 | // 40 | func Edit(v Expr, f func(x Expr, stk []Expr) Expr) Expr { 41 | var stack []Expr 42 | return walk1(&v, &stack, f) 43 | } 44 | 45 | // walk1 is the actual implementation of Walk and WalkAndUpdate. 46 | // It has the same signature and meaning as Walk, 47 | // except that it maintains in *stack the current stack 48 | // of nodes. Using a pointer to a slice here ensures that 49 | // as the stack grows and shrinks the storage can be 50 | // reused for the next growth. 
51 | func walk1(v *Expr, stack *[]Expr, f func(x Expr, stk []Expr) Expr) Expr { 52 | if v == nil { 53 | return nil 54 | } 55 | 56 | if res := f(*v, *stack); res != nil { 57 | *v = res 58 | } 59 | *stack = append(*stack, *v) 60 | switch v := (*v).(type) { 61 | case *File: 62 | for _, stmt := range v.Stmt { 63 | walk1(&stmt, stack, f) 64 | } 65 | case *DotExpr: 66 | walk1(&v.X, stack, f) 67 | case *IndexExpr: 68 | walk1(&v.X, stack, f) 69 | walk1(&v.Y, stack, f) 70 | case *KeyValueExpr: 71 | walk1(&v.Key, stack, f) 72 | walk1(&v.Value, stack, f) 73 | case *SliceExpr: 74 | walk1(&v.X, stack, f) 75 | if v.Y != nil { 76 | walk1(&v.Y, stack, f) 77 | } 78 | if v.Z != nil { 79 | walk1(&v.Z, stack, f) 80 | } 81 | case *ParenExpr: 82 | walk1(&v.X, stack, f) 83 | case *UnaryExpr: 84 | walk1(&v.X, stack, f) 85 | case *BinaryExpr: 86 | walk1(&v.X, stack, f) 87 | walk1(&v.Y, stack, f) 88 | case *LambdaExpr: 89 | for i := range v.Var { 90 | walk1(&v.Var[i], stack, f) 91 | } 92 | walk1(&v.Expr, stack, f) 93 | case *CallExpr: 94 | walk1(&v.X, stack, f) 95 | for i := range v.List { 96 | walk1(&v.List[i], stack, f) 97 | } 98 | case *ListExpr: 99 | for i := range v.List { 100 | walk1(&v.List[i], stack, f) 101 | } 102 | case *TupleExpr: 103 | for i := range v.List { 104 | walk1(&v.List[i], stack, f) 105 | } 106 | case *DictExpr: 107 | for i := range v.List { 108 | walk1(&v.List[i], stack, f) 109 | } 110 | case *ListForExpr: 111 | walk1(&v.X, stack, f) 112 | for _, c := range v.For { 113 | for j := range c.Var { 114 | walk1(&c.Var[j], stack, f) 115 | } 116 | walk1(&c.Expr, stack, f) 117 | } 118 | for _, c := range v.If { 119 | walk1(&c.Cond, stack, f) 120 | } 121 | case *ConditionalExpr: 122 | walk1(&v.Then, stack, f) 123 | walk1(&v.Test, stack, f) 124 | walk1(&v.Else, stack, f) 125 | } 126 | *stack = (*stack)[:len(*stack)-1] 127 | return *v 128 | } 129 | -------------------------------------------------------------------------------- /vendor/github.com/golang/glog/LICENSE: 
-------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, and 10 | distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by the copyright 13 | owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all other entities 16 | that control, are controlled by, or are under common control with that entity. 17 | For the purposes of this definition, "control" means (i) the power, direct or 18 | indirect, to cause the direction or management of such entity, whether by 19 | contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the 20 | outstanding shares, or (iii) beneficial ownership of such entity. 21 | 22 | "You" (or "Your") shall mean an individual or Legal Entity exercising 23 | permissions granted by this License. 24 | 25 | "Source" form shall mean the preferred form for making modifications, including 26 | but not limited to software source code, documentation source, and configuration 27 | files. 28 | 29 | "Object" form shall mean any form resulting from mechanical transformation or 30 | translation of a Source form, including but not limited to compiled object code, 31 | generated documentation, and conversions to other media types. 32 | 33 | "Work" shall mean the work of authorship, whether in Source or Object form, made 34 | available under the License, as indicated by a copyright notice that is included 35 | in or attached to the work (an example is provided in the Appendix below). 
36 | 37 | "Derivative Works" shall mean any work, whether in Source or Object form, that 38 | is based on (or derived from) the Work and for which the editorial revisions, 39 | annotations, elaborations, or other modifications represent, as a whole, an 40 | original work of authorship. For the purposes of this License, Derivative Works 41 | shall not include works that remain separable from, or merely link (or bind by 42 | name) to the interfaces of, the Work and Derivative Works thereof. 43 | 44 | "Contribution" shall mean any work of authorship, including the original version 45 | of the Work and any modifications or additions to that Work or Derivative Works 46 | thereof, that is intentionally submitted to Licensor for inclusion in the Work 47 | by the copyright owner or by an individual or Legal Entity authorized to submit 48 | on behalf of the copyright owner. For the purposes of this definition, 49 | "submitted" means any form of electronic, verbal, or written communication sent 50 | to the Licensor or its representatives, including but not limited to 51 | communication on electronic mailing lists, source code control systems, and 52 | issue tracking systems that are managed by, or on behalf of, the Licensor for 53 | the purpose of discussing and improving the Work, but excluding communication 54 | that is conspicuously marked or otherwise designated in writing by the copyright 55 | owner as "Not a Contribution." 56 | 57 | "Contributor" shall mean Licensor and any individual or Legal Entity on behalf 58 | of whom a Contribution has been received by Licensor and subsequently 59 | incorporated within the Work. 60 | 61 | 2. Grant of Copyright License. 
62 | 63 | Subject to the terms and conditions of this License, each Contributor hereby 64 | grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, 65 | irrevocable copyright license to reproduce, prepare Derivative Works of, 66 | publicly display, publicly perform, sublicense, and distribute the Work and such 67 | Derivative Works in Source or Object form. 68 | 69 | 3. Grant of Patent License. 70 | 71 | Subject to the terms and conditions of this License, each Contributor hereby 72 | grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, 73 | irrevocable (except as stated in this section) patent license to make, have 74 | made, use, offer to sell, sell, import, and otherwise transfer the Work, where 75 | such license applies only to those patent claims licensable by such Contributor 76 | that are necessarily infringed by their Contribution(s) alone or by combination 77 | of their Contribution(s) with the Work to which such Contribution(s) was 78 | submitted. If You institute patent litigation against any entity (including a 79 | cross-claim or counterclaim in a lawsuit) alleging that the Work or a 80 | Contribution incorporated within the Work constitutes direct or contributory 81 | patent infringement, then any patent licenses granted to You under this License 82 | for that Work shall terminate as of the date such litigation is filed. 83 | 84 | 4. Redistribution. 
85 | 86 | You may reproduce and distribute copies of the Work or Derivative Works thereof 87 | in any medium, with or without modifications, and in Source or Object form, 88 | provided that You meet the following conditions: 89 | 90 | You must give any other recipients of the Work or Derivative Works a copy of 91 | this License; and 92 | You must cause any modified files to carry prominent notices stating that You 93 | changed the files; and 94 | You must retain, in the Source form of any Derivative Works that You distribute, 95 | all copyright, patent, trademark, and attribution notices from the Source form 96 | of the Work, excluding those notices that do not pertain to any part of the 97 | Derivative Works; and 98 | If the Work includes a "NOTICE" text file as part of its distribution, then any 99 | Derivative Works that You distribute must include a readable copy of the 100 | attribution notices contained within such NOTICE file, excluding those notices 101 | that do not pertain to any part of the Derivative Works, in at least one of the 102 | following places: within a NOTICE text file distributed as part of the 103 | Derivative Works; within the Source form or documentation, if provided along 104 | with the Derivative Works; or, within a display generated by the Derivative 105 | Works, if and wherever such third-party notices normally appear. The contents of 106 | the NOTICE file are for informational purposes only and do not modify the 107 | License. You may add Your own attribution notices within Derivative Works that 108 | You distribute, alongside or as an addendum to the NOTICE text from the Work, 109 | provided that such additional attribution notices cannot be construed as 110 | modifying the License. 
111 | You may add Your own copyright statement to Your modifications and may provide 112 | additional or different license terms and conditions for use, reproduction, or 113 | distribution of Your modifications, or for any such Derivative Works as a whole, 114 | provided Your use, reproduction, and distribution of the Work otherwise complies 115 | with the conditions stated in this License. 116 | 117 | 5. Submission of Contributions. 118 | 119 | Unless You explicitly state otherwise, any Contribution intentionally submitted 120 | for inclusion in the Work by You to the Licensor shall be under the terms and 121 | conditions of this License, without any additional terms or conditions. 122 | Notwithstanding the above, nothing herein shall supersede or modify the terms of 123 | any separate license agreement you may have executed with Licensor regarding 124 | such Contributions. 125 | 126 | 6. Trademarks. 127 | 128 | This License does not grant permission to use the trade names, trademarks, 129 | service marks, or product names of the Licensor, except as required for 130 | reasonable and customary use in describing the origin of the Work and 131 | reproducing the content of the NOTICE file. 132 | 133 | 7. Disclaimer of Warranty. 134 | 135 | Unless required by applicable law or agreed to in writing, Licensor provides the 136 | Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, 137 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, 138 | including, without limitation, any warranties or conditions of TITLE, 139 | NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are 140 | solely responsible for determining the appropriateness of using or 141 | redistributing the Work and assume any risks associated with Your exercise of 142 | permissions under this License. 143 | 144 | 8. Limitation of Liability. 
145 | 146 | In no event and under no legal theory, whether in tort (including negligence), 147 | contract, or otherwise, unless required by applicable law (such as deliberate 148 | and grossly negligent acts) or agreed to in writing, shall any Contributor be 149 | liable to You for damages, including any direct, indirect, special, incidental, 150 | or consequential damages of any character arising as a result of this License or 151 | out of the use or inability to use the Work (including but not limited to 152 | damages for loss of goodwill, work stoppage, computer failure or malfunction, or 153 | any and all other commercial damages or losses), even if such Contributor has 154 | been advised of the possibility of such damages. 155 | 156 | 9. Accepting Warranty or Additional Liability. 157 | 158 | While redistributing the Work or Derivative Works thereof, You may choose to 159 | offer, and charge a fee for, acceptance of support, warranty, indemnity, or 160 | other liability obligations and/or rights consistent with this License. However, 161 | in accepting such obligations, You may act only on Your own behalf and on Your 162 | sole responsibility, not on behalf of any other Contributor, and only if You 163 | agree to indemnify, defend, and hold each Contributor harmless for any liability 164 | incurred by, or claims asserted against, such Contributor by reason of your 165 | accepting any such warranty or additional liability. 166 | 167 | END OF TERMS AND CONDITIONS 168 | 169 | APPENDIX: How to apply the Apache License to your work 170 | 171 | To apply the Apache License to your work, attach the following boilerplate 172 | notice, with the fields enclosed by brackets "[]" replaced with your own 173 | identifying information. (Don't include the brackets!) The text should be 174 | enclosed in the appropriate comment syntax for the file format. 
We also 175 | recommend that a file or class name and description of purpose be included on 176 | the same "printed page" as the copyright notice for easier identification within 177 | third-party archives. 178 | 179 | Copyright [yyyy] [name of copyright owner] 180 | 181 | Licensed under the Apache License, Version 2.0 (the "License"); 182 | you may not use this file except in compliance with the License. 183 | You may obtain a copy of the License at 184 | 185 | http://www.apache.org/licenses/LICENSE-2.0 186 | 187 | Unless required by applicable law or agreed to in writing, software 188 | distributed under the License is distributed on an "AS IS" BASIS, 189 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 190 | See the License for the specific language governing permissions and 191 | limitations under the License. 192 | -------------------------------------------------------------------------------- /vendor/github.com/golang/glog/README: -------------------------------------------------------------------------------- 1 | glog 2 | ==== 3 | 4 | Leveled execution logs for Go. 5 | 6 | This is an efficient pure Go implementation of leveled logs in the 7 | manner of the open source C++ package 8 | http://code.google.com/p/google-glog 9 | 10 | By binding methods to booleans it is possible to use the log package 11 | without paying the expense of evaluating the arguments to the log. 12 | Through the -vmodule flag, the package also provides fine-grained 13 | control over logging at the file level. 14 | 15 | The comment from glog.go introduces the ideas: 16 | 17 | Package glog implements logging analogous to the Google-internal 18 | C++ INFO/ERROR/V setup. It provides functions Info, Warning, 19 | Error, Fatal, plus formatting variants such as Infof. It 20 | also provides V-style logging controlled by the -v and 21 | -vmodule=file=2 flags. 
22 | 23 | Basic examples: 24 | 25 | glog.Info("Prepare to repel boarders") 26 | 27 | glog.Fatalf("Initialization failed: %s", err) 28 | 29 | See the documentation for the V function for an explanation 30 | of these examples: 31 | 32 | if glog.V(2) { 33 | glog.Info("Starting transaction...") 34 | } 35 | 36 | glog.V(2).Infoln("Processed", nItems, "elements") 37 | 38 | 39 | The repository contains an open source version of the log package 40 | used inside Google. The master copy of the source lives inside 41 | Google, not here. The code in this repo is for export only and is not itself 42 | under development. Feature requests will be ignored. 43 | 44 | Send bug reports to golang-nuts@googlegroups.com. 45 | -------------------------------------------------------------------------------- /vendor/github.com/golang/glog/glog_file.go: -------------------------------------------------------------------------------- 1 | // Go support for leveled logs, analogous to https://code.google.com/p/google-glog/ 2 | // 3 | // Copyright 2013 Google Inc. All Rights Reserved. 4 | // 5 | // Licensed under the Apache License, Version 2.0 (the "License"); 6 | // you may not use this file except in compliance with the License. 7 | // You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, software 12 | // distributed under the License is distributed on an "AS IS" BASIS, 13 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | // See the License for the specific language governing permissions and 15 | // limitations under the License. 16 | 17 | // File I/O for logs. 18 | 19 | package glog 20 | 21 | import ( 22 | "errors" 23 | "flag" 24 | "fmt" 25 | "os" 26 | "os/user" 27 | "path/filepath" 28 | "strings" 29 | "sync" 30 | "time" 31 | ) 32 | 33 | // MaxSize is the maximum size of a log file in bytes. 
var MaxSize uint64 = 1024 * 1024 * 1800

// logDirs lists the candidate directories for new log files.
var logDirs []string

// If non-empty, overrides the choice of directory in which to write logs.
// See createLogDirs for the full list of possible destinations.
var logDir = flag.String("log_dir", "", "If non-empty, write log files in this directory")

// createLogDirs populates logDirs: the --log_dir flag value (when set)
// followed by the OS temporary directory as a fallback.
func createLogDirs() {
	if dir := *logDir; dir != "" {
		logDirs = append(logDirs, dir)
	}
	logDirs = append(logDirs, os.TempDir())
}

var (
	pid      = os.Getpid()
	program  = filepath.Base(os.Args[0])
	host     = "unknownhost"
	userName = "unknownuser"
)

// init fills in host and userName on a best-effort basis, keeping the
// "unknown" placeholders when a lookup fails.
func init() {
	if h, err := os.Hostname(); err == nil {
		host = shortHostname(h)
	}
	if u, err := user.Current(); err == nil {
		userName = u.Username
	}

	// Sanitize userName since it may contain filepath separators on Windows.
	userName = strings.Replace(userName, `\`, "_", -1)
}

// shortHostname returns its argument, truncating at the first period.
// For instance, given "www.google.com" it returns "www".
func shortHostname(hostname string) string {
	dot := strings.Index(hostname, ".")
	if dot < 0 {
		return hostname
	}
	return hostname[:dot]
}

// logName returns a new log file name containing tag, with start time t, and
// the name for the symlink for tag.
func logName(tag string, t time.Time) (name, link string) {
	link = program + "." + tag
	name = fmt.Sprintf("%s.%s.%s.log.%s.%04d%02d%02d-%02d%02d%02d.%d",
		program,
		host,
		userName,
		tag,
		t.Year(),
		t.Month(),
		t.Day(),
		t.Hour(),
		t.Minute(),
		t.Second(),
		pid)
	return name, link
}

// onceLogDirs guards the lazy, one-time initialization of logDirs.
var onceLogDirs sync.Once

// create creates a new log file and returns the file and its filename, which
// contains tag ("INFO", "FATAL", etc.)
and t. If the file is created 103 | // successfully, create also attempts to update the symlink for that tag, ignoring 104 | // errors. 105 | func create(tag string, t time.Time) (f *os.File, filename string, err error) { 106 | onceLogDirs.Do(createLogDirs) 107 | if len(logDirs) == 0 { 108 | return nil, "", errors.New("log: no log dirs") 109 | } 110 | name, link := logName(tag, t) 111 | var lastErr error 112 | for _, dir := range logDirs { 113 | fname := filepath.Join(dir, name) 114 | f, err := os.Create(fname) 115 | if err == nil { 116 | symlink := filepath.Join(dir, link) 117 | os.Remove(symlink) // ignore err 118 | os.Symlink(name, symlink) // ignore err 119 | return f, fname, nil 120 | } 121 | lastErr = err 122 | } 123 | return nil, "", fmt.Errorf("log: cannot create log: %v", lastErr) 124 | } 125 | -------------------------------------------------------------------------------- /vendor/go/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2009 The Go Authors. All rights reserved. 2 | 3 | Redistribution and use in source and binary forms, with or without 4 | modification, are permitted provided that the following conditions are 5 | met: 6 | 7 | * Redistributions of source code must retain the above copyright 8 | notice, this list of conditions and the following disclaimer. 9 | * Redistributions in binary form must reproduce the above 10 | copyright notice, this list of conditions and the following disclaimer 11 | in the documentation and/or other materials provided with the 12 | distribution. 13 | * Neither the name of Google Inc. nor the names of its 14 | contributors may be used to endorse or promote products derived from 15 | this software without specific prior written permission. 
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
/vendor/go/path/filepath/path.go:
--------------------------------------------------------------------------------
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package sfilepath provides a symlink-following variant of
// filepath.Walk: it stats each directory entry with os.Stat (rather
// than os.Lstat), so it descends through symbolic links to
// directories. (The doc previously described the standard library's
// "Package filepath", which this vendored copy is not.)
//
// NOTE(review): there appears to be no cycle detection here, so a
// symlink loop would recurse indefinitely — confirm that callers only
// walk trees without cyclic links.
10 | package sfilepath 11 | 12 | import ( 13 | "os" 14 | "path/filepath" 15 | "sort" 16 | ) 17 | 18 | // Walk is filesystem.Walk that follows symlinks 19 | func Walk(root string, walkFn filepath.WalkFunc) error { 20 | info, err := os.Lstat(root) 21 | if err != nil { 22 | return walkFn(root, nil, err) 23 | } 24 | return walk(root, info, walkFn) 25 | } 26 | 27 | func walk(path string, info os.FileInfo, walkFn filepath.WalkFunc) error { 28 | err := walkFn(path, info, nil) 29 | if err != nil { 30 | if info.IsDir() && err == filepath.SkipDir { 31 | return nil 32 | } 33 | return err 34 | } 35 | 36 | if !info.IsDir() { 37 | return nil 38 | } 39 | 40 | names, err := readDirNames(path) 41 | if err != nil { 42 | return walkFn(path, info, err) 43 | } 44 | 45 | for _, name := range names { 46 | filename := filepath.Join(path, name) 47 | fileInfo, err := os.Stat(filename) 48 | if err != nil { 49 | if err := walkFn(filename, fileInfo, err); err != nil && err != filepath.SkipDir { 50 | return err 51 | } 52 | } else { 53 | err = walk(filename, fileInfo, walkFn) 54 | if err != nil { 55 | if !fileInfo.IsDir() || err != filepath.SkipDir { 56 | return err 57 | } 58 | } 59 | } 60 | } 61 | return nil 62 | } 63 | 64 | func readDirNames(dirname string) ([]string, error) { 65 | f, err := os.Open(dirname) 66 | if err != nil { 67 | return nil, err 68 | } 69 | names, err := f.Readdirnames(-1) 70 | f.Close() 71 | if err != nil { 72 | return nil, err 73 | } 74 | sort.Strings(names) 75 | return names, nil 76 | } 77 | --------------------------------------------------------------------------------