├── .gitignore ├── .travis.yml ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── compress.go ├── compress_test.go ├── doc.go ├── evaluator.go ├── evaluator_test.go ├── expand_test.go ├── go.mod ├── go.sum ├── nodes.go ├── parser.go ├── range.peg ├── range.peg.go └── script └── ci /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode 2 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: go 2 | go: 3 | - tip 4 | 5 | script: script/ci 6 | install: 7 | - mkdir -p $GOPATH/src/vbom.ml/util 8 | - git clone https://github.com/fvbommel/util $GOPATH/src/vbom.ml/util 9 | - go get -t -v ./... 10 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | Contributing 2 | ============ 3 | 4 | If you would like to contribute code to this project you can do so through 5 | GitHub by forking the repository and sending a pull request. 6 | 7 | When submitting code, please make every effort to follow existing conventions 8 | and style in order to keep the code as readable as possible. 9 | 10 | Before your code can be accepted into the project you must also sign the 11 | [Individual Contributor License Agreement (CLA)][1]. 12 | 13 | [1]: https://spreadsheets.google.com/spreadsheet/viewform?formkey=dDViT2xzUHAwRkI3X3k5Z0lQM091OGc6MQ&ndplr=1 14 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Copyright 2014 Square 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Grange 2 | ====== 3 | 4 | Grange implements a modern subset of the range query language. It is an 5 | expressive grammar for selecting information out of arbitrary, self-referential 6 | metadata. It was developed for querying information about hosts across 7 | datacenters. 8 | 9 | %{has(DC;east) & has(TYPE;redis)}:DOWN 10 | 11 | See [godocs](https://godoc.org/github.com/square/grange) for usage and 12 | syntax. 13 | 14 | Goals 15 | ----- 16 | 17 | * Easily run cross-platform. 18 | * Error messages when things go wrong. 19 | * Fast. (Looking at you, `clusters`.) 20 | 21 | Development 22 | ----------- 23 | 24 | This is library, so does not export a main function. Run it via tests. 
25 | 26 | export RANGE_SPEC_PATH=/tmp/range-spec 27 | git clone https://github.com/square/range-spec.git $RANGE_SPEC_PATH 28 | 29 | go get github.com/pointlander/peg 30 | 31 | $GOPATH/bin/peg range.peg && go test 32 | -------------------------------------------------------------------------------- /compress.go: -------------------------------------------------------------------------------- 1 | package grange 2 | 3 | import ( 4 | "fmt" 5 | "regexp" 6 | "sort" 7 | "strconv" 8 | "strings" 9 | 10 | "github.com/fvbommel/sortorder" 11 | ) 12 | 13 | var ( 14 | _ = fmt.Println 15 | ) 16 | 17 | // Normalizes a result set into a minimal range expression, such as 18 | // +{foo,bar}.example.com+. 19 | func Compress(nodes *Result) string { 20 | noDomain := []string{} 21 | domains := map[string][]string{} 22 | for node := range nodes.Iter() { 23 | tokens := strings.SplitN(node.(string), ".", 2) 24 | if len(tokens) == 2 { 25 | domains[tokens[1]] = append(domains[tokens[1]], tokens[0]) 26 | } else { 27 | noDomain = append(noDomain, node.(string)) 28 | } 29 | } 30 | sort.Sort(sortorder.Natural(noDomain)) 31 | 32 | result := compressNumeric(noDomain) 33 | var domainKeys = []string{} 34 | for domain, _ := range domains { 35 | domainKeys = append(domainKeys, domain) 36 | } 37 | sort.Sort(sortorder.Natural(domainKeys)) 38 | 39 | for _, domain := range domainKeys { 40 | domainNodes := domains[domain] 41 | sort.Sort(sortorder.Natural(domainNodes)) 42 | domainNodes = compressNumeric(domainNodes) 43 | joined := strings.Join(domainNodes, ",") 44 | if len(domainNodes) > 1 { 45 | joined = "{" + joined + "}" 46 | } 47 | result = append(result, joined+"."+domain) 48 | } 49 | return strings.Join(result, ",") 50 | } 51 | 52 | func numericExpansionFor(prefix string, start string, end string, suffix string) string { 53 | endN, _ := strconv.Atoi(end) 54 | startN, _ := strconv.Atoi(start) 55 | 56 | if startN == endN { 57 | return fmt.Sprintf("%s%s%s", prefix, end, suffix) 58 | } else { 59 | return fmt.Sprintf("%s%s..%d%s", prefix, start, endN, suffix) 60 | } 61 | } 62 | 63 | func compressNumeric(nodes []string) []string { 64 | r := regexp.MustCompile("^(.*?)(\\d+)([^\\d]*)$") 65 | 66 | result := []string{} 67 | currentPrefix := "" 68 | currentSuffix := "" 69 | currentNstr := "" 70 | start := "" 71 | startN := -1 72 | currentN := -1 73 | 74 | flush := func() { 75 | if startN > -1 { 76 | result = append(result, numericExpansionFor(currentPrefix, start, currentNstr, currentSuffix)) 77 | startN = -1 78 | currentPrefix = "" 79 | currentSuffix = "" 80 | currentN = -1 81 | currentNstr = "" 82 | } 83 | } 84 | 85 | for _, node := range nodes { 86 | match := r.FindStringSubmatch(node) 87 | 88 | if match == nil { 89 | flush() 90 | result = append(result, node) 91 | } else { 92 | prefix := match[1] 93 | n := match[2] 94 | suffix := match[3] 95 | 96 | if prefix != currentPrefix || suffix != currentSuffix { 97 | flush() 98 | } 99 | 100 | //if len(n) != len(currentNstr) { 101 | //flush 102 | newN, _ := strconv.Atoi(n) 103 | 104 | if zeroCount(n) != zeroCount(currentNstr) && len(n) != len(currentNstr) { 105 | flush() 106 | } 107 | 108 | if startN < 0 || newN != currentN+1 { 109 | // first in run 110 | flush() 111 | start = n 112 | startN = newN 113 | } 114 | 115 | currentNstr = n 116 | currentN = newN 117 | currentPrefix = prefix 118 | currentSuffix = suffix 119 | } 120 | } 121 | flush() 122 | return result 123 | } 124 | 125 | func zeroCount(n string) int { 126 | count := 0 127 | for _, c := range n { 128 | if c != '0' { 129 | break 130 | 
} 131 | count++ 132 | } 133 | return count 134 | } 135 | -------------------------------------------------------------------------------- /compress_test.go: -------------------------------------------------------------------------------- 1 | package grange 2 | 3 | import ( 4 | "bufio" 5 | "fmt" 6 | "os" 7 | "path/filepath" 8 | "strings" 9 | "testing" 10 | ) 11 | 12 | type RangeSpec struct { 13 | path string 14 | line int 15 | expr string 16 | results Result 17 | } 18 | 19 | func (spec *RangeSpec) String() string { 20 | return fmt.Sprintf("%s:%d", spec.path, spec.line) 21 | } 22 | 23 | func (spec *RangeSpec) Ignore(ignore_list []string) bool { 24 | for _, ignore := range ignore_list { 25 | if strings.HasSuffix(spec.String(), ignore) { 26 | return true 27 | } 28 | } 29 | return false 30 | } 31 | 32 | func TestCompress(t *testing.T) { 33 | spec_dir := os.Getenv("RANGE_SPEC_PATH") 34 | if spec_dir == "" { 35 | // Skip compress tests 36 | fmt.Fprintln(os.Stderr, "Skipping Compress() tests, RANGE_SPEC_PATH not set.") 37 | return 38 | } 39 | 40 | filepath.Walk(spec_dir+"/spec/compress", func(path string, info os.FileInfo, err error) error { 41 | if !info.IsDir() { 42 | return nil 43 | } 44 | 45 | specs, err := filepath.Glob(path + "/*.spec") 46 | if err == nil && specs != nil { 47 | for _, spec := range specs { 48 | loadSpec(t, spec) 49 | } 50 | } 51 | return nil 52 | }) 53 | } 54 | 55 | func runSpec(t *testing.T, spec RangeSpec) { 56 | actual := Compress(&spec.results) 57 | 58 | if actual != spec.expr { 59 | t.Errorf("failed %s:%d\n got: %s\nwant: %s", 60 | spec.path, spec.line, actual, spec.expr) 61 | } 62 | } 63 | 64 | func loadSpec(t *testing.T, specpath string) { 65 | file, _ := os.Open(specpath) 66 | scanner := bufio.NewScanner(file) 67 | currentSpec := RangeSpec{results: NewResult(), path: specpath} 68 | 69 | line := 0 70 | for scanner.Scan() { 71 | line++ 72 | if strings.HasPrefix(strings.Trim(scanner.Text(), " "), "#") { 73 | continue 74 | } else if scanner.Text() == "" { 75 | runSpec(t, currentSpec) 76 | currentSpec = RangeSpec{results: NewResult(), path: specpath} 77 | } else { 78 | if currentSpec.expr == "" { 79 | currentSpec.expr = scanner.Text() 80 | currentSpec.line = line 81 | } else { 82 | currentSpec.results.Add(scanner.Text()) 83 | } 84 | } 85 | } 86 | if currentSpec.expr != "" { 87 | runSpec(t, currentSpec) 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /doc.go: -------------------------------------------------------------------------------- 1 | /* 2 | Grange implements a modern subset of the range query language. It is an 3 | expressive grammar for selecting information out of arbitrary, self-referential 4 | metadata. It was developed for querying information about hosts across 5 | datacenters. 6 | 7 | Basics 8 | 9 | A range query operates on a state containing clusters. 10 | 11 | state := grange.NewState() 12 | state.AddCluster("a", Cluster{ 13 | CLUSTER: []string{"a", "b", "c"}, 14 | TYPE: []string{"letters"}, 15 | }) 16 | result, err = state.Query("%a") // "a", "b", "c" 17 | result, err = state.Query("%a:KEYS") // "CLUSTER", "TYPE" 18 | result, err = state.Query("%a:TYPE") // "letters" 19 | 20 | Range also allows for a default cluster (traditionally named GROUPS), that can 21 | be accessed with some shortcut syntax, documented below. 22 | 23 | Values can also be range expressions, so that clusters can be defined in terms 24 | of each other ("self-referential"). 
25 |
26 |     state := grange.NewState()
27 |     state.AddCluster("down", Cluster{ CLUSTER: []string{"host1"} })
28 |     state.AddCluster("dc1", Cluster{ CLUSTER: []string{"@dc1 - %down"} })
29 |
30 |     result, err := state.Query("%dc1") // "host2" (assuming GROUPS maps dc1 to host1..2)
31 |
32 | For an example usage of this library, see
33 | https://github.com/square/grange-server
34 |
35 | Syntax
36 |
37 |   host1         - value constant, returns itself.
38 |   host1,host2   - union, concatenates both sides.
39 |   host1..3      - numeric expansion.
40 |   a{b,c}d       - brace expansion, works just like your shell.
41 |   (a,b) & a     - returns intersection of both sides.
42 |   (a,b) - a     - returns left side minus right side.
43 |   /abc/         - regex match using RE2 semantics. When used on the right
44 |                   side of an operator, filters the left side values using the
45 |                   regex. When used by itself, matches all values in the
46 |                   default cluster.
47 |   %dc1          - cluster lookup, returns the values at CLUSTER key in "dc1"
48 |                   cluster.
49 |   %dc1:KEYS     - returns all available keys for a cluster.
50 |   %dc1:SOMEKEY  - returns values at SOMEKEY key.
51 |   %dc1:{A,B}    - returns values at both A and B keys. Query inside braces can
52 |                   be any range expression.
53 |   @dc1          - key lookup in default cluster, equivalent to %GROUPS:dc1.
54 |   $SOMEKEY      - looks up values from SOMEKEY in the current cluster when
55 |                   used as a cluster value. When used at top-level, the
56 |                   default cluster is used.
57 |   ?host1        - returns all keys in the default cluster that contain host1.
58 |   clusters(h1)  - returns all clusters for which h1 is present in the
59 |                   CLUSTER key. Parameter can be any range expression.
60 |   has(KEY;val)  - returns all clusters where val is present at KEY.
61 |   count(EXPR)   - returns the number of results returned by EXPR.
62 |   allclusters() - returns the names of all clusters.
63 |   q(x://blah)   - quote a constant value; the parameter will be returned as
64 |                   is and not evaluated as a range expression. Useful for
65 |                   storing metadata in clusters.
66 |
67 | All of the above can be combined to form highly expressive queries.
68 |
69 |     %{has(DC;east) & has(TYPE;redis)}:DOWN
70 |       - all down redis nodes in the east datacenter.
71 |
72 |     has(TYPE;%{clusters(host1)}:TYPE)
73 |       - all clusters with types matching the clusters of host1.
74 |
75 |     %{clusters(/foo/)}:{DOC,OWNER}
76 |       - OWNER and DOC values for all clusters on all hosts matching "foo".
77 |
78 | Differences From Libcrange
79 |
80 | A number of libcrange features have been deliberately omitted from grange,
81 | either because they are archaic features of the language, or they are
82 | mis-aligned with the goals of this library.
83 |
84 |   - ^ "admin" operator is not supported. Not a useful concept anymore.
85 |   - # "hash" operator is not supported. Normal function calls are sufficient.
86 |   - Uses RE2 regular expressions rather than PCRE. RE2 is not as fully
87 |     featured, but guarantees that searches run in time linear in the size of
88 |     the input. Regexes should not be used often anyway: prefer explicit
89 |     metadata.
90 |   - Non-deterministic functions, in particular functions that make network
91 |     calls. This library aims to provide fast query performance, which is much
92 |     harder when dealing with non-determinism. Clients who wish to emulate
93 |     this behaviour should either calculate function results upfront and
94 |     import them into the state, or post-process results.
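Putting the pieces above together, a rough end-to-end sketch (the cluster
contents are invented purely for illustration; Compress, defined elsewhere in
this package, renders a result set back into a compact range expression):

    state := grange.NewState()
    state.AddCluster("GROUPS", Cluster{"dc1": []string{"host1..4"}})
    state.AddCluster("redis", Cluster{
        "CLUSTER": []string{"host1..2"},
        "TYPE":    []string{"redis"},
    })

    // Optional, but recommended for large states; Query is only documented as
    // thread-safe once the cache has been primed.
    for _, e := range state.PrimeCache() {
        fmt.Println("cache warning:", e)
    }

    result, err := state.Query("%{has(TYPE;redis)} & @dc1")
    if err == nil {
        fmt.Println(grange.Compress(&result)) // "host1..2"
    }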
95 | 96 | */ 97 | package grange 98 | -------------------------------------------------------------------------------- /evaluator.go: -------------------------------------------------------------------------------- 1 | package grange 2 | 3 | import ( 4 | "errors" 5 | "fmt" 6 | "regexp" 7 | "strconv" 8 | "strings" 9 | 10 | "sync" 11 | 12 | "time" 13 | 14 | "github.com/deckarep/golang-set" 15 | "github.com/orcaman/concurrent-map" 16 | ) 17 | 18 | // State holds data that queries operate over. Queries in grange are 19 | // deterministic, so the same query will always return the same result for a 20 | // given state. Clients are expected to build their own state to query from 21 | // their own datasource, such as a database or files on disk. 22 | // 23 | // State maintains an internal cache of expanded values to speed up queries. 24 | // After constructing a large state it is recommended to call PrimeCache() 25 | // before querying, otherwise initial queries will likely take longer than 26 | // later ones as the cache is built up incrementally. 27 | type State struct { 28 | clusters map[string]Cluster 29 | defaultCluster string 30 | 31 | // Populated lazily as groups are evaluated. They won't change unless state 32 | // changes. 33 | // clusterCache map[string]map[string]*Result 34 | // To make it concurrent read write capable , using concurrent-map. 35 | // As concurrent-map stores value type as interface, 36 | // we need to cast them back whenever we retrive values 37 | // cmap.ConcurrentMap is of type map[string] interface 38 | clusterCache cmap.ConcurrentMap 39 | 40 | // clusters() query is expensive, we want to cache results of each such query 41 | // var cachedClusterQueryResults map[string] Result 42 | cachedCQR cmap.ConcurrentMap 43 | metrics metrics 44 | } 45 | 46 | type metrics struct { 47 | // useful to know how old state is 48 | initializedAt time.Time 49 | 50 | cacheInitializedAt time.Time 51 | errorsDuringCacheBuild int 52 | 53 | buildTimeForCache float64 54 | buildTimeForClusterCache float64 55 | buildTimeForCachedCQR float64 56 | 57 | numberOfTruncatedResults int64 58 | } 59 | 60 | // A Cluster is mapping of arbitrary keys to arrays of values. The only 61 | // required key is CLUSTER, which is the default set of values for the cluster. 62 | type Cluster map[string][]string 63 | 64 | // A set of values returned by a query. The size of this set is limited by 65 | // MaxResults. 66 | type Result struct { 67 | mapset.Set 68 | } 69 | 70 | var ( 71 | // Maximum number of characters that grange will try to parse in a query. 72 | // Queries longer than this will be rejected. This limit also applies to 73 | // cluster and group names and values. Combined with MaxResults, this limits 74 | // result sizes to approximately 4MB. 75 | MaxQuerySize = 4000 76 | 77 | // The maximum number of results a query can return. Execution will be 78 | // short-circuited once this many results have been gathered. No error will 79 | // be returned. 80 | MaxResults = 10000 81 | 82 | // Maximum number of subqueries that will be evaluated, including evaluation 83 | // of cluster values. If this is exceeded, an error will be returned. 84 | // Primarily useful for aborting cycles, but also can shortcut really 85 | // expensive queries. This should not be exceeded in normal operation. 86 | MaxQueryDepth = 100 87 | 88 | // The default cluster for new states, used by @ and ? syntax. Can be changed 89 | // per-state using SetDefaultCluster. 
90 | DefaultCluster = "GROUPS" 91 | 92 | // Parallelism to be used while building primeCache 93 | primeCacheParallelismFactor = 2 94 | ) 95 | 96 | // Clusters is a getter for all clusters that have been added to the state. 97 | // There isn't really a good reason to use this other than for debugging 98 | // purposes. 99 | func (s *State) Clusters() map[string]Cluster { 100 | return s.clusters 101 | } 102 | 103 | // NewState creates a new state to be passed into EvalRange. This will need to 104 | // be used at least once before you can query anything. 105 | 106 | func NewState() State { 107 | state := State{ 108 | clusters: map[string]Cluster{}, 109 | defaultCluster: DefaultCluster, 110 | metrics: metrics{}, 111 | } 112 | state.metrics.initializedAt = time.Now() 113 | state.ResetCache() 114 | return state 115 | } 116 | 117 | // NewResult is mostly used internally, but is handy in testing scenarios when 118 | // you need to compare a query result to a known value. 119 | func NewResult(args ...interface{}) Result { 120 | return Result{mapset.NewSetFromSlice(args)} 121 | } 122 | 123 | // AddCluster adds a new cluster to the state and resets the cache. 124 | func (state *State) AddCluster(name string, c Cluster) { 125 | state.clusters[name] = c 126 | state.ResetCache() 127 | } 128 | 129 | // Changes the default cluster for the state. 130 | func (state *State) SetDefaultCluster(name string) { 131 | state.defaultCluster = name 132 | } 133 | 134 | // PrimeCache traverses over the entire state to expand all values and store 135 | // them in the state's cache. Subsequent queries will be able to use the cache 136 | // immediately, rather than having to build it up incrementally. 137 | // 138 | // It returns all errors encountered during the traverse. This isn't 139 | // necessarily a critical problem, often errors will be in obscure keys, but 140 | // you should probably try to fix them. 141 | func (state *State) PrimeCache() []error { 142 | // Limiting parallelism to 2 143 | // splitting clusters in slices and spawn go routine for each 144 | startTime := time.Now() 145 | clusters := state.clusterNamesAsArray() 146 | arrayOfClusterSlices := splitIntoSlices(clusters, primeCacheParallelismFactor) 147 | var wg sync.WaitGroup 148 | resultCh := make(chan mapset.Set, primeCacheParallelismFactor) 149 | errorCh := make(chan []error, primeCacheParallelismFactor) 150 | defer close(resultCh) 151 | defer close(errorCh) 152 | for _, slice := range arrayOfClusterSlices { 153 | wg.Add(1) 154 | go func(s []string) { 155 | defer wg.Done() 156 | res, err := buildPrimeClusterCacheForSlice(state, s) 157 | resultCh <- res 158 | errorCh <- err 159 | }(slice) 160 | } 161 | done := make(chan interface{}) 162 | go func() { 163 | wg.Wait() 164 | // sleep makes thats buffers in resultCh and errorCh are read 165 | // time.Sleep(1*time.Millisecond) 166 | close(done) 167 | }() 168 | results := mapset.NewSet() 169 | errors := []error{} 170 | 171 | Loop: 172 | for { 173 | select { 174 | case r := <-resultCh: 175 | results = results.Union(r) 176 | case err := <-errorCh: 177 | errors = append(errors, err...) 
178 | case <-done: 179 | if len(resultCh) == 0 && len(errorCh) == 0 { 180 | break Loop 181 | } 182 | } 183 | } 184 | // end of Loop: 185 | 186 | wg.Wait() 187 | state.metrics.buildTimeForClusterCache = time.Since(startTime).Seconds() 188 | state.populateCachedCQRforSet(results) 189 | state.metrics.buildTimeForCachedCQR = 190 | time.Since(startTime).Seconds() - state.metrics.buildTimeForClusterCache 191 | state.metrics.buildTimeForCache = time.Since(startTime).Seconds() 192 | state.metrics.errorsDuringCacheBuild = len(errors) 193 | state.metrics.cacheInitializedAt = time.Now() 194 | return errors 195 | } 196 | 197 | // StateMetrics returns metrics related to state, cache as map[string]int64 198 | func (state *State) StateMetrics() map[string]int64 { 199 | metrics := make(map[string]int64) 200 | metrics["stateInitializedAt"] = state.metrics.initializedAt.Unix() 201 | metrics["cacheInitializedAt"] = state.metrics.cacheInitializedAt.Unix() 202 | metrics["numberOfClusters"] = int64(len(state.clusters)) 203 | metrics["numberOfCachedClusters"] = int64(state.clusterCache.Count()) 204 | metrics["numberOfcachedCQR"] = int64(state.cachedCQR.Count()) 205 | metrics["cacheTotalBuildTimeInSeconds"] = int64(state.metrics.buildTimeForCache) 206 | metrics["cacheBuildTimeForClustersInSeconds"] = int64(state.metrics.buildTimeForClusterCache) 207 | metrics["cacheBuildTimeForCQRInSeconds"] = int64(state.metrics.buildTimeForCachedCQR) 208 | metrics["errorsDuringCacheBuild"] = int64(state.metrics.errorsDuringCacheBuild) 209 | metrics["numberOfTruncatedResults"] = state.metrics.numberOfTruncatedResults 210 | return metrics 211 | } 212 | 213 | // buildPrimeClusterCacheForSlice is used internally for building ClusterCache. 214 | // returns results of CLUSTER_NAME:CLUSTER , which is used for building cachedCQR 215 | // also returns array of errors encounter during parsing clusters 216 | func buildPrimeClusterCacheForSlice(state *State, clusters []string) (mapset.Set, []error) { 217 | var errs []error 218 | res := mapset.NewSet() 219 | for _, clusterName := range clusters { 220 | context := newContext() 221 | context.currentClusterName = clusterName 222 | for key, _ := range state.clusters[clusterName] { 223 | err := clusterLookup(state, &context, key) 224 | // we are interested only results for key CLUSTER 225 | if key == "CLUSTER" { 226 | res = res.Union(context.currentResult.Set) 227 | } 228 | if err != nil { 229 | errs = append(errs, err) 230 | } 231 | } 232 | } 233 | return res, errs 234 | } 235 | 236 | func (state *State) clusterNamesAsArray() []string { 237 | i := 0 238 | ret := make([]string, len(state.clusters)) 239 | for name := range state.clusters { 240 | ret[i] = name 241 | i++ 242 | } 243 | return ret 244 | } 245 | 246 | // splices array into ~'count' of slices 247 | // if len(array) is less than count, 248 | // we splice array into slices of length 1 249 | // if len(array) is not multiple of count, 250 | // we end up returning more than 'count' number of slices 251 | func splitIntoSlices(array []string, count int) [][]string { 252 | var ret [][]string 253 | lengthOfArray := len(array) 254 | if lengthOfArray == 0 { 255 | return append(ret, array) 256 | } 257 | sliceLength := lengthOfArray / count 258 | if sliceLength == 0 { 259 | sliceLength = 1 260 | } 261 | for i := 0; i < lengthOfArray; i += sliceLength { 262 | if i+sliceLength < lengthOfArray { 263 | ret = append(ret, array[i:i+sliceLength]) 264 | } else { 265 | ret = append(ret, array[i:lengthOfArray]) 266 | } 267 | } 268 | return ret 269 | } 270 | 
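// To make the splitIntoSlices behaviour above concrete (values chosen purely
// for illustration): with five cluster names and count=2, sliceLength is
// 5/2 = 2, so the loop produces three slices, one more than 'count', which is
// the "not a multiple of count" case called out in the function's comment:
//
//	splitIntoSlices([]string{"a", "b", "c", "d", "e"}, 2)
//	// => [][]string{{"a", "b"}, {"c", "d"}, {"e"}}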
271 | // ResetCache clears cached expansions. The public API for modifying state 272 | // already calls this when necessary, so you shouldn't really have a need to 273 | // call this. 274 | func (state *State) ResetCache() { 275 | // state.clusterCache = map[string]map[string]*Result{} 276 | state.clusterCache = cmap.New() 277 | state.cachedCQR = cmap.New() 278 | } 279 | 280 | // Query is the main interface to grange. See the main package documentation 281 | // for query language specification. On error, an empty result is returned 282 | // alongside the error. Queries that are longer than MaxQuerySize are 283 | // considered errors. 284 | // 285 | // The size of the returned result is capped by MaxResults. 286 | // 287 | // This method is only thread-safe if PrimeCache() has previously been called 288 | // on the state. 289 | func (state *State) Query(input string) (Result, error) { 290 | if len(input) > MaxQuerySize { 291 | return NewResult(), 292 | errors.New(fmt.Sprintf("Query is too long, max length is %d", MaxQuerySize)) 293 | } 294 | 295 | context := newContext() 296 | return evalRangeWithContext(input, state, &context) 297 | } 298 | 299 | // Used for populating state.cachedCQR 300 | // Go over all clusters in state.clusters, look if element is part of cluster definition, 301 | // if yes, add name of cluster to cachedCQR[element] 302 | // parse through all clusters before adding to state.cachedCQR 303 | // as a precaution, we do not want to cache empty results 304 | func (state *State) populateCachedCQRforSet(set mapset.Set) { 305 | context := newContext() 306 | // skip processing for already cached elements 307 | notCached := make(map[string]Result) 308 | for element := range set.Iter() { 309 | if !state.cachedCQR.Has(element.(string)) { 310 | notCached[element.(string)] = NewResult() 311 | } 312 | } 313 | 314 | if len(notCached) == 0 { 315 | return 316 | } 317 | 318 | for clusterName, _ := range state.clusters { 319 | subContext := context.subCluster(clusterName) 320 | clusterLookup(state, &subContext, "CLUSTER") 321 | for key := range notCached { 322 | if subContext.currentResult.Contains(key) { 323 | notCached[key].Add(clusterName) 324 | } 325 | } 326 | } 327 | for key, val := range notCached { 328 | if val.Cardinality() > 0 { 329 | state.cachedCQR.Set(key, val) 330 | } 331 | } 332 | } 333 | 334 | func (state *State) addValueToCachedCQR(key string, value string) { 335 | if tmp, ok := state.cachedCQR.Get(key); ok { 336 | tmp.(Result).Add(value) 337 | } else { 338 | state.cachedCQR.Set(key, NewResult(value)) 339 | } 340 | } 341 | 342 | type tooManyResults struct{} 343 | 344 | type evalContext struct { 345 | currentClusterName string 346 | currentResult Result 347 | workingResult *Result 348 | depth int 349 | } 350 | 351 | func newContext() evalContext { 352 | return evalContext{currentResult: NewResult()} 353 | } 354 | 355 | func parseRange(input string) (parserNode, error) { 356 | r := &rangeQuery{Buffer: input} 357 | r.Init() 358 | if err := r.Parse(); err != nil { 359 | return nil, err 360 | } 361 | r.Execute() 362 | if len(r.nodeStack) > 0 { 363 | return r.nodeStack[0], nil 364 | } else { 365 | return nodeNull{}, nil 366 | } 367 | } 368 | 369 | func evalRangeWithContext(input string, state *State, context *evalContext) (Result, error) { 370 | err := evalRangeInplace(input, state, context) 371 | 372 | return context.currentResult, err 373 | } 374 | 375 | // Useful internally so that results do not need to be copied all over the place 376 | func evalRangeInplace(input string, 
state *State, context *evalContext) (err error) { 377 | if context.depth > MaxQueryDepth { 378 | return errors.New("Query exceeded maximum recursion limit") 379 | } 380 | node, parseError := parseRange(input) 381 | if parseError != nil { 382 | return errors.New("Could not parse query: " + input) 383 | } 384 | 385 | defer func() { 386 | if r := recover(); r != nil { 387 | switch r.(type) { 388 | case tooManyResults: 389 | // No error returned, we just chop off the results 390 | state.metrics.numberOfTruncatedResults += 1 391 | err = nil 392 | case error: 393 | err = r.(error) 394 | default: 395 | panic(r) 396 | } 397 | } 398 | }() 399 | 400 | return node.(evalNode).visit(state, context) 401 | } 402 | 403 | func (c evalContext) hasResults() bool { 404 | return c.currentResult.Cardinality() == 0 405 | } 406 | 407 | func (n nodeBraces) visit(state *State, context *evalContext) error { 408 | leftContext := context.sub() 409 | rightContext := context.sub() 410 | middleContext := context.sub() 411 | 412 | if err := n.left.(evalNode).visit(state, &leftContext); err != nil { 413 | return err 414 | } 415 | if err := n.node.(evalNode).visit(state, &middleContext); err != nil { 416 | return err 417 | } 418 | if err := n.right.(evalNode).visit(state, &rightContext); err != nil { 419 | return err 420 | } 421 | 422 | if leftContext.hasResults() { 423 | leftContext.addResult("") 424 | } 425 | if middleContext.hasResults() { 426 | middleContext.addResult("") 427 | } 428 | if rightContext.hasResults() { 429 | rightContext.addResult("") 430 | } 431 | 432 | for l := range leftContext.resultIter() { 433 | for m := range middleContext.resultIter() { 434 | for r := range rightContext.resultIter() { 435 | context.addResult(fmt.Sprintf("%s%s%s", l, m, r)) 436 | } 437 | } 438 | } 439 | 440 | return nil 441 | } 442 | 443 | // Hack, see note on nodeBraceStart definition in nodes.go 444 | func (n nodeBraceStart) visit(state *State, context *evalContext) error { 445 | return nil 446 | } 447 | 448 | func (n nodeLocalClusterLookup) visit(state *State, context *evalContext) error { 449 | var evalErr error 450 | 451 | subContext := context.sub() 452 | evalErr = n.node.(evalNode).visit(state, &subContext) 453 | if evalErr != nil { 454 | return evalErr 455 | } 456 | 457 | for key := range subContext.resultIter() { 458 | if context.currentClusterName == "" { 459 | context.addResult("$" + key.(string)) 460 | } else { 461 | evalErr = clusterLookup(state, context, key.(string)) 462 | if evalErr != nil { 463 | return evalErr 464 | } 465 | } 466 | } 467 | 468 | return nil 469 | } 470 | 471 | func (n nodeClusterLookup) visit(state *State, context *evalContext) error { 472 | var evalErr error 473 | 474 | subContext := context.sub() 475 | evalErr = n.node.(evalNode).visit(state, &subContext) 476 | if evalErr != nil { 477 | return evalErr 478 | } 479 | 480 | keyContext := context.sub() 481 | evalErr = n.key.(evalNode).visit(state, &keyContext) 482 | if evalErr != nil { 483 | return evalErr 484 | } 485 | 486 | for clusterName := range subContext.resultIter() { 487 | context.currentClusterName = clusterName.(string) 488 | for key := range keyContext.resultIter() { 489 | evalErr = clusterLookup(state, context, key.(string)) 490 | if evalErr != nil { 491 | return evalErr 492 | } 493 | } 494 | } 495 | 496 | return nil 497 | } 498 | 499 | func (c evalContext) sub() evalContext { 500 | ret := newContext() 501 | ret.currentClusterName = c.currentClusterName 502 | ret.depth = c.depth + 1 503 | return ret 504 | } 505 | 506 | func (c 
evalContext) subCluster(clusterName string) evalContext { 507 | ret := c.sub() 508 | ret.currentClusterName = clusterName 509 | return ret 510 | } 511 | 512 | func (n nodeOperator) visit(state *State, context *evalContext) error { 513 | switch n.op { 514 | case operatorIntersect: 515 | 516 | leftContext := context.sub() 517 | if err := n.left.(evalNode).visit(state, &leftContext); err != nil { 518 | return err 519 | } 520 | 521 | if leftContext.currentResult.Cardinality() == 0 { 522 | // Optimization: no need to compute right side if left side is empty 523 | return nil 524 | } 525 | 526 | rightContext := context.sub() 527 | // nodeRegexp needs to know about LHS to filter correctly 528 | rightContext.workingResult = &leftContext.currentResult 529 | if err := n.right.(evalNode).visit(state, &rightContext); err != nil { 530 | return err 531 | } 532 | 533 | for x := range leftContext.currentResult.Intersect(rightContext.currentResult.Set).Iter() { 534 | context.addResult(x.(string)) 535 | } 536 | case operatorSubtract: 537 | leftContext := context.sub() 538 | if err := n.left.(evalNode).visit(state, &leftContext); err != nil { 539 | return err 540 | } 541 | 542 | if leftContext.currentResult.Cardinality() == 0 { 543 | // Optimization: no need to compute right side if left side is empty 544 | return nil 545 | } 546 | 547 | rightContext := context.sub() 548 | // nodeRegexp needs to know about LHS to filter correctly 549 | rightContext.workingResult = &leftContext.currentResult 550 | if err := n.right.(evalNode).visit(state, &rightContext); err != nil { 551 | return err 552 | } 553 | 554 | for x := range leftContext.currentResult.Difference(rightContext.currentResult.Set).Iter() { 555 | context.addResult(x.(string)) 556 | } 557 | case operatorUnion: 558 | if err := n.left.(evalNode).visit(state, context); err != nil { 559 | return err 560 | } 561 | if err := n.right.(evalNode).visit(state, context); err != nil { 562 | return err 563 | } 564 | } 565 | return nil 566 | } 567 | 568 | func (n nodeConstant) visit(state *State, context *evalContext) error { 569 | context.addResult(n.val) 570 | return nil 571 | } 572 | 573 | var ( 574 | numericRangeRegexp = regexp.MustCompile("^(.*?)(\\d+)\\.\\.([^\\d]*?)?(\\d+)(.*)$") 575 | ) 576 | 577 | func (n nodeText) visit(state *State, context *evalContext) error { 578 | match := numericRangeRegexp.FindStringSubmatch(n.val) 579 | 580 | if len(match) == 0 { 581 | context.addResult(n.val) 582 | return nil 583 | } 584 | 585 | leftStr := match[1] 586 | leftStrToMatch := match[1] 587 | leftN := match[2] 588 | rightStr := match[3] 589 | rightN := match[4] 590 | trailing := match[5] 591 | 592 | // Equalize the numeric portions. n10..2 will initally be {"n", "10", 2"}, but 593 | // needs to be converted to {"n1", "0", "2"}. 
594 | for { 595 | if len(leftN) <= len(rightN) { 596 | break 597 | } 598 | 599 | leftStr += leftN[0:1] 600 | leftN = leftN[1:] 601 | } 602 | 603 | // a1..a4 is valid, a1..b4 is invalid 604 | if !strings.HasSuffix(leftStrToMatch, rightStr) { 605 | context.addResult(n.val) 606 | return nil 607 | } 608 | 609 | width := strconv.Itoa(len(leftN)) 610 | low, _ := strconv.Atoi(leftN) 611 | high, _ := strconv.Atoi(rightN) 612 | 613 | for x := low; x <= high; x++ { 614 | context.addResult(fmt.Sprintf("%s%0"+width+"d%s", leftStr, x, trailing)) 615 | } 616 | 617 | return nil 618 | } 619 | 620 | func (n nodeGroupQuery) visit(state *State, context *evalContext) error { 621 | subContext := context.sub() 622 | if err := n.node.(evalNode).visit(state, &subContext); err != nil { 623 | return err 624 | } 625 | 626 | lookingFor := subContext.currentResult 627 | 628 | // It's theoretically nicer to re-use clusterLookup here, but it's an order 629 | // of magnitude slower than poking around the cache directly. 630 | clusterName := state.defaultCluster 631 | 632 | var cachedClusterDetails cmap.ConcurrentMap 633 | if tmp, ok := state.clusterCache.Get(clusterName); ok { 634 | cachedClusterDetails = tmp.(cmap.ConcurrentMap) 635 | } else { 636 | cachedClusterDetails = cmap.New() 637 | state.clusterCache.Set(clusterName, cachedClusterDetails) 638 | } 639 | 640 | for groupName, group := range state.clusters[state.defaultCluster] { 641 | key := groupName 642 | var results *Result 643 | if tmp, ok := cachedClusterDetails.Get(key); ok { 644 | results = tmp.(*Result) 645 | } else { 646 | subContext := context.subCluster(state.defaultCluster) 647 | for _, value := range group { 648 | err := evalRangeInplace(value, state, &subContext) 649 | if err != nil { 650 | return err 651 | } 652 | } 653 | results = &subContext.currentResult 654 | cachedClusterDetails.Set(key, results) 655 | } 656 | 657 | for x := range lookingFor.Iter() { 658 | if results.Contains(x) { 659 | context.addResult(groupName) 660 | break 661 | } 662 | } 663 | } 664 | return nil 665 | } 666 | 667 | func (n nodeFunction) visit(state *State, context *evalContext) error { 668 | switch n.name { 669 | case "allclusters": 670 | if err := n.verifyParams(0); err != nil { 671 | return err 672 | } 673 | for clusterKey, _ := range state.clusters { 674 | context.addResult(clusterKey) 675 | } 676 | case "count": 677 | if err := n.verifyParams(1); err != nil { 678 | return err 679 | } 680 | valueContext := context.sub() 681 | if err := n.params[0].(evalNode).visit(state, &valueContext); err != nil { 682 | return err 683 | } 684 | 685 | context.addResult(strconv.Itoa(valueContext.currentResult.Cardinality())) 686 | case "has": 687 | if err := n.verifyParams(2); err != nil { 688 | return err 689 | } 690 | 691 | keyContext := context.sub() 692 | valueContext := context.sub() 693 | if err := n.params[0].(evalNode).visit(state, &keyContext); err != nil { 694 | return err 695 | } 696 | if err := n.params[1].(evalNode).visit(state, &valueContext); err != nil { 697 | return err 698 | } 699 | 700 | key := (<-keyContext.resultIter()).(string) 701 | 702 | for clusterName, _ := range state.clusters { 703 | subContext := context.subCluster(clusterName) 704 | clusterLookup(state, &subContext, key) 705 | 706 | l := subContext.currentResult.Set 707 | r := valueContext.currentResult.Set 708 | 709 | if l.Intersect(r).Cardinality() > 0 { 710 | context.addResult(clusterName) 711 | } 712 | } 713 | case "clusters": 714 | if err := n.verifyParams(1); err != nil { 715 | return err 716 | } 717 | 
subContext := context.sub() 718 | if err := n.params[0].(evalNode).visit(state, &subContext); err != nil { 719 | return err 720 | } 721 | 722 | lookingFor := subContext.currentResult 723 | context.addSetToResult(state.getResultsFromCachedCQRforSet(lookingFor)) 724 | case "mem": 725 | if err := n.verifyParams(2); err != nil { 726 | return err 727 | } 728 | 729 | clusterContext := context.sub() 730 | valueContext := context.sub() 731 | 732 | if err := n.params[0].(evalNode).visit(state, &clusterContext); err != nil { 733 | return err 734 | } 735 | if err := n.params[1].(evalNode).visit(state, &valueContext); err != nil { 736 | return err 737 | } 738 | 739 | for clusterName := range state.clusters { 740 | subContext := context.subCluster(clusterName) 741 | clusterLookup(state, &subContext, "KEYS") 742 | 743 | for _, clusterKey := range subContext.currentResult.Set.ToSlice() { 744 | clusterKeyContext := subContext.sub() 745 | clusterLookup(state, &clusterKeyContext, clusterKey.(string)) 746 | 747 | if clusterKeyContext.currentResult.Set.Intersect(valueContext.currentResult.Set).Cardinality() > 0 { 748 | context.addResult(clusterKey.(string)) 749 | } 750 | } 751 | } 752 | default: 753 | return errors.New(fmt.Sprintf("Unknown function: %s", n.name)) 754 | } 755 | return nil 756 | } 757 | 758 | func (n nodeFunction) verifyParams(expected int) error { 759 | if len(n.params) != expected { 760 | msg := fmt.Sprintf("Wrong number of params for %s: expected %d, got %d.", 761 | n.name, 762 | expected, 763 | len(n.params), 764 | ) 765 | return errors.New(msg) 766 | } 767 | return nil 768 | } 769 | 770 | func (n nodeRegexp) visit(state *State, context *evalContext) error { 771 | if context.workingResult == nil { 772 | subContext := context.sub() 773 | state.allValues(&subContext) 774 | context.workingResult = &subContext.currentResult 775 | } 776 | 777 | r, err := regexp.Compile(n.val) 778 | 779 | if err != nil { 780 | return err 781 | } 782 | 783 | for x := range context.workingResult.Iter() { 784 | if r.MatchString(x.(string)) { 785 | context.addResult(x.(string)) 786 | } 787 | } 788 | 789 | return nil 790 | } 791 | 792 | func (n nodeNull) visit(state *State, context *evalContext) error { 793 | return nil 794 | } 795 | 796 | func (state *State) allValues(context *evalContext) error { 797 | // Expand everything into the set 798 | return evalRangeInplace("@{%"+state.defaultCluster+":KEYS}", state, context) 799 | } 800 | 801 | func clusterLookup(state *State, context *evalContext, key string) error { 802 | var evalErr error 803 | clusterName := context.currentClusterName 804 | if clusterName == "" { 805 | clusterName = state.defaultCluster 806 | } 807 | cluster := state.clusters[clusterName] 808 | 809 | if key == "KEYS" { 810 | for k, _ := range cluster { 811 | context.currentResult.Add(k) // TODO: addResult 812 | } 813 | return nil 814 | } 815 | 816 | var cachedClusterDetails cmap.ConcurrentMap 817 | if tmp, ok := state.clusterCache.Get(clusterName); ok { 818 | cachedClusterDetails = tmp.(cmap.ConcurrentMap) 819 | } else { 820 | cachedClusterDetails = cmap.New() 821 | state.clusterCache.Set(clusterName, cachedClusterDetails) 822 | } 823 | var results *Result 824 | if tmp, ok := cachedClusterDetails.Get(key); ok { 825 | results = tmp.(*Result) 826 | } else { 827 | clusterExp := cluster[key] // TODO: Error handling 828 | 829 | subContext := context.subCluster(context.currentClusterName) 830 | 831 | for _, value := range clusterExp { 832 | evalErr = evalRangeInplace(value, state, &subContext) 833 | if 
evalErr != nil { 834 | return evalErr 835 | } 836 | } 837 | results = &subContext.currentResult 838 | cachedClusterDetails.Set(key, results) 839 | } 840 | 841 | for x := range results.Iter() { 842 | context.addResult(x.(string)) 843 | } 844 | return nil 845 | } 846 | 847 | func (c *evalContext) addResult(value string) { 848 | if c.currentResult.Cardinality() >= MaxResults { 849 | panic(tooManyResults{}) 850 | } 851 | 852 | if len(value) > MaxQuerySize { 853 | panic(errors.New( 854 | fmt.Sprintf("Value would exceed max query size: %s...", value[0:20]))) 855 | } 856 | 857 | c.currentResult.Add(value) 858 | } 859 | 860 | func (c *evalContext) addSetToResult(set mapset.Set) { 861 | if c.currentResult.Cardinality()+set.Cardinality() >= MaxResults { 862 | panic(tooManyResults{}) 863 | } 864 | for value := range set.Iter() { 865 | c.currentResult.Add(value.(string)) 866 | } 867 | } 868 | 869 | func (c *evalContext) resultIter() <-chan interface{} { 870 | return c.currentResult.Iter() 871 | } 872 | 873 | type evalNode interface { 874 | visit(*State, *evalContext) error 875 | } 876 | 877 | func (state *State) getResultsFromCachedCQRforSet(set mapset.Set) Result { 878 | context := newContext() 879 | state.populateCachedCQRforSet(set) 880 | for name := range set.Iter() { 881 | if subResult, ok := state.cachedCQR.Get(name.(string)); ok { 882 | //add subresults to context.currentResult 883 | context.addSetToResult(subResult.(Result)) 884 | } 885 | } 886 | return context.currentResult 887 | } 888 | -------------------------------------------------------------------------------- /evaluator_test.go: -------------------------------------------------------------------------------- 1 | package grange 2 | 3 | import ( 4 | "fmt" 5 | "reflect" 6 | "strconv" 7 | "strings" 8 | "testing" 9 | 10 | "github.com/deckarep/golang-set" 11 | ) 12 | 13 | func TestEmptyQuery(t *testing.T) { 14 | testEval(t, NewResult(), "", emptyState()) 15 | } 16 | 17 | func TestDefaultCluster(t *testing.T) { 18 | testEval(t, NewResult("b", "c"), "%a", singleCluster("a", Cluster{ 19 | "CLUSTER": []string{"b", "c"}, 20 | })) 21 | } 22 | 23 | func TestExplicitCluster(t *testing.T) { 24 | testEval(t, NewResult("b", "c"), "%a:NODES", singleCluster("a", Cluster{ 25 | "NODES": []string{"b", "c"}, 26 | })) 27 | } 28 | 29 | func TestClusterKeys(t *testing.T) { 30 | testEval(t, NewResult("NODES"), "%a:KEYS", singleCluster("a", Cluster{ 31 | "NODES": []string{"b", "c"}, 32 | })) 33 | } 34 | 35 | func TestClusterKeysMulti(t *testing.T) { 36 | testEval(t, NewResult("a", "b"), "%a:{NODES,TYPE}", singleCluster("a", Cluster{ 37 | "NODES": []string{"a"}, 38 | "TYPE": []string{"b"}, 39 | })) 40 | } 41 | 42 | func TestClusterMissing(t *testing.T) { 43 | testEval(t, NewResult(), "%a", emptyState()) 44 | } 45 | 46 | func TestClusterMissingKey(t *testing.T) { 47 | testEval(t, NewResult(), "%a:NODES", singleCluster("a", Cluster{})) 48 | } 49 | 50 | func TestErrorExplicitCluster(t *testing.T) { 51 | testError(t, "Invalid token in query: \"}\"", "%a:}") 52 | } 53 | 54 | func TestErrorClusterName(t *testing.T) { 55 | testError(t, "Invalid token in query: \"}\"", "%}") 56 | } 57 | 58 | func TestStartingDash(t *testing.T) { 59 | testError(t, "Could not parse query: -foo", "-foo") 60 | } 61 | 62 | func TestHas(t *testing.T) { 63 | testEval(t, NewResult("a", "b"), "has(TYPE;one)", multiCluster(map[string]Cluster{ 64 | "a": Cluster{"TYPE": []string{"one", "two"}}, 65 | "b": Cluster{"TYPE": []string{"two", "one"}}, 66 | "c": Cluster{"TYPE": []string{"three"}}, 67 | })) 
68 | } 69 | 70 | func TestHasIntersect(t *testing.T) { 71 | testEval(t, NewResult("b"), "has(TYPE;one)&b", multiCluster(map[string]Cluster{ 72 | "a": Cluster{"TYPE": []string{"one", "two"}}, 73 | "b": Cluster{"TYPE": []string{"two", "one"}}, 74 | "c": Cluster{"TYPE": []string{"three"}}, 75 | })) 76 | 77 | testEval(t, NewResult("b"), "has(TYPE;two)&has(TYPE;three)", multiCluster(map[string]Cluster{ 78 | "a": Cluster{"TYPE": []string{"one", "two"}}, 79 | "b": Cluster{"TYPE": []string{"two", "one", "three"}}, 80 | "c": Cluster{"TYPE": []string{"three"}}, 81 | })) 82 | } 83 | 84 | func TestIntersectEasy(t *testing.T) { 85 | testEval(t, NewResult("a"), "a & a", emptyState()) 86 | testEval(t, NewResult(), "a & b", emptyState()) 87 | } 88 | 89 | func TestIntersectCluster(t *testing.T) { 90 | testEval(t, NewResult("c"), "%a:L&%a:R", singleCluster("a", Cluster{ 91 | "L": []string{"b", "c"}, 92 | "R": []string{"c", "d"}, 93 | })) 94 | } 95 | 96 | /* 97 | // TODO: Pending 98 | func TestIntersectError(t *testing.T) { 99 | testError(t, "No left side provided for intersection", "&a") 100 | } 101 | */ 102 | 103 | func TestUnionEasy(t *testing.T) { 104 | testEval(t, NewResult("a", "b"), "a,b", emptyState()) 105 | } 106 | 107 | func TestBracesWithUnion(t *testing.T) { 108 | testEval(t, NewResult("a.c", "b.c"), "{a,b}.c", emptyState()) 109 | testEval(t, NewResult("a.b", "a.c"), "a.{b,c}", emptyState()) 110 | testEval(t, NewResult("a.b.d", "a.c.d"), "a.{b,c}.d", emptyState()) 111 | } 112 | 113 | func TestClusterUnion(t *testing.T) { 114 | testEval(t, NewResult("c", "d"), "%a,%b", multiCluster(map[string]Cluster{ 115 | "a": Cluster{"CLUSTER": []string{"c"}}, 116 | "b": Cluster{"CLUSTER": []string{"d"}}, 117 | })) 118 | } 119 | 120 | /* 121 | // TODO: Pending 122 | func TestNoExpandInClusterName(t *testing.T) { 123 | testError(t, "Invalid token in query: \"{\"", "%a-{b,c}") 124 | } 125 | */ 126 | 127 | func TestSelfReferentialCluster(t *testing.T) { 128 | testEval(t, NewResult("b"), "%a", multiCluster(map[string]Cluster{ 129 | "a": Cluster{"CLUSTER": []string{"$ALL"}, "ALL": []string{"b"}}, 130 | })) 131 | } 132 | 133 | func TestSelfReferentialClusterExpression(t *testing.T) { 134 | testEval(t, NewResult("a", "c"), "%a", multiCluster(map[string]Cluster{ 135 | "a": Cluster{ 136 | "CLUSTER": []string{"$ALL - $DOWN"}, 137 | "ALL": []string{"a", "b", "c"}, 138 | "DOWN": []string{"b"}, 139 | }, 140 | })) 141 | } 142 | 143 | func TestGroups(t *testing.T) { 144 | testEval(t, NewResult("a", "b"), "@dc", singleGroup("dc", "a", "b")) 145 | } 146 | 147 | func TestGroupsExpand(t *testing.T) { 148 | testEval(t, NewResult("c"), "@a", multiGroup(Cluster{ 149 | "a": []string{"$b"}, 150 | "b": []string{"c"}, 151 | })) 152 | } 153 | 154 | func TestClusterLookup(t *testing.T) { 155 | testEval(t, NewResult("a"), "%{has(TYPE;db)}", singleCluster("ignore", Cluster{ 156 | "CLUSTER": []string{"a"}, 157 | "TYPE": []string{"db"}, 158 | })) 159 | } 160 | 161 | func TestClusterLookupExplicitKey(t *testing.T) { 162 | testEval(t, NewResult("a"), "%{has(TYPE;db)}:NODES", singleCluster("ignore", Cluster{ 163 | "NODES": []string{"a"}, 164 | "TYPE": []string{"db"}, 165 | })) 166 | } 167 | 168 | func TestClusterLookupDedup(t *testing.T) { 169 | testEval(t, NewResult("one", "two"), "%{has(TYPE;one)}:TYPE", multiCluster(map[string]Cluster{ 170 | "a": Cluster{"TYPE": []string{"one", "two"}}, 171 | "b": Cluster{"TYPE": []string{"two", "one"}}, 172 | "c": Cluster{"TYPE": []string{"three"}}, 173 | })) 174 | } 175 | 176 | func TestGroupsIsCluster(t 
*testing.T) { 177 | testEval(t, NewResult("a"), "%GROUPS:KEYS", singleGroup("a")) 178 | } 179 | 180 | func TestMatchNoContext(t *testing.T) { 181 | testEval(t, NewResult("ab"), "/b/", singleGroup("b", "ab", "c")) 182 | } 183 | 184 | func TestMatchRegexp(t *testing.T) { 185 | testEval(t, NewResult("ab"), "/^.b/", singleGroup("b", "ab", "cab")) 186 | } 187 | 188 | func TestInvalidRegexp(t *testing.T) { 189 | testError2(t, "error parsing regexp: missing argument to repetition operator: `+`", "/+/", emptyState()) 190 | } 191 | 192 | func TestMatchEasy(t *testing.T) { 193 | testEval(t, NewResult("ab", "ba", "abc"), "%cluster & /b/", 194 | singleCluster("cluster", Cluster{ 195 | "CLUSTER": []string{"ab", "ba", "abc", "ccc"}, 196 | })) 197 | } 198 | 199 | func TestMatchReverse(t *testing.T) { 200 | testEval(t, NewResult("ab", "ba", "abc"), "/b/ & @group", 201 | singleGroup("group", "ab", "ba", "abc", "ccc")) 202 | } 203 | 204 | func TestMatchWithSubtract(t *testing.T) { 205 | testEval(t, NewResult("ccc"), "%cluster - /b/", 206 | singleCluster("cluster", Cluster{ 207 | "CLUSTER": []string{"ab", "ba", "abc", "ccc"}, 208 | })) 209 | } 210 | 211 | func TestUnionSubtractLeftAssociative(t *testing.T) { 212 | testEval(t, NewResult("a", "b-a"), "a,b-a", emptyState()) 213 | testEval(t, NewResult("b"), "a , b - a", emptyState()) 214 | } 215 | 216 | func TestCombineWithBraces(t *testing.T) { 217 | testEval(t, NewResult("b"), "b - %{a}", emptyState()) 218 | } 219 | 220 | func TestGroupLookupAndSubtraction(t *testing.T) { 221 | testEval(t, NewResult("a"), "{a} - b", emptyState()) 222 | } 223 | 224 | func TestInvalidLex(t *testing.T) { 225 | testError(t, "No closing / for match", "/") 226 | } 227 | 228 | func TestClusters(t *testing.T) { 229 | testEval(t, NewResult("a", "b"), "clusters(one)", multiCluster(map[string]Cluster{ 230 | "a": Cluster{"CLUSTER": []string{"two", "one"}}, 231 | "b": Cluster{"CLUSTER": []string{"$ALL"}, "ALL": []string{"one"}}, 232 | "c": Cluster{"CLUSTER": []string{"three"}}, 233 | })) 234 | } 235 | 236 | func TestClusterQueryCache(t *testing.T) { 237 | state := multiCluster(map[string]Cluster{ 238 | "a": Cluster{"CLUSTER": []string{"two", "one"}}, 239 | "b": Cluster{"CLUSTER": []string{"$ALL"}, "ALL": []string{"one"}}, 240 | "c": Cluster{"CLUSTER": []string{"three"}}, 241 | "d": Cluster{"CLUSTER": []string{"four"}}, 242 | }) 243 | state.Query("clusters(one)") 244 | expected_length := 1 245 | actual_length_of_keys := len(state.cachedCQR.Keys()) 246 | if actual_length_of_keys != expected_length { 247 | t.Errorf("Expected '%d' Key(s) in 'state.cachedCQR', but got %d", 248 | expected_length, actual_length_of_keys) 249 | } 250 | } 251 | 252 | func TestPopulateCachedCQRforSet(t *testing.T) { 253 | state := multiCluster(map[string]Cluster{ 254 | "a": Cluster{"CLUSTER": []string{"two", "one"}}, 255 | "b": Cluster{"CLUSTER": []string{"$ALL"}, "ALL": []string{"one"}}, 256 | "c": Cluster{"CLUSTER": []string{"three"}}, 257 | "d": Cluster{"CLUSTER": []string{"four"}}, 258 | }) 259 | state.populateCachedCQRforSet(NewResult("one")) 260 | tmp, _ := state.cachedCQR.Get("one") 261 | actual := mapset.NewSetFromSlice(tmp.(Result).ToSlice()) 262 | expected := mapset.NewSetFromSlice([]interface{}{"a", "b"}) 263 | if !expected.Equal(actual) { 264 | t.Errorf("Expected state.CachedCQR for 'one' to be : '%v', but got '%v'", expected, actual) 265 | } 266 | } 267 | 268 | func TestClusterQueryCacheTwo(t *testing.T) { 269 | state := multiCluster(map[string]Cluster{ 270 | "a": Cluster{"CLUSTER": []string{"two", 
"one"}}, 271 | "b": Cluster{"CLUSTER": []string{"$ALL"}, "ALL": []string{"one"}}, 272 | "c": Cluster{"CLUSTER": []string{"three"}}, 273 | "d": Cluster{"CLUSTER": []string{"four"}}, 274 | }) 275 | state.Query("clusters(one,two)") 276 | expected_length := 2 277 | actual_length_of_keys := len(state.cachedCQR.Keys()) 278 | if actual_length_of_keys != expected_length { 279 | t.Errorf("Expected '%d' Key(s) in 'state.cachedCQR', but got %d", 280 | expected_length, actual_length_of_keys) 281 | } 282 | } 283 | 284 | func TestPrimeCacheReturnsErrors(t *testing.T) { 285 | state := singleGroup("a", "(") 286 | errors := state.PrimeCache() 287 | 288 | if len(errors) == 1 { 289 | expected := "Could not parse query: (" 290 | actual := errors[0].Error() 291 | if actual != expected { 292 | t.Errorf("Different error returned.\n got: %s\nwant: %s", 293 | actual, expected) 294 | } 295 | } else { 296 | t.Errorf("Expected 1 error, got %d", len(errors)) 297 | } 298 | } 299 | 300 | func TestPrimeCachePopulatesCQR(t *testing.T) { 301 | state := multiCluster(map[string]Cluster{ 302 | "a": Cluster{"CLUSTER": []string{"two", "one"}}, 303 | "b": Cluster{"CLUSTER": []string{"$ALL"}, "ALL": []string{"one"}}, 304 | "c": Cluster{"CLUSTER": []string{"three"}}, 305 | "d": Cluster{"CLUSTER": []string{"four"}}, 306 | }) 307 | state.PrimeCache() 308 | expected := 4 309 | actual := len(state.cachedCQR.Keys()) 310 | if actual != expected { 311 | t.Errorf("Expected '%d' Key(s) in 'state.cachedCQR', but got %d", 312 | expected, actual) 313 | } 314 | } 315 | 316 | func TestState_StateMetrics(t *testing.T) { 317 | state := multiCluster(map[string]Cluster{ 318 | "a": Cluster{"CLUSTER": []string{"two", "one"}}, 319 | "b": Cluster{"CLUSTER": []string{"$ALL"}, "ALL": []string{"one"}}, 320 | "c": Cluster{"CLUSTER": []string{"three"}}, 321 | "d": Cluster{"CLUSTER": []string{"four"}}, 322 | }) 323 | state.PrimeCache() 324 | metrics := state.StateMetrics() 325 | if metrics["numberOfcachedCQR"] != 4 { 326 | t.Errorf("Expected metrics to report 4, key, instead got : %v", metrics["state.cache.numberOfCQR"]) 327 | } 328 | 329 | } 330 | func TestCycle(t *testing.T) { 331 | testError2(t, "Query exceeded maximum recursion limit", "%a", 332 | multiCluster(map[string]Cluster{ 333 | "a": Cluster{"CLUSTER": []string{"%a"}}, 334 | })) 335 | } 336 | 337 | func TestClustersEasy(t *testing.T) { 338 | testEval(t, NewResult("a"), "clusters(one)", multiCluster(map[string]Cluster{ 339 | "a": Cluster{"CLUSTER": []string{"two", "one"}}, 340 | })) 341 | } 342 | 343 | func TestQ(t *testing.T) { 344 | testEval(t, NewResult("(/"), "q((/)", emptyState()) 345 | testEval(t, NewResult("http://foo/bar?yeah"), "q(http://foo/bar?yeah)", emptyState()) 346 | } 347 | 348 | func TestQueryGroups(t *testing.T) { 349 | testEval(t, NewResult("one", "two"), "?a", multiGroup(Cluster{ 350 | "one": []string{"a"}, 351 | "two": []string{"$one"}, 352 | "three": []string{"b"}, 353 | })) 354 | } 355 | 356 | func TestCount(t *testing.T) { 357 | testEval(t, NewResult("1"), "count(a)", emptyState()) 358 | testEval(t, NewResult("2"), "count({a,b,a})", emptyState()) 359 | // TODO: why does this not parse 360 | // testEval(t, NewResult("2"), "count(a,b,a)", emptyState()) 361 | } 362 | 363 | func TestAllClusters(t *testing.T) { 364 | testEval(t, NewResult("a"), "allclusters()", singleCluster("a", Cluster{})) 365 | } 366 | 367 | func TestLengthError(t *testing.T) { 368 | longString := strings.Repeat("a", MaxQuerySize) 369 | testEval(t, NewResult(longString), longString, emptyState()) 370 | 
testError2(t, fmt.Sprintf("Query is too long, max length is %d", MaxQuerySize), longString+"a", emptyState()) 371 | } 372 | 373 | func TestFunctionError(t *testing.T) { 374 | testError2(t, "Wrong number of params for has: expected 2, got 0.", "has()", emptyState()) 375 | testError2(t, "Wrong number of params for has: expected 2, got 1.", "has(x)", emptyState()) 376 | testError2(t, "Wrong number of params for has: expected 2, got 3.", "has(x;y;z)", emptyState()) 377 | 378 | testError2(t, "Wrong number of params for count: expected 1, got 0.", "count()", emptyState()) 379 | testError2(t, "Wrong number of params for clusters: expected 1, got 0.", "clusters()", emptyState()) 380 | testError2(t, "Wrong number of params for allclusters: expected 0, got 1.", "allclusters(x)", emptyState()) 381 | 382 | testError2(t, "Unknown function: foobar", "foobar(x)", emptyState()) 383 | } 384 | 385 | func TestMaxResults(t *testing.T) { 386 | result := make([]interface{}, MaxResults) 387 | for i := 1; i <= MaxResults; i++ { 388 | result[i-1] = strconv.Itoa(i) 389 | } 390 | 391 | testEval(t, NewResult(result...), "1..10000000", emptyState()) 392 | } 393 | 394 | func TestMaxText(t *testing.T) { 395 | longString := strings.Repeat("a", MaxQuerySize+1) 396 | testError2(t, "Value would exceed max query size: aaaaaaaaaaaaaaaaaaaa...", "%a", 397 | singleCluster("a", Cluster{ 398 | "CLUSTER": []string{longString}, 399 | })) 400 | } 401 | 402 | func BenchmarkClusters(b *testing.B) { 403 | // setup fake state 404 | state := NewState() 405 | 406 | state.AddCluster("cluster", Cluster{ 407 | "CLUSTER": []string{"$ALL"}, 408 | "ALL": []string{"mynode"}, 409 | }) 410 | b.ResetTimer() 411 | for i := 0; i < b.N; i++ { 412 | state.Query("clusters(mynode)") 413 | } 414 | } 415 | 416 | func BenchmarkHas(b *testing.B) { 417 | // setup fake state 418 | state := NewState() 419 | 420 | state.AddCluster("cluster", Cluster{ 421 | "CLUSTER": []string{"mynode"}, 422 | "TYPE": []string{"redis"}, 423 | }) 424 | b.ResetTimer() 425 | for i := 0; i < b.N; i++ { 426 | state.Query("has(TYPE;redis)") 427 | } 428 | } 429 | 430 | func testError(t *testing.T, expected string, query string) { 431 | _, err := emptyState().Query(query) 432 | 433 | if err == nil { 434 | t.Errorf("Expected error but none returned") 435 | } else if err.Error() != expected { 436 | // TODO: Get error messages back 437 | //t.Errorf("Different error returned.\n got: %s\nwant: %s", err.Error(), expected) 438 | } 439 | } 440 | 441 | func testError2(t *testing.T, expected string, query string, state *State) { 442 | _, err := state.Query(query) 443 | 444 | if err == nil { 445 | t.Errorf("Expected error but none returned") 446 | } else if err.Error() != expected { 447 | t.Errorf("Different error returned.\n got: %s\nwant: %s", err.Error(), expected) 448 | } 449 | } 450 | 451 | func testEval(t *testing.T, expected Result, query string, state *State) { 452 | actual, err := state.Query(query) 453 | 454 | if err != nil { 455 | t.Errorf("%s Expected result, got error: %s", query, err) 456 | } else if !reflect.DeepEqual(actual, expected) { 457 | t.Errorf("EvalRange\n got: %v\nwant: %v", actual, expected) 458 | } 459 | } 460 | 461 | func singleCluster(name string, c Cluster) *State { 462 | state := NewState() 463 | state.clusters[name] = c 464 | return &state 465 | } 466 | 467 | func singleGroup(name string, members ...string) *State { 468 | return singleCluster("GROUPS", Cluster{ 469 | name: members, 470 | }) 471 | } 472 | 473 | func multiGroup(c Cluster) *State { 474 | return 
singleCluster("GROUPS", c) 475 | } 476 | 477 | func multiCluster(cs map[string]Cluster) *State { 478 | state := NewState() 479 | state.clusters = cs 480 | return &state 481 | } 482 | 483 | func emptyState() *State { 484 | state := NewState() 485 | return &state 486 | } 487 | -------------------------------------------------------------------------------- /expand_test.go: -------------------------------------------------------------------------------- 1 | package grange 2 | 3 | import ( 4 | "bufio" 5 | "fmt" 6 | "io/ioutil" 7 | "os" 8 | "path" 9 | "path/filepath" 10 | "reflect" 11 | "strings" 12 | "testing" 13 | 14 | "gopkg.in/yaml.v2" 15 | ) 16 | 17 | // range-specs that are not currently implemented 18 | var PendingList = []string{ 19 | // All related to brackets inside identifiers: %a{b}c 20 | // https://github.com/square/grange/issues/42 21 | "spec/expand/simple/lookup.spec:19", 22 | "spec/expand/simple/lookup.spec:24", 23 | "spec/expand/simple/lookup.spec:29", 24 | "spec/expand/default_cluster/mem_function.spec:5", 25 | "spec/expand/default_cluster/at_operator.spec:8", 26 | 27 | // Probably requires rewriting numeric expansion implementation to not use a regex. 28 | // https://github.com/square/grange/issues/40 29 | "spec/expand/numeric_expansion.spec:55", 30 | "spec/expand/numeric_expansion.spec:61", 31 | 32 | // Using regex as LHS of set operation 33 | // https://github.com/square/grange/issues/41 34 | "spec/expand/regex.spec:10", 35 | 36 | // Better parsing of expressions following % 37 | // https://github.com/square/grange/issues/43 38 | "spec/expand/clusters/cluster_func.spec:1", 39 | } 40 | 41 | // if non-empty, only run these range-specs. Ideally this would be set as a CLI 42 | // flag. 43 | var FocusList = []string{} 44 | 45 | func TestExpand(t *testing.T) { 46 | spec_dir := os.Getenv("RANGE_SPEC_PATH") 47 | if spec_dir == "" { 48 | // Skip expand tests 49 | fmt.Fprintln(os.Stderr, "Skipping Expand() tests, RANGE_SPEC_PATH not set.") 50 | return 51 | } 52 | 53 | filepath.Walk(spec_dir+"/spec/expand", func(path string, info os.FileInfo, err error) error { 54 | if !info.IsDir() { 55 | return nil 56 | } 57 | 58 | specs, err := filepath.Glob(path + "/*.spec") 59 | if err == nil && specs != nil { 60 | for _, spec := range specs { 61 | loadExpandSpec(t, spec) 62 | } 63 | } 64 | return nil 65 | }) 66 | } 67 | 68 | func runExpandSpec(t *testing.T, spec RangeSpec) { 69 | state := NewState() 70 | // Load YAML files 71 | yamls, err := filepath.Glob(path.Dir(spec.path) + "/*.yaml") 72 | if err != nil { 73 | fmt.Println(err) 74 | return 75 | } 76 | 77 | for _, yamlPath := range yamls { 78 | dat, err := ioutil.ReadFile(yamlPath) 79 | if err != nil { 80 | t.Errorf("Could not read: %s", yamlPath) 81 | } 82 | basename := path.Base(yamlPath) 83 | name := strings.TrimSuffix(basename, ".yaml") 84 | 85 | m := make(map[string]interface{}) 86 | err = yaml.Unmarshal(dat, &m) 87 | if err != nil { 88 | t.Errorf("Invalid YAML: %s", yamlPath) 89 | } 90 | c := yamlToCluster(name, m) 91 | state.AddCluster(name, c) 92 | } 93 | 94 | if len(FocusList) == 0 || spec.Ignore(FocusList) { 95 | actual, err := state.Query(spec.expr) 96 | 97 | if err != nil { 98 | if spec.Ignore(PendingList) { 99 | fmt.Printf("PENDING %s\n%s\n\n", spec.String(), err) 100 | } else { 101 | t.Errorf("FAILED %s\n%s", spec.String(), err) 102 | } 103 | } else if !reflect.DeepEqual(actual, spec.results) { 104 | if spec.Ignore(PendingList) { 105 | fmt.Printf("PENDING %s\n got: %s\nwant: %s\n\n", 106 | spec.String(), actual, spec.results)
107 | } else { 108 | t.Errorf("FAILED %s\n got: %s\nwant: %s", 109 | spec.String(), actual, spec.results) 110 | } 111 | } else { 112 | if spec.Ignore(PendingList) { 113 | t.Errorf("PASSED but listed as PENDING %s", spec.String()) 114 | } 115 | } 116 | } 117 | } 118 | 119 | func loadExpandSpec(t *testing.T, specpath string) { 120 | file, _ := os.Open(specpath) 121 | scanner := bufio.NewScanner(file) 122 | currentSpec := RangeSpec{results: NewResult(), path: specpath} 123 | 124 | line := 0 125 | for scanner.Scan() { 126 | line++ 127 | if strings.HasPrefix(strings.Trim(scanner.Text(), " "), "#") { 128 | continue 129 | } else if scanner.Text() == "" { 130 | runExpandSpec(t, currentSpec) 131 | currentSpec = RangeSpec{results: NewResult(), path: specpath} 132 | } else { 133 | if currentSpec.expr == "" { 134 | currentSpec.expr = scanner.Text() 135 | currentSpec.line = line 136 | } else { 137 | currentSpec.results.Add(scanner.Text()) 138 | } 139 | } 140 | } 141 | if currentSpec.expr != "" { 142 | runExpandSpec(t, currentSpec) 143 | } 144 | } 145 | 146 | // Converts a generic YAML map to a cluster by extracting all the correctly 147 | // typed strings and discarding invalid values. 148 | func yamlToCluster(clusterName string, yaml map[string]interface{}) Cluster { 149 | c := Cluster{} 150 | 151 | for key, value := range yaml { 152 | switch value.(type) { 153 | case nil: 154 | c[key] = []string{} 155 | case string: 156 | c[key] = []string{value.(string)} 157 | case int: 158 | c[key] = []string{fmt.Sprintf("%d", value.(int))} 159 | case bool: 160 | c[key] = []string{fmt.Sprintf("%s", value)} 161 | case []interface{}: 162 | result := []string{} 163 | 164 | for _, x := range value.([]interface{}) { 165 | switch x.(type) { 166 | case string: 167 | result = append(result, fmt.Sprintf("%s", x)) 168 | case int: 169 | result = append(result, fmt.Sprintf("%d", x)) 170 | case bool: 171 | result = append(result, fmt.Sprintf("%s", x)) 172 | default: 173 | // discard 174 | } 175 | } 176 | c[key] = result 177 | default: 178 | // discard 179 | } 180 | } 181 | return c 182 | } 183 | -------------------------------------------------------------------------------- /go.mod: -------------------------------------------------------------------------------- 1 | module github.com/square/grange 2 | 3 | go 1.12 4 | 5 | require ( 6 | github.com/deckarep/golang-set v0.0.0-20170202203032-fc8930a5e645 7 | github.com/fvbommel/sortorder v1.0.1 8 | github.com/orcaman/concurrent-map v0.0.0-20160823150647-8bf1e9bacbf6 9 | gopkg.in/yaml.v2 v2.2.7 10 | ) 11 | -------------------------------------------------------------------------------- /go.sum: -------------------------------------------------------------------------------- 1 | github.com/deckarep/golang-set v0.0.0-20170202203032-fc8930a5e645 h1:P2qhNT0y1A7XeBvSwkvXV2nZTd28Ax5n709pred+3Ys= 2 | github.com/deckarep/golang-set v0.0.0-20170202203032-fc8930a5e645/go.mod h1:93vsz/8Wt4joVM7c2AVqh+YRMiUSc14yDtF28KmMOgQ= 3 | github.com/fvbommel/sortorder v1.0.1 h1:dSnXLt4mJYH25uDDGa3biZNQsozaUWDSWeKJ0qqFfzE= 4 | github.com/fvbommel/sortorder v1.0.1/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= 5 | github.com/orcaman/concurrent-map v0.0.0-20160823150647-8bf1e9bacbf6 h1:df8k17NbGFBiBwHnkSCGQ3F9c6TrF8zmGs2jJ9OsQGc= 6 | github.com/orcaman/concurrent-map v0.0.0-20160823150647-8bf1e9bacbf6/go.mod h1:Lu3tH6HLW3feq74c2GC+jIMS/K2CFcDWnWD9XkenwhI= 7 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= 8 | gopkg.in/check.v1 
v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 9 | gopkg.in/yaml.v2 v2.2.7 h1:VUgggvou5XRW9mHwD/yXxIYSMtY0zoKQf/v226p2nyo= 10 | gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= 11 | -------------------------------------------------------------------------------- /nodes.go: -------------------------------------------------------------------------------- 1 | package grange 2 | 3 | import ( 4 | "fmt" 5 | "strings" 6 | ) 7 | 8 | type operatorType int 9 | 10 | const ( 11 | operatorIntersect operatorType = iota 12 | operatorSubtract 13 | operatorUnion 14 | ) 15 | 16 | type parserNode interface { 17 | String() string 18 | } 19 | 20 | type nodeNull struct{} 21 | 22 | // Transient marker node to delineate the start of a braces capture. This is 23 | // kind of weird. This node should never be present once parsing is complete. 24 | // 25 | // There is a known bug where %{} leaves this node in the parse tree. I'm not 26 | // sure how to fix it at the parsing level yet, so for now I have just included a 27 | // noop implementation of this node. 28 | type nodeBraceStart struct{} 29 | 30 | type nodeText struct { 31 | val string 32 | } 33 | 34 | type nodeConstant struct { 35 | val string 36 | } 37 | 38 | type nodeRegexp struct { 39 | val string 40 | } 41 | 42 | type nodeLocalClusterLookup struct { 43 | node parserNode 44 | } 45 | 46 | type nodeGroupQuery struct { 47 | node parserNode 48 | } 49 | 50 | type nodeClusterLookup struct { 51 | node parserNode 52 | key parserNode 53 | } 54 | 55 | type nodeOperator struct { 56 | op operatorType 57 | left parserNode 58 | right parserNode 59 | } 60 | 61 | type nodeBraces struct { 62 | node parserNode 63 | left parserNode 64 | right parserNode 65 | } 66 | 67 | type nodeFunction struct { 68 | name string 69 | params []parserNode 70 | } 71 | 72 | func (n nodeFunction) String() string { 73 | result := []string{} 74 | for _, param := range n.params { 75 | result = append(result, param.String()) 76 | } 77 | 78 | return fmt.Sprintf("%s(%s)", n.name, strings.Join(result, ";")) 79 | } 80 | 81 | func (n nodeText) String() string { 82 | return n.val 83 | } 84 | 85 | func (n nodeConstant) String() string { 86 | return n.val 87 | } 88 | 89 | func (n nodeRegexp) String() string { 90 | return fmt.Sprintf("/%s/", n.val) 91 | } 92 | 93 | func (n nodeClusterLookup) String() string { 94 | switch n.key.(type) { 95 | case nodeText: 96 | if n.key.(nodeText).val == "CLUSTER" { 97 | return fmt.Sprintf("%%{%s}", n.node) 98 | } 99 | } 100 | return fmt.Sprintf("%%{%s}:%s", n.node, n.key) 101 | } 102 | 103 | func (n nodeGroupQuery) String() string { 104 | return fmt.Sprintf("?%s", n.node) 105 | } 106 | 107 | func (n nodeLocalClusterLookup) String() string { 108 | return fmt.Sprintf("$%s", n.node) 109 | } 110 | 111 | func (n nodeBraces) String() string { 112 | return fmt.Sprintf("%s{%s}%s", n.node, n.left, n.right) 113 | } 114 | 115 | func (n nodeNull) String() string { 116 | return "" 117 | } 118 | 119 | func (n nodeBraceStart) String() string { 120 | return "" 121 | } 122 | 123 | func (n nodeOperator) String() string { 124 | var op string 125 | 126 | switch n.op { 127 | case operatorIntersect: 128 | op = "&" 129 | case operatorSubtract: 130 | op = "-" 131 | case operatorUnion: 132 | op = "," 133 | } 134 | return fmt.Sprintf("%s %s %s", n.left, op, n.right) 135 | } 136 | 137 | func (t operatorType) String() string { 138 | switch t { 139 | case operatorIntersect: 140 | return "&" 141 | case operatorSubtract: 142 | return "-" 143
| case operatorUnion: 144 | return "," 145 | default: 146 | panic("Unknown operatorType") 147 | } 148 | } 149 | -------------------------------------------------------------------------------- /parser.go: -------------------------------------------------------------------------------- 1 | package grange 2 | 3 | import "fmt" 4 | import "os" 5 | 6 | func (r *rangeQuery) popNode() parserNode { 7 | l := len(r.nodeStack) 8 | if l == 0 { 9 | fmt.Fprintf(os.Stderr, "Tried to popNode when stack is empty. Returning null, but probably hiding a bug somewhere.\n") 10 | return nodeNull{} 11 | } 12 | result := r.nodeStack[l-1] 13 | r.nodeStack = r.nodeStack[:l-1] 14 | return result 15 | } 16 | 17 | func (r *rangeQuery) pushNode(node parserNode) { 18 | r.nodeStack = append(r.nodeStack, node) 19 | } 20 | 21 | func (r *rangeQuery) addValue(val string) { 22 | r.pushNode(nodeText{val}) 23 | } 24 | 25 | func (r *rangeQuery) addConstant(val string) { 26 | r.pushNode(nodeConstant{val}) 27 | } 28 | 29 | func (r *rangeQuery) addNull() { 30 | r.pushNode(nodeNull{}) 31 | } 32 | 33 | func (r *rangeQuery) addBraceStart() { 34 | r.pushNode(nodeBraceStart{}) 35 | } 36 | 37 | func (r *rangeQuery) addFuncArg() { 38 | var funcNode parserNode 39 | 40 | paramNode := r.popNode() 41 | switch paramNode.(type) { 42 | case nodeFunction: 43 | // No arguments. This is kind of terrible, probably a better way to do 44 | // this. 45 | r.pushNode(paramNode) 46 | default: 47 | l := len(r.nodeStack) 48 | if l == 0 { 49 | fmt.Fprintf(os.Stderr, "Tried to addFuncArg when stack is empty. Returning null, but probably hiding a bug somewhere.\n") 50 | return 51 | } 52 | funcNode = r.nodeStack[l-1] 53 | fn := funcNode.(nodeFunction) 54 | fn.params = append(fn.params, paramNode) 55 | r.nodeStack[len(r.nodeStack)-1] = fn 56 | } 57 | } 58 | 59 | func (r *rangeQuery) addBraces() { 60 | if len(r.nodeStack) < 2 { 61 | return 62 | } 63 | 64 | right := r.popNode() 65 | node := r.popNode() 66 | 67 | var left parserNode 68 | left = nodeNull{} 69 | 70 | // This is kind of bullshit but not sure a better way to do it yet 71 | switch node.(type) { 72 | case nodeBraceStart: 73 | node = nodeNull{} 74 | default: 75 | if len(r.nodeStack) > 0 { 76 | left = r.popNode() 77 | switch left.(type) { 78 | case nodeBraceStart: 79 | left = nodeNull{} 80 | } 81 | } 82 | } 83 | r.pushNode(nodeBraces{node, left, right}) 84 | } 85 | 86 | func (r *rangeQuery) addGroupLookup() { 87 | exprNode := r.popNode() 88 | r.pushNode(nodeClusterLookup{nodeConstant{"GROUPS"}, exprNode}) 89 | } 90 | 91 | func (r *rangeQuery) addGroupQuery() { 92 | exprNode := r.popNode() 93 | r.pushNode(nodeGroupQuery{exprNode}) 94 | } 95 | 96 | func (r *rangeQuery) addClusterQuery() { 97 | exprNode := r.popNode() 98 | r.pushNode(nodeFunction{"clusters", []parserNode{exprNode}}) 99 | } 100 | 101 | func (r *rangeQuery) addLocalClusterLookup() { 102 | exprNode := r.popNode() 103 | r.pushNode(nodeLocalClusterLookup{exprNode}) 104 | } 105 | 106 | func (r *rangeQuery) addFunction(name string) { 107 | r.pushNode(nodeFunction{name, []parserNode{}}) 108 | } 109 | 110 | func (r *rangeQuery) addClusterLookup() { 111 | exprNode := r.popNode() 112 | r.pushNode(nodeClusterLookup{exprNode, nodeConstant{"CLUSTER"}}) 113 | } 114 | 115 | func (r *rangeQuery) addRegex(val string) { 116 | r.pushNode(nodeRegexp{val}) 117 | } 118 | 119 | func (r *rangeQuery) addKeyLookup() { 120 | keyNode := r.popNode() 121 | // TODO: Error out if no lookup 122 | if len(r.nodeStack) > 0 { 123 | lookupNode := r.popNode() 124 | 125 | switch 
lookupNode.(type) { 126 | case nodeClusterLookup: 127 | n := lookupNode.(nodeClusterLookup) 128 | n.key = keyNode 129 | r.pushNode(n) 130 | // TODO: Error out if wrong node type 131 | } 132 | } 133 | } 134 | 135 | func (r *rangeQuery) addOperator(typ operatorType) { 136 | right := r.popNode() 137 | left := r.popNode() 138 | 139 | r.pushNode(nodeOperator{typ, left, right}) 140 | } 141 | -------------------------------------------------------------------------------- /range.peg: -------------------------------------------------------------------------------- 1 | package grange 2 | 3 | type rangeQuery Peg { 4 | currentLiteral string 5 | nodeStack []parserNode 6 | } 7 | 8 | expression <- combinedexpr? !. 9 | 10 | combinedexpr <- rangeexpr combinators? 11 | 12 | rangeexpr <- space 13 | ( const 14 | / function 15 | / cluster 16 | / clusterq 17 | / group 18 | / groupq 19 | / localkey 20 | / regex 21 | / value 22 | / emptybrackets 23 | / brackets 24 | / emptybraces 25 | / { p.addBraceStart() } braces 26 | ) 27 | space 28 | 29 | combinators <- space (union / intersect / exclude / braces) 30 | intersect <- '&' rangeexpr { p.addOperator(operatorIntersect) } combinators? 31 | exclude <- '-' rangeexpr { p.addOperator(operatorSubtract) } combinators? 32 | union <- ',' rangeexpr { p.addOperator(operatorUnion) } combinators? 33 | 34 | # This is pretty gross and there is probably a much nicer way to do it. 35 | # See https://github.com/square/grange/issues/37 36 | emptybraces <- '{' space '}' { p.pushNode(nodeNull{}) } 37 | emptybrackets <- '(' space ')' { p.pushNode(nodeNull{}) } 38 | 39 | # See https://github.com/pointlander/peg/issues/21 for context 40 | braces <- '{' combinedexpr? '}' rangeexpr? { p.addBraces() } 41 | brackets <- '(' combinedexpr? ')' 42 | 43 | clusterq <- '*' rangeexpr { p.addClusterQuery() } 44 | groupq <- '?' rangeexpr { p.addGroupQuery() } 45 | cluster <- '%' literal { p.addValue(buffer[begin:end]); p.addClusterLookup() } key? 46 | / '%' rangeexpr { p.addClusterLookup() } key? 47 | group <- '@' rangeexpr { p.addGroupLookup() } 48 | 49 | key <- ':' rangeexpr { p.addKeyLookup() } 50 | localkey <- '$' rangeexpr { p.addLocalClusterLookup() } 51 | 52 | function <- literal { p.addFunction(buffer[begin:end]) } '(' funcargs ')' 53 | funcargs <- combinedexpr? { p.addFuncArg() } ';' funcargs 54 | / combinedexpr? { p.addFuncArg() } 55 | 56 | regex <- '/' < (!'/' .)* > '/' { p.addRegex(buffer[begin:end]) } 57 | literal <- < leaderChar [[a-z0-9-_.]]* > 58 | value <- < leaderChar [[:a-z0-9-_.]]* > { p.addValue(buffer[begin:end]) } 59 | leaderChar <- [[a-z0-9._]] # Do not match "-" so not to confuse with exclude rule 60 | space <- ' '* 61 | const <- q / quoted 62 | q <- 'q(' <(!')' .)*> ')' { p.addConstant(buffer[begin:end]) } 63 | quoted <- '"' <(!'"' .)*> '"' { p.addConstant(buffer[begin:end]) } 64 | -------------------------------------------------------------------------------- /range.peg.go: -------------------------------------------------------------------------------- 1 | package grange 2 | 3 | //go:generate /home/xavier/Code/go/bin/peg range.peg 4 | 5 | import ( 6 | "fmt" 7 | "math" 8 | "sort" 9 | "strconv" 10 | ) 11 | 12 | const endSymbol rune = 1114112 13 | 14 | /* The rule types inferred from the grammar are below. 
*/ 15 | type pegRule uint8 16 | 17 | const ( 18 | ruleUnknown pegRule = iota 19 | ruleexpression 20 | rulecombinedexpr 21 | rulerangeexpr 22 | rulecombinators 23 | ruleintersect 24 | ruleexclude 25 | ruleunion 26 | ruleemptybraces 27 | ruleemptybrackets 28 | rulebraces 29 | rulebrackets 30 | ruleclusterq 31 | rulegroupq 32 | rulecluster 33 | rulegroup 34 | rulekey 35 | rulelocalkey 36 | rulefunction 37 | rulefuncargs 38 | ruleregex 39 | ruleliteral 40 | rulevalue 41 | ruleleaderChar 42 | rulespace 43 | ruleconst 44 | ruleq 45 | rulequoted 46 | ruleAction0 47 | ruleAction1 48 | ruleAction2 49 | ruleAction3 50 | ruleAction4 51 | ruleAction5 52 | ruleAction6 53 | ruleAction7 54 | ruleAction8 55 | ruleAction9 56 | ruleAction10 57 | ruleAction11 58 | ruleAction12 59 | ruleAction13 60 | ruleAction14 61 | ruleAction15 62 | ruleAction16 63 | rulePegText 64 | ruleAction17 65 | ruleAction18 66 | ruleAction19 67 | ruleAction20 68 | ) 69 | 70 | var rul3s = [...]string{ 71 | "Unknown", 72 | "expression", 73 | "combinedexpr", 74 | "rangeexpr", 75 | "combinators", 76 | "intersect", 77 | "exclude", 78 | "union", 79 | "emptybraces", 80 | "emptybrackets", 81 | "braces", 82 | "brackets", 83 | "clusterq", 84 | "groupq", 85 | "cluster", 86 | "group", 87 | "key", 88 | "localkey", 89 | "function", 90 | "funcargs", 91 | "regex", 92 | "literal", 93 | "value", 94 | "leaderChar", 95 | "space", 96 | "const", 97 | "q", 98 | "quoted", 99 | "Action0", 100 | "Action1", 101 | "Action2", 102 | "Action3", 103 | "Action4", 104 | "Action5", 105 | "Action6", 106 | "Action7", 107 | "Action8", 108 | "Action9", 109 | "Action10", 110 | "Action11", 111 | "Action12", 112 | "Action13", 113 | "Action14", 114 | "Action15", 115 | "Action16", 116 | "PegText", 117 | "Action17", 118 | "Action18", 119 | "Action19", 120 | "Action20", 121 | } 122 | 123 | type token32 struct { 124 | pegRule 125 | begin, end uint32 126 | } 127 | 128 | func (t *token32) String() string { 129 | return fmt.Sprintf("\x1B[34m%v\x1B[m %v %v", rul3s[t.pegRule], t.begin, t.end) 130 | } 131 | 132 | type node32 struct { 133 | token32 134 | up, next *node32 135 | } 136 | 137 | func (node *node32) print(pretty bool, buffer string) { 138 | var print func(node *node32, depth int) 139 | print = func(node *node32, depth int) { 140 | for node != nil { 141 | for c := 0; c < depth; c++ { 142 | fmt.Printf(" ") 143 | } 144 | rule := rul3s[node.pegRule] 145 | quote := strconv.Quote(string(([]rune(buffer)[node.begin:node.end]))) 146 | if !pretty { 147 | fmt.Printf("%v %v\n", rule, quote) 148 | } else { 149 | fmt.Printf("\x1B[34m%v\x1B[m %v\n", rule, quote) 150 | } 151 | if node.up != nil { 152 | print(node.up, depth+1) 153 | } 154 | node = node.next 155 | } 156 | } 157 | print(node, 0) 158 | } 159 | 160 | func (node *node32) Print(buffer string) { 161 | node.print(false, buffer) 162 | } 163 | 164 | func (node *node32) PrettyPrint(buffer string) { 165 | node.print(true, buffer) 166 | } 167 | 168 | type tokens32 struct { 169 | tree []token32 170 | } 171 | 172 | func (t *tokens32) Trim(length uint32) { 173 | t.tree = t.tree[:length] 174 | } 175 | 176 | func (t *tokens32) Print() { 177 | for _, token := range t.tree { 178 | fmt.Println(token.String()) 179 | } 180 | } 181 | 182 | func (t *tokens32) AST() *node32 { 183 | type element struct { 184 | node *node32 185 | down *element 186 | } 187 | tokens := t.Tokens() 188 | var stack *element 189 | for _, token := range tokens { 190 | if token.begin == token.end { 191 | continue 192 | } 193 | node := &node32{token32: token} 194 | for stack != 
nil && stack.node.begin >= token.begin && stack.node.end <= token.end { 195 | stack.node.next = node.up 196 | node.up = stack.node 197 | stack = stack.down 198 | } 199 | stack = &element{node: node, down: stack} 200 | } 201 | if stack != nil { 202 | return stack.node 203 | } 204 | return nil 205 | } 206 | 207 | func (t *tokens32) PrintSyntaxTree(buffer string) { 208 | t.AST().Print(buffer) 209 | } 210 | 211 | func (t *tokens32) PrettyPrintSyntaxTree(buffer string) { 212 | t.AST().PrettyPrint(buffer) 213 | } 214 | 215 | func (t *tokens32) Add(rule pegRule, begin, end, index uint32) { 216 | if tree := t.tree; int(index) >= len(tree) { 217 | expanded := make([]token32, 2*len(tree)) 218 | copy(expanded, tree) 219 | t.tree = expanded 220 | } 221 | t.tree[index] = token32{ 222 | pegRule: rule, 223 | begin: begin, 224 | end: end, 225 | } 226 | } 227 | 228 | func (t *tokens32) Tokens() []token32 { 229 | return t.tree 230 | } 231 | 232 | type rangeQuery struct { 233 | currentLiteral string 234 | nodeStack []parserNode 235 | 236 | Buffer string 237 | buffer []rune 238 | rules [50]func() bool 239 | parse func(rule ...int) error 240 | reset func() 241 | Pretty bool 242 | tokens32 243 | } 244 | 245 | func (p *rangeQuery) Parse(rule ...int) error { 246 | return p.parse(rule...) 247 | } 248 | 249 | func (p *rangeQuery) Reset() { 250 | p.reset() 251 | } 252 | 253 | type textPosition struct { 254 | line, symbol int 255 | } 256 | 257 | type textPositionMap map[int]textPosition 258 | 259 | func translatePositions(buffer []rune, positions []int) textPositionMap { 260 | length, translations, j, line, symbol := len(positions), make(textPositionMap, len(positions)), 0, 1, 0 261 | sort.Ints(positions) 262 | 263 | search: 264 | for i, c := range buffer { 265 | if c == '\n' { 266 | line, symbol = line+1, 0 267 | } else { 268 | symbol++ 269 | } 270 | if i == positions[j] { 271 | translations[positions[j]] = textPosition{line, symbol} 272 | for j++; j < length; j++ { 273 | if i != positions[j] { 274 | continue search 275 | } 276 | } 277 | break search 278 | } 279 | } 280 | 281 | return translations 282 | } 283 | 284 | type parseError struct { 285 | p *rangeQuery 286 | max token32 287 | } 288 | 289 | func (e *parseError) Error() string { 290 | tokens, error := []token32{e.max}, "\n" 291 | positions, p := make([]int, 2*len(tokens)), 0 292 | for _, token := range tokens { 293 | positions[p], p = int(token.begin), p+1 294 | positions[p], p = int(token.end), p+1 295 | } 296 | translations := translatePositions(e.p.buffer, positions) 297 | format := "parse error near %v (line %v symbol %v - line %v symbol %v):\n%v\n" 298 | if e.p.Pretty { 299 | format = "parse error near \x1B[34m%v\x1B[m (line %v symbol %v - line %v symbol %v):\n%v\n" 300 | } 301 | for _, token := range tokens { 302 | begin, end := int(token.begin), int(token.end) 303 | error += fmt.Sprintf(format, 304 | rul3s[token.pegRule], 305 | translations[begin].line, translations[begin].symbol, 306 | translations[end].line, translations[end].symbol, 307 | strconv.Quote(string(e.p.buffer[begin:end]))) 308 | } 309 | 310 | return error 311 | } 312 | 313 | func (p *rangeQuery) PrintSyntaxTree() { 314 | if p.Pretty { 315 | p.tokens32.PrettyPrintSyntaxTree(p.Buffer) 316 | } else { 317 | p.tokens32.PrintSyntaxTree(p.Buffer) 318 | } 319 | } 320 | 321 | func (p *rangeQuery) Execute() { 322 | buffer, _buffer, text, begin, end := p.Buffer, p.buffer, "", 0, 0 323 | for _, token := range p.Tokens() { 324 | switch token.pegRule { 325 | 326 | case rulePegText: 327 | begin, end = 
int(token.begin), int(token.end) 328 | text = string(_buffer[begin:end]) 329 | 330 | case ruleAction0: 331 | p.addBraceStart() 332 | case ruleAction1: 333 | p.addOperator(operatorIntersect) 334 | case ruleAction2: 335 | p.addOperator(operatorSubtract) 336 | case ruleAction3: 337 | p.addOperator(operatorUnion) 338 | case ruleAction4: 339 | p.pushNode(nodeNull{}) 340 | case ruleAction5: 341 | p.pushNode(nodeNull{}) 342 | case ruleAction6: 343 | p.addBraces() 344 | case ruleAction7: 345 | p.addClusterQuery() 346 | case ruleAction8: 347 | p.addGroupQuery() 348 | case ruleAction9: 349 | p.addValue(buffer[begin:end]) 350 | p.addClusterLookup() 351 | case ruleAction10: 352 | p.addClusterLookup() 353 | case ruleAction11: 354 | p.addGroupLookup() 355 | case ruleAction12: 356 | p.addKeyLookup() 357 | case ruleAction13: 358 | p.addLocalClusterLookup() 359 | case ruleAction14: 360 | p.addFunction(buffer[begin:end]) 361 | case ruleAction15: 362 | p.addFuncArg() 363 | case ruleAction16: 364 | p.addFuncArg() 365 | case ruleAction17: 366 | p.addRegex(buffer[begin:end]) 367 | case ruleAction18: 368 | p.addValue(buffer[begin:end]) 369 | case ruleAction19: 370 | p.addConstant(buffer[begin:end]) 371 | case ruleAction20: 372 | p.addConstant(buffer[begin:end]) 373 | 374 | } 375 | } 376 | _, _, _, _, _ = buffer, _buffer, text, begin, end 377 | } 378 | 379 | func (p *rangeQuery) Init() { 380 | var ( 381 | max token32 382 | position, tokenIndex uint32 383 | buffer []rune 384 | ) 385 | p.reset = func() { 386 | max = token32{} 387 | position, tokenIndex = 0, 0 388 | 389 | p.buffer = []rune(p.Buffer) 390 | if len(p.buffer) == 0 || p.buffer[len(p.buffer)-1] != endSymbol { 391 | p.buffer = append(p.buffer, endSymbol) 392 | } 393 | buffer = p.buffer 394 | } 395 | p.reset() 396 | 397 | _rules := p.rules 398 | tree := tokens32{tree: make([]token32, math.MaxInt16)} 399 | p.parse = func(rule ...int) error { 400 | r := 1 401 | if len(rule) > 0 { 402 | r = rule[0] 403 | } 404 | matches := p.rules[r]() 405 | p.tokens32 = tree 406 | if matches { 407 | p.Trim(tokenIndex) 408 | return nil 409 | } 410 | return &parseError{p, max} 411 | } 412 | 413 | add := func(rule pegRule, begin uint32) { 414 | tree.Add(rule, begin, position, tokenIndex) 415 | tokenIndex++ 416 | if begin != position && position > max.end { 417 | max = token32{rule, begin, position} 418 | } 419 | } 420 | 421 | matchDot := func() bool { 422 | if buffer[position] != endSymbol { 423 | position++ 424 | return true 425 | } 426 | return false 427 | } 428 | 429 | /*matchChar := func(c byte) bool { 430 | if buffer[position] == c { 431 | position++ 432 | return true 433 | } 434 | return false 435 | }*/ 436 | 437 | /*matchRange := func(lower byte, upper byte) bool { 438 | if c := buffer[position]; c >= lower && c <= upper { 439 | position++ 440 | return true 441 | } 442 | return false 443 | }*/ 444 | 445 | _rules = [...]func() bool{ 446 | nil, 447 | /* 0 expression <- <(combinedexpr? 
!.)> */ 448 | func() bool { 449 | position0, tokenIndex0 := position, tokenIndex 450 | { 451 | position1 := position 452 | { 453 | position2, tokenIndex2 := position, tokenIndex 454 | if !_rules[rulecombinedexpr]() { 455 | goto l2 456 | } 457 | goto l3 458 | l2: 459 | position, tokenIndex = position2, tokenIndex2 460 | } 461 | l3: 462 | { 463 | position4, tokenIndex4 := position, tokenIndex 464 | if !matchDot() { 465 | goto l4 466 | } 467 | goto l0 468 | l4: 469 | position, tokenIndex = position4, tokenIndex4 470 | } 471 | add(ruleexpression, position1) 472 | } 473 | return true 474 | l0: 475 | position, tokenIndex = position0, tokenIndex0 476 | return false 477 | }, 478 | /* 1 combinedexpr <- <(rangeexpr combinators?)> */ 479 | func() bool { 480 | position5, tokenIndex5 := position, tokenIndex 481 | { 482 | position6 := position 483 | if !_rules[rulerangeexpr]() { 484 | goto l5 485 | } 486 | { 487 | position7, tokenIndex7 := position, tokenIndex 488 | if !_rules[rulecombinators]() { 489 | goto l7 490 | } 491 | goto l8 492 | l7: 493 | position, tokenIndex = position7, tokenIndex7 494 | } 495 | l8: 496 | add(rulecombinedexpr, position6) 497 | } 498 | return true 499 | l5: 500 | position, tokenIndex = position5, tokenIndex5 501 | return false 502 | }, 503 | /* 2 rangeexpr <- <(space (const / function / cluster / clusterq / group / groupq / localkey / regex / value / emptybrackets / brackets / emptybraces / (Action0 braces)) space)> */ 504 | func() bool { 505 | position9, tokenIndex9 := position, tokenIndex 506 | { 507 | position10 := position 508 | if !_rules[rulespace]() { 509 | goto l9 510 | } 511 | { 512 | position11, tokenIndex11 := position, tokenIndex 513 | if !_rules[ruleconst]() { 514 | goto l12 515 | } 516 | goto l11 517 | l12: 518 | position, tokenIndex = position11, tokenIndex11 519 | if !_rules[rulefunction]() { 520 | goto l13 521 | } 522 | goto l11 523 | l13: 524 | position, tokenIndex = position11, tokenIndex11 525 | if !_rules[rulecluster]() { 526 | goto l14 527 | } 528 | goto l11 529 | l14: 530 | position, tokenIndex = position11, tokenIndex11 531 | if !_rules[ruleclusterq]() { 532 | goto l15 533 | } 534 | goto l11 535 | l15: 536 | position, tokenIndex = position11, tokenIndex11 537 | if !_rules[rulegroup]() { 538 | goto l16 539 | } 540 | goto l11 541 | l16: 542 | position, tokenIndex = position11, tokenIndex11 543 | if !_rules[rulegroupq]() { 544 | goto l17 545 | } 546 | goto l11 547 | l17: 548 | position, tokenIndex = position11, tokenIndex11 549 | if !_rules[rulelocalkey]() { 550 | goto l18 551 | } 552 | goto l11 553 | l18: 554 | position, tokenIndex = position11, tokenIndex11 555 | if !_rules[ruleregex]() { 556 | goto l19 557 | } 558 | goto l11 559 | l19: 560 | position, tokenIndex = position11, tokenIndex11 561 | if !_rules[rulevalue]() { 562 | goto l20 563 | } 564 | goto l11 565 | l20: 566 | position, tokenIndex = position11, tokenIndex11 567 | if !_rules[ruleemptybrackets]() { 568 | goto l21 569 | } 570 | goto l11 571 | l21: 572 | position, tokenIndex = position11, tokenIndex11 573 | if !_rules[rulebrackets]() { 574 | goto l22 575 | } 576 | goto l11 577 | l22: 578 | position, tokenIndex = position11, tokenIndex11 579 | if !_rules[ruleemptybraces]() { 580 | goto l23 581 | } 582 | goto l11 583 | l23: 584 | position, tokenIndex = position11, tokenIndex11 585 | if !_rules[ruleAction0]() { 586 | goto l9 587 | } 588 | if !_rules[rulebraces]() { 589 | goto l9 590 | } 591 | } 592 | l11: 593 | if !_rules[rulespace]() { 594 | goto l9 595 | } 596 | add(rulerangeexpr, position10) 
597 | } 598 | return true 599 | l9: 600 | position, tokenIndex = position9, tokenIndex9 601 | return false 602 | }, 603 | /* 3 combinators <- <(space (union / intersect / exclude / braces))> */ 604 | func() bool { 605 | position24, tokenIndex24 := position, tokenIndex 606 | { 607 | position25 := position 608 | if !_rules[rulespace]() { 609 | goto l24 610 | } 611 | { 612 | position26, tokenIndex26 := position, tokenIndex 613 | if !_rules[ruleunion]() { 614 | goto l27 615 | } 616 | goto l26 617 | l27: 618 | position, tokenIndex = position26, tokenIndex26 619 | if !_rules[ruleintersect]() { 620 | goto l28 621 | } 622 | goto l26 623 | l28: 624 | position, tokenIndex = position26, tokenIndex26 625 | if !_rules[ruleexclude]() { 626 | goto l29 627 | } 628 | goto l26 629 | l29: 630 | position, tokenIndex = position26, tokenIndex26 631 | if !_rules[rulebraces]() { 632 | goto l24 633 | } 634 | } 635 | l26: 636 | add(rulecombinators, position25) 637 | } 638 | return true 639 | l24: 640 | position, tokenIndex = position24, tokenIndex24 641 | return false 642 | }, 643 | /* 4 intersect <- <('&' rangeexpr Action1 combinators?)> */ 644 | func() bool { 645 | position30, tokenIndex30 := position, tokenIndex 646 | { 647 | position31 := position 648 | if buffer[position] != rune('&') { 649 | goto l30 650 | } 651 | position++ 652 | if !_rules[rulerangeexpr]() { 653 | goto l30 654 | } 655 | if !_rules[ruleAction1]() { 656 | goto l30 657 | } 658 | { 659 | position32, tokenIndex32 := position, tokenIndex 660 | if !_rules[rulecombinators]() { 661 | goto l32 662 | } 663 | goto l33 664 | l32: 665 | position, tokenIndex = position32, tokenIndex32 666 | } 667 | l33: 668 | add(ruleintersect, position31) 669 | } 670 | return true 671 | l30: 672 | position, tokenIndex = position30, tokenIndex30 673 | return false 674 | }, 675 | /* 5 exclude <- <('-' rangeexpr Action2 combinators?)> */ 676 | func() bool { 677 | position34, tokenIndex34 := position, tokenIndex 678 | { 679 | position35 := position 680 | if buffer[position] != rune('-') { 681 | goto l34 682 | } 683 | position++ 684 | if !_rules[rulerangeexpr]() { 685 | goto l34 686 | } 687 | if !_rules[ruleAction2]() { 688 | goto l34 689 | } 690 | { 691 | position36, tokenIndex36 := position, tokenIndex 692 | if !_rules[rulecombinators]() { 693 | goto l36 694 | } 695 | goto l37 696 | l36: 697 | position, tokenIndex = position36, tokenIndex36 698 | } 699 | l37: 700 | add(ruleexclude, position35) 701 | } 702 | return true 703 | l34: 704 | position, tokenIndex = position34, tokenIndex34 705 | return false 706 | }, 707 | /* 6 union <- <(',' rangeexpr Action3 combinators?)> */ 708 | func() bool { 709 | position38, tokenIndex38 := position, tokenIndex 710 | { 711 | position39 := position 712 | if buffer[position] != rune(',') { 713 | goto l38 714 | } 715 | position++ 716 | if !_rules[rulerangeexpr]() { 717 | goto l38 718 | } 719 | if !_rules[ruleAction3]() { 720 | goto l38 721 | } 722 | { 723 | position40, tokenIndex40 := position, tokenIndex 724 | if !_rules[rulecombinators]() { 725 | goto l40 726 | } 727 | goto l41 728 | l40: 729 | position, tokenIndex = position40, tokenIndex40 730 | } 731 | l41: 732 | add(ruleunion, position39) 733 | } 734 | return true 735 | l38: 736 | position, tokenIndex = position38, tokenIndex38 737 | return false 738 | }, 739 | /* 7 emptybraces <- <('{' space '}' Action4)> */ 740 | func() bool { 741 | position42, tokenIndex42 := position, tokenIndex 742 | { 743 | position43 := position 744 | if buffer[position] != rune('{') { 745 | goto l42 746 | } 747 | 
position++ 748 | if !_rules[rulespace]() { 749 | goto l42 750 | } 751 | if buffer[position] != rune('}') { 752 | goto l42 753 | } 754 | position++ 755 | if !_rules[ruleAction4]() { 756 | goto l42 757 | } 758 | add(ruleemptybraces, position43) 759 | } 760 | return true 761 | l42: 762 | position, tokenIndex = position42, tokenIndex42 763 | return false 764 | }, 765 | /* 8 emptybrackets <- <('(' space ')' Action5)> */ 766 | func() bool { 767 | position44, tokenIndex44 := position, tokenIndex 768 | { 769 | position45 := position 770 | if buffer[position] != rune('(') { 771 | goto l44 772 | } 773 | position++ 774 | if !_rules[rulespace]() { 775 | goto l44 776 | } 777 | if buffer[position] != rune(')') { 778 | goto l44 779 | } 780 | position++ 781 | if !_rules[ruleAction5]() { 782 | goto l44 783 | } 784 | add(ruleemptybrackets, position45) 785 | } 786 | return true 787 | l44: 788 | position, tokenIndex = position44, tokenIndex44 789 | return false 790 | }, 791 | /* 9 braces <- <('{' combinedexpr? '}' rangeexpr? Action6)> */ 792 | func() bool { 793 | position46, tokenIndex46 := position, tokenIndex 794 | { 795 | position47 := position 796 | if buffer[position] != rune('{') { 797 | goto l46 798 | } 799 | position++ 800 | { 801 | position48, tokenIndex48 := position, tokenIndex 802 | if !_rules[rulecombinedexpr]() { 803 | goto l48 804 | } 805 | goto l49 806 | l48: 807 | position, tokenIndex = position48, tokenIndex48 808 | } 809 | l49: 810 | if buffer[position] != rune('}') { 811 | goto l46 812 | } 813 | position++ 814 | { 815 | position50, tokenIndex50 := position, tokenIndex 816 | if !_rules[rulerangeexpr]() { 817 | goto l50 818 | } 819 | goto l51 820 | l50: 821 | position, tokenIndex = position50, tokenIndex50 822 | } 823 | l51: 824 | if !_rules[ruleAction6]() { 825 | goto l46 826 | } 827 | add(rulebraces, position47) 828 | } 829 | return true 830 | l46: 831 | position, tokenIndex = position46, tokenIndex46 832 | return false 833 | }, 834 | /* 10 brackets <- <('(' combinedexpr? ')')> */ 835 | func() bool { 836 | position52, tokenIndex52 := position, tokenIndex 837 | { 838 | position53 := position 839 | if buffer[position] != rune('(') { 840 | goto l52 841 | } 842 | position++ 843 | { 844 | position54, tokenIndex54 := position, tokenIndex 845 | if !_rules[rulecombinedexpr]() { 846 | goto l54 847 | } 848 | goto l55 849 | l54: 850 | position, tokenIndex = position54, tokenIndex54 851 | } 852 | l55: 853 | if buffer[position] != rune(')') { 854 | goto l52 855 | } 856 | position++ 857 | add(rulebrackets, position53) 858 | } 859 | return true 860 | l52: 861 | position, tokenIndex = position52, tokenIndex52 862 | return false 863 | }, 864 | /* 11 clusterq <- <('*' rangeexpr Action7)> */ 865 | func() bool { 866 | position56, tokenIndex56 := position, tokenIndex 867 | { 868 | position57 := position 869 | if buffer[position] != rune('*') { 870 | goto l56 871 | } 872 | position++ 873 | if !_rules[rulerangeexpr]() { 874 | goto l56 875 | } 876 | if !_rules[ruleAction7]() { 877 | goto l56 878 | } 879 | add(ruleclusterq, position57) 880 | } 881 | return true 882 | l56: 883 | position, tokenIndex = position56, tokenIndex56 884 | return false 885 | }, 886 | /* 12 groupq <- <('?' 
rangeexpr Action8)> */ 887 | func() bool { 888 | position58, tokenIndex58 := position, tokenIndex 889 | { 890 | position59 := position 891 | if buffer[position] != rune('?') { 892 | goto l58 893 | } 894 | position++ 895 | if !_rules[rulerangeexpr]() { 896 | goto l58 897 | } 898 | if !_rules[ruleAction8]() { 899 | goto l58 900 | } 901 | add(rulegroupq, position59) 902 | } 903 | return true 904 | l58: 905 | position, tokenIndex = position58, tokenIndex58 906 | return false 907 | }, 908 | /* 13 cluster <- <(('%' literal Action9 key?) / ('%' rangeexpr Action10 key?))> */ 909 | func() bool { 910 | position60, tokenIndex60 := position, tokenIndex 911 | { 912 | position61 := position 913 | { 914 | position62, tokenIndex62 := position, tokenIndex 915 | if buffer[position] != rune('%') { 916 | goto l63 917 | } 918 | position++ 919 | if !_rules[ruleliteral]() { 920 | goto l63 921 | } 922 | if !_rules[ruleAction9]() { 923 | goto l63 924 | } 925 | { 926 | position64, tokenIndex64 := position, tokenIndex 927 | if !_rules[rulekey]() { 928 | goto l64 929 | } 930 | goto l65 931 | l64: 932 | position, tokenIndex = position64, tokenIndex64 933 | } 934 | l65: 935 | goto l62 936 | l63: 937 | position, tokenIndex = position62, tokenIndex62 938 | if buffer[position] != rune('%') { 939 | goto l60 940 | } 941 | position++ 942 | if !_rules[rulerangeexpr]() { 943 | goto l60 944 | } 945 | if !_rules[ruleAction10]() { 946 | goto l60 947 | } 948 | { 949 | position66, tokenIndex66 := position, tokenIndex 950 | if !_rules[rulekey]() { 951 | goto l66 952 | } 953 | goto l67 954 | l66: 955 | position, tokenIndex = position66, tokenIndex66 956 | } 957 | l67: 958 | } 959 | l62: 960 | add(rulecluster, position61) 961 | } 962 | return true 963 | l60: 964 | position, tokenIndex = position60, tokenIndex60 965 | return false 966 | }, 967 | /* 14 group <- <('@' rangeexpr Action11)> */ 968 | func() bool { 969 | position68, tokenIndex68 := position, tokenIndex 970 | { 971 | position69 := position 972 | if buffer[position] != rune('@') { 973 | goto l68 974 | } 975 | position++ 976 | if !_rules[rulerangeexpr]() { 977 | goto l68 978 | } 979 | if !_rules[ruleAction11]() { 980 | goto l68 981 | } 982 | add(rulegroup, position69) 983 | } 984 | return true 985 | l68: 986 | position, tokenIndex = position68, tokenIndex68 987 | return false 988 | }, 989 | /* 15 key <- <(':' rangeexpr Action12)> */ 990 | func() bool { 991 | position70, tokenIndex70 := position, tokenIndex 992 | { 993 | position71 := position 994 | if buffer[position] != rune(':') { 995 | goto l70 996 | } 997 | position++ 998 | if !_rules[rulerangeexpr]() { 999 | goto l70 1000 | } 1001 | if !_rules[ruleAction12]() { 1002 | goto l70 1003 | } 1004 | add(rulekey, position71) 1005 | } 1006 | return true 1007 | l70: 1008 | position, tokenIndex = position70, tokenIndex70 1009 | return false 1010 | }, 1011 | /* 16 localkey <- <('$' rangeexpr Action13)> */ 1012 | func() bool { 1013 | position72, tokenIndex72 := position, tokenIndex 1014 | { 1015 | position73 := position 1016 | if buffer[position] != rune('$') { 1017 | goto l72 1018 | } 1019 | position++ 1020 | if !_rules[rulerangeexpr]() { 1021 | goto l72 1022 | } 1023 | if !_rules[ruleAction13]() { 1024 | goto l72 1025 | } 1026 | add(rulelocalkey, position73) 1027 | } 1028 | return true 1029 | l72: 1030 | position, tokenIndex = position72, tokenIndex72 1031 | return false 1032 | }, 1033 | /* 17 function <- <(literal Action14 '(' funcargs ')')> */ 1034 | func() bool { 1035 | position74, tokenIndex74 := position, tokenIndex 1036 | { 
1037 | position75 := position 1038 | if !_rules[ruleliteral]() { 1039 | goto l74 1040 | } 1041 | if !_rules[ruleAction14]() { 1042 | goto l74 1043 | } 1044 | if buffer[position] != rune('(') { 1045 | goto l74 1046 | } 1047 | position++ 1048 | if !_rules[rulefuncargs]() { 1049 | goto l74 1050 | } 1051 | if buffer[position] != rune(')') { 1052 | goto l74 1053 | } 1054 | position++ 1055 | add(rulefunction, position75) 1056 | } 1057 | return true 1058 | l74: 1059 | position, tokenIndex = position74, tokenIndex74 1060 | return false 1061 | }, 1062 | /* 18 funcargs <- <((combinedexpr? Action15 ';' funcargs) / (combinedexpr? Action16))> */ 1063 | func() bool { 1064 | position76, tokenIndex76 := position, tokenIndex 1065 | { 1066 | position77 := position 1067 | { 1068 | position78, tokenIndex78 := position, tokenIndex 1069 | { 1070 | position80, tokenIndex80 := position, tokenIndex 1071 | if !_rules[rulecombinedexpr]() { 1072 | goto l80 1073 | } 1074 | goto l81 1075 | l80: 1076 | position, tokenIndex = position80, tokenIndex80 1077 | } 1078 | l81: 1079 | if !_rules[ruleAction15]() { 1080 | goto l79 1081 | } 1082 | if buffer[position] != rune(';') { 1083 | goto l79 1084 | } 1085 | position++ 1086 | if !_rules[rulefuncargs]() { 1087 | goto l79 1088 | } 1089 | goto l78 1090 | l79: 1091 | position, tokenIndex = position78, tokenIndex78 1092 | { 1093 | position82, tokenIndex82 := position, tokenIndex 1094 | if !_rules[rulecombinedexpr]() { 1095 | goto l82 1096 | } 1097 | goto l83 1098 | l82: 1099 | position, tokenIndex = position82, tokenIndex82 1100 | } 1101 | l83: 1102 | if !_rules[ruleAction16]() { 1103 | goto l76 1104 | } 1105 | } 1106 | l78: 1107 | add(rulefuncargs, position77) 1108 | } 1109 | return true 1110 | l76: 1111 | position, tokenIndex = position76, tokenIndex76 1112 | return false 1113 | }, 1114 | /* 19 regex <- <('/' <(!'/' .)*> '/' Action17)> */ 1115 | func() bool { 1116 | position84, tokenIndex84 := position, tokenIndex 1117 | { 1118 | position85 := position 1119 | if buffer[position] != rune('/') { 1120 | goto l84 1121 | } 1122 | position++ 1123 | { 1124 | position86 := position 1125 | l87: 1126 | { 1127 | position88, tokenIndex88 := position, tokenIndex 1128 | { 1129 | position89, tokenIndex89 := position, tokenIndex 1130 | if buffer[position] != rune('/') { 1131 | goto l89 1132 | } 1133 | position++ 1134 | goto l88 1135 | l89: 1136 | position, tokenIndex = position89, tokenIndex89 1137 | } 1138 | if !matchDot() { 1139 | goto l88 1140 | } 1141 | goto l87 1142 | l88: 1143 | position, tokenIndex = position88, tokenIndex88 1144 | } 1145 | add(rulePegText, position86) 1146 | } 1147 | if buffer[position] != rune('/') { 1148 | goto l84 1149 | } 1150 | position++ 1151 | if !_rules[ruleAction17]() { 1152 | goto l84 1153 | } 1154 | add(ruleregex, position85) 1155 | } 1156 | return true 1157 | l84: 1158 | position, tokenIndex = position84, tokenIndex84 1159 | return false 1160 | }, 1161 | /* 20 literal <- <<(leaderChar ([a-z] / [A-Z] / ([0-9] / [0-9]) / '-' / '_' / '.')*)>> */ 1162 | func() bool { 1163 | position90, tokenIndex90 := position, tokenIndex 1164 | { 1165 | position91 := position 1166 | { 1167 | position92 := position 1168 | if !_rules[ruleleaderChar]() { 1169 | goto l90 1170 | } 1171 | l93: 1172 | { 1173 | position94, tokenIndex94 := position, tokenIndex 1174 | { 1175 | position95, tokenIndex95 := position, tokenIndex 1176 | if c := buffer[position]; c < rune('a') || c > rune('z') { 1177 | goto l96 1178 | } 1179 | position++ 1180 | goto l95 1181 | l96: 1182 | position, tokenIndex 
= position95, tokenIndex95 1183 | if c := buffer[position]; c < rune('A') || c > rune('Z') { 1184 | goto l97 1185 | } 1186 | position++ 1187 | goto l95 1188 | l97: 1189 | position, tokenIndex = position95, tokenIndex95 1190 | { 1191 | position99, tokenIndex99 := position, tokenIndex 1192 | if c := buffer[position]; c < rune('0') || c > rune('9') { 1193 | goto l100 1194 | } 1195 | position++ 1196 | goto l99 1197 | l100: 1198 | position, tokenIndex = position99, tokenIndex99 1199 | if c := buffer[position]; c < rune('0') || c > rune('9') { 1200 | goto l98 1201 | } 1202 | position++ 1203 | } 1204 | l99: 1205 | goto l95 1206 | l98: 1207 | position, tokenIndex = position95, tokenIndex95 1208 | if buffer[position] != rune('-') { 1209 | goto l101 1210 | } 1211 | position++ 1212 | goto l95 1213 | l101: 1214 | position, tokenIndex = position95, tokenIndex95 1215 | if buffer[position] != rune('_') { 1216 | goto l102 1217 | } 1218 | position++ 1219 | goto l95 1220 | l102: 1221 | position, tokenIndex = position95, tokenIndex95 1222 | if buffer[position] != rune('.') { 1223 | goto l94 1224 | } 1225 | position++ 1226 | } 1227 | l95: 1228 | goto l93 1229 | l94: 1230 | position, tokenIndex = position94, tokenIndex94 1231 | } 1232 | add(rulePegText, position92) 1233 | } 1234 | add(ruleliteral, position91) 1235 | } 1236 | return true 1237 | l90: 1238 | position, tokenIndex = position90, tokenIndex90 1239 | return false 1240 | }, 1241 | /* 21 value <- <(<(leaderChar (':' / ([a-z] / [A-Z]) / ([0-9] / [0-9]) / '-' / '_' / '.')*)> Action18)> */ 1242 | func() bool { 1243 | position103, tokenIndex103 := position, tokenIndex 1244 | { 1245 | position104 := position 1246 | { 1247 | position105 := position 1248 | if !_rules[ruleleaderChar]() { 1249 | goto l103 1250 | } 1251 | l106: 1252 | { 1253 | position107, tokenIndex107 := position, tokenIndex 1254 | { 1255 | position108, tokenIndex108 := position, tokenIndex 1256 | if buffer[position] != rune(':') { 1257 | goto l109 1258 | } 1259 | position++ 1260 | goto l108 1261 | l109: 1262 | position, tokenIndex = position108, tokenIndex108 1263 | { 1264 | position111, tokenIndex111 := position, tokenIndex 1265 | if c := buffer[position]; c < rune('a') || c > rune('z') { 1266 | goto l112 1267 | } 1268 | position++ 1269 | goto l111 1270 | l112: 1271 | position, tokenIndex = position111, tokenIndex111 1272 | if c := buffer[position]; c < rune('A') || c > rune('Z') { 1273 | goto l110 1274 | } 1275 | position++ 1276 | } 1277 | l111: 1278 | goto l108 1279 | l110: 1280 | position, tokenIndex = position108, tokenIndex108 1281 | { 1282 | position114, tokenIndex114 := position, tokenIndex 1283 | if c := buffer[position]; c < rune('0') || c > rune('9') { 1284 | goto l115 1285 | } 1286 | position++ 1287 | goto l114 1288 | l115: 1289 | position, tokenIndex = position114, tokenIndex114 1290 | if c := buffer[position]; c < rune('0') || c > rune('9') { 1291 | goto l113 1292 | } 1293 | position++ 1294 | } 1295 | l114: 1296 | goto l108 1297 | l113: 1298 | position, tokenIndex = position108, tokenIndex108 1299 | if buffer[position] != rune('-') { 1300 | goto l116 1301 | } 1302 | position++ 1303 | goto l108 1304 | l116: 1305 | position, tokenIndex = position108, tokenIndex108 1306 | if buffer[position] != rune('_') { 1307 | goto l117 1308 | } 1309 | position++ 1310 | goto l108 1311 | l117: 1312 | position, tokenIndex = position108, tokenIndex108 1313 | if buffer[position] != rune('.') { 1314 | goto l107 1315 | } 1316 | position++ 1317 | } 1318 | l108: 1319 | goto l106 1320 | l107: 1321 | 
position, tokenIndex = position107, tokenIndex107 1322 | } 1323 | add(rulePegText, position105) 1324 | } 1325 | if !_rules[ruleAction18]() { 1326 | goto l103 1327 | } 1328 | add(rulevalue, position104) 1329 | } 1330 | return true 1331 | l103: 1332 | position, tokenIndex = position103, tokenIndex103 1333 | return false 1334 | }, 1335 | /* 22 leaderChar <- <([a-z] / [A-Z] / ([0-9] / [0-9]) / '.' / '_')> */ 1336 | func() bool { 1337 | position118, tokenIndex118 := position, tokenIndex 1338 | { 1339 | position119 := position 1340 | { 1341 | position120, tokenIndex120 := position, tokenIndex 1342 | if c := buffer[position]; c < rune('a') || c > rune('z') { 1343 | goto l121 1344 | } 1345 | position++ 1346 | goto l120 1347 | l121: 1348 | position, tokenIndex = position120, tokenIndex120 1349 | if c := buffer[position]; c < rune('A') || c > rune('Z') { 1350 | goto l122 1351 | } 1352 | position++ 1353 | goto l120 1354 | l122: 1355 | position, tokenIndex = position120, tokenIndex120 1356 | { 1357 | position124, tokenIndex124 := position, tokenIndex 1358 | if c := buffer[position]; c < rune('0') || c > rune('9') { 1359 | goto l125 1360 | } 1361 | position++ 1362 | goto l124 1363 | l125: 1364 | position, tokenIndex = position124, tokenIndex124 1365 | if c := buffer[position]; c < rune('0') || c > rune('9') { 1366 | goto l123 1367 | } 1368 | position++ 1369 | } 1370 | l124: 1371 | goto l120 1372 | l123: 1373 | position, tokenIndex = position120, tokenIndex120 1374 | if buffer[position] != rune('.') { 1375 | goto l126 1376 | } 1377 | position++ 1378 | goto l120 1379 | l126: 1380 | position, tokenIndex = position120, tokenIndex120 1381 | if buffer[position] != rune('_') { 1382 | goto l118 1383 | } 1384 | position++ 1385 | } 1386 | l120: 1387 | add(ruleleaderChar, position119) 1388 | } 1389 | return true 1390 | l118: 1391 | position, tokenIndex = position118, tokenIndex118 1392 | return false 1393 | }, 1394 | /* 23 space <- <' '*> */ 1395 | func() bool { 1396 | { 1397 | position128 := position 1398 | l129: 1399 | { 1400 | position130, tokenIndex130 := position, tokenIndex 1401 | if buffer[position] != rune(' ') { 1402 | goto l130 1403 | } 1404 | position++ 1405 | goto l129 1406 | l130: 1407 | position, tokenIndex = position130, tokenIndex130 1408 | } 1409 | add(rulespace, position128) 1410 | } 1411 | return true 1412 | }, 1413 | /* 24 const <- <(q / quoted)> */ 1414 | func() bool { 1415 | position131, tokenIndex131 := position, tokenIndex 1416 | { 1417 | position132 := position 1418 | { 1419 | position133, tokenIndex133 := position, tokenIndex 1420 | if !_rules[ruleq]() { 1421 | goto l134 1422 | } 1423 | goto l133 1424 | l134: 1425 | position, tokenIndex = position133, tokenIndex133 1426 | if !_rules[rulequoted]() { 1427 | goto l131 1428 | } 1429 | } 1430 | l133: 1431 | add(ruleconst, position132) 1432 | } 1433 | return true 1434 | l131: 1435 | position, tokenIndex = position131, tokenIndex131 1436 | return false 1437 | }, 1438 | /* 25 q <- <('q' '(' <(!')' .)*> ')' Action19)> */ 1439 | func() bool { 1440 | position135, tokenIndex135 := position, tokenIndex 1441 | { 1442 | position136 := position 1443 | if buffer[position] != rune('q') { 1444 | goto l135 1445 | } 1446 | position++ 1447 | if buffer[position] != rune('(') { 1448 | goto l135 1449 | } 1450 | position++ 1451 | { 1452 | position137 := position 1453 | l138: 1454 | { 1455 | position139, tokenIndex139 := position, tokenIndex 1456 | { 1457 | position140, tokenIndex140 := position, tokenIndex 1458 | if buffer[position] != rune(')') { 1459 | goto 
l140 1460 | } 1461 | position++ 1462 | goto l139 1463 | l140: 1464 | position, tokenIndex = position140, tokenIndex140 1465 | } 1466 | if !matchDot() { 1467 | goto l139 1468 | } 1469 | goto l138 1470 | l139: 1471 | position, tokenIndex = position139, tokenIndex139 1472 | } 1473 | add(rulePegText, position137) 1474 | } 1475 | if buffer[position] != rune(')') { 1476 | goto l135 1477 | } 1478 | position++ 1479 | if !_rules[ruleAction19]() { 1480 | goto l135 1481 | } 1482 | add(ruleq, position136) 1483 | } 1484 | return true 1485 | l135: 1486 | position, tokenIndex = position135, tokenIndex135 1487 | return false 1488 | }, 1489 | /* 26 quoted <- <('"' <(!'"' .)*> '"' Action20)> */ 1490 | func() bool { 1491 | position141, tokenIndex141 := position, tokenIndex 1492 | { 1493 | position142 := position 1494 | if buffer[position] != rune('"') { 1495 | goto l141 1496 | } 1497 | position++ 1498 | { 1499 | position143 := position 1500 | l144: 1501 | { 1502 | position145, tokenIndex145 := position, tokenIndex 1503 | { 1504 | position146, tokenIndex146 := position, tokenIndex 1505 | if buffer[position] != rune('"') { 1506 | goto l146 1507 | } 1508 | position++ 1509 | goto l145 1510 | l146: 1511 | position, tokenIndex = position146, tokenIndex146 1512 | } 1513 | if !matchDot() { 1514 | goto l145 1515 | } 1516 | goto l144 1517 | l145: 1518 | position, tokenIndex = position145, tokenIndex145 1519 | } 1520 | add(rulePegText, position143) 1521 | } 1522 | if buffer[position] != rune('"') { 1523 | goto l141 1524 | } 1525 | position++ 1526 | if !_rules[ruleAction20]() { 1527 | goto l141 1528 | } 1529 | add(rulequoted, position142) 1530 | } 1531 | return true 1532 | l141: 1533 | position, tokenIndex = position141, tokenIndex141 1534 | return false 1535 | }, 1536 | /* 28 Action0 <- <{ p.addBraceStart() }> */ 1537 | func() bool { 1538 | { 1539 | add(ruleAction0, position) 1540 | } 1541 | return true 1542 | }, 1543 | /* 29 Action1 <- <{ p.addOperator(operatorIntersect) }> */ 1544 | func() bool { 1545 | { 1546 | add(ruleAction1, position) 1547 | } 1548 | return true 1549 | }, 1550 | /* 30 Action2 <- <{ p.addOperator(operatorSubtract) }> */ 1551 | func() bool { 1552 | { 1553 | add(ruleAction2, position) 1554 | } 1555 | return true 1556 | }, 1557 | /* 31 Action3 <- <{ p.addOperator(operatorUnion) }> */ 1558 | func() bool { 1559 | { 1560 | add(ruleAction3, position) 1561 | } 1562 | return true 1563 | }, 1564 | /* 32 Action4 <- <{ p.pushNode(nodeNull{}) }> */ 1565 | func() bool { 1566 | { 1567 | add(ruleAction4, position) 1568 | } 1569 | return true 1570 | }, 1571 | /* 33 Action5 <- <{ p.pushNode(nodeNull{}) }> */ 1572 | func() bool { 1573 | { 1574 | add(ruleAction5, position) 1575 | } 1576 | return true 1577 | }, 1578 | /* 34 Action6 <- <{ p.addBraces() }> */ 1579 | func() bool { 1580 | { 1581 | add(ruleAction6, position) 1582 | } 1583 | return true 1584 | }, 1585 | /* 35 Action7 <- <{ p.addClusterQuery() }> */ 1586 | func() bool { 1587 | { 1588 | add(ruleAction7, position) 1589 | } 1590 | return true 1591 | }, 1592 | /* 36 Action8 <- <{ p.addGroupQuery() }> */ 1593 | func() bool { 1594 | { 1595 | add(ruleAction8, position) 1596 | } 1597 | return true 1598 | }, 1599 | /* 37 Action9 <- <{ p.addValue(buffer[begin:end]); p.addClusterLookup() }> */ 1600 | func() bool { 1601 | { 1602 | add(ruleAction9, position) 1603 | } 1604 | return true 1605 | }, 1606 | /* 38 Action10 <- <{ p.addClusterLookup() }> */ 1607 | func() bool { 1608 | { 1609 | add(ruleAction10, position) 1610 | } 1611 | return true 1612 | }, 1613 | /* 39 
Action11 <- <{ p.addGroupLookup() }> */ 1614 | func() bool { 1615 | { 1616 | add(ruleAction11, position) 1617 | } 1618 | return true 1619 | }, 1620 | /* 40 Action12 <- <{ p.addKeyLookup() }> */ 1621 | func() bool { 1622 | { 1623 | add(ruleAction12, position) 1624 | } 1625 | return true 1626 | }, 1627 | /* 41 Action13 <- <{ p.addLocalClusterLookup() }> */ 1628 | func() bool { 1629 | { 1630 | add(ruleAction13, position) 1631 | } 1632 | return true 1633 | }, 1634 | /* 42 Action14 <- <{ p.addFunction(buffer[begin:end]) }> */ 1635 | func() bool { 1636 | { 1637 | add(ruleAction14, position) 1638 | } 1639 | return true 1640 | }, 1641 | /* 43 Action15 <- <{ p.addFuncArg() }> */ 1642 | func() bool { 1643 | { 1644 | add(ruleAction15, position) 1645 | } 1646 | return true 1647 | }, 1648 | /* 44 Action16 <- <{ p.addFuncArg() }> */ 1649 | func() bool { 1650 | { 1651 | add(ruleAction16, position) 1652 | } 1653 | return true 1654 | }, 1655 | nil, 1656 | /* 46 Action17 <- <{ p.addRegex(buffer[begin:end]) }> */ 1657 | func() bool { 1658 | { 1659 | add(ruleAction17, position) 1660 | } 1661 | return true 1662 | }, 1663 | /* 47 Action18 <- <{ p.addValue(buffer[begin:end]) }> */ 1664 | func() bool { 1665 | { 1666 | add(ruleAction18, position) 1667 | } 1668 | return true 1669 | }, 1670 | /* 48 Action19 <- <{ p.addConstant(buffer[begin:end]) }> */ 1671 | func() bool { 1672 | { 1673 | add(ruleAction19, position) 1674 | } 1675 | return true 1676 | }, 1677 | /* 49 Action20 <- <{ p.addConstant(buffer[begin:end]) }> */ 1678 | func() bool { 1679 | { 1680 | add(ruleAction20, position) 1681 | } 1682 | return true 1683 | }, 1684 | } 1685 | p.rules = _rules 1686 | } 1687 | -------------------------------------------------------------------------------- /script/ci: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | 3 | export RANGE_SPEC_PATH=/tmp/range-spec 4 | git clone https://github.com/square/range-spec.git $RANGE_SPEC_PATH 5 | 6 | # pinning upstream spec repo 7 | pushd $RANGE_SPEC_PATH; 8 | git checkout 54769a7ebb70dc1b51af0c51317a61c1f76e6f03 9 | popd 10 | go test 11 | --------------------------------------------------------------------------------
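The tests and benchmarks earlier in this snapshot exercise the library's public entry points: build a state with NewState, load data with AddCluster, then evaluate range expressions with Query. The sketch below is a minimal standalone program assuming only those calls and the Cluster type as they appear above; the module path is taken from go.mod, while the cluster name, node names, and the has(TYPE;redis) expression are illustrative placeholders rather than anything defined in the repository.

    package main

    import (
        "fmt"

        "github.com/square/grange"
    )

    func main() {
        // Build an in-memory state and register one cluster, mirroring the
        // setup used by BenchmarkHas above. Cluster keys map to lists of values.
        state := grange.NewState()
        state.AddCluster("mycluster", grange.Cluster{
            "CLUSTER": []string{"node1", "node2"},
            "TYPE":    []string{"redis"},
        })

        // Evaluate a range expression against that state; Query returns the
        // matching result set and any parse or evaluation error.
        result, err := state.Query("has(TYPE;redis)")
        if err != nil {
            fmt.Println("query error:", err)
            return
        }
        fmt.Println(result)
    }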