├── .gitignore
├── parse
│   ├── itemtype_string.go
│   ├── items.go
│   ├── parser_test.go
│   ├── node.go
│   ├── parser.go
│   ├── lexer.go
│   └── lexer_test.go
├── README.md
├── LICENSE
├── executor.go
└── executor_test.go
/.gitignore:
--------------------------------------------------------------------------------
1 | # Compiled Object files, Static and Dynamic libs (Shared Objects)
2 | *.o
3 | *.a
4 | *.so
5 | 
6 | # Folders
7 | _obj
8 | _test
9 | 
10 | # Architecture specific extensions/prefixes
11 | *.[568vq]
12 | [568vq].out
13 | 
14 | *.cgo1.go
15 | *.cgo2.c
16 | _cgo_defun.c
17 | _cgo_gotypes.go
18 | _cgo_export.*
19 | 
20 | _testmain.go
21 | 
22 | *.exe
23 | *.test
24 | *.prof
25 | 
--------------------------------------------------------------------------------
/parse/itemtype_string.go:
--------------------------------------------------------------------------------
1 | // generated by stringer -type itemType; DO NOT EDIT
2 | 
3 | package parse
4 | 
5 | import "fmt"
6 | 
7 | const _itemType_name = "itemErroritemObjNameitemDotitemFunctionitemLeftBraceitemFnArgumentitemRightBraceitemLeftCurlyitemFieldNameitemCommaitemRightCurlyitemEOF"
8 | 
9 | var _itemType_index = [...]uint8{0, 9, 20, 27, 39, 52, 66, 80, 93, 106, 115, 129, 136}
10 | 
11 | func (i itemType) String() string {
12 | 	if i < 0 || i+1 >= itemType(len(_itemType_index)) {
13 | 		return fmt.Sprintf("itemType(%d)", i)
14 | 	}
15 | 	return _itemType_name[_itemType_index[i]:_itemType_index[i+1]]
16 | }
17 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # graphql
2 | 
3 | [![GoDoc](https://godoc.org/github.com/cryptix/go-graphql?status.svg)](https://godoc.org/github.com/cryptix/go-graphql)
4 | 
5 | GraphQL packages for Go.
6 | 
7 | ## Notes
8 | 
9 | I started this as a fun project when Facebook first announced GraphQL; everything here is pieced together from the announcement slides.
10 | 
11 | My commitment to this is still _fun only_, so don't expect a feature-complete implementation anytime soon.
12 | 
13 | 
14 | ## Refs
15 | 
16 | * [announcement talk gist notes](https://gist.github.com/wincent/598fa75e22bdfa44cf47)
17 | * [working draft](https://facebook.github.io/graphql/) (Released Jul 02 2015)
18 | 
--------------------------------------------------------------------------------
/parse/items.go:
--------------------------------------------------------------------------------
1 | package parse
2 | 
3 | import "fmt"
4 | 
5 | //go:generate stringer -type itemType
6 | type itemType int
7 | 
8 | const (
9 | 	itemError itemType = iota
10 | 
11 | 	itemObjName // e.g. 'node'
12 | 	itemDot // .
13 | itemFunction // first 14 | itemLeftBrace // ( 15 | itemFnArgument // 123 16 | itemRightBrace // ) 17 | itemLeftCurly // { 18 | itemFieldName // id 19 | itemComma // , 20 | itemRightCurly // } 21 | itemEOF 22 | ) 23 | 24 | type item struct { 25 | typ itemType // Type, 26 | val string 27 | } 28 | 29 | func (i item) String() string { 30 | switch i.typ { 31 | case itemEOF: 32 | return "EOF" 33 | case itemError: 34 | return i.val 35 | } 36 | if len(i.val) > 10 { 37 | return fmt.Sprintf("<%-14s> %.10q...", i.typ, i.val) 38 | } 39 | return fmt.Sprintf("<%-14s> %q", i.typ, i.val) 40 | } 41 | -------------------------------------------------------------------------------- /parse/parser_test.go: -------------------------------------------------------------------------------- 1 | package parse 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stretchr/testify/require" 7 | ) 8 | 9 | func TestQuery_simple(t *testing.T) { 10 | n, err := Parse(`node(){id,name}`) 11 | require.Nil(t, err) 12 | require.NotNil(t, n) 13 | require.Equal(t, "node", n.Name()) 14 | require.Len(t, n.FieldNames(), 2) 15 | require.Equal(t, []string{"id", "name"}, n.FieldNames()) 16 | } 17 | 18 | func TestQuery_nested(t *testing.T) { 19 | r := require.New(t) 20 | n, err := Parse(`node(){id,name,obj{a,b}}`) 21 | r.Nil(err) 22 | r.NotNil(n) 23 | r.Equal("node", n.Name()) 24 | r.Equal([]string{"id", "name", "obj"}, n.FieldNames()) 25 | r.Equal([]string{"id", "name"}, n.PlainFields()) 26 | obj, ok := n.Field("obj") 27 | r.True(ok) 28 | r.Equal([]string{"a", "b"}, obj.FieldNames()) 29 | obj, ok = n.Field("nonExistant") 30 | r.False(ok) 31 | r.Nil(obj) 32 | } 33 | -------------------------------------------------------------------------------- /parse/node.go: -------------------------------------------------------------------------------- 1 | package parse 2 | 3 | import ( 4 | "errors" 5 | "sort" 6 | "strconv" 7 | ) 8 | 9 | type Node struct { 10 | name string 11 | arg string 12 | fields map[string]*Node 13 | } 14 | 15 | func (n Node) ID() (int64, error) { 16 | if n.arg == "" { 17 | return -1, errors.New("no id") 18 | } 19 | return strconv.ParseInt(n.arg, 10, 64) 20 | } 21 | 22 | func (n Node) Name() string { 23 | return n.name 24 | } 25 | 26 | func (n Node) FieldNames() []string { 27 | names := make([]string, len(n.fields)) 28 | i := 0 29 | for _, v := range n.fields { 30 | names[i] = v.Name() 31 | i++ 32 | } 33 | sort.Strings(names) 34 | return names 35 | } 36 | 37 | func (n *Node) Field(name string) (*Node, bool) { 38 | v, ok := n.fields[name] 39 | return v, ok 40 | } 41 | 42 | func (n *Node) PlainFields() []string { 43 | names := make([]string, len(n.fields)) 44 | i := 0 45 | for _, v := range n.fields { 46 | if v.fields != nil { 47 | continue 48 | } 49 | names[i] = v.Name() 50 | i++ 51 | } 52 | names = names[:i] 53 | sort.Strings(names) 54 | return names 55 | } 56 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 Henry 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the 
following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /executor.go: -------------------------------------------------------------------------------- 1 | package graphql 2 | 3 | import ( 4 | "encoding/json" 5 | "io" 6 | "io/ioutil" 7 | "net/http" 8 | 9 | "github.com/cryptix/go-graphql/parse" 10 | ) 11 | 12 | type Data map[string]interface{} 13 | 14 | type Store interface { 15 | Get(int64, []string) (Data, error) 16 | } 17 | 18 | type Executor struct { 19 | stores map[string]Store 20 | } 21 | 22 | func NewExecutor() *Executor { 23 | return &Executor{ 24 | stores: make(map[string]Store), 25 | } 26 | } 27 | 28 | func (e *Executor) Register(name string, s Store) error { 29 | // BUG(cryptix) dont replace 30 | e.stores[name] = s 31 | return nil 32 | } 33 | 34 | func (e *Executor) ServeHTTP(rw http.ResponseWriter, req *http.Request) { 35 | txt, err := ioutil.ReadAll(io.LimitReader(req.Body, 512*1024)) // who would ever need more than half a meg of query string... ;) 36 | if err != nil { 37 | http.Error(rw, err.Error(), http.StatusInternalServerError) 38 | return 39 | } 40 | qry, err := parse.Parse(string(txt)) 41 | if err != nil { 42 | http.Error(rw, err.Error(), http.StatusInternalServerError) 43 | return 44 | } 45 | store, ok := e.stores[qry.Name()] 46 | if !ok { 47 | http.Error(rw, "store not registerd", http.StatusNotFound) 48 | return 49 | } 50 | id, err := qry.ID() 51 | if err != nil { 52 | http.Error(rw, err.Error(), http.StatusInternalServerError) 53 | return 54 | } 55 | data, err := store.Get(id, qry.PlainFields()) 56 | if err != nil { 57 | http.Error(rw, err.Error(), http.StatusInternalServerError) 58 | return 59 | } 60 | err = json.NewEncoder(rw).Encode(data) 61 | if err != nil { 62 | http.Error(rw, err.Error(), http.StatusInternalServerError) 63 | return 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /parse/parser.go: -------------------------------------------------------------------------------- 1 | /* 2 | Package parse implements a lexer and parser for graphql - facebooks query language for relay/react 3 | 4 | TODO 5 | 6 | add functions of nodes - eg: .after() 7 | */ 8 | package parse 9 | 10 | import ( 11 | "errors" 12 | "fmt" 13 | ) 14 | 15 | func Parse(qry string) (*Node, error) { 16 | n := new(Node) 17 | l := lex("base", qry) 18 | parseLoop: 19 | for { 20 | i := <-l.items 21 | switch i.typ { 22 | case itemObjName: 23 | n = new(Node) 24 | n.fields = make(map[string]*Node) 25 | n.name = i.val 26 | case itemLeftBrace: 27 | if n == nil { 28 | return nil, errors.New("graphql: no root node") 29 | } 30 | i = <-l.items 31 | // no argument, return 32 | if i.typ == itemRightBrace { 33 | continue 34 | } 35 | if i.typ != itemFnArgument { 36 | return nil, errors.New("graphql: expected fnArgument") 37 | } 38 | n.arg = i.val 39 | i = <-l.items 40 | if 
i.typ != itemRightBrace { 41 | return nil, errors.New("graphql: expected rightBrace") 42 | } 43 | case itemLeftCurly: 44 | err := addNode(n, l.items) 45 | if err != nil { 46 | return nil, err 47 | } 48 | case itemError: 49 | return nil, fmt.Errorf("graphql: parse error: %s", i.val) 50 | case itemEOF: 51 | break parseLoop 52 | default: 53 | return nil, fmt.Errorf("graphql: parse - unhandled item type %v", i) 54 | } 55 | } 56 | return n, nil 57 | } 58 | 59 | func addNode(n *Node, items <-chan item) error { 60 | var newNode *Node 61 | for i := range items { 62 | switch i.typ { 63 | case itemComma: 64 | case itemFieldName: 65 | newNode = new(Node) 66 | newNode.name = i.val 67 | n.fields[i.val] = newNode 68 | case itemLeftCurly: 69 | if newNode == nil { 70 | return fmt.Errorf("graphql: addNode() - illegal leftCurly, field nil. %v", i) 71 | } 72 | newNode.fields = make(map[string]*Node) 73 | if err := addNode(newNode, items); err != nil { 74 | return err 75 | } 76 | case itemRightCurly: 77 | return nil 78 | default: 79 | return fmt.Errorf("graphql: addNode() - unhandled item type %v", i) 80 | } 81 | } 82 | panic("not reached") 83 | } 84 | -------------------------------------------------------------------------------- /executor_test.go: -------------------------------------------------------------------------------- 1 | package graphql 2 | 3 | import ( 4 | "encoding/json" 5 | "errors" 6 | "fmt" 7 | "net/http" 8 | "net/http/httptest" 9 | "strings" 10 | "testing" 11 | 12 | "github.com/stretchr/testify/assert" 13 | "github.com/stretchr/testify/require" 14 | ) 15 | 16 | func TestGet_simple(t *testing.T) { 17 | store := peopleStore{ 18 | data: map[int64]person{ 19 | 123: person{123, "Frank", 23, "Green"}, 20 | 666: person{-1, "Devil", 2015, "Red"}, 21 | }, 22 | } 23 | exe := NewExecutor() 24 | if err := exe.Register("people", &store); err != nil { 25 | t.Fatalf("exe.Register() Err: %q", err) 26 | } 27 | cases := []struct { 28 | qry, res string 29 | err bool 30 | }{ 31 | { 32 | qry: `unknown(1){id,name}`, 33 | res: "store not registerd\n", 34 | err: true, 35 | }, 36 | { 37 | qry: `people(1){id,name}`, 38 | res: "not found\n", 39 | err: true, 40 | }, 41 | { 42 | qry: `people(123){id,name}`, 43 | res: `{"id":123,"name":"Frank"}` + "\n", 44 | }, 45 | { 46 | qry: `people(123){age,haircolor}`, 47 | res: `{"age":23,"haircolor":"Green"}` + "\n", 48 | }, 49 | { 50 | qry: `people(666){name,age}`, 51 | res: `{"age":2015,"name":"Devil"}` + "\n", 52 | }, 53 | } 54 | for _, c := range cases { 55 | req, err := http.NewRequest("GET", "/", strings.NewReader(c.qry)) 56 | if err != nil { 57 | t.Fatalf("NewRequest Err: %q", err) 58 | } 59 | rw := httptest.NewRecorder() 60 | exe.ServeHTTP(rw, req) 61 | if !c.err { 62 | assert.Equal(t, http.StatusOK, rw.Code) 63 | } 64 | require.Equal(t, c.res, rw.Body.String()) 65 | } 66 | } 67 | 68 | // totally naive test store 69 | type person struct { 70 | Id int64 `json:"id"` 71 | Name string `json:"name"` 72 | Age int `json:"age"` 73 | HairColor string `json:"haircolor"` 74 | } 75 | 76 | type peopleStore struct { 77 | data map[int64]person 78 | } 79 | 80 | func (ps *peopleStore) Get(id int64, fields []string) (Data, error) { 81 | p, ok := ps.data[id] 82 | if !ok { 83 | return nil, errors.New("not found") 84 | } 85 | // lazy way to make map with only the required keys 86 | jsbytes, err := json.Marshal(&p) 87 | if err != nil { 88 | return nil, err 89 | } 90 | var tmp Data 91 | err = json.Unmarshal(jsbytes, &tmp) 92 | if err != nil { 93 | return nil, err 94 | } 95 | d := make(Data) 96 
| for _, f := range fields { 97 | v, ok := tmp[f] 98 | if !ok { 99 | return nil, fmt.Errorf("graphql: Get required field not found %s", f) 100 | } 101 | d[f] = v 102 | } 103 | return d, nil 104 | } 105 | -------------------------------------------------------------------------------- /parse/lexer.go: -------------------------------------------------------------------------------- 1 | package parse 2 | 3 | import ( 4 | "fmt" 5 | "strings" 6 | "unicode/utf8" 7 | ) 8 | 9 | const eof = -1 10 | 11 | type stateFn func(*lexer) stateFn 12 | 13 | type lexer struct { 14 | name string // used only for err 15 | input string // the string beeing scanned 16 | state stateFn 17 | pos int 18 | start int 19 | width int 20 | items chan item 21 | } 22 | 23 | func lex(name, input string) *lexer { 24 | l := &lexer{ 25 | name: name, 26 | input: input, 27 | items: make(chan item), 28 | } 29 | go l.run() 30 | return l 31 | } 32 | 33 | func (l *lexer) run() { 34 | for l.state = lexObjName; l.state != nil; { 35 | l.state = l.state(l) 36 | } 37 | close(l.items) 38 | } 39 | 40 | func (l *lexer) emit(t itemType) { 41 | val := l.input[l.start:l.pos] 42 | // BUG(cryptix): a bit hacky... 43 | if t == itemFieldName || t == itemRightCurly { 44 | val = strings.TrimSpace(val) 45 | if len(val) == 0 { 46 | return 47 | } 48 | } 49 | l.items <- item{t, val} 50 | l.start = l.pos 51 | } 52 | 53 | // helpers 54 | func (l *lexer) next() rune { 55 | if l.pos >= len(l.input) { 56 | l.width = 0 57 | return eof 58 | } 59 | var r rune 60 | r, l.width = utf8.DecodeRuneInString(l.input[l.pos:]) 61 | l.pos += l.width 62 | return r 63 | } 64 | 65 | func (l *lexer) backup() { 66 | l.pos -= l.width 67 | } 68 | 69 | func (l *lexer) ignore() { 70 | l.start = l.pos 71 | } 72 | 73 | func (l *lexer) peek() rune { 74 | r := l.next() 75 | l.backup() 76 | return r 77 | } 78 | 79 | func (l *lexer) acceptRun(valid string) { 80 | for strings.IndexRune(valid, l.next()) >= 0 { 81 | } 82 | l.backup() 83 | } 84 | 85 | func (l *lexer) errorf(format string, args ...interface{}) stateFn { 86 | l.items <- item{itemError, fmt.Sprintf(format, args...)} 87 | return nil 88 | } 89 | 90 | const ( 91 | dot = '.' 92 | comma = ',' 93 | leftBrace = '(' 94 | rightBrace = ')' 95 | leftCurly = '{' 96 | rightCurly = '}' 97 | ) 98 | 99 | func lexObjName(l *lexer) stateFn { 100 | for len(l.input[l.pos:]) > 0 { 101 | switch l.input[l.pos] { 102 | case leftBrace: 103 | if l.pos > l.start { 104 | l.emit(itemObjName) // emit identifier e.g. node 105 | } 106 | return lexLeftBrace 107 | case dot: 108 | if l.pos > l.start { 109 | l.emit(itemObjName) // emit identifier e.g. 
node 110 | } 111 | return lexDot 112 | } 113 | if l.next() == eof { 114 | break 115 | } 116 | } 117 | l.emit(itemEOF) // empty query 118 | return nil 119 | } 120 | 121 | func lexDot(l *lexer) stateFn { 122 | l.pos += 1 123 | l.emit(itemDot) 124 | return lexFnName 125 | } 126 | 127 | func lexFnName(l *lexer) stateFn { 128 | for { 129 | if l.input[l.pos] == leftBrace { 130 | if l.pos > l.start { 131 | l.emit(itemFunction) 132 | } 133 | return lexLeftBrace 134 | } 135 | if l.next() == eof { 136 | return l.errorf("illegal function name") 137 | } 138 | } 139 | panic("not reached") 140 | } 141 | 142 | func lexLeftBrace(l *lexer) stateFn { 143 | l.pos += 1 144 | l.emit(itemLeftBrace) 145 | return lexFnArgument 146 | } 147 | 148 | func lexFnArgument(l *lexer) stateFn { 149 | for len(l.input[l.pos:]) > 0 { 150 | if l.input[l.pos] == rightBrace { 151 | if l.pos > l.start { 152 | l.emit(itemFnArgument) // emit function argument as string 153 | } 154 | return lexRightBrace 155 | } 156 | switch r := l.next(); { 157 | case r == eof || r == '\n': 158 | break 159 | } 160 | } 161 | return l.errorf("illegal function argument") 162 | } 163 | 164 | func lexRightBrace(l *lexer) stateFn { 165 | l.pos += 1 166 | l.emit(itemRightBrace) 167 | l.acceptRun(" \t\n") 168 | l.ignore() 169 | if l.peek() == dot { 170 | return lexDot 171 | } 172 | return lexLeftCurly 173 | } 174 | 175 | func lexLeftCurly(l *lexer) stateFn { 176 | l.pos += 1 177 | l.emit(itemLeftCurly) 178 | p := l.peek() 179 | for isSpace(p) || p == '\n' { 180 | l.next() 181 | l.ignore() 182 | p = l.peek() 183 | } 184 | return lexFieldNames 185 | } 186 | 187 | func lexFieldNames(l *lexer) stateFn { 188 | for len(l.input[l.pos:]) > 0 { 189 | switch l.input[l.pos] { 190 | case rightCurly: 191 | if l.pos > l.start { 192 | l.emit(itemFieldName) 193 | } 194 | return lexRightCurly 195 | case leftCurly: 196 | if l.pos > l.start { 197 | l.emit(itemFieldName) 198 | } 199 | return lexLeftCurly 200 | case comma: 201 | if l.pos > l.start { 202 | l.emit(itemFieldName) 203 | } 204 | return lexComma 205 | case dot: 206 | if l.pos > l.start { 207 | l.emit(itemFieldName) 208 | } 209 | return lexDot 210 | case leftBrace: 211 | if l.pos > l.start { 212 | l.emit(itemFunction) 213 | } 214 | return lexLeftBrace 215 | } 216 | if l.next() == eof { 217 | break 218 | } 219 | } 220 | return l.errorf("illegal fieldname") 221 | } 222 | 223 | func lexComma(l *lexer) stateFn { 224 | l.pos += 1 225 | l.emit(itemComma) 226 | p := l.peek() 227 | for isSpace(p) || p == '\n' { 228 | l.next() 229 | l.ignore() 230 | p = l.peek() 231 | } 232 | return lexFieldNames 233 | } 234 | 235 | func lexRightCurly(l *lexer) stateFn { 236 | l.pos += 1 237 | l.emit(itemRightCurly) 238 | // TODO add depth check 239 | if l.peek() == eof { 240 | l.emit(itemEOF) 241 | return nil 242 | } 243 | return lexFieldNames 244 | } 245 | 246 | // isSpace reports whether r is a space character. 
247 | func isSpace(r rune) bool { 248 | return r == ' ' || r == '\t' 249 | } 250 | -------------------------------------------------------------------------------- /parse/lexer_test.go: -------------------------------------------------------------------------------- 1 | package parse 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stretchr/testify/assert" 7 | "github.com/stretchr/testify/require" 8 | ) 9 | 10 | func TestLex_empty(t *testing.T) { 11 | l := lex("empty", "") 12 | want := []item{ 13 | item{itemEOF, ""}, 14 | } 15 | assertLexer(t, l, want) 16 | } 17 | 18 | func TestLex_errors(t *testing.T) { 19 | tcases := []struct { 20 | name, qry string 21 | items []item 22 | }{ 23 | {"missing close brace", 24 | "node(", 25 | []item{ 26 | item{itemObjName, "node"}, 27 | item{itemLeftBrace, "("}, 28 | item{itemError, "illegal function argument"}, 29 | }, 30 | }, 31 | // BUG(cryptix): error at parse level 32 | {"missing parens", 33 | "node{}", 34 | []item{ 35 | item{itemEOF, "node{}"}, 36 | // item{itemError, "illegal parens argument"}, 37 | }, 38 | }, 39 | {"missing close curly", 40 | "node(123){", 41 | []item{ 42 | item{itemObjName, "node"}, 43 | item{itemLeftBrace, "("}, 44 | item{itemFnArgument, "123"}, 45 | item{itemRightBrace, ")"}, 46 | item{itemLeftCurly, "{"}, 47 | item{itemError, "illegal fieldname"}, 48 | }, 49 | }, 50 | } 51 | for _, c := range tcases { 52 | l := lex(c.name, c.qry) 53 | assertLexer(t, l, c.items) 54 | } 55 | } 56 | 57 | func TestLex_simpleGraph(t *testing.T) { 58 | l := lex("simple", "node(123){one,two,obj{a,b}}") 59 | want := []item{ 60 | item{itemObjName, "node"}, 61 | item{itemLeftBrace, "("}, 62 | item{itemFnArgument, "123"}, 63 | item{itemRightBrace, ")"}, 64 | item{itemLeftCurly, "{"}, 65 | item{itemFieldName, "one"}, 66 | item{itemComma, ","}, 67 | item{itemFieldName, "two"}, 68 | item{itemComma, ","}, 69 | item{itemFieldName, "obj"}, 70 | item{itemLeftCurly, "{"}, 71 | item{itemFieldName, "a"}, 72 | item{itemComma, ","}, 73 | item{itemFieldName, "b"}, 74 | item{itemRightCurly, "}"}, 75 | item{itemRightCurly, "}"}, 76 | item{itemEOF, ""}, 77 | } 78 | assertLexer(t, l, want) 79 | } 80 | 81 | func TestLex_indented(t *testing.T) { 82 | l := lex("indented", `node(123) { 83 | one, 84 | two, 85 | three 86 | }`) 87 | want := []item{ 88 | item{itemObjName, "node"}, 89 | item{itemLeftBrace, "("}, 90 | item{itemFnArgument, "123"}, 91 | item{itemRightBrace, ")"}, 92 | item{itemLeftCurly, "{"}, 93 | item{itemFieldName, "one"}, 94 | item{itemComma, ","}, 95 | item{itemFieldName, "two"}, 96 | item{itemComma, ","}, 97 | item{itemFieldName, "three"}, 98 | item{itemRightCurly, "}"}, 99 | item{itemEOF, ""}, 100 | } 101 | assertLexer(t, l, want) 102 | } 103 | 104 | func TestLex_talkExample(t *testing.T) { 105 | l := lex("talkExample", 106 | `node(1572451031) { 107 | id, 108 | name, 109 | birthdate { 110 | month, 111 | day 112 | }, 113 | friends.after(3500401).first(2) { 114 | cursor, 115 | node { 116 | name 117 | } 118 | } 119 | }`) 120 | want := []item{ 121 | item{itemObjName, "node"}, 122 | item{itemLeftBrace, "("}, 123 | item{itemFnArgument, "1572451031"}, 124 | item{itemRightBrace, ")"}, 125 | item{itemLeftCurly, "{"}, 126 | item{itemFieldName, "id"}, 127 | item{itemComma, ","}, 128 | item{itemFieldName, "name"}, 129 | item{itemComma, ","}, 130 | item{itemFieldName, "birthdate"}, 131 | item{itemLeftCurly, "{"}, 132 | item{itemFieldName, "month"}, 133 | item{itemComma, ","}, 134 | item{itemFieldName, "day"}, 135 | item{itemRightCurly, "}"}, 136 | item{itemComma, ","}, 137 
| item{itemFieldName, "friends"}, 138 | item{itemDot, "."}, 139 | item{itemFunction, "after"}, 140 | item{itemLeftBrace, "("}, 141 | item{itemFnArgument, "3500401"}, 142 | item{itemRightBrace, ")"}, 143 | item{itemDot, "."}, 144 | item{itemFunction, "first"}, 145 | item{itemLeftBrace, "("}, 146 | item{itemFnArgument, "2"}, 147 | item{itemRightBrace, ")"}, 148 | item{itemLeftCurly, "{"}, 149 | item{itemFieldName, "cursor"}, 150 | item{itemComma, ","}, 151 | item{itemFieldName, "node"}, 152 | item{itemLeftCurly, "{"}, 153 | item{itemFieldName, "name"}, 154 | item{itemRightCurly, "}"}, 155 | item{itemRightCurly, "}"}, 156 | item{itemRightCurly, "}"}, 157 | item{itemEOF, ""}, 158 | } 159 | assertLexer(t, l, want) 160 | } 161 | 162 | // https://news.ycombinator.com/item?id=8978936 163 | func TestLex_hnExample(t *testing.T) { 164 | l := lex("hnExample", 165 | `viewer() { 166 | posts { 167 | node { 168 | author { id, name, favorite_color }, 169 | } 170 | }, 171 | friends { 172 | node { 173 | id, 174 | name, 175 | favorite_color 176 | } 177 | }, 178 | notifications { 179 | node { 180 | source { id, name, favorite_color } 181 | } 182 | }, 183 | }`) 184 | want := []item{ 185 | item{itemObjName, "viewer"}, 186 | item{itemLeftBrace, "("}, 187 | item{itemRightBrace, ")"}, 188 | item{itemLeftCurly, "{"}, 189 | item{itemFieldName, "posts"}, 190 | item{itemLeftCurly, "{"}, 191 | item{itemFieldName, "node"}, 192 | item{itemLeftCurly, "{"}, 193 | item{itemFieldName, "author"}, 194 | item{itemLeftCurly, "{"}, 195 | item{itemFieldName, "id"}, 196 | item{itemComma, ","}, 197 | item{itemFieldName, "name"}, 198 | item{itemComma, ","}, 199 | item{itemFieldName, "favorite_color"}, 200 | item{itemRightCurly, "}"}, 201 | item{itemComma, ","}, 202 | item{itemRightCurly, "}"}, // node 203 | item{itemRightCurly, "}"}, // posts 204 | item{itemComma, ","}, 205 | item{itemFieldName, "friends"}, 206 | item{itemLeftCurly, "{"}, 207 | item{itemFieldName, "node"}, 208 | item{itemLeftCurly, "{"}, 209 | item{itemFieldName, "id"}, 210 | item{itemComma, ","}, 211 | item{itemFieldName, "name"}, 212 | item{itemComma, ","}, 213 | item{itemFieldName, "favorite_color"}, 214 | item{itemRightCurly, "}"}, // node 215 | item{itemRightCurly, "}"}, // friends 216 | item{itemComma, ","}, 217 | item{itemFieldName, "notifications"}, 218 | item{itemLeftCurly, "{"}, 219 | item{itemFieldName, "node"}, 220 | item{itemLeftCurly, "{"}, 221 | item{itemFieldName, "source"}, 222 | item{itemLeftCurly, "{"}, 223 | item{itemFieldName, "id"}, 224 | item{itemComma, ","}, 225 | item{itemFieldName, "name"}, 226 | item{itemComma, ","}, 227 | item{itemFieldName, "favorite_color"}, 228 | item{itemRightCurly, "}"}, // source 229 | item{itemRightCurly, "}"}, // node 230 | item{itemRightCurly, "}"}, // notifications 231 | item{itemComma, ","}, 232 | item{itemRightCurly, "}"}, // viewer 233 | item{itemEOF, ""}, 234 | } 235 | assertLexer(t, l, want) 236 | } 237 | 238 | func TestLex_ghcomment(t *testing.T) { 239 | l := lex("ghcomment", `friends.first(1) { edges { cursor, node { name } } }`) 240 | want := []item{ 241 | item{itemObjName, "friends"}, 242 | item{itemDot, "."}, 243 | item{itemFunction, "first"}, 244 | item{itemLeftBrace, "("}, 245 | item{itemFnArgument, "1"}, 246 | item{itemRightBrace, ")"}, 247 | item{itemLeftCurly, "{"}, 248 | item{itemFieldName, "edges"}, 249 | item{itemLeftCurly, "{"}, 250 | item{itemFieldName, "cursor"}, 251 | item{itemComma, ","}, 252 | item{itemFieldName, "node"}, 253 | item{itemLeftCurly, "{"}, 254 | item{itemFieldName, "name"}, 
255 | item{itemRightCurly, "}"}, 256 | item{itemRightCurly, "}"}, 257 | item{itemRightCurly, "}"}, 258 | item{itemEOF, ""}, 259 | } 260 | assertLexer(t, l, want) 261 | } 262 | 263 | func assertLexer(t *testing.T, l *lexer, want []item) { 264 | var got []item 265 | for i := range l.items { 266 | got = append(got, i) 267 | } 268 | require.Len(t, got, len(want), "delta: %d", len(got)-len(want)) 269 | for idx := range want { 270 | assert.Equal(t, 271 | want[idx], 272 | got[idx], 273 | "item #%d from lexer is wrong\n Got:%s\nWant:%s", idx+1, got[idx], want[idx], 274 | ) 275 | } 276 | } 277 | --------------------------------------------------------------------------------
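
Usage sketch (not part of the repository): the program below is a minimal example of how the pieces above fit together, assuming the import path shown in the README badge (github.com/cryptix/go-graphql). The userStore type, its sample data, and the user(1){id,name} query are made up for illustration; NewExecutor, Register, ServeHTTP, the Store interface, and the Data type are taken from executor.go.

package main

import (
	"errors"
	"fmt"
	"net/http/httptest"
	"strings"

	graphql "github.com/cryptix/go-graphql"
)

// userStore is a hypothetical in-memory backend implementing the Store
// interface from executor.go: Get(id, fields) returns only the requested fields.
type userStore struct {
	data map[int64]graphql.Data
}

func (s *userStore) Get(id int64, fields []string) (graphql.Data, error) {
	u, ok := s.data[id]
	if !ok {
		return nil, errors.New("not found")
	}
	d := make(graphql.Data, len(fields))
	for _, f := range fields {
		v, ok := u[f]
		if !ok {
			return nil, fmt.Errorf("unknown field %q", f)
		}
		d[f] = v
	}
	return d, nil
}

func main() {
	exe := graphql.NewExecutor()
	store := &userStore{data: map[int64]graphql.Data{
		1: {"id": int64(1), "name": "Ada"},
	}}
	if err := exe.Register("user", store); err != nil {
		panic(err)
	}

	// The executor reads the query from the request body, parses it with
	// parse.Parse, and dispatches to the store registered under the object name.
	req := httptest.NewRequest("GET", "/", strings.NewReader(`user(1){id,name}`))
	rw := httptest.NewRecorder()
	exe.ServeHTTP(rw, req)
	fmt.Print(rw.Body.String()) // {"id":1,"name":"Ada"}
}

The parse package can also be used on its own: parse.Parse returns a *Node whose Name, ID, FieldNames, and PlainFields methods expose the query; these are exactly the calls ServeHTTP makes before handing the id and the plain field list to the registered Store.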