├── .gitignore ├── .vscode └── launch.json ├── Cargo.toml ├── LICENSE ├── README.md ├── prepare-publish.sh ├── src ├── ast.rs ├── lexer.rs ├── lib.rs └── parser.rs └── test ├── published-test ├── index.js ├── package-lock.json └── package.json └── test.js /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | **/*.rs.bk 3 | node_modules 4 | Cargo.lock 5 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "lldb", 9 | "request": "launch", 10 | "name": "Debug", 11 | "program": "${workspaceFolder}/src/lib.rs", 12 | "args": [], 13 | "cwd": "${workspaceFolder}" 14 | } 15 | ] 16 | } -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "graphql-wasm-parser" 3 | version = "0.0.2" 4 | authors = ["Andreas Marek "] 5 | edition = "2018" 6 | description = "graphql parser" 7 | license = "MIT" 8 | repository = "https://github.com/graphql-wasm/graphql-wasm-parser" 9 | 10 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 11 | 12 | [lib] 13 | crate-type = ["cdylib"] 14 | 15 | [dependencies] 16 | serde = "^1.0.59" 17 | serde_derive = "^1.0.59" 18 | 19 | [dependencies.wasm-bindgen] 20 | version = "0.2.50" 21 | features = ["serde-serialize"] 22 | 23 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 
| Copyright (c) 2019 Andreas Marek 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # WebAssembly GraphQL parser 2 | 3 | [GraphQL](https://graphql.org) parser in [WebAssembly](https://webassembly.org), compiled from [Rust](https://www.rust-lang.org). 4 | 5 | ## Status: experimental 6 | 7 | API/AST structure will probably change. 8 | No semantic validation at the moment, just syntax (e.g. you can declare a field twice). 9 | 10 | ## Usage via NPM 11 | 12 | Published at npm as `graphql-wasm-parser`. 
Add it as a dependency and use it like any other JS module:
fields: Vec, 44 | pub description: Option, 45 | pub directives: Vec, 46 | } 47 | 48 | #[derive(Debug, Clone, PartialEq, Serialize)] 49 | pub struct InterfaceTypeExtension { 50 | pub name: String, 51 | pub fields: Vec, 52 | pub directives: Vec, 53 | } 54 | 55 | #[derive(Debug, Clone, PartialEq, Serialize)] 56 | pub struct UnionTypeDefinition { 57 | pub name: String, 58 | pub description: Option, 59 | pub directives: Vec, 60 | pub types: Vec, 61 | } 62 | 63 | #[derive(Debug, Clone, PartialEq, Serialize)] 64 | pub struct UnionTypeExtension { 65 | pub name: String, 66 | pub directives: Vec, 67 | pub types: Vec, 68 | } 69 | 70 | #[derive(Debug, Clone, PartialEq, Serialize)] 71 | pub struct EnumTypeDefinition { 72 | pub name: String, 73 | pub directives: Vec, 74 | pub description: Option, 75 | pub values: Vec, 76 | } 77 | 78 | #[derive(Debug, Clone, PartialEq, Serialize)] 79 | pub struct EnumTypeExtension { 80 | pub name: String, 81 | pub directives: Vec, 82 | pub values: Vec, 83 | } 84 | 85 | #[derive(Debug, Clone, PartialEq, Serialize)] 86 | pub struct EnumValueDefinition { 87 | pub name: String, 88 | pub directives: Vec, 89 | pub description: Option, 90 | } 91 | 92 | #[derive(Debug, Clone, PartialEq, Serialize)] 93 | pub struct InputObjectTypeDefinition { 94 | pub name: String, 95 | pub directives: Vec, 96 | pub description: Option, 97 | pub fields: Vec, 98 | } 99 | 100 | #[derive(Debug, Clone, PartialEq, Serialize)] 101 | pub struct InputObjectTypeExtension { 102 | pub name: String, 103 | pub directives: Vec, 104 | pub fields: Vec, 105 | } 106 | 107 | #[derive(Debug, Clone, PartialEq, Serialize)] 108 | pub struct DirectiveDefinition { 109 | pub name: String, 110 | pub description: Option, 111 | pub locations: Vec, 112 | pub arguments: Vec, 113 | } 114 | 115 | #[derive(Debug, Clone,Copy, PartialEq, Serialize)] 116 | pub enum DirectiveLocation { 117 | QUERY, 118 | MUTATION, 119 | SUBSCRIPTION, 120 | FIELD, 121 | FRAGMENT_DEFINITION, 122 | FRAGMENT_SPREAD, 123 | 
INLINE_FRAGMENT, 124 | VARIABLE_DEFINITION, 125 | SCHEMA, 126 | SCALAR, 127 | OBJECT, 128 | FIELD_DEFINITION, 129 | ARGUMENT_DEFINITION, 130 | INTERFACE, 131 | UNION, 132 | ENUM, 133 | ENUM_VALUE, 134 | INPUT_OBJECT, 135 | INPUT_FIELD_DEFINITION, 136 | } 137 | 138 | #[derive(Debug, Clone, PartialEq, Serialize)] 139 | pub struct ObjectTypeDefinition { 140 | pub name: String, 141 | pub interfaces: Vec, 142 | pub fields: Vec, 143 | pub description: Option, 144 | pub directives: Vec, 145 | } 146 | 147 | #[derive(Debug, Clone, PartialEq, Serialize)] 148 | pub struct ObjectTypeExtension { 149 | pub name: String, 150 | pub interfaces: Vec, 151 | pub fields: Vec, 152 | pub directives: Vec, 153 | } 154 | 155 | #[derive(Debug, Clone, PartialEq, Serialize)] 156 | pub struct FieldDefinition { 157 | pub name: String, 158 | pub type_reference: TypeReference, 159 | pub arguments: Vec, 160 | } 161 | 162 | #[derive(Debug, Clone, PartialEq, Serialize)] 163 | pub struct InputValueDefinition { 164 | pub name: String, 165 | pub type_reference: TypeReference, 166 | pub default_value: Option, 167 | pub description: Option, 168 | pub directives: Vec, 169 | } 170 | 171 | #[derive(Debug, Clone, PartialEq, Serialize)] 172 | pub struct SchemaDefinition { 173 | pub directives: Vec, 174 | pub operation_type_definitions: Vec, 175 | } 176 | 177 | #[derive(Debug, Clone, PartialEq, Serialize)] 178 | pub struct SchemaExtension { 179 | pub directives: Vec, 180 | pub operation_type_definitions: Vec, 181 | } 182 | 183 | #[derive(Debug, Clone, PartialEq, Serialize)] 184 | pub struct OperationTypeDefinition { 185 | pub operation: OperationType, 186 | pub type_name: String, 187 | } 188 | 189 | #[derive(Debug, Clone, PartialEq, Serialize)] 190 | pub struct OperationDefinition { 191 | pub operation_type: OperationType, 192 | pub name: Option, 193 | pub selection_set: SelectionSet, 194 | pub variable_definitions: Vec, 195 | } 196 | 197 | #[derive(Debug, Clone, PartialEq, Serialize)] 198 | pub struct 
VariableDefinition { 199 | pub name: String, 200 | pub type_reference: TypeReference, 201 | pub default_value: Option, 202 | } 203 | 204 | #[derive(Debug, Clone, PartialEq, Serialize)] 205 | pub enum TypeReference { 206 | NamedType(String), 207 | ListType(Box), 208 | NonNullType(Box), 209 | } 210 | 211 | #[derive(Debug, Clone, PartialEq, Serialize)] 212 | pub enum Value { 213 | Variable(String), 214 | IntValue(String), 215 | FloatValue(String), 216 | StringValue(String), 217 | BooleanValue(bool), 218 | NullValue, 219 | EnumValue(String), 220 | ListValue(Box>), 221 | ObjectValue(Box>), 222 | } 223 | 224 | #[derive(Debug, Clone, PartialEq, Serialize)] 225 | pub struct ObjectField { 226 | pub name: String, 227 | pub value: Value, 228 | } 229 | 230 | #[derive(Debug, Clone, PartialEq, Serialize)] 231 | pub struct SelectionSet { 232 | pub selections: Vec, 233 | } 234 | 235 | #[derive(Debug, Clone, PartialEq, Serialize)] 236 | pub enum Selection { 237 | Field(Field), 238 | FragmentSpread(FragmentSpread), 239 | InlineFragment(InlineFragment), 240 | } 241 | #[derive(Debug, Clone, PartialEq, Serialize)] 242 | pub struct Field { 243 | pub name: String, 244 | pub alias: Option, 245 | pub selection_set: Option, 246 | pub directives: Vec, 247 | pub arguments: Vec 248 | } 249 | 250 | #[derive(Debug, Clone, PartialEq, Serialize)] 251 | pub struct FragmentSpread { 252 | pub name: String, 253 | } 254 | #[derive(Debug, Clone, PartialEq, Serialize)] 255 | pub struct InlineFragment { 256 | pub type_condition: Option, 257 | pub selection_set: SelectionSet, 258 | } 259 | 260 | #[derive(Debug, Clone, PartialEq, Serialize)] 261 | pub enum OperationType { 262 | Query, 263 | Mutation, 264 | Subscription, 265 | } 266 | 267 | #[derive(Debug, Clone, PartialEq, Serialize)] 268 | pub struct FragmentDefinition { 269 | pub name: String, 270 | pub type_condition: String, 271 | pub selection_set: SelectionSet, 272 | } 273 | 274 | #[derive(Debug, Clone, PartialEq, Serialize)] 275 | pub struct 
Directive { 276 | pub name: String, 277 | pub arguments: Vec, 278 | } 279 | 280 | #[derive(Debug, Clone, PartialEq, Serialize)] 281 | pub struct Argument { 282 | pub name: String, 283 | pub value: Value, 284 | } 285 | -------------------------------------------------------------------------------- /src/lexer.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::fmt::Debug; 3 | use std::ops::Index; 4 | use std::ops::Range; 5 | use std::ops::RangeFrom; 6 | use std::slice::Iter; 7 | use Result::*; 8 | #[derive(Debug, PartialEq, Eq, Clone, Copy)] 9 | pub enum TokenKind { 10 | SOF, 11 | END_OF_INPUT, 12 | BANG, 13 | DOLLAR, 14 | AMP, 15 | PAREN_L, 16 | PAREN_R, 17 | SPREAD, 18 | COLON, 19 | EQUALS, 20 | AT, 21 | BRACKET_L, 22 | BRACKET_R, 23 | BRACE_L, 24 | PIPE, 25 | BRACE_R, 26 | NAME, 27 | INT, 28 | FLOAT, 29 | STRING, 30 | BLOCK_STRING, 31 | COMMENT, 32 | } 33 | 34 | #[derive(Debug)] 35 | pub struct LexerError { 36 | pub message: String, 37 | } 38 | 39 | impl LexerError { 40 | pub fn new(message: &str) -> LexerError { 41 | return LexerError { 42 | message: String::from(message), 43 | }; 44 | } 45 | } 46 | 47 | #[derive(Debug)] 48 | pub struct Position { 49 | pub name: String, 50 | pub column: usize, 51 | pub line: usize, 52 | } 53 | 54 | #[derive(Debug)] 55 | pub struct Token { 56 | pub kind: TokenKind, 57 | pub start: usize, 58 | pub end: usize, 59 | pub value: Option, 60 | pub position: Position, 61 | } 62 | 63 | impl Token { 64 | pub fn simple_token(kind: TokenKind, index: usize, position: Position) -> Token { 65 | Token { 66 | kind, 67 | start: index, 68 | end: index + 1, 69 | value: Option::None, 70 | position, 71 | } 72 | } 73 | 74 | pub fn get_value<'a>(&'a self) -> &'a String { 75 | self.value.as_ref().unwrap() 76 | } 77 | } 78 | 79 | #[derive(Debug)] 80 | pub struct Lexer { 81 | pub source: Box, 82 | pub current_index: usize, 83 | pub tokens: Vec, 84 | } 85 | 86 | pub trait 
MultiSource: Debug { 87 | fn get(&self, index: usize) -> Option; 88 | fn range_string(&self, range: Range) -> String; 89 | fn index_to_position(&self, index: usize) -> Position; 90 | fn len(&self) -> usize; 91 | } 92 | 93 | #[derive(Debug)] 94 | struct Line { 95 | number: usize, 96 | start_index: usize, 97 | } 98 | 99 | #[derive(Debug)] 100 | pub struct OneStringSource { 101 | chars: Vec, 102 | } 103 | 104 | pub struct NamedStringSource {} 105 | 106 | impl MultiSource for OneStringSource { 107 | fn get(&self, index: usize) -> Option { 108 | self.chars.get(index).map(|c| *c) 109 | } 110 | 111 | fn range_string(&self, range: Range) -> String { 112 | self.chars.index(range).iter().cloned().collect::() 113 | } 114 | 115 | fn index_to_position(&self, index: usize) -> Position { 116 | let mut cur_line = 1; 117 | let mut cur_col = 1; 118 | let mut cur_index = 0; 119 | while cur_index < index { 120 | if self.chars[cur_index] == '\n' && cur_index < index { 121 | cur_line += 1; 122 | cur_col = 1; 123 | } else { 124 | cur_col += 1; 125 | } 126 | cur_index += 1; 127 | } 128 | let result = Position { 129 | name: String::from(""), 130 | line: cur_line, 131 | column: cur_col, 132 | }; 133 | println!("calc: index: {}, result: {:?}", index, result); 134 | result 135 | } 136 | fn len(&self) -> usize { 137 | self.chars.len() 138 | } 139 | } 140 | 141 | impl<'a> Lexer { 142 | pub fn new(source_str: &'a str) -> Lexer { 143 | let start_of_file_token = Token { 144 | kind: TokenKind::SOF, 145 | start: 0, 146 | end: 0, 147 | value: Option::None, 148 | position: Position { 149 | name: String::from(""), 150 | line: 1, 151 | column: 1, 152 | }, 153 | }; 154 | Lexer { 155 | source: Box::new(OneStringSource { 156 | chars: source_str.chars().collect(), 157 | }), 158 | current_index: 0, 159 | tokens: vec![start_of_file_token], 160 | } 161 | } 162 | pub fn current_token(&self) -> &Token { 163 | self.tokens.last().as_ref().expect("no tokens") 164 | } 165 | 166 | pub fn current_token_value(&self) -> 
&String {
        let token = self.tokens.last().unwrap();
        token.value.as_ref().unwrap()
    }

    /// Like `current_token_value`, but returns `None` instead of panicking
    /// when the current token carries no value.
    pub fn current_token_value_safe(&self) -> Option<String> {
        let token = self.tokens.last().unwrap();
        match token.value.as_ref() {
            Some(s) => Some(s.clone()),
            None => None,
        }
    }

    /// The token before the current one; panics if fewer than two tokens exist.
    pub fn prev_token(&self) -> &Token {
        self.tokens.get(self.tokens.len() - 2).unwrap()
    }

    /// Value of the previous token; panics if it has no value.
    pub fn prev_token_value(&self) -> String {
        let token = self.tokens.get(self.tokens.len() - 2).unwrap();
        token.value.as_ref().unwrap().clone()
    }

    /// Tokenizes the whole source into `self.tokens`.
    /// NOTE(review): the `Result` from `advance()` is discarded, so a lexer
    /// error makes this loop forever — verify callers never feed invalid input.
    pub fn advance_all(&mut self) {
        while self.tokens.last().expect("").kind != TokenKind::END_OF_INPUT {
            self.advance();
        }
    }

    /// Returns the next token without consuming it.
    pub fn lookahead(&self) -> Result<Token, LexerError> {
        if self.tokens.last().unwrap().kind == TokenKind::END_OF_INPUT {
            return Ok(Token::simple_token(
                TokenKind::END_OF_INPUT,
                self.source.len(),
                // FIX: `len() - 1` underflowed (panic) for an empty source;
                // saturating_sub clamps to index 0.
                self.source.index_to_position(self.source.len().saturating_sub(1)),
            ));
        }
        let position = self.position_after_whitespace();
        let new_token;
        if position == self.source.len() {
            new_token = Token::simple_token(
                TokenKind::END_OF_INPUT,
                self.source.len(),
                // FIX: same underflow as above for empty input.
                self.source.index_to_position(self.source.len().saturating_sub(1)),
            );
        } else {
            new_token = self.read_token(position)?;
        }
        Ok(new_token)
    }

    /// Consumes and returns the next token, pushing it onto `self.tokens`.
    pub fn advance(&mut self) -> Result<&Token, LexerError> {
        if self.tokens.last().unwrap().kind == TokenKind::END_OF_INPUT {
            return Ok(self.tokens.last().unwrap());
        }
        let position = self.position_after_whitespace();
        let new_token;
        if position == self.source.len() {
            new_token = Token::simple_token(
                TokenKind::END_OF_INPUT,
                self.source.len(),
                // FIX: same underflow as in `lookahead` for empty input.
                self.source.index_to_position(self.source.len().saturating_sub(1)),
            );
        } else {
            new_token = self.read_token(position)?;
        }
        self.current_index =
new_token.end; 231 | self.tokens.push(new_token); 232 | Ok(&self.tokens.last().expect("")) 233 | } 234 | 235 | fn position_after_whitespace(&self) -> usize { 236 | let count = self.count_while(self.current_index, |c| match c { 237 | '\u{0020}' | '\u{0009}' | '\u{feff}' | ',' | '\n' | '\r' => true, 238 | c => false, 239 | }); 240 | self.current_index + count 241 | } 242 | 243 | fn count_while(&self, start: usize, while_fn: F) -> usize 244 | where 245 | F: Fn(&char) -> bool, 246 | { 247 | let mut counter = 0; 248 | while start + counter < self.source.len() { 249 | let c = self.source.get(start + counter).unwrap(); 250 | if while_fn(&c) { 251 | counter += 1; 252 | } else { 253 | break; 254 | } 255 | } 256 | counter 257 | } 258 | fn count_until(&self, start: usize, predicate: F) -> usize 259 | where 260 | F: Fn(&char) -> bool, 261 | { 262 | let mut counter = 0; 263 | while start + counter < self.source.len() { 264 | let c = self.source.get(start + counter).unwrap(); 265 | if !predicate(&c) { 266 | counter += 1; 267 | } else { 268 | break; 269 | } 270 | } 271 | counter 272 | } 273 | fn collect_string_until( 274 | &self, 275 | start: usize, 276 | predicate: F, 277 | include_ast_char: bool, 278 | ) -> (String, usize) 279 | where 280 | F: Fn(&char) -> bool, 281 | { 282 | let mut string = String::new(); 283 | let mut counter = 0; 284 | while start + counter < self.source.len() { 285 | let c = self.source.get(start + counter).unwrap(); 286 | if !predicate(&c) { 287 | string.push(c); 288 | counter += 1; 289 | } else { 290 | if (include_ast_char) { 291 | counter += 1; 292 | } 293 | break; 294 | } 295 | } 296 | (string, counter) 297 | } 298 | 299 | fn read_token(&self, index: usize) -> Result { 300 | let c = self.source.get(index).unwrap(); 301 | let position = self.source.index_to_position(index); 302 | let new_token = match c { 303 | '{' => Ok(Token::simple_token(TokenKind::BRACE_L, index, position)), 304 | '}' => Ok(Token::simple_token(TokenKind::BRACE_R, index, position)), 
305 | '!' => Ok(Token::simple_token(TokenKind::BANG, index, position)), 306 | '|' => Ok(Token::simple_token(TokenKind::PIPE, index, position)), 307 | ':' => Ok(Token::simple_token(TokenKind::COLON, index, position)), 308 | '[' => Ok(Token::simple_token(TokenKind::BRACKET_L, index, position)), 309 | ']' => Ok(Token::simple_token(TokenKind::BRACKET_R, index, position)), 310 | '$' => Ok(Token::simple_token(TokenKind::DOLLAR, index, position)), 311 | '@' => Ok(Token::simple_token(TokenKind::AT, index, position)), 312 | '&' => Ok(Token::simple_token(TokenKind::AMP, index, position)), 313 | '(' => Ok(Token::simple_token(TokenKind::PAREN_L, index, position)), 314 | ')' => Ok(Token::simple_token(TokenKind::PAREN_R, index, position)), 315 | '=' => Ok(Token::simple_token(TokenKind::EQUALS, index, position)), 316 | '\"' => self.read_string_or_block_string(index), 317 | '.' => self.read_spread(index), 318 | 'A'..='Z' | '_' | 'a'..='z' => Ok(self.read_name(index)), 319 | '0'..='9' | '-' => self.read_number(index), 320 | '#' => Ok(self.read_comment(index)), 321 | _ => panic!( 322 | "unexpected char '{}' at position {:?}", 323 | c.escape_unicode(), 324 | position 325 | ), 326 | }?; 327 | 328 | Ok(new_token) 329 | } 330 | 331 | fn read_spread(&self, index: usize) -> Result { 332 | // we have at least a . 
333 | let next_char_1 = self.source.get(index + 2); 334 | Lexer::expect_char(next_char_1, '.')?; 335 | let next_char_2 = self.source.get(index + 2); 336 | Lexer::expect_char(next_char_2, '.')?; 337 | Ok(Token { 338 | kind: TokenKind::SPREAD, 339 | start: index, 340 | end: index + 3, 341 | value: None, 342 | position: self.source.index_to_position(index), 343 | }) 344 | } 345 | fn expect_char(c: Option, expected_char: char) -> Result<(), LexerError> { 346 | match c { 347 | None => Err(LexerError::new("Unexpected char")), 348 | Some(actual_char) => { 349 | if actual_char != expected_char { 350 | Err(LexerError::new("Unexpected char")) 351 | } else { 352 | Ok(()) 353 | } 354 | } 355 | } 356 | } 357 | 358 | fn read_comment(&self, index: usize) -> Token { 359 | // we have at least `#` 360 | let (value, counter) = self.collect_string_until( 361 | index + 1, 362 | |c| match *c { 363 | '\u{000A}' | '\u{000D}' => true, 364 | _ => false, 365 | }, 366 | true, 367 | ); 368 | Token { 369 | kind: TokenKind::COMMENT, 370 | start: index, 371 | end: index + counter + 1, 372 | value: Option::Some(value), 373 | position: self.source.index_to_position(index), 374 | } 375 | } 376 | 377 | fn read_name(&self, index: usize) -> Token { 378 | let mut name_string = String::new(); 379 | let mut counter = 0; 380 | loop { 381 | let c = match self.source.get(index + counter) { 382 | None => break, 383 | Some(c) => c, 384 | }; 385 | match c { 386 | '_' | '0'..='9' | 'A'..='Z' | 'a'..='z' => { 387 | counter += 1; 388 | name_string.push(c); 389 | } 390 | _ => break, 391 | } 392 | } 393 | Token { 394 | kind: TokenKind::NAME, 395 | start: index, 396 | end: index + counter, 397 | value: Option::Some(name_string), 398 | position: self.source.index_to_position(index), 399 | } 400 | } 401 | 402 | fn read_number(&self, index: usize) -> Result { 403 | let first_code = self.source.get(index).unwrap(); 404 | let mut code = Option::Some(first_code); 405 | let mut position = index; 406 | let mut is_float = 
false;
        // Optional leading minus sign.
        if code.is_some() && code.unwrap() == '-' {
            position += 1;
            code = self.source.get(position);
        };
        // Integer part: `0` must not be followed by another digit; otherwise
        // one-or-more digits are required.
        if code.is_some() && code.unwrap() == '0' {
            position += 1;
            code = self.source.get(position);
            if code.is_some() {
                if code.unwrap() >= '0' && code.unwrap() <= '9' {
                    return Err(LexerError::new(&format!("invalid char {}", code.unwrap())));
                }
            }
        } else {
            position = self.assert_and_read_digits(position, code)?;
            code = self.source.get(position);
        }
        // Optional fractional part.
        if code.is_some() && code.unwrap() == '.' {
            is_float = true;
            position += 1;
            code = self.source.get(position);
            position = self.assert_and_read_digits(position, code)?;
            code = self.source.get(position);
        }
        // Optional exponent part.
        if code.is_some() && (code.unwrap() == 'E' || code.unwrap() == 'e') {
            is_float = true;
            position += 1;
            code = self.source.get(position);
            // FIX: was `code.is_some() && code.unwrap() == '+' || code.unwrap() == '-'`.
            // `&&` binds tighter than `||`, so when the input ended right after
            // the exponent marker the right-hand `unwrap()` panicked on `None`.
            if code.is_some() && (code.unwrap() == '+' || code.unwrap() == '-') {
                position += 1;
                code = self.source.get(position);
            }
            position = self.assert_and_read_digits(position, code)?;
        }
        let value: String = self.source.range_string(index..position);
        Ok(Token {
            kind: if is_float {
                TokenKind::FLOAT
            } else {
                TokenKind::INT
            },
            start: index,
            end: position,
            value: Option::Some(value),
            position: self.source.index_to_position(index),
        })
    }

    /// Requires `first_code` to be a digit, then consumes the digit run
    /// starting at `start`; returns the index just past the last digit.
    fn assert_and_read_digits(
        &self,
        start: usize,
        first_code: Option<char>,
    ) -> Result<usize, LexerError> {
        if first_code.is_none() {
            return Err(LexerError::new(&format!(
                "Invalid number, expected digit but got end of input"
            )));
        }
        if !(first_code.unwrap() >= '0' && first_code.unwrap() <= '9') {
            return Err(LexerError::new(&format!(
                "Invalid number, expected digit but got {}",
                first_code.unwrap()
            )));
        }
        let counter =
self.count_while(start, |c: &char| match *c { 471 | '0'..='9' => true, 472 | _ => false, 473 | }); 474 | Ok(start + counter) 475 | } 476 | 477 | fn read_string_or_block_string(&self, index: usize) -> Result { 478 | if self.is_triple_quote(index) { 479 | return self.read_block_string(index); 480 | } else { 481 | return self.read_string(index); 482 | } 483 | } 484 | 485 | fn read_string(&self, index: usize) -> Result { 486 | // we have at least " 487 | let mut value = String::new(); 488 | 489 | let mut counter = 1; 490 | while index + counter < self.source.len() { 491 | let c = self.source.get(index + counter).unwrap(); 492 | if c == '"' { 493 | return Ok(Token { 494 | kind: TokenKind::STRING, 495 | start: index, 496 | end: index + counter + 1, 497 | value: Option::Some(value), 498 | position: self.source.index_to_position(index), 499 | }); 500 | } 501 | if c < '\u{0020}' && c != '\u{0009}' { 502 | return Err(LexerError::new("not allowed char in String")); 503 | } 504 | value.push(c); 505 | counter += 1; 506 | } 507 | Err(LexerError::new("Unterminated string.")) 508 | } 509 | 510 | fn read_block_string(&self, index: usize) -> Result { 511 | // we know it starts with triple quote 512 | let mut position = index + 3; 513 | let mut chunk_start = position; 514 | let mut raw_value = String::new(); 515 | while position < self.source.len() { 516 | if self.is_triple_quote(position) { 517 | raw_value.push_str(&self.source.range_string(chunk_start..position)); 518 | return Ok(Token { 519 | kind: TokenKind::BLOCK_STRING, 520 | start: index, 521 | end: position + 3, 522 | value: Option::Some(raw_value), 523 | position: self.source.index_to_position(index), 524 | }); 525 | } 526 | let code = self.source.get(position).unwrap(); 527 | if code < '\u{0020}' && code != '\u{0009}' && code != '\u{000a}' && code != '\u{000d}' { 528 | return Err(LexerError::new(&format!( 529 | "Invalid character within String: {}.", 530 | code 531 | ))); 532 | } 533 | if code == '\u{000a}' { 534 | position 
+= 1;
            } else if code == '\u{000d}' {
                if self.char_equal_at(position + 1, '\u{000a}') {
                    // CRLF means one newline
                    position += 2;
                } else {
                    position += 1;
                }
            } else if code == '\u{005c}' && self.is_triple_quote(position + 1) {
                // Backslash-escaped triple quote: flush the pending chunk and
                // emit a literal `"""`.
                raw_value.push_str(&self.source.range_string(chunk_start..position));
                raw_value.push_str("\"\"\"");
                position += 4;
                chunk_start = position;
            } else {
                position += 1;
            }
        }
        // FIX: typo in the error message ("Untermintated" -> "Unterminated").
        Err(LexerError::new("Unterminated block string"))
    }

    /// True when the three chars starting at `position` are `"""`.
    fn is_triple_quote(&self, position: usize) -> bool {
        if !(position + 2 < self.source.len()) {
            return false;
        }
        self.source.get(position).unwrap() == '"'
            && self.source.get(position + 1).unwrap() == '"'
            && self.source.get(position + 2).unwrap() == '"'
    }

    /// True when the char at `position` exists and equals `c`.
    fn char_equal_at(&self, position: usize, c: char) -> bool {
        match self.source.get(position) {
            Some(value) => value == c,
            None => false,
        }
    }
}

#[cfg(test)]
mod tests {
    macro_rules!
test_token {
    // Arm 1: lex `$source` and assert only the kind of the first real token.
    // tokens[0] is the start-of-file marker, so the token under test is [1].
    ($name:ident $source:literal $token_kind:ident) => {
        #[test]
        fn $name() {
            let mut lexer = Lexer::new($source);
            lexer.advance_all();
            assert_eq!(lexer.tokens[1].kind, $token_kind);
        }
    };
    // Arm 2: additionally assert the token's string value.
    ($name:ident $source:literal $token_kind:ident $value:literal) => {
        #[test]
        fn $name() {
            let mut lexer = Lexer::new($source);
            lexer.advance_all();
            assert_eq!(lexer.tokens[1].kind, $token_kind);
            assert_eq!(lexer.tokens[1].value, Some(String::from($value)));
        }
    };
}

use super::TokenKind::*;
use super::*;

// FIX: renamed test from `name_starting_with_underscoe` (typo) to
// `name_starting_with_underscore`; test names are not referenced elsewhere.
test_token!(name_starting_with_underscore "_someName" NAME "_someName");
test_token!(block_string r#""""string""""# BLOCK_STRING "string");
test_token!(block_string_triple_quote_escaped r#""""string\"""""""# BLOCK_STRING r#"string""""#);
test_token!(bang "!" BANG);
test_token!(equals "=" EQUALS);
test_token!(float_e_notation "123e4" FLOAT "123e4");
test_token!(float_e_notation_2 "123E4" FLOAT "123E4");
test_token!(float_e_notation_3 "57.123E4" FLOAT "57.123E4");
test_token!(float_e_notation_4 "26.123e4" FLOAT "26.123e4");
test_token!(float_e_notation_5 "123e-4" FLOAT "123e-4");
test_token!(float_e_notation_6 "-1.123e4" FLOAT "-1.123e4");
test_token!(float_e_notation_7 "-1.123e-4" FLOAT "-1.123e-4");
test_token!(float_e_notation_8 "-1.123E-4" FLOAT "-1.123E-4");
test_token!(float_e_notation_9 "-1.123E+4" FLOAT "-1.123E+4");
test_token!(float_e_notation_10 "-1.123e456" FLOAT "-1.123e456");

#[test]
fn parse_simple_query() {
    // SOF + '{' + 'foo' + '}' + END_OF_INPUT = 5 tokens.
    let mut lexer = Lexer::new(" {foo}");
    lexer.advance_all();
    assert_eq!(lexer.tokens.len(), 5);
}
#[test]
fn parse_name_starting_with_underscore() {
    let mut lexer = Lexer::new("_someName");
    lexer.advance_all();
    assert_eq!(lexer.tokens[1].kind, NAME);
    assert_eq!(lexer.tokens[1].value, Some(String::from("_someName")));
}
#[test]
fn parse_bang() {
    let mut lexer = Lexer::new("!");
    lexer.advance_all();
    assert_eq!(lexer.tokens[1].kind, BANG);
}
#[test]
fn parse_pipe() {
    let mut lexer = Lexer::new("|");
    lexer.advance_all();
    assert_eq!(lexer.tokens[1].kind, PIPE);
}
#[test]
fn parse_colon() {
    let mut lexer = Lexer::new(":");
    lexer.advance_all();
    assert_eq!(lexer.tokens[1].kind, COLON);
}
#[test]
fn parse_bracket() {
    let mut lexer = Lexer::new("[]");
    lexer.advance_all();
    assert_eq!(lexer.tokens[1].kind, BRACKET_L);
    assert_eq!(lexer.tokens[2].kind, BRACKET_R);
}
#[test]
fn parse_parentheses() {
    let mut lexer = Lexer::new("()");
    lexer.advance_all();
    assert_eq!(lexer.tokens[1].kind, PAREN_L);
    assert_eq!(lexer.tokens[2].kind, PAREN_R);
}
#[test]
fn parse_dollar() {
    let mut lexer = Lexer::new("$");
    lexer.advance_all();
    assert_eq!(lexer.tokens[1].kind, DOLLAR);
}
#[test]
fn parse_at() {
    let mut lexer = Lexer::new("@");
    lexer.advance_all();
    assert_eq!(lexer.tokens[1].kind, AT);
}
#[test]
fn parse_ampersand() {
    let mut lexer = Lexer::new("&");
    lexer.advance_all();
    assert_eq!(lexer.tokens[1].kind, AMP);
}
#[test]
fn parse_simple_query_with_ws() {
    // Commas count as insignificant whitespace in GraphQL.
    let mut lexer = Lexer::new(", { foo } ");
    lexer.advance_all();
    assert_eq!(lexer.tokens.len(), 5);
}

#[test]
fn parse_simple_query_with_ws_2() {
    let mut lexer = Lexer::new("  { foo }  ");
    lexer.advance_all();
    assert_eq!(lexer.tokens.len(), 5);
}
#[test]
fn parse_simple_query_with_comment() {
    // The comment contributes one extra token (COMMENT is kept).
    let mut lexer = Lexer::new(" #hello\n { foo } ");
    lexer.advance_all();
    assert_eq!(lexer.tokens.len(), 6);
}
#[test]
fn parse_string() {
    // Interior whitespace of a quoted string is preserved.
    let mut lexer = Lexer::new(" \" foo \" ");
    lexer.advance_all();
    assert_eq!(lexer.tokens.len(), 3);
    let token = &lexer.tokens[1];
    assert_eq!(token.kind, TokenKind::STRING);
    let value = token.value.as_ref().expect("");
    assert_eq!(value, " foo ");
}

#[test]
fn parse_comment() {
    // The leading '#' is not part of the comment's value.
    let mut lexer = Lexer::new("#comment");
    lexer.advance_all();
    assert_eq!(lexer.tokens.len(), 3);
    let comment_token = &lexer.tokens[1];
    assert_eq!(comment_token.kind, TokenKind::COMMENT);
    let value = comment_token.value.as_ref().expect("");
    assert_eq!(value, "comment");
}
#[test]
fn parse_integer() {
    let mut lexer = Lexer::new("1234");
    lexer.advance_all();
    assert_eq!(lexer.tokens.len(), 3);
    let integer_token = &lexer.tokens[1];
    assert_eq!(integer_token.kind, TokenKind::INT);
    let value = integer_token.value.as_ref().expect("");
    assert_eq!(value, "1234");
}

#[test]
fn parse_float() {
    let mut lexer = Lexer::new("0.123");
    lexer.advance_all();
    assert_eq!(lexer.tokens.len(), 3);
    let float_token = &lexer.tokens[1];
    assert_eq!(float_token.kind, TokenKind::FLOAT);
    let value = float_token.value.as_ref().expect("");
    assert_eq!(value, "0.123");
}
#[test]
fn parse_minus_zero() {
    let mut lexer = Lexer::new("-0");
    lexer.advance_all();
    assert_eq!(lexer.tokens.len(), 3);
    let integer_token = &lexer.tokens[1];
    assert_eq!(integer_token.kind, TokenKind::INT);
    let value = integer_token.value.as_ref().expect("");
    assert_eq!(value, "-0");
}
#[test]
fn parse_zero() {
    let mut lexer = Lexer::new("0");
    lexer.advance_all();
    assert_eq!(lexer.tokens.len(), 3);
    let integer_token = &lexer.tokens[1];
    assert_eq!(integer_token.kind, TokenKind::INT);
    let value = integer_token.value.as_ref().expect("");
    assert_eq!(value, "0");
}
#[test]
fn parse_minus_int() {
    let mut lexer = Lexer::new("-4");
    lexer.advance_all();
    assert_eq!(lexer.tokens.len(), 3);
    let integer_token = &lexer.tokens[1];
    assert_eq!(integer_token.kind, TokenKind::INT);
    let value = integer_token.value.as_ref().expect("");
    assert_eq!(value, "-4");
}

#[test]
fn correct_position() {
    // NOTE(review): the interior spacing of this raw string was reconstructed
    // from the column assertions below (columns are 1-based) — confirm.
    let mut lexer = Lexer::new(
        r" #comment1
        query {
          #comment2
            field #comment3
                #comment4

    }
    ",
    );
    lexer.advance_all();
    assert_eq!(lexer.tokens.len(), 10);
    assert_eq!(lexer.tokens[1].position.line, 1);
    assert_eq!(lexer.tokens[1].position.column, 2);

    assert_eq!(lexer.tokens[2].position.line, 2);
    assert_eq!(lexer.tokens[2].position.column, 9);

    assert_eq!(lexer.tokens[3].position.line, 2);
    assert_eq!(lexer.tokens[3].position.column, 15);

    assert_eq!(lexer.tokens[4].position.line, 3);
    assert_eq!(lexer.tokens[4].position.column, 11);

    assert_eq!(lexer.tokens[5].position.line, 4);
    assert_eq!(lexer.tokens[5].position.column, 13);

    assert_eq!(lexer.tokens[6].position.line, 4);
    assert_eq!(lexer.tokens[6].position.column, 19);

    assert_eq!(lexer.tokens[7].position.line, 5);
    assert_eq!(lexer.tokens[7].position.column, 17);

    assert_eq!(lexer.tokens[8].position.line, 7);
    assert_eq!(lexer.tokens[8].position.column, 5);
}
}
--------------------------------------------------------------------------------
/src/lib.rs:
--------------------------------------------------------------------------------
#[macro_use]
extern crate serde_derive;

use wasm_bindgen::prelude::*;
pub mod lexer;
pub mod parser;
pub mod ast;
| 9 | #[wasm_bindgen] 10 | pub fn parse(document: &str) -> JsValue { 11 | let mut lexer = lexer::Lexer::new(document); 12 | let result = parser::parse(&mut lexer).unwrap(); 13 | JsValue::from_serde(&result).unwrap() 14 | } 15 | -------------------------------------------------------------------------------- /src/parser.rs: -------------------------------------------------------------------------------- 1 | use crate::ast::*; 2 | use crate::lexer::*; 3 | use Option::*; 4 | use Result::*; 5 | use TokenKind::*; 6 | use TypeReference::*; 7 | 8 | #[derive(Debug)] 9 | pub struct ParsingError { 10 | pub message: String, 11 | } 12 | impl ParsingError { 13 | pub fn new(message_str: &str) -> ParsingError { 14 | ParsingError { 15 | message: String::from(message_str), 16 | } 17 | } 18 | } 19 | 20 | pub fn parse(lexer: &mut Lexer) -> Result { 21 | document(lexer) 22 | } 23 | 24 | fn document(lexer: &mut Lexer) -> Result { 25 | let definitions = many( 26 | lexer, 27 | TokenKind::SOF, 28 | parse_definition, 29 | TokenKind::END_OF_INPUT, 30 | )?; 31 | Ok(Document { definitions }) 32 | } 33 | 34 | fn parse_definition(lexer: &mut Lexer) -> Result { 35 | if peek(lexer, NAME) { 36 | match lexer.current_token_value().as_ref() { 37 | "query" | "mutation" | "subscription" | "fragment" => { 38 | return parse_executable_definition(lexer) 39 | } 40 | "schema" | "scalar" | "type" | "interface" | "union" | "enum" | "input" 41 | | "directive" => return parse_type_system_definition(lexer), 42 | "extend" => return parse_type_system_extension(lexer), 43 | value => return Err(ParsingError::new(&format!("unexpected string '{}'", value))), 44 | } 45 | } else if peek(lexer, BRACE_L) { 46 | return parse_executable_definition(lexer); 47 | } else if peek_description(lexer) { 48 | return parse_type_system_definition(lexer); 49 | } 50 | Err(ParsingError::new("unexpected token")) 51 | } 52 | 53 | fn parse_type_system_definition(lexer: &mut Lexer) -> Result { 54 | // this hear only for making sure the 
reference to the value lives long enough 55 | let lookahead_token; 56 | let (token_kind, token_value) = if peek_description(lexer) { 57 | lookahead_token = lookahead_lexer(lexer)?; 58 | (lookahead_token.kind, lookahead_token.get_value()) 59 | } else { 60 | (lexer.current_token().kind, lexer.current_token_value()) 61 | }; 62 | if token_kind != NAME { 63 | return unexpected_token(token_kind, NAME); 64 | } 65 | match token_value.as_ref() { 66 | "schema" => return parse_schema_definition(lexer), 67 | "scalar" => return parse_scalar_type_definition(lexer), 68 | "type" => return parse_object_type_definition(lexer), 69 | "interface" => return parse_interface_type_definition(lexer), 70 | "union" => return parse_union_type_definition(lexer), 71 | "enum" => return parse_enum_type_definition(lexer), 72 | "input" => return parse_input_object_type_definition(lexer), 73 | "directive" => return parse_directive_definition(lexer), 74 | value => { 75 | return Err(ParsingError::new(&format!( 76 | "unexpected {} for type system definition", 77 | value 78 | ))) 79 | } 80 | } 81 | } 82 | 83 | fn parse_directives(lexer: &mut Lexer, is_const: bool) -> Result, ParsingError> { 84 | let mut result = Vec::new(); 85 | loop { 86 | if !peek(lexer, AT) { 87 | break; 88 | } 89 | result.push(parse_directive(lexer, is_const)?); 90 | } 91 | Ok(result) 92 | } 93 | fn parse_directive(lexer: &mut Lexer, is_const: bool) -> Result { 94 | expect_token(lexer, AT)?; 95 | let name = parse_name(lexer)?; 96 | let arguments = parse_arguments(lexer, is_const)?; 97 | Ok(Directive { name, arguments }) 98 | } 99 | 100 | fn parse_arguments(lexer: &mut Lexer, is_const: bool) -> Result, ParsingError> { 101 | let parse_fn = if is_const { 102 | parse_const_argument 103 | } else { 104 | parse_argument 105 | }; 106 | if peek(lexer, PAREN_L) { 107 | many(lexer, PAREN_L, parse_fn, PAREN_R) 108 | } else { 109 | Ok(Vec::new()) 110 | } 111 | } 112 | 113 | fn parse_argument(lexer: &mut Lexer) -> Result { 114 | let name = 
parse_name(lexer)?; 115 | expect_token(lexer, COLON)?; 116 | let value = parse_value_value(lexer)?; 117 | Ok(Argument { name, value }) 118 | } 119 | 120 | fn parse_const_argument(lexer: &mut Lexer) -> Result { 121 | let name = parse_name(lexer)?; 122 | expect_token(lexer, COLON)?; 123 | let value = parse_const_value(lexer)?; 124 | Ok(Argument { name, value }) 125 | } 126 | 127 | fn parse_schema_definition(lexer: &mut Lexer) -> Result { 128 | expect_keyword(lexer, "schema")?; 129 | let directives = parse_directives(lexer, true)?; 130 | let operation_type_definitions = 131 | many(lexer, BRACE_L, parse_operation_type_definition, BRACE_R)?; 132 | Ok(Definition::Schema(SchemaDefinition { 133 | directives, 134 | operation_type_definitions, 135 | })) 136 | } 137 | fn parse_operation_type_definition( 138 | lexer: &mut Lexer, 139 | ) -> Result { 140 | let operation = parse_operation_type(lexer)?; 141 | expect_token(lexer, COLON)?; 142 | let type_name = parse_name(lexer)?; 143 | Ok(OperationTypeDefinition { 144 | operation, 145 | type_name, 146 | }) 147 | } 148 | fn parse_scalar_type_definition(lexer: &mut Lexer) -> Result { 149 | let description = parse_description(lexer)?; 150 | expect_keyword(lexer, "scalar")?; 151 | let name = parse_name(lexer)?; 152 | let directives = parse_directives(lexer, true)?; 153 | Ok(Definition::ScalarType(ScalarTypeDefinition { 154 | name, 155 | description, 156 | directives, 157 | })) 158 | } 159 | fn parse_object_type_definition(lexer: &mut Lexer) -> Result { 160 | expect_keyword(lexer, "type")?; 161 | let description = parse_description(lexer)?; 162 | let name = parse_name(lexer)?; 163 | let interfaces = parse_implements_interfaces(lexer)?; 164 | let directives = parse_directives(lexer, true)?; 165 | let fields = parse_field_definitions(lexer)?; 166 | Ok(Definition::ObjectType(ObjectTypeDefinition { 167 | name, 168 | interfaces, 169 | fields, 170 | description, 171 | directives, 172 | })) 173 | } 174 | 175 | fn parse_field_definitions(lexer: 
&mut Lexer) -> Result, ParsingError> { 176 | many(lexer, BRACE_L, parse_field_definition, BRACE_R) 177 | } 178 | 179 | fn parse_field_definition(lexer: &mut Lexer) -> Result { 180 | let name = parse_name(lexer)?; 181 | let arguments = parse_argument_definitions(lexer)?; 182 | expect_token(lexer, COLON)?; 183 | let type_reference = parse_type_reference(lexer)?; 184 | Ok(FieldDefinition { 185 | name, 186 | type_reference, 187 | arguments, 188 | }) 189 | } 190 | fn parse_argument_definitions( 191 | lexer: &mut Lexer, 192 | ) -> Result, ParsingError> { 193 | if !peek(lexer, PAREN_L) { 194 | return Ok(Vec::new()); 195 | } 196 | many(lexer, PAREN_L, parse_input_value_definition, PAREN_R) 197 | } 198 | 199 | fn parse_input_value_definition(lexer: &mut Lexer) -> Result { 200 | let description = parse_description(lexer)?; 201 | let name = parse_name(lexer)?; 202 | expect_token(lexer, COLON)?; 203 | let type_reference = parse_type_reference(lexer)?; 204 | let default_value = match expect_optional_token(lexer, EQUALS)? { 205 | Some(_) => Some(parse_const_value(lexer)?), 206 | None => None, 207 | }; 208 | let directives = parse_directives(lexer, true)?; 209 | Ok(InputValueDefinition { 210 | name, 211 | type_reference, 212 | default_value, 213 | description, 214 | directives, 215 | }) 216 | } 217 | 218 | fn parse_implements_interfaces(lexer: &mut Lexer) -> Result, ParsingError> { 219 | if !expect_optional_keyword(lexer, "implements")? 
{ 220 | return Ok(Vec::new()); 221 | } 222 | let mut result = Vec::new(); 223 | expect_optional_token(lexer, AMP)?; 224 | loop { 225 | result.push(parse_name(lexer)?); 226 | if expect_optional_token(lexer, AMP)?.is_none() { 227 | break; 228 | } 229 | } 230 | Ok(result) 231 | } 232 | fn parse_interface_type_definition(lexer: &mut Lexer) -> Result { 233 | expect_keyword(lexer, "interface")?; 234 | let description = parse_description(lexer)?; 235 | let name = parse_name(lexer)?; 236 | let directives = parse_directives(lexer, true)?; 237 | let fields = parse_field_definitions(lexer)?; 238 | Ok(Definition::InterfaceType(InterfaceTypeDefinition { 239 | description, 240 | name, 241 | fields, 242 | directives, 243 | })) 244 | } 245 | fn parse_union_type_definition(lexer: &mut Lexer) -> Result { 246 | let description = parse_description(lexer)?; 247 | expect_keyword(lexer, "union")?; 248 | let name = parse_name(lexer)?; 249 | let directives = parse_directives(lexer, true)?; 250 | let types = parse_union_type_members(lexer)?; 251 | Ok(Definition::UnionType(UnionTypeDefinition { 252 | name, 253 | description, 254 | directives, 255 | types, 256 | })) 257 | } 258 | 259 | fn parse_union_type_members(lexer: &mut Lexer) -> Result, ParsingError> { 260 | let mut result = Vec::new(); 261 | if expect_optional_token(lexer, EQUALS)?.is_some() { 262 | expect_optional_token(lexer, PIPE)?; 263 | loop { 264 | result.push(parse_name(lexer)?); 265 | if expect_optional_token(lexer, PIPE)?.is_none() { 266 | break; 267 | } 268 | } 269 | }; 270 | Ok(result) 271 | } 272 | fn parse_enum_type_definition(lexer: &mut Lexer) -> Result { 273 | let description = parse_description(lexer)?; 274 | expect_keyword(lexer, "enum")?; 275 | let name = parse_name(lexer)?; 276 | let directives = parse_directives(lexer, true)?; 277 | let values = parse_enum_values_definition(lexer)?; 278 | Ok(Definition::EnumType(EnumTypeDefinition { 279 | name, 280 | directives, 281 | description, 282 | values, 283 | })) 284 | } 
285 | fn parse_enum_values_definition( 286 | lexer: &mut Lexer, 287 | ) -> Result, ParsingError> { 288 | if peek(lexer, BRACE_L) { 289 | many(lexer, BRACE_L, parse_enum_value_definition, BRACE_R) 290 | } else { 291 | Ok(Vec::new()) 292 | } 293 | } 294 | fn parse_enum_value_definition(lexer: &mut Lexer) -> Result { 295 | let description = parse_description(lexer)?; 296 | let name = parse_name(lexer)?; 297 | let directives = parse_directives(lexer, true)?; 298 | Ok(EnumValueDefinition { 299 | name, 300 | description, 301 | directives, 302 | }) 303 | } 304 | fn parse_input_object_type_definition(lexer: &mut Lexer) -> Result { 305 | let description = parse_description(lexer)?; 306 | expect_keyword(lexer, "input")?; 307 | let name = parse_name(lexer)?; 308 | let directives = parse_directives(lexer, true)?; 309 | let fields = parse_input_fields_definition(lexer)?; 310 | Ok(Definition::InputObjectType(InputObjectTypeDefinition { 311 | description, 312 | name, 313 | directives, 314 | fields, 315 | })) 316 | } 317 | 318 | fn parse_input_fields_definition( 319 | lexer: &mut Lexer, 320 | ) -> Result, ParsingError> { 321 | if peek(lexer, BRACE_L) { 322 | many(lexer, BRACE_L, parse_input_value_definition, BRACE_R) 323 | } else { 324 | Ok(Vec::new()) 325 | } 326 | } 327 | 328 | fn parse_directive_definition(lexer: &mut Lexer) -> Result { 329 | let description = parse_description(lexer)?; 330 | expect_keyword(lexer, "directive")?; 331 | expect_token(lexer, AT)?; 332 | let name = parse_name(lexer)?; 333 | let arguments = parse_argument_definitions(lexer)?; 334 | expect_keyword(lexer, "on")?; 335 | let locations = parse_directive_locations(lexer)?; 336 | Ok(Definition::Directive(DirectiveDefinition { 337 | description, 338 | name, 339 | arguments, 340 | locations, 341 | })) 342 | } 343 | 344 | fn parse_directive_locations(lexer: &mut Lexer) -> Result, ParsingError> { 345 | expect_optional_token(lexer, PIPE)?; 346 | let mut result = Vec::new(); 347 | loop { 348 | 
result.push(parse_directive_location(lexer)?); 349 | if expect_optional_token(lexer, PIPE)?.is_none() { 350 | break; 351 | } 352 | } 353 | Ok(result) 354 | } 355 | fn parse_directive_location(lexer: &mut Lexer) -> Result { 356 | let name = parse_name(lexer)?; 357 | match name.as_ref() { 358 | "QUERY" => Ok(DirectiveLocation::QUERY), 359 | "MUTATION" => Ok(DirectiveLocation::MUTATION), 360 | "SUBSCRIPTION" => Ok(DirectiveLocation::SUBSCRIPTION), 361 | "FIELD" => Ok(DirectiveLocation::FIELD), 362 | "FRAGMENT_DEFINITION" => Ok(DirectiveLocation::FRAGMENT_DEFINITION), 363 | "FRAGMENT_SPREAD" => Ok(DirectiveLocation::FRAGMENT_SPREAD), 364 | "INLINE_FRAGMENT" => Ok(DirectiveLocation::INLINE_FRAGMENT), 365 | "VARIABLE_DEFINITION" => Ok(DirectiveLocation::VARIABLE_DEFINITION), 366 | "SCHEMA" => Ok(DirectiveLocation::SCHEMA), 367 | "SCALAR" => Ok(DirectiveLocation::SCALAR), 368 | "OBJECT" => Ok(DirectiveLocation::OBJECT), 369 | "FIELD_DEFINITION" => Ok(DirectiveLocation::FIELD_DEFINITION), 370 | "ARGUMENT_DEFINITION" => Ok(DirectiveLocation::ARGUMENT_DEFINITION), 371 | "INTERFACE" => Ok(DirectiveLocation::INTERFACE), 372 | "UNION" => Ok(DirectiveLocation::UNION), 373 | "ENUM" => Ok(DirectiveLocation::ENUM), 374 | "ENUM_VALUE" => Ok(DirectiveLocation::ENUM_VALUE), 375 | "INPUT_OBJECT" => Ok(DirectiveLocation::INPUT_OBJECT), 376 | "INPUT_FIELD_DEFINITION" => Ok(DirectiveLocation::INPUT_FIELD_DEFINITION), 377 | _ => Err(ParsingError::new("unexpected directive location")), 378 | } 379 | } 380 | 381 | fn unexpected_token(actual: TokenKind, expected: TokenKind) -> Result { 382 | Err(ParsingError::new(&format!( 383 | "Unexecpted {:?}, but expected {:?}", 384 | actual, expected 385 | ))) 386 | } 387 | 388 | fn parse_description(lexer: &mut Lexer) -> Result, ParsingError> { 389 | if peek_description(lexer) { 390 | Ok(Some(get_string(parse_string_literal(lexer)?))) 391 | } else { 392 | Ok(None) 393 | } 394 | } 395 | fn get_string(string_value: Value) -> String { 396 | match 
string_value { 397 | Value::StringValue(value) => value, 398 | _ => panic!("shoud not happen"), 399 | } 400 | } 401 | 402 | fn parse_type_system_extension(lexer: &mut Lexer) -> Result { 403 | let token = lookahead_lexer(lexer)?; 404 | if token.kind != NAME { 405 | return unexpected_token(token.kind, NAME); 406 | } 407 | 408 | match token.value.as_ref().unwrap().as_ref() { 409 | "schema" => return parse_schema_extension(lexer), 410 | "scalar" => return parse_scalar_type_extension(lexer), 411 | "type" => return parse_object_type_extension(lexer), 412 | "interface" => return parse_interface_type_extension(lexer), 413 | "union" => return parse_union_type_extension(lexer), 414 | "enum" => return parse_enum_type_extension(lexer), 415 | "input" => return parse_input_object_type_extension(lexer), 416 | _ => return Err(ParsingError::new("unexpected")), 417 | } 418 | } 419 | fn parse_schema_extension(lexer: &mut Lexer) -> Result { 420 | expect_keyword(lexer, "extend")?; 421 | expect_keyword(lexer, "schema")?; 422 | let directives = parse_directives(lexer, true)?; 423 | let operation_type_definitions = if peek(lexer, BRACE_L) { 424 | many(lexer, BRACE_L, parse_operation_type_definition, BRACE_R)? 
425 | } else { 426 | Vec::new() 427 | }; 428 | if directives.len() == 0 && operation_type_definitions.len() == 0 { 429 | return Err(ParsingError::new("unexpected")); 430 | }; 431 | Ok(Definition::SchemaExtension(SchemaExtension { 432 | directives, 433 | operation_type_definitions, 434 | })) 435 | } 436 | 437 | fn parse_scalar_type_extension(lexer: &mut Lexer) -> Result { 438 | expect_keyword(lexer, "extend")?; 439 | expect_keyword(lexer, "scalar")?; 440 | let name = parse_name(lexer)?; 441 | let directives = parse_directives(lexer, true)?; 442 | if directives.len() == 0 { 443 | return Err(ParsingError::new("'extend scalar' requires directives")); 444 | }; 445 | Ok(Definition::ScalarTypeExtension(ScalarTypeExtension { 446 | directives, 447 | name, 448 | })) 449 | } 450 | fn parse_object_type_extension(lexer: &mut Lexer) -> Result { 451 | expect_keyword(lexer, "extend")?; 452 | expect_keyword(lexer, "type")?; 453 | let name = parse_name(lexer)?; 454 | let interfaces = parse_implements_interfaces(lexer)?; 455 | let directives = parse_directives(lexer, true)?; 456 | let fields = parse_field_definitions(lexer)?; 457 | if interfaces.len() == 0 && directives.len() == 0 && fields.len() == 0 { 458 | return Err(ParsingError::new("unexpected")); 459 | }; 460 | Ok(Definition::ObjectTypeExtension(ObjectTypeExtension { 461 | directives, 462 | name, 463 | interfaces, 464 | fields, 465 | })) 466 | } 467 | 468 | fn parse_interface_type_extension(lexer: &mut Lexer) -> Result { 469 | expect_keyword(lexer, "extend")?; 470 | expect_keyword(lexer, "interface")?; 471 | let name = parse_name(lexer)?; 472 | let directives = parse_directives(lexer, true)?; 473 | let fields = parse_field_definitions(lexer)?; 474 | if directives.len() == 0 && fields.len() == 0 { 475 | return Err(ParsingError::new("unexpected")); 476 | }; 477 | Ok(Definition::InterfaceTypeExtension(InterfaceTypeExtension { 478 | directives, 479 | name, 480 | fields, 481 | })) 482 | } 483 | 484 | fn 
parse_union_type_extension(lexer: &mut Lexer) -> Result { 485 | expect_keyword(lexer, "extend")?; 486 | expect_keyword(lexer, "union")?; 487 | let name = parse_name(lexer)?; 488 | let directives = parse_directives(lexer, true)?; 489 | let types = parse_union_type_members(lexer)?; 490 | if directives.len() == 0 && types.len() == 0 { 491 | return Err(ParsingError::new("unexpected")); 492 | }; 493 | Ok(Definition::UnionTypeExtension(UnionTypeExtension { 494 | directives, 495 | name, 496 | types, 497 | })) 498 | } 499 | 500 | fn parse_enum_type_extension(lexer: &mut Lexer) -> Result { 501 | expect_keyword(lexer, "extend")?; 502 | expect_keyword(lexer, "enum")?; 503 | let name = parse_name(lexer)?; 504 | let directives = parse_directives(lexer, true)?; 505 | let values = parse_enum_values_definition(lexer)?; 506 | if directives.len() == 0 && values.len() == 0 { 507 | return Err(ParsingError::new("unexpected")); 508 | }; 509 | Ok(Definition::EnumTypeExtension(EnumTypeExtension { 510 | directives, 511 | name, 512 | values, 513 | })) 514 | } 515 | fn parse_input_object_type_extension(lexer: &mut Lexer) -> Result { 516 | expect_keyword(lexer, "extend")?; 517 | expect_keyword(lexer, "input")?; 518 | let name = parse_name(lexer)?; 519 | let directives = parse_directives(lexer, true)?; 520 | let fields = parse_input_fields_definition(lexer)?; 521 | if directives.len() == 0 && fields.len() == 0 { 522 | return Err(ParsingError::new("unexpected")); 523 | }; 524 | Ok(Definition::InputObjectTypeExtension( 525 | InputObjectTypeExtension { 526 | directives, 527 | name, 528 | fields, 529 | }, 530 | )) 531 | } 532 | 533 | fn parse_executable_definition(lexer: &mut Lexer) -> Result { 534 | if peek(lexer, NAME) { 535 | let name_token = lexer.tokens.last().unwrap(); 536 | let name_value = name_token.value.as_ref().unwrap(); 537 | return match name_value.as_ref() { 538 | "query" | "mutation" | "subscription" => parse_operation_definition(lexer), 539 | "fragment" => 
parse_fragment_definition(lexer), 540 | _ => Err(ParsingError::new(&format!( 541 | "Illegal operation name {}", 542 | name_value 543 | ))), 544 | }; 545 | } else if peek(lexer, BRACE_L) { 546 | return parse_operation_definition(lexer); 547 | } 548 | Err(ParsingError::new("unexpected")) 549 | } 550 | 551 | fn parse_fragment_definition(lexer: &mut Lexer) -> Result { 552 | expect_keyword(lexer, "fragment")?; 553 | let name = parse_fragment_name(lexer)?; 554 | expect_keyword(lexer, "on")?; 555 | let type_condition = parse_name(lexer)?; 556 | let selection_set = parse_selection_set(lexer)?; 557 | Ok(Definition::Fragment(FragmentDefinition { 558 | name, 559 | type_condition, 560 | selection_set, 561 | })) 562 | } 563 | 564 | fn parse_operation_definition(lexer: &mut Lexer) -> Result { 565 | if peek(lexer, BRACE_L) { 566 | let selection_set = parse_selection_set(lexer)?; 567 | return Ok(Definition::Operation(OperationDefinition { 568 | operation_type: OperationType::Query, 569 | name: None, 570 | selection_set, 571 | variable_definitions: Vec::new(), 572 | })); 573 | } 574 | let operation_type = parse_operation_type(lexer)?; 575 | let name = match peek(lexer, NAME) { 576 | true => Some(parse_name(lexer)?), 577 | false => None, 578 | }; 579 | let variable_definitions = parse_variable_definitions(lexer)?; 580 | let selection_set = parse_selection_set(lexer)?; 581 | Ok(Definition::Operation(OperationDefinition { 582 | operation_type, 583 | name, 584 | selection_set, 585 | variable_definitions, 586 | })) 587 | } 588 | 589 | fn parse_variable_definitions(lexer: &mut Lexer) -> Result, ParsingError> { 590 | match peek(lexer, PAREN_L) { 591 | true => many(lexer, PAREN_L, parse_variable_definition, PAREN_R), 592 | false => Ok(vec![]), 593 | } 594 | } 595 | fn parse_variable_definition(lexer: &mut Lexer) -> Result { 596 | let name = parse_variable(lexer)?; 597 | expect_token(lexer, COLON)?; 598 | let type_reference = parse_type_reference(lexer)?; 599 | let default_value = match 
expect_optional_token(lexer, EQUALS)? { 600 | Some(_) => Some(parse_value_literal(lexer, true)?), 601 | None => None, 602 | }; 603 | Ok(VariableDefinition { 604 | name, 605 | type_reference, 606 | default_value, 607 | }) 608 | } 609 | 610 | fn parse_value_literal(lexer: &mut Lexer, is_const: bool) -> Result { 611 | let token = lexer.current_token(); 612 | let token_value = lexer.current_token_value_safe(); 613 | match token.kind { 614 | BRACKET_L => parse_list(lexer, is_const), 615 | BRACE_L => parse_object(lexer, is_const), 616 | INT => { 617 | advance_lexer(lexer)?; 618 | Ok(Value::IntValue(token_value.unwrap())) 619 | } 620 | FLOAT => { 621 | advance_lexer(lexer)?; 622 | Ok(Value::FloatValue(token_value.unwrap())) 623 | } 624 | STRING | BLOCK_STRING => parse_string_literal(lexer), 625 | NAME => { 626 | let name_value = token_value.unwrap(); 627 | if name_value == "true" || name_value == "false" { 628 | advance_lexer(lexer)?; 629 | Ok(Value::BooleanValue(name_value == "true")) 630 | } else if name_value == "null" { 631 | advance_lexer(lexer)?; 632 | Ok(Value::NullValue) 633 | } else { 634 | advance_lexer(lexer)?; 635 | Ok(Value::EnumValue(name_value)) 636 | } 637 | } 638 | DOLLAR => { 639 | if !is_const { 640 | Ok(Value::Variable(parse_variable(lexer)?)) 641 | } else { 642 | Err(ParsingError::new( 643 | "variable not allowed in const expression", 644 | )) 645 | } 646 | } 647 | _ => Err(ParsingError::new("ERROR")), 648 | } 649 | } 650 | 651 | fn parse_const_value(lexer: &mut Lexer) -> Result { 652 | parse_value_literal(lexer, true) 653 | } 654 | 655 | fn parse_value_value(lexer: &mut Lexer) -> Result { 656 | parse_value_literal(lexer, false) 657 | } 658 | 659 | fn parse_string_literal(lexer: &mut Lexer) -> Result { 660 | // we know that the current token is STRING or BLOCK_STRING 661 | let value = lexer.current_token_value().to_string(); 662 | advance_lexer(lexer)?; 663 | Ok(Value::StringValue(value)) 664 | } 665 | 666 | fn parse_list(lexer: &mut Lexer, is_const: 
bool) -> Result { 667 | let parse_fn = if is_const { 668 | parse_const_value 669 | } else { 670 | parse_value_value 671 | }; 672 | let values = any(lexer, BRACKET_L, parse_fn, BRACKET_R)?; 673 | Ok(Value::ListValue(Box::new(values))) 674 | } 675 | fn parse_object(lexer: &mut Lexer, is_const: bool) -> Result { 676 | let parse_fn = if is_const { 677 | parse_object_field_const 678 | } else { 679 | parse_object_field_value 680 | }; 681 | let fields = any(lexer, BRACE_L, parse_fn, BRACE_R)?; 682 | Ok(Value::ObjectValue(Box::new(fields))) 683 | } 684 | 685 | fn parse_object_field_const(lexer: &mut Lexer) -> Result { 686 | parse_object_field(lexer, true) 687 | } 688 | fn parse_object_field_value(lexer: &mut Lexer) -> Result { 689 | parse_object_field(lexer, false) 690 | } 691 | 692 | fn parse_object_field(lexer: &mut Lexer, is_const: bool) -> Result { 693 | let name = parse_name(lexer)?; 694 | expect_token(lexer, COLON)?; 695 | Ok(ObjectField { 696 | name, 697 | value: parse_value_literal(lexer, is_const)?, 698 | }) 699 | } 700 | fn parse_variable(lexer: &mut Lexer) -> Result { 701 | expect_token(lexer, DOLLAR)?; 702 | parse_name(lexer) 703 | } 704 | 705 | fn parse_type_reference(lexer: &mut Lexer) -> Result { 706 | let mut _type: TypeReference; 707 | if expect_optional_token(lexer, BRACKET_L)?.is_some() { 708 | _type = parse_type_reference(lexer)?; 709 | expect_token(lexer, BRACKET_R)?; 710 | _type = TypeReference::ListType(Box::new(_type)); 711 | } else { 712 | _type = TypeReference::NamedType(parse_name(lexer)?); 713 | } 714 | if expect_optional_token(lexer, BANG)?.is_some() { 715 | _type = TypeReference::NonNullType(Box::new(_type)); 716 | } 717 | return Ok(_type); 718 | } 719 | 720 | fn parse_operation_type(lexer: &mut Lexer) -> Result { 721 | let name = expect_token(lexer, NAME)?; 722 | let name_value = name.value.as_ref(); 723 | match name_value.unwrap().as_ref() { 724 | "query" => Ok(OperationType::Query), 725 | "mutation" => Ok(OperationType::Mutation), 726 | 
"subscription" => Ok(OperationType::Subscription), 727 | _ => Err(ParsingError::new("not yet")), 728 | } 729 | } 730 | fn parse_selection_set(lexer: &mut Lexer) -> Result { 731 | let selections = many(lexer, BRACE_L, parse_selection, BRACE_R)?; 732 | Ok(SelectionSet { selections }) 733 | } 734 | 735 | fn parse_selection(lexer: &mut Lexer) -> Result { 736 | if peek(lexer, SPREAD) { 737 | return parse_fragment_selection(lexer); 738 | } 739 | Ok(Selection::Field(parse_field(lexer)?)) 740 | } 741 | 742 | fn parse_fragment_selection(lexer: &mut Lexer) -> Result { 743 | expect_token(lexer, SPREAD)?; 744 | let has_type_condition = expect_optional_keyword(lexer, "on")?; 745 | if !has_type_condition && peek(lexer, NAME) { 746 | let name = parse_fragment_name(lexer)?; 747 | return Ok(Selection::FragmentSpread(FragmentSpread { name })); 748 | } 749 | let type_condition = if has_type_condition { 750 | Some(parse_name(lexer)?) 751 | } else { 752 | None 753 | }; 754 | let selection_set = parse_selection_set(lexer)?; 755 | Ok(Selection::InlineFragment(InlineFragment { 756 | type_condition, 757 | selection_set, 758 | })) 759 | } 760 | 761 | fn parse_fragment_name(lexer: &mut Lexer) -> Result { 762 | if lexer.current_token_value() == "on" { 763 | return Err(ParsingError::new("Unexpected 'on'")); 764 | } 765 | return parse_name(lexer); 766 | } 767 | 768 | fn parse_field(lexer: &mut Lexer) -> Result { 769 | let name_or_alias = parse_name(lexer)?; 770 | let name: String; 771 | let mut alias: Option = None; 772 | let colon = expect_optional_token(lexer, COLON)?; 773 | if colon.is_some() { 774 | alias = Some(name_or_alias); 775 | name = parse_name(lexer)?; 776 | } else { 777 | name = name_or_alias; 778 | } 779 | let arguments = parse_arguments(lexer, false)?; 780 | let directives = parse_directives(lexer, false)?; 781 | let mut selection_set: Option = None; 782 | if peek(lexer, BRACE_L) { 783 | selection_set = Some(parse_selection_set(lexer)?); 784 | } 785 | Ok(Field { 786 | name, 787 | 
alias,
        selection_set,
        arguments,
        directives,
    })
}

/// Consumes a NAME token and returns its string value.
fn parse_name(lexer: &mut Lexer) -> Result<String, ParsingError> {
    let name_token = expect_token(lexer, NAME)?;
    // NAME tokens always carry a value, so the unwrap cannot fail here.
    Ok(name_token.value.as_ref().unwrap().to_string())
}

/// Returns true when the current token is of the given kind, without consuming it.
fn peek(lexer: &mut Lexer, kind: TokenKind) -> bool {
    lexer.current_token().kind == kind
}

/// Returns true when the current token can start a description
/// (a single-line or a block string).
fn peek_description(lexer: &mut Lexer) -> bool {
    peek(lexer, STRING) || peek(lexer, BLOCK_STRING)
}

/// Parses zero or more items produced by `parse_fn`, enclosed between
/// `open_kind` and `close_kind` tokens. The delimiters are consumed.
fn any<T>(
    lexer: &mut Lexer,
    open_kind: TokenKind,
    parse_fn: fn(&mut Lexer) -> Result<T, ParsingError>,
    close_kind: TokenKind,
) -> Result<Vec<T>, ParsingError> {
    expect_token(lexer, open_kind)?;
    let mut result: Vec<T> = Vec::new();
    while expect_optional_token(lexer, close_kind)?.is_none() {
        result.push(parse_fn(lexer)?);
    }
    Ok(result)
}

/// Parses one or more items produced by `parse_fn`, enclosed between
/// `open_kind` and `close_kind` tokens. The delimiters are consumed.
fn many<T>(
    lexer: &mut Lexer,
    open_kind: TokenKind,
    parse_fn: fn(&mut Lexer) -> Result<T, ParsingError>,
    close_kind: TokenKind,
) -> Result<Vec<T>, ParsingError> {
    expect_token(lexer, open_kind)?;
    let mut result: Vec<T> = Vec::new();
    // "many" requires at least one item before the closing token.
    result.push(parse_fn(lexer)?);
    while expect_optional_token(lexer, close_kind)?.is_none() {
        result.push(parse_fn(lexer)?);
    }
    Ok(result)
}

/// Consumes the current token when it is the NAME keyword `value`;
/// errors otherwise.
fn expect_keyword(lexer: &mut Lexer, value: &str) -> Result<(), ParsingError> {
    let token = lexer.current_token();
    if token.kind == TokenKind::NAME && token.value.as_ref().unwrap() == value {
        advance_lexer(lexer)?;
        return Ok(());
    }
    Err(ParsingError::new(&format!("expect {}", value)))
}

/// Consumes the NAME keyword `value` when present; returns whether it was consumed.
fn expect_optional_keyword(lexer: &mut Lexer, value: &str) -> Result<bool, ParsingError> {
    let token = lexer.current_token();
    if token.kind == TokenKind::NAME && token.value.as_ref().unwrap() == value {
        advance_lexer(lexer)?;
        return Ok(true);
    }
    Ok(false)
}

/// Consumes and returns the current token, failing when it is not of `kind`.
fn expect_token(lexer: &mut Lexer, kind: TokenKind) -> Result<&Token, ParsingError> {
    let last_token = lexer.current_token();
    let last_kind = last_token.kind;
    if last_kind == kind {
        advance_lexer(lexer)?;
        // After advancing, the matched token is the lexer's previous token.
        Ok(lexer.prev_token())
    } else {
        Err(ParsingError {
            message: format!("Expected {:?}, but got {:?}", kind, last_kind),
        })
    }
}

/// Consumes and returns the current token only when it is of `kind`;
/// otherwise leaves the lexer untouched and returns `None`.
fn expect_optional_token(
    lexer: &mut Lexer,
    kind: TokenKind,
) -> Result<Option<&Token>, ParsingError> {
    let current_token = lexer.current_token();
    if current_token.kind == kind {
        advance_lexer(lexer)?;
        Ok(Some(lexer.prev_token()))
    } else {
        Ok(None)
    }
}

/// Advances the lexer by one token, converting lexer errors into `ParsingError`s.
fn advance_lexer(lexer: &mut Lexer) -> Result<&Token, ParsingError> {
    match lexer.advance() {
        Ok(token) => Ok(token),
        Err(lexer_error) => Err(ParsingError::new(&format!(
            "Lexer error: {}",
            lexer_error.message
        ))),
    }
}

/// Peeks at the next token without consuming it, converting lexer errors
/// into `ParsingError`s.
fn lookahead_lexer(lexer: &mut Lexer) -> Result<Token, ParsingError> {
    match lexer.lookahead() {
        Ok(token) => Ok(token),
        Err(lexer_error) => Err(ParsingError::new(&format!(
            "Lexer error: {}",
            lexer_error.message
        ))),
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    macro_rules!
enum_field {
        // Extracts the payload of an enum variant, or panics showing the
        // offending expression. (The original panic! had a `{}` placeholder
        // with no argument, which does not compile when this arm expands.)
        ($enum:ident $field:ident $exp:expr) => {
            if let $enum::$field(value) = $exp {
                value
            } else {
                panic!("unexpected {}", stringify!($exp));
            }
        };
    }

    #[test]
    fn parse_simple_query() {
        let mut lexer = Lexer::new("{foo}");
        let result = parse(&mut lexer);
        assert!(result.is_ok());
        let field = Field {
            name: String::from("foo"),
            alias: None,
            selection_set: None,
            directives: Vec::new(),
            arguments: Vec::new(),
        };
        let selection_set = SelectionSet {
            selections: vec![Selection::Field(field)],
        };

        let document = result.unwrap();
        let definition = Definition::Operation(OperationDefinition {
            name: None,
            operation_type: OperationType::Query,
            variable_definitions: Vec::new(),
            selection_set,
        });
        let expected = Document {
            definitions: vec![definition],
        };
        assert_eq!(document, expected);
    }

    #[test]
    fn parse_field_alias() {
        let mut lexer = Lexer::new("{alias: foo}");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let operation_definition = enum_field!(Definition Operation definition);
        let field = enum_field!(Selection Field &operation_definition.selection_set.selections[0]);
        assert_eq!(field.alias.as_ref().unwrap(), "alias");
    }

    #[test]
    fn parse_field_with_args() {
        let mut lexer = Lexer::new(r#"{foo(arg:"hello")}"#);
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let operation_definition = enum_field!(Definition Operation definition);
        let field = enum_field!(Selection Field &operation_definition.selection_set.selections[0]);
        let argument = &field.arguments[0];
        assert_eq!(argument.name, "arg");
        assert_eq!(argument.value, Value::StringValue(String::from("hello")));
    }

    #[test]
    fn parse_fragment_definition() {
        let mut lexer = Lexer::new("fragment Foo on Bar { field }");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let fragment_definition = enum_field!(Definition Fragment definition);
        assert_eq!(fragment_definition.name, "Foo");
        assert_eq!(fragment_definition.type_condition, "Bar");
        let selection = &fragment_definition.selection_set.selections[0];
        let field = enum_field!(Selection Field selection);
        assert_eq!(field.name, "field");
    }

    #[test]
    fn parse_object_definition() {
        let mut lexer = Lexer::new("type Foo{bar: String}");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let object_type_definition = enum_field!(Definition ObjectType definition);
        assert_eq!(object_type_definition.name, "Foo");
        let field_definition = &object_type_definition.fields[0];
        assert_eq!(field_definition.name, "bar");
        assert_eq!(
            field_definition.type_reference,
            TypeReference::NamedType(String::from("String"))
        );
    }

    #[test]
    fn parse_object_extension() {
        let mut lexer = Lexer::new("extend type Foo{bar: String}");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let object_type_extension = enum_field!(Definition ObjectTypeExtension definition);
        assert_eq!(object_type_extension.name, "Foo");
        let field_definition = &object_type_extension.fields[0];
        assert_eq!(field_definition.name, "bar");
        assert_eq!(
            field_definition.type_reference,
            TypeReference::NamedType(String::from("String"))
        );
    }

    #[test]
    fn parse_schema_definition() {
        let mut lexer = Lexer::new("schema { query: MyQuery}");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let schema_definition = enum_field!(Definition Schema definition);
        let op_type_def = &schema_definition.operation_type_definitions[0];
        assert_eq!(op_type_def.operation, OperationType::Query);
        assert_eq!(op_type_def.type_name, "MyQuery");
    }

    #[test]
    fn parse_schema_extension() {
        let mut lexer = Lexer::new("extend schema { mutation: MyMutation}");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let schema_extension = enum_field!(Definition SchemaExtension definition);
        let op_type_def = &schema_extension.operation_type_definitions[0];
        assert_eq!(op_type_def.operation, OperationType::Mutation);
        assert_eq!(op_type_def.type_name, "MyMutation");
    }

    #[test]
    fn parse_interface_definition() {
        let mut lexer = Lexer::new("interface MyI { field : Int }");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let interface_definition = enum_field!(Definition InterfaceType definition);
        assert_eq!(interface_definition.name, "MyI");
        let field_definition = &interface_definition.fields[0];
        assert_eq!(field_definition.name, "field");
        assert_eq!(
            field_definition.type_reference,
            TypeReference::NamedType(String::from("Int"))
        );
    }

    #[test]
    fn parse_interface_extension() {
        let mut lexer = Lexer::new("extend interface MyI { field : Int }");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let interface_extension = enum_field!(Definition InterfaceTypeExtension definition);
        assert_eq!(interface_extension.name, "MyI");
        let field_definition = &interface_extension.fields[0];
        assert_eq!(field_definition.name, "field");
        assert_eq!(
            field_definition.type_reference,
            TypeReference::NamedType(String::from("Int"))
        );
    }

    #[test]
    fn parse_enum_definition() {
        let mut lexer = Lexer::new("enum MyEnum { FOO, BAR }");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let enum_definition = enum_field!(Definition EnumType definition);
        assert_eq!(enum_definition.name, "MyEnum");
        let value1 = &enum_definition.values[0];
        let value2 = &enum_definition.values[1];
        assert_eq!(value1.name, "FOO");
        assert_eq!(value2.name, "BAR");
    }

    #[test]
    fn parse_enum_extension() {
        let mut lexer = Lexer::new("extend enum MyEnum { FOO, BAR }");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let enum_extension = enum_field!(Definition EnumTypeExtension definition);
        assert_eq!(enum_extension.name, "MyEnum");
        let value1 = &enum_extension.values[0];
        let value2 = &enum_extension.values[1];
        assert_eq!(value1.name, "FOO");
        assert_eq!(value2.name, "BAR");
    }

    #[test]
    fn parse_union_definition() {
        let mut lexer = Lexer::new("union MyUnion = A | B | C");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let union_definition = enum_field!(Definition UnionType definition);
        assert_eq!(union_definition.name, "MyUnion");
        let type1 = &union_definition.types[0];
        let type2 = &union_definition.types[1];
        let type3 = &union_definition.types[2];
        assert_eq!(type1, "A");
        assert_eq!(type2, "B");
        assert_eq!(type3, "C");
    }

    #[test]
    fn parse_union_extension() {
        let mut lexer = Lexer::new("extend union MyUnion = A");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let union_extension = enum_field!(Definition UnionTypeExtension definition);
        assert_eq!(union_extension.name, "MyUnion");
        let type1 = &union_extension.types[0];
        assert_eq!(type1, "A");
    }

    #[test]
    fn parse_directive_definition() {
        let mut lexer = Lexer::new("directive @MyDirective on FIELD_DEFINITION");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let directive_definition = enum_field!(Definition Directive definition);
        assert_eq!(directive_definition.name, "MyDirective");
        let location = directive_definition.locations[0];
        assert_eq!(location, DirectiveLocation::FIELD_DEFINITION);
    }

    #[test]
    fn parse_input_definition() {
        let mut lexer = Lexer::new("input MyInput {field: [Bool!]!}");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let input_definition = enum_field!(Definition InputObjectType definition);
        assert_eq!(input_definition.name, "MyInput");
        let field = &input_definition.fields[0];
        assert_eq!(field.name, "field");
        // [Bool!]! reads inside-out: NonNull(List(NonNull(Named("Bool")))).
        let non_null_bool = NonNullType(Box::new(NamedType(String::from("Bool"))));
        let type_ref = NonNullType(Box::new(ListType(Box::new(non_null_bool))));
        assert_eq!(field.type_reference, type_ref);
    }

    #[test]
    fn parse_input_extension() {
        let mut lexer = Lexer::new("extend input MyInput {field: Bool!}");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let input_extension = enum_field!(Definition InputObjectTypeExtension definition);
        assert_eq!(input_extension.name, "MyInput");
        let field = &input_extension.fields[0];
        assert_eq!(field.name, "field");
        let type_ref = NonNullType(Box::new(NamedType(String::from("Bool"))));
        assert_eq!(field.type_reference, type_ref);
    }

    #[test]
    fn parse_scalar_definition() {
        let mut lexer = Lexer::new("scalar MyScalar");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let scalar_type = enum_field!(Definition ScalarType definition);
        assert_eq!(scalar_type.name, "MyScalar");
    }

    #[test]
    fn parse_scalar_extension() {
        let mut lexer = Lexer::new("extend scalar MyScalar @MyDirective");
        let result = parse(&mut lexer);
        let definition = &result.unwrap().definitions[0];
        let scalar_type = enum_field!(Definition ScalarTypeExtension definition);
        assert_eq!(scalar_type.name, "MyScalar");
        let directive = Directive {
            name: String::from("MyDirective"),
            arguments: Vec::new(),
        };
        assert_eq!(scalar_type.directives, vec![directive]);
    }

    #[test]
    fn parse_two_level_query() {
        let mut lexer = Lexer::new("{foo{bar}}");
        let result = parse(&mut lexer);
        assert!(result.is_ok());
        let bar = Field {
            name: String::from("bar"),
            alias: None,
            selection_set: None,
            directives: Vec::new(),
            arguments: Vec::new(),
        };
        let selection_set_foo = SelectionSet {
            selections: vec![Selection::Field(bar)],
        };
        let foo = Field {
            name: String::from("foo"),
            alias: None,
            selection_set: Some(selection_set_foo),
            directives: Vec::new(),
            arguments: Vec::new(),
        };
        let selection_set_operation = SelectionSet {
            selections: vec![Selection::Field(foo)],
        };

        let document = result.unwrap();
        let definition = Definition::Operation(OperationDefinition {
            operation_type: OperationType::Query,
            selection_set: selection_set_operation,
            variable_definitions: Vec::new(),
            name: None,
        });
        let expected = Document {
            definitions: vec![definition],
        };
        assert_eq!(document, expected);
    }

    #[test]
    fn parse_operation_name() {
        let mut lexer = Lexer::new("query myQuery {foo}");
        let result = parse(&mut lexer);
        assert!(result.is_ok());
        let definition = &result.unwrap().definitions[0];
        let operation_definition = enum_field!(Definition Operation definition);
        assert_eq!(operation_definition.operation_type, OperationType::Query);
        assert_eq!(operation_definition.name.as_ref().unwrap(), "myQuery");
    }

    #[test]
    fn parse_variable_definitions() {
        let mut lexer = Lexer::new(r#"query myQuery($var1: String = "hello") {foo}"#);
        let result = parse(&mut lexer);
        assert!(result.is_ok());
        let definition = &result.unwrap().definitions[0];
        let operation_definition = enum_field!(Definition Operation definition);
        assert_eq!(operation_definition.variable_definitions.len(), 1);
        let var_def = &operation_definition.variable_definitions[0];
        assert_eq!(var_def.name, "var1");
        assert_eq!(
            var_def.type_reference,
            TypeReference::NamedType(String::from("String"))
        );
        assert_eq!(
            var_def.default_value.as_ref().unwrap(),
            &Value::StringValue(String::from("hello"))
        );
    }

    #[test]
    fn parse_variable_definitions_object_default_value() {
        let mut lexer = Lexer::new(r#"query myQuery($var1: InputObject = {field: "hello"}) {foo}"#);
        let result = parse(&mut lexer);
        assert!(result.is_ok());
        let definition = &result.unwrap().definitions[0];
        let operation_definition = enum_field!(Definition Operation definition);
        assert_eq!(operation_definition.variable_definitions.len(), 1);
        let var_def = &operation_definition.variable_definitions[0];
        let default_value = var_def.default_value.as_ref().unwrap();
        let object_field = ObjectField {
            name: String::from("field"),
            value: Value::StringValue(String::from("hello")),
        };
        assert_eq!(
            default_value,
            &Value::ObjectValue(Box::new(vec![object_field]))
        );
    }
}
--------------------------------------------------------------------------------
/test/published-test/index.js:
--------------------------------------------------------------------------------
const { parse } = require('graphql-wasm-parser');

const ast = parse("{foo}");
console.log(JSON.stringify(ast));
--------------------------------------------------------------------------------
/test/published-test/package-lock.json:
--------------------------------------------------------------------------------
{
  "name": "published-test",
  "version": "1.0.0",
  "lockfileVersion": 1,
  "requires": true,
  "dependencies": {
    "graphql-wasm-parser": {
      "version": "0.0.1",
      "resolved": "https://registry.npmjs.org/graphql-wasm-parser/-/graphql-wasm-parser-0.0.1.tgz",
      "integrity": "sha512-R2WrXk0DaKCeybD6EQxuqPQuKUhlvkGz+vemBPuqRpYjZyJUtied4R4ykoEjCOM5CiPQ0493DW1ntpw03hKMnw=="
    }
  }
}
--------------------------------------------------------------------------------
/test/published-test/package.json:
--------------------------------------------------------------------------------
{
  "name": "published-test",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "private": true,
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "",
  "license": "ISC",
  "dependencies": {
    "graphql-wasm-parser": "0.0.1"
  }
}
--------------------------------------------------------------------------------
/test/test.js:
--------------------------------------------------------------------------------
const { parse } = require('../pkg/graphql_wasm_parser');
const assert = require('assert');

const ast = parse(`type Foo{field1: String, field2: Int}`);
// console.log(JSON.stringify(ast));

const objectDef = ast.definitions[0];
assert.equal(objectDef.ObjectType.name, "Foo"); 9 | assert.equal(objectDef.ObjectType.fields.length, 2); 10 | assert.equal(objectDef.ObjectType.fields[0].name, "field1"); 11 | assert.equal(objectDef.ObjectType.fields[1].name, "field2"); 12 | 13 | console.log("Tests successful"); --------------------------------------------------------------------------------