├── .gitignore ├── jakefile ├── source.js ├── nodemon.json ├── .vscode └── settings.json ├── .babelrc.js ├── package.json ├── highlighter.js ├── links.md ├── js.js ├── README.md ├── todo.md ├── evaluate.js ├── typecheck.js ├── lexer.js ├── parser.js ├── parser.test.js └── lexer.test.js /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules -------------------------------------------------------------------------------- /jakefile: -------------------------------------------------------------------------------- 1 | evaluate.js; 2 | -------------------------------------------------------------------------------- /source.js: -------------------------------------------------------------------------------- 1 | /hightlight/ / "me"; 2 | if (you) can(); 3 | 4 | function foo(a, b) { 5 | 7 + 9; 6 | } 7 | -------------------------------------------------------------------------------- /nodemon.json: -------------------------------------------------------------------------------- 1 | { 2 | "events": { 3 | "start": "echo \"\\x1Bc\"" 4 | }, 5 | "delay": "100" 6 | } 7 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "javascript.implicitProjectConfig.checkJs": true, 3 | "cSpell.words": [ 4 | "picojs" 5 | ] 6 | } 7 | -------------------------------------------------------------------------------- /.babelrc.js: -------------------------------------------------------------------------------- 1 | const isTest = String(process.env["NODE_ENV"]) === "test"; 2 | 3 | module.exports = { 4 | presets: [ 5 | [ 6 | "@babel/preset-env", 7 | { 8 | modules: isTest ? 
"commonjs" : false, 9 | targets: { 10 | node: "current", 11 | }, 12 | }, 13 | ], 14 | ], 15 | }; 16 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "module", 3 | "name": "picojs", 4 | "version": "1.0.0", 5 | "description": "", 6 | "main": "index.js", 7 | "scripts": {}, 8 | "keywords": [], 9 | "author": "", 10 | "license": "ISC", 11 | "devDependencies": { 12 | "@babel/core": "^7.10.2", 13 | "@babel/preset-env": "^7.10.2", 14 | "@types/jest": "^25.2.3", 15 | "babel-jest": "^26.0.1", 16 | "jest": "^26.0.1", 17 | "prettier": "^2.0.5" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /highlighter.js: -------------------------------------------------------------------------------- 1 | const COLORS = { 2 | DivToken: "\x1b[31m", 3 | MulToken: "\x1b[31m", 4 | PlusToken: "\x1b[31m", 5 | RegExpToken: "\x1b[33m", 6 | String: "\x1b[32m", 7 | Id: "\x1b[34m", 8 | NumericLiteral: "\x1b[36m", 9 | CommentToken: "\x1b[37m\x1b[2m", 10 | Colon: "\x1b[37m\x1b[2m", 11 | Semicolon: "\x1b[37m\x1b[2m", 12 | OpenParent: "\x1b[37m\x1b[2m", 13 | CloseParent: "\x1b[37m\x1b[2m", 14 | OpenCurly: "\x1b[37m\x1b[2m", 15 | CloseCurly: "\x1b[37m\x1b[2m", 16 | If: "\x1b[31m", 17 | Else: "\x1b[31m", 18 | Function: "\x1b[31m", 19 | }; 20 | 21 | export function highlight(content, tokens) { 22 | let res = ""; 23 | for (const t of tokens) { 24 | const color = COLORS[t.type]; 25 | 26 | const tokenText = content.substring( 27 | t.loc.start.cursor, 28 | t.loc.end.cursor 29 | ); 30 | 31 | if (color) { 32 | res += `${color}${tokenText}\x1b[0m`; 33 | } else { 34 | res += tokenText; 35 | } 36 | } 37 | 38 | return res; 39 | } 40 | -------------------------------------------------------------------------------- /links.md: -------------------------------------------------------------------------------- 1 | Screen 1 2 | 3 
| - Lexing number: 4 | 5 | 1. TS https://github.com/microsoft/TypeScript/blob/ec4cf688b6dfeb67c5edc53220f7814d813ec7c5/src/compiler/scanner.ts#L1033 6 | 2. TS https://github.com/microsoft/TypeScript/blob/ec4cf688b6dfeb67c5edc53220f7814d813ec7c5/src/compiler/scanner.ts#L995 7 | 3. V8 https://github.com/v8/v8/blob/4b9b23521e6fd42373ebbcb20ebe03bf445494f9/src/parsing/scanner.cc#L833 8 | 4. sm https://github.com/servo/mozjs/blob/aabcc9ba889b2755f1e4e83f28323a60415a790f/mozjs/js/src/frontend/TokenStream.cpp#L2455 9 | 10 | - Parsing number: https://www.daniweb.com/programming/software-development/threads/413628/converting-string-to-integer-x86 11 | 12 | Screen 2 13 | 14 | - About tokeniser (lexer): https://www.youtube.com/watch?v=uSkiDxb0m0Y&feature=youtu.be&t=1253 15 | 16 | - Lexer from SpiderMonkey: https://github.com/servo/mozjs/blob/aabcc9ba889b2755f1e4e83f28323a60415a790f/mozjs/js/src/frontend/TokenStream.cpp#L3152 17 | 18 | Screen 5: Slashes 19 | 20 | [JS syntactic quirks][2] by Jason Orendorff 21 | 22 | [2]: https://github.com/mozilla-spidermonkey/jsparagus/blob/master/js-quirks.md#slashes- 23 | -------------------------------------------------------------------------------- /js.js: -------------------------------------------------------------------------------- 1 | import { readFileSync } from "fs"; 2 | import { lexer } from "./lexer.js"; 3 | import { parser } from "./parser.js"; 4 | import { evaluate, Exception } from "./evaluate.js"; 5 | import { typecheck } from "./typecheck.js"; 6 | import { highlight } from "./highlighter.js"; 7 | 8 | const file = "./source.js"; 9 | const content = String(readFileSync(file)); 10 | 11 | const { ast, tokens } = parser(lexer(file, content)); 12 | console.dir(ast, { depth: null }); 13 | 14 | console.log(highlight(content, tokens)); 15 | 16 | // const typeErrors = typecheck(ast); 17 | // if (typeErrors.length) { 18 | // console.log("TYPE ERRORS:"); 19 | // for (const error of typeErrors) { 20 | // console.log(` ${error}`); 21 | 
// } 22 | // } 23 | 24 | // const result = evaluate(ast); 25 | // if (result instanceof Exception) { 26 | // console.log(`panic: ${result.message}`); 27 | // for (const loc of result.backtrace) { 28 | // console.log( 29 | // ` at ${loc.file}:${loc.start.line}:${loc.start.column}` 30 | // ); 31 | // } 32 | // } else { 33 | // console.dir(result, { depth: null }); 34 | // } 35 | 36 | console.log("DONE"); 37 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | This is the source code gettig slowly written during a series of streams where Olie ([@olzh2102](https://github.com/olzh2102)) and Peter ([@peter-leonov](https://github.com/peter-leonov)) are trying to re-create the full frontend development stack in pure JavaScript from scratch, including parsing some minimal subset of JavaScript, then interpreting it, making some type checks a-la TypeScript/Flow, playing with bytecode, etc. 2 | 3 | Here are the links to the videos: 4 | 5 | 1. [Writing a JavaScript lexer (scanner, tokeniser) in JavaScript](https://www.youtube.com/watch?v=jMun70Q4J_Y&list=PLnipdbm4t965ROHrQlxLRcSavCudqq2mz&index=1) 6 | 2. [Writing a JavaScript parser in JavaScript](https://www.youtube.com/watch?v=iDL9bwADEoE&list=PLnipdbm4t965ROHrQlxLRcSavCudqq2mz&index=2) 7 | 3. [Unit testing the lexer and parser](https://www.youtube.com/watch?v=qJaugNyfjEM&list=PLnipdbm4t965ROHrQlxLRcSavCudqq2mz&index=3) 8 | 4. [Teaching operator precedence to the pico JavaScript parser](https://www.youtube.com/watch?v=qv-t5d4lkQM&list=PLnipdbm4t965ROHrQlxLRcSavCudqq2mz&index=4) 9 | 5. [Dealing with the famous ambiguity of forward slashes in the pico JavaScript parser](https://www.youtube.com/watch?v=THjv6Y7aJZ0&list=PLnipdbm4t965ROHrQlxLRcSavCudqq2mz&index=5) 10 | 6. 
[Evaluating the AST to a concrete value](https://www.youtube.com/watch?v=8somwjHNJOk&list=PLnipdbm4t965ROHrQlxLRcSavCudqq2mz&index=6)
--------------------------------------------------------------------------------
/todo.md:
--------------------------------------------------------------------------------
## Goals

Try to implement a tiny language but full dev stack.

- JS is our C++
- no dependencies
- pico JS implementation
- it's fun and easy to follow

## Stream 1

- [x] simple lexer (tokenizer)
- [x] context aware errors for lexer

## Stream 2

- [x] simple parser (produces AST)
- [x] context aware errors for parser

## Stream 3

- [x] just test things as they are

## Stream 4

- [x] add operator precedence

## Stream 5

- [x] add support for both division and regexps
- [x] add support for comments

## Stream 6

- [x] remove logging 🤦‍♂️
- [x] do some refactoring
- [x] implement AST evaluator
- [x] context aware errors for evaluator
- [x] implement some simple exception logic

## Stream 7

- [x] primitive type checking

## Stream 8

- [x] finish the refactoring
- [x] add some syntax highlighting

## Stream 9

- [x] extend lexer and parser with conditionals etc

## Later

- [ ] implement functions and then closures
- [ ] macros
- [ ] maybe switch lexing mode from "expression" to "operator"
- [ ] syntax highlight the error messages
- [ ] bytecode emission and execution
- [ ] implement some more complex exception propagation
- [ ] babel-like transformations and code emission
- [ ] VSCode language server
- [ ] try bison in JS
- [ ] emit WASM friendly output
- [ ] try true exceptions
- [ ] maybe something like source code rewrite as in prettier

-------------------------------------------------------------------------------- /evaluate.js: -------------------------------------------------------------------------------- 1 | export function Exception(message, loc) { 2 | this.message = message; 3 | this.backtrace = [loc]; 4 | } 5 | 6 | export function evaluate(root) { 7 | const visitor = { 8 | NumericLiteral({ value }) { 9 | return value; 10 | }, 11 | 12 | RegExpToken() { 13 | return /unimplemented/; 14 | }, 15 | 16 | BinaryExpression({ left, operatorToken: op, right, loc }) { 17 | const leftValue = visit(left); 18 | const rightValue = visit(right); 19 | 20 | if (leftValue instanceof Exception) { 21 | leftValue.backtrace.push(loc); 22 | return leftValue; 23 | } 24 | if (rightValue instanceof Exception) { 25 | rightValue.backtrace.push(loc); 26 | return rightValue; 27 | } 28 | 29 | switch (op.type) { 30 | case "MulToken": 31 | return leftValue * rightValue; 32 | 33 | case "DivToken": 34 | // if (rightValue === 0) { 35 | // return new Exception("division by zero error", op.loc); 36 | // } 37 | return leftValue / rightValue; 38 | 39 | case "PlusToken": 40 | return leftValue + rightValue; 41 | 42 | default: 43 | throw new Error( 44 | `unknown operator "${op.type}" at ${op.loc.file}:${op.loc.start.line}:${op.loc.start.column}` 45 | ); 46 | } 47 | }, 48 | }; 49 | 50 | function visit(node) { 51 | const handler = visitor[node.type]; 52 | if (!handler) { 53 | throw new Error( 54 | `unknown node type "${node.type}" at ${node.loc.file}:${node.loc.start.line}:${node.loc.start.column}` 55 | ); 56 | } 57 | 58 | return handler(node); 59 | } 60 | 61 | return visit(root); 62 | } 63 | -------------------------------------------------------------------------------- /typecheck.js: -------------------------------------------------------------------------------- 1 | function unimplemented(message = "not implemented") { 2 | throw new Error(message); 3 | } 4 | 5 | function unreachable() { 6 | throw new Error("unreachable!"); 7 | } 8 
| 9 | function locToStart(loc) { 10 | return `${loc.file}:${loc.start.line}:${loc.start.column}`; 11 | } 12 | 13 | class TypeBase { 14 | constructor(loc) { 15 | this.loc = loc; 16 | } 17 | name() { 18 | unreachable(); 19 | } 20 | operatorPlus() { 21 | return null; 22 | } 23 | operatorMul() { 24 | return null; 25 | } 26 | operatorDiv() { 27 | return null; 28 | } 29 | } 30 | 31 | class TypeAny extends TypeBase { 32 | name() { 33 | return "any"; 34 | } 35 | 36 | operatorPlus() { 37 | return TypeAny; 38 | } 39 | operatorMul() { 40 | return TypeAny; 41 | } 42 | operatorDiv() { 43 | return TypeAny; 44 | } 45 | } 46 | 47 | class TypeNumber extends TypeBase { 48 | name() { 49 | return "number"; 50 | } 51 | operatorPlus(right) { 52 | if (right instanceof TypeNumber) { 53 | return TypeNumber; 54 | } 55 | 56 | return null; 57 | } 58 | 59 | operatorMul(right) { 60 | if (right instanceof TypeNumber) { 61 | return TypeNumber; 62 | } 63 | 64 | return null; 65 | } 66 | 67 | operatorDiv(right) { 68 | if (right instanceof TypeNumber) { 69 | return TypeNumber; 70 | } 71 | 72 | return null; 73 | } 74 | } 75 | 76 | class TypeRegExp extends TypeBase { 77 | name() { 78 | return "RegExp"; 79 | } 80 | } 81 | 82 | export function typecheck(root) { 83 | const errors = []; 84 | 85 | const visitor = { 86 | NumericLiteral({ loc }) { 87 | return new TypeNumber(loc); 88 | }, 89 | 90 | RegExpToken({ loc }) { 91 | return new TypeRegExp(loc); 92 | }, 93 | 94 | BinaryExpression({ left, operatorToken: op, right, loc }) { 95 | const leftType = visit(left); 96 | const rightType = visit(right); 97 | 98 | const spanLoc = { 99 | file: left.loc.file, 100 | start: left.loc.start, 101 | end: right.loc.end, 102 | }; 103 | 104 | switch (op.type) { 105 | case "PlusToken": { 106 | const newType = leftType.operatorPlus(rightType); 107 | if (!newType) { 108 | errors.push( 109 | `Operator '+' cannot be applied to types '${leftType.name()}' and '${rightType.name()}' at ${locToStart( 110 | spanLoc 111 | )}.` 112 | ); 
113 | return new TypeAny(spanLoc); 114 | } 115 | return new newType(spanLoc); 116 | } 117 | 118 | case "MulToken": { 119 | const newType = leftType.operatorMul(rightType); 120 | if (!newType) { 121 | errors.push( 122 | `Operator '*' cannot be applied to types '${leftType.name()}' and '${rightType.name()}' at ${locToStart( 123 | spanLoc 124 | )}.` 125 | ); 126 | } 127 | return new TypeAny(spanLoc); 128 | } 129 | 130 | case "DivToken": { 131 | const newType = leftType.operatorDiv(rightType); 132 | if (!newType) { 133 | errors.push( 134 | `Operator '/' cannot be applied to types '${leftType.name()}' and '${rightType.name()}' at ${locToStart( 135 | spanLoc 136 | )}.` 137 | ); 138 | } 139 | return new TypeAny(spanLoc); 140 | } 141 | 142 | default: 143 | throw new Error( 144 | `unknown operator "${op.type}" at ${locToStart(op.loc)}` 145 | ); 146 | } 147 | }, 148 | }; 149 | 150 | function visit(node) { 151 | const handler = visitor[node.type]; 152 | if (!handler) { 153 | throw new Error( 154 | `unknown node type "${node.type}" at ${locToStart(node.loc)}` 155 | ); 156 | } 157 | 158 | return handler(node); 159 | } 160 | 161 | visit(root); 162 | 163 | return errors; 164 | } 165 | -------------------------------------------------------------------------------- /lexer.js: -------------------------------------------------------------------------------- 1 | function isNumeric(c) { 2 | return "0" <= c && c <= "9"; 3 | } 4 | 5 | function isAlpha(c) { 6 | return ("a" <= c && c <= "z") || ("A" <= c && c <= "Z"); 7 | } 8 | 9 | export function lexer(file, str) { 10 | // little iterator ♥ 11 | let line = 1; 12 | let column = 1; 13 | let cursor = 0; 14 | let char = str[cursor]; 15 | // console.log("lexer: ", char); 16 | 17 | function position() { 18 | return { cursor, line, column }; 19 | } 20 | 21 | function next() { 22 | cursor++; 23 | char = str[cursor]; 24 | // console.log("lexer: ", char); 25 | column++; 26 | } 27 | 28 | function newline() { 29 | line++; 30 | column = 1; 31 | } 32 
| 33 | function stringOfType(delimiter) { 34 | if (char !== delimiter) return null; 35 | 36 | const start = position(); 37 | next(); 38 | while (char !== delimiter) { 39 | next(); 40 | } 41 | 42 | next(); // last delimiter 43 | 44 | const end = position(); 45 | return { 46 | type: "String", 47 | loc: { file, start, end }, 48 | }; 49 | } 50 | 51 | function string() { 52 | return stringOfType('"') || stringOfType("'"); 53 | } 54 | 55 | function regexp() { 56 | if (char === "/") { 57 | const start = position(); 58 | next(); 59 | if (char === "/") { 60 | next(); 61 | return readComment(start); 62 | } 63 | 64 | next(); 65 | while (char !== "/") { 66 | next(); 67 | } 68 | 69 | next(); // last / 70 | 71 | const end = position(); 72 | return { 73 | type: "RegExpToken", 74 | loc: { file, start, end }, 75 | }; 76 | } 77 | } 78 | 79 | function readComment(start) { 80 | for (;;) { 81 | if (char === "\n") { 82 | newline(); 83 | next(); 84 | break; 85 | } 86 | 87 | if (char === undefined) { 88 | break; 89 | } 90 | 91 | next(); 92 | } 93 | 94 | const end = position(); 95 | 96 | return { 97 | type: "CommentToken", 98 | loc: { file, start, end }, 99 | }; 100 | } 101 | 102 | function operator() { 103 | if (char === "+") { 104 | const start = position(); 105 | next(); 106 | const end = position(); 107 | return { 108 | type: "PlusToken", 109 | loc: { file, start, end }, 110 | }; 111 | } 112 | 113 | if (char === "*") { 114 | const start = position(); 115 | next(); 116 | const end = position(); 117 | return { 118 | type: "MulToken", 119 | loc: { file, start, end }, 120 | }; 121 | } 122 | 123 | if (char === "/") { 124 | const start = position(); 125 | next(); 126 | if (char === "/") { 127 | next(); 128 | return readComment(start); 129 | } 130 | const end = position(); 131 | return { 132 | type: "DivToken", 133 | loc: { file, start, end }, 134 | }; 135 | } 136 | 137 | return null; 138 | } 139 | 140 | function number() { 141 | let buffer = ""; 142 | const start = position(); 143 | while 
(isNumeric(char)) { 144 | buffer += char; 145 | next(); 146 | } 147 | 148 | if (buffer.length >= 1) { 149 | const end = position(); 150 | return { 151 | type: "NumericLiteral", 152 | value: Number(buffer), 153 | loc: { file, start, end }, 154 | }; 155 | } 156 | 157 | return null; 158 | } 159 | 160 | const KEYWORDS = { 161 | if: "If", 162 | else: "Else", 163 | function: "Function", 164 | }; 165 | 166 | function id() { 167 | let buffer = ""; 168 | if (!isAlpha(char)) return null; 169 | const start = position(); 170 | buffer += char; 171 | next(); 172 | 173 | while (isNumeric(char) || isAlpha(char)) { 174 | buffer += char; 175 | next(); 176 | } 177 | 178 | const end = position(); 179 | 180 | const type = KEYWORDS[buffer]; 181 | if (type) { 182 | return { 183 | type, 184 | loc: { file, start, end }, 185 | }; 186 | } 187 | 188 | return { 189 | type: "Id", 190 | value: buffer, 191 | loc: { file, start, end }, 192 | }; 193 | 194 | return null; 195 | } 196 | 197 | function isWhitespace(c) { 198 | return c === " " || c === "\t"; 199 | } 200 | 201 | function semicolon() { 202 | if (char !== ";") return null; 203 | const start = position(); 204 | next(); 205 | 206 | const end = position(); 207 | 208 | return { 209 | type: "Semicolon", 210 | loc: { file, start, end }, 211 | }; 212 | } 213 | 214 | function colon() { 215 | if (char !== ",") return null; 216 | const start = position(); 217 | next(); 218 | 219 | const end = position(); 220 | 221 | return { 222 | type: "Colon", 223 | loc: { file, start, end }, 224 | }; 225 | } 226 | 227 | function parents() { 228 | if (char === "(") { 229 | const start = position(); 230 | next(); 231 | const end = position(); 232 | return { 233 | type: "OpenParent", 234 | loc: { file, start, end }, 235 | }; 236 | } 237 | 238 | if (char === ")") { 239 | const start = position(); 240 | next(); 241 | const end = position(); 242 | return { 243 | type: "CloseParent", 244 | loc: { file, start, end }, 245 | }; 246 | } 247 | 248 | if (char === "{") { 249 | 
const start = position(); 250 | next(); 251 | const end = position(); 252 | return { 253 | type: "OpenCurly", 254 | loc: { file, start, end }, 255 | }; 256 | } 257 | 258 | if (char === "}") { 259 | const start = position(); 260 | next(); 261 | const end = position(); 262 | return { 263 | type: "CloseCurly", 264 | loc: { file, start, end }, 265 | }; 266 | } 267 | 268 | return null; 269 | } 270 | 271 | function whitespace() { 272 | const start = position(); 273 | if (!isWhitespace(char)) { 274 | return null; 275 | } 276 | next(); 277 | 278 | while (isWhitespace(char)) { 279 | next(); 280 | } 281 | const end = position(); 282 | 283 | return { 284 | type: "Whitespace", 285 | loc: { file, start, end }, 286 | }; 287 | } 288 | 289 | function eol() { 290 | const start = position(); 291 | 292 | if (char !== "\n") { 293 | return null; 294 | } 295 | 296 | next(); 297 | newline(); 298 | 299 | const end = position(); 300 | 301 | return { 302 | type: "Newline", 303 | loc: { file, start, end }, 304 | }; 305 | } 306 | 307 | function eof() { 308 | if (char === undefined) { 309 | const start = position(); 310 | const end = start; 311 | return { 312 | type: "EndOfFileToken", 313 | loc: { file, start, end }, 314 | }; 315 | } 316 | 317 | return null; 318 | } 319 | 320 | function next2(mode) { 321 | function value() { 322 | return number() || string() || regexp(); 323 | } 324 | 325 | const token = 326 | whitespace() || 327 | id() || 328 | colon() || 329 | semicolon() || 330 | parents() || 331 | (mode === "expression" ? 
value() : operator()) || 332 | eol(); 333 | 334 | if (token) { 335 | return token; 336 | } 337 | 338 | const maybeEof = eof(); 339 | if (maybeEof) { 340 | return maybeEof; 341 | } 342 | 343 | throw new SyntaxError( 344 | `unexpected character "${char}" at ${file}:${line}:${column}` 345 | ); 346 | } 347 | 348 | return { 349 | next: next2, 350 | }; 351 | } 352 | -------------------------------------------------------------------------------- /parser.js: -------------------------------------------------------------------------------- 1 | export function parser(tokens) { 2 | let token = null; 3 | const rawTokens = []; 4 | 5 | function next(mode) { 6 | token = tokens.next(mode); 7 | if (!token) { 8 | throw new TypeError("next token is undefined"); 9 | } 10 | rawTokens.push(token); 11 | if ( 12 | token.type === "CommentToken" || 13 | token.type === "Whitespace" || 14 | token.type === "Newline" 15 | ) { 16 | return next(mode); 17 | } 18 | // console.log("parser: ", token && token.type); 19 | } 20 | 21 | function panic(message) { 22 | throw new SyntaxError( 23 | // @ts-ignore 24 | `${message} at ${token.loc.file}:${token.loc.start.line}:${token.loc.start.column}` 25 | ); 26 | } 27 | 28 | function FunctionCall(name) { 29 | const open = maybeTake("OpenParent", "expression"); 30 | if (!open) return name; 31 | 32 | const args = []; 33 | 34 | // head 35 | const expr = Expression(); 36 | if (expr) { 37 | args.push(expr); 38 | for (;;) { 39 | const colon = maybeTake("Colon", "expression"); 40 | if (!colon) break; 41 | const expr = Expression(); 42 | args.push(expr); 43 | } 44 | } 45 | 46 | const close = take("CloseParent"); 47 | 48 | return { 49 | type: "FunctionCall", 50 | name, 51 | arguments: args, 52 | loc: { 53 | file: open.loc.file, 54 | start: open.loc.start, 55 | end: close.loc.start, 56 | }, 57 | }; 58 | } 59 | 60 | function ExpressionMember() { 61 | if (token.type === "Id") { 62 | const _token = token; 63 | next(); 64 | return FunctionCall(_token); 65 | } 66 | 67 | if ( 
68 | token.type === "NumericLiteral" || 69 | token.type === "String" || 70 | token.type === "RegExpToken" 71 | ) { 72 | const _token = token; 73 | next(); 74 | return _token; 75 | } 76 | 77 | return null; 78 | } 79 | 80 | function ExpressionMemberMust() { 81 | const next = ExpressionMember(); 82 | if (!next) { 83 | panic(`Expected ExpressionMember got "${token.type}"`); 84 | } 85 | return next; 86 | } 87 | 88 | function take(type, mode) { 89 | if (token.type === type) { 90 | const _token = token; 91 | next(mode); 92 | return _token; 93 | } 94 | 95 | panic(`Expected token type "${type}" got "${token.type}"`); 96 | } 97 | 98 | function maybeTake(type, mode) { 99 | if (token.type === type) { 100 | const _token = token; 101 | next(mode); 102 | return _token; 103 | } 104 | 105 | return null; 106 | } 107 | 108 | function PlusToken() { 109 | if (token.type === "PlusToken") { 110 | const _token = token; 111 | next("expression"); 112 | return _token; 113 | } 114 | 115 | return null; 116 | } 117 | 118 | function MulToken() { 119 | if (token.type === "MulToken") { 120 | const _token = token; 121 | next("expression"); 122 | return _token; 123 | } 124 | 125 | return null; 126 | } 127 | 128 | function DivToken() { 129 | if (token.type === "DivToken") { 130 | const _token = token; 131 | next("expression"); 132 | return _token; 133 | } 134 | 135 | return null; 136 | } 137 | 138 | function Expression() { 139 | return BinaryExpression(); 140 | } 141 | function BinaryExpression() { 142 | const head = ExpressionMember(); 143 | if (!head) return null; 144 | 145 | return PlusExpression(MulExpression(head)); 146 | } 147 | function PlusExpression(left) { 148 | const op = PlusToken(); 149 | if (!op) return left; 150 | const next = ExpressionMemberMust(); 151 | 152 | // magic!!! 
153 | const right = MulExpression(next); 154 | 155 | const node = { 156 | type: "BinaryExpression", 157 | left, 158 | operatorToken: op, 159 | right: right, 160 | loc: { 161 | file: op.loc.file, 162 | start: left.loc.start, 163 | end: right.loc.end, 164 | }, 165 | }; 166 | 167 | return PlusExpression(node); 168 | } 169 | 170 | function MulExpression(left) { 171 | const op = MulToken() || DivToken(); 172 | if (!op) return left; 173 | const right = ExpressionMemberMust(); 174 | 175 | const node = { 176 | type: "BinaryExpression", 177 | left, 178 | operatorToken: op, 179 | right, 180 | loc: { 181 | file: op.loc.file, 182 | start: left.loc.start, 183 | end: right.loc.end, 184 | }, 185 | }; 186 | 187 | return MulExpression(node); 188 | } 189 | 190 | function Block() { 191 | const open = maybeTake("OpenCurly", "expression"); 192 | if (!open) return null; 193 | const body = Statements(); 194 | const close = take("CloseCurly", "expression"); 195 | 196 | return { 197 | type: "Block", 198 | body, 199 | loc: { 200 | file: open.loc.file, 201 | start: open.loc.start, 202 | end: close.loc.start, 203 | }, 204 | }; 205 | } 206 | 207 | function IfStatement() { 208 | const kw = maybeTake("If"); 209 | if (!kw) return null; 210 | take("OpenParent", "expression"); 211 | const condition = Expression(); 212 | if (!condition) { 213 | panic("Expected an Expression for condition"); 214 | } 215 | take("CloseParent", "expression"); 216 | const then = Block() || Statement(); 217 | if (!then) { 218 | panic("Expected an Expression for then"); 219 | } 220 | 221 | let els = null; 222 | const elseKw = maybeTake("Else", "expression"); 223 | if (elseKw) { 224 | els = Block() || Statement(); 225 | if (!els) { 226 | panic("Expected an Expression for else"); 227 | } 228 | } 229 | 230 | const end = els ? 
els.loc.end : then.loc.end; 231 | 232 | return { 233 | type: "If", 234 | condition, 235 | then, 236 | else: els, 237 | loc: { 238 | file: kw.loc.file, 239 | start: kw.loc.start, 240 | end, 241 | }, 242 | }; 243 | } 244 | 245 | function ArgumentList() { 246 | const args = []; 247 | take("OpenParent", "expression"); 248 | 249 | // head 250 | const id = maybeTake("Id"); 251 | if (id) { 252 | args.push(id); 253 | } 254 | 255 | for (;;) { 256 | const colon = maybeTake("Colon"); 257 | if (!colon) break; 258 | const id = take("Id"); 259 | args.push(id); 260 | } 261 | 262 | take("CloseParent"); 263 | 264 | return args; 265 | } 266 | 267 | function FunctionStatement() { 268 | const kw = maybeTake("Function"); 269 | if (!kw) return null; 270 | 271 | const name = take("Id"); 272 | const args = ArgumentList(); 273 | 274 | const body = Block(); 275 | if (!body) { 276 | panic("Expected a Bloc for the function"); 277 | } 278 | 279 | return { 280 | type: "FunctionStatement", 281 | name, 282 | args, 283 | body, 284 | loc: { 285 | file: kw.loc.file, 286 | start: kw.loc.start, 287 | end: body.loc.end, 288 | }, 289 | }; 290 | } 291 | 292 | function Statement() { 293 | const expression = Expression(); 294 | if (expression) { 295 | const sc = take("Semicolon", "expression"); 296 | return { 297 | type: "Statement", 298 | expression, 299 | loc: { 300 | file: expression.loc.file, 301 | start: expression.loc.start, 302 | end: sc.loc.end, 303 | }, 304 | }; 305 | } 306 | 307 | const ifstmt = IfStatement(); 308 | if (ifstmt) { 309 | maybeTake("Semicolon", "expression"); 310 | return ifstmt; 311 | } 312 | 313 | const fnstmt = FunctionStatement(); 314 | if (fnstmt) { 315 | return fnstmt; 316 | } 317 | 318 | return null; 319 | } 320 | 321 | function Statements() { 322 | const stmts = []; 323 | for (;;) { 324 | const stmt = Statement(); 325 | if (!stmt) break; 326 | stmts.push(stmt); 327 | } 328 | return stmts; 329 | } 330 | 331 | next("expression"); 332 | const ast = Statements(); 333 | 334 | // 
@ts-ignore 335 | if (token.type != "EndOfFileToken") { 336 | panic(`Expected token type "EndOfFileToken" got "${token.type}"`); 337 | } 338 | 339 | return { ast, tokens: rawTokens }; 340 | } 341 | -------------------------------------------------------------------------------- /parser.test.js: -------------------------------------------------------------------------------- 1 | const { lexer } = require("./lexer.js"); 2 | const { parser } = require("./parser.js"); 3 | 4 | describe("parser", () => { 5 | it("parses empty string", () => { 6 | const tokens = [ 7 | { 8 | type: "EndOfFileToken", 9 | }, 10 | ]; 11 | expect(parser("file", tokens.values())).toMatchInlineSnapshot( 12 | `null` 13 | ); 14 | }); 15 | 16 | it("reports an error if there is no EOF", () => { 17 | expect(() => 18 | parser("file", [].values()) 19 | ).toThrowErrorMatchingInlineSnapshot(`"next token is undefined"`); 20 | 21 | expect(() => 22 | parser("file", [{ type: "NumericLiteral" }].values()) 23 | ).toThrowErrorMatchingInlineSnapshot(`"next token is undefined"`); 24 | }); 25 | 26 | it("parses BinaryExpression", () => { 27 | expect(parser("file", lexer("file", "1"))).toMatchInlineSnapshot(` 28 | Object { 29 | "loc": Object { 30 | "end": Object { 31 | "column": 2, 32 | "line": 1, 33 | }, 34 | "start": Object { 35 | "column": 1, 36 | "line": 1, 37 | }, 38 | }, 39 | "type": "NumericLiteral", 40 | "value": 1, 41 | } 42 | `); 43 | 44 | expect(parser("file", lexer("file", "1+2"))) 45 | .toMatchInlineSnapshot(` 46 | Object { 47 | "left": Object { 48 | "loc": Object { 49 | "end": Object { 50 | "column": 2, 51 | "line": 1, 52 | }, 53 | "start": Object { 54 | "column": 1, 55 | "line": 1, 56 | }, 57 | }, 58 | "type": "NumericLiteral", 59 | "value": 1, 60 | }, 61 | "operatorToken": Object { 62 | "loc": Object { 63 | "end": Object { 64 | "column": 3, 65 | "line": 1, 66 | }, 67 | "start": Object { 68 | "column": 2, 69 | "line": 1, 70 | }, 71 | }, 72 | "type": "PlusToken", 73 | }, 74 | "right": Object { 75 | "loc": 
Object { 76 | "end": Object { 77 | "column": 4, 78 | "line": 1, 79 | }, 80 | "start": Object { 81 | "column": 3, 82 | "line": 1, 83 | }, 84 | }, 85 | "type": "NumericLiteral", 86 | "value": 2, 87 | }, 88 | "type": "BinaryExpression", 89 | } 90 | `); 91 | 92 | expect(parser("file", lexer("file", "1+2+3"))) 93 | .toMatchInlineSnapshot(` 94 | Object { 95 | "left": Object { 96 | "left": Object { 97 | "loc": Object { 98 | "end": Object { 99 | "column": 2, 100 | "line": 1, 101 | }, 102 | "start": Object { 103 | "column": 1, 104 | "line": 1, 105 | }, 106 | }, 107 | "type": "NumericLiteral", 108 | "value": 1, 109 | }, 110 | "operatorToken": Object { 111 | "loc": Object { 112 | "end": Object { 113 | "column": 3, 114 | "line": 1, 115 | }, 116 | "start": Object { 117 | "column": 2, 118 | "line": 1, 119 | }, 120 | }, 121 | "type": "PlusToken", 122 | }, 123 | "right": Object { 124 | "loc": Object { 125 | "end": Object { 126 | "column": 4, 127 | "line": 1, 128 | }, 129 | "start": Object { 130 | "column": 3, 131 | "line": 1, 132 | }, 133 | }, 134 | "type": "NumericLiteral", 135 | "value": 2, 136 | }, 137 | "type": "BinaryExpression", 138 | }, 139 | "operatorToken": Object { 140 | "loc": Object { 141 | "end": Object { 142 | "column": 5, 143 | "line": 1, 144 | }, 145 | "start": Object { 146 | "column": 4, 147 | "line": 1, 148 | }, 149 | }, 150 | "type": "PlusToken", 151 | }, 152 | "right": Object { 153 | "loc": Object { 154 | "end": Object { 155 | "column": 6, 156 | "line": 1, 157 | }, 158 | "start": Object { 159 | "column": 5, 160 | "line": 1, 161 | }, 162 | }, 163 | "type": "NumericLiteral", 164 | "value": 3, 165 | }, 166 | "type": "BinaryExpression", 167 | } 168 | `); 169 | 170 | expect(parser("file", lexer("file", "1+2+3+4"))) 171 | .toMatchInlineSnapshot(` 172 | Object { 173 | "left": Object { 174 | "left": Object { 175 | "left": Object { 176 | "loc": Object { 177 | "end": Object { 178 | "column": 2, 179 | "line": 1, 180 | }, 181 | "start": Object { 182 | "column": 1, 183 | 
"line": 1, 184 | }, 185 | }, 186 | "type": "NumericLiteral", 187 | "value": 1, 188 | }, 189 | "operatorToken": Object { 190 | "loc": Object { 191 | "end": Object { 192 | "column": 3, 193 | "line": 1, 194 | }, 195 | "start": Object { 196 | "column": 2, 197 | "line": 1, 198 | }, 199 | }, 200 | "type": "PlusToken", 201 | }, 202 | "right": Object { 203 | "loc": Object { 204 | "end": Object { 205 | "column": 4, 206 | "line": 1, 207 | }, 208 | "start": Object { 209 | "column": 3, 210 | "line": 1, 211 | }, 212 | }, 213 | "type": "NumericLiteral", 214 | "value": 2, 215 | }, 216 | "type": "BinaryExpression", 217 | }, 218 | "operatorToken": Object { 219 | "loc": Object { 220 | "end": Object { 221 | "column": 5, 222 | "line": 1, 223 | }, 224 | "start": Object { 225 | "column": 4, 226 | "line": 1, 227 | }, 228 | }, 229 | "type": "PlusToken", 230 | }, 231 | "right": Object { 232 | "loc": Object { 233 | "end": Object { 234 | "column": 6, 235 | "line": 1, 236 | }, 237 | "start": Object { 238 | "column": 5, 239 | "line": 1, 240 | }, 241 | }, 242 | "type": "NumericLiteral", 243 | "value": 3, 244 | }, 245 | "type": "BinaryExpression", 246 | }, 247 | "operatorToken": Object { 248 | "loc": Object { 249 | "end": Object { 250 | "column": 7, 251 | "line": 1, 252 | }, 253 | "start": Object { 254 | "column": 6, 255 | "line": 1, 256 | }, 257 | }, 258 | "type": "PlusToken", 259 | }, 260 | "right": Object { 261 | "loc": Object { 262 | "end": Object { 263 | "column": 8, 264 | "line": 1, 265 | }, 266 | "start": Object { 267 | "column": 7, 268 | "line": 1, 269 | }, 270 | }, 271 | "type": "NumericLiteral", 272 | "value": 4, 273 | }, 274 | "type": "BinaryExpression", 275 | } 276 | `); 277 | }); 278 | 279 | it("does not parse BinaryExpression", () => { 280 | expect(() => 281 | parser("file", lexer("file", "1+")) 282 | ).toThrowErrorMatchingInlineSnapshot( 283 | `"Expected token type \\"NumericLiteral\\" got \\"EndOfFileToken\\" at file:1:3"` 284 | ); 285 | }); 286 | }); 287 | 
-------------------------------------------------------------------------------- /lexer.test.js: -------------------------------------------------------------------------------- 1 | const { lexer } = require("./lexer.js"); 2 | 3 | describe("lexer", () => { 4 | it("return a generator", () => { 5 | const iter = [].values(); 6 | const tokens = lexer("file", ""); 7 | expect(tokens).toBeInstanceOf(iter.constructor); 8 | }); 9 | 10 | // TODO: remove once switched to TS 11 | it("takes two arguments", () => { 12 | expect(lexer.length).toBe(2); 13 | }); 14 | 15 | it("throws SyntaxError for an unknown charater", () => { 16 | expect(() => [ 17 | ...lexer("file", "%%%"), 18 | ]).toThrowErrorMatchingInlineSnapshot( 19 | `"unexpected character \\"%\\" at file:1:1"` 20 | ); 21 | }); 22 | 23 | it("return EndOfFileToken for an empty file", () => { 24 | expect([...lexer("file", "")]).toMatchInlineSnapshot(` 25 | Array [ 26 | Object { 27 | "loc": Object { 28 | "end": Object { 29 | "column": 1, 30 | "line": 1, 31 | }, 32 | "start": Object { 33 | "column": 1, 34 | "line": 1, 35 | }, 36 | }, 37 | "type": "EndOfFileToken", 38 | }, 39 | ] 40 | `); 41 | }); 42 | 43 | it("skipps whitespaces", () => { 44 | expect([...lexer("file", " \t ")]).toMatchInlineSnapshot(` 45 | Array [ 46 | Object { 47 | "loc": Object { 48 | "end": Object { 49 | "column": 9, 50 | "line": 1, 51 | }, 52 | "start": Object { 53 | "column": 9, 54 | "line": 1, 55 | }, 56 | }, 57 | "type": "EndOfFileToken", 58 | }, 59 | ] 60 | `); 61 | }); 62 | 63 | it("parses PlusToken", () => { 64 | expect([...lexer("file", "+++")]).toMatchInlineSnapshot(` 65 | Array [ 66 | Object { 67 | "loc": Object { 68 | "end": Object { 69 | "column": 2, 70 | "line": 1, 71 | }, 72 | "start": Object { 73 | "column": 1, 74 | "line": 1, 75 | }, 76 | }, 77 | "type": "PlusToken", 78 | }, 79 | Object { 80 | "loc": Object { 81 | "end": Object { 82 | "column": 3, 83 | "line": 1, 84 | }, 85 | "start": Object { 86 | "column": 2, 87 | "line": 1, 88 | }, 89 | }, 
90 | "type": "PlusToken", 91 | }, 92 | Object { 93 | "loc": Object { 94 | "end": Object { 95 | "column": 4, 96 | "line": 1, 97 | }, 98 | "start": Object { 99 | "column": 3, 100 | "line": 1, 101 | }, 102 | }, 103 | "type": "PlusToken", 104 | }, 105 | Object { 106 | "loc": Object { 107 | "end": Object { 108 | "column": 4, 109 | "line": 1, 110 | }, 111 | "start": Object { 112 | "column": 4, 113 | "line": 1, 114 | }, 115 | }, 116 | "type": "EndOfFileToken", 117 | }, 118 | ] 119 | `); 120 | }); 121 | 122 | it("parses NumericLiteral", () => { 123 | expect([...lexer("file", "9 9")]).toMatchInlineSnapshot(` 124 | Array [ 125 | Object { 126 | "loc": Object { 127 | "end": Object { 128 | "column": 2, 129 | "line": 1, 130 | }, 131 | "start": Object { 132 | "column": 1, 133 | "line": 1, 134 | }, 135 | }, 136 | "type": "NumericLiteral", 137 | "value": 9, 138 | }, 139 | Object { 140 | "loc": Object { 141 | "end": Object { 142 | "column": 4, 143 | "line": 1, 144 | }, 145 | "start": Object { 146 | "column": 3, 147 | "line": 1, 148 | }, 149 | }, 150 | "type": "NumericLiteral", 151 | "value": 9, 152 | }, 153 | Object { 154 | "loc": Object { 155 | "end": Object { 156 | "column": 4, 157 | "line": 1, 158 | }, 159 | "start": Object { 160 | "column": 4, 161 | "line": 1, 162 | }, 163 | }, 164 | "type": "EndOfFileToken", 165 | }, 166 | ] 167 | `); 168 | 169 | expect([...lexer("file", "123 456")]).toMatchInlineSnapshot(` 170 | Array [ 171 | Object { 172 | "loc": Object { 173 | "end": Object { 174 | "column": 4, 175 | "line": 1, 176 | }, 177 | "start": Object { 178 | "column": 1, 179 | "line": 1, 180 | }, 181 | }, 182 | "type": "NumericLiteral", 183 | "value": 123, 184 | }, 185 | Object { 186 | "loc": Object { 187 | "end": Object { 188 | "column": 8, 189 | "line": 1, 190 | }, 191 | "start": Object { 192 | "column": 5, 193 | "line": 1, 194 | }, 195 | }, 196 | "type": "NumericLiteral", 197 | "value": 456, 198 | }, 199 | Object { 200 | "loc": Object { 201 | "end": Object { 202 | "column": 8, 203 
| "line": 1, 204 | }, 205 | "start": Object { 206 | "column": 8, 207 | "line": 1, 208 | }, 209 | }, 210 | "type": "EndOfFileToken", 211 | }, 212 | ] 213 | `); 214 | }); 215 | 216 | it("parses newline", () => { 217 | expect([...lexer("file", "")]).toMatchInlineSnapshot(` 218 | Array [ 219 | Object { 220 | "loc": Object { 221 | "end": Object { 222 | "column": 1, 223 | "line": 1, 224 | }, 225 | "start": Object { 226 | "column": 1, 227 | "line": 1, 228 | }, 229 | }, 230 | "type": "EndOfFileToken", 231 | }, 232 | ] 233 | `); 234 | 235 | expect([...lexer("file", "\n")]).toMatchInlineSnapshot(` 236 | Array [ 237 | Object { 238 | "loc": Object { 239 | "end": Object { 240 | "column": 1, 241 | "line": 2, 242 | }, 243 | "start": Object { 244 | "column": 1, 245 | "line": 2, 246 | }, 247 | }, 248 | "type": "EndOfFileToken", 249 | }, 250 | ] 251 | `); 252 | 253 | expect([...lexer("file", " \n ")]).toMatchInlineSnapshot(` 254 | Array [ 255 | Object { 256 | "loc": Object { 257 | "end": Object { 258 | "column": 4, 259 | "line": 2, 260 | }, 261 | "start": Object { 262 | "column": 4, 263 | "line": 2, 264 | }, 265 | }, 266 | "type": "EndOfFileToken", 267 | }, 268 | ] 269 | `); 270 | 271 | expect([...lexer("file", " \n \n")]).toMatchInlineSnapshot(` 272 | Array [ 273 | Object { 274 | "loc": Object { 275 | "end": Object { 276 | "column": 1, 277 | "line": 3, 278 | }, 279 | "start": Object { 280 | "column": 1, 281 | "line": 3, 282 | }, 283 | }, 284 | "type": "EndOfFileToken", 285 | }, 286 | ] 287 | `); 288 | 289 | expect([...lexer("file", " \n \n ")]) 290 | .toMatchInlineSnapshot(` 291 | Array [ 292 | Object { 293 | "loc": Object { 294 | "end": Object { 295 | "column": 4, 296 | "line": 3, 297 | }, 298 | "start": Object { 299 | "column": 4, 300 | "line": 3, 301 | }, 302 | }, 303 | "type": "EndOfFileToken", 304 | }, 305 | ] 306 | `); 307 | }); 308 | 309 | expect([...lexer("file", "\n\n\n")]).toMatchInlineSnapshot(` 310 | Array [ 311 | Object { 312 | "loc": Object { 313 | "end": Object { 314 
| "column": 1, 315 | "line": 4, 316 | }, 317 | "start": Object { 318 | "column": 1, 319 | "line": 4, 320 | }, 321 | }, 322 | "type": "EndOfFileToken", 323 | }, 324 | ] 325 | `); 326 | 327 | it("just parses multiple tokens", () => { 328 | expect([...lexer("file", "1+1 + 4 4 4 + + 9\n9 9 4444 \t\t\t ")]) 329 | .toMatchInlineSnapshot(` 330 | Array [ 331 | Object { 332 | "loc": Object { 333 | "end": Object { 334 | "column": 2, 335 | "line": 1, 336 | }, 337 | "start": Object { 338 | "column": 1, 339 | "line": 1, 340 | }, 341 | }, 342 | "type": "NumericLiteral", 343 | "value": 1, 344 | }, 345 | Object { 346 | "loc": Object { 347 | "end": Object { 348 | "column": 3, 349 | "line": 1, 350 | }, 351 | "start": Object { 352 | "column": 2, 353 | "line": 1, 354 | }, 355 | }, 356 | "type": "PlusToken", 357 | }, 358 | Object { 359 | "loc": Object { 360 | "end": Object { 361 | "column": 4, 362 | "line": 1, 363 | }, 364 | "start": Object { 365 | "column": 3, 366 | "line": 1, 367 | }, 368 | }, 369 | "type": "NumericLiteral", 370 | "value": 1, 371 | }, 372 | Object { 373 | "loc": Object { 374 | "end": Object { 375 | "column": 6, 376 | "line": 1, 377 | }, 378 | "start": Object { 379 | "column": 5, 380 | "line": 1, 381 | }, 382 | }, 383 | "type": "PlusToken", 384 | }, 385 | Object { 386 | "loc": Object { 387 | "end": Object { 388 | "column": 8, 389 | "line": 1, 390 | }, 391 | "start": Object { 392 | "column": 7, 393 | "line": 1, 394 | }, 395 | }, 396 | "type": "NumericLiteral", 397 | "value": 4, 398 | }, 399 | Object { 400 | "loc": Object { 401 | "end": Object { 402 | "column": 10, 403 | "line": 1, 404 | }, 405 | "start": Object { 406 | "column": 9, 407 | "line": 1, 408 | }, 409 | }, 410 | "type": "NumericLiteral", 411 | "value": 4, 412 | }, 413 | Object { 414 | "loc": Object { 415 | "end": Object { 416 | "column": 12, 417 | "line": 1, 418 | }, 419 | "start": Object { 420 | "column": 11, 421 | "line": 1, 422 | }, 423 | }, 424 | "type": "NumericLiteral", 425 | "value": 4, 426 | }, 427 | 
Object { 428 | "loc": Object { 429 | "end": Object { 430 | "column": 14, 431 | "line": 1, 432 | }, 433 | "start": Object { 434 | "column": 13, 435 | "line": 1, 436 | }, 437 | }, 438 | "type": "PlusToken", 439 | }, 440 | Object { 441 | "loc": Object { 442 | "end": Object { 443 | "column": 16, 444 | "line": 1, 445 | }, 446 | "start": Object { 447 | "column": 15, 448 | "line": 1, 449 | }, 450 | }, 451 | "type": "PlusToken", 452 | }, 453 | Object { 454 | "loc": Object { 455 | "end": Object { 456 | "column": 18, 457 | "line": 1, 458 | }, 459 | "start": Object { 460 | "column": 17, 461 | "line": 1, 462 | }, 463 | }, 464 | "type": "NumericLiteral", 465 | "value": 9, 466 | }, 467 | Object { 468 | "loc": Object { 469 | "end": Object { 470 | "column": 2, 471 | "line": 2, 472 | }, 473 | "start": Object { 474 | "column": 1, 475 | "line": 2, 476 | }, 477 | }, 478 | "type": "NumericLiteral", 479 | "value": 9, 480 | }, 481 | Object { 482 | "loc": Object { 483 | "end": Object { 484 | "column": 4, 485 | "line": 2, 486 | }, 487 | "start": Object { 488 | "column": 3, 489 | "line": 2, 490 | }, 491 | }, 492 | "type": "NumericLiteral", 493 | "value": 9, 494 | }, 495 | Object { 496 | "loc": Object { 497 | "end": Object { 498 | "column": 9, 499 | "line": 2, 500 | }, 501 | "start": Object { 502 | "column": 5, 503 | "line": 2, 504 | }, 505 | }, 506 | "type": "NumericLiteral", 507 | "value": 4444, 508 | }, 509 | Object { 510 | "loc": Object { 511 | "end": Object { 512 | "column": 14, 513 | "line": 2, 514 | }, 515 | "start": Object { 516 | "column": 14, 517 | "line": 2, 518 | }, 519 | }, 520 | "type": "EndOfFileToken", 521 | }, 522 | ] 523 | `); 524 | }); 525 | }); 526 | --------------------------------------------------------------------------------