├── .gitignore
├── .nvmrc
├── .nycrc
├── .prettierrc
├── LICENSE.txt
├── README.md
├── eslint.config.mjs
├── package-lock.json
├── package.json
├── src
│   ├── astNode.ts
│   ├── index.ts
│   ├── moveContext.ts
│   ├── parseFile.ts
│   ├── parseGame.ts
│   ├── parseMove.ts
│   ├── parseTagPair.ts
│   ├── parseToken.ts
│   ├── parserError.ts
│   ├── pgnParser.ts
│   ├── pgnPrinter.ts
│   ├── token.ts
│   ├── tokenizer.ts
│   └── utils.ts
├── test
│   ├── index.spec.ts
│   ├── moveContext.spec.ts
│   ├── parseFile.spec.ts
│   ├── parseGame.spec.ts
│   ├── parseMove.spec.ts
│   ├── parseTagPair.spec.ts
│   ├── parseToken.spec.ts
│   ├── parserError.spec.ts
│   ├── tokenizer.spec.ts
│   └── utils.spec.ts
└── tsconfig.json
/.gitignore:
--------------------------------------------------------------------------------
1 | .nyc_output
2 | coverage
3 | dist
4 | node_modules
5 | tsconfig.tsbuildinfo
6 |
--------------------------------------------------------------------------------
/.nvmrc:
--------------------------------------------------------------------------------
1 | 22.11.0
2 |
--------------------------------------------------------------------------------
/.nycrc:
--------------------------------------------------------------------------------
1 | {
2 | "reporter": ["text", "html"],
3 | "all": true,
4 | "include": ["src"],
5 | "exclude": ["dist", "test"],
6 | "reportDir": "coverage/nyc"
7 | }
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "semi": true,
3 | "singleQuote": true,
4 | "printWidth": 100,
5 | "tabWidth": 2,
6 | "bracketSpacing": false,
7 | "trailingComma": "none"
8 | }
9 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Guillaume Masclet
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Prettier PGN plugin
2 |
3 | Prettier is an opinionated code formatter. It enforces a consistent style by parsing your code and
4 | re-printing it with its own rules that take the maximum line length into account, wrapping code
5 | when necessary.
6 |
7 | This plugin adds support for the Portable Game Notation format to Prettier.
8 |
9 | Portable Game Notation (PGN) is a standard plain text format for recording chess games (both the
10 | moves and related data), which can be read by humans and is also supported by most chess software.
11 |
12 | # Example
13 |
14 | This sample game:
15 |
16 | ```
17 | [Event "Immortal Game"][Date "1851.06.21"][White "Anderssen, Adolf"][Black
18 | "Kieseritzky, Lionel"][Result "*"] 1.e2-e4 e7-e5 f4 exf4 3.Bc4 3...Qh4+
19 | 4.Kf1 b5 {C33 King's Gambit Accepted: Bishop's Gambit, Bryan
20 | Countergambit} 5.Bxb5 Nf6 6.Nf3 Qh6 7.d3 Nh5 8.Nh4 ?! {Inaccuracy. Rg1 was
21 | best.} (8.Rg1 c6 9.Bc4 Be7 10.Nc3 O-O 11.Kf2 Nf6 12.Re1 Ng4+ 13.Kg1 Ba6
22 | 14.Bxa6 Nxa6 15.Qe2 Bc5+ 16.d4 Bxd4+ 17.Nxd4) 8...Qg5 9.Nf5 c6 {White
23 | eventually won by checkmate after 23 moves.} *
24 | ```
25 |
26 | will be transformed to this:
27 |
28 | ```
29 | [Event "Immortal Game"]
30 | [Date "1851.06.21"]
31 | [White "Anderssen, Adolf"]
32 | [Black "Kieseritzky, Lionel"]
33 | [Result "*"]
34 |
35 | 1.e4 e5 2.f4 exf4 3.Bc4 Qh4+ 4.Kf1 b5
36 | {C33 King's Gambit Accepted: Bishop's Gambit, Bryan Countergambit}
37 | 5.Bxb5 Nf6 6.Nf3 Qh6 7.d3 Nh5 8.Nh4?!
38 | {Inaccuracy. Rg1 was best.}
39 | (8.Rg1 c6 9.Bc4 Be7 10.Nc3 O-O 11.Kf2 Nf6 12.Re1 Ng4+ 13.Kg1 Ba6 14.Bxa6 Nxa6
40 | 15.Qe2 Bc5+ 16.d4 Bxd4+ 17.Nxd4)
41 | 8...Qg5 9.Nf5 c6
42 | {White eventually won by checkmate after 23 moves.}
43 | *
44 | ```
45 |
46 | # Getting started
47 |
48 | To run `prettier` with the PGN plugin, you're going to need [node](https://nodejs.org/en/download/).
49 |
50 | Install `prettier` and the plugin using the npm CLI:
51 |
52 | ```bash
53 | # Install locally if you intend to use Prettier in a specific folder
54 | npm install --save-dev prettier prettier-plugin-pgn
55 |
56 | # Or globally, which can be handy to format PGN files in any folder
57 | npm install --global prettier prettier-plugin-pgn
58 | ```
59 |
60 | # Usage
61 |
62 | The plugin can be activated in your [Prettier configuration file](https://prettier.io/docs/en/configuration):
63 |
64 | ```json
65 | {
66 | "plugins": ["prettier-plugin-pgn"]
67 | }
68 | ```
69 |
70 | Alternatively, it may be passed directly on the command line, using the `--plugin` option:
71 |
72 | ```bash
73 | npx prettier --plugin="prettier-plugin-pgn" --write "path/to/file.pgn"
74 | ```
75 |
76 | # Features
77 |
78 | This plugin supports most of the PGN specification. In particular, it supports:
79 | * Variations (including nested ones).
80 | * Annotations (either NAGs or the most common literal annotations).
81 | * Comments (only those inside braces).
82 | * PGN files containing multiple games.
83 | * Whenever possible, the plugin is lenient: it accepts a non-canonical PGN game as long as it is unambiguous.
84 |
85 | The following PGN features are currently not supported by this plugin:
86 | * "Rest of line" comments (those starting with a semicolon character and continuing to the end of the line).
87 | * Escaped lines using a percent sign character (`%`).
88 |
89 | # License
90 |
91 | The package is available as open source under the terms of the [MIT License](https://opensource.org/license/MIT).
92 |
--------------------------------------------------------------------------------
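Beyond the configuration-file and CLI workflows described in the README's Usage section, the plugin can also be invoked through Prettier's Node API (the test suite at the bottom of this repository does exactly that). A minimal sketch, assuming `prettier` and `prettier-plugin-pgn` are installed; `formatPgn` is a hypothetical helper:

```ts
import * as prettier from 'prettier';

// Hypothetical helper: format a PGN string with the plugin loaded explicitly.
async function formatPgn(source: string): Promise<string> {
  return prettier.format(source, {
    parser: 'pgn',
    plugins: ['prettier-plugin-pgn']
  });
}

formatPgn('[Event "Immortal Game"] 1.e2-e4 e7-e5 *').then((formatted) => {
  console.log(formatted);
});
```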
/eslint.config.mjs:
--------------------------------------------------------------------------------
1 | import path from 'node:path';
2 | import {fileURLToPath} from 'node:url';
3 | import globals from 'globals';
4 | import {includeIgnoreFile} from '@eslint/compat';
5 | import pluginJs from '@eslint/js';
6 | import tseslint from 'typescript-eslint';
7 | import eslintPluginPrettierRecommended from 'eslint-plugin-prettier/recommended';
8 |
9 | const __filename = fileURLToPath(import.meta.url);
10 | const __dirname = path.dirname(__filename);
11 | const gitignorePath = path.resolve(__dirname, '.gitignore');
12 |
13 | /** @type {import('eslint').Linter.Config[]} */
14 | export default [
15 | includeIgnoreFile(gitignorePath),
16 | {files: ['**/*.ts']},
17 | {languageOptions: {globals: globals.browser}},
18 | pluginJs.configs.recommended,
19 | ...tseslint.configs.recommended,
20 | eslintPluginPrettierRecommended
21 | ];
22 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "prettier-plugin-pgn",
3 | "version": "1.0.0",
4 | "description": "A Prettier plugin for formatting PGN files",
5 | "keywords": [
6 | "prettier",
7 | "prettier-plugin",
8 | "pgn",
9 | "chess",
10 | "formatter"
11 | ],
12 | "author": "Guillaume Masclet",
13 | "license": "MIT",
14 | "repository": {
15 | "type": "git",
16 | "url": "git+https://github.com/gmasclet/prettier-plugin-pgn.git"
17 | },
18 | "main": "dist/index.js",
19 | "files": [
20 | "dist"
21 | ],
22 | "engines": {
23 | "node": ">=18"
24 | },
25 | "scripts": {
26 | "clean": "rimraf .nyc_output coverage dist tsconfig.tsbuildinfo",
27 | "build": "tsc",
28 | "lint": "eslint --fix .",
29 | "test": "node --test --require ts-node/register test/**/*.spec.ts",
30 | "coverage": "nyc npm test"
31 | },
32 | "peerDependencies": {
33 | "prettier": "^3.0.0"
34 | },
35 | "dependencies": {
36 | "chess.js": "1.0.0-beta.8"
37 | },
38 | "devDependencies": {
39 | "@eslint/compat": "1.2.4",
40 | "@eslint/js": "9.17.0",
41 | "@types/node": "22.10.2",
42 | "eslint": "9.17.0",
43 | "eslint-config-prettier": "9.1.0",
44 | "eslint-plugin-prettier": "5.2.1",
45 | "globals": "15.14.0",
46 | "nyc": "17.1.0",
47 | "prettier": "3.4.2",
48 | "rimraf": "6.0.1",
49 | "ts-node": "10.9.2",
50 | "typescript": "5.7.2",
51 | "typescript-eslint": "8.18.2"
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/src/astNode.ts:
--------------------------------------------------------------------------------
1 | export type ASTNode =
2 | | FileNode
3 | | GameNode
4 | | TagPairSectionNode
5 | | TagPairNode
6 | | MoveTextSectionNode
7 | | MoveNode
8 | | AnnotationNode
9 | | CommentNode
10 | | VariationNode
11 | | GameTerminationNode;
12 |
13 | export interface FileNode extends BaseNode {
14 | type: 'file';
15 | comments: CommentNode[];
16 | games: GameNode[];
17 | }
18 |
19 | export interface GameNode extends BaseNode {
20 | type: 'game';
21 | tagPairSection: TagPairSectionNode;
22 | moveTextSection: MoveTextSectionNode;
23 | }
24 |
25 | export interface TagPairSectionNode extends BaseNode {
26 | type: 'tagPairSection';
27 | tagPairs: TagPairNode[];
28 | }
29 |
30 | export interface TagPairNode extends BaseNode {
31 | type: 'tagPair';
32 | name: string;
33 | value: string;
34 | }
35 |
36 | export interface MoveTextSectionNode extends BaseNode {
37 | type: 'moveTextSection';
38 | moves: MoveNode[];
39 | gameTermination: GameTerminationNode;
40 | }
41 |
42 | export interface MoveNode extends BaseNode {
43 | type: 'move';
44 | number: number;
45 | turn: 'white' | 'black';
46 | value: string;
47 | suffix: AnnotationNode | undefined;
48 | annotations: AnnotationNode[];
49 | comments: CommentNode[];
50 | variations: VariationNode[];
51 | }
52 |
53 | export interface AnnotationNode extends BaseNode {
54 | type: 'annotation';
55 | value: string;
56 | }
57 |
58 | export interface CommentNode extends BaseNode {
59 | type: 'comment';
60 | value: string;
61 | }
62 |
63 | export interface VariationNode extends BaseNode {
64 | type: 'variation';
65 | moves: MoveNode[];
66 | }
67 |
68 | export interface GameTerminationNode extends BaseNode {
69 | type: 'gameTermination';
70 | value: string;
71 | }
72 |
73 | interface BaseNode {
74 | type: ASTNode['type'];
75 | start: number;
76 | end: number;
77 | }
78 |
--------------------------------------------------------------------------------
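To make the node shapes above concrete, here is a hand-written `FileNode` for the input `1.e4 *`. This value is illustrative (it is not produced by running the parser here); the offsets follow the same convention asserted in the parser tests below.

```ts
import {FileNode} from './astNode';

// AST for the movetext "1.e4 *": one game, no tag pairs, a single white move.
const example: FileNode = {
  type: 'file',
  comments: [],
  games: [
    {
      type: 'game',
      tagPairSection: {type: 'tagPairSection', tagPairs: [], start: 0, end: 0},
      moveTextSection: {
        type: 'moveTextSection',
        moves: [
          {
            type: 'move',
            number: 1,
            turn: 'white',
            value: 'e4',
            suffix: undefined,
            annotations: [],
            comments: [],
            variations: [],
            start: 0,
            end: 4
          }
        ],
        gameTermination: {type: 'gameTermination', value: '*', start: 5, end: 6},
        start: 0,
        end: 6
      },
      start: 0,
      end: 6
    }
  ],
  start: 0,
  end: 6
};

console.log(example.games.length); // 1
```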
/src/index.ts:
--------------------------------------------------------------------------------
1 | import {PgnParser} from './pgnParser';
2 | import {PgnPrinter} from './pgnPrinter';
3 |
4 | export const languages = [
5 | {
6 | name: 'Portable Game Notation',
7 | parsers: ['pgn'],
8 | extensions: ['.pgn']
9 | }
10 | ];
11 |
12 | export const parsers = {
13 | pgn: new PgnParser()
14 | };
15 |
16 | export const printers = {
17 | pgn: new PgnPrinter()
18 | };
19 |
20 | export const options = {};
21 |
--------------------------------------------------------------------------------
/src/moveContext.ts:
--------------------------------------------------------------------------------
1 | import {Chess} from 'chess.js';
2 | import {ParserError} from './parserError';
3 | import {castToError} from './utils';
4 |
5 | /**
6 | * Handle the state of a chess game and validate moves.
7 | *
8 | * Accept a FEN string at construction, to support non-standard starting positions.
9 | */
10 | export class MoveContext {
11 | private readonly engine: Chess;
12 |
13 | constructor(fen?: string) {
14 | this.engine = new Chess(fen);
15 | }
16 |
17 | /**
18 | * Get the current move number.
19 | */
20 | get number(): number {
21 | return this.engine.moveNumber();
22 | }
23 |
24 | /**
25 | * Get which side is the current turn to move.
26 | */
27 | get turn(): 'white' | 'black' {
28 | return this.engine.turn() === 'w' ? 'white' : 'black';
29 | }
30 |
31 | /**
32 | * Play a move and update the state accordingly. Return the SAN (Standard Algebraic Notation)
33 | * representation of the move. Throw if the move is invalid or illegal for the current position.
34 | */
35 | play(move: {value: string; start: number}): string {
36 | try {
37 | return this.engine.move(move.value, {strict: false}).san;
38 | } catch (e) {
39 | throw new ParserError(castToError(e).message, move);
40 | }
41 | }
42 |
43 | /**
44 | * Create a clone of the current instance. Useful to handle variations without affecting the
45 | * main line.
46 | */
47 | clone(): MoveContext {
48 | return new MoveContext(this.engine.fen());
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
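A short usage sketch of `MoveContext`, mirroring the behaviour covered by `test/moveContext.spec.ts` below (the move values and offsets are illustrative):

```ts
import {MoveContext} from './moveContext';

// Play the main line, normalizing each move to SAN.
const context = new MoveContext();
console.log(context.number, context.turn); // 1 'white'
console.log(context.play({value: 'e2-e4', start: 0})); // 'e4' (long algebraic form is accepted)
console.log(context.play({value: 'e5', start: 5})); // 'e5'

// Explore a variation on a clone, leaving the main line untouched.
const variation = context.clone();
variation.play({value: 'Nf3', start: 8});
console.log(context.turn, variation.turn); // 'white' 'black'
```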
/src/parseFile.ts:
--------------------------------------------------------------------------------
1 | import {FileNode} from './astNode';
2 | import {parseGame} from './parseGame';
3 | import {Tokenizer} from './tokenizer';
4 | import {repeat} from './utils';
5 |
6 | export function parseFile(tokens: Tokenizer): FileNode {
7 | const games = repeat(() => parseGame(tokens));
8 | tokens.expectEndOfFile();
9 | return {
10 | type: 'file',
11 | comments: tokens.getComments(),
12 | games: games,
13 | start: games.length === 0 ? 0 : games[0].start,
14 | end: games.length === 0 ? 0 : games[games.length - 1].end
15 | };
16 | }
17 |
--------------------------------------------------------------------------------
/src/parseGame.ts:
--------------------------------------------------------------------------------
1 | import {GameNode, MoveTextSectionNode, TagPairSectionNode} from './astNode';
2 | import {MoveContext} from './moveContext';
3 | import {parseMove} from './parseMove';
4 | import {parseTagPair} from './parseTagPair';
5 | import {Tokenizer} from './tokenizer';
6 | import {noValue, repeat} from './utils';
7 |
8 | export function parseGame(tokens: Tokenizer): GameNode | undefined {
9 | const tagPairSection = parseTagPairSection(tokens);
10 | const moveTextSection = parseMoveTextSection(tokens, createContext(tagPairSection));
11 | if (noValue(tagPairSection) && noValue(moveTextSection)) {
12 | return undefined;
13 | }
14 | return {
15 | type: 'game',
16 | tagPairSection: tagPairSection ?? {
17 | type: 'tagPairSection',
18 | tagPairs: [],
19 | start: moveTextSection!.start,
20 | end: moveTextSection!.start
21 | },
22 | moveTextSection: moveTextSection ?? {
23 | type: 'moveTextSection',
24 | moves: [],
25 | gameTermination: {
26 | type: 'gameTermination',
27 | value: '*',
28 | start: tagPairSection!.end,
29 | end: tagPairSection!.end
30 | },
31 | start: tagPairSection!.end,
32 | end: tagPairSection!.end
33 | },
34 | start: (tagPairSection ?? moveTextSection)!.start,
35 | end: (moveTextSection ?? tagPairSection)!.end
36 | };
37 | }
38 |
39 | function parseTagPairSection(tokens: Tokenizer): TagPairSectionNode | undefined {
40 | const tagPairs = repeat(() => parseTagPair(tokens));
41 | if (tagPairs.length === 0) {
42 | return undefined;
43 | }
44 | return {
45 | type: 'tagPairSection',
46 | tagPairs: tagPairs,
47 | start: tagPairs[0].start,
48 | end: tagPairs[tagPairs.length - 1].end
49 | };
50 | }
51 |
52 | function parseMoveTextSection(
53 | tokens: Tokenizer,
54 | context: MoveContext
55 | ): MoveTextSectionNode | undefined {
56 | const moves = repeat(() => parseMove(tokens, context));
57 | const gameTermination = tokens.accept('gameTermination');
58 | if (moves.length === 0 && noValue(gameTermination)) {
59 | return undefined;
60 | }
61 | return {
62 | type: 'moveTextSection',
63 | moves: moves,
64 | gameTermination: gameTermination ?? {
65 | type: 'gameTermination',
66 | value: '*',
67 | start: moves[moves.length - 1].end,
68 | end: moves[moves.length - 1].end
69 | },
70 | start: (moves[0] ?? gameTermination).start,
71 | end: (gameTermination ?? moves[moves.length - 1]).end
72 | };
73 | }
74 |
75 | function createContext(tagPairSection: TagPairSectionNode | undefined): MoveContext {
76 | const fen = tagPairSection?.tagPairs.find((tagPair) => tagPair.name === 'FEN')?.value;
77 | return new MoveContext(fen);
78 | }
79 |
--------------------------------------------------------------------------------
/src/parseMove.ts:
--------------------------------------------------------------------------------
1 | import {AnnotationNode, CommentNode, MoveNode, VariationNode} from './astNode';
2 | import {MoveContext} from './moveContext';
3 | import {Tokenizer} from './tokenizer';
4 | import {hasValue, noValue, repeat} from './utils';
5 |
6 | export function parseMove(tokens: Tokenizer, context: MoveContext): MoveNode | undefined {
7 | const number = tokens.accept('integer');
8 | const periods = repeat(() => tokens.accept('period'));
9 | const move =
10 | hasValue(number) || periods.length > 0 ? tokens.expect('symbol') : tokens.accept('symbol');
11 | if (noValue(move)) {
12 | return undefined;
13 | }
14 |
15 | const comments: CommentNode[] = [];
16 | const annotations: AnnotationNode[] = [];
17 | for (;;) {
18 | const comment = tokens.accept('comment');
19 | if (hasValue(comment)) {
20 | comments.push(comment);
21 | continue;
22 | }
23 | const annotation = tokens.accept('annotation');
24 | if (hasValue(annotation)) {
25 | annotations.push(annotation);
26 | continue;
27 | }
28 | break;
29 | }
30 |
31 | const variations = repeat(() => parseVariation(tokens, context.clone()));
32 | const node = {
33 | type: 'move',
34 | number: context.number,
35 | turn: context.turn,
36 | value: context.play(move),
37 | suffix: annotations.find((annotation) => isSuffix(annotation)),
38 | annotations: annotations.filter((annotation) => !isSuffix(annotation)),
39 | comments: comments,
40 | variations: variations,
41 | start: number ? number.start : [...periods, move][0].start,
42 | end: (
43 | [...[...annotations, ...comments].sort((a, b) => a.end - b.end), ...variations].pop() ?? move
44 | ).end
45 | } as const;
46 |
47 | return node;
48 | }
49 |
50 | function isSuffix(annotation: AnnotationNode): boolean {
51 | return ['!', '?', '!!', '??', '!?', '?!'].includes(annotation.value);
52 | }
53 |
54 | function parseVariation(tokens: Tokenizer, context: MoveContext): VariationNode | undefined {
55 | const leftParenthesis = tokens.accept('leftParenthesis');
56 | if (noValue(leftParenthesis)) {
57 | return undefined;
58 | }
59 |
60 | const moves = repeat(() => parseMove(tokens, context));
61 | const rightParenthesis = tokens.expect('rightParenthesis');
62 | return {
63 | type: 'variation',
64 | moves: moves,
65 | start: leftParenthesis.start,
66 | end: rightParenthesis.end
67 | };
68 | }
69 |
--------------------------------------------------------------------------------
/src/parseTagPair.ts:
--------------------------------------------------------------------------------
1 | import {TagPairNode} from './astNode';
2 | import {Tokenizer} from './tokenizer';
3 | import {noValue} from './utils';
4 |
5 | export function parseTagPair(tokens: Tokenizer): TagPairNode | undefined {
6 | const leftBracket = tokens.accept('leftBracket');
7 | if (noValue(leftBracket)) {
8 | return undefined;
9 | }
10 | const name = tokens.expect('symbol');
11 | const value = tokens.expect('string');
12 | const rightBracket = tokens.accept('rightBracket');
13 | return {
14 | type: 'tagPair',
15 | name: name.value,
16 | value: value.value,
17 | start: leftBracket.start,
18 | end: (rightBracket ?? value).end
19 | };
20 | }
21 |
--------------------------------------------------------------------------------
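A quick sketch of how `parseTagPair` consumes tokens (the offsets in the expected output match those asserted in `test/parseGame.spec.ts`):

```ts
import {parseTagPair} from './parseTagPair';
import {Tokenizer} from './tokenizer';

// A bracketed tag pair is turned into a TagPairNode.
console.log(parseTagPair(new Tokenizer('[Event "F/S Return Match"]')));
// {type: 'tagPair', name: 'Event', value: 'F/S Return Match', start: 0, end: 26}

// When the next token is not a left bracket, the function backs off.
console.log(parseTagPair(new Tokenizer('1.e4'))); // undefined
```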
/src/parseToken.ts:
--------------------------------------------------------------------------------
1 | import {ParserError} from './parserError';
2 | import {Token, TokenType} from './token';
3 |
4 | /**
5 | * Parses a token from the given text starting at the specified index.
6 | */
7 | export function parseToken(text: string, index: number): Token | undefined {
8 | while (index < text.length && isWhitespace(text[index])) {
9 | index++;
10 | }
11 | if (index >= text.length) {
12 | return undefined;
13 | }
14 | const type = findTokenType(text, index);
15 | switch (type) {
16 | case 'period':
17 | case 'leftBracket':
18 | case 'rightBracket':
19 | case 'leftParenthesis':
20 | case 'rightParenthesis':
21 | return {
22 | type: type,
23 | start: index,
24 | end: index + 1
25 | };
26 |
27 | case 'comment':
28 | return parseComment(text, index);
29 |
30 | case 'string':
31 | return parseString(text, index);
32 |
33 | case 'annotation':
34 | return parseAnnotation(text, index);
35 |
36 | case 'symbol':
37 | return parseSymbol(text, index);
38 | }
39 | }
40 |
41 | function isWhitespace(character: string): boolean {
42 | return /\s/.test(character);
43 | }
44 |
45 | function findTokenType(
46 | text: string,
47 | index: number
48 | ): Exclude<TokenType, 'integer' | 'gameTermination'> {
49 | const character = text[index];
50 | switch (character) {
51 | case '.':
52 | return 'period';
53 | case '[':
54 | return 'leftBracket';
55 | case ']':
56 | return 'rightBracket';
57 | case '(':
58 | return 'leftParenthesis';
59 | case ')':
60 | return 'rightParenthesis';
61 | case '{':
62 | return 'comment';
63 | case '"':
64 | return 'string';
65 | default:
66 | if (/[$!?=~+-]/.test(character)) {
67 | return 'annotation';
68 | }
69 | if (/[*0-9a-zA-Z]/.test(character)) {
70 | return 'symbol';
71 | }
72 | throw new ParserError(`Unknown token type "${character}"`, {start: index});
73 | }
74 | }
75 |
76 | function parseComment(text: string, startIndex: number): Token {
77 | let index = startIndex + 1;
78 | let value = '';
79 | while (index < text.length) {
80 | const character = text[index];
81 | index++;
82 | if (character === '}') {
83 | break;
84 | } else {
85 | value += character;
86 | }
87 | }
88 | return {
89 | type: 'comment',
90 | value: value,
91 | start: startIndex,
92 | end: index
93 | };
94 | }
95 |
96 | function parseString(text: string, startIndex: number): Token {
97 | let index = startIndex + 1;
98 | let value = '';
99 | while (index < text.length) {
100 | const character = text[index];
101 | if (character === '"') {
102 | index++;
103 | break;
104 | }
105 | if (character === '\\') {
106 | index++;
107 | if (index < text.length) {
108 | value += text[index];
109 | index++;
110 | }
111 | } else {
112 | value += character;
113 | index++;
114 | }
115 | }
116 |
117 | return {
118 | type: 'string',
119 | value: value,
120 | start: startIndex,
121 | end: index
122 | };
123 | }
124 |
125 | function parseAnnotation(text: string, startIndex: number): Token {
126 | const value = parseAnnotationValue(text, startIndex);
127 | return {
128 | type: 'annotation',
129 | value: value,
130 | start: startIndex,
131 | end: startIndex + value.length
132 | };
133 | }
134 |
135 | function parseAnnotationValue(text: string, startIndex: number): string {
136 | if (text[startIndex] === '$') {
137 | let index = startIndex + 1;
138 | while (index < text.length && /[0-9]/.test(text[index])) {
139 | index++;
140 | }
141 | return text.substring(startIndex, index);
142 | }
143 |
144 | const annotations = [
145 | '!',
146 | '?',
147 | '!!',
148 | '??',
149 | '!?',
150 | '?!',
151 | '+--',
152 | '--+',
153 | '+-',
154 | '-+',
155 | '+/-',
156 | '-/+',
157 | '+=',
158 | '=+',
159 | '=',
160 | '~'
161 | ].sort((a, b) => b.length - a.length);
162 |
163 | for (const value of annotations) {
164 | if (text.startsWith(value, startIndex)) {
165 | return value;
166 | }
167 | }
168 | throw new ParserError('Unknown annotation', {start: startIndex});
169 | }
170 |
171 | function parseSymbol(text: string, startIndex: number): Token {
172 | const value = parseSymbolValue(text, startIndex);
173 | if (['*', '1-0', '0-1', '1/2-1/2'].includes(value)) {
174 | return {
175 | type: 'gameTermination',
176 | value: value,
177 | start: startIndex,
178 | end: startIndex + value.length
179 | };
180 | } else if (/^[0-9]+$/.test(value)) {
181 | return {
182 | type: 'integer',
183 | value: Number.parseInt(value),
184 | start: startIndex,
185 | end: startIndex + value.length
186 | };
187 | } else {
188 | return {
189 | type: 'symbol',
190 | value: value,
191 | start: startIndex,
192 | end: startIndex + value.length
193 | };
194 | }
195 | }
196 |
197 | function parseSymbolValue(text: string, startIndex: number): string {
198 | if (text[startIndex] === '*') {
199 | return text[startIndex];
200 | }
201 | let index = startIndex + 1;
202 | while (index < text.length && /[0-9a-zA-Z_+#=:/-]/.test(text[index])) {
203 | index++;
204 | }
205 | return text.substring(startIndex, index);
206 | }
207 |
--------------------------------------------------------------------------------
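A sketch of `parseToken` applied at a few offsets of a small input, following the rules implemented above (offsets are zero-based):

```ts
import {parseToken} from './parseToken';

const text = '1.e4 e5 *';
console.log(parseToken(text, 0)); // {type: 'integer', value: 1, start: 0, end: 1}
console.log(parseToken(text, 1)); // {type: 'period', start: 1, end: 2}
console.log(parseToken(text, 2)); // {type: 'symbol', value: 'e4', start: 2, end: 4}
console.log(parseToken(text, 4)); // {type: 'symbol', value: 'e5', start: 5, end: 7} (whitespace skipped)
console.log(parseToken(text, 8)); // {type: 'gameTermination', value: '*', start: 8, end: 9}
console.log(parseToken(text, 9)); // undefined (end of input)
```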
/src/parserError.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * Thrown internally when the parsing fails, typically due to a syntax error.
3 | *
4 | * Provide an adapter method, to convert itself into the type of errors expected by Prettier,
5 | * so that the location of the error can be reported.
6 | */
7 | export class ParserError extends Error {
8 | constructor(
9 | message: string,
10 | private readonly loc: {start: number}
11 | ) {
12 | super(message);
13 | }
14 |
15 | convertToPrettierError(text: string): Error {
16 | return new PrettierError(this.message, {start: this.findLoc(text, this.loc.start)}, this);
17 | }
18 |
19 | private findLoc(text: string, index: number): {line: number; column: number} {
20 | const lines = text.substring(0, index).split('\n');
21 | return {line: lines.length, column: lines[lines.length - 1].length + 1};
22 | }
23 | }
24 |
25 | class PrettierError extends SyntaxError {
26 | constructor(
27 | message: string,
28 | public readonly loc: {start: {line: number; column: number}},
29 | public readonly cause: Error
30 | ) {
31 | super(`${message} (${loc.start.line}:${loc.start.column})`);
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
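A short illustration of how a `ParserError` character offset becomes the line/column location reported to Prettier (the offsets here are worked out by hand for the given text):

```ts
import {ParserError} from './parserError';

// The offending token starts at offset 32, i.e. line 2, column 6 of this text.
const text = '[Event "F/S Return Match"]\n1.e4 e9 *';
const error = new ParserError('Invalid move', {start: 32});
console.log(error.convertToPrettierError(text).message); // 'Invalid move (2:6)'
```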
/src/pgnParser.ts:
--------------------------------------------------------------------------------
1 | import {Parser} from 'prettier';
2 | import {ASTNode} from './astNode';
3 | import {ParserError} from './parserError';
4 | import {parseFile} from './parseFile';
5 | import {Tokenizer} from './tokenizer';
6 |
7 | /**
8 | * Implement the Prettier `Parser` interface, for the PGN format.
9 | *
10 | * This is a recursive descent parser. The algorithm proceeds in a top-down manner: the entry point
11 | * parses the root node of the abstract syntax tree, which is done by parsing its child nodes, and
12 | * so on down to the leaf nodes.
13 | *
14 | * These leaf nodes are built by assembling the base tokens of the grammar, which are lazily read
15 | * from the input text using the `Tokenizer` class.
16 | */
17 | export class PgnParser implements Parser<ASTNode> {
18 | get astFormat(): string {
19 | return 'pgn';
20 | }
21 |
22 | parse(text: string): ASTNode {
23 | try {
24 | return parseFile(new Tokenizer(text));
25 | } catch (error) {
26 | throw error instanceof ParserError ? error.convertToPrettierError(text) : error;
27 | }
28 | }
29 |
30 | locStart(node: ASTNode): number {
31 | return node.start;
32 | }
33 |
34 | locEnd(node: ASTNode): number {
35 | return node.end;
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
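The parser can also be exercised on its own, outside of Prettier, for instance to inspect the resulting AST; a minimal sketch:

```ts
import {PgnParser} from './pgnParser';

const parser = new PgnParser();
const ast = parser.parse('1.e4 e5 *');
console.log(ast.type); // 'file'
console.log(parser.locStart(ast), parser.locEnd(ast)); // 0 9
```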
/src/pgnPrinter.ts:
--------------------------------------------------------------------------------
1 | import {AstPath, doc, Doc, ParserOptions, Printer} from 'prettier';
2 | import {ASTNode, CommentNode, MoveNode} from './astNode';
3 |
4 | const {fill, hardline, indent, join, line} = doc.builders;
5 |
6 | /**
7 | * Implement the Prettier `Printer` interface, for the PGN format.
8 | */
9 | export class PgnPrinter implements Printer<ASTNode> {
10 | print(
11 | path: AstPath,
12 | _options: ParserOptions,
13 | print: (path: AstPath) => Doc
14 | ): Doc {
15 | const node = path.node;
16 | switch (node.type) {
17 | case 'file':
18 | return [join([hardline, hardline], path.map(print, 'games')), hardline];
19 |
20 | case 'game':
21 | if (node.tagPairSection.tagPairs.length === 0) {
22 | return path.call(print, 'moveTextSection');
23 | } else {
24 | return [
25 | path.call(print, 'tagPairSection'),
26 | hardline,
27 | hardline,
28 | path.call(print, 'moveTextSection')
29 | ];
30 | }
31 |
32 | case 'tagPairSection':
33 | return join(hardline, path.map(print, 'tagPairs'));
34 |
35 | case 'tagPair':
36 | return `[${node.name} ${this.quote(node.value)}]`;
37 |
38 | case 'moveTextSection':
39 | if (node.moves.length === 0) {
40 | return path.call(print, 'gameTermination');
41 | } else {
42 | return fill([
43 | ...this.printMoves(path, print),
44 | this.getMoveSeparator(node.moves[node.moves.length - 1]),
45 | path.call(print, 'gameTermination')
46 | ]);
47 | }
48 |
49 | case 'move':
50 | return this.printMove(path, print);
51 |
52 | case 'annotation':
53 | return node.value;
54 |
55 | case 'comment':
56 | return this.printCommentNode(path.node as CommentNode);
57 |
58 | case 'variation':
59 | return fill(['(', ...this.printMoves(path, print), ')']);
60 |
61 | case 'gameTermination':
62 | return node.value;
63 | }
64 | }
65 |
66 | private quote(value: string): string {
67 | return `"${value.replace(/[\\|"]/g, (v) => '\\' + v)}"`;
68 | }
69 |
70 | private printMoves(path: AstPath, print: (path: AstPath) => Doc): Doc[] {
71 | const result: Doc[] = [];
72 | path.each((move) => {
73 | result.push(...this.printMove(move, print));
74 | if (!move.isLast) {
75 | result.push(this.getMoveSeparator(move.node, path.node.type === 'variation'));
76 | }
77 | }, 'moves');
78 | return result;
79 | }
80 |
81 | private getMoveSeparator(move: MoveNode, isVariation = false) {
82 | return (!isVariation && move.comments.length > 0) || move.variations.length > 0
83 | ? hardline
84 | : line;
85 | }
86 |
87 | private printMove(path: AstPath, print: (path: AstPath) => Doc): Doc[] {
88 | const node = path.node as MoveNode;
89 | const parts: Doc[] = [this.printMoveValue(path)];
90 | if (node.annotations.length > 0) {
91 | parts.push(
92 | ...[line, ...join(line, path.map(print, 'annotations')).flatMap((value) => value)]
93 | );
94 | }
95 | if (node.comments.length > 0) {
96 | if (path.parent?.type === 'moveTextSection') {
97 | parts.push(
98 | indent([
99 | hardline,
100 | join(
101 | hardline,
102 | node.comments.map((comment) => fill(this.printCommentNode(comment)))
103 | )
104 | ])
105 | );
106 | } else {
107 | parts.push(
108 | ...[
109 | line,
110 | ...join(
111 | line,
112 | node.comments.map((comment) => this.printCommentNode(comment))
113 | ).flatMap((value) => value)
114 | ]
115 | );
116 | }
117 | }
118 | if (node.variations.length > 0) {
119 | parts.push(indent([hardline, join(hardline, path.map(print, 'variations'))]));
120 | }
121 | return parts;
122 | }
123 |
124 | private printMoveValue(path: AstPath): Doc {
125 | const node = path.node as MoveNode;
126 | const value = node.value + (node.suffix?.value ?? '');
127 | if (node.turn === 'white') {
128 | return `${node.number}.${value}`;
129 | } else if (
130 | path.isFirst ||
131 | (path.previous?.type === 'move' &&
132 | (path.previous.comments.length > 0 || path.previous.variations.length > 0))
133 | ) {
134 | return `${node.number}...${value}`;
135 | }
136 | return value;
137 | }
138 |
139 | private printCommentNode(node: CommentNode): Doc[] {
140 | return [
141 | '{',
142 | ...join(
143 | line,
144 | node.value
145 | .trim()
146 | .split(/\s/)
147 | .filter((part) => part.length > 0)
148 | ),
149 | '}'
150 | ];
151 | }
152 |
153 | canAttachComment(node: ASTNode): boolean {
154 | return node.type !== 'comment';
155 | }
156 |
157 | willPrintOwnComments(path: AstPath): boolean {
158 | return path.node.type === 'move';
159 | }
160 |
161 | printComment(path: AstPath): Doc {
162 | return fill(this.printCommentNode(path.node as CommentNode));
163 | }
164 | }
165 |
--------------------------------------------------------------------------------
/src/token.ts:
--------------------------------------------------------------------------------
1 | export type TokenType = BaseTokenType | StringTokenType | NumberTokenType;
2 |
3 | type BaseTokenType =
4 | | 'period'
5 | | 'leftBracket'
6 | | 'rightBracket'
7 | | 'leftParenthesis'
8 | | 'rightParenthesis';
9 |
10 | type StringTokenType = 'annotation' | 'comment' | 'string' | 'symbol' | 'gameTermination';
11 |
12 | type NumberTokenType = 'integer';
13 |
14 | export type Token<T extends TokenType = TokenType> = T extends BaseTokenType
15 | ? BaseToken<T>
16 | : T extends StringTokenType
17 | ? ValueToken<T, string>
18 | : T extends NumberTokenType
19 | ? ValueToken<T, number>
20 | : never;
21 |
22 | interface ValueToken<T extends TokenType, V> extends BaseToken<T> {
23 | value: V;
24 | }
25 |
26 | interface BaseToken<T extends TokenType> {
27 | type: T;
28 | start: number;
29 | end: number;
30 | }
31 |
--------------------------------------------------------------------------------
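A few literal values showing how the `Token<T>` conditional type above selects the payload shape for each token type (a sketch; the variable names are illustrative):

```ts
import {Token} from './token';

const period: Token<'period'> = {type: 'period', start: 0, end: 1}; // no value
const symbol: Token<'symbol'> = {type: 'symbol', value: 'e4', start: 2, end: 4}; // string value
const integer: Token<'integer'> = {type: 'integer', value: 1, start: 0, end: 1}; // number value

// Without a type argument, `Token` is the union of all of the shapes above.
const anyToken: Token = symbol;
console.log(period.type, integer.value, anyToken.start);
```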
/src/tokenizer.ts:
--------------------------------------------------------------------------------
1 | import {ParserError} from './parserError';
2 | import {Token, TokenType} from './token';
3 | import {parseToken} from './parseToken';
4 | import {hasValue, noValue} from './utils';
5 |
6 | /**
7 | * Lazily convert a given text input into tokens.
8 | *
9 | * Skip comments and store them separately, unless a comment token is explicitly asked for. This is
10 | * because we have two ways to handle comments:
11 | *
12 | * - Comments in the movetext are manually attached to the preceding move and are handled as
13 | * regular nodes by our printer.
14 | *
15 | * - Other comments are attached to the root node. They are handled using Prettier's comment
16 | * algorithm.
17 | */
18 | export class Tokenizer {
19 | private comments: Token<'comment'>[] = [];
20 | private buffer: Token | undefined;
21 | private index = 0;
22 |
23 | constructor(private readonly text: string) {}
24 |
25 | /**
26 | * Fetch and return the next token if it matches the specified type. Otherwise, return
27 | * `undefined`.
28 | */
29 | accept<T extends TokenType>(tokenType: T): Token<T> | undefined {
30 | const token = this.fetch({skipComments: tokenType !== 'comment'});
31 | if (noValue(token)) {
32 | return undefined;
33 | }
34 | if (token.type !== tokenType) {
35 | return undefined;
36 | }
37 | this.buffer = undefined;
38 | return token as Token<T>;
39 | }
40 |
41 | /**
42 | * Fetch and return the next token if it matches the specified type. Otherwise, throw an error.
43 | */
44 | expect<T extends TokenType>(tokenType: T): Token<T> {
45 | const token = this.fetch({skipComments: tokenType !== 'comment'});
46 | if (noValue(token)) {
47 | const loc = {start: this.text.length};
48 | throw new ParserError(`Unexpected end of file, was expecting a ${tokenType}`, loc);
49 | }
50 | if (token.type !== tokenType) {
51 | throw new ParserError(`Unexpected token ${token.type}, was expecting a ${tokenType}`, token);
52 | }
53 | this.buffer = undefined;
54 | return token as Token<T>;
55 | }
56 |
57 | /**
58 | * Throw an error if there are any remaining tokens.
59 | */
60 | expectEndOfFile(): void {
61 | const token = this.fetch({skipComments: true});
62 | if (hasValue(token)) {
63 | throw new ParserError(`Unexpected token ${token.type}`, token);
64 | }
65 | }
66 |
67 | /**
68 | * Return all the comments skipped during tokenization.
69 | */
70 | getComments(): Token<'comment'>[] {
71 | return this.comments;
72 | }
73 |
74 | private fetch(options: {skipComments: boolean}): Token | undefined {
75 | while (noValue(this.buffer)) {
76 | const token = parseToken(this.text, this.index);
77 | if (noValue(token)) {
78 | break;
79 | }
80 | this.index = token.end;
81 | if (options.skipComments && token.type === 'comment') {
82 | this.comments.push(token);
83 | } else {
84 | this.buffer = token;
85 | }
86 | }
87 | return this.buffer;
88 | }
89 | }
90 |
--------------------------------------------------------------------------------
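A sketch of the accept/expect workflow on a single tag pair, using the token shapes produced by `parseToken`:

```ts
import {Tokenizer} from './tokenizer';

const tokens = new Tokenizer('[Event "F/S Return Match"]');
console.log(tokens.accept('symbol')); // undefined (the next token is a left bracket)
console.log(tokens.expect('leftBracket')); // {type: 'leftBracket', start: 0, end: 1}
console.log(tokens.expect('symbol').value); // 'Event'
console.log(tokens.expect('string').value); // 'F/S Return Match'
console.log(tokens.accept('rightBracket')); // {type: 'rightBracket', start: 25, end: 26}
tokens.expectEndOfFile(); // no remaining tokens, so this does not throw
```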
/src/utils.ts:
--------------------------------------------------------------------------------
1 | export function hasValue<T>(arg: T): arg is NonNullable<T> {
2 | return arg !== undefined && arg !== null;
3 | }
4 |
5 | export function noValue(arg: unknown): arg is undefined | null {
6 | return !hasValue(arg);
7 | }
8 |
9 | export function repeat<T>(parse: () => T | undefined): T[] {
10 | const nodes: T[] = [];
11 | for (;;) {
12 | const node = parse();
13 | if (hasValue(node)) {
14 | nodes.push(node);
15 | } else {
16 | return nodes;
17 | }
18 | }
19 | }
20 |
21 | export function castToError(throwable: unknown): Error {
22 | if (throwable instanceof Error) {
23 | return throwable;
24 | }
25 | return new Error(String(throwable));
26 | }
27 |
--------------------------------------------------------------------------------
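A tiny illustration of the helpers above (the values are arbitrary):

```ts
import {hasValue, repeat} from './utils';

// `repeat` keeps calling the given function until it yields undefined.
const input = [3, 1, 4, undefined, 1, 5];
let i = 0;
console.log(repeat(() => input[i++])); // [3, 1, 4]

// `hasValue` only rejects null and undefined, not other falsy values.
console.log(hasValue(undefined), hasValue(0), hasValue('')); // false true true
```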
/test/index.spec.ts:
--------------------------------------------------------------------------------
1 | import * as assert from 'node:assert';
2 | import {describe, it} from 'node:test';
3 | import * as prettier from 'prettier';
4 |
5 | describe('Index', () => {
6 | it('Should format an empty string', async () => {
7 | await expectFormat('');
8 | });
9 |
10 | it('Should format a simple game', async () => {
11 | await expectFormat(`
12 | [Event "F/S Return Match"]
13 |
14 | 1.e4 e5 2.Nf3 Nc6 3.Bb5 a6 *
15 | `);
16 | });
17 |
18 | it('Should format a complete game', async () => {
19 | await expectFormat(`
20 | [Event "F/S Return Match"]
21 | [Site "Belgrade, Serbia JUG"]
22 | [Date "1992.11.04"]
23 | [Round "29"]
24 | [White "Fischer, Robert J."]
25 | [Black "Spassky, Boris V."]
26 | [Result "1/2-1/2"]
27 |
28 | 1.e4 e5 2.Nf3 Nc6 3.Bb5 a6 4.Ba4 Nf6 5.O-O Be7 6.Re1 b5 7.Bb3 d6 8.c3 O-O 9.h3
29 | Nb8 10.d4 Nbd7 11.c4 c6 12.cxb5 axb5 13.Nc3 Bb7 14.Bg5 b4 15.Nb1 h6 16.Bh4 c5
30 | 17.dxe5 Nxe4 18.Bxe7 Qxe7 19.exd6 Qf6 20.Nbd2 Nxd6 21.Nc4 Nxc4 22.Bxc4 Nb6
31 | 23.Ne5 Rae8 24.Bxf7+ Rxf7 25.Nxf7 Rxe1+ 26.Qxe1 Kxf7 27.Qe3 Qg5 28.Qxg5 hxg5
32 | 29.b3 Ke6 30.a3 Kd6 31.axb4 cxb4 32.Ra5 Nd5 33.f3 Bc8 34.Kf2 Bf5 35.Ra7 g6
33 | 36.Ra6+ Kc5 37.Ke1 Nf4 38.g3 Nxh3 39.Kd2 Kb5 40.Rd6 Kc5 41.Ra6 Nf2 42.g4 Bd3
34 | 43.Re6 1/2-1/2
35 | `);
36 | });
37 |
38 | it('Should put an empty line between each game', async () => {
39 | await expectFormat(`
40 | [Event "F/S Return Match"]
41 | [Site "Belgrade, Serbia JUG"]
42 |
43 | 1.e4 e5 2.Nf3 Nc6 3.Bb5 a6 *
44 |
45 | [White "Adolf Anderssen"]
46 | [Black "Lionel Kieseritzky"]
47 |
48 | 1.e4 e5 2.f4 exf4 3.Bc4 Qh4+ *
49 | `);
50 | });
51 |
52 | it('Should format games without tag pairs', async () => {
53 | await expectFormat(`
54 | 1.e4 e5 2.Nf3 Nc6 3.Bb5 a6 *
55 |
56 | 1.d4 d5 2.c4 e6 3.Nc3 Nf6 *
57 |
58 | 1.e4 e6 2.d4 d5 3.Nc3 Nf6 *
59 | `);
60 | });
61 |
62 | it('Should format a game with a variation', async () => {
63 | await expectFormat(`
64 | [Event "F/S Return Match"]
65 |
66 | 1.e4 e5
67 | (1...e6 2.d4)
68 | 2.Nf3 Nc6 3.Bb5 a6 *
69 | `);
70 | });
71 |
72 | it('Should format a game with several variations', async () => {
73 | await expectFormat(`
74 | [Event "F/S Return Match"]
75 |
76 | 1.e4 e5
77 | (1...e6 2.d4)
78 | (1...c5 2.Nf3)
79 | 2.Nf3 Nc6 3.Bb5 a6 *
80 | `);
81 | });
82 |
83 | it('Should format a game with nested variations', async () => {
84 | await expectFormat(`
85 | [Event "F/S Return Match"]
86 |
87 | 1.e4 e5
88 | (1...e6 2.d4
89 | (2.d3))
90 | 2.Nf3 Nc6 3.Bb5 a6 *
91 | `);
92 | });
93 |
94 | it('Should format a game with an empty comment in the movetext', async () => {
95 | await expectFormat(`
96 | [Event "F/S Return Match"]
97 |
98 | 1.e4 e5 2.Nf3 Nc6 3.Bb5
99 | {}
100 | 3...a6 *
101 | `);
102 | });
103 |
104 | it('Should format a game with a short comment in the movetext', async () => {
105 | await expectFormat(`
106 | [Event "F/S Return Match"]
107 |
108 | 1.e4 e5 2.Nf3 Nc6 3.Bb5
109 | {The Ruy Lopez}
110 | 3...a6 *
111 | `);
112 | });
113 |
114 | it('Should format a game with a long comment spanning over several lines in the movetext', async () => {
115 | await expectFormat(`
116 | [Event "F/S Return Match"]
117 |
118 | 1.e4 e5 2.Nf3 Nc6 3.Bb5
119 | {The essential move marking the Spanish Game, or Ruy Lopez. It is the double king's
120 | pawn opening most commonly used in master play; it has been adopted by almost all
121 | players at some point in their careers and many play it from both the White and
122 | Black sides.}
123 | 3...a6 *
124 | `);
125 | });
126 |
127 | it('Should format a game with a short comment in a variation', async () => {
128 | await expectFormat(`
129 | [Event "F/S Return Match"]
130 |
131 | 1.e4 e5
132 | (1...e6 2.d4 d5 {The French Defense})
133 | 2.Nf3 Nc6 3.Bb5 a6 *
134 | `);
135 | });
136 |
137 | it('Should format a game with a long comment in a variation', async () => {
138 | await expectFormat(`
139 | [Event "F/S Return Match"]
140 |
141 | 1.e4 e5
142 | (1...e6 {The main line of the French Defence continues 2.d4 d5. White sets up
143 | a pawn centre, which Black immediately challenges by attacking the pawn on e4.})
144 | 2.Nf3 Nc6 3.Bb5 a6 *
145 | `);
146 | });
147 |
148 | it('Should format a game with a comment before the movetext', async () => {
149 | await expectFormat(`
150 | [Event "F/S Return Match"]
151 |
152 | {The 1992 Fischer–Spassky match was a chess match between former world chess champions
153 | Bobby Fischer and Boris Spassky. It was billed as a World Chess Championship, though
154 | it was an unofficial rematch of their 1972 World Championship match.}
155 | 1.e4 e5 2.Nf3 Nc6 3.Bb5 a6 *
156 | `);
157 | });
158 |
159 | it('Should format a game with a header comment', async () => {
160 | await expectFormat(`
161 | {The 1992 Fischer–Spassky match was a chess match between former world chess champions
162 | Bobby Fischer and Boris Spassky. It was billed as a World Chess Championship, though
163 | it was an unofficial rematch of their 1972 World Championship match.}
164 | [Event "F/S Return Match"]
165 |
166 | 1.e4 e5 2.Nf3 Nc6 3.Bb5 a6 *
167 | `);
168 | });
169 |
170 | it('Should format a game with a footer comment', async () => {
171 | await expectFormat(`
172 | [Event "F/S Return Match"]
173 |
174 | 1.e4 e5 2.Nf3 Nc6 3.Bb5 a6 *
175 | {The 1992 Fischer–Spassky match was a chess match between former world chess champions
176 | Bobby Fischer and Boris Spassky. It was billed as a World Chess Championship, though
177 | it was an unofficial rematch of their 1972 World Championship match.}
178 | `);
179 | });
180 |
181 | it('Should format a game with annotations and comments', async () => {
182 | await expectFormat(`
183 | [Event "F/S Return Match"]
184 |
185 | 1.e4 e5 2.Nf3 Nc6! ~ 3.Bb5!? =
186 | {The Ruy Lopez}
187 | 3...a6!! $10 *
188 | `);
189 | });
190 |
191 | async function expectFormat(text: string) {
192 | const expected = trimIndent(text);
193 | const result = await prettier.format(expected, {
194 | parser: 'pgn',
195 | plugins: ['./dist/index.js']
196 | });
197 |
198 | assert.strictEqual(result, expected);
199 | }
200 |
201 | function trimIndent(text: string): string {
202 | if (!text.includes('\n')) {
203 | return text;
204 | }
205 | const lines = text.split('\n');
206 | const indent = [...lines[1]].findIndex((character) => /[^\s]/.test(character));
207 | return lines
208 | .slice(1, lines.length - 1)
209 | .map((line) => line.substring(indent))
210 | .join('\n')
211 | .concat('\n');
212 | }
213 | });
214 |
--------------------------------------------------------------------------------
/test/moveContext.spec.ts:
--------------------------------------------------------------------------------
1 | import * as assert from 'node:assert';
2 | import {describe, it} from 'node:test';
3 | import {MoveContext} from '../src/moveContext';
4 |
5 | describe('MoveContext', () => {
6 | it('should initialize with the default position', () => {
7 | const context = new MoveContext();
8 | assert.strictEqual(context.number, 1);
9 | assert.strictEqual(context.turn, 'white');
10 | });
11 |
12 | it('should initialize with a given FEN string', () => {
13 | const fen = 'rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1';
14 | const context = new MoveContext(fen);
15 | assert.strictEqual(context.number, 1);
16 | assert.strictEqual(context.turn, 'white');
17 | });
18 |
19 | it('should play a move and return the SAN', () => {
20 | const context = new MoveContext();
21 | const san = context.play({value: 'e4', start: 0});
22 | assert.strictEqual(san, 'e4');
23 | assert.strictEqual(context.number, 1);
24 | assert.strictEqual(context.turn, 'black');
25 | });
26 |
27 | it('should maintain independent states between original and clone', () => {
28 | const context = new MoveContext();
29 | context.play({value: 'e4', start: 0});
30 |
31 | const clone = context.clone();
32 | clone.play({value: 'e5', start: 3});
33 |
34 | assert.strictEqual(context.number, 1);
35 | assert.strictEqual(context.turn, 'black');
36 | assert.strictEqual(clone.number, 2);
37 | assert.strictEqual(clone.turn, 'white');
38 | });
39 |
40 | it('should fix non standard moves', () => {
41 | const san = new MoveContext().play({value: 'e2-e4', start: 0});
42 | assert.strictEqual(san, 'e4');
43 | });
44 |
45 | it('should throw if an invalid move is played', () => {
46 | assert.throws(() => {
47 | new MoveContext().play({value: 'e5', start: 0});
48 | });
49 | });
50 | });
51 |
--------------------------------------------------------------------------------
/test/parseFile.spec.ts:
--------------------------------------------------------------------------------
1 | import * as assert from 'node:assert';
2 | import {describe, it} from 'node:test';
3 | import {parseFile} from '../src/parseFile';
4 | import {Tokenizer} from '../src/tokenizer';
5 |
6 | describe('parseFile', () => {
7 | it('should parse an empty token stream', () => {
8 | const result = parseFile(new Tokenizer(''));
9 | assert.deepStrictEqual(result, {
10 | type: 'file',
11 | comments: [],
12 | games: [],
13 | start: 0,
14 | end: 0
15 | });
16 | });
17 |
18 | it('should parse a single game', () => {
19 | const result = parseFile(new Tokenizer('1.e4 e5 2.Nf3 Nc6 *'));
20 | assert.deepStrictEqual(result, {
21 | type: 'file',
22 | comments: [],
23 | games: [
24 | {
25 | type: 'game',
26 | tagPairSection: {
27 | type: 'tagPairSection',
28 | tagPairs: [],
29 | start: 0,
30 | end: 0
31 | },
32 | moveTextSection: {
33 | type: 'moveTextSection',
34 | moves: [
35 | {
36 | type: 'move',
37 | number: 1,
38 | turn: 'white',
39 | value: 'e4',
40 | suffix: undefined,
41 | annotations: [],
42 | comments: [],
43 | variations: [],
44 | start: 0,
45 | end: 4
46 | },
47 | {
48 | type: 'move',
49 | number: 1,
50 | turn: 'black',
51 | value: 'e5',
52 | suffix: undefined,
53 | annotations: [],
54 | comments: [],
55 | variations: [],
56 | start: 5,
57 | end: 7
58 | },
59 | {
60 | type: 'move',
61 | number: 2,
62 | turn: 'white',
63 | value: 'Nf3',
64 | suffix: undefined,
65 | annotations: [],
66 | comments: [],
67 | variations: [],
68 | start: 8,
69 | end: 13
70 | },
71 | {
72 | type: 'move',
73 | number: 2,
74 | turn: 'black',
75 | value: 'Nc6',
76 | suffix: undefined,
77 | annotations: [],
78 | comments: [],
79 | variations: [],
80 | start: 14,
81 | end: 17
82 | }
83 | ],
84 | gameTermination: {
85 | type: 'gameTermination',
86 | value: '*',
87 | start: 18,
88 | end: 19
89 | },
90 | start: 0,
91 | end: 19
92 | },
93 | start: 0,
94 | end: 19
95 | }
96 | ],
97 | start: 0,
98 | end: 19
99 | });
100 | });
101 |
102 | it('should parse multiple games', () => {
103 | const result = parseFile(new Tokenizer('* * *'));
104 | assert.deepStrictEqual(result, {
105 | type: 'file',
106 | comments: [],
107 | games: [
108 | {
109 | type: 'game',
110 | tagPairSection: {
111 | type: 'tagPairSection',
112 | tagPairs: [],
113 | start: 0,
114 | end: 0
115 | },
116 | moveTextSection: {
117 | type: 'moveTextSection',
118 | moves: [],
119 | gameTermination: {
120 | type: 'gameTermination',
121 | value: '*',
122 | start: 0,
123 | end: 1
124 | },
125 | start: 0,
126 | end: 1
127 | },
128 | start: 0,
129 | end: 1
130 | },
131 | {
132 | type: 'game',
133 | tagPairSection: {
134 | type: 'tagPairSection',
135 | tagPairs: [],
136 | start: 2,
137 | end: 2
138 | },
139 | moveTextSection: {
140 | type: 'moveTextSection',
141 | moves: [],
142 | gameTermination: {
143 | type: 'gameTermination',
144 | value: '*',
145 | start: 2,
146 | end: 3
147 | },
148 | start: 2,
149 | end: 3
150 | },
151 | start: 2,
152 | end: 3
153 | },
154 | {
155 | type: 'game',
156 | tagPairSection: {
157 | type: 'tagPairSection',
158 | tagPairs: [],
159 | start: 4,
160 | end: 4
161 | },
162 | moveTextSection: {
163 | type: 'moveTextSection',
164 | moves: [],
165 | gameTermination: {
166 | type: 'gameTermination',
167 | value: '*',
168 | start: 4,
169 | end: 5
170 | },
171 | start: 4,
172 | end: 5
173 | },
174 | start: 4,
175 | end: 5
176 | }
177 | ],
178 | start: 0,
179 | end: 5
180 | });
181 | });
182 |
183 | it('should parse a game with a comment', () => {
184 | const result = parseFile(new Tokenizer('1.e4 e5 * {The open game}'));
185 | assert.deepStrictEqual(result, {
186 | type: 'file',
187 | comments: [
188 | {
189 | type: 'comment',
190 | value: 'The open game',
191 | start: 10,
192 | end: 25
193 | }
194 | ],
195 | games: [
196 | {
197 | type: 'game',
198 | tagPairSection: {
199 | type: 'tagPairSection',
200 | tagPairs: [],
201 | start: 0,
202 | end: 0
203 | },
204 | moveTextSection: {
205 | type: 'moveTextSection',
206 | moves: [
207 | {
208 | type: 'move',
209 | number: 1,
210 | turn: 'white',
211 | value: 'e4',
212 | suffix: undefined,
213 | annotations: [],
214 | comments: [],
215 | variations: [],
216 | start: 0,
217 | end: 4
218 | },
219 | {
220 | type: 'move',
221 | number: 1,
222 | turn: 'black',
223 | value: 'e5',
224 | suffix: undefined,
225 | annotations: [],
226 | comments: [],
227 | variations: [],
228 | start: 5,
229 | end: 7
230 | }
231 | ],
232 | gameTermination: {
233 | type: 'gameTermination',
234 | value: '*',
235 | start: 8,
236 | end: 9
237 | },
238 | start: 0,
239 | end: 9
240 | },
241 | start: 0,
242 | end: 9
243 | }
244 | ],
245 | start: 0,
246 | end: 9
247 | });
248 | });
249 |
250 | it('should throw if an unexpected token is encountered', () => {
251 | assert.throws(() => {
252 | parseFile(new Tokenizer('1.e4 e5 ]'));
253 | });
254 | });
255 | });
256 |
--------------------------------------------------------------------------------
/test/parseGame.spec.ts:
--------------------------------------------------------------------------------
1 | import * as assert from 'node:assert';
2 | import {describe, it} from 'node:test';
3 | import {parseGame} from '../src/parseGame';
4 | import {Tokenizer} from '../src/tokenizer';
5 |
6 | describe('parseGame', () => {
7 | it('should return undefined if there is no token', () => {
8 | const result = parseGame(new Tokenizer(''));
9 | assert.strictEqual(result, undefined);
10 | });
11 |
12 | it('should return undefined if the first token is invalid', () => {
13 | const result = parseGame(new Tokenizer(']'));
14 | assert.strictEqual(result, undefined);
15 | });
16 |
17 | it('should parse a game with only tag pairs', () => {
18 | const result = parseGame(
19 | new Tokenizer('[Event "F/S Return Match"][Site "Belgrade, Serbia JUG"][Date "1992.11.04"]')
20 | );
21 |
22 | assert.deepStrictEqual(result, {
23 | type: 'game',
24 | tagPairSection: {
25 | type: 'tagPairSection',
26 | tagPairs: [
27 | {type: 'tagPair', name: 'Event', value: 'F/S Return Match', start: 0, end: 26},
28 | {type: 'tagPair', name: 'Site', value: 'Belgrade, Serbia JUG', start: 26, end: 55},
29 | {type: 'tagPair', name: 'Date', value: '1992.11.04', start: 55, end: 74}
30 | ],
31 | start: 0,
32 | end: 74
33 | },
34 | moveTextSection: {
35 | type: 'moveTextSection',
36 | moves: [],
37 | gameTermination: {
38 | type: 'gameTermination',
39 | value: '*',
40 | start: 74,
41 | end: 74
42 | },
43 | start: 74,
44 | end: 74
45 | },
46 | start: 0,
47 | end: 74
48 | });
49 | });
50 |
51 | it('should parse a game with only moves', () => {
52 | const result = parseGame(new Tokenizer('1.e4 e5 2.Nf3 Nc6'));
53 | assert.deepStrictEqual(result, {
54 | type: 'game',
55 | tagPairSection: {
56 | type: 'tagPairSection',
57 | tagPairs: [],
58 | start: 0,
59 | end: 0
60 | },
61 | moveTextSection: {
62 | type: 'moveTextSection',
63 | moves: [
64 | {
65 | type: 'move',
66 | number: 1,
67 | turn: 'white',
68 | value: 'e4',
69 | suffix: undefined,
70 | annotations: [],
71 | comments: [],
72 | variations: [],
73 | start: 0,
74 | end: 4
75 | },
76 | {
77 | type: 'move',
78 | number: 1,
79 | turn: 'black',
80 | value: 'e5',
81 | suffix: undefined,
82 | annotations: [],
83 | comments: [],
84 | variations: [],
85 | start: 5,
86 | end: 7
87 | },
88 | {
89 | type: 'move',
90 | number: 2,
91 | turn: 'white',
92 | value: 'Nf3',
93 | suffix: undefined,
94 | annotations: [],
95 | comments: [],
96 | variations: [],
97 | start: 8,
98 | end: 13
99 | },
100 | {
101 | type: 'move',
102 | number: 2,
103 | turn: 'black',
104 | value: 'Nc6',
105 | suffix: undefined,
106 | annotations: [],
107 | comments: [],
108 | variations: [],
109 | start: 14,
110 | end: 17
111 | }
112 | ],
113 | gameTermination: {
114 | type: 'gameTermination',
115 | value: '*',
116 | start: 17,
117 | end: 17
118 | },
119 | start: 0,
120 | end: 17
121 | },
122 | start: 0,
123 | end: 17
124 | });
125 | });
126 |
127 | it('should parse a game with only a move termination', () => {
128 | const result = parseGame(new Tokenizer('1-0'));
129 | assert.deepStrictEqual(result, {
130 | type: 'game',
131 | tagPairSection: {
132 | type: 'tagPairSection',
133 | tagPairs: [],
134 | start: 0,
135 | end: 0
136 | },
137 | moveTextSection: {
138 | type: 'moveTextSection',
139 | moves: [],
140 | gameTermination: {
141 | type: 'gameTermination',
142 | value: '1-0',
143 | start: 0,
144 | end: 3
145 | },
146 | start: 0,
147 | end: 3
148 | },
149 | start: 0,
150 | end: 3
151 | });
152 | });
153 |
154 | it('should parse a game with tag pairs and moves', () => {
155 | const result = parseGame(new Tokenizer('[Event "F/S Return Match"] 1.e4 e5 2.Nf3 Nc6'));
156 | assert.deepStrictEqual(result, {
157 | type: 'game',
158 | tagPairSection: {
159 | type: 'tagPairSection',
160 | tagPairs: [
161 | {
162 | type: 'tagPair',
163 | name: 'Event',
164 | value: 'F/S Return Match',
165 | start: 0,
166 | end: 26
167 | }
168 | ],
169 | start: 0,
170 | end: 26
171 | },
172 | moveTextSection: {
173 | type: 'moveTextSection',
174 | moves: [
175 | {
176 | type: 'move',
177 | number: 1,
178 | turn: 'white',
179 | value: 'e4',
180 | suffix: undefined,
181 | annotations: [],
182 | comments: [],
183 | variations: [],
184 | start: 27,
185 | end: 31
186 | },
187 | {
188 | type: 'move',
189 | number: 1,
190 | turn: 'black',
191 | value: 'e5',
192 | suffix: undefined,
193 | annotations: [],
194 | comments: [],
195 | variations: [],
196 | start: 32,
197 | end: 34
198 | },
199 | {
200 | type: 'move',
201 | number: 2,
202 | turn: 'white',
203 | value: 'Nf3',
204 | suffix: undefined,
205 | annotations: [],
206 | comments: [],
207 | variations: [],
208 | start: 35,
209 | end: 40
210 | },
211 | {
212 | type: 'move',
213 | number: 2,
214 | turn: 'black',
215 | value: 'Nc6',
216 | suffix: undefined,
217 | annotations: [],
218 | comments: [],
219 | variations: [],
220 | start: 41,
221 | end: 44
222 | }
223 | ],
224 | gameTermination: {
225 | type: 'gameTermination',
226 | value: '*',
227 | start: 44,
228 | end: 44
229 | },
230 | start: 27,
231 | end: 44
232 | },
233 | start: 0,
234 | end: 44
235 | });
236 | });
237 |
238 | it('should parse a complete game', () => {
239 | const result = parseGame(new Tokenizer('[Event "F/S Return Match"] 1.e4 e5 2.Nf3 Nc6 *'));
240 | assert.deepStrictEqual(result, {
241 | type: 'game',
242 | tagPairSection: {
243 | type: 'tagPairSection',
244 | tagPairs: [
245 | {
246 | type: 'tagPair',
247 | name: 'Event',
248 | value: 'F/S Return Match',
249 | start: 0,
250 | end: 26
251 | }
252 | ],
253 | start: 0,
254 | end: 26
255 | },
256 | moveTextSection: {
257 | type: 'moveTextSection',
258 | moves: [
259 | {
260 | type: 'move',
261 | number: 1,
262 | turn: 'white',
263 | value: 'e4',
264 | suffix: undefined,
265 | annotations: [],
266 | comments: [],
267 | variations: [],
268 | start: 27,
269 | end: 31
270 | },
271 | {
272 | type: 'move',
273 | number: 1,
274 | turn: 'black',
275 | value: 'e5',
276 | suffix: undefined,
277 | annotations: [],
278 | comments: [],
279 | variations: [],
280 | start: 32,
281 | end: 34
282 | },
283 | {
284 | type: 'move',
285 | number: 2,
286 | turn: 'white',
287 | value: 'Nf3',
288 | suffix: undefined,
289 | annotations: [],
290 | comments: [],
291 | variations: [],
292 | start: 35,
293 | end: 40
294 | },
295 | {
296 | type: 'move',
297 | number: 2,
298 | turn: 'black',
299 | value: 'Nc6',
300 | suffix: undefined,
301 | annotations: [],
302 | comments: [],
303 | variations: [],
304 | start: 41,
305 | end: 44
306 | }
307 | ],
308 | gameTermination: {
309 | type: 'gameTermination',
310 | value: '*',
311 | start: 45,
312 | end: 46
313 | },
314 | start: 27,
315 | end: 46
316 | },
317 | start: 0,
318 | end: 46
319 | });
320 | });
321 |
322 | it('should parse a game with an alternative starting position', () => {
323 | const result = parseGame(
324 | new Tokenizer(
325 | '[SetUp "1"][FEN "rnbqkbnr/pp1ppppp/8/2p5/4P3/5N2/PPPP1PPP/RNBQKB1R b KQkq - 1 2"] 2...d6 *'
326 | )
327 | );
328 | assert.deepStrictEqual(result, {
329 | type: 'game',
330 | tagPairSection: {
331 | type: 'tagPairSection',
332 | tagPairs: [
333 | {
334 | type: 'tagPair',
335 | name: 'SetUp',
336 | value: '1',
337 | start: 0,
338 | end: 11
339 | },
340 | {
341 | type: 'tagPair',
342 | name: 'FEN',
343 | value: 'rnbqkbnr/pp1ppppp/8/2p5/4P3/5N2/PPPP1PPP/RNBQKB1R b KQkq - 1 2',
344 | start: 11,
345 | end: 81
346 | }
347 | ],
348 | start: 0,
349 | end: 81
350 | },
351 | moveTextSection: {
352 | type: 'moveTextSection',
353 | moves: [
354 | {
355 | type: 'move',
356 | number: 2,
357 | turn: 'black',
358 | value: 'd6',
359 | suffix: undefined,
360 | annotations: [],
361 | comments: [],
362 | variations: [],
363 | start: 82,
364 | end: 88
365 | }
366 | ],
367 | gameTermination: {
368 | type: 'gameTermination',
369 | value: '*',
370 | start: 89,
371 | end: 90
372 | },
373 | start: 82,
374 | end: 90
375 | },
376 | start: 0,
377 | end: 90
378 | });
379 | });
380 | });
381 |
--------------------------------------------------------------------------------
/test/parseMove.spec.ts:
--------------------------------------------------------------------------------
1 | import * as assert from 'node:assert';
2 | import {describe, it} from 'node:test';
3 | import {MoveContext} from '../src/moveContext';
4 | import {parseMove} from '../src/parseMove';
5 | import {Tokenizer} from '../src/tokenizer';
6 |
7 | describe('parseMove', () => {
8 | it('should return undefined if there is no token', () => {
9 | const result = parseMove(new Tokenizer(''), new MoveContext());
10 | assert.strictEqual(result, undefined);
11 | });
12 |
13 | it('should return undefined if the first token is not a move token', () => {
14 | const result = parseMove(new Tokenizer('*'), new MoveContext());
15 | assert.strictEqual(result, undefined);
16 | });
17 |
18 | it('should parse a white move', () => {
19 | const result = parseMove(new Tokenizer('1.e4'), new MoveContext());
20 | assert.deepStrictEqual(result, {
21 | type: 'move',
22 | number: 1,
23 | turn: 'white',
24 | value: 'e4',
25 | suffix: undefined,
26 | annotations: [],
27 | comments: [],
28 | variations: [],
29 | start: 0,
30 | end: 4
31 | });
32 | });
33 |
34 | it('should parse a black move', () => {
35 | const result = parseMove(
36 | new Tokenizer('1...e5'),
37 | new MoveContext('rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq e3 0 1')
38 | );
39 | assert.deepStrictEqual(result, {
40 | type: 'move',
41 | number: 1,
42 | turn: 'black',
43 | value: 'e5',
44 | suffix: undefined,
45 | annotations: [],
46 | comments: [],
47 | variations: [],
48 | start: 0,
49 | end: 6
50 | });
51 | });
52 |
53 | it('should parse a move without a number', () => {
54 | const result = parseMove(new Tokenizer('Nf3'), new MoveContext());
55 | assert.deepStrictEqual(result, {
56 | type: 'move',
57 | number: 1,
58 | turn: 'white',
59 | value: 'Nf3',
60 | suffix: undefined,
61 | annotations: [],
62 | comments: [],
63 | variations: [],
64 | start: 0,
65 | end: 3
66 | });
67 | });
68 |
69 | it('should parse a move without a period', () => {
70 | const result = parseMove(new Tokenizer('1 Nf3'), new MoveContext());
71 | assert.deepStrictEqual(result, {
72 | type: 'move',
73 | number: 1,
74 | turn: 'white',
75 | value: 'Nf3',
76 | suffix: undefined,
77 | annotations: [],
78 | comments: [],
79 | variations: [],
80 | start: 0,
81 | end: 5
82 | });
83 | });
84 |
85 | it('should parse a move with annotations', () => {
86 | const result = parseMove(new Tokenizer('1.Nf3! +/-'), new MoveContext());
87 | assert.deepStrictEqual(result, {
88 | type: 'move',
89 | number: 1,
90 | turn: 'white',
91 | value: 'Nf3',
92 | suffix: {
93 | type: 'annotation',
94 | value: '!',
95 | start: 5,
96 | end: 6
97 | },
98 | annotations: [
99 | {
100 | type: 'annotation',
101 | value: '+/-',
102 | start: 7,
103 | end: 10
104 | }
105 | ],
106 | comments: [],
107 | variations: [],
108 | start: 0,
109 | end: 10
110 | });
111 | });
112 |
113 | it('should ignore redundant suffix annotations', () => {
114 | const result = parseMove(new Tokenizer('1.Nf3!!!!'), new MoveContext());
115 | assert.deepStrictEqual(result, {
116 | type: 'move',
117 | number: 1,
118 | turn: 'white',
119 | value: 'Nf3',
120 | suffix: {
121 | type: 'annotation',
122 | value: '!!',
123 | start: 5,
124 | end: 7
125 | },
126 | annotations: [],
127 | comments: [],
128 | variations: [],
129 | start: 0,
130 | end: 9
131 | });
132 | });
133 |
134 | it('should parse a move with comments', () => {
135 | const result = parseMove(
136 | new Tokenizer('1.e4 {A comment} {Another comment}'),
137 | new MoveContext()
138 | );
139 | assert.deepStrictEqual(result, {
140 | type: 'move',
141 | number: 1,
142 | turn: 'white',
143 | value: 'e4',
144 | suffix: undefined,
145 | annotations: [],
146 | comments: [
147 | {
148 | type: 'comment',
149 | value: 'A comment',
150 | start: 5,
151 | end: 16
152 | },
153 | {
154 | type: 'comment',
155 | value: 'Another comment',
156 | start: 17,
157 | end: 34
158 | }
159 | ],
160 | variations: [],
161 | start: 0,
162 | end: 34
163 | });
164 | });
165 |
166 | it('should parse a move with annotations and comments', () => {
167 | const result = parseMove(
168 | new Tokenizer('1.e4! {A comment} ~ {Another comment}'),
169 | new MoveContext()
170 | );
171 | assert.deepStrictEqual(result, {
172 | type: 'move',
173 | number: 1,
174 | turn: 'white',
175 | value: 'e4',
176 | suffix: {
177 | type: 'annotation',
178 | value: '!',
179 | start: 4,
180 | end: 5
181 | },
182 | annotations: [
183 | {
184 | type: 'annotation',
185 | value: '~',
186 | start: 18,
187 | end: 19
188 | }
189 | ],
190 | comments: [
191 | {
192 | type: 'comment',
193 | value: 'A comment',
194 | start: 6,
195 | end: 17
196 | },
197 | {
198 | type: 'comment',
199 | value: 'Another comment',
200 | start: 20,
201 | end: 37
202 | }
203 | ],
204 | variations: [],
205 | start: 0,
206 | end: 37
207 | });
208 | });
209 |
210 | it('should parse a move with a variation', () => {
211 | const result = parseMove(new Tokenizer('1.e4 (1.d4)'), new MoveContext());
212 | assert.deepStrictEqual(result, {
213 | type: 'move',
214 | number: 1,
215 | turn: 'white',
216 | value: 'e4',
217 | suffix: undefined,
218 | annotations: [],
219 | comments: [],
220 | variations: [
221 | {
222 | type: 'variation',
223 | moves: [
224 | {
225 | type: 'move',
226 | number: 1,
227 | turn: 'white',
228 | value: 'd4',
229 | suffix: undefined,
230 | annotations: [],
231 | comments: [],
232 | variations: [],
233 | start: 6,
234 | end: 10
235 | }
236 | ],
237 | start: 5,
238 | end: 11
239 | }
240 | ],
241 | start: 0,
242 | end: 11
243 | });
244 | });
245 |
246 | it('should parse a move with several variations', () => {
247 | const result = parseMove(new Tokenizer('1.e4 (1.d4) (1.c4)'), new MoveContext());
248 | assert.deepStrictEqual(result, {
249 | type: 'move',
250 | number: 1,
251 | turn: 'white',
252 | value: 'e4',
253 | suffix: undefined,
254 | annotations: [],
255 | comments: [],
256 | variations: [
257 | {
258 | type: 'variation',
259 | moves: [
260 | {
261 | type: 'move',
262 | number: 1,
263 | turn: 'white',
264 | value: 'd4',
265 | suffix: undefined,
266 | annotations: [],
267 | comments: [],
268 | variations: [],
269 | start: 6,
270 | end: 10
271 | }
272 | ],
273 | start: 5,
274 | end: 11
275 | },
276 | {
277 | type: 'variation',
278 | moves: [
279 | {
280 | type: 'move',
281 | number: 1,
282 | turn: 'white',
283 | value: 'c4',
284 | suffix: undefined,
285 | annotations: [],
286 | comments: [],
287 | variations: [],
288 | start: 13,
289 | end: 17
290 | }
291 | ],
292 | start: 12,
293 | end: 18
294 | }
295 | ],
296 | start: 0,
297 | end: 18
298 | });
299 | });
300 |
301 | it('should parse a move with nested variations', () => {
302 | const result = parseMove(new Tokenizer('1.e4 (1.d4 d5 (1...Nf6))'), new MoveContext());
303 | assert.deepStrictEqual(result, {
304 | type: 'move',
305 | number: 1,
306 | turn: 'white',
307 | value: 'e4',
308 | suffix: undefined,
309 | annotations: [],
310 | comments: [],
311 | variations: [
312 | {
313 | type: 'variation',
314 | moves: [
315 | {
316 | type: 'move',
317 | number: 1,
318 | turn: 'white',
319 | value: 'd4',
320 | suffix: undefined,
321 | annotations: [],
322 | comments: [],
323 | variations: [],
324 | start: 6,
325 | end: 10
326 | },
327 | {
328 | type: 'move',
329 | number: 1,
330 | turn: 'black',
331 | value: 'd5',
332 | suffix: undefined,
333 | annotations: [],
334 | comments: [],
335 | variations: [
336 | {
337 | type: 'variation',
338 | moves: [
339 | {
340 | type: 'move',
341 | number: 1,
342 | turn: 'black',
343 | value: 'Nf6',
344 | suffix: undefined,
345 | annotations: [],
346 | comments: [],
347 | variations: [],
348 | start: 15,
349 | end: 22
350 | }
351 | ],
352 | start: 14,
353 | end: 23
354 | }
355 | ],
356 | start: 11,
357 | end: 23
358 | }
359 | ],
360 | start: 5,
361 | end: 24
362 | }
363 | ],
364 | start: 0,
365 | end: 24
366 | });
367 | });
368 |
369 | it('should throw if an unfinished variation is encountered', () => {
370 | assert.throws(() => {
371 | parseMove(new Tokenizer('1.e4 (1.d4'), new MoveContext());
372 | });
373 | });
374 |
375 | it('should throw if a nested unclosed variation is encountered', () => {
376 | assert.throws(() => {
377 | parseMove(new Tokenizer('1.e4 (1.d4 (1.Nf3'), new MoveContext());
378 | });
379 | });
380 |
381 | it('should throw if a number without a move is encountered', () => {
382 | assert.throws(() => {
383 | parseMove(new Tokenizer('42'), new MoveContext());
384 | });
385 | });
386 |
387 | it('should throw if an unexpected token is encountered', () => {
388 | assert.throws(() => {
389 | parseMove(new Tokenizer('42...*'), new MoveContext());
390 | });
391 | });
392 | });
393 |
--------------------------------------------------------------------------------
/test/parseTagPair.spec.ts:
--------------------------------------------------------------------------------
1 | import * as assert from 'node:assert';
2 | import {describe, it} from 'node:test';
3 | import {parseTagPair} from '../src/parseTagPair';
4 | import {Tokenizer} from '../src/tokenizer';
5 |
6 | describe('parseTagPair', () => {
7 | it('should return undefined if there is no token', () => {
8 | const result = parseTagPair(new Tokenizer(''));
9 | assert.strictEqual(result, undefined);
10 | });
11 |
12 | it('should return undefined if the first token is not a left bracket', () => {
13 | const result = parseTagPair(new Tokenizer('1.e4 e5 *'));
14 | assert.strictEqual(result, undefined);
15 | });
16 |
17 | it('should parse a tag pair', () => {
18 | const result = parseTagPair(new Tokenizer('[Event "F/S Return Match"]'));
19 | assert.deepStrictEqual(result, {
20 | type: 'tagPair',
21 | name: 'Event',
22 | value: 'F/S Return Match',
23 | start: 0,
24 | end: 26
25 | });
26 | });
27 |
28 | it('should parse a tag pair without a right bracket', () => {
29 | const result = parseTagPair(new Tokenizer('[Event "F/S Return Match"'));
30 | assert.deepStrictEqual(result, {
31 | type: 'tagPair',
32 | name: 'Event',
33 | value: 'F/S Return Match',
34 | start: 0,
35 | end: 25
36 | });
37 | });
38 |
39 | it('should throw if a tag pair without a value is encountered', () => {
40 | assert.throws(() => {
41 | parseTagPair(new Tokenizer('[Event'));
42 | });
43 | });
44 |
45 | it('should throw if a left bracket is encountered alone', () => {
46 | assert.throws(() => {
47 | parseTagPair(new Tokenizer('['));
48 | });
49 | });
50 | });
51 |
--------------------------------------------------------------------------------
/test/parseToken.spec.ts:
--------------------------------------------------------------------------------
1 | import * as assert from 'node:assert';
2 | import {describe, it} from 'node:test';
3 | import {parseToken} from '../src/parseToken';
4 |
5 | describe('parseToken', () => {
6 | it('should return undefined for an empty string', () => {
7 | const token = parseToken('', 0);
8 | assert.deepStrictEqual(token, undefined);
9 | });
10 |
11 | it('should parse a period', () => {
12 | const token = parseToken('.', 0);
13 | assert.deepStrictEqual(token, {type: 'period', start: 0, end: 1});
14 | });
15 |
16 | it('should parse a left bracket', () => {
17 | const token = parseToken('[', 0);
18 | assert.deepStrictEqual(token, {type: 'leftBracket', start: 0, end: 1});
19 | });
20 |
21 | it('should parse a right bracket', () => {
22 | const token = parseToken(']', 0);
23 | assert.deepStrictEqual(token, {type: 'rightBracket', start: 0, end: 1});
24 | });
25 |
26 | it('should parse a left parenthesis', () => {
27 | const token = parseToken('(', 0);
28 | assert.deepStrictEqual(token, {type: 'leftParenthesis', start: 0, end: 1});
29 | });
30 |
31 | it('should parse a right parenthesis', () => {
32 | const token = parseToken(')', 0);
33 | assert.deepStrictEqual(token, {type: 'rightParenthesis', start: 0, end: 1});
34 | });
35 |
36 | it('should parse a string', () => {
37 | const token = parseToken('"hello"', 0);
38 | assert.deepStrictEqual(token, {type: 'string', value: 'hello', start: 0, end: 7});
39 | });
40 |
41 | it('should parse a string containing a quote', () => {
42 | const token = parseToken('"hello \\"quote\\""', 0);
43 | assert.deepStrictEqual(token, {type: 'string', value: 'hello "quote"', start: 0, end: 17});
44 | });
45 |
46 | it('should parse a string containing a backslash', () => {
47 | const token = parseToken('"hello \\\\"', 0);
48 | assert.deepStrictEqual(token, {type: 'string', value: 'hello \\', start: 0, end: 10});
49 | });
50 |
51 | it('should parse a string containing a non-escaped backslash', () => {
52 | const token = parseToken('"hello \\ "', 0);
53 | assert.deepStrictEqual(token, {type: 'string', value: 'hello ', start: 0, end: 10});
54 | });
55 |
56 | it('should parse an unterminated string', () => {
57 | const token = parseToken('"hello', 0);
58 | assert.deepStrictEqual(token, {type: 'string', value: 'hello', start: 0, end: 6});
59 | });
60 |
61 | it('should parse an unterminated string ending with a non-escaped backslash', () => {
62 | const token = parseToken('"hello \\', 0);
63 | assert.deepStrictEqual(token, {type: 'string', value: 'hello ', start: 0, end: 8});
64 | });
65 |
66 | it('should parse a numeric annotation glyph', () => {
67 | const token = parseToken('$1', 0);
68 | assert.deepStrictEqual(token, {type: 'annotation', value: '$1', start: 0, end: 2});
69 | });
70 |
71 | it(`should parse the "!" annotation`, () => {
72 | const token = parseToken('!', 0);
73 | assert.deepStrictEqual(token, {type: 'annotation', value: '!', start: 0, end: 1});
74 | });
75 |
76 | it(`should parse the "!!" annotation`, () => {
77 | const token = parseToken('!!', 0);
78 | assert.deepStrictEqual(token, {type: 'annotation', value: '!!', start: 0, end: 2});
79 | });
80 |
81 | it(`should parse the "?" annotation`, () => {
82 | const token = parseToken('?', 0);
83 | assert.deepStrictEqual(token, {type: 'annotation', value: '?', start: 0, end: 1});
84 | });
85 |
86 | it(`should parse the "??" annotation`, () => {
87 | const token = parseToken('??', 0);
88 | assert.deepStrictEqual(token, {type: 'annotation', value: '??', start: 0, end: 2});
89 | });
90 |
91 | it(`should parse the "!?" annotation`, () => {
92 | const token = parseToken('!?', 0);
93 | assert.deepStrictEqual(token, {type: 'annotation', value: '!?', start: 0, end: 2});
94 | });
95 |
96 | it(`should parse the "?!" annotation`, () => {
97 | const token = parseToken('?!', 0);
98 | assert.deepStrictEqual(token, {type: 'annotation', value: '?!', start: 0, end: 2});
99 | });
100 |
101 | it(`should parse the "+--" annotation`, () => {
102 | const token = parseToken('+--', 0);
103 | assert.deepStrictEqual(token, {type: 'annotation', value: '+--', start: 0, end: 3});
104 | });
105 |
106 | it(`should parse the "+-" annotation`, () => {
107 | const token = parseToken('+-', 0);
108 | assert.deepStrictEqual(token, {type: 'annotation', value: '+-', start: 0, end: 2});
109 | });
110 |
111 | it(`should parse the "+/-" annotation`, () => {
112 | const token = parseToken('+/-', 0);
113 | assert.deepStrictEqual(token, {type: 'annotation', value: '+/-', start: 0, end: 3});
114 | });
115 |
116 | it(`should parse the "+=" annotation`, () => {
117 | const token = parseToken('+=', 0);
118 | assert.deepStrictEqual(token, {type: 'annotation', value: '+=', start: 0, end: 2});
119 | });
120 |
121 | it(`should parse the "--+" annotation`, () => {
122 | const token = parseToken('--+', 0);
123 | assert.deepStrictEqual(token, {type: 'annotation', value: '--+', start: 0, end: 3});
124 | });
125 |
126 | it(`should parse the "-+" annotation`, () => {
127 | const token = parseToken('-+', 0);
128 | assert.deepStrictEqual(token, {type: 'annotation', value: '-+', start: 0, end: 2});
129 | });
130 |
131 | it(`should parse the "-/+" annotation`, () => {
132 | const token = parseToken('-/+', 0);
133 | assert.deepStrictEqual(token, {type: 'annotation', value: '-/+', start: 0, end: 3});
134 | });
135 |
136 | it(`should parse the "=+" annotation`, () => {
137 | const token = parseToken('=+', 0);
138 | assert.deepStrictEqual(token, {type: 'annotation', value: '=+', start: 0, end: 2});
139 | });
140 |
141 | it(`should parse the "=" annotation`, () => {
142 | const token = parseToken('=', 0);
143 | assert.deepStrictEqual(token, {type: 'annotation', value: '=', start: 0, end: 1});
144 | });
145 |
146 | it(`should parse the "~" annotation`, () => {
147 | const token = parseToken('~', 0);
148 | assert.deepStrictEqual(token, {type: 'annotation', value: '~', start: 0, end: 1});
149 | });
150 |
151 | it('should parse a simple move symbol', () => {
152 | const token = parseToken('e4', 0);
153 | assert.deepStrictEqual(token, {type: 'symbol', value: 'e4', start: 0, end: 2});
154 | });
155 |
156 | it('should parse a capture move symbol', () => {
157 | const token = parseToken('Nxc7', 0);
158 | assert.deepStrictEqual(token, {type: 'symbol', value: 'Nxc7', start: 0, end: 4});
159 | });
160 |
161 | it('should parse a check move symbol', () => {
162 | const token = parseToken('Re1+', 0);
163 | assert.deepStrictEqual(token, {type: 'symbol', value: 'Re1+', start: 0, end: 4});
164 | });
165 |
166 | it('should parse a checkmate move symbol', () => {
167 | const token = parseToken('Re8#', 0);
168 | assert.deepStrictEqual(token, {type: 'symbol', value: 'Re8#', start: 0, end: 4});
169 | });
170 |
171 | it('should parse a promotion move symbol', () => {
172 | const token = parseToken('d8=Q', 0);
173 | assert.deepStrictEqual(token, {type: 'symbol', value: 'd8=Q', start: 0, end: 4});
174 | });
175 |
176 | it('should parse the kingside castle symbol', () => {
177 | const token = parseToken('O-O', 0);
178 | assert.deepStrictEqual(token, {type: 'symbol', value: 'O-O', start: 0, end: 3});
179 | });
180 |
181 | it('should parse the queenside castle symbol', () => {
182 | const token = parseToken('O-O-O', 0);
183 | assert.deepStrictEqual(token, {type: 'symbol', value: 'O-O-O', start: 0, end: 5});
184 | });
185 |
186 | it('should parse the unknown termination', () => {
187 | const token = parseToken('*', 0);
188 | assert.deepStrictEqual(token, {type: 'gameTermination', value: '*', start: 0, end: 1});
189 | });
190 |
191 | it('should parse the white win termination', () => {
192 | const token = parseToken('1-0', 0);
193 | assert.deepStrictEqual(token, {type: 'gameTermination', value: '1-0', start: 0, end: 3});
194 | });
195 |
196 | it('should parse the black win termination', () => {
197 | const token = parseToken('0-1', 0);
198 | assert.deepStrictEqual(token, {type: 'gameTermination', value: '0-1', start: 0, end: 3});
199 | });
200 |
201 | it('should parse the draw termination', () => {
202 | const token = parseToken('1/2-1/2', 0);
203 | assert.deepStrictEqual(token, {type: 'gameTermination', value: '1/2-1/2', start: 0, end: 7});
204 | });
205 |
206 | it('should parse a single digit integer', () => {
207 | const token = parseToken('1', 0);
208 | assert.deepStrictEqual(token, {type: 'integer', value: 1, start: 0, end: 1});
209 | });
210 |
211 | it('should parse a multi-digit integer', () => {
212 | const token = parseToken('207', 0);
213 | assert.deepStrictEqual(token, {type: 'integer', value: 207, start: 0, end: 3});
214 | });
215 |
216 | it('should parse a comment', () => {
217 | const token = parseToken('{This is a comment}', 0);
218 | assert.deepStrictEqual(token, {type: 'comment', value: 'This is a comment', start: 0, end: 19});
219 | });
220 |
221 | it('should parse an unterminated comment', () => {
222 | const token = parseToken('{This is a comment', 0);
223 | assert.deepStrictEqual(token, {type: 'comment', value: 'This is a comment', start: 0, end: 18});
224 | });
225 |
226 | it('should skip whitespace', () => {
227 | const token = parseToken(' e4 ', 0);
228 | assert.deepStrictEqual(token, {type: 'symbol', value: 'e4', start: 3, end: 5});
229 | });
230 |
231 | it('should skip newlines', () => {
232 | const token = parseToken('\n\ne4', 0);
233 | assert.deepStrictEqual(token, {type: 'symbol', value: 'e4', start: 2, end: 4});
234 | });
235 |
236 | it('should throw if an invalid character is encountered', () => {
237 | assert.throws(() => {
238 | parseToken('@', 0);
239 | });
240 | });
241 |
242 | it('should throw if an invalid annotation is encountered', () => {
243 | assert.throws(() => {
244 | parseToken('+++', 0);
245 | });
246 | });
247 | });
248 |
--------------------------------------------------------------------------------
/test/parserError.spec.ts:
--------------------------------------------------------------------------------
1 | import * as assert from 'node:assert';
2 | import {describe, it} from 'node:test';
3 | import {ParserError} from '../src/parserError';
4 |
5 | describe('ParserError', () => {
6 | it('should convert to a Prettier error with the correct location', () => {
7 | const error = new ParserError('Unexpected token @', {start: 65});
8 | const text = '[Event "F/S Return Match"]\n\n1. e4 e5\n2. Nf3 Nc6\n3. Bb5 a6\n4. Ba4 @\n*';
9 | const prettierError = error.convertToPrettierError(text);
10 |
11 | assert(prettierError instanceof SyntaxError);
12 | assert('loc' in prettierError);
13 | assert.strictEqual(prettierError.message, 'Unexpected token @ (6:8)');
14 | assert.deepStrictEqual(prettierError.loc, {start: {line: 6, column: 8}});
15 | });
16 | });
17 |
--------------------------------------------------------------------------------
/test/tokenizer.spec.ts:
--------------------------------------------------------------------------------
1 | import * as assert from 'node:assert';
2 | import {describe, it} from 'node:test';
3 | import {Tokenizer} from '../src/tokenizer';
4 |
5 | describe('Tokenizer', () => {
6 | it('should accept a token of the correct type', () => {
7 | const token = new Tokenizer('[').accept('leftBracket');
8 | assert.deepStrictEqual(token, {type: 'leftBracket', start: 0, end: 1});
9 | });
10 |
11 | it('should return undefined when accepting a token of the wrong type', () => {
12 | const token = new Tokenizer('[').accept('integer');
13 | assert.strictEqual(token, undefined);
14 | });
15 |
16 | it('should return undefined when accepting a token at the end of the file', () => {
17 | const token = new Tokenizer('').accept('integer');
18 | assert.strictEqual(token, undefined);
19 | });
20 |
21 | it('should expect a token of the correct type', () => {
22 | const token = new Tokenizer('[').expect('leftBracket');
23 | assert.deepStrictEqual(token, {type: 'leftBracket', start: 0, end: 1});
24 | });
25 |
26 | it('should throw an error when expecting a token of the wrong type', () => {
27 | assert.throws(() => {
28 | new Tokenizer('[').expect('symbol');
29 | });
30 | });
31 |
32 | it('should throw an error when expecting a token at the end of the file', () => {
33 | assert.throws(() => {
34 | new Tokenizer('').expect('integer');
35 | });
36 | });
37 |
38 | it('should not throw an error when there are no tokens at the end of the file', () => {
39 | assert.doesNotThrow(() => {
40 | new Tokenizer('').expectEndOfFile();
41 | });
42 | });
43 |
44 | it('should throw an error when there are unexpected tokens at the end of the file', () => {
45 | assert.throws(() => {
46 | new Tokenizer('[').expectEndOfFile();
47 | });
48 | });
49 |
50 | it('should accept several tokens', () => {
51 | const tokens = new Tokenizer('1.e4');
52 | const result = [tokens.accept('integer'), tokens.accept('period'), tokens.accept('symbol')];
53 | assert.deepStrictEqual(result, [
54 | {type: 'integer', value: 1, start: 0, end: 1},
55 | {type: 'period', start: 1, end: 2},
56 | {type: 'symbol', value: 'e4', start: 2, end: 4}
57 | ]);
58 | });
59 |
60 | it('should skip comments', () => {
61 | const tokens = new Tokenizer('{abc def} e4');
62 | assert.deepStrictEqual(tokens.accept('symbol'), {
63 | type: 'symbol',
64 | value: 'e4',
65 | start: 10,
66 | end: 12
67 | });
68 | assert.deepStrictEqual(tokens.getComments(), [
69 | {
70 | type: 'comment',
71 | value: 'abc def',
72 | start: 0,
73 | end: 9
74 | }
75 | ]);
76 | });
77 | });
78 |
--------------------------------------------------------------------------------
/test/utils.spec.ts:
--------------------------------------------------------------------------------
1 | import * as assert from 'node:assert';
2 | import {describe, it} from 'node:test';
3 | import {hasValue, noValue, repeat, castToError} from '../src/utils';
4 |
5 | describe('utils.ts', () => {
6 | describe('hasValue', () => {
7 | it('should return true for non-null and non-undefined values', () => {
8 | assert.strictEqual(hasValue(1), true);
9 | assert.strictEqual(hasValue('test'), true);
10 | assert.strictEqual(hasValue({}), true);
11 | assert.strictEqual(hasValue([]), true);
12 | });
13 |
14 | it('should return false for null and undefined values', () => {
15 | assert.strictEqual(hasValue(null), false);
16 | assert.strictEqual(hasValue(undefined), false);
17 | });
18 | });
19 |
20 | describe('noValue', () => {
21 | it('should return true for null and undefined values', () => {
22 | assert.strictEqual(noValue(null), true);
23 | assert.strictEqual(noValue(undefined), true);
24 | });
25 |
26 | it('should return false for non-null and non-undefined values', () => {
27 | assert.strictEqual(noValue(1), false);
28 | assert.strictEqual(noValue('test'), false);
29 | assert.strictEqual(noValue({}), false);
30 | assert.strictEqual(noValue([]), false);
31 | });
32 | });
33 |
34 | describe('repeat', () => {
35 | it('should repeat the function until it returns undefined', () => {
36 | let i = 1;
37 | const result = repeat(() => (i <= 2 ? i++ : undefined));
38 | assert.deepStrictEqual(result, [1, 2]);
39 | });
40 |
41 | it('should return an empty array if parse returns undefined immediately', () => {
42 | const result = repeat(() => undefined);
43 | assert.deepStrictEqual(result, []);
44 | });
45 | });
46 |
47 | describe('castToError', () => {
48 | it('should return the same error if the throwable is an instance of Error', () => {
49 | const error = new Error('test error');
50 | assert.strictEqual(castToError(error), error);
51 | });
52 |
53 | it('should return a new error with the string representation of the throwable if it is not an instance of Error', () => {
54 | const throwable = 'test error';
55 | const result = castToError(throwable);
56 | assert(result instanceof Error);
57 | assert.strictEqual(result.message, 'test error');
58 | });
59 | });
60 | });
61 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "ES2015",
4 | "module": "CommonJS",
5 | "declaration": true,
6 | "incremental": true,
7 | "strict": true,
8 | "strictNullChecks": true,
9 | "resolveJsonModule": true,
10 | "noImplicitAny": true,
11 | "rootDir": "src",
12 | "outDir": "dist"
13 | },
14 | "exclude": ["node_modules"],
15 | "include": ["src/**/*"]
16 | }
17 |
--------------------------------------------------------------------------------