├── .projrc
├── LICENSE
├── README.md
├── build.gradle
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── settings.gradle
└── src
    ├── main
    │   ├── java
    │   │   └── com
    │   │       └── onkiup
    │   │           └── linker
    │   │               └── parser
    │   │                   ├── EnumRule.java
    │   │                   ├── EvaluationError.java
    │   │                   ├── LinkerParser.java
    │   │                   ├── NestingContext.java
    │   │                   ├── NullMatcher.java
    │   │                   ├── NumberMatcher.java
    │   │                   ├── ParserLocation.java
    │   │                   ├── PatternMatcher.java
    │   │                   ├── Rule.java
    │   │                   ├── SyntaxError.java
    │   │                   ├── TerminalMatcher.java
    │   │                   ├── TestResult.java
    │   │                   ├── TokenGrammar.java
    │   │                   ├── TokenMatcher.java
    │   │                   ├── TokenTestResult.java
    │   │                   ├── UnknownReference.java
    │   │                   ├── annotation
    │   │                   │   ├── AdjustPriority.java
    │   │                   │   ├── Alternatives.java
    │   │                   │   ├── CaptureLimit.java
    │   │                   │   ├── CapturePattern.java
    │   │                   │   ├── ContextAware.java
    │   │                   │   ├── CustomMatcher.java
    │   │                   │   ├── IgnoreCharacters.java
    │   │                   │   ├── IgnoreVariant.java
    │   │                   │   ├── MetaToken.java
    │   │                   │   ├── OptionalToken.java
    │   │                   │   └── SkipIfFollowedBy.java
    │   │                   ├── token
    │   │                   │   ├── AbstractToken.java
    │   │                   │   ├── CollectionToken.java
    │   │                   │   ├── CompoundToken.java
    │   │                   │   ├── ConsumingToken.java
    │   │                   │   ├── EnumToken.java
    │   │                   │   ├── PartialToken.java
    │   │                   │   ├── Rotatable.java
    │   │                   │   ├── RuleToken.java
    │   │                   │   ├── TerminalToken.java
    │   │                   │   └── VariantToken.java
    │   │                   └── util
    │   │                       ├── LoggerLayout.java
    │   │                       ├── ParserError.java
    │   │                       ├── SelfPopulatingBuffer.java
    │   │                       └── TextUtils.java
    │   └── resources
    │       └── log4j.properties
    └── test
        ├── java
        │   └── com
        │       └── onkiup
        │           └── linker
        │               └── parser
        │                   └── token
        │                       ├── AbstractTokenTest.java
        │                       ├── CollectionTokenTest.java
        │                       ├── CompoundTokenTest.java
        │                       ├── EnumTokenTest.java
        │                       ├── PartialTokenTest.java
        │                       └── RuleTokenTest.java
        └── resources
            └── log4j.properties
/.projrc:
--------------------------------------------------------------------------------
1 | proj-java
2 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 Dmitrii Chechetkin
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | #  Linker-parser: JavaCC on steroids.
2 |
3 | [](https://maven-badges.herokuapp.com/maven-central/com.onkiup/linker-parser)
4 |
5 | Linker-parser is an FSM-backed, non-recursive, top-down LL(k) parser that uses the Java language to define grammar rules. In other words, it accepts Java classes as grammar definitions and instantiates objects from those classes using the text it parses as a template.
6 |
7 | ## But... Why?
8 | I started this project out of frustration with javacc (*Java* *C*ompiler *C*ompiler), which was created decades ago and is very complicated to use. I also didn't want to deal with BNF notation, as Java can describe grammars on its own and I love Java :)
9 |
10 | ## Using in your projects
11 | 1. Make sure that your project has `maven-central` configured as an artifact source
12 | 2. For Gradle projects, add the following to your `build.gradle`:
13 | ```gradle
14 | compile group: 'com.onkiup', name: 'linker-parser', version: '0.8'
15 | ```
16 | For Maven projects, add the following to your `pom.xml`:
17 | ```xml
18 | <dependency>
19 |   <groupId>com.onkiup</groupId>
20 |   <artifactId>linker-parser</artifactId>
21 |   <version>0.8</version>
22 | </dependency>
23 | ```
24 |
25 | ## Quickstart guide
26 | Some basic examples are provided in this README file. More information is available in the [Quickstart Guide](https://github.com/chedim/linker-parser/wiki) in the project's wiki.
27 |
28 | ## Creating a grammar rule
29 | Linker-parser grammar rules are defined as Java classes using a set of simple conventions:
30 | * Each non-transient field of the class represents a token (another rule or a terminal);
31 | * Terminal tokens are defined as static String fields whose values are the terminals themselves;
32 | * Capture tokens are defined as String fields with a CapturePattern annotation;
33 | * Token repetitions are defined as array fields of the corresponding token type;
34 | * Alternatives can be defined as fields of an interface type - each class that implements the interface will be processed as an alternative token;
35 | * Repetitions are always greedy;
36 | * Repetition limits can be defined using the `CaptureLimit` annotation;
37 | * Optional fields are marked with the `OptionalToken` annotation;
38 |
39 |
40 | For example, a Java multiline comment can be defined as follows:
41 | ```java
42 | public class MultilineComment implements Rule {
43 |   private static final String OPEN_MARKER = "/*";
44 |
45 |   @CapturePattern(until = "\\*/")
46 |   private String comment;
47 |
48 |   private static final String CLOSE_MARKER = "*/";
49 | }
50 | ```
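
The repetition and optional-token conventions from the list above can be sketched the same way. The following rule is a hypothetical illustration (it is not part of this repository): it greedily collects consecutive `MultilineComment` tokens and then optionally captures trailing whitespace.

```java
// Hypothetical example -- not included in this repository.
public class CommentBlock implements Rule {
  // Repetition: an array field greedily consumes consecutive MultilineComment tokens.
  private MultilineComment[] comments;

  // Optional capture: parsing still succeeds when no trailing whitespace is present.
  @OptionalToken
  @CapturePattern("\\s+")
  private String trailingWhitespace;
}
```
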
51 | Other examples are available in the [Linker-Sail](https://github.com/dmitriic/lisa) project, most interestingly:
52 | - [BinaryOperatorStatement](https://github.com/dmitriic/lisa/blob/master/src/main/java/com/onkiup/linker/sail/operator/BinaryOperatorStatement.java)
53 | - [RuleInvocation](https://github.com/dmitriic/lisa/blob/master/src/main/java/com/onkiup/linker/sail/grammar/RuleInvocation.java)
54 |
55 | Here's also a screencast of a simple interaction with Lisa REPL that uses three differently configured instances of Linker-parser to parse command arguments, SAIL expressions and REPL commands:
56 | [](https://asciinema.org/a/UAaJ9lmJf3AhZ2iMls0ryMIlr)
57 |
58 | ## Creating a parser
59 | Invoke `TokenGrammar.forClass(Class<? extends Rule> rule)` with your root token class as the parameter.
60 |
61 | ## Parsing
62 | Invoking `TokenGrammar::parse(Reader source)` will read and parse the text from the source into a token and will return the resulting token as an object.
63 |
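A minimal end-to-end sketch combining both steps (the `MultilineComment` rule from above is assumed; generic signatures are abbreviated in this export, so the exact declarations may differ slightly):

```java
// Build a parser for the MultilineComment rule and parse a string with it.
TokenGrammar<MultilineComment> grammar = TokenGrammar.forClass(MultilineComment.class);
MultilineComment comment = grammar.parse("example.java", "/* hello world */");
System.out.println(comment.source());  // prints the text the token was parsed from
```
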
64 | ## Evaluating
65 | Linker-parser will invoke the `Rule::reevaluate` callback each time a token field is populated.
66 |
67 | [Linker-Sail](https://github.com/dmitriic/lisa) evaluator `Rule` definitions, for example, use that callback to test whether the token has been fully populated (`Rule::populated`), recalculate their result value, and push it either to their subscriber (the parent token) or, in the case of a variable declaration/assignment, into a shared context that propagates the value to any tokens subscribed to that variable.
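
A minimal sketch of such a callback on a hypothetical rule (not taken from Linker-Sail):

```java
// Hypothetical rule: recomputes its value whenever the parser fills one of its fields.
public class IntSum implements Rule {
  @CapturePattern("\\d+")
  private String left;

  private static final String PLUS = "+";

  @CapturePattern("\\d+")
  private String right;

  private int value;

  @Override
  public void reevaluate() {
    if (populated()) {  // Rule::populated, defined in Rule.java
      value = Integer.parseInt(left) + Integer.parseInt(right);
    }
  }
}
```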
68 |
69 | ## Left recursion
70 | Like any parser that tests leftmost variations first, Linker-parser is susceptible to infinite loops when processing alternatives that invoke themselves. Consider the following set of rules:
71 |
72 | ```java
73 | public interface Token extends Rule { }
74 | public class Test implements Token {
75 |   private Token token;
76 | }
77 | ```
78 | which is equivalent to:
79 | ```
80 | A -> X
81 | X -> A
82 | ```
83 | A classic LL(k) parser would not be able to handle these rules and would fail by falling into an infinite loop. Linker-parser deals with this situation by keeping a list of all alternative rules already tested at the current position and not re-testing rules from that list. The list is dropped every time the parser changes its current position.
84 |
85 | Alternatively, the order in which variations are tested can be manipulated by marking a variation with the `AdjustPriority` annotation. Variations are tested in ascending order of their priority level, so variations with smaller priorities are tested first.
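
A hypothetical sketch of how this could look, assuming `AdjustPriority` accepts an integer priority as its value:

```java
// Hypothetical variants: Addition has the smaller priority, so it is tested first
// and, after rotation, ends up closer to the AST root than Multiplication.
@AdjustPriority(10)
public class Addition implements Token { /* ... */ }

@AdjustPriority(20)
public class Multiplication implements Token { /* ... */ }
```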
86 |
87 | ## Token Rotation
88 | Left-recursive tokens pose another challenge when parsing nested statements like this one:
89 | ```
90 | 1 + 1 + 1
91 | ```
92 | The problem here is that, since all captures are greedy, any token that defines a binary operator statement would first consume the characters `1 + 1` and be marked as populated, leaving the unparsed characters ` + 1`, which no longer match a binary operator statement and are likely to cause the parser to throw a SyntaxError.
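
For illustration, a rule of the following shape (a hypothetical sketch, not taken from this repository) is exactly the kind of token that rotation rescues:

```java
// Hypothetical left-recursive rule: after consuming "1 + 1" it is fully populated;
// rotation lets the parser reuse it as the left operand for the remaining " + 1".
public class BinaryStatement implements Token {
  private Token left;
  private static final String OPERATOR = "+";
  private Token right;
}
```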
93 |
94 | To resolve this issue, Linker-parser will try to rotate the children of a failing PartialToken before discarding it. A child PartialToken can be rotated only if it satisfies all of these conditions:
95 | * The PartialToken is populated;
96 | * The resulting token can be assigned to the first field of the PartialToken.
97 |
98 | Token rotations are similar to the tree rotations used when balancing BSTs: rotating a PartialToken clones its state into a new PartialToken, resets its own state, and then advances to its second field by assigning the cloned PartialToken to its first field.
99 |
100 | Rotations can also be attempted on the root token when the parser unexpectedly hits the end of input.
101 |
102 | Token rotations are always performed before testing whether the token has any alternatives left, and a successful rotation prevents the parser from advancing to the next possible alternative (as the rotated token is itself an alternative).
103 |
104 | ## Token post-rotation
105 | If a populated token is deemed rotatable and it has a compatible child token with *lower* priority, Linker-parser will rotate the parent token so that the token with *higher* priority becomes a child of the token with *lower* priority. In other words, whenever the parser detects a rotatable combination of populated tokens, it makes sure that token priorities always increase from the AST root to the AST leaves. This ensures that mathematical expressions like `1 + 2 * 3` are parsed as:
106 | ```
107 | 1 + 2 * 3
108 | ^^^^^ leaf (2 * 3)
109 | ^^^^^^^ root (1 + leaf)
110 | ```
111 | and not as:
112 | ```
113 | 1 + 2 * 3
114 | ^^^^^ leaf (1 + 2)
115 | ^^^^^^^ root (leaf * 3)
116 | ```
117 | This allows evaluators based on Linker-parser to calculate the results of mathematical expressions without having to re-arrange parsed tokens into the proper order.
118 |
119 | ## Support
120 | For any questions or issues, please either open a GitHub issue in this project or tweet directly at [chedim](http://twitter.com/chedim) and I will do my best to help you. It would help me a lot if you included the definitions of your failing rules in the message ;-)
121 |
122 | ## Version History
123 | * 0.8
124 | - Major refactoring
125 | - Multiple optimizations implemented for `VariantToken`s
126 | * 0.7.1
127 | - Various bugfixes and enhancements
128 | - IgnoreCharacters.inherit now defaults to false
129 | - Adds option to skip trailing characters from a list
130 | - Adds option to create variant tokens ignored by parser
131 | - Logging improvements
132 | - Fixes VariantToken's pullback logic
133 | * 0.7
134 |   - Support for mathematical equations based on token priority
135 |   - Improved token position reporting
136 |
137 | * 0.6 - Major refactoring triggered by a design mistake in token rollback logic.
138 | Known bug: this version may not report column/line position correctly
139 | * 0.5 - Added left-recursion avoidance logic
140 | * 0.3.1 - transient fields are now ignored
141 | * 0.3
142 |   - major refactoring from a recursive to a stack-based algorithm
143 |   - support for token repetitions
144 | * 0.2.2 - first publicly available version
145 | * -100 - my first parser, used at Politico to parse and normalize HTML for articles migrated from capitalnewyork.com to politico.com (no source code of that parser was used here, only experience) :)
146 |
147 | ## Development Roadmap
148 | * Implement object pool for PartialTokens and TokenMatchers
149 | * Investigate possibility for multi-threaded VariantToken processing
150 | * Add support for Number terminals
151 | * Add support for Enum terminals
152 |
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | /*
2 | * This build file was auto generated by running the Gradle 'init' task
3 | * by 'chedim' at '7/5/19 1:04 PM' with Gradle 3.2.1
4 | *
5 | * This generated file contains a sample Java project to get you started.
6 | * For more details take a look at the Java Quickstart chapter in the Gradle
7 | * user guide available at https://docs.gradle.org/3.2.1/userguide/tutorial_java_projects.html
8 | */
9 |
10 | // Apply the java plugin to add support for Java
11 | plugins {
12 | id "maven-publish"
13 | id "eclipse"
14 | id "java"
15 | }
16 |
17 | project.group = 'com.onkiup'
18 | project.version = '0.8.1'
19 |
20 | compileJava {
21 | sourceCompatibility = '1.8'
22 | targetCompatibility = '1.8'
23 | }
24 |
25 | // In this section you declare where to find the dependencies of your project
26 | repositories {
27 | // Use 'jcenter' for resolving your dependencies.
28 | // You can declare any Maven/Ivy/file repository here.
29 | jcenter()
30 | mavenLocal()
31 | }
32 |
33 | // In this section you declare the dependencies for your production and test code
34 | dependencies {
35 | // The production code uses the SLF4J logging API at compile time
36 | compile 'org.slf4j:slf4j-api:+'
37 | compile 'org.slf4j:slf4j-log4j12:+'
38 | // https://mvnrepository.com/artifact/org.reflections/reflections
39 | compile group: 'org.reflections', name: 'reflections', version: '0.9.11'
40 | compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.12.1'
41 |
42 |
43 | // Declare the dependency for your favourite test framework you want to use in your tests.
44 | // TestNG is also supported by the Gradle Test task. Just change the
45 | // testCompile dependency to testCompile 'org.testng:testng:6.8.1' and add
46 | // 'test.useTestNG()' to your build script.
47 | testCompile 'junit:junit:4.12'
48 | testCompile group: 'org.mockito', name: 'mockito-core', version: '3.0.0'
49 | testCompile group: 'org.powermock', name: 'powermock-module-junit4', version: '2.0.2'
50 | testCompile group: 'org.powermock', name: 'powermock-api-mockito2', version: '2.0.2'
51 | }
52 |
53 | publishing {
54 | publications {
55 | maven(MavenPublication) {
56 | from components.java
57 | }
58 | }
59 | }
60 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chedim/linker-parser/1c63103ff9a1593d15df25b52ace42d0bbc2a380/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionBase=GRADLE_USER_HOME
2 | distributionPath=wrapper/dists
3 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.2.2-bin.zip
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | #
4 | # Copyright 2015 the original author or authors.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # https://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 |
19 | ##############################################################################
20 | ##
21 | ## Gradle start up script for UN*X
22 | ##
23 | ##############################################################################
24 |
25 | # Attempt to set APP_HOME
26 | # Resolve links: $0 may be a link
27 | PRG="$0"
28 | # Need this for relative symlinks.
29 | while [ -h "$PRG" ] ; do
30 | ls=`ls -ld "$PRG"`
31 | link=`expr "$ls" : '.*-> \(.*\)$'`
32 | if expr "$link" : '/.*' > /dev/null; then
33 | PRG="$link"
34 | else
35 | PRG=`dirname "$PRG"`"/$link"
36 | fi
37 | done
38 | SAVED="`pwd`"
39 | cd "`dirname \"$PRG\"`/" >/dev/null
40 | APP_HOME="`pwd -P`"
41 | cd "$SAVED" >/dev/null
42 |
43 | APP_NAME="Gradle"
44 | APP_BASE_NAME=`basename "$0"`
45 |
46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
47 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
48 |
49 | # Use the maximum available, or set MAX_FD != -1 to use that value.
50 | MAX_FD="maximum"
51 |
52 | warn () {
53 | echo "$*"
54 | }
55 |
56 | die () {
57 | echo
58 | echo "$*"
59 | echo
60 | exit 1
61 | }
62 |
63 | # OS specific support (must be 'true' or 'false').
64 | cygwin=false
65 | msys=false
66 | darwin=false
67 | nonstop=false
68 | case "`uname`" in
69 | CYGWIN* )
70 | cygwin=true
71 | ;;
72 | Darwin* )
73 | darwin=true
74 | ;;
75 | MINGW* )
76 | msys=true
77 | ;;
78 | NONSTOP* )
79 | nonstop=true
80 | ;;
81 | esac
82 |
83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
84 |
85 | # Determine the Java command to use to start the JVM.
86 | if [ -n "$JAVA_HOME" ] ; then
87 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
88 | # IBM's JDK on AIX uses strange locations for the executables
89 | JAVACMD="$JAVA_HOME/jre/sh/java"
90 | else
91 | JAVACMD="$JAVA_HOME/bin/java"
92 | fi
93 | if [ ! -x "$JAVACMD" ] ; then
94 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
95 |
96 | Please set the JAVA_HOME variable in your environment to match the
97 | location of your Java installation."
98 | fi
99 | else
100 | JAVACMD="java"
101 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
102 |
103 | Please set the JAVA_HOME variable in your environment to match the
104 | location of your Java installation."
105 | fi
106 |
107 | # Increase the maximum file descriptors if we can.
108 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
109 | MAX_FD_LIMIT=`ulimit -H -n`
110 | if [ $? -eq 0 ] ; then
111 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
112 | MAX_FD="$MAX_FD_LIMIT"
113 | fi
114 | ulimit -n $MAX_FD
115 | if [ $? -ne 0 ] ; then
116 | warn "Could not set maximum file descriptor limit: $MAX_FD"
117 | fi
118 | else
119 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
120 | fi
121 | fi
122 |
123 | # For Darwin, add options to specify how the application appears in the dock
124 | if $darwin; then
125 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
126 | fi
127 |
128 | # For Cygwin or MSYS, switch paths to Windows format before running java
129 | if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
130 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
131 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
132 | JAVACMD=`cygpath --unix "$JAVACMD"`
133 |
134 | # We build the pattern for arguments to be converted via cygpath
135 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
136 | SEP=""
137 | for dir in $ROOTDIRSRAW ; do
138 | ROOTDIRS="$ROOTDIRS$SEP$dir"
139 | SEP="|"
140 | done
141 | OURCYGPATTERN="(^($ROOTDIRS))"
142 | # Add a user-defined pattern to the cygpath arguments
143 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
144 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
145 | fi
146 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
147 | i=0
148 | for arg in "$@" ; do
149 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
150 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
151 |
152 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
153 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
154 | else
155 | eval `echo args$i`="\"$arg\""
156 | fi
157 | i=`expr $i + 1`
158 | done
159 | case $i in
160 | 0) set -- ;;
161 | 1) set -- "$args0" ;;
162 | 2) set -- "$args0" "$args1" ;;
163 | 3) set -- "$args0" "$args1" "$args2" ;;
164 | 4) set -- "$args0" "$args1" "$args2" "$args3" ;;
165 | 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
166 | 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
167 | 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
168 | 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
169 | 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
170 | esac
171 | fi
172 |
173 | # Escape application args
174 | save () {
175 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
176 | echo " "
177 | }
178 | APP_ARGS=`save "$@"`
179 |
180 | # Collect all arguments for the java command, following the shell quoting and substitution rules
181 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
182 |
183 | exec "$JAVACMD" "$@"
184 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @rem
2 | @rem Copyright 2015 the original author or authors.
3 | @rem
4 | @rem Licensed under the Apache License, Version 2.0 (the "License");
5 | @rem you may not use this file except in compliance with the License.
6 | @rem You may obtain a copy of the License at
7 | @rem
8 | @rem https://www.apache.org/licenses/LICENSE-2.0
9 | @rem
10 | @rem Unless required by applicable law or agreed to in writing, software
11 | @rem distributed under the License is distributed on an "AS IS" BASIS,
12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | @rem See the License for the specific language governing permissions and
14 | @rem limitations under the License.
15 | @rem
16 |
17 | @if "%DEBUG%" == "" @echo off
18 | @rem ##########################################################################
19 | @rem
20 | @rem Gradle startup script for Windows
21 | @rem
22 | @rem ##########################################################################
23 |
24 | @rem Set local scope for the variables with windows NT shell
25 | if "%OS%"=="Windows_NT" setlocal
26 |
27 | set DIRNAME=%~dp0
28 | if "%DIRNAME%" == "" set DIRNAME=.
29 | set APP_BASE_NAME=%~n0
30 | set APP_HOME=%DIRNAME%
31 |
32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter.
33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
34 |
35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
37 |
38 | @rem Find java.exe
39 | if defined JAVA_HOME goto findJavaFromJavaHome
40 |
41 | set JAVA_EXE=java.exe
42 | %JAVA_EXE% -version >NUL 2>&1
43 | if "%ERRORLEVEL%" == "0" goto init
44 |
45 | echo.
46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
47 | echo.
48 | echo Please set the JAVA_HOME variable in your environment to match the
49 | echo location of your Java installation.
50 |
51 | goto fail
52 |
53 | :findJavaFromJavaHome
54 | set JAVA_HOME=%JAVA_HOME:"=%
55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
56 |
57 | if exist "%JAVA_EXE%" goto init
58 |
59 | echo.
60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
61 | echo.
62 | echo Please set the JAVA_HOME variable in your environment to match the
63 | echo location of your Java installation.
64 |
65 | goto fail
66 |
67 | :init
68 | @rem Get command-line arguments, handling Windows variants
69 |
70 | if not "%OS%" == "Windows_NT" goto win9xME_args
71 |
72 | :win9xME_args
73 | @rem Slurp the command line arguments.
74 | set CMD_LINE_ARGS=
75 | set _SKIP=2
76 |
77 | :win9xME_args_slurp
78 | if "x%~1" == "x" goto execute
79 |
80 | set CMD_LINE_ARGS=%*
81 |
82 | :execute
83 | @rem Setup the command line
84 |
85 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
86 |
87 | @rem Execute Gradle
88 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
89 |
90 | :end
91 | @rem End local scope for the variables with windows NT shell
92 | if "%ERRORLEVEL%"=="0" goto mainEnd
93 |
94 | :fail
95 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
96 | rem the _cmd.exe /c_ return code!
97 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
98 | exit /b 1
99 |
100 | :mainEnd
101 | if "%OS%"=="Windows_NT" endlocal
102 |
103 | :omega
104 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | /*
2 | * This settings file was auto generated by the Gradle buildInit task
3 | * by 'chedim' at '7/5/19 1:04 PM' with Gradle 3.2.1
4 | *
5 | * The settings file is used to specify which projects to include in your build.
6 | * In a single project build this file can be empty or even removed.
7 | *
8 | * Detailed information about configuring a multi-project build in Gradle can be found
9 | * in the user guide at https://docs.gradle.org/3.2.1/userguide/multi_project_builds.html
10 | */
11 |
12 | /*
13 | // To declare projects as part of a multi-project build use the 'include' method
14 | include 'shared'
15 | include 'api'
16 | include 'services:webservice'
17 | */
18 |
19 | rootProject.name = 'linker-parser'
20 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/EnumRule.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser;
2 |
3 | public interface EnumRule {
4 |
5 | }
6 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/EvaluationError.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser;
2 |
3 | import com.onkiup.linker.parser.token.PartialToken;
4 |
5 | public class EvaluationError extends RuntimeException {
6 |
7 | public EvaluationError(PartialToken token, Object context, Exception cause) {
8 | super("Failed to evaluate token " + token.tokenType(), cause);
9 | }
10 | }
11 |
12 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/LinkerParser.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser;
2 |
3 | public class LinkerParser {
4 | private Class startProduction;
5 | }
6 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/NestingContext.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser;
2 |
3 | import java.util.HashMap;
4 |
5 | // since 0.2.2
6 | public class NestingContext {
7 | private SubContext context;
8 |
9 | public NestingContext() {
10 | this.context = new SubContext("<[ROOT]>");
11 | }
12 |
13 | public Object get(String key) {
14 | return context.member(key);
15 | }
16 |
17 | public void set(String name, Object value) {
18 | context.member(name, value);
19 | }
20 |
21 | public void push(String name) {
22 | context = new SubContext(name, context);
23 | }
24 |
25 | public void pop() {
26 | context = context.parent();
27 | }
28 |
29 | public SubContext dump() {
30 | return context;
31 | }
32 |
33 | private class SubContext {
34 | private final String name;
35 | private final SubContext parent;
36 | private final HashMap<String, Object> members = new HashMap<>();
37 |
38 | public SubContext(String name) {
39 | this(name, null);
40 | }
41 |
42 | public SubContext(String name, SubContext parent) {
43 | this.name = name;
44 | this.parent = parent;
45 | }
46 |
47 | public SubContext subContext(String name) {
48 | return new SubContext(name, this);
49 | }
50 |
51 | public String name() {
52 | return name;
53 | }
54 |
55 | public Object member(String name) {
56 | if (!members.containsKey(name)) {
57 | if (parent != null) {
58 | return parent.member(name);
59 | } else {
60 | throw new UnknownReference(name);
61 | }
62 | }
63 | return members.get(name);
64 | }
65 |
66 | public void member(String name, Object value) {
67 | members.put(name, value);
68 | }
69 |
70 | public boolean isMember(String name) {
71 | return members.containsKey(name);
72 | }
73 |
74 | public boolean isReferable(String name) {
75 | return isMember(name) || (parent != null && parent.isReferable(name));
76 | }
77 |
78 | public SubContext parent() {
79 | if (parent == null) {
80 | throw new RuntimeException("Unable to return parent context: already at root");
81 | }
82 | return parent;
83 | }
84 | }
85 | }
86 |
87 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/NullMatcher.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser;
2 |
3 | public class NullMatcher implements TokenMatcher {
4 |
5 | public NullMatcher() {
6 |
7 | }
8 |
9 | @Override
10 | public TokenTestResult apply(CharSequence buffer) {
11 | return TestResult.match(0, null);
12 | }
13 |
14 | @Override
15 | public String toString() {
16 | return "NullMatcher";
17 | }
18 | }
19 |
20 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/NumberMatcher.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser;
2 |
3 | import java.lang.reflect.Constructor;
4 | import java.lang.reflect.InvocationTargetException;
5 |
6 | @Deprecated
7 | public class NumberMatcher implements TokenMatcher {
8 | private Constructor<? extends Number> pattern;
9 | private Class<? extends Number> type;
10 |
11 | public NumberMatcher(Class<? extends Number> type) {
12 | try {
13 | this.type = type;
14 | this.pattern = type.getConstructor(String.class);
15 | } catch (NoSuchMethodException nse) {
16 | throw new RuntimeException("Failed to create number matcher for type '" + type.getCanonicalName() + "'", nse);
17 | }
18 | }
19 |
20 | @Override
21 | public TokenTestResult apply(CharSequence buffer) {
22 | try {
23 | pattern.newInstance(buffer.toString());
24 | return TestResult.matchContinue(buffer.length(), buffer.toString());
25 | } catch (InvocationTargetException nfe) {
26 | Throwable cause = nfe.getCause();
27 | if (!(cause instanceof NumberFormatException)) {
28 | return TestResult.fail();
29 | }
30 | if (cause.getMessage().indexOf("out of range") > -1){
31 | return TestResult.fail();
32 | }
33 | if (buffer.length() > 1) {
34 | // rolling back one character
35 | try {
36 | char drop = buffer.charAt(buffer.length() - 1);
37 | if (drop != '.') {
38 | Number token = pattern.newInstance(buffer.subSequence(0, buffer.length() - 1).toString());
39 | return TestResult.match(buffer.length() - 1, token);
40 | }
41 | } catch (InvocationTargetException nfe2) {
42 | if (nfe2.getCause() instanceof NumberFormatException) {
43 | // this is fine
44 | } else {
45 | throw new RuntimeException(nfe2.getCause());
46 | }
47 | } catch (Throwable e) {
48 | throw new RuntimeException(e);
49 | }
50 | }
51 | } catch (Throwable e) {
52 | throw new RuntimeException("Failed to test " + type + " against '" + buffer + "'", e);
53 | }
54 | return TestResult.fail();
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/ParserLocation.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser;
2 |
3 | import java.util.Objects;
4 |
5 | import org.slf4j.Logger;
6 | import org.slf4j.LoggerFactory;
7 |
8 | public class ParserLocation {
9 |
10 | private static final Logger logger = LoggerFactory.getLogger(ParserLocation.class);
11 | public static ParserLocation ZERO = new ParserLocation("unknown", 0,0,0);
12 |
13 | private final int line, column, position;
14 | private final String name;
15 |
16 | public static ParserLocation endOf(CharSequence text) {
17 | int lines = 0;
18 | int column = 0;
19 | for (int i = 0; i < text.length(); i++) {
20 | if (text.charAt(i) == '\n') {
21 | lines++;
22 | column = 0;
23 | } else {
24 | column++;
25 | }
26 | }
27 |
28 | return new ParserLocation(null, text.length(), lines, column);
29 | }
30 |
31 | public ParserLocation(String name, int position, int line, int column) {
32 | if (position < 0) {
33 | throw new IllegalArgumentException("Position cannot be negative");
34 | }
35 |
36 | if (line < 0) {
37 | throw new IllegalArgumentException("Line cannot be negative");
38 | }
39 |
40 | if (column < 0) {
41 | throw new IllegalArgumentException("Column cannot be negative");
42 | }
43 |
44 | this.name = name;
45 | this.position = position;
46 | this.line = line;
47 | this.column = column;
48 | }
49 |
50 | public String name() {
51 | return name;
52 | }
53 |
54 | public int position() {
55 | return position;
56 | }
57 |
58 | public int line() {
59 | return line;
60 | }
61 |
62 | public int column() {
63 | return column;
64 | }
65 |
66 | @Override
67 | public String toString() {
68 | return new StringBuilder()
69 | .append(name)
70 | .append(" - ")
71 | .append(line)
72 | .append(':')
73 | .append(column)
74 | .toString();
75 | }
76 |
77 |
78 | public ParserLocation advance(CharSequence source) {
79 | int position = this.position + source.length();
80 | int line = this.line;
81 | int column = this.column;
82 | for (int i = 0; i < source.length(); i++) {
83 | if (source.charAt(i) == '\n') {
84 | line++;
85 | column = 0;
86 | } else {
87 | column++;
88 | }
89 | }
90 |
91 | ParserLocation result = new ParserLocation(name, position, line, column);
92 | logger.debug("Advanced from {} to {} using chars: '{}'", this, result, source);
93 | return result;
94 | }
95 |
96 | public ParserLocation advance(char character) {
97 | if (character < 0) {
98 | return this;
99 | }
100 | int column = this.column + 1;
101 | int line = this.line;
102 | if (character == '\n') {
103 | line++;
104 | column = 0;
105 | }
106 | return new ParserLocation(name, position + 1, line, column);
107 | }
108 |
109 | public ParserLocation add(ParserLocation another) {
110 | if (another.name() != null && !Objects.equals(name(), another.name())) {
111 | throw new IllegalArgumentException("Unable to add parser location with a different name");
112 | }
113 | int anotherLines = another.line();
114 | int resultLine = line + anotherLines;
115 | int resultColumn = anotherLines == 0 ? column + another.column() : another.column();
116 | return new ParserLocation(name, position + another.position(), resultLine, resultColumn);
117 | }
118 | }
119 |
120 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/PatternMatcher.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser;
2 |
3 | import java.util.regex.Matcher;
4 | import java.util.regex.Pattern;
5 |
6 | import com.onkiup.linker.parser.annotation.CapturePattern;
7 |
8 | public class PatternMatcher implements TokenMatcher {
9 | private final Pattern pattern;
10 | private final String replacement;
11 | private final String until;
12 | private final Matcher matcher;
13 |
14 | public PatternMatcher(String pattern) {
15 | this.pattern = Pattern.compile(pattern);
16 | this.matcher = this.pattern.matcher("");
17 | this.replacement = "";
18 | this.until = "";
19 | }
20 |
21 | public PatternMatcher(CapturePattern pattern) {
22 | String matcherPattern = pattern.pattern();
23 | if (matcherPattern.length() == 0) {
24 | String value = pattern.value();
25 | if (value.length() == 0) {
26 | if (pattern.until().length() == 0) {
27 | throw new IllegalArgumentException("Either pattern or until must be specified");
28 | } else {
29 | matcherPattern = pattern.until();
30 | }
31 | } else {
32 | matcherPattern = value;
33 | }
34 | }
35 | this.replacement = pattern.replacement();
36 | this.until = pattern.until();
37 | this.pattern = Pattern.compile(matcherPattern);
38 | matcher = this.pattern.matcher("");
39 | }
40 |
41 | @Override
42 | public TokenTestResult apply(CharSequence buffer) {
43 | matcher.reset(buffer);
44 | boolean matches = matcher.matches(),
45 | lookingAt = matcher.lookingAt(),
46 | hitEnd = matcher.hitEnd();
47 |
48 | if (until.length() == 0) {
49 | if(hitEnd && lookingAt && matches) {
50 | return TestResult.matchContinue(buffer.length(), buffer.toString());
51 | } else if (lookingAt) {
52 | if (replacement != null && replacement.length() > 0) {
53 | StringBuffer result = new StringBuffer();
54 | matcher.appendReplacement(result, replacement);
55 | return TestResult.match(matcher.end(), result.toString());
56 | } else {
57 | String token = buffer.subSequence(0, matcher.end()).toString();
58 | return TestResult.match(matcher.end(), token);
59 | }
60 | } else {
61 | return TestResult.fail();
62 | }
63 | } else {
64 | if (matches || matcher.find()) {
65 | if (replacement != null && replacement.length() > 0) {
66 | String token = matcher.replaceAll(replacement);
67 | return TestResult.match(buffer.length(), token);
68 | } else if (lookingAt) {
69 | return TestResult.fail();
70 | } else {
71 | String token = buffer.subSequence(0, matcher.start()).toString();
72 | return TestResult.match(matcher.start(), token);
73 | }
74 | } else {
75 | return TestResult.matchContinue(buffer.length(), buffer.toString());
76 | }
77 | }
78 | }
79 |
80 | @Override
81 | public String toString() {
82 | return "PatternMatcher["+pattern+"]";
83 | }
84 | }
85 |
86 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/Rule.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser;
2 |
3 | import java.io.IOException;
4 | import java.io.InputStream;
5 | import java.io.ObjectInputStream;
6 | import java.io.ObjectOutputStream;
7 | import java.io.OutputStream;
8 | import java.util.Optional;
9 | import java.util.concurrent.ConcurrentHashMap;
10 |
11 | import com.onkiup.linker.parser.token.CollectionToken;
12 | import com.onkiup.linker.parser.token.PartialToken;
13 | import com.onkiup.linker.parser.token.RuleToken;
14 | import com.onkiup.linker.parser.token.VariantToken;
15 |
16 | // in 0.4:
17 | // - changed Metadata to hold PartialTokens instead of ParserLocations
18 | // in 0.2.2:
19 | // - added "C" type parameter
20 | // - made it implement Consumer
21 | /**
22 | * Main interface for all grammar definitions
23 | */
24 | public interface Rule {
25 |
26 | static class Metadata {
27 | private static ConcurrentHashMap<Rule, PartialToken> metadata = new ConcurrentHashMap<>();
28 |
29 | public static Optional<PartialToken> metadata(Rule rule) {
30 | return Optional.ofNullable(metadata.get(rule));
31 | }
32 |
33 | public static void metadata(Rule rule, PartialToken token) {
34 | metadata.put(rule, token);
35 | }
36 |
37 | static void remove(Rule rule) {
38 | metadata.remove(rule);
39 | }
40 | }
41 |
42 | static <X extends Rule> X load(InputStream is) throws IOException, ClassNotFoundException {
43 | ObjectInputStream ois = new ObjectInputStream(is);
44 | return load(ois);
45 | }
46 |
47 | static <X extends Rule> X load(ObjectInputStream ois) throws IOException, ClassNotFoundException {
48 | Object result = ois.readObject();
49 | if (result instanceof Rule) {
50 | return (X)result;
51 | }
52 | String resultType = result == null ? "null" : result.getClass().getName();
53 | throw new IllegalArgumentException(resultType + " is not a Rule");
54 | }
55 |
56 | /**
57 | * @return the parent token, or an empty Optional if this token is the root token
58 | */
59 | default Optional parent() {
60 | return Metadata.metadata(this)
61 | .map(meta -> {
62 | do {
63 | meta = (PartialToken) meta.parent().orElse(null);
64 | } while (!(meta instanceof RuleToken));
65 | return meta;
66 | })
67 | .flatMap(PartialToken::token);
68 | }
69 |
70 | /**
71 | * @return true if this token was successfully populated; false if parser is still working on some of the token's fields
72 | */
73 | default boolean populated() {
74 | return Metadata.metadata(this)
75 | .map(PartialToken::isPopulated)
76 | .orElse(false);
77 | }
78 |
79 | default void onPopulated() {
80 |
81 | }
82 |
83 | default Optional<PartialToken> metadata() {
84 | return Metadata.metadata(this);
85 | }
86 |
87 | default ParserLocation location() {
88 | return metadata().map(PartialToken::location).orElse(null);
89 | }
90 |
91 | /**
92 | * Reevaluation callback.
93 | * Called by parser every time it updates the token
94 | */
95 | default void reevaluate() {
96 |
97 | }
98 |
99 | /**
100 | * Invalidation callback
101 | * Called by the parser every time it detaches the token from the tree
102 | */
103 | default void invalidate() {
104 |
105 | }
106 |
107 | default CharSequence source() {
108 | return metadata().map(PartialToken::source).orElse(null);
109 | }
110 |
111 | default void store(OutputStream os) throws IOException {
112 | ObjectOutputStream oos = new ObjectOutputStream(os);
113 | store(oos);
114 | }
115 |
116 | default void store(ObjectOutputStream oos) throws IOException {
117 | oos.writeObject(this);
118 | }
119 | }
120 |
121 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/SyntaxError.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser;
2 |
3 | import java.lang.reflect.Field;
4 | import java.util.Arrays;
5 | import java.util.LinkedList;
6 | import java.util.List;
7 |
8 | import com.onkiup.linker.parser.token.PartialToken;
9 | import com.onkiup.linker.parser.token.RuleToken;
10 | import com.onkiup.linker.parser.token.VariantToken;
11 |
12 | public class SyntaxError extends RuntimeException {
13 |
14 | private PartialToken<?> expected;
15 | private CharSequence source;
16 | private String message;
17 |
18 | public SyntaxError(String message, PartialToken expected, CharSequence source) {
19 | this.message = message;
20 | this.expected = expected;
21 | this.source = source;
22 | }
23 |
24 | @Override
25 | public String toString() {
26 | StringBuilder result = new StringBuilder("Parser error:")
27 | .append(message)
28 | .append("\n")
29 | .append("\tExpected ")
30 | .append(expected)
31 | .append(" but got: '")
32 | .append(expected != null && source != null && expected.position() < source.length() ? source.subSequence(expected.position(), source.length()) : source)
33 | .append("'\n\tSource:\n\t\t")
34 | .append(source)
35 | .append("\n\n\tTraceback:\n");
36 |
37 | if (expected != null) {
38 | expected.path().stream()
39 | .map(PartialToken::toString)
40 | .map(text -> text.replaceAll("\n", "\n\t\t") + '\n')
41 | .forEach(result::append);
42 | } else {
43 | result.append("No traceback provided");
44 | }
45 |
46 | return result.toString();
47 | }
48 | }
49 |
50 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/TerminalMatcher.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser;
2 |
3 | public class TerminalMatcher implements TokenMatcher {
4 |
5 | private final String pattern;
6 | private final int patternLen;
7 |
8 | public TerminalMatcher(String pattern) {
9 | this.pattern = pattern;
10 | this.patternLen = pattern.length();
11 | }
12 |
13 | @Override
14 | public TokenTestResult apply(CharSequence buffer) {
15 | int bufferLen = buffer.length();
16 | int charsToCompare = Math.min(patternLen, bufferLen);
17 | for (int i = 0; i < charsToCompare; i++) {
18 | if (pattern.charAt(i) != buffer.charAt(i)) {
19 | return TestResult.fail();
20 | }
21 | }
22 |
23 | if (patternLen <= bufferLen) {
24 | return TestResult.match(patternLen, pattern);
25 | }
26 | return TestResult.continueNoMatch();
27 | }
28 |
29 | @Override
30 | public String toString() {
31 | return "TerminalMatcher["+pattern+"]";
32 | }
33 | }
34 |
35 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/TestResult.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser;
2 |
3 | public enum TestResult {
4 | FAIL, CONTINUE, MATCH_CONTINUE, MATCH;
5 |
6 | public static TokenTestResult fail() {
7 | return FAIL.token(0, null);
8 | }
9 |
10 | public static TokenTestResult matchContinue(int position, Object token) {
11 | return MATCH_CONTINUE.token(position, token);
12 | }
13 |
14 | public static TokenTestResult match(int position, Object token) {
15 | return MATCH.token(position, token);
16 | }
17 |
18 | public static TokenTestResult continueNoMatch() {
19 | return CONTINUE.token(0, null);
20 | }
21 |
22 | public TokenTestResult token(int length, Object token) {
23 | return new TokenTestResult(this, length, token);
24 | }
25 |
26 | }
27 |
28 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/TokenGrammar.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser;
2 |
3 | import java.io.IOException;
4 | import java.io.Reader;
5 | import java.io.StringReader;
6 | import java.lang.reflect.Modifier;
7 | import java.util.Enumeration;
8 | import java.util.Optional;
9 | import java.util.concurrent.atomic.AtomicInteger;
10 | import java.util.function.Supplier;
11 |
12 | import org.apache.log4j.Appender;
13 | import org.apache.log4j.Layout;
14 | import org.slf4j.Logger;
15 | import org.slf4j.LoggerFactory;
16 |
17 | import com.onkiup.linker.parser.token.CompoundToken;
18 | import com.onkiup.linker.parser.token.ConsumingToken;
19 | import com.onkiup.linker.parser.token.PartialToken;
20 | import com.onkiup.linker.parser.util.LoggerLayout;
21 | import com.onkiup.linker.parser.util.ParserError;
22 | import com.onkiup.linker.parser.util.SelfPopulatingBuffer;
23 |
24 | /**
25 | * Main class for parsing.
26 | * Please use {@link #forClass(Class)} to create instances
27 | * @param <X> the type of the object to parse into.
28 | */
29 | public class TokenGrammar<X extends Rule> {
30 | private static final Logger logger = LoggerFactory.getLogger("PARSER LOOP");
31 | private static final ThreadLocal BUFFER = new ThreadLocal<>();
32 | private Class type;
33 | private Class metaType;
34 | private String ignoreTrail;
35 |
36 | /**
37 | * Default constructor
38 | * @param type resulting token type
39 | * @return
40 | */
41 | public static TokenGrammar forClass(Class type) {
42 | return new TokenGrammar<>(type, null);
43 | }
44 |
45 | /**
46 | * For future handling of metatokens like comments
47 | * @param type resulting token type
48 | * @param metaType meta token type
49 | * @return
50 | */
51 | public static TokenGrammar forClass(Class type, Class metaType) {
52 | return new TokenGrammar<>(type, metaType);
53 | }
54 |
55 | protected TokenGrammar(Class type, Class metaType) {
56 | this.type = type;
57 | this.metaType = metaType;
58 | }
59 |
60 | /**
61 | * @param test Type to test
62 | * @return true if the type is not abstract
63 | */
64 | public static boolean isConcrete(Class> test) {
65 | return !(test.isInterface() || Modifier.isAbstract(test.getModifiers()));
66 | }
67 |
68 | /**
69 | * @return resulting token type
70 | */
71 | public Class getTokenType() {
72 | return type;
73 | }
74 |
75 | /**
76 | * Configures this parser to ignore trailing characters based on the input string
77 | * @param chars trailing characters to ignore
78 | */
79 | public void ignoreTrailCharacters(String chars) {
80 | this.ignoreTrail = chars;
81 | }
82 |
83 | /**
84 | * Parses a string into resulting token
85 | * @param source string to parse
86 | * @return parsed token
87 | * @throws SyntaxError
88 | */
89 | public X parse(String source) throws SyntaxError {
90 | return parse("unknown", source);
91 | }
92 |
93 | /**
94 | * Parses named string to resulting token
95 | * @param name name of the source that will be parsed
96 | * @param source contents to parse
97 | * @return parsed token
98 | * @throws SyntaxError
99 | */
100 | public X parse(String name, String source) throws SyntaxError {
101 | return parse(name, new StringReader(source));
102 | }
103 |
104 | /**
105 | * Parses contents from a Reader
106 | * @param source Reader to get contents from
107 | * @return parsed token
108 | * @throws SyntaxError
109 | */
110 | public X parse(Reader source) throws SyntaxError {
111 | return parse("unknown", source);
112 | }
113 |
114 | /**
115 | * Parses named text from a Reader
116 | * @param name name of the source
117 | * @param source reader to get contents from
118 | * @return parsed token
119 | * @throws SyntaxError
120 | */
121 | public X parse(String name, Reader source) throws SyntaxError {
122 | X result = tokenize(name, source);
123 | StringBuilder tail = new StringBuilder();
124 | try {
125 | int nextChar;
126 | while (-1 != (nextChar = source.read())) {
127 | tail.append((char) nextChar);
128 | }
129 | } catch (Exception e) {
130 | throw new RuntimeException(e);
131 | }
132 |
133 | if (tail.length() > 0) {
134 | throw new SyntaxError("Unmatched trailing symbols: '" + tail + "'", null, tail);
135 | }
136 | return result;
137 | }
138 |
139 | /**
140 | * Parses contents from the reader
141 | * @param source reader to get contents from
142 | * @return parsed token
143 | * @throws SyntaxError
144 | */
145 | public X tokenize(Reader source) throws SyntaxError {
146 | return tokenize("unknown", source);
147 | }
148 |
149 | /**
150 | * Main parser entrance
151 | * @param sourceName the name of the source that will be parsed
152 | * @param source reader to get contents from
153 | * @return parsed token
154 | * @throws SyntaxError
155 | */
156 | public X tokenize(String sourceName, Reader source) throws SyntaxError {
157 | AtomicInteger position = new AtomicInteger(0);
158 | SelfPopulatingBuffer buffer = null;
159 | try {
160 | buffer = new SelfPopulatingBuffer(sourceName, source);
161 | } catch (IOException e) {
162 | throw new RuntimeException("Failed to read source " + sourceName, e);
163 | }
164 | try {
165 | CompoundToken rootToken = CompoundToken.forClass(type, new ParserLocation(sourceName, 0, 0, 0));
166 | ConsumingToken.ConsumptionState.rootBuffer(rootToken, buffer);
167 | CompoundToken parent = rootToken;
168 | ConsumingToken<?> consumer = nextConsumingToken(parent).orElseThrow(() -> new ParserError("No possible consuming tokens found", parent));
169 | ConsumingToken<?> bestFail = consumer;
170 | setupLoggingLayouts(buffer, position::get);
171 | do {
172 | if (logger.isDebugEnabled()) {
173 | System.out.print("\u001B[H\u001Bc");
174 | System.out.println("|----------------------------------------------------------------------------------------");
175 | System.out.println(consumer.location().toString());
176 | System.out.println("|----------------------------------------------------------------------------------------");
177 | final ConsumingToken<?> currentConsumer = consumer;
178 | System.out.print(rootToken.dumpTree(token -> {
179 | StringBuilder result = new StringBuilder();
180 | if (token == currentConsumer) {
181 | result.append(">>> ");
182 | }
183 | return result
184 | .append(token.getClass().getSimpleName())
185 | .append("(").append(token.position()).append(" - ").append(token.end().position()).append(")")
186 | .append(" :: '")
187 | .append(LoggerLayout.sanitize(token.head(50)))
188 | .append("'");
189 | }));
190 | System.out.println("|----------------------------------------------------------------------------------------");
191 | System.out.println("|----------------------------------------------------------------------------------------");
192 | }
193 |
194 | ConsumingToken lastConsumer = consumer;
195 |
196 | processConsumingToken(consumer, position);
197 | boolean hitEnd = position.get() >= buffer.length();
198 |
199 | if (consumer.isFailed()) {
200 | logger.debug("!!! CONSUMER FAILED !!! {}", consumer.tag());
201 | bestFail = bestFail.position() > consumer.position() ? bestFail : consumer;
202 | consumer = processTraceback(consumer).orElse(null);
203 | } else if (consumer.isPopulated()) {
204 | logger.debug("consumer populated: {}", consumer.tag());
205 | consumer = onPopulated(consumer, hitEnd).orElse(null);
206 | } else if (hitEnd) {
207 | logger.debug("Hit end while processing {}", consumer.tag());
208 | consumer.atEnd();
209 | consumer = nextConsumingToken(consumer).orElse(null);
210 | }
211 |
212 | if (consumer != null) {
213 | position.set(consumer.end().position());
214 | }
215 |
216 | if (consumer == null || hitEnd) {
217 | logger.debug("attempting to recover; consumer == {}, buffer.length() == {}", consumer == null ? null : consumer.tag(), buffer.length());
218 | if (rootToken.isPopulated()) {
219 | if (!hitEnd) {
220 | if (!validateTrailingCharacters(buffer, position.get())) {
221 | consumer = processEarlyPopulation(rootToken, buffer, position.get()).orElseThrow(
222 | () -> new ParserError("Failed to recover from early population", lastConsumer));
223 | logger.debug("Recovered to {}", consumer.tag());
224 | } else {
225 | logger.debug("Successfully parsed (with valid trailing characters '{}') into: {}", buffer.subSequence(position.get(), buffer.length()), rootToken.tag());
226 | return rootToken.token().orElse(null);
227 | }
228 | } else {
229 | logger.debug("Perfectly parsed into: {}", rootToken.tag());
230 | return rootToken.token().get();
231 | }
232 | } else if (consumer != null) {
233 | logger.debug("Hit end and root token is not populated -- trying to traceback...");
234 | do {
235 | consumer.onFail();
236 | consumer = processTraceback(consumer).orElse(null);
237 | } while (buffer.length() == 0 && consumer != null);
238 |
239 | if (consumer != null && rootToken.isPopulated()) {
240 | consumer = processEarlyPopulation(rootToken, buffer, position.get()).orElseThrow(() ->
241 | new ParserError("Failed to recover from null consumer", lastConsumer));
242 | logger.debug("Recovered to {}", consumer.tag());
243 | } else if (rootToken.isPopulated()) {
244 | return rootToken.token().get();
245 | }
246 | } else {
247 | throw new SyntaxError("Advanced up to this token and then failed", bestFail, buffer);
248 | }
249 | }
250 |
251 | } while(consumer != null && position.get() < buffer.length());
252 |
253 | if (rootToken.isPopulated()) {
254 | return rootToken.token().orElse(null);
255 | }
256 |
257 | throw new SyntaxError("Unexpected end of input", consumer, buffer);
258 | } catch (SyntaxError se) {
259 | throw new RuntimeException("Syntax error at position " + position.get(), se);
260 | } catch (Exception e) {
261 | throw new RuntimeException(e);
262 | } finally {
263 | restoreLoggingLayouts();
264 | }
265 | }
266 |
267 | /**
268 | * Tries to recover from a situation where the parser populates the AST before the whole source is processed, by either
269 | * validating all trailing characters, rotating the root token, or tracing back to the next untested grammar junction
270 | * @param rootToken the root token of the failing AST
271 | * @param buffer a reference to a buffer with the source contents
272 | * @param position position in the buffer at which early population occurred
273 | * @return an empty optional if recovery failed, or the next consuming token after successful recovery
274 | */
275 | private Optional<ConsumingToken<?>> processEarlyPopulation(CompoundToken<?> rootToken, CharSequence buffer, int position) {
276 | logger.debug("Early population detected...");
277 | if (validateTrailingCharacters(buffer, position)) {
278 | logger.debug("Successfully parsed (with valid trailing characters '{}') into: {}", buffer, rootToken.tag());
279 | return Optional.empty();
280 | } else if (rootToken.rotatable()) {
281 | logger.debug("Rotating root token");
282 | rootToken.rotate();
283 | return nextConsumingToken(rootToken);
284 | } else if (rootToken.alternativesLeft()) {
285 | logger.info("Root token populated too early, failing it... (Buffer left: '{}')", LoggerLayout.sanitize(buffer.subSequence(position, buffer.length())));
286 | rootToken.traceback();
287 | return nextConsumingToken(rootToken);
288 | } else {
289 | return Optional.empty();
290 | }
291 | }
292 |
293 | /**
294 | * Propagates population event from child token to its parents until parent tokens report they are populated
295 | * @param child populated token
296 | * @param hitEnd a flag that indicates that parent tokens should not expect any future characters to be consumed and should be either populated or failed after receiving this event (not unfailed and unpopulated)
297 | * @return the next consuming token from the AST, or empty when all parents are populated or one of the parents failed with no alternatives left
298 | */
299 | private static Optional<ConsumingToken<?>> onPopulated(PartialToken<?> child, boolean hitEnd) {
300 | return child.parent().flatMap(parent -> {
301 | parent.onChildPopulated();
302 | if (hitEnd) {
303 | parent.atEnd();
304 | }
305 | if (parent.isPopulated()) {
306 | return onPopulated(parent, hitEnd);
307 | } else if (parent.isFailed()) {
308 | return processTraceback(parent);
309 | }
310 | return nextConsumingToken(parent);
311 | });
312 | }
313 |
314 | /**
315 | * Traces back from a failed token to its first parent with left alternatives, then advances to the next available alternative
316 | * @param child failed token
317 | * @return consuming token from the next available alternative or empty
318 | */
319 | private static Optional<ConsumingToken<?>> processTraceback(PartialToken<?> child) {
320 | return child.parent().flatMap(parent -> {
321 | if (child.isFailed()) {
322 | logger.debug("^^^--- TRACEBACK: {} <- {}", parent.tag(), child.tag());
323 | parent.onChildFailed();
324 | if (parent.isFailed() || parent.isPopulated()) {
325 | if (parent.isPopulated()) {
326 | return onPopulated(parent, false);
327 | }
328 | return processTraceback(parent);
329 | }
330 |
331 | if (!child.isOptional()) {
332 | parent.traceback();
333 | } else {
334 | child.traceback();
335 | }
336 | return firstUnfilledParent(parent).flatMap(TokenGrammar::nextConsumingToken);
337 | } else {
338 | logger.debug("|||--- TRACEBACK: (self) <- {}", child.tag());
339 | return firstUnfilledParent(child).flatMap(TokenGrammar::nextConsumingToken);
340 | }
341 | });
342 | }
343 |
344 | /**
345 | * traces back to a first unpopulated parent
346 | * @param child token to trace back from
347 | * @return the first unpopulated parent
348 | */
349 | private static Optional<CompoundToken<?>> firstUnfilledParent(PartialToken<?> child) {
350 | logger.debug("traversing back to first unfilled parent from {}", child.tag());
351 | if (child instanceof CompoundToken && !child.isFailed() && ((CompoundToken<?>) child).unfilledChildren() > 0) {
352 | logger.debug("<<<--- NEXT UNFILLED: (self) <--- {}", child.tag());
353 | return Optional.of((CompoundToken<?>) child);
354 | }
355 |
356 | return Optional.ofNullable(
357 | child.parent().flatMap(parent -> {
358 | logger.debug("parent: {}", parent.tag());
359 | parent.onChildPopulated();
360 | if (parent.isPopulated()) {
361 | logger.debug("^^^--- NEXT UNFILLED: {} <-?- {}", parent.tag(), child.tag());
362 | return firstUnfilledParent(parent);
363 | } else {
364 | logger.debug("<<<--- NEXT UNFILLED: {} <--- {}", parent.tag(), child.tag());
365 | return Optional.of(parent);
366 | }
367 | }).orElseGet(() -> {
368 | if (child instanceof CompoundToken) {
369 | logger.debug("XXX NO NEXT UNFILLED: XXX <--- {} (compound: true, unfilled children: {}", child, ((CompoundToken>)child).unfilledChildren());
370 | } else {
371 | logger.debug("XXX NO NEXT UNFILLED: XXX <--- {}", child);
372 | }
373 | return null;
374 | })
375 | );
376 | }
377 |
378 | /**
379 | * Advances to the next available consuming token after passed token; traces back any failed tokens it finds while advancing
380 | * @param from token to advance from
381 | * @return next consuming token
382 | */
383 | public static Optional<ConsumingToken<?>> nextConsumingToken(CompoundToken<?> from) {
384 | while (from != null) {
385 | PartialToken<?> child = from.nextChild().orElse(null);
386 | logger.debug("Searching for next consumer in child {}", child == null ? null : child.tag());
387 | if (child instanceof ConsumingToken) {
388 | logger.debug("--->>> NEXT CONSUMER: {} ---> {}", from.tag(), child.tag());
389 | return Optional.of((ConsumingToken<?>) child);
390 | } else if (child instanceof CompoundToken) {
391 | logger.debug("--->>> searching for next consumer in {} --> {}", from.tag(), child.tag());
392 | from = (CompoundToken)child;
393 | } else if (child == null) {
394 | CompoundToken<?> parent = from.parent().orElse(null);
395 | logger.debug("^^^--- searching for next consumer in parent {} <--- {}", parent == null ? null : parent.tag(), from.tag());
396 | if (from.isFailed()) {
397 | logger.debug("notifying parent about child failure");
398 | return processTraceback(from);
399 | } else if (from.isPopulated()) {
400 | logger.debug("notifying parent about child population");
401 | return onPopulated(from, false);
402 | } else {
403 | throw new ParserError("next child == null but from is neither failed nor populated", from);
404 | }
405 | } else {
406 | throw new RuntimeException("Unknown child type: " + child.getClass());
407 | }
408 | }
409 | logger.debug("---XXX NEXT CONSUMER: {} ---> XXX (not found)", from == null ? null : from.tag());
410 | return Optional.empty();
411 | }
412 |
413 | /**
414 | * Advances to the next available consuming token in the parent of provided consuming token
415 | * @see #nextConsumingToken(CompoundToken)
416 | * @param from consuming token to advance from
417 | * @return next consuming token in the AST
418 | */
419 | private static Optional<ConsumingToken<?>> nextConsumingToken(ConsumingToken<?> from) {
420 | return from.parent().flatMap(TokenGrammar::nextConsumingToken);
421 | }
422 |
423 | /**
424 | * Continuously calls ConsumingToken::consume until the method returns false and then adjusts parser position to
425 | * the end of the token
426 | * @param token token that should consume characters from parser's buffer
427 | * @param position parser position to update with consuming token's end position after the consumption is complete
428 | */
429 | private void processConsumingToken(ConsumingToken<?> token, AtomicInteger position) {
430 | while (token.consume()) {
431 | //position.incrementAndGet();
432 | }
433 | position.set(token.end().position());
434 | }
435 |
436 | /**
437 | * Validates that all characters in the provided buffer, starting at the provided position, belong to the preconfigured list of ignored trailing characters
438 | * @param buffer buffer to validate
439 | * @param from starting position
440 | * @return true if all characters starting from the provided position can be ignored, false otherwise
441 | */
442 | private boolean validateTrailingCharacters(CharSequence buffer, int from) {
443 | logger.debug("Validating trailing characters with pattern '{}' on '{}'", LoggerLayout.sanitize(ignoreTrail), LoggerLayout.sanitize(buffer.subSequence(from, buffer.length())));
444 | if (from >= buffer.length()) {
445 | logger.debug("no trailing chars!");
446 | return true;
447 | }
448 | char character;
449 | do {
450 | character = buffer.charAt(from++);
451 | } while (buffer.length() > from && ignoreTrail != null && ignoreTrail.indexOf(character) > -1);
452 | boolean result = from >= buffer.length();
453 | logger.debug("Only valid trailing chars left? {}; from == {}; buffer.length == {}", result, from, buffer.length());
454 | return result;
455 | }
456 |
457 | /**
458 | * Configures log4j appenders with custom {@link LoggerLayout}
459 | * @param buffer parser buffer to display in logs
460 | * @param position supplier of current parser position to display in logs
461 | */
462 | private void setupLoggingLayouts(CharSequence buffer, Supplier<Integer> position) {
463 | Enumeration<Appender> appenders = org.apache.log4j.Logger.getRootLogger().getAllAppenders();
464 | while(appenders.hasMoreElements()) {
465 | Appender appender = appenders.nextElement();
466 | LoggerLayout loggerLayout = new LoggerLayout(appender.getLayout(), buffer, position);
467 | appender.setLayout(loggerLayout);
468 | }
469 | }
470 |
471 | /**
472 | * Removes custom {@link LoggerLayout} configurations from log4j appenders
473 | */
474 | private void restoreLoggingLayouts() {
475 | Enumeration<Appender> appenders = org.apache.log4j.Logger.getRootLogger().getAllAppenders();
476 | while(appenders.hasMoreElements()) {
477 | Appender appender = appenders.nextElement();
478 | Layout layout = appender.getLayout();
479 | if (layout instanceof LoggerLayout) {
480 | LoggerLayout loggerLayout = (LoggerLayout) layout;
481 | appender.setLayout(loggerLayout.parent());
482 | }
483 | }
484 | }
485 | }
486 |
487 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/TokenMatcher.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser;
2 |
3 | import java.lang.reflect.Field;
4 | import java.lang.reflect.Modifier;
5 | import java.util.function.Function;
6 |
7 | import com.onkiup.linker.parser.annotation.CapturePattern;
8 | import com.onkiup.linker.parser.annotation.ContextAware;
9 | import com.onkiup.linker.parser.token.CompoundToken;
10 | import com.onkiup.linker.parser.util.LoggerLayout;
11 |
12 | @FunctionalInterface
13 | public interface TokenMatcher extends Function<CharSequence, TokenTestResult> {
14 |
15 | public static TokenMatcher forField(CompoundToken<?> parent, Field field) {
16 | Class type = field.getType();
17 | return forField(parent, field, type);
18 | }
19 |
20 | public static TokenMatcher forField(CompoundToken<?> parent, Field field, Class type) {
21 | if (type.isArray()) {
22 | throw new IllegalArgumentException("Array fields should be handled as ArrayTokens");
23 | } else if (Rule.class.isAssignableFrom(type)) {
24 | throw new IllegalArgumentException("Rule fields should be handled as RuleTokens");
25 | } else if (type != String.class) {
26 | throw new IllegalArgumentException("Unsupported field type: " + type);
27 | }
28 |
29 | try {
30 | field.setAccessible(true);
31 | if (Modifier.isStatic(field.getModifiers())) {
32 | String terminal = (String) field.get(null);
33 | if (terminal == null) {
34 | throw new IllegalArgumentException("null terminal");
35 | }
36 |
37 | return new TerminalMatcher(terminal);
38 | } else if (field.isAnnotationPresent(CapturePattern.class)) {
39 | CapturePattern pattern = field.getAnnotation(CapturePattern.class);
40 | return new PatternMatcher(pattern);
41 | } else if (field.isAnnotationPresent(ContextAware.class)) {
42 | ContextAware contextAware = field.getAnnotation(ContextAware.class);
43 | if (contextAware.matchField().length() > 0) {
44 | Object token = parent.token().orElseThrow(() -> new IllegalStateException("Parent token is null"));
45 | Field dependency = field.getDeclaringClass().getDeclaredField(contextAware.matchField());
46 | dependency.setAccessible(true);
47 | Object fieldValue = dependency.get(token);
48 | if (fieldValue instanceof String) {
49 | parent.log("Creating context-aware matcher for field $" + field.getName() + " to be equal to '" +
50 | LoggerLayout.sanitize(fieldValue) + "' value of target field $" + dependency.getName());
51 | return new TerminalMatcher((String)fieldValue);
52 | } else if (fieldValue == null) {
53 | parent.log("Creating context-aware null matcher for field $" + field.getName() + " to be equal to null value of target field $" + dependency.getName());
54 | return new NullMatcher();
55 | } else {
56 | throw new IllegalArgumentException("Unable to create field matcher for target field value of type '" + fieldValue.getClass().getName() + "'");
57 | }
58 | } else {
59 | throw new IllegalArgumentException("Misconfigured ContextAware annotation?");
60 | }
61 | } else {
62 | throw new IllegalArgumentException("Non-static String fields MUST have CapturePattern annotation");
63 | }
64 | } catch (Exception e) {
65 | throw new IllegalArgumentException("Failed to create matcher for field " + field, e);
66 | }
67 | }
68 |
69 | }
70 |
71 |
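Illustration only, not part of this repository: a hypothetical rule class showing the three field shapes that forField() can build matchers for: a static String constant (TerminalMatcher), a @CapturePattern String field (PatternMatcher), and a @ContextAware field matched against a previously populated field's value.

    import com.onkiup.linker.parser.Rule;
    import com.onkiup.linker.parser.annotation.CapturePattern;
    import com.onkiup.linker.parser.annotation.ContextAware;

    // Hypothetical grammar rule; all names are assumed.
    class RepeatedWord implements Rule {
      @CapturePattern("[a-zA-Z]+")             // non-static String + pattern -> PatternMatcher
      private String word;

      private static final String DASH = "-";  // static String constant -> TerminalMatcher

      @ContextAware(matchField = "word")       // repeats the captured value of $word -> TerminalMatcher (or NullMatcher)
      private String wordAgain;
    }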
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/TokenTestResult.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser;
2 |
3 | public class TokenTestResult<X> {
4 | private TestResult result;
5 | private X token;
6 | private int length;
7 |
8 | protected TokenTestResult(TestResult result, int length, X token) {
9 | this.result = result;
10 | this.length = length;
11 | this.token = token;
12 | }
13 |
14 | public TestResult getResult() {
15 | return result;
16 | }
17 |
18 | public void setTokenLength(int length) {
19 | this.length = length;
20 | }
21 |
22 | public int getTokenLength() {
23 | return length;
24 | }
25 |
26 | public X getToken() {
27 | return token;
28 | }
29 |
30 | public boolean isFailed() {
31 | return result == TestResult.FAIL;
32 | }
33 |
34 | public boolean isMatch() {
35 | return result == TestResult.MATCH;
36 | }
37 |
38 | public boolean isContinue() {
39 | return result == TestResult.CONTINUE;
40 | }
41 |
42 | public boolean isMatchContinue() {
43 | return result == TestResult.MATCH_CONTINUE;
44 | }
45 |
46 | @Override
47 | public String toString() {
48 | return "TestResult: " + result + " (" + token + ") ";
49 | }
50 | }
51 |
52 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/UnknownReference.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser;
2 |
3 | public class UnknownReference extends RuntimeException {
4 |
5 | public UnknownReference(String reference) {
6 | super(reference);
7 | }
8 | }
9 |
10 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/annotation/AdjustPriority.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.annotation;
2 |
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 |
8 | /**
9 | * Adjusts a concrete token's priority, which affects token testing order for grammar junctions ({@link com.onkiup.linker.parser.token.VariantToken})
10 | * (tokens are tested in ascending priority order: a token with priority 0 will be tested before a token with priority 9999)
11 | */
12 | @Target(ElementType.TYPE)
13 | @Retention(RetentionPolicy.RUNTIME)
14 | public @interface AdjustPriority {
15 | /**
16 | * @return value to which token's priority should be adjusted
17 | */
18 | int value();
19 |
20 | /**
21 | * @return boolean flag that indicates whether this priority adjustment should be propagated to parent token
22 | * (used primarily for arithmetical equations)
23 | */
24 | boolean propagate() default false;
25 | }
26 |
27 |
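A hedged usage sketch (hypothetical classes, not from this repository), assuming an Operator junction interface that extends Rule: the alternative with the lower value is tested first when the junction is resolved.

    import com.onkiup.linker.parser.annotation.AdjustPriority;

    @AdjustPriority(0)
    class PowerOperator implements Operator {   // tested before higher-priority alternatives
      private static final String MARKER = "^";
    }

    @AdjustPriority(100)
    class PlusOperator implements Operator {    // tested after PowerOperator
      private static final String MARKER = "+";
    }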
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/annotation/Alternatives.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.annotation;
2 |
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 |
8 | /**
9 | * Instructs {@link com.onkiup.linker.parser.token.VariantToken} instances to use provided list of alternatives instead of generating it using Reflections
10 | */
11 | @Target(ElementType.TYPE)
12 | @Retention(RetentionPolicy.RUNTIME)
13 | public @interface Alternatives {
14 | /**
15 | * @return an array with alternatives to use
16 | */
17 | Class[] value();
18 | }
19 |
20 |
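A minimal sketch (hypothetical classes, not from this repository) of a junction that lists its alternatives explicitly instead of letting the parser discover them with Reflections:

    import com.onkiup.linker.parser.Rule;
    import com.onkiup.linker.parser.annotation.Alternatives;

    // IfStatement, WhileStatement and ExpressionStatement are assumed concrete Rule classes.
    @Alternatives({IfStatement.class, WhileStatement.class, ExpressionStatement.class})
    interface Statement extends Rule {
    }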
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/annotation/CaptureLimit.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.annotation;
2 |
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 |
8 | /**
9 | * Limits number of elements (array members, characters, etc) to be captured into the annotated field
10 | */
11 | @Target(ElementType.FIELD)
12 | @Retention(RetentionPolicy.RUNTIME)
13 | public @interface CaptureLimit {
14 | /**
15 | * @return Minimum number of elements required for the token to be populated
16 | */
17 | int min() default 0;
18 |
19 | /**
20 | * @return Maximum number of elements allowed to be populated into the token for it to not fail
21 | */
22 | int max() default Integer.MAX_VALUE;
23 | }
24 |
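A hedged sketch (hypothetical rule, not from this repository): the array field below is only populated after at least one member is matched and stops accepting members after sixteen.

    import com.onkiup.linker.parser.Rule;
    import com.onkiup.linker.parser.annotation.CaptureLimit;

    class ArgumentList implements Rule {
      @CaptureLimit(min = 1, max = 16)
      private Argument[] arguments;   // Argument is an assumed Rule class
    }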
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/annotation/CapturePattern.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.annotation;
2 |
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 |
8 | /**
9 | * This annotation can be used on String fields to define a capturing terminal and its limits
10 | */
11 | @Retention(RetentionPolicy.RUNTIME)
12 | @Target(ElementType.FIELD)
13 | public @interface CapturePattern {
14 | /**
15 | * Accepts a regular expression that will be used to match characters from the input
16 | * If provided, the "until" parameter will be ignored
17 | */
18 | String value() default "";
19 |
20 | /**
21 | * Deprecated, use value instead
22 | */
23 | @Deprecated
24 | String pattern() default "";
25 |
26 | /**
27 | * Accepts a regular expression replacement parameter that can be used either to:
28 | * - transform the text matched by the "value()" regexp
29 | * - transform the text matched by the "until()" limiter and append the transformation result to the end of the captured text
30 | */
31 | String replacement() default "";
32 |
33 | /**
34 | * Accepts a regular expression that the parser will use as a stop token for the capturing process
35 | * If no "replacement()" is specified, the stop token matched by this expression will be discarded
36 | * If "replacement()" is specified, the stop token will be transformed using that value and appended to the captured text
37 | * Ignored if either "value()" or "pattern()" is not empty
38 | */
39 | String until() default "";
40 | }
41 |
42 |
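Two hypothetical rules (not from this repository) sketching the two main styles: a value() regexp that defines the capture itself, and an until() stop expression that captures everything before it.

    import com.onkiup.linker.parser.Rule;
    import com.onkiup.linker.parser.annotation.CapturePattern;

    class Identifier implements Rule {
      @CapturePattern("[a-zA-Z_][a-zA-Z0-9_]*")   // the capture is defined by the regexp
      private String name;
    }

    class LineComment implements Rule {
      private static final String MARKER = "//";

      @CapturePattern(until = "\n")               // capture up to the newline; the stop token is discarded
      private String text;
    }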
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/annotation/ContextAware.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.annotation;
2 |
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 |
8 | /**
9 | * Allows context-aware token matching
10 | */
11 | @Retention(RetentionPolicy.RUNTIME)
12 | @Target(ElementType.FIELD)
13 | public @interface ContextAware {
14 | /**
15 | * Instructs the parser to create a ConsumingToken for this field that will exactly match the value of a previously populated field
16 | * @return the name of the field whose value should be matched
17 | */
18 | String matchField() default "";
19 | }
20 |
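A hypothetical sketch (not from this repository) of matchField(): the closing name of an empty element must repeat exactly the value captured earlier into tagName.

    import com.onkiup.linker.parser.Rule;
    import com.onkiup.linker.parser.annotation.CapturePattern;
    import com.onkiup.linker.parser.annotation.ContextAware;

    class EmptyElement implements Rule {
      private static final String OPEN = "<";

      @CapturePattern("[a-zA-Z]+")
      private String tagName;

      private static final String CLOSE_OPEN = "></";

      @ContextAware(matchField = "tagName")   // must equal the already captured tagName
      private String closingName;

      private static final String END = ">";
    }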
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/annotation/CustomMatcher.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.annotation;
2 |
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 |
8 | import com.onkiup.linker.parser.TokenMatcher;
9 |
10 | @Deprecated
11 | @Retention(RetentionPolicy.RUNTIME)
12 | @Target(ElementType.FIELD)
13 | public @interface CustomMatcher {
14 | Class<? extends TokenMatcher> value();
15 | }
16 |
17 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/annotation/IgnoreCharacters.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.annotation;
2 |
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 |
8 | /**
9 | * Instructs the parser to ignore provided characters before matching every field of the rule
10 | */
11 | @Target(ElementType.TYPE)
12 | @Retention(RetentionPolicy.RUNTIME)
13 | public @interface IgnoreCharacters {
14 | /**
15 | * @return string with characters to ignore
16 | */
17 | String value() default "";
18 |
19 | /**
20 | * @return a flag that indicates that the parser should also use the ignored characters list from the parent token
21 | */
22 | boolean inherit() default false;
23 | }
24 |
25 |
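A hypothetical rule (not from this repository) that skips whitespace before each of its fields; nested rules can opt into the same list with inherit = true.

    import com.onkiup.linker.parser.Rule;
    import com.onkiup.linker.parser.annotation.CapturePattern;
    import com.onkiup.linker.parser.annotation.IgnoreCharacters;

    @IgnoreCharacters(" \t\r\n")   // ignored before every field of this rule
    class Assignment implements Rule {
      @CapturePattern("[a-z]+")
      private String variable;

      private static final String EQUALS = "=";

      @CapturePattern("[0-9]+")
      private String value;
    }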
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/annotation/IgnoreVariant.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.annotation;
2 |
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 |
8 | /**
9 | * Instructs the parser to completely ignore this class during parsing
10 | */
11 | @Retention(RetentionPolicy.RUNTIME)
12 | @Target(ElementType.TYPE)
13 | public @interface IgnoreVariant {
14 |
15 | }
16 |
17 |
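A small hypothetical sketch (not from this repository), assuming Expression is a junction interface that extends Rule: the abstract helper below is never offered as an alternative.

    import com.onkiup.linker.parser.annotation.IgnoreVariant;

    @IgnoreVariant
    abstract class AbstractExpression implements Expression {
      // shared helper code for concrete expressions would live here
    }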
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/annotation/MetaToken.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.annotation;
2 |
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 |
8 | /**
9 | * Marks a rule definition class as a MetaToken,
10 | * which causes VariantToken to "hide" matched instances of marked class by detaching them from the AST and
11 | * putting them into the next matched variant's metadata
12 | */
13 | @Target(ElementType.TYPE)
14 | @Retention(RetentionPolicy.RUNTIME)
15 | public @interface MetaToken {
16 | }
17 |
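A hypothetical sketch (not from this repository), assuming Comment is registered as an alternative of the relevant junction: matched comments are detached from the AST and stored in the next matched variant's metadata.

    import com.onkiup.linker.parser.Rule;
    import com.onkiup.linker.parser.annotation.CapturePattern;
    import com.onkiup.linker.parser.annotation.MetaToken;

    @MetaToken
    class Comment implements Rule {
      private static final String MARKER = "#";

      @CapturePattern(until = "\n")
      private String text;
    }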
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/annotation/OptionalToken.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.annotation;
2 |
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 |
8 | /**
9 | * Marks a field as optional
10 | */
11 | @Retention(RetentionPolicy.RUNTIME)
12 | @Target(ElementType.FIELD)
13 | public @interface OptionalToken {
14 | /**
15 | * Instructs the parser to treat this field as optional only if the position in the source
16 | * where the field would otherwise be matched contains the returned characters instead
17 | * @return characters to test for
18 | */
19 | String whenFollowedBy() default "";
20 |
21 | /**
22 | * Instructs the parser to treat this field as optional only when another (previously processed) field is null
23 | * @return the name of the other field to test
24 | */
25 | String whenFieldIsNull() default "";
26 |
27 | /**
28 | * Instructs the parser to treat this field as optional only when another (previously processed) field is not null
29 | * @return the name of the other field to test
30 | */
31 | String whenFieldNotNull() default "";
32 | }
33 |
34 |
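A hypothetical rule (not from this repository) showing whenFollowedBy: the value expression is treated as optional only when a semicolon appears where the expression would start, so both "return x;" and "return;" can be parsed.

    import com.onkiup.linker.parser.Rule;
    import com.onkiup.linker.parser.annotation.OptionalToken;

    class ReturnStatement implements Rule {
      private static final String KEYWORD = "return";

      @OptionalToken(whenFollowedBy = ";")   // optional only for the bare "return;" form
      private Expression value;              // Expression is an assumed Rule junction

      private static final String SEMICOLON = ";";
    }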
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/annotation/SkipIfFollowedBy.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.annotation;
2 |
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 |
8 | /**
9 | * @see OptionalToken#whenFollowedBy()
10 | * @implNote current behaviour is similar to {@link OptionalToken#whenFollowedBy()} (the parser first tries to process the field and tests whether it is optional only when matching fails), but this may change later (so that the parser skips the field completely when the optionality test succeeds, without trying to match it)
11 | */
12 | @Retention(RetentionPolicy.RUNTIME)
13 | @Target(ElementType.FIELD)
14 | public @interface SkipIfFollowedBy {
15 | String value();
16 | }
17 |
18 |
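A hypothetical sketch (not from this repository): the trailing separator is skipped when the closing parenthesis of an assumed enclosing argument list follows instead of a comma.

    import com.onkiup.linker.parser.Rule;
    import com.onkiup.linker.parser.annotation.CapturePattern;
    import com.onkiup.linker.parser.annotation.SkipIfFollowedBy;

    class Argument implements Rule {
      @CapturePattern("[a-z]+")
      private String name;

      @SkipIfFollowedBy(")")   // no separator after the last argument
      private static final String SEPARATOR = ",";
    }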
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/token/AbstractToken.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.token;
2 |
3 | import java.io.Serializable;
4 | import java.lang.reflect.Field;
5 | import java.util.LinkedList;
6 | import java.util.Optional;
7 |
8 | import org.slf4j.Logger;
9 | import org.slf4j.LoggerFactory;
10 |
11 | import com.onkiup.linker.parser.ParserLocation;
12 |
13 | /**
14 | * Common implementation for PartialTokens
15 | * @param <X> type of resulting token
16 | */
17 | public abstract class AbstractToken<X> implements PartialToken<X>, Serializable {
18 |
19 | private CompoundToken<?> parent;
20 | /**
21 | * The field for which this token was created
22 | */
23 | private Field field;
24 | /**
25 | * location of the first character matched with the token, and the location immediately after the last character matched with the token
26 | */
27 | private ParserLocation location, end;
28 | /**
29 | * Token status flags
30 | */
31 | private boolean optional, populated, failed;
32 | /**
33 | * Token optionality condition
34 | */
35 | private CharSequence optionalCondition;
36 | private transient Logger logger;
37 | private LinkedList metatokens = new LinkedList();
38 |
39 | /**
40 | * Main constructor
41 | * @param parent parent token
42 | * @param targetField field for which this token is being constructed
43 | * @param location token's location in parser's buffer
44 | */
45 | public AbstractToken(CompoundToken<?> parent, Field targetField, ParserLocation location) {
46 | this.parent = parent;
47 | this.field = targetField;
48 | this.location = location;
49 |
50 | readFlags(field);
51 | }
52 |
53 | /**
54 | * Sets optionality flag on this token: optional tokens don't propagate matching failures to their parents
55 | */
56 | @Override
57 | public void markOptional() {
58 | log("marked optional");
59 | this.optional = true;
60 | }
61 |
62 | /**
63 | * @return true if this token is optional
64 | */
65 | @Override
66 | public boolean isOptional() {
67 | return optional;
68 | }
69 |
70 | /**
71 | * @return true if this token was successfully populated
72 | */
73 | @Override
74 | public boolean isPopulated() {
75 | return populated;
76 | }
77 |
78 | /**
79 | * resets token population flag
80 | */
81 | @Override
82 | public void dropPopulated() {
83 | populated = false;
84 | log("Dropped population flag");
85 | }
86 |
87 | /**
88 | * @return true if this token did not match the source
89 | */
90 | @Override
91 | public boolean isFailed() {
92 | return failed;
93 | }
94 |
95 | /**
96 | * @return location of this token in parser's input
97 | */
98 | @Override
99 | public ParserLocation location() {
100 | return location;
101 | }
102 |
103 | /**
104 | * Sets location of this token in parser's input
105 | * @param location new token location
106 | */
107 | protected void location(ParserLocation location) {
108 | this.location = location;
109 | }
110 |
111 | /**
112 | * @return location that immediately follows the last character matched with this token
113 | */
114 | @Override
115 | public ParserLocation end() {
116 | return this.end == null ? this.location : this.end;
117 | }
118 |
119 | /**
120 | * @return parent token
121 | */
122 | @Override
123 | public Optional<CompoundToken<?>> parent() {
124 | return Optional.ofNullable(parent);
125 | }
126 |
127 | /**
128 | * @return the field for which this token was created
129 | */
130 | @Override
131 | public Optional<Field> targetField() {
132 | return Optional.ofNullable(field);
133 | }
134 |
135 | /**
136 | * Handler for token population event
137 | * @param end location after the last character matched with this token
138 | */
139 | @Override
140 | public void onPopulated(ParserLocation end) {
141 | log("populated up to {}", end.position());
142 | populated = true;
143 | failed = false;
144 | this.end = end;
145 | }
146 |
147 | /**
148 | * @return logger configured with information about matching token
149 | */
150 | @Override
151 | public Logger logger() {
152 | if (logger == null) {
153 | logger = LoggerFactory.getLogger(tag());
154 | }
155 | return logger;
156 | }
157 |
158 | /**
159 | * @return token identifier to be used in logs
160 | */
161 | @Override
162 | public String tag() {
163 | return targetField()
164 | .map(field -> field.getDeclaringClass().getName() + "$" + field.getName() + "(" + position() + ")")
165 | .orElseGet(super::toString);
166 | }
167 |
168 | @Override
169 | public String toString() {
170 | ParserLocation location = location();
171 | return targetField()
172 | .map(field -> String.format(
173 | "%50.50s || %s (%d:%d -- %d - %d)",
174 | head(50),
175 | field.getDeclaringClass().getName() + "$" + field.getName(),
176 | location.line(),
177 | location.column(),
178 | location.position(),
179 | end().position()
180 | ))
181 | .orElseGet(super::toString);
182 | }
183 |
184 | /**
185 | * reads optionality configuration for the field
186 | * @param field field to read the configuration from
187 | */
188 | protected void readFlags(Field field) {
189 | optionalCondition = PartialToken.getOptionalCondition(field).orElse(null);
190 | optional = optionalCondition == null && PartialToken.hasOptionalAnnotation(field);
191 | }
192 |
193 | /**
194 | * Handler that will be invoked upon token matching failure
195 | */
196 | @Override
197 | public void onFail() {
198 | failed = true;
199 | populated = false;
200 | end = location;
201 | PartialToken.super.onFail();
202 | }
203 |
204 | /**
205 | * @return characters that must appear in place of the token in order for the token to be considered optional
206 | */
207 | public Optional<CharSequence> optionalCondition() {
208 | return Optional.ofNullable(optionalCondition);
209 | }
210 |
211 | /**
212 | * Stores a metatoken under this token
213 | * @param metatoken object to store as metatoken
214 | */
215 | @Override
216 | public void addMetaToken(Object metatoken) {
217 | metatokens.add(metatoken);
218 | }
219 |
220 | /**
221 | * @return all metatokens for this token
222 | */
223 | @Override
224 | public LinkedList<?> metaTokens() {
225 | return metatokens;
226 | }
227 | }
228 |
229 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/token/CollectionToken.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.token;
2 |
3 | import java.io.Serializable;
4 | import java.lang.reflect.Array;
5 | import java.lang.reflect.Field;
6 | import java.util.Arrays;
7 | import java.util.LinkedList;
8 | import java.util.Optional;
9 | import java.util.function.Function;
10 |
11 | import com.onkiup.linker.parser.ParserLocation;
12 | import com.onkiup.linker.parser.annotation.CaptureLimit;
13 | import com.onkiup.linker.parser.util.ParserError;
14 |
15 | /**
16 | * Token that is used to populate array fields
17 | * @param <X> array class of the result token
18 | */
19 | public class CollectionToken<X> extends AbstractToken<X> implements CompoundToken<X>, Serializable {
20 | /**
21 | * the type of the resulting token
22 | */
23 | private Class fieldType;
24 | /**
25 | * the type of array members
26 | */
27 | private Class memberType;
28 | /**
29 | * tokens that represent matched array members
30 | */
31 | private LinkedList<PartialToken<?>> children = new LinkedList<>();
32 | /**
33 | * maximum number of array members to match
34 | */
35 | private CaptureLimit captureLimit;
36 | /**
37 | * the position immediately after the end of the last matched token (or CollectionToken's location if no tokens were matched)
38 | */
39 | private ParserLocation lastTokenEnd;
40 | /**
41 | * index of the next member to match
42 | */
43 | private int nextMember = 0;
44 |
45 | /**
46 | * Main constructor
47 | * @param parent parent token
48 | * @param field field for which this token is constructed
49 | * @param tokenType type of the resulting array
50 | * @param location location of the token in parser's buffer
51 | */
52 | public CollectionToken(CompoundToken parent, Field field, Class tokenType, ParserLocation location) {
53 | super(parent, field, location);
54 | lastTokenEnd = location;
55 | this.fieldType = tokenType;
56 | this.memberType = fieldType.getComponentType();
57 | if (field.isAnnotationPresent(CaptureLimit.class)) {
58 | captureLimit = field.getAnnotation(CaptureLimit.class);
59 | }
60 | }
61 |
62 | /**
63 | * Handler that is invoked every time an array member is populated
64 | */
65 | @Override
66 | public void onChildPopulated() {
67 | if (children.size() == 0) {
68 | throw new RuntimeException("OnChildPopulated called when there is no child!");
69 | }
70 | PartialToken<?> current = children.peekLast();
71 | if (current.isMetaToken()) {
72 | addMetaToken(current.token());
73 | children.pollLast();
74 | return;
75 | }
76 | log("Populated collection token #{}: {}", children.size(), current.tag());
77 | lastTokenEnd = current.end();
78 | if (captureLimit != null && children.size() >= captureLimit.max()) {
79 | onPopulated(lastTokenEnd);
80 | }
81 | }
82 |
83 | /**
84 | * Callback that handles end-of-input situation by marking the array populated or failed (if number of children is smaller than configured by {@link CaptureLimit} annotation on the target field)
85 | */
86 | @Override
87 | public void atEnd() {
88 | log("Force-populating...");
89 | if (captureLimit == null || children.size() >= captureLimit.min()) {
90 | onPopulated(lastTokenEnd);
91 | } else {
92 | onFail();
93 | }
94 | }
95 |
96 | /**
97 | * Callback that handles member token matching failure by marking the array populated or failed (if number of children is smaller than configured by {@link CaptureLimit} annotation on the target field)
98 | */
99 | @Override
100 | public void onChildFailed() {
101 | if (children.size() == 0) {
102 | throw new ParserError("No child is currently populated yet onChildFailed was called", this);
103 | }
104 |
105 | children.pollLast();
106 | lastTokenEnd = children.size() > 0 ? children.peekLast().end() : location();
107 | int size = children.size();
108 | if (captureLimit != null && size < captureLimit.min()) {
109 | log("Child failed and collection is underpopulated -- failing the whole collection");
110 | if (!alternativesLeft()) {
111 | onFail();
112 | } else {
113 | log("Not failing -- have some alternatives left");
114 | }
115 | } else {
116 | log("Child failed and collection has enough elements (or no lower limit) -- marking collection as populated");
117 | onPopulated(children.size() == 0 ? location() : lastTokenEnd);
118 | }
119 | }
120 |
121 | /**
122 | * @return the type of the resulting array (not its members!)
123 | */
124 | @Override
125 | public Class tokenType () {
126 | return fieldType;
127 | }
128 |
129 | /**
130 | * @return matched token
131 | */
132 | @Override
133 | public Optional token() {
134 | if (!isPopulated()) {
135 | return Optional.empty();
136 | }
137 |
138 | return Optional.of((X) children.stream()
139 | .map(PartialToken::token)
140 | .map(o -> o.orElse(null))
141 | .toArray(size -> newArray(memberType, size)));
142 | }
143 |
144 | /**
145 | * Creates an array of elements of given type
146 | * @param memberType type of the members of the resulting array
147 | * @param size the size of the array
148 | * @return created array
149 | */
150 | private static final <M> M[] newArray(Class<M> memberType, int size) {
151 | return (M[]) Array.newInstance(memberType, size);
152 | }
153 |
154 | @Override
155 | public String tag() {
156 | return fieldType.getName() + "[]("+position()+")";
157 | }
158 |
159 | @Override
160 | public String toString() {
161 | ParserLocation location = location();
162 | return String.format(
163 | "%50.50s || %s[%d] (%d:%d -- %d - %d)",
164 | head(50),
165 | fieldType.getName(),
166 | children.size(),
167 | location.line(),
168 | location.column(),
169 | location.position(),
170 | end().position()
171 | );
172 | }
173 |
174 | @Override
175 | public ParserLocation end() {
176 | return isFailed() ? location() : children.size() > 0 ? children.peekLast().end() : lastTokenEnd;
177 | }
178 |
179 | @Override
180 | public Optional<PartialToken<?>> nextChild() {
181 | if (isFailed() || isPopulated()) {
182 | return Optional.empty();
183 | }
184 |
185 | PartialToken<?> current = null;
186 | if (captureLimit == null || captureLimit.max() > children.size()) {
187 | if (nextMember == children.size()) {
188 | log("creating partial token for member#{}", children.size());
189 | current = PartialToken.forField(this, targetField().orElse(null), memberType, lastTokenEnd);
190 | children.add(current);
191 | } else if (nextMember < children.size()) {
192 | current = children.get(nextMember);
193 | }
194 | nextMember++;
195 | log("nextChild = [{}]{}", children.size(), current.tag());
196 | return Optional.of(current);
197 | }
198 | return Optional.empty();
199 | }
200 |
201 | @Override
202 | public PartialToken<?>[] children() {
203 | return children.toArray(new PartialToken[children.size()]);
204 | }
205 |
206 | @Override
207 | public int unfilledChildren() {
208 | if (isPopulated()) {
209 | return 0;
210 | }
211 | if (captureLimit == null) {
212 | return 1;
213 | }
214 |
215 | return captureLimit.max() - children.size();
216 | }
217 |
218 | @Override
219 | public int currentChild() {
220 | return children.size() - 1;
221 | }
222 |
223 | @Override
224 | public void nextChild(int newIndex) {
225 | nextMember = newIndex;
226 | log("next child set to {}/{} ({})", newIndex, children.size(), children.get(newIndex));
227 | }
228 |
229 | @Override
230 | public void children(PartialToken<?>[] children) {
231 | this.children = new LinkedList<>(Arrays.asList(children));
232 | }
233 |
234 | @Override
235 | public boolean alternativesLeft() {
236 | for (int i = children.size() - 1; i > -1; i--) {
237 | PartialToken<?> child = children.get(i);
238 | log("getting alternatives from [{}]{}", i, child.tag());
239 | if (child.alternativesLeft()) {
240 | log("found alternatives at [{}]{}", i, child.tag());
241 | return true;
242 | }
243 | }
244 |
245 | return false;
246 | }
247 |
248 | @Override
249 | public CharSequence dumpTree(int offset, CharSequence prefix, CharSequence childPrefix, Function<PartialToken<?>, CharSequence> formatter) {
250 | final int childOffset = offset + 1;
251 | String insideFormat = "%s ├─%s #%s : %s";
252 | String lastFormat = "%s └─%s #%s : %s";
253 | StringBuilder result = new StringBuilder(super.dumpTree(offset, prefix, childPrefix, formatter));
254 | if (!isPopulated()) {
255 | int last = children.size() - 1;
256 | for (int i = 0; i < children.size(); i++) {
257 | PartialToken<?> child = children.get(i);
258 | String format = i == last ? lastFormat : insideFormat;
259 | if (child == null) {
260 | result.append(String.format(format, childPrefix, "[N]", i, null));
261 | result.append('\n');
262 | } else if (child.isPopulated()) {
263 | result.append(child.dumpTree(childOffset, String.format(format, childPrefix, "[+]", i, ""),
264 | childPrefix + " │", formatter));
265 | } else if (child.isFailed()) {
266 | result.append(child.dumpTree(childOffset, String.format(format, childPrefix, "[F]", i, ""),
267 | childPrefix + " │", formatter));
268 | } else {
269 | result.append(child.dumpTree(childOffset, String.format(format, childPrefix, ">>>", i, ""),
270 | childPrefix + " │", formatter));
271 | }
272 | }
273 | }
274 | return result;
275 | }
276 | }
277 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/token/CompoundToken.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.token;
2 |
3 | import java.io.Serializable;
4 | import java.util.Arrays;
5 | import java.util.Objects;
6 | import java.util.Optional;
7 | import java.util.function.Consumer;
8 |
9 | import com.onkiup.linker.parser.ParserLocation;
10 | import com.onkiup.linker.parser.Rule;
11 | import com.onkiup.linker.parser.TokenGrammar;
12 |
13 | /**
14 | * Common interface for any tokens that can contain children tokens
15 | * @param <X> the type of resulting token
16 | */
17 | public interface CompoundToken<X> extends PartialToken<X>, Serializable {
18 |
19 | /**
20 | * Creates a new CompoundToken for the provided class
21 | * @param type class for which new token should be created
22 | * @param position position at which the token will be located in the parser's input
23 | * @return created CompoundToken
24 | */
25 | static CompoundToken<?> forClass(Class<? extends Rule> type, ParserLocation position) {
26 | if (position == null) {
27 | position = new ParserLocation(null, 0, 0, 0);
28 | }
29 | if (TokenGrammar.isConcrete(type)) {
30 | return new RuleToken(null, null, type, position);
31 | } else {
32 | return new VariantToken(null, null, type, position);
33 | }
34 | }
35 |
36 | /**
37 | * Callback method invoked every time a child token is successfully populated from parser's input
38 | */
39 | void onChildPopulated();
40 |
41 | /**
42 | * Callback method invoked every time a child token population fails
43 | */
44 | void onChildFailed();
45 |
46 | /**
47 | * @return the number of children left to be filled
48 | */
49 | int unfilledChildren();
50 |
51 | /**
52 | * @return true if token contains any unfilled children
53 | */
54 | default boolean hasUnfilledChildren() {
55 | return unfilledChildren() > 0;
56 | }
57 |
58 | /**
59 | * @return true when this token has only one unfilled child left
60 | */
61 | default boolean onlyOneUnfilledChildLeft() {
62 | return unfilledChildren() == 1;
63 | }
64 |
65 | /**
66 | * @return the number of currently populating child
67 | */
68 | int currentChild();
69 |
70 | /**
71 | * Forces the token to move its internal children pointer so that next populating child will be from the provided position
72 | * @param newIndex the position of the child to be populated next
73 | */
74 | void nextChild(int newIndex);
75 |
76 | /**
77 | * @return all previously created children, optionally excluding any possible future children
78 | */
79 | PartialToken<?>[] children();
80 |
81 | /**
82 | * @param children an array of PartialToken objects to replace current token's children with
83 | */
84 | void children(PartialToken<?>[] children);
85 |
86 | /**
87 | * @return the next child of this token to be populated
88 | */
89 | Optional<PartialToken<?>> nextChild();
90 |
91 | /**
92 | * Walks through the token's children in reverse order, removing them until it finds the first child
93 | * that still has alternatives left (alternativesLeft()); if no such child is found,
94 | * the token is marked as failed
95 | */
96 | default void traceback() {
97 | log("!!! TRACING BACK");
98 | PartialToken<?>[] children = children();
99 | if (children.length == 0) {
100 | invalidate();
101 | onFail();
102 | return;
103 | }
104 | int newSize = 0;
105 | for (int i = children.length - 1; i > -1; i--) {
106 | PartialToken<?> child = children[i];
107 | if (child == null) {
108 | continue;
109 | }
110 |
111 | child.traceback();
112 |
113 | if (!child.isFailed()) {
114 | log("found alternatives at child#{}", i);
115 | newSize = i + 1;
116 | break;
117 | }
118 |
119 | child.onFail();
120 | }
121 |
122 | if (newSize > 0) {
123 | PartialToken<?>[] newChildren = new PartialToken<?>[newSize];
124 | System.arraycopy(children, 0, newChildren, 0, newSize);
125 | children(newChildren);
126 | nextChild(newSize - 1);
127 | dropPopulated();
128 | log("Traced back to child #{}: {}", newSize - 1, newChildren[newSize-1].tag());
129 | } else {
130 | onFail();
131 | }
132 | }
133 |
134 | /**
135 | * @return true if this token or any of its children still have alternatives left
136 | */
137 | @Override
138 | default boolean alternativesLeft() {
139 | PartialToken<?>[] children = children();
140 | for (int i = 0; i < children.length; i++) {
141 | PartialToken<?> child = children[i];
142 | if (child != null) {
143 | log("getting alternatives from child#{} {}", i, child.tag());
144 | if (child.alternativesLeft()) {
145 | log("child#{} {} reported that it has alternatives", i, child.tag());
146 | return true;
147 | }
148 | }
149 | }
150 | return false;
151 | }
152 |
153 | @Override
154 | default int basePriority() {
155 | int result = PartialToken.super.basePriority();
156 |
157 | for (PartialToken child : children()) {
158 | if (child != null && child.propagatePriority()) {
159 | result += child.basePriority();
160 | }
161 | }
162 |
163 | return result;
164 | }
165 |
166 | /**
167 | * Rotates this token
168 | */
169 | default void rotate() {
170 | }
171 |
172 | /**
173 | * @return true when this token can be rotated
174 | */
175 | default boolean rotatable() {
176 | return false;
177 | }
178 |
179 | /**
180 | * Performs reverse-rotation on this token
181 | */
182 | default void unrotate() {
183 | }
184 |
185 | /**
186 | * Uses the given visitor to walk over the AST starting with this token
187 | * @param visitor token visitor
188 | */
189 | @Override
190 | default void visit(Consumer<PartialToken<?>> visitor) {
191 | Arrays.stream(children())
192 | .filter(Objects::nonNull)
193 | .forEach(child -> child.visit(visitor));
194 | PartialToken.super.visit(visitor);
195 | }
196 | }
197 |
198 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/token/ConsumingToken.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.token;
2 |
3 | import java.io.Serializable;
4 | import java.util.Optional;
5 | import java.util.concurrent.ConcurrentHashMap;
6 | import java.util.function.Function;
7 |
8 | import com.onkiup.linker.parser.ParserLocation;
9 | import com.onkiup.linker.parser.Rule;
10 | import com.onkiup.linker.parser.TestResult;
11 | import com.onkiup.linker.parser.TokenMatcher;
12 | import com.onkiup.linker.parser.TokenTestResult;
13 | import com.onkiup.linker.parser.util.LoggerLayout;
14 | import com.onkiup.linker.parser.util.ParserError;
15 |
16 | /**
17 | * Interface that represents any token that can advance the parser by consuming characters from the parser's buffer
18 | * @param <X> type of resulting token
19 | */
20 | public interface ConsumingToken<X> extends PartialToken<X>, Serializable {
21 |
22 | /**
23 | * Provides TokenMatcher for default consumption algorithm
24 | * @param matcher the matcher to use against consumed characters
25 | */
26 | default void setTokenMatcher(TokenMatcher matcher) {
27 | ConsumptionState.create(this, matcher);
28 | }
29 |
30 | /**
31 | * Callback method invoked upon partial or full match against consumed characters
32 | * @param token resulting token, as provided by previously configured matcher
33 | */
34 | void onConsumeSuccess(Object token);
35 |
36 | /**
37 | * Attempts to consume next character
38 | * @return true if consumption should continue
39 | */
40 | default boolean consume() {
41 | ConsumptionState consumption = ConsumptionState.of(this).orElseThrow(() -> new ParserError("No consumption state found (call ConsumingToken::setTokenMatcher to create it first)", this));
42 |
43 | boolean doNext = consumption.consume();
44 |
45 | TokenTestResult result = consumption.test();
46 |
47 | if (result.isFailed()) {
48 | log("failed; switching to lookahead mode");
49 | consumption.setFailed();
50 | consumption.lookahead();
51 | consumption.clear();
52 | onFail();
53 | return false;
54 | } else if (result.isMatch()) {
55 | consumption.trim(result.getTokenLength());
56 | log("matched at position {}", consumption.end().position());
57 | onConsumeSuccess(result.getToken());
58 | onPopulated(consumption.end());
59 | return false;
60 | }
61 |
62 | if (result.isMatchContinue()) {
63 | log("matched; continuing...");
64 | onConsumeSuccess(result.getToken());
65 | onPopulated(consumption.end());
66 | } else if (consumption.hitEnd()) {
67 | onFail();
68 | }
69 |
70 | return doNext;
71 | }
72 |
73 | @Override
74 | default void invalidate() {
75 | PartialToken.super.invalidate();
76 | ConsumptionState.discard(this);
77 | }
78 |
79 | @Override
80 | default void atEnd() {
81 | parent().ifPresent(CompoundToken::atEnd);
82 | }
83 |
84 | /**
85 | * A helper class that implements major parts of consumption algorithm and stores consumption states for ConsumingToken instances
86 | */
87 | class ConsumptionState {
88 | private static final ConcurrentHashMap<ConsumingToken<?>, ConsumptionState> states = new ConcurrentHashMap<>();
89 | private static final ConcurrentHashMap<PartialToken<?>, CharSequence> buffers = new ConcurrentHashMap<>();
90 |
91 | /**
92 | * Returns previously registered ConsumptionState for the given token
93 | * @param token token whose ConsumptionState should be returned
94 | * @return ConsumptionState instance for provided token
95 | */
96 | private static synchronized Optional<ConsumptionState> of(ConsumingToken<?> token) {
97 | return Optional.ofNullable(states.get(token));
98 | }
99 |
100 | /**
101 | * Creates and registers a new ConsumptionState for the given token
102 | * @param token token for which a new ConsumptionState should be created
103 | * @param tester function that will be used to match consumed characters
104 | */
105 | private static void create(ConsumingToken<?> token, Function<CharSequence, TokenTestResult> tester) {
106 | states.put(token, new ConsumptionState(token, tester));
107 | }
108 |
109 | /**
110 | * Registers the given ConsumptionState for the given token
111 | * @param token a token for which the given ConsumptionState should be registered
112 | * @param state ConsumptionState that should be registered for the given token
113 | */
114 | static void inject(ConsumingToken<?> token, ConsumptionState state) {
115 | states.put(token, state);
116 | }
117 |
118 | /**
119 | * Discards ConsumptionState registered for given token
120 | * @param token token whose ConsumptionState should be discarded
121 | */
122 | private static void discard(ConsumingToken token) {
123 | states.remove(token);
124 | }
125 |
126 | /**
127 | * List of characters to ignore at the beginning of consumption
128 | */
129 | private final String ignoredCharacters;
130 | /**
131 | * The tester used to match consumed characters
132 | */
133 | private final Function<CharSequence, TokenTestResult> tester;
134 | /**
135 | * Pointers to the buffer
136 | */
137 | private ParserLocation start, end, ignored;
138 | /**
139 | * Failure flag
140 | */
141 | private boolean failed;
142 | /**
143 | * the token with which this consumption is associated
144 | */
145 | private ConsumingToken token;
146 | /**
147 | * parser buffer
148 | */
149 | private CharSequence buffer;
150 |
151 | private boolean hitEnd = false;
152 |
153 | private ConsumptionState(ConsumingToken<?> token, Function<CharSequence, TokenTestResult> tester) {
154 | this.token = token;
155 | this.ignoredCharacters = token.ignoredCharacters();
156 | this.tester = tester;
157 | this.start = this.end = this.ignored = token.location();
158 | this.buffer = rootBuffer(token.root()).orElseThrow(() ->
159 | new RuntimeException("No root buffer registered for token " + token));
160 | }
161 |
162 | ConsumptionState(ParserLocation start, ParserLocation ignored, ParserLocation end) {
163 | this.ignoredCharacters = "";
164 | this.tester = null;
165 | this.start = start;
166 | this.end = end;
167 | this.ignored = ignored;
168 | }
169 |
170 | /**
171 | * Stores a parser buffer used to populate given AST root
172 | * @param rootToken AST root whose parser buffer should be stored
173 | * @param buffer buffer used to populate the given AST
174 | * @param <X> the type of the AST root token
175 | */
176 | public static <X> void rootBuffer(PartialToken<X> rootToken, CharSequence buffer) {
177 | buffers.put(rootToken, buffer);
178 | }
179 |
180 | /**
181 | * @param root root token of the AST
182 | * @return parser buffer used to populate given AST
183 | */
184 | public static Optional<CharSequence> rootBuffer(PartialToken<?> root) {
185 | return Optional.ofNullable(buffers.get(root));
186 | }
187 |
188 | /**
189 | * @return consumed characters minus ignored prefix
190 | */
191 | protected CharSequence buffer() {
192 | return buffer.subSequence(ignored.position(), end.position());
193 | }
194 |
195 | /**
196 | * @return consumed characters, including ignored prefix
197 | */
198 | protected CharSequence consumed() {
199 | return buffer.subSequence(start.position(), end.position());
200 | }
201 |
202 | /**
203 | * @return location in parser's buffer immediately after the last consumed character or consumption start location when no characters were consumed
204 | */
205 | protected ParserLocation end() {
206 | return end;
207 | }
208 |
209 | /**
210 | * @param character character to test
211 | * @return true if provided character should be ignored and no non-ignorable characters were previously consumed
212 | */
213 | private boolean ignored(int character) {
214 | return ignoredCharacters != null && ignoredCharacters.chars().anyMatch(ignored -> ignored == character);
215 | }
216 |
217 | /**
218 | * Consumes the character at consumption's end location and advances that location if the character was consumed
219 | * @return true if consumption process can proceed to the next character or false if the consumption should be stopped
220 | */
221 | private boolean consume() {
222 | if (end.position() < buffer.length()) {
223 | char consumed = buffer.charAt(end.position());
224 | end = end.advance(consumed);
225 | if (end.position() - ignored.position() < 2 && ignored(consumed)) {
226 | ignored = ignored.advance(consumed);
227 | token.log("Ignored '{}' ({} - {} - {})", LoggerLayout.sanitize(consumed), start.position(), ignored.position(), end.position());
228 | return true;
229 | }
230 | token.log("Consumed '{}' ({} - {} - {})", LoggerLayout.sanitize(consumed), start.position(), ignored.position(), end.position());
231 | return true;
232 | } else {
233 | hitEnd = true;
234 | }
235 | return false;
236 | }
237 |
238 | /**
239 | * @return true if consumption ended at parser buffer's end
240 | */
241 | private boolean hitEnd() {
242 | return hitEnd;
243 | }
244 |
245 | /**
246 | * tests configured TokenMatcher against consumed characters (excluding ignored prefix)
247 | * @return reported by TokenMatcher test result structure
248 | */
249 | private TokenTestResult test() {
250 | if (end.position() - ignored.position() == 0) {
251 | return TestResult.continueNoMatch();
252 | }
253 | return tester.apply(buffer());
254 | }
255 |
256 | /**
257 | * Marks this consumption as failed
258 | */
259 | private void setFailed() {
260 | failed = true;
261 | }
262 |
263 | /**
264 | * @return true if this consumption was marked as failed
265 | */
266 | private boolean failed() {
267 | return failed;
268 | }
269 |
270 | /**
271 | * adjusts internal buffer pointers so that the number of characters consumed after the ignored prefix equals the given number
272 | * @param size the new size for consumption buffer
273 | */
274 | private void trim(int size) {
275 | end = ignored.advance(buffer().subSequence(0, size));
276 | }
277 |
278 | /**
279 | * reinitializes internal buffer pointers
280 | */
281 | private void clear() {
282 | end = ignored = start;
283 | }
284 |
285 | /**
286 | * performs lookahead on consumption's token
287 | */
288 | private void lookahead() {
289 | token.lookahead(buffer, ignored.position());
290 | token.log("Lookahead complete");
291 | token.onFail();
292 | }
293 |
294 | }
295 | }
296 |
297 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/token/EnumToken.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.token;
2 |
3 | import java.io.Serializable;
4 | import java.lang.reflect.Field;
5 | import java.util.ArrayList;
6 | import java.util.HashMap;
7 | import java.util.List;
8 | import java.util.Map;
9 | import java.util.Optional;
10 |
11 | import com.onkiup.linker.parser.ParserLocation;
12 | import com.onkiup.linker.parser.PatternMatcher;
13 | import com.onkiup.linker.parser.Rule;
14 | import com.onkiup.linker.parser.TerminalMatcher;
15 | import com.onkiup.linker.parser.TestResult;
16 | import com.onkiup.linker.parser.TokenMatcher;
17 | import com.onkiup.linker.parser.TokenTestResult;
18 | import com.onkiup.linker.parser.annotation.CapturePattern;
19 | import com.onkiup.linker.parser.util.ParserError;
20 |
21 | /**
22 | * Partial token used to populate Enum fields
23 | * TODO: test
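   | *
   | * Illustrative sketch (hypothetical enum, not part of this codebase): constants are matched
   | * literally by their names unless annotated with {@link CapturePattern}, in which case the
   | * annotated pattern is used instead:
   | * <pre>{@code
   | * public enum Operator {
   | *   PLUS,   // matched literally as "PLUS"
   | *   MINUS   // matched literally as "MINUS"
   | * }
   | * }</pre>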
24 | * @param <X> the enum type whose constants this token matches
25 | */
26 | public class EnumToken<X extends Enum<X>> extends AbstractToken<X> implements ConsumingToken<X>, Serializable {
27 |
28 | private Class<X> enumType;
29 | private transient int nextVariant = 0;
30 | private transient Map<X, TokenMatcher> variants = new HashMap<>();
31 | private X token;
32 | private boolean failed, populated;
33 | private String ignoreCharacters;
34 |
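   | /**
   | * Creates a token for an enum field by building one matcher per enum constant:
   | * a {@link PatternMatcher} when the constant is annotated with {@link CapturePattern},
   | * otherwise a {@link TerminalMatcher} for the constant's string form.
   | * @param parent parent token
   | * @param field the enum field being populated
   | * @param enumType the enum class to match against
   | * @param location token position in parser's buffer
   | */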
35 | public EnumToken(CompoundToken parent, Field field, Class<X> enumType, ParserLocation location) {
36 | super(parent, field, location);
37 | this.enumType = enumType;
38 |
39 | for (X variant : enumType.getEnumConstants()) {
40 | try {
41 | Field variantField = enumType.getDeclaredField(variant.name());
42 | CapturePattern annotation = variantField.getAnnotation(CapturePattern.class);
43 | TokenMatcher matcher = annotation == null ? new TerminalMatcher(variant.toString()) : new PatternMatcher(annotation);
44 | variants.put(variant, matcher);
45 | } catch (ParserError pe) {
46 | throw pe;
47 | } catch (Exception e) {
48 | throw new ParserError("Failed to read field for enum value " + variant, this, e);
49 | }
50 | }
51 |
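   | // The matcher probes every remaining variant against the consumed characters: the first
   | // definite match wins, variants that can no longer match are dropped, and the test fails
   | // once no variants remain.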
52 | setTokenMatcher(buffer -> {
53 | if (variants.size() == 0) {
54 | return TestResult.fail();
55 | }
56 |
57 | List<X> failed = new ArrayList<>();
58 | for (Map.Entry<X, TokenMatcher> entry : variants.entrySet()) {
59 | TokenTestResult result = entry.getValue().apply(buffer);
60 | if (result.isMatch()) {
61 | return TestResult.match(result.getTokenLength(), entry.getKey());
62 | } else if (result.isFailed()) {
63 | failed.add(entry.getKey());
64 | }
65 | }
66 |
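   | // Drop variants that can no longer match so they are not re-tested on subsequent characters.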
67 | failed.forEach(variants::remove);
68 |
69 | if (variants.size() == 0) {
70 | return TestResult.fail();
71 | }
72 |
73 | return TestResult.continueNoMatch();
74 | });
75 | }
76 |
77 | @Override
78 | public Optional<X> token() {
79 | return Optional.ofNullable(token);
80 | }
81 |
82 | @Override
83 | public Class<X> tokenType() {
84 | return enumType;
85 | }
86 |
87 | @Override
88 | public void atEnd() {
89 |
90 | }
91 |
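   | /**
   | * Stores the enum constant reported by the matcher and marks this token as populated.
   | */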
92 | @Override
93 | public void onConsumeSuccess(Object value) {
94 | token = (X) value;
95 | this.populated = true;
96 | }
97 |
98 | }
99 |
100 |
--------------------------------------------------------------------------------
/src/main/java/com/onkiup/linker/parser/token/PartialToken.java:
--------------------------------------------------------------------------------
1 | package com.onkiup.linker.parser.token;
2 |
3 | import java.io.IOException;
4 | import java.io.InputStream;
5 | import java.io.ObjectInputStream;
6 | import java.io.ObjectOutputStream;
7 | import java.io.OutputStream;
8 | import java.io.Serializable;
9 | import java.lang.reflect.Field;
10 | import java.util.LinkedList;
11 | import java.util.List;
12 | import java.util.Objects;
13 | import java.util.Optional;
14 | import java.util.function.Consumer;
15 | import java.util.function.Function;
16 | import java.util.function.Predicate;
17 |
18 | import org.slf4j.Logger;
19 | import org.slf4j.LoggerFactory;
20 |
21 | import com.onkiup.linker.parser.ParserLocation;
22 | import com.onkiup.linker.parser.Rule;
23 | import com.onkiup.linker.parser.TokenGrammar;
24 | import com.onkiup.linker.parser.annotation.AdjustPriority;
25 | import com.onkiup.linker.parser.annotation.MetaToken;
26 | import com.onkiup.linker.parser.annotation.OptionalToken;
27 | import com.onkiup.linker.parser.annotation.SkipIfFollowedBy;
28 | import com.onkiup.linker.parser.util.LoggerLayout;
29 | import com.onkiup.linker.parser.util.ParserError;
30 | import com.onkiup.linker.parser.util.TextUtils;
31 |
32 | /**
33 | * Generic interface for structures used to populate tokens
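   | *
   | * Illustrative sketch (hypothetical grammar types, not part of this codebase) of how field types
   | * map to token implementations in {@link #forField(CompoundToken, Field, Class, ParserLocation)}:
   | * <pre>{@code
   | * public class Statement implements Rule {
   | *   private Keyword keyword;    // enum field   -> EnumToken
   | *   private Expression value;   // Rule field   -> RuleToken / VariantToken
   | *   private Expression[] args;  // array field  -> CollectionToken
   | *   private String terminator;  // String field -> TerminalToken
   | * }
   | * }</pre>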
34 | * @param <X> the type of value this token produces
35 | */
36 | public interface PartialToken<X> extends Serializable {
37 |
38 | /**
39 | * Creates a new PartialToken for provided field
40 | * @param parent parent token
41 | * @param field the field for which a new PartialToken should be created
42 | * @param position token position in parser's buffer
43 | * @return created PartialToken
44 | */
45 | static PartialToken forField(CompoundToken parent, Field field, ParserLocation position) {
46 |
47 | if (position == null) {
48 | throw new ParserError("Child token position cannot be null", parent);
49 | }
50 |
51 | Class<?> fieldType = field.getType();
52 | return forField(parent, field, fieldType, position);
53 | }
54 |
55 | /**
56 | * Creates a new PartialToken of given type for given field
57 | * @param parent parent token
58 | * @param field field for which a new PartialToken will be created
59 | * @param tokenType the type of the resulting token
60 | * @param position token position in parser's buffer
61 | * @param <X> the type to be populated by the created token
62 | * @return created PartialToken
63 | */
64 | static <X> PartialToken<X> forField(CompoundToken parent, Field field, Class<X> tokenType, ParserLocation position) {
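   | // Dispatch on the declared field type: arrays become CollectionTokens, Rule implementations
   | // become RuleTokens (or VariantTokens when the type is not concrete), String fields become
   | // TerminalTokens, and remaining enum types become EnumTokens.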
65 | if (tokenType.isArray()) {
66 | return new CollectionToken(parent, field, tokenType, position);
67 | } else if (Rule.class.isAssignableFrom(tokenType)) {
68 | if (!TokenGrammar.isConcrete(tokenType)) {
69 | return new VariantToken(parent, field, tokenType, position);
70 | } else {
71 | return new RuleToken(parent, field, tokenType, position);
72 | }
73 | } else if (tokenType == String.class) {
74 | return (PartialToken<X>) new TerminalToken(parent, field, tokenType, position);
75 | } else if (tokenType.isEnum()) {
76 | return (PartialToken<X>) new EnumToken(parent, field, tokenType, position);
77 | }
78 | throw new IllegalArgumentException("Unsupported field type: " + tokenType);
79 | }
80 |
81 | /**
82 | * Reads optionality condition for the field
83 | * @param field field to read optionality condition for
84 | * @return optionality condition or empty
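   | *
   | * For illustration (hypothetical fields): a field annotated {@code @SkipIfFollowedBy(",")} yields
   | * ",", a field annotated {@code @OptionalToken(whenFollowedBy = ";")} yields ";", and fields with
   | * neither annotation (or an empty condition) yield an empty Optional.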
85 | */
86 | static Optional<CharSequence> getOptionalCondition(Field field) {
87 | if (field == null) {
88 | return Optional.empty();
89 | }
90 | CharSequence result = null;
91 | if (field.isAnnotationPresent(OptionalToken.class)) {
92 | result = field.getAnnotation(OptionalToken.class).whenFollowedBy();
93 | } else if (field.isAnnotationPresent(SkipIfFollowedBy.class)) {
94 | result = field.getAnnotation(SkipIfFollowedBy.class).value();
95 | }
96 |
97 | return Optional.ofNullable(result == null || result.length() == 0 ? null : result);
98 | }
99 |
100 | /**
101 | * @param field field to check for presence of OptionalToken or SkipIfFollowedBy annotations
102 | * @return true if the field is annotated with either {@link OptionalToken} or {@link SkipIfFollowedBy}
103 | */
104 | static boolean hasOptionalAnnotation(Field field) {
105 | return field != null && (field.isAnnotationPresent(OptionalToken.class) || field.isAnnotationPresent(SkipIfFollowedBy.class));
106 | }
107 |
108 | /**
109 | * Context-aware field optionality checks
110 | * @param owner Context to check
111 | * @param field Field to check
112 | * @return true if the field should be optional in this context
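   | *
   | * For illustration (hypothetical field): {@code @OptionalToken(whenFieldIsNull = "label") private String target;}
   | * makes {@code target} optional only while the owner's {@code label} field is still null;
   | * {@code whenFieldNotNull} is the inverse check.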
113 | */
114 | static boolean isOptional(CompoundToken owner, Field field) {
115 | try {
116 | if (field.isAnnotationPresent(OptionalToken.class)) {
117 | owner.log("Performing context-aware optionality check for field ${}", field);
118 | OptionalToken optionalToken = field.getAnnotation(OptionalToken.class);
119 | boolean result;
120 | if (optionalToken.whenFieldIsNull().length() != 0) {
121 | final String fieldName = optionalToken.whenFieldIsNull();
122 | result = testContextField(owner, fieldName, Objects::isNull);
123 | owner.log("whenFieldIsNull({}) == {}", fieldName, result);
124 | } else if (optionalToken.whenFieldNotNull().length() != 0) {
125 | final String fieldName = optionalToken.whenFieldNotNull();
126 | result = testContextField(owner, fieldName, Objects::nonNull);
127 | owner.log("whenFieldNotNull({}) == {}", fieldName, result);
128 | } else {
129 | result = optionalToken.whenFollowedBy().length() == 0;
130 | owner.log("No context-aware conditions found; isOptional = {}", result);
131 | }
132 | return result;
133 | }
134 |
135 | return false;
136 | } catch (Exception e) {
137 | throw new ParserError("Failed to determine if field " + field.getName() + " should be optional", owner);
138 | }
139 | }
140 |
141 | /**
142 | * Tests the value of the named field in the given owner token against the provided predicate; used to resolve context-aware optionality conditions
143 | * @param owner the token that contains the field
144 | * @param fieldName the name of the field
145 | * @param tester Predicate to use in the test
146 | * @return test result
147 | * @throws NoSuchFieldException
148 | * @throws IllegalAccessException
149 | */
150 | static boolean testContextField(CompoundToken owner, String fieldName, Predicate