├── .git-blame-ignore-revs
├── .github
│   ├── CODEOWNERS
│   ├── dependabot.yml
│   └── workflows
│       ├── ci.yml
│       ├── clean.yml
│       └── format.yml
├── .gitignore
├── .sbtopts
├── .scala-steward.conf
├── .scalafmt.conf
├── CHANGELOG
├── LICENSE
├── README.rst
├── README_new.md
├── build.sbt
├── examples
│   └── src
│       ├── main
│       │   └── scala
│       │       └── org
│       │           └── parboiled2
│       │               └── examples
│       │                   ├── ABCParser.scala
│       │                   ├── Calculator1.scala
│       │                   ├── Calculator2.scala
│       │                   ├── CsvParser.scala
│       │                   └── JsonParser.scala
│       └── test
│           └── scala
│               └── org
│                   └── parboiled2
│                       └── examples
│                           ├── CsvParserSpec.scala
│                           └── JsonParserSpec.scala
├── jsonBenchmark
│   └── src
│       └── main
│           ├── resources
│           │   └── test.json
│           └── scala
│               └── org
│                   └── parboiled
│                       └── examples
│                           └── JsonParserBenchmark.scala
├── notes
│   ├── 2.0.0.markdown
│   ├── 2.0.1.markdown
│   └── about.markdown
├── parboiled-core
│   └── src
│       ├── main
│       │   ├── scala-2
│       │   │   └── org
│       │   │       └── parboiled2
│       │   │           ├── ParserMacros.scala
│       │   │           └── support
│       │   │               ├── HListable.scala
│       │   │               ├── OpTreeContext.scala
│       │   │               └── TailSwitch.scala
│       │   ├── scala-3
│       │   │   └── org
│       │   │       └── parboiled2
│       │   │           ├── ParserMacros.scala
│       │   │           └── support
│       │   │               ├── HListable.scala
│       │   │               ├── OpTreeContext.scala
│       │   │               └── TailSwitch.scala
│       │   └── scala
│       │       └── org
│       │           └── parboiled2
│       │               ├── CharPredicate.scala
│       │               ├── CharUtils.scala
│       │               ├── ErrorFormatter.scala
│       │               ├── ParseError.scala
│       │               ├── Parser.scala
│       │               ├── ParserInput.scala
│       │               ├── Rule.scala
│       │               ├── RuleDSLActions.scala
│       │               ├── RuleDSLBasics.scala
│       │               ├── RuleDSLCombinators.scala
│       │               ├── ValueStack.scala
│       │               ├── package.scala
│       │               ├── support
│       │               │   ├── ActionOpsSupport.scala
│       │               │   ├── Lifter.scala
│       │               │   ├── RunResult.scala
│       │               │   ├── Unpack.scala
│       │               │   ├── hlist
│       │               │   │   ├── hlists.scala
│       │               │   │   ├── ops
│       │               │   │   │   └── hlists.scala
│       │               │   │   └── syntax
│       │               │   │       └── hlists.scala
│       │               │   └── package.scala
│       │               └── util
│       │                   └── Base64.scala
│       └── test
│           └── scala
│               └── org
│                   └── parboiled2
│                       ├── ActionSpec.scala
│                       ├── BasicSpec.scala
│                       ├── CalculatorSpec.scala
│                       ├── CharPredicateSpec.scala
│                       ├── CharUtilsSpec.scala
│                       ├── CombinatorSpec.scala
│                       ├── CompileDurationTest.scala
│                       ├── CustomCharAndStringExpansionSpec.scala
│                       ├── CutSpec.scala
│                       ├── DSLTest.scala
│                       ├── DefaultArgumentsSpec.scala
│                       ├── ErrorReportingSpec.scala
│                       ├── MetaRuleSpec.scala
│                       ├── ReductionResetSpec.scala
│                       ├── ReductionTypeSpec.scala
│                       ├── RunSubParserSpec.scala
│                       ├── RunningSpec.scala
│                       ├── TailrecSpec.scala
│                       ├── TestParserSpec.scala
│                       ├── ValueStackSpec.scala
│                       ├── VarianceSpec.scala
│                       ├── nestedpackage
│                       │   └── AlienPackageParserSpec.scala
│                       └── util
│                           └── Base64Spec.scala
├── parboiled
│   └── src
│       ├── main
│       │   ├── scala-2
│       │   │   └── org
│       │   │       └── parboiled2
│       │   │           └── DynamicRuleDispatchMacro.scala
│       │   ├── scala-3
│       │   │   └── org
│       │   │       └── parboiled2
│       │   │           └── DynamicRuleDispatchMacro.scala
│       │   └── scala
│       │       └── org
│       │           └── parboiled2
│       │               ├── Base64Parsing.scala
│       │               ├── DynamicRuleDispatch.scala
│       │               └── StringBuilding.scala
│       └── test
│           └── scala
│               └── org
│                   └── parboiled2
│                       ├── Base64ParsingSpec.scala
│                       └── DynamicRuleDispatchSpec.scala
├── project
│   ├── ActionOpsBoilerplate.scala
│   ├── build.properties
│   └── plugins.sbt
└── scalaParser
    └── src
        ├── main
        │   └── scala
        │       └── scalaparser
        │           ├── CharacterClasses.scala
        │           ├── L0_Basics.scala
        │           ├── L1_KeywordsAndOperators.scala
        │           ├── L2_Identifiers.scala
        │           ├── L3_Literals.scala
        │           ├── L4_Types.scala
        │           ├── L4_Xml.scala
        │           ├── L5_Exprs.scala
        │           ├── L6_TopLevel.scala
        │           ├── ScalaParser.scala
        │           └── WhitespaceStringsAndChars.scala
        └── test
            └── scala
                └── scalaparser
                    ├── RealSourcesSpec.scala
                    └── SnippetSpec.scala
/.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # scalafmt 2 | cf11185179248cf195f8dd5b7c8a28fe1a4f393d 3 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @sirthias 2 | 3 | build.sbt @xuwei-k 4 | project/* @xuwei-k 5 | .github/workflows/* @xuwei-k 6 | .scalafmt.conf @xuwei-k 7 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by sbt-github-actions using the 2 | # githubWorkflowGenerate task. You should add and commit this file to 3 | # your git repository. It goes without saying that you shouldn't edit 4 | # this file by hand! Instead, if you wish to make changes, you should 5 | # change your sbt build configuration to revise the workflow description 6 | # to meet your needs, then regenerate this file. 7 | 8 | name: Continuous Integration 9 | 10 | on: 11 | pull_request: 12 | branches: ['**'] 13 | push: 14 | branches: ['**'] 15 | tags: [v*] 16 | 17 | env: 18 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 19 | 20 | jobs: 21 | build: 22 | name: Build and Test 23 | strategy: 24 | matrix: 25 | os: [ubuntu-latest, windows-latest] 26 | scala: [2.12.20, 2.13.16, 3.3.6] 27 | java: [temurin@8, temurin@21] 28 | runs-on: ${{ matrix.os }} 29 | steps: 30 | - name: Ignore line ending differences in git 31 | if: contains(runner.os, 'windows') 32 | shell: bash 33 | run: git config --global core.autocrlf false 34 | 35 | - name: Configure pagefile for Windows 36 | if: contains(runner.os, 'windows') 37 | uses: al-cheb/configure-pagefile-action@v1.4 38 | with: 39 | minimum-size: 4GB 40 | maximum-size: 16GB 41 | disk-root: 'C:' 42 | 43 | - name: Checkout current branch (full) 44 | uses: actions/checkout@v4 45 | with: 46 | fetch-depth: 0 47 | 48 | - name: Setup Java (temurin@8) 49 | if: matrix.java == 'temurin@8' 50 | uses: actions/setup-java@v4 51 | with: 52 | distribution: temurin 53 | java-version: 8 54 | cache: sbt 55 | 56 | - name: Setup Java (temurin@21) 57 | if: matrix.java == 'temurin@21' 58 | uses: actions/setup-java@v4 59 | with: 60 | distribution: temurin 61 | java-version: 21 62 | cache: sbt 63 | 64 | - name: Setup sbt 65 | uses: sbt/setup-sbt@v1 66 | 67 | - name: Check that workflows are up to date 68 | shell: bash 69 | run: sbt '++ ${{ matrix.scala }}' githubWorkflowCheck 70 | 71 | - name: Header check 72 | if: ${{ matrix.java=='temurin@8' && matrix.scala=='2.12.20' && matrix.os=='ubuntu-latest' }} 73 | shell: bash 74 | run: sbt '++ ${{ matrix.scala }}' headerCheckAll scalaParser/headerCheckAll 75 | 76 | - name: Build project 77 | shell: bash 78 | run: sbt '++ ${{ matrix.scala }}' Test/compile test 'scalaParser/testOnly scalaparser.SnippetSpec' 79 | 80 | - name: Compress target directories 81 | shell: bash 82 | run: tar cf targets.tar examples/target parboiled-core/.native/target target scalaParser/target jsonBenchmark/target parboiled/.js/target parboiled/.native/target parboiled/.jvm/target 
parboiled-core/.jvm/target parboiled-core/.js/target project/target 83 | 84 | - name: Upload target directories 85 | uses: actions/upload-artifact@v4 86 | with: 87 | name: target-${{ matrix.os }}-${{ matrix.scala }}-${{ matrix.java }} 88 | path: targets.tar 89 | 90 | publish: 91 | name: Publish Artifacts 92 | needs: [build] 93 | if: github.event_name != 'pull_request' && (startsWith(github.ref, 'refs/tags/v')) && (github.repository_owner == 'sirthias') 94 | strategy: 95 | matrix: 96 | os: [ubuntu-latest] 97 | scala: [3.3.6] 98 | java: [temurin@8] 99 | runs-on: ${{ matrix.os }} 100 | steps: 101 | - name: Ignore line ending differences in git 102 | if: contains(runner.os, 'windows') 103 | run: git config --global core.autocrlf false 104 | 105 | - name: Configure pagefile for Windows 106 | if: contains(runner.os, 'windows') 107 | uses: al-cheb/configure-pagefile-action@v1.4 108 | with: 109 | minimum-size: 4GB 110 | maximum-size: 16GB 111 | disk-root: 'C:' 112 | 113 | - name: Checkout current branch (full) 114 | uses: actions/checkout@v4 115 | with: 116 | fetch-depth: 0 117 | 118 | - name: Setup Java (temurin@8) 119 | if: matrix.java == 'temurin@8' 120 | uses: actions/setup-java@v4 121 | with: 122 | distribution: temurin 123 | java-version: 8 124 | cache: sbt 125 | 126 | - name: Setup Java (temurin@21) 127 | if: matrix.java == 'temurin@21' 128 | uses: actions/setup-java@v4 129 | with: 130 | distribution: temurin 131 | java-version: 21 132 | cache: sbt 133 | 134 | - name: Setup sbt 135 | uses: sbt/setup-sbt@v1 136 | 137 | - name: Download target directories (2.12.20) 138 | uses: actions/download-artifact@v4 139 | with: 140 | name: target-${{ matrix.os }}-2.12.20-${{ matrix.java }} 141 | 142 | - name: Inflate target directories (2.12.20) 143 | run: | 144 | tar xf targets.tar 145 | rm targets.tar 146 | 147 | - name: Download target directories (2.13.16) 148 | uses: actions/download-artifact@v4 149 | with: 150 | name: target-${{ matrix.os }}-2.13.16-${{ matrix.java }} 151 | 152 | - name: Inflate target directories (2.13.16) 153 | run: | 154 | tar xf targets.tar 155 | rm targets.tar 156 | 157 | - name: Download target directories (3.3.6) 158 | uses: actions/download-artifact@v4 159 | with: 160 | name: target-${{ matrix.os }}-3.3.6-${{ matrix.java }} 161 | 162 | - name: Inflate target directories (3.3.6) 163 | run: | 164 | tar xf targets.tar 165 | rm targets.tar 166 | 167 | - name: Publish project 168 | env: 169 | PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} 170 | PGP_SECRET: ${{ secrets.PGP_SECRET }} 171 | SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} 172 | SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} 173 | run: sbt ci-release 174 | -------------------------------------------------------------------------------- /.github/workflows/clean.yml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by sbt-github-actions using the 2 | # githubWorkflowGenerate task. You should add and commit this file to 3 | # your git repository. It goes without saying that you shouldn't edit 4 | # this file by hand! Instead, if you wish to make changes, you should 5 | # change your sbt build configuration to revise the workflow description 6 | # to meet your needs, then regenerate this file. 
7 | 8 | name: Clean 9 | 10 | on: push 11 | 12 | jobs: 13 | delete-artifacts: 14 | name: Delete Artifacts 15 | runs-on: ubuntu-latest 16 | env: 17 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 18 | steps: 19 | - name: Delete artifacts 20 | shell: bash {0} 21 | run: | 22 | # Customize those three lines with your repository and credentials: 23 | REPO=${GITHUB_API_URL}/repos/${{ github.repository }} 24 | 25 | # A shortcut to call GitHub API. 26 | ghapi() { curl --silent --location --user _:$GITHUB_TOKEN "$@"; } 27 | 28 | # A temporary file which receives HTTP response headers. 29 | TMPFILE=$(mktemp) 30 | 31 | # An associative array, key: artifact name, value: number of artifacts of that name. 32 | declare -A ARTCOUNT 33 | 34 | # Process all artifacts on this repository, loop on returned "pages". 35 | URL=$REPO/actions/artifacts 36 | while [[ -n "$URL" ]]; do 37 | 38 | # Get current page, get response headers in a temporary file. 39 | JSON=$(ghapi --dump-header $TMPFILE "$URL") 40 | 41 | # Get URL of next page. Will be empty if we are at the last page. 42 | URL=$(grep '^Link:' "$TMPFILE" | tr ',' '\n' | grep 'rel="next"' | head -1 | sed -e 's/.*.*//') 43 | rm -f $TMPFILE 44 | 45 | # Number of artifacts on this page: 46 | COUNT=$(( $(jq <<<$JSON -r '.artifacts | length') )) 47 | 48 | # Loop on all artifacts on this page. 49 | for ((i=0; $i < $COUNT; i++)); do 50 | 51 | # Get name of artifact and count instances of this name. 52 | name=$(jq <<<$JSON -r ".artifacts[$i].name?") 53 | ARTCOUNT[$name]=$(( $(( ${ARTCOUNT[$name]} )) + 1)) 54 | 55 | id=$(jq <<<$JSON -r ".artifacts[$i].id?") 56 | size=$(( $(jq <<<$JSON -r ".artifacts[$i].size_in_bytes?") )) 57 | printf "Deleting '%s' #%d, %'d bytes\n" $name ${ARTCOUNT[$name]} $size 58 | ghapi -X DELETE $REPO/actions/artifacts/$id 59 | done 60 | done 61 | -------------------------------------------------------------------------------- /.github/workflows/format.yml: -------------------------------------------------------------------------------- 1 | name: Scalafmt 2 | 3 | permissions: read-all 4 | 5 | on: 6 | pull_request: 7 | branches: ['**'] 8 | 9 | jobs: 10 | build: 11 | name: Code is formatted 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Checkout current branch (full) 15 | uses: actions/checkout@v4 16 | with: 17 | fetch-depth: 0 18 | persist-credentials: false 19 | 20 | - name: Check project is formatted 21 | uses: jrouly/scalafmt-native-action@v3 22 | with: 23 | arguments: '--list --mode diff-ref=origin/master' 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Only put project-/build-tool-specific things here! 2 | # Everything specific to your particular dev machine 3 | # (like OS- or IDE-related artifacts) should go into 4 | # a personal, global .gitignore file! 
5 | # (See https://help.github.com/articles/ignoring-files/ for details) 6 | 7 | index.js 8 | 9 | ########### SBT ########### 10 | .bsp 11 | lib_managed 12 | target 13 | project/boot 14 | project/build/target 15 | project/plugins/lib_managed 16 | project/plugins/src_managed 17 | project/plugins/target 18 | project/plugins/project/build.properties 19 | buildinfo.properties 20 | proguard-sbt.txt 21 | 22 | ########### Xcode ########### 23 | Pods/ 24 | build/ 25 | *.pbxuser 26 | !default.pbxuser 27 | *.mode1v3 28 | !default.mode1v3 29 | *.mode2v3 30 | !default.mode2v3 31 | *.perspectivev3 32 | !default.perspectivev3 33 | xcuserdata 34 | *.xccheckout 35 | *.moved-aside 36 | DerivedData 37 | *.hmap 38 | *.ipa 39 | *.xcuserstate 40 | project.xcworkspace 41 | 42 | ########### Android ########### 43 | build/ 44 | .gradle 45 | local.properties 46 | 47 | ########### node.js ########### 48 | node_modules/ 49 | npm-debug.log 50 | yarn-error.log 51 | 52 | ########### BUCK ########### 53 | buck-out/ 54 | \.buckd/ 55 | *.keystore 56 | 57 | ########### fastlane ########### 58 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the 59 | # screenshots whenever they are needed. 60 | # For more information about the recommended setup visit: 61 | # https://docs.fastlane.tools/best-practices/source-control/ 62 | */fastlane/report.xml 63 | */fastlane/Preview.html 64 | */fastlane/screenshots 65 | 66 | ########### Bundle artifact ########### 67 | *.jsbundle 68 | 69 | ########### Intellij ########### 70 | .idea 71 | -------------------------------------------------------------------------------- /.sbtopts: -------------------------------------------------------------------------------- 1 | -J-Xms6G 2 | -J-Xmx6G 3 | -J-Xss6M 4 | -J-XX:+UseG1GC 5 | -------------------------------------------------------------------------------- /.scala-steward.conf: -------------------------------------------------------------------------------- 1 | scalafmt.runAfterUpgrading = false 2 | 3 | updates.pin = [ 4 | { groupId = "org.scala-lang", artifactId = "scala3-library" , version = "3.3." } 5 | { groupId = "org.scala-lang", artifactId = "scala3-library_sjs1" , version = "3.3." } 6 | ] 7 | -------------------------------------------------------------------------------- /.scalafmt.conf: -------------------------------------------------------------------------------- 1 | version = 3.9.7 2 | 3 | runner.dialect = scala213source3 4 | project.layout = standardConvention 5 | project.git = true 6 | 7 | docstrings.style = keep 8 | 9 | maxColumn = 120 10 | assumeStandardLibraryStripMargin = true 11 | align.preset = more 12 | align.allowOverflow = true 13 | 14 | binPack.parentConstructors = true 15 | runner.optimizer.forceConfigStyleOnOffset = -1 16 | 17 | indentOperator.preset = spray 18 | 19 | rewrite.rules = [RedundantBraces, RedundantParens, SortImports, SortModifiers, PreferCurlyFors] 20 | rewrite.redundantBraces.maxLines = 5 21 | rewrite.scala3.convertToNewSyntax = true 22 | rewrite.scala3.newSyntax.control = false 23 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | This software is licensed under the Apache 2 license, quoted below. 2 | 3 | Copyright © 2009-2013 Mathias Doenitz 4 | Copyright © 2013 Alexander Myltsev 5 | 6 | Licensed under the Apache License, Version 2.0 (the "License"); you may not 7 | use this file except in compliance with the License. 
You may obtain a copy of 8 | the License at 9 | 10 | [http://www.apache.org/licenses/LICENSE-2.0] 11 | 12 | Unless required by applicable law or agreed to in writing, software 13 | distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 14 | WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 15 | License for the specific language governing permissions and limitations under 16 | the License. -------------------------------------------------------------------------------- /examples/src/main/scala/org/parboiled2/examples/ABCParser.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2.examples 18 | 19 | import scala.annotation.tailrec 20 | import scala.util.{Failure, Success} 21 | import scala.io.StdIn 22 | import org.parboiled2.* 23 | 24 | object ABCParser extends App { 25 | repl() 26 | 27 | @tailrec 28 | def repl(): Unit = { 29 | // TODO: Replace next three lines with `scala.Predef.readLine(text: String, args: Any*)` 30 | // once BUG https://github.com/scala/bug/issues/8167 is fixed 31 | print("---\nEnter expression for abc-parser > ") 32 | Console.out.flush() 33 | StdIn.readLine() match { 34 | case "" => 35 | case line => 36 | val parser = new ABCParser(line) 37 | parser.InputLine.run() match { 38 | case Success(_) => println("Expression is valid") 39 | case Failure(e: ParseError) => println("Expression is not valid: " + parser.formatError(e)) 40 | case Failure(e) => println("Unexpected error during parsing run: " + e) 41 | } 42 | repl() 43 | } 44 | } 45 | } 46 | 47 | /** This parser reads the classic non-context-free language: 48 | * 49 | * a^n b^n c^n (for n > 1) 50 | * 51 | * See also: http://en.wikipedia.org/wiki/Parsing_expression_grammar#Examples 52 | */ 53 | class ABCParser(val input: ParserInput) extends Parser { 54 | 55 | def InputLine = 56 | rule { 57 | &(A ~ 'c') ~ oneOrMore('a') ~ B ~ !(ch('a') | 'b' | 'c') ~ EOI 58 | } 59 | 60 | def A: Rule0 = 61 | rule { 62 | 'a' ~ optional(A) ~ 'b' 63 | } 64 | 65 | def B: Rule0 = 66 | rule { 67 | 'b' ~ optional(B) ~ 'c' 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /examples/src/main/scala/org/parboiled2/examples/Calculator1.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2.examples 18 | 19 | import scala.annotation.tailrec 20 | import scala.util.{Failure, Success} 21 | import scala.io.StdIn 22 | import org.parboiled2.* 23 | 24 | object Calculator1 extends App { 25 | repl() 26 | 27 | @tailrec 28 | def repl(): Unit = { 29 | // TODO: Replace next three lines with `scala.Predef.readLine(text: String, args: Any*)` 30 | // once BUG https://github.com/scala/bug/issues/8167 is fixed 31 | print("---\nEnter calculator expression > ") 32 | Console.out.flush() 33 | StdIn.readLine() match { 34 | case "" => 35 | case line => 36 | val parser = new Calculator1(line) 37 | parser.InputLine.run() match { 38 | case Success(result) => println("Result: " + result) 39 | case Failure(e: ParseError) => println("Expression is not valid: " + parser.formatError(e)) 40 | case Failure(e) => println("Unexpected error during parsing run: " + e) 41 | } 42 | repl() 43 | } 44 | } 45 | } 46 | 47 | /** This parser reads simple calculator expressions and evaluates them right during 48 | * the parsing run with the help of the value stack. 49 | */ 50 | class Calculator1(val input: ParserInput) extends Parser { 51 | def InputLine = rule(Expression ~ EOI) 52 | 53 | def Expression: Rule1[Int] = 54 | rule { 55 | Term ~ zeroOrMore( 56 | '+' ~ Term ~> ((_: Int) + _) 57 | | '-' ~ Term ~> ((_: Int) - _) 58 | ) 59 | } 60 | 61 | def Term = 62 | rule { 63 | Factor ~ zeroOrMore( 64 | '*' ~ Factor ~> ((_: Int) * _) 65 | | '/' ~ Factor ~> ((_: Int) / _) 66 | ) 67 | } 68 | 69 | def Factor = rule(Number | Parens) 70 | 71 | def Parens = rule('(' ~ Expression ~ ')') 72 | 73 | def Number = rule(capture(Digits) ~> (_.toInt)) 74 | 75 | def Digits = rule(oneOrMore(CharPredicate.Digit)) 76 | } 77 | -------------------------------------------------------------------------------- /examples/src/main/scala/org/parboiled2/examples/Calculator2.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package org.parboiled2.examples 18 | 19 | import scala.annotation.tailrec 20 | import scala.util.{Failure, Success} 21 | import scala.io.StdIn 22 | import org.parboiled2.* 23 | 24 | object Calculator2 extends App { 25 | repl() 26 | 27 | @tailrec 28 | def repl(): Unit = { 29 | // TODO: Replace next three lines with `scala.Predef.readLine(text: String, args: Any*)` 30 | // once BUG https://github.com/scala/bug/issues/8167 is fixed 31 | print("---\nEnter calculator expression > ") 32 | Console.out.flush() 33 | StdIn.readLine() match { 34 | case "" => 35 | case line => 36 | val parser = new Calculator2(line) 37 | parser.InputLine.run() match { 38 | case Success(exprAst) => println("Result: " + eval(exprAst)) 39 | case Failure(e: ParseError) => println("Expression is not valid: " + parser.formatError(e)) 40 | case Failure(e) => println("Unexpected error during parsing run: " + e) 41 | } 42 | repl() 43 | } 44 | } 45 | 46 | def eval(expr: Expr): Int = 47 | expr match { 48 | case Value(v) => v.toInt 49 | case Addition(a, b) => eval(a) + eval(b) 50 | case Subtraction(a, b) => eval(a) - eval(b) 51 | case Multiplication(a, b) => eval(a) * eval(b) 52 | case Division(a, b) => eval(a) / eval(b) 53 | } 54 | 55 | // our abstract syntax tree model 56 | sealed trait Expr 57 | case class Value(value: String) extends Expr 58 | case class Addition(lhs: Expr, rhs: Expr) extends Expr 59 | case class Subtraction(lhs: Expr, rhs: Expr) extends Expr 60 | case class Multiplication(lhs: Expr, rhs: Expr) extends Expr 61 | case class Division(lhs: Expr, rhs: Expr) extends Expr 62 | } 63 | 64 | /** This parser reads simple calculator expressions and builds an AST 65 | * for them, to be evaluated in a separate phase, after parsing is completed. 66 | */ 67 | class Calculator2(val input: ParserInput) extends Parser { 68 | import Calculator2.* 69 | 70 | def InputLine = rule(Expression ~ EOI) 71 | 72 | def Expression: Rule1[Expr] = 73 | rule { 74 | Term ~ zeroOrMore( 75 | '+' ~ Term ~> Addition.apply _ 76 | | '-' ~ Term ~> Subtraction.apply _ 77 | ) 78 | } 79 | 80 | def Term = 81 | rule { 82 | Factor ~ zeroOrMore( 83 | '*' ~ Factor ~> Multiplication.apply _ 84 | | '/' ~ Factor ~> Division.apply _ 85 | ) 86 | } 87 | 88 | def Factor = rule(Number | Parens) 89 | 90 | def Parens = rule('(' ~ Expression ~ ')') 91 | 92 | def Number = rule(capture(Digits) ~> Value.apply _) 93 | 94 | def Digits = rule(oneOrMore(CharPredicate.Digit)) 95 | } 96 | -------------------------------------------------------------------------------- /examples/src/main/scala/org/parboiled2/examples/CsvParser.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package org.parboiled2.examples 18 | 19 | import scala.collection.immutable 20 | import org.parboiled2.* 21 | 22 | object CsvParser { 23 | 24 | case class CsvFile(header: Option[Record], records: immutable.Seq[Record]) 25 | case class Record(fields: immutable.Seq[String]) 26 | case class Error(msg: String) 27 | 28 | /** Parses the given input into a [[CsvFile]] or an [[Error]] instance. 29 | */ 30 | def apply(input: ParserInput, headerPresent: Boolean = true, fieldDelimiter: Char = ','): Either[Error, CsvFile] = { 31 | import Parser.DeliveryScheme.Either 32 | val parser = new CsvParser(input, headerPresent, fieldDelimiter) 33 | parser.file.run().left.map(error => Error(parser.formatError(error))) 34 | } 35 | 36 | private val `TEXTDATA-BASE` = CharPredicate.Printable -- '"' 37 | private val QTEXTDATA = `TEXTDATA-BASE` ++ "\r\n" 38 | } 39 | 40 | /** Simple, fast CSV parser. 41 | * 42 | * See http://tools.ietf.org/html/rfc4180#section-2 43 | */ 44 | class CsvParser(val input: ParserInput, headerPresent: Boolean, fieldDelimiter: Char) 45 | extends Parser with StringBuilding { 46 | import CsvParser.* 47 | 48 | val TEXTDATA = `TEXTDATA-BASE` -- fieldDelimiter 49 | 50 | def file = 51 | rule { 52 | OWS ~ optional(test(headerPresent) ~ header ~ NL) ~ oneOrMore(record) 53 | .separatedBy(NL) ~ optional(NL) ~ EOI ~> CsvFile.apply _ 54 | } 55 | 56 | def header = rule(record) 57 | 58 | def record = rule(oneOrMore(field).separatedBy(fieldDelimiter) ~> Record.apply _) 59 | 60 | def field = rule(`quoted-field` | `unquoted-field`) 61 | 62 | def `quoted-field` = 63 | rule { 64 | OWS ~ '"' ~ clearSB() ~ zeroOrMore((QTEXTDATA | "\"\"") ~ appendSB()) ~ '"' ~ OWS ~ push(sb.toString) 65 | } 66 | 67 | def `unquoted-field` = rule(capture(zeroOrMore(TEXTDATA))) 68 | 69 | def NL = rule(optional('\r') ~ '\n') 70 | 71 | def OWS = rule(zeroOrMore(' ')) 72 | } 73 | -------------------------------------------------------------------------------- /examples/src/main/scala/org/parboiled2/examples/JsonParser.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2.examples 18 | 19 | import scala.annotation.switch 20 | import org.parboiled2.* 21 | import spray.json.{ParserInput as _, *} 22 | 23 | /** This is a feature-complete JSON parser implementation that almost directly 24 | * models the JSON grammar presented at http://www.json.org as a parboiled2 PEG parser. 
25 | */ 26 | class JsonParser(val input: ParserInput) extends Parser with StringBuilding { 27 | import CharPredicate.{Digit, Digit19, HexDigit} 28 | import JsonParser.* 29 | 30 | // the root rule 31 | def Json = rule(WhiteSpace ~ Value ~ EOI) 32 | 33 | def JsonObject: Rule1[JsObject] = 34 | rule { 35 | ws('{') ~ zeroOrMore(Pair).separatedBy(ws(',')) ~ ws('}') ~> ((fields: Seq[JsField]) => JsObject(fields*)) 36 | } 37 | 38 | def Pair = rule(JsonStringUnwrapped ~ ws(':') ~ Value ~> ((_, _))) 39 | 40 | def Value: Rule1[JsValue] = 41 | rule { 42 | // as an optimization of the equivalent rule: 43 | // JsonString | JsonNumber | JsonObject | JsonArray | JsonTrue | JsonFalse | JsonNull 44 | // we make use of the fact that one-char lookahead is enough to discriminate the cases 45 | run { 46 | (cursorChar: @switch) match { 47 | case '"' => JsonString 48 | case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | '-' => JsonNumber 49 | case '{' => JsonObject 50 | case '[' => JsonArray 51 | case 't' => JsonTrue 52 | case 'f' => JsonFalse 53 | case 'n' => JsonNull 54 | case _ => MISMATCH 55 | } 56 | } 57 | } 58 | 59 | def JsonString = rule(JsonStringUnwrapped ~> (JsString(_))) 60 | 61 | def JsonStringUnwrapped = rule('"' ~ clearSB() ~ Characters ~ ws('"') ~ push(sb.toString)) 62 | 63 | def JsonNumber = rule(capture(Integer ~ optional(Frac) ~ optional(Exp)) ~> (JsNumber(_)) ~ WhiteSpace) 64 | 65 | def JsonArray = rule(ws('[') ~ zeroOrMore(Value).separatedBy(ws(',')) ~ ws(']') ~> (JsArray(_*))) 66 | 67 | def Characters = rule(zeroOrMore(NormalChar | '\\' ~ EscapedChar)) 68 | 69 | def NormalChar = rule(!QuoteBackslash ~ ANY ~ appendSB()) 70 | 71 | def EscapedChar = 72 | rule( 73 | QuoteSlashBackSlash ~ appendSB() 74 | | 'b' ~ appendSB('\b') 75 | | 'f' ~ appendSB('\f') 76 | | 'n' ~ appendSB('\n') 77 | | 'r' ~ appendSB('\r') 78 | | 't' ~ appendSB('\t') 79 | | Unicode ~> { code => sb.append(code.asInstanceOf[Char]); () } 80 | ) 81 | 82 | def Unicode = rule('u' ~ capture(HexDigit ~ HexDigit ~ HexDigit ~ HexDigit) ~> (java.lang.Integer.parseInt(_, 16))) 83 | 84 | def Integer = rule(optional('-') ~ (Digit19 ~ Digits | Digit)) 85 | 86 | def Digits = rule(oneOrMore(Digit)) 87 | 88 | def Frac = rule("." ~ Digits) 89 | 90 | def Exp = rule(ignoreCase('e') ~ optional(anyOf("+-")) ~ Digits) 91 | 92 | def JsonTrue = rule("true" ~ WhiteSpace ~ push(JsTrue)) 93 | 94 | def JsonFalse = rule("false" ~ WhiteSpace ~ push(JsFalse)) 95 | 96 | def JsonNull = rule("null" ~ WhiteSpace ~ push(JsNull)) 97 | 98 | def WhiteSpace = rule(zeroOrMore(WhiteSpaceChar)) 99 | 100 | def ws(c: Char) = rule(c ~ WhiteSpace) 101 | } 102 | 103 | object JsonParser { 104 | val WhiteSpaceChar = CharPredicate(" \n\r\t\f") 105 | val QuoteBackslash = CharPredicate("\"\\") 106 | val QuoteSlashBackSlash = QuoteBackslash ++ "/" 107 | } 108 | -------------------------------------------------------------------------------- /examples/src/test/scala/org/parboiled2/examples/CsvParserSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2.examples 18 | 19 | import org.parboiled2.examples.CsvParser.{CsvFile, Record} 20 | import utest.* 21 | 22 | object CsvParserSpec extends TestSuite { 23 | 24 | val tests = Tests { 25 | 26 | "The CsvParser" - { 27 | "correctly parse simple CSV test input" - { 28 | CsvParser { 29 | """"first_name","last_name","company_name","address","city","county","state","zip","phone1","phone2","email","web" 30 | |"James","Butt", "Benton, John B Jr","6649 N Blue Gum St","New Orleans","Orleans","LA",70116,"504-621-8927","504-845-1427","jbutt@gmail.com","http://www.bentonjohnbjr.com" 31 | |"Josephine","Darakjy","Chanay, Jeffrey A Esq","4 B Blue Ridge Blvd","Brighton","Livingston","MI",48116,"810-292-9388","810-374-9840","josephine_darakjy@darakjy.org","http://www.chanayjeffreyaesq.com" 32 | |Art,"Venere","Chemel, James L Cpa","8 W Cerritos Ave #54","Bridgeport","Gloucester","NJ",08014 ,"856-636-8749","856-264-4130","art@venere.org","http://www.chemeljameslcpa.com" 33 | |"Lenna","Paprocki","Feltz ""Printing"" Service", 639 Main St,"Anchorage","Anchorage","AK",99501,"907-385-4412","907-921-2010","lpaprocki@hotmail.com","http://www.feltzprintingservice.com" 34 | |""".stripMargin 35 | } ==> file( 36 | record( 37 | "first_name", 38 | "last_name", 39 | "company_name", 40 | "address", 41 | "city", 42 | "county", 43 | "state", 44 | "zip", 45 | "phone1", 46 | "phone2", 47 | "email", 48 | "web" 49 | ), 50 | record( 51 | "James", 52 | "Butt", 53 | "Benton, John B Jr", 54 | "6649 N Blue Gum St", 55 | "New Orleans", 56 | "Orleans", 57 | "LA", 58 | "70116", 59 | "504-621-8927", 60 | "504-845-1427", 61 | "jbutt@gmail.com", 62 | "http://www.bentonjohnbjr.com" 63 | ), 64 | record( 65 | "Josephine", 66 | "Darakjy", 67 | "Chanay, Jeffrey A Esq", 68 | "4 B Blue Ridge Blvd", 69 | "Brighton", 70 | "Livingston", 71 | "MI", 72 | "48116", 73 | "810-292-9388", 74 | "810-374-9840", 75 | "josephine_darakjy@darakjy.org", 76 | "http://www.chanayjeffreyaesq.com" 77 | ), 78 | record( 79 | "Art", 80 | "Venere", 81 | "Chemel, James L Cpa", 82 | "8 W Cerritos Ave #54", 83 | "Bridgeport", 84 | "Gloucester", 85 | "NJ", 86 | "08014 ", 87 | "856-636-8749", 88 | "856-264-4130", 89 | "art@venere.org", 90 | "http://www.chemeljameslcpa.com" 91 | ), 92 | record( 93 | "Lenna", 94 | "Paprocki", 95 | "Feltz \"Printing\" Service", 96 | " 639 Main St", 97 | "Anchorage", 98 | "Anchorage", 99 | "AK", 100 | "99501", 101 | "907-385-4412", 102 | "907-921-2010", 103 | "lpaprocki@hotmail.com", 104 | "http://www.feltzprintingservice.com" 105 | ) 106 | ) 107 | } 108 | } 109 | } 110 | 111 | def file(header: Record, records: Record*) = Right(CsvFile(Some(header), records.toVector)) 112 | def record(fields: String*) = Record(fields.toVector) 113 | } 114 | -------------------------------------------------------------------------------- /examples/src/test/scala/org/parboiled2/examples/JsonParserSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 
2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2.examples 18 | 19 | import scala.util.{Failure, Success} 20 | import utest.* 21 | import spray.json.{JsonParser as _, *} 22 | import org.parboiled2.* 23 | 24 | object JsonParserSpec extends TestSuite { 25 | 26 | val tests = Tests { 27 | 28 | "The JsonParser" - { 29 | "parse 'null' to JsNull" - { 30 | parse("null") ==> JsNull 31 | } 32 | "parse 'true' to JsTrue" - { 33 | parse("true") ==> JsTrue 34 | } 35 | "parse 'false' to JsFalse" - { 36 | parse("false") ==> JsFalse 37 | } 38 | "parse '0' to JsNumber" - { 39 | parse("0") ==> JsNumber(0) 40 | } 41 | "parse '1.23' to JsNumber" - { 42 | parse("1.23") ==> JsNumber(1.23) 43 | } 44 | "parse '-1E10' to JsNumber" - { 45 | parse("-1E10") ==> JsNumber("-1E+10") 46 | } 47 | "parse '12.34e-10' to JsNumber" - { 48 | parse("12.34e-10") ==> JsNumber("1.234E-9") 49 | } 50 | "parse \"xyz\" to JsString" - { 51 | parse("\"xyz\"") ==> JsString("xyz") 52 | } 53 | "parse escapes in a JsString" - { 54 | parse(""""\"\\/\b\f\n\r\t"""") ==> JsString("\"\\/\b\f\n\r\t") 55 | parse("\"L\\" + "u00e4nder\"") ==> JsString("Länder") 56 | } 57 | "properly parse a simple JsObject" - ( 58 | parse(""" { "key" :42, "key2": "value" }""") ==> 59 | JsObject("key" -> JsNumber(42), "key2" -> JsString("value")) 60 | ) 61 | "properly parse a simple JsArray" - ( 62 | parse("""[null, 1.23 ,{"key":true } ] """) ==> 63 | JsArray(JsNull, JsNumber(1.23), JsObject("key" -> JsTrue)) 64 | ) 65 | } 66 | } 67 | 68 | def parse(s: String): JsValue = { 69 | val parser = new JsonParser(s) 70 | parser.Json.run() match { 71 | case Success(result) => result 72 | case Failure(e: ParseError) => sys.error(parser.formatError(e, new ErrorFormatter(showTraces = true))) 73 | case Failure(e) => throw e 74 | } 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /jsonBenchmark/src/main/scala/org/parboiled/examples/JsonParserBenchmark.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package org.parboiled.examples 18 | 19 | import org.openjdk.jmh.annotations.{Benchmark, Scope, State} 20 | import org.parboiled2.examples.JsonParser 21 | 22 | @State(Scope.Thread) 23 | class TestState { 24 | // 744kb test JSON produced with http://www.json-generator.com/ 25 | val json = io.Source.fromInputStream(getClass.getResourceAsStream("/test.json")).mkString 26 | } 27 | 28 | class JsonParserBenchmark { 29 | 30 | @Benchmark 31 | def sprayJsonParser(state: TestState) = 32 | spray.json.JsonParser(state.json) 33 | 34 | @Benchmark 35 | def parboiled2JsonParser(state: TestState) = 36 | new JsonParser(state.json).Json.run().get 37 | 38 | @Benchmark 39 | def json4SNative(state: TestState) = 40 | org.json4s.native.JsonMethods.parse(state.json) 41 | 42 | @Benchmark 43 | def json4SJackson(state: TestState) = 44 | org.json4s.jackson.JsonMethods.parse(state.json) 45 | } 46 | -------------------------------------------------------------------------------- /notes/2.0.0.markdown: -------------------------------------------------------------------------------- 1 | This is the final release of parboiled 2.0. 2 | 3 | parboiled 2 improves upon parboiled 1.x in many ways, most importantly parsing performance and DSL flexibility. 4 | The final adds only one small patch to the preceding RC2 and fixes an issue causing stack overflows in certain situations involving large input. 5 | 6 | The release announcement on the mailing list can be found 7 | [here](https://groups.google.com/d/msg/parboiled-user/fKiTldnhd6s/Dcf2lEvinKgJ). 8 | 9 | CHANGELOG: 10 | 11 | - Fixed stack overflow caused by scala issue 8657 (#78) -------------------------------------------------------------------------------- /notes/2.0.1.markdown: -------------------------------------------------------------------------------- 1 | This is mainly a maintenance release bringing these improvements: 2 | 3 | - Fixed ScalaReflectionException under Scala 2.11 when scala-reflect is not on the classpath (#81) 4 | - Increased operator argument limit from 9 to 22 (#85) 5 | - Added `named` rule modifier and `namedRule` helper for explicit naming of inner rules (#50) 6 | - Smaller project-internal cleanup 7 | 8 | Additionally I have added a new documentation chapter on “Meta-Rules” [here](https://github.com/sirthias/parboiled2#advanced-techniques), 9 | a topic which appears to become a frequently asked question on the parboiled2 mailing list. 10 | 11 | The release announcement on the mailing list can be found 12 | [here](https://groups.google.com/forum/#!topic/parboiled-user/Ygb_M6XU5P8). -------------------------------------------------------------------------------- /notes/about.markdown: -------------------------------------------------------------------------------- 1 | [parboiled2](http://parboiled2.org) is a macro-based PEG parser generator for Scala 2.10.3+ and Scala 2.11, 2 | enabling lightweight and easy-to-use, yet powerful, fast and elegant parsing of arbitrary input text. 3 | It serves well as a replacement for Scala's parser combinators and regular expressions where parsing performance, 4 | readability/maintainability of the parser definition and/or error reporting quality is of the essence. 
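The notes above describe the rule DSL only in prose; the example sources earlier in this dump (ABCParser.scala, Calculator1.scala, Calculator2.scala) show it in full. The following minimal sketch condenses that same pattern: a parser class wrapping a `ParserInput`, rules built with the `rule` macro, and `run()` delivering a `Try` by default. The parser and rule names are illustrative only and do not correspond to files in this repository.

import org.parboiled2.*
import scala.util.{Failure, Success}

// Hypothetical one-rule parser, following the pattern of the bundled examples:
// capture one or more digits and require end-of-input.
class DigitsParser(val input: ParserInput) extends Parser {
  def InputLine = rule(capture(oneOrMore(CharPredicate.Digit)) ~ EOI)
}

object DigitsParserDemo extends App {
  val parser = new DigitsParser("12345")
  parser.InputLine.run() match { // the default delivery scheme wraps the result in a Try
    case Success(digits)        => println("Matched: " + digits)
    case Failure(e: ParseError) => println("Invalid input: " + parser.formatError(e))
    case Failure(e)             => println("Unexpected error during parsing run: " + e)
  }
}

The `named` / `namedRule` facility mentioned in the 2.0.1 notes follows the same shape, just with an explicit name argument, and is implemented by the ParserMacros sources that follow.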
-------------------------------------------------------------------------------- /parboiled-core/src/main/scala-2/org/parboiled2/ParserMacros.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import org.parboiled2.support.OpTreeContext 20 | import org.parboiled2.support.hlist.HList 21 | 22 | private[parboiled2] trait ParserMacroMethods { 23 | 24 | /** Converts a compile-time only rule definition into the corresponding rule method implementation. 25 | */ 26 | def rule[I <: HList, O <: HList](r: Rule[I, O]): Rule[I, O] = macro ParserMacros.ruleImpl[I, O] 27 | 28 | /** Converts a compile-time only rule definition into the corresponding rule method implementation 29 | * with an explicitly given name. 30 | */ 31 | def namedRule[I <: HList, O <: HList](name: String)(r: Rule[I, O]): Rule[I, O] = 32 | macro ParserMacros.namedRuleImpl[I, O] 33 | 34 | } 35 | 36 | private[parboiled2] trait RuleRunnable { 37 | 38 | /** THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing! 39 | */ 40 | implicit class Runnable[L <: HList](rule: RuleN[L]) { 41 | def run()(implicit scheme: Parser.DeliveryScheme[L]): scheme.Result = macro ParserMacros.runImpl[L] 42 | } 43 | } 44 | 45 | object ParserMacros { 46 | import scala.reflect.macros.whitebox.Context 47 | 48 | /** THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing! 49 | */ 50 | type RunnableRuleContext[L <: HList] = Context { type PrefixType = Rule.Runnable[L] } 51 | 52 | def runImpl[L <: HList: c.WeakTypeTag]( 53 | c: RunnableRuleContext[L] 54 | )()(scheme: c.Expr[Parser.DeliveryScheme[L]]): c.Expr[scheme.value.Result] = { 55 | import c.universe._ 56 | val runCall = c.prefix.tree match { 57 | case q"parboiled2.this.Rule.Runnable[$l]($ruleExpr)" => 58 | ruleExpr match { 59 | case q"$p.$r[..$ts](...$argss)" if p.tpe <:< typeOf[Parser] => 60 | q"val p = $p; p.__run[$l](p.$r[..$ts](...$argss))($scheme)" 61 | case rule if rule.tpe <:< typeOf[RuleX] => q"__run[$l]($ruleExpr)($scheme)" 62 | case x => c.abort(x.pos, "Illegal `.run()` call base: " + x) 63 | } 64 | case x => c.abort(x.pos, "Illegal `Runnable.apply` call: " + x) 65 | } 66 | c.Expr[scheme.value.Result](runCall) 67 | } 68 | 69 | /** THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing! 
70 | */ 71 | type ParserContext = Context { type PrefixType = Parser } 72 | 73 | def ruleImpl[I <: HList: ctx.WeakTypeTag, O <: HList: ctx.WeakTypeTag]( 74 | ctx: ParserContext 75 | )(r: ctx.Expr[Rule[I, O]]): ctx.Expr[Rule[I, O]] = { 76 | import ctx.universe._ 77 | namedRuleImpl(ctx)(ctx.Expr[String](Literal(Constant(ctx.internal.enclosingOwner.name.decodedName.toString))))(r) 78 | } 79 | 80 | def namedRuleImpl[I <: HList: ctx.WeakTypeTag, O <: HList: ctx.WeakTypeTag]( 81 | ctx: ParserContext 82 | )(name: ctx.Expr[String])(r: ctx.Expr[Rule[I, O]]): ctx.Expr[Rule[I, O]] = { 83 | val opTreeCtx = new OpTreeContext[ctx.type] { val c: ctx.type = ctx } 84 | val opTree = opTreeCtx.RuleCall(Left(opTreeCtx.OpTree(r.tree)), name.tree) 85 | import ctx.universe._ 86 | val ruleTree = q""" 87 | def wrapped: Boolean = ${opTree.render(wrapped = true)} 88 | val matched = 89 | if (__inErrorAnalysis) wrapped 90 | else ${opTree.render(wrapped = false)} 91 | if (matched) org.parboiled2.Rule else null""" // we encode the "matched" boolean as 'ruleResult ne null' 92 | 93 | reify(ctx.Expr[RuleX](ruleTree).splice.asInstanceOf[Rule[I, O]]) 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala-2/org/parboiled2/support/HListable.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2.support 18 | 19 | import org.parboiled2.support.hlist._ 20 | 21 | trait HListable[T] { 22 | type Out <: HList 23 | } 24 | 25 | object HListable extends LowerPriorityHListable { 26 | implicit def fromUnit: HListable[Unit] { type Out = HNil } = `n/a` 27 | implicit def fromHList[T <: HList]: HListable[T] { type Out = T } = `n/a` 28 | } 29 | 30 | abstract class LowerPriorityHListable { 31 | implicit def fromAnyRef[T]: HListable[T] { type Out = T :: HNil } = `n/a` 32 | } 33 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala-2/org/parboiled2/support/TailSwitch.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package org.parboiled2.support 18 | 19 | import scala.annotation.implicitNotFound 20 | import org.parboiled2.support.hlist._ 21 | import org.parboiled2.support.hlist.ops.hlist.ReversePrepend 22 | 23 | // format: OFF 24 | 25 | /** 26 | * type-level implementation of this logic: 27 | * Out = 28 | * R if T has a tail of type L 29 | * (L dropRight T) ::: R if L has a tail of type T 30 | */ 31 | @implicitNotFound("Illegal rule composition") 32 | sealed trait TailSwitch[L <: HList, T <: HList, R <: HList] { 33 | type Out <: HList 34 | } 35 | object TailSwitch { 36 | implicit def tailSwitch[L <: HList, T <: HList, R <: HList, Out0 <: HList] 37 | (implicit ts: Aux[L, L, T, T, R, HNil, Out0]): TailSwitch[L, T, R] { type Out = Out0 } = `n/a` 38 | 39 | // type-level implementation of this algorithm: 40 | // @tailrec def rec(L, LI, T, TI, R, RI) = 41 | // if (TI <: L) R 42 | // else if (LI <: T) RI.reverse ::: R 43 | // else if (LI <: HNil) rec(L, HNil, T, TI.tail, R, RI) 44 | // else if (TI <: HNil) rec(L, LI.tail, T, HNil, R, LI.head :: RI) 45 | // else rec(L, LI.tail, T, TI.tail, R, LI.head :: RI) 46 | // rec(L, L, T, T, R, HNil) 47 | sealed trait Aux[L <: HList, LI <: HList, T <: HList, TI <: HList, R <: HList, RI <: HList, Out <: HList] 48 | 49 | object Aux extends Aux1 { 50 | // if TI <: L then Out = R 51 | implicit def terminate1[L <: HList, LI <: HList, T <: HList, TI <: L, R <: HList, RI <: HList]: 52 | Aux[L, LI, T, TI, R, RI, R] = `n/a` 53 | } 54 | 55 | abstract private[parboiled2] class Aux1 extends Aux2 { 56 | // if LI <: T then Out = RI.reverse ::: R 57 | implicit def terminate2[T <: HList, TI <: HList, L <: HList, LI <: T, R <: HList, RI <: HList, Out <: HList] 58 | (implicit rp: ReversePrepend.Aux[RI, R, Out]): Aux[L, LI, T, TI, R, RI, Out] = `n/a` 59 | } 60 | 61 | abstract private[parboiled2] class Aux2 { 62 | implicit def iter1[L <: HList, T <: HList, TH, TT <: HList, R <: HList, RI <: HList, Out <: HList] 63 | (implicit next: Aux[L, HNil, T, TT, R, RI, Out]): Aux[L, HNil, T, TH :: TT, R, RI, Out] = `n/a` 64 | 65 | implicit def iter2[L <: HList, LH, LT <: HList, T <: HList, R <: HList, RI <: HList, Out <: HList] 66 | (implicit next: Aux[L, LT, T, HNil, R, LH :: RI, Out]): Aux[L, LH :: LT, T, HNil, R, RI, Out] = `n/a` 67 | 68 | implicit def iter3[L <: HList, LH, LT <: HList, T <: HList, TH, TT <: HList, R <: HList, RI <: HList, Out <: HList] 69 | (implicit next: Aux[L, LT, T, TT, R, LH :: RI, Out]): Aux[L, LH :: LT, T, TH :: TT, R, RI, Out] = `n/a` 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala-3/org/parboiled2/ParserMacros.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import org.parboiled2.support.hlist.HList 20 | 21 | private[parboiled2] trait ParserMacroMethods { parser: Parser => 22 | 23 | /** Converts a compile-time only rule definition into the corresponding rule method implementation. 24 | */ 25 | inline def rule[I <: HList, O <: HList](inline r: Rule[I, O]): Rule[I, O] = ${ ParserMacros.ruleImpl('parser, 'r) } 26 | 27 | /** Converts a compile-time only rule definition into the corresponding rule method implementation 28 | * with an explicitly given name. 29 | */ 30 | inline def namedRule[I <: HList, O <: HList](name: String)(inline r: Rule[I, O]): Rule[I, O] = ${ 31 | ParserMacros.nameRuleImpl('parser, 'name, 'r) 32 | } 33 | 34 | } 35 | 36 | private[parboiled2] trait RuleRunnable { 37 | 38 | /** THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing! 39 | */ 40 | extension [L <: HList](inline rule: RuleN[L]) { 41 | inline def run()(using scheme: Parser.DeliveryScheme[L]): scheme.Result = 42 | ${ ParserMacros.runImpl[L, scheme.Result]()('rule, 'scheme) } 43 | } 44 | } 45 | 46 | object ParserMacros { 47 | import scala.quoted.* 48 | import scala.compiletime.* 49 | 50 | // TODO: the `R` type parameter is a workaround for https://github.com/lampepfl/dotty/issues/13376 51 | // Discussion at https://github.com/sirthias/parboiled2/pull/274#issuecomment-904926294 52 | def runImpl[L <: HList: Type, R: Type]()(ruleExpr: Expr[RuleN[L]], schemeExpr: Expr[Parser.DeliveryScheme[L]])(using 53 | Quotes 54 | ): Expr[R] = { 55 | import quotes.reflect.* 56 | 57 | /* 58 | the `rule.run()` macro supports two scenarios (`rule` has type `RuleN[L]`): 59 | 60 | 1. someParserExpression.rule[targs](args).run()(deliveryScheme) 61 | is re-written to 62 | { val p = someParserExpression 63 | p.__run[L](p.rule[targs](args))(deliveryScheme) } 64 | 65 | 2. Within a Parser subclass: 66 | rule(...).run()(deliveryScheme) 67 | is re-written to 68 | this.__run[L](rule(...))(deliveryScheme) 69 | Note that `rule` is also a macro, we work with the macro expansion of the `rule` call. 70 | */ 71 | 72 | case class RuleFromParser(parser: Term, rule: Symbol, targs: List[TypeTree], argss: List[List[Term]]) { 73 | def ruleCall[P](localParser: Expr[P]): Expr[RuleN[L]] = { 74 | val r = Select(localParser.asTerm, rule) 75 | argss.foldLeft(if (targs.isEmpty) r else TypeApply(r, targs))((t, args) => Apply(t, args)).asExprOf[RuleN[L]] 76 | } 77 | } 78 | 79 | object RuleFromParser { 80 | def dissect(t: Term, targs: List[TypeTree], argss: List[List[Term]]): (Term, List[TypeTree], List[List[Term]]) = 81 | t.underlyingArgument match { 82 | case Apply(f, args) => dissect(f, targs, args :: argss) 83 | case TypeApply(f, targs) => dissect(f, targs, argss) 84 | case t => (t, targs, argss) 85 | } 86 | 87 | def unapply(t: Term): Option[RuleFromParser] = dissect(t, Nil, Nil) match { 88 | case (rule @ Select(parser, _), targs, argss) if parser.tpe <:< TypeRepr.of[Parser] => 89 | Some(RuleFromParser(parser, rule.symbol, targs, argss)) 90 | case _ => None 91 | } 92 | } 93 | 94 | def isRuleMacro(sym: Symbol) = 95 | sym.owner == TypeRepr.of[ParserMacroMethods].typeSymbol && 96 | (sym.name == "rule" || sym.name == "namedRule") 97 | 98 | ruleExpr.asTerm match { 99 | case RuleFromParser(rule) => 100 | rule.parser.tpe.asType match { 101 | case '[pT] => 102 | // TODO: the parser type `pT` is not bounded by `<: Parser`, not sure how to do that. 
103 | // This is why `asInstanceOf[Parser]` is needed below 104 | val parserExpr = rule.parser.asExprOf[pT] 105 | '{ 106 | val p: pT = $parserExpr 107 | p.asInstanceOf[Parser].__run[L](${ rule.ruleCall('p) })($schemeExpr).asInstanceOf[R] 108 | } 109 | } 110 | case Inlined(_, _, Inlined(Some(ruleMacro), List(ValDef(_, _, Some(parserThis))), rule)) 111 | if rule.tpe <:< TypeRepr.of[RuleX] && isRuleMacro(ruleMacro.symbol) => 112 | // The `Inlined` tree for the `rule` macro has a binding for the parser instance. 113 | // TODO: we re-use the rhs of that binding (parserThis), I didn't manage to create the right This() tree. 114 | '{ ${ parserThis.asExprOf[Parser] }.__run[L]($ruleExpr)($schemeExpr).asInstanceOf[R] } 115 | case r => 116 | report.error(s"""Cannot rewrite `myRule.run()` call for rule: ${ruleExpr.show} 117 | |`myRule` needs to be either of the form `someParser.someRule[targs](args)` 118 | |or it needs to be a `rule(...)` definition within a Parser subclass.""".stripMargin) 119 | throw new MatchError(r) 120 | } 121 | } 122 | 123 | def ruleImpl[I <: HList: Type, O <: HList: Type](parser: Expr[Parser], r: Expr[Rule[I, O]])(using 124 | Quotes 125 | ): Expr[Rule[I, O]] = { 126 | import quotes.reflect.* 127 | nameRuleImpl(parser, Expr(Symbol.spliceOwner.owner.name), r) 128 | } 129 | 130 | def nameRuleImpl[I <: HList: Type, O <: HList: Type](parser: Expr[Parser], name: Expr[String], r: Expr[Rule[I, O]])( 131 | using Quotes 132 | ): Expr[Rule[I, O]] = { 133 | import quotes.reflect.* 134 | 135 | val ctx = new support.OpTreeContext(parser) 136 | val opTree = ctx.topLevel(ctx.deconstruct(r), name) 137 | 138 | '{ 139 | def wrapped: Boolean = ${ opTree.render(wrapped = true) } 140 | val matched = 141 | if ($parser.__inErrorAnalysis) wrapped 142 | else ${ opTree.render(wrapped = false) } 143 | if (matched) org.parboiled2.Rule.asInstanceOf[Rule[I, O]] else null 144 | } 145 | } 146 | } 147 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala-3/org/parboiled2/support/HListable.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package org.parboiled2.support 18 | 19 | import org.parboiled2.support.hlist.* 20 | 21 | trait HListable[T] { 22 | type Out <: HList 23 | } 24 | 25 | object HListable { 26 | type HL0[T] <: HList = T match { 27 | case Unit => HNil 28 | case HNil => HNil 29 | case ::[a, b] => ::[a, b] 30 | case _ => T :: HNil 31 | } 32 | 33 | implicit def calc[T]: HListable[T] { type Out = HL0[T] } = `n/a` 34 | } 35 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala-3/org/parboiled2/support/TailSwitch.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2.support 18 | 19 | import hlist.* 20 | 21 | import scala.annotation.implicitNotFound 22 | 23 | /** 24 | * type-level implementation of this logic: 25 | * Out = 26 | * R if T has a tail of type L 27 | * (L dropRight T) ::: R if L has a tail of type T 28 | */ 29 | @implicitNotFound("Illegal rule composition") 30 | sealed trait TailSwitch[L <: HList, T <: HList, R <: HList] { 31 | type Out <: HList 32 | } 33 | object TailSwitch { 34 | type Reverse0[Acc <: HList, L <: HList] <: HList = L match { 35 | case HNil => Acc 36 | case ::[h, t] => Reverse0[h :: Acc, t] 37 | } 38 | 39 | type Reverse1[L <: HList] <: HList = L match { 40 | case HNil => HNil 41 | case ::[h, t] => Reverse0[h :: HNil, t] 42 | } 43 | 44 | type Prepend0[A <: HList, B <: HList] <: HList = A match { 45 | case HNil => B 46 | case ::[h, t] => ::[h, Prepend0[t, B]] 47 | } 48 | 49 | // type-level implementation of this algorithm: 50 | // @tailrec def rec(L, LI, T, TI, R, RI) = 51 | // if (TI <: L) R 52 | // else if (LI <: T) RI.reverse ::: R 53 | // else if (LI <: HNil) rec(L, HNil, T, TI.tail, R, RI) 54 | // else if (TI <: HNil) rec(L, LI.tail, T, HNil, R, LI.head :: RI) 55 | // else rec(L, LI.tail, T, TI.tail, R, LI.head :: RI) 56 | // rec(L, L, T, T, R, HNil) 57 | type TailSwitch0[L <: HList, LI <: HList, T <: HList, TI <: HList, R <: HList, RI <: HList] <: HList = TI match { 58 | case L => R 59 | case _ => 60 | LI match { 61 | case T => Prepend0[Reverse1[RI], R] 62 | case HNil => 63 | TI match { 64 | case ::[?, t] => TailSwitch0[L, HNil, T, t, R, RI] 65 | } 66 | case ::[h, t] => 67 | TI match { 68 | case HNil => TailSwitch0[L, t, T, HNil, R, h :: RI] 69 | case ::[?, tt] => TailSwitch0[L, t, T, tt, R, h :: RI] 70 | } 71 | } 72 | } 73 | 74 | type Aux[L <: HList, LI <: HList, T <: HList, TI <: HList, R <: HList, RI <: HList, Out0 <: HList] = 75 | TailSwitch[L, T, R] { type Out = Out0 } 76 | 77 | implicit def tailSwitch[L <: HList, T <: HList, R <: HList] 78 | : TailSwitch[L, T, R] { type Out = TailSwitch0[L, L, T, T, R, HNil] } = `n/a` 79 | } 80 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala/org/parboiled2/ParserInput.scala: 
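To make the type-level algorithm in TailSwitch.scala above more tangible, here is a purely illustrative sketch (not part of the repository; it assumes parboiled2 on the classpath, and the parser and rule names are made up) showing how the `Out` types computed by `TailSwitch` surface when rules are combined with `~`:

import org.parboiled2._

// Hypothetical demo parser; the explicit result types are redundant but spell out
// what the TailSwitch-driven type computation infers for each concatenation.
class TailSwitchDemo(val input: ParserInput) extends Parser {

  // Rule1[Int] ~ Rule1[String] ==> Rule2[Int, String]:
  // neither side consumes stack elements, so the two output stacks are concatenated.
  def IntThenWord: Rule2[Int, String] =
    rule(capture(oneOrMore(CharPredicate.Digit)) ~> ((s: String) => s.toInt) ~
      capture(oneOrMore(CharPredicate.Alpha)))

  // Appending an action that pops the Int and the String and pushes a single Int
  // "reduces" the Rule2[Int, String] back to a Rule1[Int].
  def Reduced: Rule1[Int] =
    rule(IntThenWord ~> ((i: Int, s: String) => i + s.length))
}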
-------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import scala.annotation.tailrec 20 | import java.nio.ByteBuffer 21 | 22 | trait ParserInput { 23 | 24 | /** Returns the character at the given (zero-based) index. 25 | * Note: this method is hot and should be small and efficient. 26 | * A range-check is not required for the parser to work correctly. 27 | */ 28 | def charAt(ix: Int): Char 29 | 30 | /** The number of characters in this input. 31 | * Note: this method is hot and should be small and efficient. 32 | */ 33 | def length: Int 34 | 35 | /** Returns the characters between index `start` (inclusively) and `end` (exclusively) as a `String`. 36 | */ 37 | def sliceString(start: Int, end: Int): String 38 | 39 | /** Returns the characters between index `start` (inclusively) and `end` (exclusively) as an `Array[Char]`. 40 | */ 41 | def sliceCharArray(start: Int, end: Int): Array[Char] 42 | 43 | /** Gets the input line with the given number as a String. 44 | * Note: the first line is line number one! 45 | */ 46 | def getLine(line: Int): String 47 | } 48 | 49 | object ParserInput { 50 | val Empty = apply(Array.empty[Byte]) 51 | 52 | implicit def apply(bytes: Array[Byte]): ByteArrayBasedParserInput = new ByteArrayBasedParserInput(bytes) 53 | 54 | implicit def apply(bytes: Array[Byte], endIndex: Int): ByteArrayBasedParserInput = 55 | new ByteArrayBasedParserInput(bytes, endIndex) 56 | implicit def apply(string: String): StringBasedParserInput = new StringBasedParserInput(string) 57 | implicit def apply(chars: Array[Char]): CharArrayBasedParserInput = new CharArrayBasedParserInput(chars) 58 | 59 | implicit def apply(chars: Array[Char], endIndex: Int): CharArrayBasedParserInput = 60 | new CharArrayBasedParserInput(chars, endIndex) 61 | 62 | abstract class DefaultParserInput extends ParserInput { 63 | 64 | def getLine(line: Int): String = { 65 | @tailrec def rec(ix: Int, lineStartIx: Int, lineNr: Int): String = 66 | if (ix < length) 67 | if (charAt(ix) == '\n') 68 | if (lineNr < line) rec(ix + 1, ix + 1, lineNr + 1) 69 | else sliceString(lineStartIx, ix) 70 | else rec(ix + 1, lineStartIx, lineNr) 71 | else if (lineNr == line) sliceString(lineStartIx, ix) 72 | else "" 73 | rec(ix = 0, lineStartIx = 0, lineNr = 1) 74 | } 75 | } 76 | 77 | /** ParserInput reading directly off a byte array. 78 | * This avoids a separate decoding step but assumes that each byte represents exactly one character, 79 | * which is encoded by ISO-8859-1! 80 | * You can therefore use this ParserInput type only if you know that all input will be `ISO-8859-1`-encoded, 81 | * or only contains 7-bit ASCII characters (which is a subset of ISO-8859-1)! 
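 * For input in any other encoding decode the bytes to a `String` or `Array[Char]` first and
 * use one of the other ParserInput implementations defined below.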
82 | * 83 | * Note that this ParserInput type will NOT work with general `UTF-8`-encoded input as this can contain 84 | * character representations spanning multiple bytes. However, if you know that your input will only ever contain 85 | * 7-bit ASCII characters (0x00-0x7F) then UTF-8 is fine, since the first 127 UTF-8 characters are 86 | * encoded with only one byte that is identical to 7-bit ASCII and ISO-8859-1. 87 | */ 88 | class ByteArrayBasedParserInput(bytes: Array[Byte], endIndex: Int = 0) extends DefaultParserInput { 89 | val length = if (endIndex <= 0 || endIndex > bytes.length) bytes.length else endIndex 90 | def charAt(ix: Int) = (bytes(ix) & 0xff).toChar 91 | def sliceString(start: Int, end: Int) = new String(bytes, start, math.max(end - start, 0), `ISO-8859-1`) 92 | 93 | def sliceCharArray(start: Int, end: Int) = 94 | `ISO-8859-1`.decode(ByteBuffer.wrap(java.util.Arrays.copyOfRange(bytes, start, end))).array() 95 | } 96 | 97 | class StringBasedParserInput(string: String) extends DefaultParserInput { 98 | def charAt(ix: Int) = string.charAt(ix) 99 | def length = string.length 100 | def sliceString(start: Int, end: Int) = string.substring(start, math.min(end, string.length)) 101 | 102 | def sliceCharArray(start: Int, end: Int) = { 103 | val chars = new Array[Char](end - start) 104 | string.getChars(start, end, chars, 0) 105 | chars 106 | } 107 | } 108 | 109 | class CharArrayBasedParserInput(chars: Array[Char], endIndex: Int = 0) extends DefaultParserInput { 110 | val length = if (endIndex <= 0 || endIndex > chars.length) chars.length else endIndex 111 | def charAt(ix: Int) = chars(ix) 112 | def sliceString(start: Int, end: Int) = new String(chars, start, math.max(end - start, 0)) 113 | def sliceCharArray(start: Int, end: Int) = java.util.Arrays.copyOfRange(chars, start, end) 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala/org/parboiled2/Rule.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import scala.annotation.unchecked.uncheckedVariance 20 | import scala.annotation.compileTimeOnly 21 | import scala.collection.immutable 22 | import org.parboiled2.support.* 23 | import org.parboiled2.support.hlist.HList 24 | 25 | sealed trait RuleX 26 | 27 | /** The general model of a parser rule. 28 | * It is characterized by consuming a certain number of elements from the value stack (whose types are captured by the 29 | * HList type parameter `I` for "Input") and itself pushing a certain number of elements onto the value stack (whose 30 | * types are captured by the HList type parameter `O` for "Output"). 31 | * 32 | * At runtime there are only two instances of this class which signal whether the rule has matched (or mismatched) 33 | * at the current point in the input. 
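 *
 * For example (illustrative): a `Rule[Int :: HNil, Int :: String :: HNil]` expects an `Int` on the
 * value stack, which it consumes, and pushes an `Int` and a `String` back onto it (with the
 * `String` ending up on top).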
34 | */ 35 | sealed class Rule[-I <: HList, +O <: HList] extends RuleX { 36 | // Note: we could model `Rule` as a value class, however, tests have shown that this doesn't result in any measurable 37 | // performance benefit and, in addition, comes with other drawbacks (like generated bridge methods) 38 | 39 | /** Concatenates this rule with the given other one. 40 | * The resulting rule type is computed at the type level. 41 | * Here is an illustration (using an abbreviated HList notation): 42 | * Rule[, A] ~ Rule[, B] = Rule[, A:B] 43 | * Rule[A:B:C, D:E:F] ~ Rule[F, G:H] = Rule[A:B:C, D:E:G:H] 44 | * Rule[A, B:C] ~ Rule[D:B:C, E:F] = Rule[D:A, E:F] 45 | */ 46 | @compileTimeOnly("Calls to `~` must be inside `rule` macro") 47 | def ~[I2 <: HList, O2 <: HList](that: Rule[I2, O2])(implicit 48 | i: TailSwitch[I2, O @uncheckedVariance, I @uncheckedVariance], 49 | o: TailSwitch[O @uncheckedVariance, I2, O2] 50 | ): Rule[i.Out, o.Out] = `n/a` 51 | 52 | /** Same as `~` but with "cut" semantics, meaning that the parser will never backtrack across this boundary. 53 | * If the rule being concatenated doesn't match, a parse error will be triggered immediately. 54 | */ 55 | @compileTimeOnly("Calls to `~!~` must be inside `rule` macro") 56 | def ~!~[I2 <: HList, O2 <: HList](that: Rule[I2, O2])(implicit 57 | i: TailSwitch[I2, O @uncheckedVariance, I @uncheckedVariance], 58 | o: TailSwitch[O @uncheckedVariance, I2, O2] 59 | ): Rule[i.Out, o.Out] = `n/a` 60 | 61 | /** Combines this rule with the given other one in a way that the resulting rule matches if this rule matches 62 | * or the other one matches. If this rule doesn't match, the parser is reset and the given alternative is tried. 63 | * This operator therefore implements the "ordered choice" PEG combinator. 64 | */ 65 | @compileTimeOnly("Calls to `|` must be inside `rule` macro") 66 | def |[I2 <: I, O2 >: O <: HList](that: Rule[I2, O2]): Rule[I2, O2] = `n/a` 67 | 68 | /** Creates a "negative syntactic predicate", i.e. a rule that matches only if this rule mismatches and vice versa. 69 | * The resulting rule doesn't cause the parser to make any progress (i.e. match any input) and also clears out all 70 | * effects that the underlying rule might have had on the value stack. 71 | */ 72 | @compileTimeOnly("Calls to `unary_!` must be inside `rule` macro") 73 | def unary_! : Rule0 = `n/a` 74 | 75 | /** Attaches the given explicit name to this rule. 76 | */ 77 | @compileTimeOnly("Calls to `named` must be inside `rule` macro") 78 | def named(name: String): this.type = `n/a` 79 | 80 | /** Postfix shortcut for `optional`. 81 | */ 82 | @compileTimeOnly("Calls to `.?` must be inside `rule` macro") 83 | def ?(implicit l: Lifter[Option, I @uncheckedVariance, O @uncheckedVariance]): Rule[l.In, l.OptionalOut] = `n/a` 84 | 85 | /** Postfix shortcut for `zeroOrMore`. 86 | */ 87 | @compileTimeOnly("Calls to `.*` must be inside `rule` macro") 88 | def *(implicit 89 | l: Lifter[immutable.Seq, I @uncheckedVariance, O @uncheckedVariance] 90 | ): Rule[l.In, l.OptionalOut] with Repeated = `n/a` 91 | 92 | /** Postfix shortcut for `zeroOrMore(...).separatedBy(...)`. 93 | */ 94 | @compileTimeOnly("Calls to `.*` must be inside `rule` macro") 95 | def *(separator: Rule0)(implicit 96 | l: Lifter[immutable.Seq, I @uncheckedVariance, O @uncheckedVariance] 97 | ): Rule[l.In, l.OptionalOut] = `n/a` 98 | 99 | /** Postfix shortcut for `oneOrMore`.
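 * E.g. (illustrative) `rule(anyOf("abc").+ ~ EOI)` is equivalent to `rule(oneOrMore(anyOf("abc")) ~ EOI)`.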
100 | */ 101 | @compileTimeOnly("Calls to `.+` must be inside `rule` macro") 102 | def +(implicit 103 | l: Lifter[immutable.Seq, I @uncheckedVariance, O @uncheckedVariance] 104 | ): Rule[l.In, l.StrictOut] with Repeated = `n/a` 105 | 106 | /** Postfix shortcut for `oneOrMore(...).separatedBy(...)`. 107 | */ 108 | @compileTimeOnly("Calls to `.+` must be inside `rule` macro") 109 | def +(separator: Rule0)(implicit 110 | l: Lifter[immutable.Seq, I @uncheckedVariance, O @uncheckedVariance] 111 | ): Rule[l.In, l.StrictOut] = `n/a` 112 | } 113 | 114 | /** THIS IS NOT PUBLIC API and might become hidden in future. Use only if you know what you are doing! 115 | */ 116 | object Rule extends Rule0 with RuleRunnable 117 | 118 | abstract class RuleDSL extends RuleDSLBasics with RuleDSLCombinators with RuleDSLActions 119 | 120 | // phantom type for WithSeparatedBy pimp 121 | trait Repeated 122 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala/org/parboiled2/RuleDSLActions.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import scala.annotation.compileTimeOnly 20 | import org.parboiled2.support.hlist.ops.hlist.Prepend 21 | import org.parboiled2.support.* 22 | import org.parboiled2.support.hlist.* 23 | 24 | trait RuleDSLActions { 25 | 26 | /** Pushes the input text matched by its inner rule onto the value stack 27 | * after its inner rule has been run successfully (and only then). 28 | */ 29 | @compileTimeOnly("Calls to `capture` must be inside `rule` macro") 30 | def capture[I <: HList, O <: HList](r: Rule[I, O])(implicit p: Prepend[O, String :: HNil]): Rule[I, p.Out] = `n/a` 31 | 32 | /** Implements a semantic predicate. If the argument expression evaluates to `true` the created 33 | * rule matches, otherwise it doesn't. 34 | */ 35 | @compileTimeOnly("Calls to `test` must be inside `rule` macro") 36 | def test(condition: Boolean): Rule0 = `n/a` 37 | 38 | /** Runs the given block / expression / action function. 39 | * A `run` rule can have several shapes, depending on its argument type. If the `arg` evaluates to 40 | * 41 | * - a rule (i.e. has type `R <: Rule[_, _]`), the result type of `run` is this rule's type (i.e. `R`) and the 42 | * produced rule is immediately executed. 43 | * 44 | * - a function with 1 to 5 parameters, these parameters are mapped against the top of the value stack, popped 45 | * and the function executed. Thereby the function behaves just like an action function for the `~>` operator, 46 | * i.e. if it produces a Unit value this result is simply dropped. HList results are pushed onto the value stack 47 | * (all their elements individually), rule results are immediately executed and other result values are pushed 48 | * onto the value stack as a single element.
49 | * 50 | * - a function with one HList parameter, the behavior is similar to the previous case with the difference that the 51 | * elements of this parameter HList are mapped against the value stack top. This allows for consumption of an 52 | * arbitrary number of value stack elements. (Note: This feature of ``run`` is not currently implemented.) 53 | * 54 | * - any other value, the result type of `run` is an always succeeding `Rule0`. 55 | * 56 | * NOTE: Even though the block is not a call-by-name parameter, it will be executed 57 | * for every rule application anew! (Since the expression is directly transplanted 58 | * into the rule method by the `rule` macro.) 59 | */ 60 | @compileTimeOnly("Calls to `run` must be inside `rule` macro") 61 | def run[T](arg: T)(implicit rr: RunResult[T]): rr.Out = `n/a` 62 | 63 | /** Pushes the given value onto the value stack. 64 | * - if `T` is `Unit` nothing is pushed (i.e. `push` with a block/expression evaluating to `Unit` is identical to `run`) 65 | * - if `T <: HList` all values of the HList are pushed as individual elements 66 | * - otherwise a single value of type `T` is pushed. 67 | */ 68 | @compileTimeOnly("Calls to `push` must be inside `rule` macro") 69 | def push[T](value: T)(implicit h: HListable[T]): RuleN[h.Out] = `n/a` 70 | 71 | /** Drops one or more values from the top of the value stack. 72 | * E.g. `drop[Int]` will drop the top ``Int`` value and `drop[Int :: String :: HNil]` will drop the top two values, 73 | * which must be an ``Int`` underneath a ``String`` (the string being the top stack element). 74 | */ 75 | @compileTimeOnly("Calls to `drop` must be inside `rule` macro") 76 | def drop[T](implicit h: HListable[T]): PopRule[h.Out] = `n/a` 77 | 78 | @compileTimeOnly("Calls to `rule2ActionOperator` must be inside `rule` macro") 79 | implicit def rule2ActionOperator[I <: HList, O <: HList](r: Rule[I, O])(implicit 80 | ops: ActionOps[I, O] 81 | ): ActionOperator[I, O, ops.Out] = `n/a` 82 | 83 | sealed trait ActionOperator[I <: HList, O <: HList, Ops] { 84 | def ~> : Ops 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala/org/parboiled2/RuleDSLBasics.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import scala.annotation.compileTimeOnly 20 | import org.parboiled2.support.* 21 | import org.parboiled2.support.hlist.HList 22 | 23 | trait RuleDSLBasics { 24 | 25 | /** Matches the given single character. 26 | */ 27 | @compileTimeOnly("Calls to `ch` must be inside `rule` macro") 28 | implicit def ch(c: Char): Rule0 = `n/a` 29 | 30 | /** Matches the given string of characters.
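 * E.g. `rule("ab" ~ EOI)` matches the input "ab" but not "a" or "abc" (see `BasicSpec` below).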
31 | */ 32 | @compileTimeOnly("Calls to `str` must be inside `rule` macro") 33 | implicit def str(s: String): Rule0 = `n/a` 34 | 35 | /** Matches any (single) character matched by the given `CharPredicate`. 36 | */ 37 | @compileTimeOnly("Calls to `predicate` must be inside `rule` macro") 38 | implicit def predicate(p: CharPredicate): Rule0 = `n/a` 39 | 40 | /** Matches any of the given map's keys and pushes the respective value upon 41 | * a successful match. 42 | */ 43 | @compileTimeOnly("Calls to `valueMap` must be inside `rule` macro") 44 | implicit def valueMap[T](m: Map[String, T])(implicit h: HListable[T]): RuleN[h.Out] = `n/a` 45 | 46 | /** Matches any of the given map's keys and pushes the respective value upon 47 | * a successful match. 48 | * 49 | * @param ignoreCase a flag indicating whether the case of the map keys should be ignored 50 | */ 51 | @compileTimeOnly("Calls to `valueMap` must be inside `rule` macro") 52 | def valueMap[T](m: Map[String, T], ignoreCase: Boolean = false)(implicit h: HListable[T]): RuleN[h.Out] = `n/a` 53 | 54 | /** Matches any single one of the given characters. 55 | * 56 | * Note: This helper has O(n) runtime with n being the length of the given string. 57 | * If your string consists only of 7-bit ASCII chars, using a pre-allocated 58 | * [[CharPredicate]] will be more efficient. 59 | */ 60 | @compileTimeOnly("Calls to `anyOf` must be inside `rule` macro") 61 | def anyOf(chars: String): Rule0 = `n/a` 62 | 63 | /** Matches any single character except the ones in the given string and except EOI. 64 | * 65 | * Note: This helper has O(n) runtime with n being the length of the given string. 66 | * If your string consists only of 7-bit ASCII chars, using a pre-allocated 67 | * [[CharPredicate]] will be more efficient. 68 | */ 69 | @compileTimeOnly("Calls to `noneOf` must be inside `rule` macro") 70 | def noneOf(chars: String): Rule0 = `n/a` 71 | 72 | /** Matches the given single character case insensitively. 73 | * Note: the given character must be specified in lower-case! 74 | * This requirement is currently NOT enforced! 75 | */ 76 | @compileTimeOnly("Calls to `ignoreCase` must be inside `rule` macro") 77 | def ignoreCase(c: Char): Rule0 = `n/a` 78 | 79 | /** Matches the given string of characters case insensitively. 80 | * Note: the given string must be specified in all lower-case! 81 | * This requirement is currently NOT enforced! 82 | */ 83 | @compileTimeOnly("Calls to `ignoreCase` must be inside `rule` macro") 84 | def ignoreCase(s: String): Rule0 = `n/a` 85 | 86 | /** Matches any character except EOI. 87 | */ 88 | @compileTimeOnly("Calls to `ANY` must be inside `rule` macro") 89 | def ANY: Rule0 = `n/a` 90 | 91 | /** Matches the EOI (end-of-input) character. 92 | */ 93 | def EOI: Char = org.parboiled2.EOI 94 | 95 | /** Matches no character (i.e. doesn't cause the parser to make any progress) but always succeeds (as a rule). 96 | */ 97 | def MATCH: Rule0 = Rule 98 | 99 | /** A Rule0 that always fails. 100 | */ 101 | def MISMATCH0: Rule0 = MISMATCH 102 | 103 | /** A generic Rule that always fails. 104 | */ 105 | def MISMATCH[I <: HList, O <: HList]: Rule[I, O] = null 106 | 107 | /** A rule that always fails and causes the parser to immediately terminate the parsing run. 108 | * The resulting parse error only has a single trace with a single frame which holds the given error message. 109 | */ 110 | def fail(expected: String): Rule0 = `n/a` 111 | 112 | /** Fully generic variant of [[fail]].
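 * Use this where the always-failing alternative needs a specific (non-`Rule0`) rule type,
 * e.g. as the last alternative of a rule that is supposed to produce a value.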
113 | */ 114 | def failX[I <: HList, O <: HList](expected: String): Rule[I, O] = `n/a` 115 | 116 | @compileTimeOnly("Calls to `str2CharRangeSupport` must be inside `rule` macro") 117 | implicit def str2CharRangeSupport(s: String): CharRangeSupport = `n/a` 118 | 119 | sealed trait CharRangeSupport { 120 | def -(other: String): Rule0 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala/org/parboiled2/RuleDSLCombinators.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import scala.annotation.compileTimeOnly 20 | import scala.collection.immutable 21 | import org.parboiled2.support.* 22 | import org.parboiled2.support.hlist.* 23 | 24 | trait RuleDSLCombinators { 25 | 26 | /** Runs its inner rule and succeeds even if the inner rule doesn't. 27 | * Resulting rule type is 28 | * Rule0 if r == Rule0 29 | * Rule1[Option[T]] if r == Rule1[T] 30 | * Rule[I, O] if r == Rule[I, O <: I] // so called "reduction", which leaves the value stack unchanged on a type level 31 | */ 32 | @compileTimeOnly("Calls to `optional` must be inside `rule` macro") 33 | def optional[I <: HList, O <: HList](r: Rule[I, O])(implicit l: Lifter[Option, I, O]): Rule[l.In, l.OptionalOut] = 34 | `n/a` 35 | 36 | /** Runs its inner rule until it fails, always succeeds. 37 | * Resulting rule type is 38 | * Rule0 if r == Rule0 39 | * Rule1[Seq[T]] if r == Rule1[T] 40 | * Rule[I, O] if r == Rule[I, O <: I] // so called "reduction", which leaves the value stack unchanged on a type level 41 | */ 42 | @compileTimeOnly("Calls to `zeroOrMore` must be inside `rule` macro") 43 | def zeroOrMore[I <: HList, O <: HList](r: Rule[I, O])(implicit 44 | l: Lifter[immutable.Seq, I, O] 45 | ): Rule[l.In, l.OptionalOut] with Repeated = `n/a` 46 | 47 | /** Runs its inner rule until it fails, succeeds if its inner rule succeeded at least once. 48 | * Resulting rule type is 49 | * Rule0 if r == Rule0 50 | * Rule1[Seq[T]] if r == Rule1[T] 51 | * Rule[I, O] if r == Rule[I, O <: I] // so called "reduction", which leaves the value stack unchanged on a type level 52 | */ 53 | @compileTimeOnly("Calls to `oneOrMore` must be inside `rule` macro") 54 | def oneOrMore[I <: HList, O <: HList](r: Rule[I, O])(implicit 55 | l: Lifter[immutable.Seq, I, O] 56 | ): Rule[l.In, l.StrictOut] with Repeated = `n/a` 57 | 58 | /** Runs its inner rule but resets the parser (cursor and value stack) afterwards, 59 | * succeeds only if its inner rule succeeded. 60 | */ 61 | @compileTimeOnly("Calls to `&` must be inside `rule` macro") 62 | def &(r: Rule[?, ?]): Rule0 = `n/a` 63 | 64 | /** Marks a rule as "undividable" from an error reporting perspective. 65 | * The parser will never report errors *inside* of the marked rule. 
66 | * Rather, if the rule mismatches, the error will be reported at the 67 | * very beginning of the attempted rule match. 68 | */ 69 | @compileTimeOnly("Calls to `atomic` must be inside `rule` macro") 70 | def atomic[I <: HList, O <: HList](r: Rule[I, O]): Rule[I, O] = `n/a` 71 | 72 | /** Marks a rule as "quiet" from an error reporting perspective. 73 | * Quiet rules only show up in error rule traces if no "unquiet" rules match up to the error location. 74 | * This marker is frequently used for low-level syntax rules (like whitespace or comments) that might be matched 75 | * essentially everywhere and are therefore not helpful when appearing in the "expected" set of an error report. 76 | */ 77 | @compileTimeOnly("Calls to `quiet` must be inside `rule` macro") 78 | def quiet[I <: HList, O <: HList](r: Rule[I, O]): Rule[I, O] = `n/a` 79 | 80 | /** Allows creation of a sub-parser and running of one of its rules as part of the current parsing process. 81 | * The sub-parser will start parsing at the current input position and the outer parser (this parser) 82 | * will continue where the sub-parser stopped. 83 | */ 84 | @compileTimeOnly("Calls to `runSubParser` must be inside `rule` macro") 85 | def runSubParser[I <: HList, O <: HList](f: ParserInput => Rule[I, O]): Rule[I, O] = `n/a` 86 | 87 | @compileTimeOnly("Calls to `int2NTimes` must be inside `rule` macro") 88 | implicit def int2NTimes(i: Int): NTimes = `n/a` 89 | 90 | @compileTimeOnly("Calls to `range2NTimes` must be inside `rule` macro") 91 | implicit def range2NTimes(range: Range): NTimes = `n/a` 92 | 93 | sealed trait NTimes { 94 | 95 | /** Repeats the given sub rule `r` the given number of times. 96 | * Both bounds of the range must be positive and the upper bound must be >= the lower bound. 97 | * If the upper bound is zero, the rule is equivalent to `MATCH`. 98 | * 99 | * Resulting rule type is 100 | * Rule0 if r == Rule0 101 | * Rule1[Seq[T]] if r == Rule1[T] 102 | * Rule[I, O] if r == Rule[I, O <: I] // so called "reduction", which leaves the value stack unchanged on a type level 103 | */ 104 | @compileTimeOnly("Calls to `times` must be inside `rule` macro") 105 | def times[I <: HList, O <: HList](r: Rule[I, O])(implicit 106 | s: Lifter[immutable.Seq, I, O] 107 | ): Rule[s.In, s.StrictOut] with Repeated 108 | } 109 | 110 | @compileTimeOnly("Calls to `rule2WithSeparatedBy` constructor must be inside `rule` macro") 111 | implicit def rule2WithSeparatedBy[I <: HList, O <: HList](r: Rule[I, O] with Repeated): WithSeparatedBy[I, O] = `n/a` 112 | 113 | trait WithSeparatedBy[I <: HList, O <: HList] { 114 | def separatedBy(separator: Rule0): Rule[I, O] = `n/a` 115 | } 116 | } 117 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala/org/parboiled2/package.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License.
15 | */ 16 | 17 | package org 18 | 19 | import org.parboiled2.support.hlist.* 20 | import java.nio.charset.Charset 21 | 22 | package object parboiled2 { 23 | 24 | type Rule0 = RuleN[HNil] 25 | type Rule1[+T] = RuleN[T :: HNil] 26 | type Rule2[+A, +B] = RuleN[A :: B :: HNil] 27 | type RuleN[+L <: HList] = Rule[HNil, L] 28 | type PopRule[-L <: HList] = Rule[L, HNil] 29 | 30 | val EOI = '\uFFFF' 31 | 32 | val UTF8 = Charset.forName("UTF-8") 33 | val `ISO-8859-1` = Charset.forName("ISO-8859-1") 34 | 35 | val EmptyArray = Array.empty[Any] 36 | } 37 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala/org/parboiled2/support/ActionOpsSupport.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2.support 18 | 19 | import org.parboiled2.support.hlist.* 20 | import org.parboiled2.Rule 21 | import org.parboiled2.support.hlist.ops.hlist.Prepend 22 | 23 | /* 24 | * The main ActionOps boilerplate is generated by a custom SBT sourceGenerator. 25 | * This file only contains support types. 26 | */ 27 | 28 | // we want to support the "short case class notation" `... 
~> Foo` 29 | // unfortunately the Tree for the function argument to the `apply` overloads above does *not* allow us to inspect the 30 | // function type which is why we capture it separately with this helper type 31 | sealed trait FCapture[T] 32 | 33 | object FCapture { 34 | implicit def apply[T]: FCapture[T] = `n/a` 35 | } 36 | 37 | // builds `In` and `Out` types according to this logic: 38 | // if (R == Unit) 39 | // In = I, Out = L 40 | // else if (R <: HList) 41 | // In = I, Out = L ::: R 42 | // else if (R <: Rule[I2, O2]) 43 | // In = TailSwitch[I2, L, I], Out = TailSwitch[L, I2, O2] 44 | // else 45 | // In = I, Out = L ::: R :: HNil 46 | sealed trait Join[I <: HList, L <: HList, R] { 47 | type In <: HList 48 | type Out <: HList 49 | } 50 | 51 | object Join extends LowPrioJoin { 52 | 53 | implicit def forUnit[I <: HList, L <: HList]: Aux[I, L, Unit, I, L] = `n/a` 54 | 55 | implicit def forHList[I <: HList, L <: HList, R <: HList, O <: HList](implicit 56 | x: Prepend.Aux[L, R, O] 57 | ): Aux[I, L, R, I, O] = `n/a` 58 | 59 | implicit def forRule[I <: HList, O <: HList, I2 <: HList, O2 <: HList, In <: HList, Out <: HList](implicit 60 | i: TailSwitch.Aux[I2, I2, O, O, I, HNil, In], 61 | o: TailSwitch.Aux[O, O, I2, I2, O2, HNil, Out] 62 | ): Aux[I, O, Rule[I2, O2], In, Out] = `n/a` 63 | } 64 | 65 | sealed abstract class LowPrioJoin { 66 | 67 | type Aux[I <: HList, L <: HList, R, In0 <: HList, Out0 <: HList] = 68 | Join[I, L, R] { type In = In0; type Out = Out0 } 69 | 70 | implicit def forAny[I <: HList, L <: HList, R, In <: HList, Out <: HList](implicit 71 | x: Aux[I, L, R :: HNil, In, Out] 72 | ): Aux[I, L, R, In, Out] = `n/a` 73 | } 74 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala/org/parboiled2/support/Lifter.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2.support 18 | 19 | import scala.annotation.implicitNotFound 20 | import org.parboiled2.support.hlist.* 21 | 22 | @implicitNotFound( 23 | "The `optional`, `zeroOrMore`, `oneOrMore` and `times` modifiers " + "can only be used on rules of type `Rule0`, `Rule1[T]` and `Rule[I, O <: I]`!" 
24 | ) 25 | sealed trait Lifter[M[_], I <: HList, O <: HList] { 26 | type In <: HList 27 | type StrictOut <: HList 28 | type OptionalOut <: HList 29 | } 30 | 31 | object Lifter extends LowerPriorityLifter { 32 | 33 | implicit def forRule0[M[_]]: Lifter[M, HNil, HNil] { 34 | type In = HNil 35 | type StrictOut = HNil 36 | type OptionalOut = StrictOut 37 | } = `n/a` 38 | 39 | implicit def forRule1[M[_], T]: Lifter[M, HNil, T :: HNil] { 40 | type In = HNil 41 | type StrictOut = M[T] :: HNil 42 | type OptionalOut = StrictOut 43 | } = `n/a` 44 | } 45 | 46 | sealed abstract class LowerPriorityLifter { 47 | 48 | implicit def forReduction[M[_], L <: HList, R <: L]: Lifter[M, L, R] { 49 | type In = L 50 | type StrictOut = R 51 | type OptionalOut = L 52 | } = `n/a` 53 | } 54 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala/org/parboiled2/support/RunResult.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2.support 18 | 19 | import org.parboiled2.support.hlist.* 20 | import org.parboiled2.* 21 | 22 | // phantom type, only used for rule DSL typing 23 | sealed trait RunResult[T] { 24 | type Out <: RuleX 25 | } 26 | 27 | object RunResult { 28 | implicit def fromAux[T, Out0 <: RuleX](implicit aux: Aux[T, Out0]): RunResult[T] { type Out = Out0 } = `n/a` 29 | 30 | sealed trait Aux[T, Out] 31 | 32 | object Aux extends Aux1 { 33 | implicit def forRule[R <: RuleX]: Aux[R, R] = `n/a` 34 | // implicit def forFHList[I <: HList, R, In0 <: HList, Out0 <: HList](implicit x: JA[I, R, In0, Out0]): Aux[I => R, Rule[In0, Out0]] = `n/a` 35 | } 36 | 37 | abstract class Aux1 extends Aux2 { 38 | 39 | implicit def forF1[Z, R, In0 <: HList, Out0 <: HList](implicit 40 | x: JA[Z :: HNil, R, In0, Out0] 41 | ): Aux[Z => R, Rule[In0, Out0]] = `n/a` 42 | 43 | implicit def forF2[Y, Z, R, In0 <: HList, Out0 <: HList](implicit 44 | x: JA[Y :: Z :: HNil, R, In0, Out0] 45 | ): Aux[(Y, Z) => R, Rule[In0, Out0]] = `n/a` 46 | 47 | implicit def forF3[X, Y, Z, R, In0 <: HList, Out0 <: HList](implicit 48 | x: JA[X :: Y :: Z :: HNil, R, In0, Out0] 49 | ): Aux[(X, Y, Z) => R, Rule[In0, Out0]] = `n/a` 50 | 51 | implicit def forF4[W, X, Y, Z, R, In0 <: HList, Out0 <: HList](implicit 52 | x: JA[W :: X :: Y :: Z :: HNil, R, In0, Out0] 53 | ): Aux[(W, X, Y, Z) => R, Rule[In0, Out0]] = `n/a` 54 | 55 | implicit def forF5[V, W, X, Y, Z, R, In0 <: HList, Out0 <: HList](implicit 56 | x: JA[V :: W :: X :: Y :: Z :: HNil, R, In0, Out0] 57 | ): Aux[(V, W, X, Y, Z) => R, Rule[In0, Out0]] = `n/a` 58 | } 59 | 60 | abstract class Aux2 { 61 | protected type JA[I <: HList, R, In0 <: HList, Out0 <: HList] = Join.Aux[I, HNil, R, In0, Out0] 62 | implicit def forAny[T]: Aux[T, Rule0] = `n/a` 63 | } 64 | } 65 | -------------------------------------------------------------------------------- 
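As a concrete illustration of how the `RunResult.Aux` instances in RunResult.scala above drive the DSL, here is a hypothetical sketch (assuming parboiled2 on the classpath; the parser, its rules and the `lastDigit` field are made up) of the rule types that `run` produces for two common argument shapes:

import org.parboiled2._

// Hypothetical demo parser, not part of the repository.
class RunResultDemo(val input: ParserInput) extends Parser {
  var lastDigit: Int = -1

  // A plain (non-rule, non-function) block of type Unit: `forAny` applies and
  // `run` yields an always-succeeding Rule0 that just executes the side effect.
  def Reset: Rule0 =
    rule(run { lastDigit = -1 } ~ oneOrMore(CharPredicate.Digit))

  // A one-parameter function: `forF1` (via the Join computation) applies, the
  // topmost Int is popped off the value stack and passed to the function, so the
  // `run` rule has type Rule[Int :: HNil, HNil] and the whole rule is a Rule0.
  def Remember: Rule0 =
    rule(capture(CharPredicate.Digit) ~> ((s: String) => s.toInt) ~
      run((i: Int) => lastDigit = i))
}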
/parboiled-core/src/main/scala/org/parboiled2/support/Unpack.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2.support 18 | 19 | import org.parboiled2.support.hlist.* 20 | 21 | /** "Unpacks" an HList if it has only zero or one element(s). 22 | * Out = 23 | * Unit if L == HNil 24 | * T if L == T :: HNil 25 | * L otherwise 26 | * 27 | * You can `import Unpack.dontUnpack` if you'd like to circumvent this unpacking logic. 28 | */ 29 | sealed trait Unpack[L <: HList] { 30 | type Out 31 | def apply(hlist: L): Out 32 | } 33 | 34 | object Unpack extends AlternativeUnpacks { 35 | 36 | implicit def fromAux[L <: HList, Out0](implicit aux: Aux[L, Out0]): Unpack[L] { type Out = Out0 } = 37 | new Unpack[L] { 38 | type Out = Out0 39 | def apply(hlist: L) = aux(hlist) 40 | } 41 | 42 | sealed trait Aux[L <: HList, Out0] { 43 | def apply(hlist: L): Out0 44 | } 45 | 46 | implicit def hnil[L <: HNil]: Aux[L, Unit] = HNilUnpack.asInstanceOf[Aux[L, Unit]] 47 | 48 | implicit object HNilUnpack extends Aux[HNil, Unit] { 49 | def apply(hlist: HNil): Unit = () 50 | } 51 | 52 | implicit def single[T]: Aux[T :: HNil, T] = SingleUnpack.asInstanceOf[Aux[T :: HNil, T]] 53 | 54 | private object SingleUnpack extends Aux[Any :: HList, Any] { 55 | def apply(hlist: Any :: HList): Any = hlist.head 56 | } 57 | } 58 | 59 | sealed abstract class AlternativeUnpacks { 60 | 61 | /** Import if you'd like to *always* deliver the valueStack as an `HList` 62 | * at the end of the parsing run, even if it has only zero or one element(s). 63 | */ 64 | implicit def dontUnpack[L <: HList]: Unpack.Aux[L, L] = DontUnpack.asInstanceOf[Unpack.Aux[L, L]] 65 | 66 | private object DontUnpack extends Unpack.Aux[HList, HList] { 67 | def apply(hlist: HList): HList = hlist 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala/org/parboiled2/support/hlist/hlists.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2.support.hlist 18 | 19 | /** `HList` ADT base trait. 
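 * An `HList` is either the empty list `HNil` or a cons cell `::[H, T]`;
 * e.g. `1 :: "foo" :: HNil` has type `Int :: String :: HNil`.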
20 | * 21 | * @author Miles Sabin 22 | */ 23 | sealed trait HList extends Product with Serializable 24 | 25 | /** Non-empty `HList` element type. 26 | * 27 | * @author Miles Sabin 28 | */ 29 | final case class ::[+H, +T <: HList](head: H, tail: T) extends HList { 30 | 31 | override def toString: String = head match { 32 | case _: ::[?, ?] => s"($head) :: $tail" 33 | case _ => s"$head :: $tail" 34 | } 35 | } 36 | 37 | /** Empty `HList` element type. 38 | * 39 | * @author Miles Sabin 40 | */ 41 | sealed trait HNil extends HList { 42 | def ::[H](h: H): H :: HNil = new ::(h, this) 43 | override def toString = "HNil" 44 | } 45 | 46 | /** Empty `HList` value. 47 | * 48 | * @author Miles Sabin 49 | */ 50 | case object HNil extends HNil 51 | 52 | object HList { 53 | import org.parboiled2.support.hlist.syntax.HListOps 54 | 55 | def apply(): HNil.type = HNil 56 | 57 | def apply[T](t: T): T :: HNil = t :: HNil 58 | 59 | implicit def hlistOps[L <: HList](l: L): HListOps[L] = new HListOps(l) 60 | 61 | /** Convenience aliases for HList :: and List :: allowing them to be used together within match expressions. 62 | */ 63 | object ListCompat { 64 | val :: = scala.collection.immutable.:: 65 | val #: = org.parboiled2.support.hlist.:: 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala/org/parboiled2/support/hlist/ops/hlists.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2.support.hlist 18 | package ops 19 | 20 | object hlist { 21 | 22 | /** Dependent unary function type. */ 23 | trait DepFn1[T] { 24 | type Out 25 | def apply(t: T): Out 26 | } 27 | 28 | /** Dependent binary function type. */ 29 | trait DepFn2[T, U] { 30 | type Out 31 | def apply(t: T, u: U): Out 32 | } 33 | 34 | /** Type class supporting reversing this `HList`. 
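 * E.g. reversing an `Int :: String :: HNil` yields a `String :: Int :: HNil`.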
35 | * 36 | * @author Miles Sabin 37 | */ 38 | trait Reverse[L <: HList] extends DepFn1[L] with Serializable { type Out <: HList } 39 | 40 | object Reverse { 41 | def apply[L <: HList](implicit reverse: Reverse[L]): Aux[L, reverse.Out] = reverse 42 | 43 | type Aux[L <: HList, Out0 <: HList] = Reverse[L] { type Out = Out0 } 44 | 45 | implicit def reverse[L <: HList, Out0 <: HList](implicit reverse: Reverse0[HNil, L, Out0]): Aux[L, Out0] = 46 | new Reverse[L] { 47 | type Out = Out0 48 | def apply(l: L): Out = reverse(HNil, l) 49 | } 50 | 51 | trait Reverse0[Acc <: HList, L <: HList, Out <: HList] extends Serializable { 52 | def apply(acc: Acc, l: L): Out 53 | } 54 | 55 | object Reverse0 { 56 | 57 | implicit def hnilReverse[Out <: HList]: Reverse0[Out, HNil, Out] = 58 | new Reverse0[Out, HNil, Out] { 59 | def apply(acc: Out, l: HNil): Out = acc 60 | } 61 | 62 | implicit def hlistReverse[Acc <: HList, InH, InT <: HList, Out <: HList](implicit 63 | rt: Reverse0[InH :: Acc, InT, Out] 64 | ): Reverse0[Acc, InH :: InT, Out] = 65 | new Reverse0[Acc, InH :: InT, Out] { 66 | def apply(acc: Acc, l: InH :: InT): Out = rt(l.head :: acc, l.tail) 67 | } 68 | } 69 | } 70 | 71 | /** Type class supporting prepending to this `HList`. 72 | * 73 | * @author Miles Sabin 74 | */ 75 | trait Prepend[P <: HList, S <: HList] extends DepFn2[P, S] with Serializable { 76 | type Out <: HList 77 | } 78 | 79 | trait LowestPriorityPrepend { 80 | type Aux[P <: HList, S <: HList, Out0 <: HList] = Prepend[P, S] { type Out = Out0 } 81 | 82 | implicit def hlistPrepend[PH, PT <: HList, S <: HList, PtOut <: HList](implicit 83 | pt: Prepend.Aux[PT, S, PtOut] 84 | ): Prepend.Aux[PH :: PT, S, PH :: PtOut] = 85 | new Prepend[PH :: PT, S] { 86 | type Out = PH :: PtOut 87 | def apply(prefix: PH :: PT, suffix: S): Out = prefix.head :: pt(prefix.tail, suffix) 88 | } 89 | } 90 | 91 | trait LowPriorityPrepend extends LowestPriorityPrepend { 92 | override type Aux[P <: HList, S <: HList, Out0 <: HList] = Prepend[P, S] { type Out = Out0 } 93 | 94 | implicit def hnilPrepend0[P <: HList, S >: HNil.type <: HNil]: Aux[P, S, P] = 95 | new Prepend[P, S] { 96 | type Out = P 97 | def apply(prefix: P, suffix: S): P = prefix 98 | } 99 | } 100 | 101 | object Prepend extends LowPriorityPrepend { 102 | def apply[P <: HList, S <: HList](implicit prepend: Prepend[P, S]): Aux[P, S, prepend.Out] = prepend 103 | 104 | implicit def hnilPrepend1[P >: HNil.type <: HNil, S <: HList]: Aux[P, S, S] = 105 | new Prepend[P, S] { 106 | type Out = S 107 | def apply(prefix: P, suffix: S): S = suffix 108 | } 109 | } 110 | 111 | /** Type class supporting reverse prepending to this `HList`. 
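 * E.g. reverse-prepending an `Int :: String :: HNil` to a `Boolean :: HNil`
 * yields a `String :: Int :: Boolean :: HNil`.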
112 | * 113 | * @author Miles Sabin 114 | */ 115 | trait ReversePrepend[P <: HList, S <: HList] extends DepFn2[P, S] with Serializable { type Out <: HList } 116 | 117 | trait LowPriorityReversePrepend { 118 | type Aux[P <: HList, S <: HList, Out0 <: HList] = ReversePrepend[P, S] { type Out = Out0 } 119 | 120 | implicit def hnilReversePrepend0[P <: HList, S <: HNil](implicit rv: Reverse[P]): Aux[P, S, rv.Out] = 121 | new ReversePrepend[P, S] { 122 | type Out = rv.Out 123 | def apply(prefix: P, suffix: S) = prefix.reverse 124 | } 125 | } 126 | 127 | object ReversePrepend extends LowPriorityReversePrepend { 128 | def apply[P <: HList, S <: HList](implicit prepend: ReversePrepend[P, S]): Aux[P, S, prepend.Out] = prepend 129 | 130 | implicit def hnilReversePrepend1[P <: HNil, S <: HList]: Aux[P, S, S] = 131 | new ReversePrepend[P, S] { 132 | type Out = S 133 | def apply(prefix: P, suffix: S) = suffix 134 | } 135 | 136 | implicit def hlistReversePrepend[PH, PT <: HList, S <: HList](implicit 137 | rpt: ReversePrepend[PT, PH :: S] 138 | ): Aux[PH :: PT, S, rpt.Out] = 139 | new ReversePrepend[PH :: PT, S] { 140 | type Out = rpt.Out 141 | def apply(prefix: PH :: PT, suffix: S): Out = rpt(prefix.tail, prefix.head :: suffix) 142 | } 143 | } 144 | } 145 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala/org/parboiled2/support/hlist/syntax/hlists.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2.support.hlist 18 | package syntax 19 | 20 | import org.parboiled2.support.hlist.ops.hlist.Reverse 21 | 22 | /** Carrier for `HList` operations. 23 | * 24 | * These methods are implemented here and pimped onto the minimal `HList` types to avoid issues that would otherwise be 25 | * caused by the covariance of `::[H, T]`. 26 | * 27 | * @author Miles Sabin 28 | */ 29 | final class HListOps[L <: HList](l: L) extends Serializable { 30 | 31 | /** Prepend the argument element to this `HList`. 32 | */ 33 | def ::[H](h: H): H :: L = org.parboiled2.support.hlist.::(h, l) 34 | 35 | /** Reverses this `HList`. 36 | */ 37 | def reverse(implicit reverse: Reverse[L]): reverse.Out = reverse(l) 38 | } 39 | -------------------------------------------------------------------------------- /parboiled-core/src/main/scala/org/parboiled2/support/package.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | package object support { 20 | private[parboiled2] def `n/a` = throw new IllegalStateException("Untranslated compile-time only call") 21 | } 22 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/BasicSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import utest.{TestableString as _, *} 20 | 21 | object BasicSpec extends TestParserSpec { 22 | 23 | val tests = Tests { 24 | 25 | import utest.TestableString 26 | 27 | "The Parser should correctly recognize/reject input for" - { 28 | 29 | "simple char literals" - new TestParser0 { 30 | def targetRule = rule('x') 31 | "x" must beMatched 32 | "y" must beMismatched 33 | "" must beMismatched 34 | } 35 | 36 | "a simple char `val`" - new TestParser0 { 37 | val c = 'x' 38 | def targetRule = rule(c) 39 | "x" must beMatched 40 | "y" must beMismatched 41 | "" must beMismatched 42 | } 43 | 44 | "a simple char `def`" - new TestParser0 { 45 | def c = 'x' 46 | def targetRule = rule(c) 47 | "x" must beMatched 48 | "y" must beMismatched 49 | "" must beMismatched 50 | } 51 | 52 | "simple string literals" - new TestParser0 { 53 | def targetRule = rule("ab" ~ EOI) 54 | "" must beMismatched 55 | "a" must beMismatched 56 | "ab" must beMatched 57 | "abc" must beMismatched 58 | } 59 | 60 | "a simple string `val`" - new TestParser0 { 61 | val s = "ab" 62 | def targetRule = rule(s ~ EOI) 63 | "" must beMismatched 64 | "a" must beMismatched 65 | "ab" must beMatched 66 | "abc" must beMismatched 67 | } 68 | 69 | "a simple string `def`" - new TestParser0 { 70 | def s = "ab" 71 | def targetRule = rule(s ~ EOI) 72 | "" must beMismatched 73 | "a" must beMismatched 74 | "ab" must beMatched 75 | "abc" must beMismatched 76 | } 77 | 78 | "a CharPredicate" - new TestParser0 { 79 | def targetRule = rule(CharPredicate.Digit) 80 | "0" must beMatched 81 | "8" must beMatched 82 | "x" must beMismatched 83 | "" must beMismatched 84 | } 85 | 86 | "anyOf" - new TestParser0 { 87 | def targetRule = rule(anyOf("abc") ~ EOI) 88 | "" must beMismatched 89 | "a" must beMatched 90 | "b" must beMatched 91 | "c" must beMatched 92 | "d" must beMismatched 93 | "ab" must beMismatched 94 | } 95 | 96 | "noneOf" - new TestParser0 { 97 | def targetRule = rule(noneOf("abc") ~ EOI) 98 | "" must beMismatched 99 | "a" 
must beMismatched 100 | "b" must beMismatched 101 | "c" must beMismatched 102 | "d" must beMatched 103 | "ab" must beMismatched 104 | } 105 | 106 | "ignoreCase(char)" - new TestParser0 { 107 | def targetRule = rule(ignoreCase('x') ~ EOI) 108 | "" must beMismatched 109 | "x" must beMatched 110 | "X" must beMatched 111 | "y" must beMismatched 112 | } 113 | 114 | "ignoreCase(string)" - new TestParser0 { 115 | def targetRule = rule(ignoreCase("ab") ~ EOI) 116 | "" must beMismatched 117 | "a" must beMismatched 118 | "ab" must beMatched 119 | "Ab" must beMatched 120 | "aB" must beMatched 121 | "abc" must beMismatched 122 | } 123 | 124 | "ANY" - new TestParser0 { 125 | def targetRule = rule(ANY) 126 | "a" must beMatched 127 | "Ж" must beMatched 128 | "" must beMismatched 129 | } 130 | 131 | "EOI" - new TestParser0 { 132 | def targetRule = rule(EOI) 133 | "" must beMatched 134 | "x" must beMismatched 135 | } 136 | 137 | "character ranges" - new TestParser0 { 138 | // shadow utests implicit extension on Strings which collides with our `str2CharRangeSupport` 139 | override def TestableString: Rule0 = rule(("1" - "5") ~ EOI) 140 | def targetRule = TestableString 141 | 142 | "1" must beMatched 143 | "3" must beMatched 144 | "5" must beMatched 145 | "" must beMismatched 146 | "0" must beMismatched 147 | "a" must beMismatched 148 | "8" must beMismatched 149 | } 150 | 151 | "MATCH" - new TestParser0 { 152 | def targetRule = rule(MATCH ~ EOI) 153 | "" must beMatched 154 | "x" must beMismatched 155 | } 156 | 157 | "called rules" - new TestParser0 { 158 | def targetRule = { 159 | def free() = rule("-free") 160 | rule(foo ~ bar(42) ~ baz("", 1337) ~ typed[String] ~ free() ~ EOI) 161 | } 162 | def foo = rule("foo") 163 | def bar(i: Int) = rule("-bar" ~ i.toString) 164 | def baz(s: String, i: Int) = rule("-baz" ~ s ~ i.toString) 165 | def typed[S <: String] = rule(MATCH) 166 | "foo-bar42-baz1337-free" must beMatched 167 | } 168 | 169 | "Map[String, T]" - new TestParser1[Int] { 170 | val colors = Map("red" -> 1, "green" -> 2, "blue" -> 3) 171 | def targetRule = rule(colors ~ EOI) 172 | "red" must beMatchedWith(1) 173 | "green" must beMatchedWith(2) 174 | "blue" must beMatchedWith(3) 175 | "black" must beMismatched 176 | } 177 | 178 | "Map[String, T] that ignores case" - new TestParser1[Int] { 179 | val colors = Map("red" -> 1, "green" -> 2, "blue" -> 3) 180 | def targetRule = rule(valueMap(colors, ignoreCase = true) ~ EOI) 181 | "ReD" must beMatchedWith(1) 182 | "grEen" must beMatchedWith(2) 183 | "blUe" must beMatchedWith(3) 184 | "black" must beMismatched 185 | } 186 | 187 | "Map[String, T] with keys that prefix each other" - new TestParser1[Int] { 188 | val map = Map("a" -> 1, "ab" -> 2, "abc" -> 3, "abcd" -> 4, "abcde" -> 5, "abcdef" -> 6) 189 | def targetRule = rule(map ~ EOI) 190 | "a" must beMatchedWith(1) 191 | "ab" must beMatchedWith(2) 192 | "abc" must beMatchedWith(3) 193 | "abcd" must beMatchedWith(4) 194 | "abcde" must beMatchedWith(5) 195 | "abcdef" must beMatchedWith(6) 196 | } 197 | } 198 | 199 | "The Parser" - { 200 | "disallow compilation of an illegal character range" - new Parser { 201 | // shadow utests implicit extension on Strings which collides with our `str2CharRangeSupport` 202 | def TestableString = ParserInput.Empty 203 | def input = TestableString 204 | 205 | compileError("""rule { "00" - "5" }""").msg ==> "lower bound must be a single char string" 206 | compileError("""rule { "0" - "55" }""").msg ==> "upper bound must be a single char string" 207 | compileError("""rule { "" - "5" 
}""").msg ==> "lower bound must be a single char string" 208 | compileError("""rule { "0" - "" }""").msg ==> "upper bound must be a single char string" 209 | compileError("""rule { "5" - "1" }""").msg ==> "lower bound must not be > upper bound" 210 | } 211 | } 212 | } 213 | } 214 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/CalculatorSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import utest.* 20 | 21 | object CalculatorSpec extends TestParserSpec { 22 | 23 | // format: OFF 24 | abstract class Calculator extends TestParser1[Int] { 25 | def InputLine = rule (Expression ~ EOI) 26 | 27 | def Expression: Rule1[Int] = rule { 28 | Term ~ zeroOrMore( 29 | '+' ~ Term ~> ((_: Int) + _) 30 | | '-' ~ Term ~> ((_: Int) - _)) 31 | } 32 | 33 | def Term = rule { 34 | Factor ~ zeroOrMore( 35 | '*' ~ Factor ~> ((_: Int) * _) 36 | | '/' ~ Factor ~> ((_: Int) / _)) 37 | } 38 | 39 | def Factor = rule (Number | Parens) 40 | 41 | def Parens = rule ('(' ~ Expression ~ ')') 42 | 43 | def Number = rule (capture(Digits) ~> (_.toInt)) 44 | 45 | def Digits = rule (oneOrMore(CharPredicate.Digit)) 46 | } 47 | // format: ON 48 | 49 | val tests = Tests { 50 | 51 | "The Calculator parser" - { 52 | "successfully evaluate simple calculator expression" - new Calculator { 53 | def targetRule = InputLine 54 | 55 | "1" must beMatchedWith(1) 56 | "1+2" must beMatchedWith(3) 57 | "1+2*3" must beMatchedWith(7) 58 | "1*2+3" must beMatchedWith(5) 59 | "1*(2+3)" must beMatchedWith(5) 60 | "10*((2+3))" must beMatchedWith(50) 61 | "(2+3)*(80-60)" must beMatchedWith(100) 62 | "2*8/2+16/16-16/(1+3)" must beMatchedWith(5) 63 | 64 | "*1" must beMismatchedWithErrorMsg( 65 | """Invalid input '*', expected Number or Parens (line 1, column 1): 66 | |*1 67 | |^ 68 | | 69 | |2 rules mismatched at error location: 70 | | /InputLine/ /Expression/ /Term/ /Factor/ | /Number/ capture /Digits/ + / Digit: 71 | | /InputLine/ /Expression/ /Term/ /Factor/ | /Parens/ '(' 72 | |""" 73 | ) 74 | 75 | "()" must beMismatchedWithErrorMsg( 76 | """Invalid input ')', expected Number or Parens (line 1, column 2): 77 | |() 78 | | ^ 79 | | 80 | |2 rules mismatched at error location: 81 | | ...xpression/ /Term/ /Factor/ |:-1 /Parens/ /Expression/ /Term/ /Factor/ | /Number/ capture /Digits/ + / Digit: 82 | | /InputLine/ /Expression/ /Term/ /Factor/ |:-1 /Parens/ /Expression/ /Term/ /Factor/ | /Parens/ '(' 83 | |""" 84 | ) 85 | 86 | "1+2)" must beMismatchedWithErrorMsg( 87 | """Invalid input ')', expected Digit, '*', '/', '+', '-' or 'EOI' (line 1, column 4): 88 | |1+2) 89 | | ^ 90 | | 91 | |6 rules mismatched at error location: 92 | | /InputLine/ /Expression/ *:-2 / |:-2 /Term/ /Factor/ |:-1 /Number/ capture:-1 /Digits/ +:-1 / Digit: 93 | | /InputLine/ 
/Expression/ *:-2 / |:-2 /Term/ * / | / '*' 94 | | /InputLine/ /Expression/ *:-2 / |:-2 /Term/ * / | / '/' 95 | | /InputLine/ /Expression/ *:-2 / | / '+' 96 | | /InputLine/ /Expression/ *:-2 / | / '-' 97 | | /InputLine/ 'EOI' 98 | |""" 99 | ) 100 | 101 | "(1+)2" must beMismatchedWithErrorMsg( 102 | """Invalid input ')', expected Number or Parens (line 1, column 4): 103 | |(1+)2 104 | | ^ 105 | | 106 | |2 rules mismatched at error location: 107 | | ...Term/ /Factor/ |:-3 /Parens/ /Expression/ *:-1 / |:-1 /Term/ /Factor/ | /Number/ capture /Digits/ + / Digit: 108 | | /InputLine/ /Expression/ /Term/ /Factor/ |:-3 /Parens/ /Expression/ *:-1 / |:-1 /Term/ /Factor/ | /Parens/ '(' 109 | |""" 110 | ) 111 | } 112 | } 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/CharPredicateSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import utest.* 20 | 21 | object CharPredicateSpec extends TestSuite { 22 | 23 | val tests = Tests { 24 | "CharPredicates" - { 25 | 26 | "correctly mask characters" - { 27 | inspectMask(CharPredicate("4")) ==> "0010000000000000|0000000000000000" 28 | inspectMask(CharPredicate("a")) ==> "0000000000000000|0000000200000000" 29 | CharPredicate("&048z{~").toString ==> "CharPredicate.MaskBased(&048z{~)" 30 | show(CharPredicate("&048z{~")) ==> "&048z{~" 31 | } 32 | 33 | "support `testAny`" - { 34 | assert( 35 | CharPredicate("abc").matchesAny("0125!") == false, 36 | CharPredicate("abc").matchesAny("012c5!") 37 | ) 38 | } 39 | 40 | "support `indexOfFirstMatch`" - { 41 | CharPredicate("abc").indexOfFirstMatch("0125!") ==> -1 42 | CharPredicate("abc").indexOfFirstMatch("012c5!") ==> 3 43 | } 44 | 45 | "correctly support non-masked content" - { 46 | val colonSlashEOI = CharPredicate(':', '/', EOI) 47 | assert( 48 | colonSlashEOI(':'), 49 | colonSlashEOI('/'), 50 | colonSlashEOI(EOI), 51 | colonSlashEOI('x') == false 52 | ) 53 | } 54 | 55 | "be backed by a mask where possible" - { 56 | CharPredicate('1' to '9').toString ==> "CharPredicate.MaskBased(123456789)" 57 | (CharPredicate('1' to '3') ++ CharPredicate('5' to '8')).toString ==> "CharPredicate.MaskBased(1235678)" 58 | (CharPredicate('1' to '3') ++ "5678").toString ==> "CharPredicate.MaskBased(1235678)" 59 | (CharPredicate('1' to '6') -- CharPredicate('2' to '4')).toString ==> "CharPredicate.MaskBased(156)" 60 | (CharPredicate('1' to '6') -- "234").toString ==> "CharPredicate.MaskBased(156)" 61 | } 62 | "be backed by an array where possible" - { 63 | CharPredicate("abcäüö").toString ==> "CharPredicate.ArrayBased(abcäöü)" 64 | (CharPredicate("abcäüö") -- "äö").toString ==> "CharPredicate.ArrayBased(abcü)" 65 | } 66 | "be backed by a range where possible" - { 67 | CharPredicate( 68 | '1' to 'Ä' 69 | ).toString ==> 
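// Sketch of the three CharPredicate representations exercised by the assertions
// above; the value names are illustrative only.
import org.parboiled2.CharPredicate

val operators = CharPredicate("+-*/")               // 7-bit ASCII chars     => mask-based
val umlauts   = CharPredicate("äöü")                // few non-ASCII chars   => array-based
val cyrillic  = CharPredicate('\u0410' to '\u044F') // large non-ASCII range => range-based

assert(operators('+') && !operators('x'))
assert((operators ++ "%").toString.startsWith("CharPredicate.MaskBased"))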
"CharPredicate.RangeBased(start = 1, end = Ä, step = 1, inclusive = true)" 70 | } 71 | } 72 | } 73 | 74 | def show(pred: CharPredicate): String = { 75 | val chars = ('\u0000' to '\u0080').flatMap(c => Some(c) filter pred.apply).toArray 76 | new String(chars) 77 | } 78 | 79 | def inspectMask(pred: CharPredicate) = { 80 | val (lowMask, highMask) = pred match { 81 | case CharPredicate.MaskBased(a, b) => a -> b 82 | case _ => throw new IllegalStateException() 83 | } 84 | "%016x|%016x".format(lowMask, highMask) 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/CharUtilsSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import org.scalacheck.{Gen, Prop, Test} 20 | import org.scalacheck.util.Pretty 21 | import org.scalacheck.Prop.forAll 22 | import utest.* 23 | 24 | object CharUtilsSpec extends TestSuite with UTestScalaCheck { 25 | 26 | val hexChars = for (i <- Gen.choose(0, 15)) yield i -> Integer.toHexString(i).charAt(0) 27 | 28 | val tests = Tests { 29 | 30 | "CharUtils" - { 31 | "hexValue" - forAll(hexChars) { case (i, c) => 32 | CharUtils.hexValue(c) == i 33 | }.checkUTest() 34 | "numberOfHexDigits" - forAll { (l: Long) => 35 | CharUtils.numberOfHexDigits(l) == java.lang.Long.toHexString(l).length 36 | } 37 | .checkUTest() 38 | "upperHexString" - forAll { (l: Long) => 39 | CharUtils.upperHexString(l) == java.lang.Long.toHexString(l).toUpperCase 40 | } 41 | .checkUTest() 42 | "lowerHexString" - forAll((l: Long) => CharUtils.lowerHexString(l) == java.lang.Long.toHexString(l)) 43 | .checkUTest() 44 | "numberOfDecimalDigits" - forAll { (l: Long) => 45 | CharUtils.numberOfDecimalDigits(l) == java.lang.Long.toString(l).length 46 | }.checkUTest() 47 | "signedDecimalString" - forAll((l: Long) => CharUtils.signedDecimalString(l) == java.lang.Long.toString(l)) 48 | .checkUTest() 49 | } 50 | } 51 | } 52 | 53 | // from https://github.com/lihaoyi/utest/issues/2#issuecomment-67300735 54 | trait UTestScalaCheck { 55 | 56 | protected[this] object UTestReporter extends Test.TestCallback { 57 | private val prettyParams = Pretty.defaultParams 58 | 59 | override def onTestResult(name: String, res: org.scalacheck.Test.Result): Unit = { 60 | val scalaCheckResult = if (res.passed) "" else Pretty.pretty(res, prettyParams) 61 | assert(scalaCheckResult.isEmpty) 62 | } 63 | } 64 | 65 | implicit protected[this] class PropWrapper(prop: Prop) { 66 | 67 | def checkUTest(): Unit = 68 | prop.check(Test.Parameters.default.withTestCallback(UTestReporter)) 69 | } 70 | 71 | } 72 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/CompileDurationTest.scala: -------------------------------------------------------------------------------- 
1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import utest.* 20 | 21 | import org.parboiled2.support.hlist.{::, HNil} 22 | 23 | object CompileDurationTest extends TestParserSpec { 24 | 25 | val tests = Tests { 26 | 27 | "The Parser should compile" - { 28 | def combine6(a1: String, a2: String, a3: String, a4: String, a5: String, a6: String): String = 29 | a1 + a2 + a3 + a4 + a5 + a6 30 | 31 | "`~` combinator" - new TestParser1[String] { 32 | def basicRule: Rule[HNil, String :: HNil] = rule(capture("a")) 33 | def targetRule = rule( 34 | basicRule ~ basicRule ~ basicRule ~ basicRule ~ basicRule ~ basicRule ~> combine6 _ 35 | ) 36 | 37 | "" must beMismatched 38 | "aaaaaa" must beMatched 39 | "ac" must beMismatched 40 | "a" must beMismatched 41 | "b" must beMismatched 42 | } 43 | 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/CustomCharAndStringExpansionSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import utest.* 20 | 21 | object CustomCharAndStringExpansionSpec extends TestParserSpec { 22 | 23 | val tests = Tests { 24 | 25 | "The parser" - { 26 | 27 | "allow for custom char expansion" - new TestParser0 { 28 | implicit def chWithX(c: Char): Rule0 = 29 | if (c == EOI) rule(ch(EOI)) else rule(ch(c) ~ ch('x')) 30 | 31 | def targetRule = rule('a' ~ 'b' ~ EOI) 32 | 33 | "axbx" must beMatched 34 | "ab" must beMismatched 35 | } 36 | 37 | "allow for custom string expansion" - new TestParser0 { 38 | implicit def wspStr(s: String): Rule0 = 39 | rule { 40 | str(s) ~ zeroOrMore(' ') 41 | } 42 | 43 | def targetRule = rule("foo" ~ "bar" ~ EOI) 44 | 45 | "foobar" must beMatched 46 | "foo bar" must beMatched 47 | "foo" must beMismatched 48 | } 49 | } 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/CutSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import utest.* 20 | 21 | object CutSpec extends TestParserSpec { 22 | 23 | val tests = Tests { 24 | 25 | "The `~!~` (cut) operator" - { 26 | "work as expected" - new TestParser0 { 27 | def targetRule = rule(foo ~ EOI) 28 | def foo = rule("abc" | "a" ~!~ "de" | "axy") 29 | 30 | "abc" must beMatched 31 | "ade" must beMatched 32 | "axy" must beMismatched 33 | 34 | "axy" must beMismatchedWithErrorMsg("""Invalid input 'x', expected 'b' or 'd' (line 1, column 2): 35 | |axy 36 | | ^ 37 | | 38 | |2 rules mismatched at error location: 39 | | /targetRule/ /foo/ |:-1 / "abc":-1 / 'b' 40 | | /targetRule/ /foo/ |:-1 / cut:-1 / "de" / 'd' 41 | |""") 42 | } 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/DSLTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
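// Sketch of the whitespace-handling idiom from the custom string expansion spec
// above: an implicit conversion makes every string literal inside `rule` also
// consume trailing blanks. The parser and rule names are illustrative only.
import org.parboiled2.*

class KeyValueParser(val input: ParserInput) extends Parser {
  implicit def wspStr(s: String): Rule0 = rule(str(s) ~ zeroOrMore(' '))

  def KeyValue: Rule1[String] = rule("key" ~ "=" ~ capture(oneOrMore(CharPredicate.AlphaNum)) ~ EOI)
}

// both "key=value" and "key =  value" match, because each literal eats trailing blanks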
15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import org.parboiled2.support.hlist.* 20 | 21 | //// pure compile-time-only test 22 | class DSLTest(val input: ParserInput) extends Parser { 23 | 24 | def ZeroOrMoreReduction_Checked: Rule[String :: HNil, String :: HNil] = ZeroOrMoreReduction 25 | def ZeroOrMoreReduction = rule(zeroOrMore(capture("0" - "9") ~> ((x: String, y) => x + y))) 26 | 27 | def OptionalReduction_Checked: Rule[String :: HNil, String :: HNil] = OptionalReduction 28 | def OptionalReduction = rule(optional(capture("0" - "9") ~> ((x: String, y) => x + y))) 29 | 30 | def OpsTest1_Checked: RuleN[Int :: Boolean :: String :: Int :: Boolean :: Int :: Boolean :: Array[Char] :: HNil] = 31 | OpsTest1 32 | def OpsTest1 = rule(ComplexRule ~> (_.toCharArray)) 33 | 34 | def OpsTest2_Checked: RuleN[Int :: Boolean :: String :: Int :: Boolean :: Int :: HNil] = OpsTest2 35 | def OpsTest2 = rule(ComplexRule ~> ((_, s) => s.length) ~> (_ + _)) 36 | 37 | def ComplexRule_Checked: RuleN[Int :: Boolean :: String :: Int :: Boolean :: Int :: Boolean :: String :: HNil] = 38 | ComplexRule 39 | def ComplexRule = rule(capture(DigitAndBool) ~ DigitAndBool ~ capture(DigitAndBool)) 40 | 41 | def DigitAndBool_Checked: Rule2[Int, Boolean] = DigitAndBool 42 | def DigitAndBool = rule(Digit ~ Bool) 43 | 44 | def Bool_Checked: Rule1[Boolean] = Bool 45 | def Bool = rule(BoolTrue | BoolFalse) 46 | 47 | def BoolTrue_Checked: Rule1[Boolean] = BoolTrue 48 | def BoolTrue = rule(str("true") ~ push(true)) 49 | 50 | def BoolFalse_Checked: Rule1[Boolean] = BoolFalse 51 | def BoolFalse = rule(str("false") ~ push(false)) 52 | 53 | def Digits_Checked: Rule1[Seq[Int]] = Digits 54 | def Digits = rule(oneOrMore(Digit)) 55 | 56 | def DigitsOrEmpty_Checked: Rule1[Seq[Int]] = DigitsOrEmpty 57 | def DigitsOrEmpty = rule(zeroOrMore(Digit)) 58 | 59 | def Digit_Checked: Rule1[Int] = Digit 60 | def Digit = rule(capture("0" - "9") ~> (_.toInt)) 61 | 62 | def DigitOptional_Checked: Rule1[Option[Int]] = DigitOptional 63 | def DigitOptional = rule(optional(Digit)) 64 | } 65 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/DefaultArgumentsSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
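// Sketch of the type bookkeeping checked by DSLTest above: `~` concatenates the
// value-stack types of its operands, and `~>` collapses them through a function.
// `Person` and the rule names are illustrative only.
import org.parboiled2.*

case class Person(name: String, age: Int)

class PersonParser(val input: ParserInput) extends Parser {
  def Name: Rule1[String] = rule(capture(oneOrMore(CharPredicate.Alpha)))
  def Age: Rule1[Int]     = rule(capture(oneOrMore(CharPredicate.Digit)) ~> (_.toInt))

  // Rule1[String] ~ Rule0 ~ Rule1[Int] yields a Rule2[String, Int]...
  def NameAndAge: Rule2[String, Int] = rule(Name ~ ' ' ~ Age)

  // ...which `~>` with a (String, Int) => Person function turns into a Rule1[Person]
  def PersonRule: Rule1[Person] = rule(NameAndAge ~ EOI ~> Person.apply _)
}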
15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import utest.* 20 | 21 | import scala.util.Success 22 | 23 | object DefaultArgumentsSpec extends TestSuite { 24 | 25 | case class A(a: Int = 0, b: Int = 1) 26 | 27 | class Foo(val input: ParserInput) extends Parser { 28 | def Foo: Rule1[A] = rule("foo" ~ push(A(b = 2))) 29 | } 30 | 31 | val tests = Tests { 32 | "The `push` action" - { 33 | "properly handle default arguments" - { 34 | new Foo("foo").Foo.run() ==> Success(A(0, 2)) 35 | } 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/MetaRuleSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import utest.* 20 | 21 | object MetaRuleSpec extends TestParserSpec { 22 | 23 | val tests = Tests { 24 | 25 | "Rule transformation should be possible" - { 26 | 27 | "using vals and `run`" - new TestParser0 { 28 | val ab = () => rule("ab") 29 | val cd = () => rule("cd") 30 | def targetRule = rule(bracketed(ab) ~ bracketed(cd)) 31 | def bracketed(inner: () => Rule0) = rule('[' ~ inner() ~ ']') 32 | 33 | "[ab][cd]" must beMatched 34 | "abcd" must beMismatched 35 | } 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/ReductionResetSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import utest.* 20 | import org.parboiled2.support.hlist.* 21 | 22 | object ReductionResetSpec extends TestParserSpec { 23 | 24 | abstract class ReductionResetParser extends TestParser1[Int] { 25 | 26 | def Alternative: Rule1[Int] = 27 | rule { 28 | Digits ~ (String2Int ~ "." | String2Int) 29 | } 30 | 31 | def ZeroOrMore: Rule1[Int] = 32 | rule { 33 | Digits ~ zeroOrMore(String2Int ~ "." ~ Int2String) ~ String2Int 34 | } 35 | 36 | def OneOrMore: Rule1[Int] = 37 | rule { 38 | Digits ~ oneOrMore(String2Int ~ "." 
~ Int2String) ~ String2Int 39 | } 40 | 41 | def String2Int: Rule[String :: HNil, Int :: HNil] = 42 | rule { 43 | run((_: String).toInt) 44 | } 45 | 46 | def Int2String: Rule[Int :: HNil, String :: HNil] = 47 | rule { 48 | run((_: Int).toString) 49 | } 50 | 51 | def Digits: Rule1[String] = 52 | rule { 53 | capture(oneOrMore(CharPredicate.Digit)) 54 | } 55 | } 56 | 57 | val tests = Tests { 58 | 59 | "ReductionResetSpec" - { 60 | 61 | "reduction reset in `|`" - new ReductionResetParser { 62 | def targetRule = Alternative 63 | 64 | // re-enable after fixing 65 | // "123" must beMatchedWith(123) 66 | } 67 | 68 | "reduction reset in `zeroOrMore`" - new ReductionResetParser { 69 | def targetRule = ZeroOrMore 70 | 71 | // "123" must beMatchedWith(123) 72 | } 73 | 74 | "reduction reset in `oneOrMore`" - new ReductionResetParser { 75 | def targetRule = OneOrMore 76 | 77 | // "123." must beMatchedWith(123) 78 | } 79 | } 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/ReductionTypeSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import scala.reflect.ClassTag 20 | import utest.* 21 | 22 | object ReductionTypeSpec extends TestSuite { 23 | 24 | sealed trait Foo 25 | case object Foo1 extends Foo 26 | case class Foo2(lhs: Foo, rhs: Foo) extends Foo 27 | 28 | class FooParser(val input: ParserInput) extends Parser { 29 | def OneOrMoreExpr: Rule1[Foo2] = rule(foo1 ~ oneOrMore(foo1 ~> Foo2.apply _)) 30 | def ZeroOrMoreExpr: Rule1[Foo] = rule(foo1 ~ zeroOrMore(foo1 ~> Foo2.apply _)) 31 | def OptionalExpr: Rule1[Foo] = rule(foo1 ~ optional(foo1 ~> Foo2.apply _)) 32 | def TimesExpr: Rule1[Foo2] = rule(foo1 ~ 2.times(foo1 ~> Foo2.apply _)) 33 | 34 | def foo1: Rule1[Foo] = rule(push(Foo1)) 35 | } 36 | 37 | val tests = Tests { 38 | 39 | "Repeating combinators should properly compute their reduction result types" - { 40 | "OneOrMore" - { ruleTypeOf(_.OneOrMoreExpr) ==> classOf[Foo2] } 41 | "ZeroOrMore" - { ruleTypeOf(_.ZeroOrMoreExpr) ==> classOf[Foo] } 42 | "Optional" - { ruleTypeOf(_.OptionalExpr) ==> classOf[Foo] } 43 | "Times" - { ruleTypeOf(_.TimesExpr) ==> classOf[Foo2] } 44 | } 45 | } 46 | 47 | def ruleTypeOf[T](f: FooParser => Rule1[T])(implicit tag: ClassTag[T]) = tag.runtimeClass 48 | } 49 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/RunSubParserSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
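// Sketch of the reduction typing demonstrated by ReductionTypeSpec above: the
// explicit Rule1[Expr] annotations let a repeated reduction feed its own result
// back in, and `zeroOrMore` keeps the wider element type while `oneOrMore`
// would narrow it to Sum. Expr/Num/Sum and the rule names are illustrative only.
import org.parboiled2.*

sealed trait Expr
case class Num(value: Int)           extends Expr
case class Sum(lhs: Expr, rhs: Expr) extends Expr

class SumParser(val input: ParserInput) extends Parser {
  def Number: Rule1[Expr] = rule(capture(oneOrMore(CharPredicate.Digit)) ~> ((s: String) => Num(s.toInt)))

  // the inner rule reduces Expr :: HNil to Sum :: HNil; zeroOrMore keeps the result typed as Expr
  def Sums: Rule1[Expr] = rule(Number ~ zeroOrMore('+' ~ Number ~> Sum.apply _) ~ EOI)
}

// new SumParser("1+2+3").Sums.run() yields Success(Sum(Sum(Num(1), Num(2)), Num(3)))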
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import utest.* 20 | 21 | object RunSubParserSpec extends TestParserSpec { 22 | 23 | class SubParser(val input: ParserInput) extends Parser { 24 | 25 | def IntNumber = 26 | rule { 27 | capture(oneOrMore(CharPredicate.Digit)) ~> (_.toInt) 28 | } 29 | } 30 | 31 | abstract class ParserWithSubParser extends TestParser1[Int] { 32 | 33 | def InputLine = 34 | rule { 35 | oneOrMore(runSubParser(new SubParser(_).IntNumber)).separatedBy(',') ~ EOI ~> (_.sum) 36 | } 37 | } 38 | 39 | val tests = Tests { 40 | 41 | "`runSubParser` should" - { 42 | "work as expected" - new ParserWithSubParser { 43 | def targetRule = InputLine 44 | 45 | "12" must beMatchedWith(12) 46 | "43,8" must beMatchedWith(51) 47 | 48 | "1234,a" must beMismatchedWithErrorMsg( 49 | """Invalid input 'a', expected IntNumber (line 1, column 6): 50 | |1234,a 51 | | ^ 52 | | 53 | |1 rule mismatched at error location: 54 | | /InputLine/ +:-5 / runSubParser /IntNumber/ capture / + / Digit: 55 | |""" 56 | ) 57 | } 58 | } 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/RunningSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
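// Sketch of the runSubParser pattern shown above: delegate part of the input to
// a second parser that starts at the current cursor position. The parser names
// are illustrative only.
import org.parboiled2.*

class HexParser(val input: ParserInput) extends Parser {
  def Hex: Rule1[Int] = rule(capture(oneOrMore(CharPredicate.HexDigit)) ~> ((s: String) => Integer.parseInt(s, 16)))
}

class ColorParser(val input: ParserInput) extends Parser {
  // '#' is matched here, the hex digits are handed off to the sub parser
  def Color: Rule1[Int] = rule('#' ~ runSubParser(new HexParser(_).Hex) ~ EOI)
}

// new ColorParser("#ff").Color.run() yields Success(255)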
15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import scala.util.{Success, Try} 20 | import utest.* 21 | 22 | object RunningSpec extends TestSuite { 23 | 24 | class TestParser(val input: ParserInput) extends Parser { 25 | def A = rule('a' ~ B ~ EOI) 26 | def B = rule(oneOrMore('b')) 27 | def C(n: Int)(m: Int) = rule((n - m).times('c')) 28 | def D[S <: String] = rule('d') 29 | def go(): Try[Unit] = null 30 | } 31 | 32 | val tests = Tests { 33 | 34 | "Running a rule should support several notations" - { 35 | 36 | "parser.rule.run()" - { 37 | val p = new TestParser("abb") 38 | p.A.run() ==> Success(()) 39 | } 40 | 41 | "new Parser(...).rule.run()" - { 42 | new TestParser("abb").A.run() ==> Success(()) 43 | } 44 | 45 | "parser.rule(args).run()" - { 46 | val p = new TestParser("ccc") 47 | p.C(4)(1).run() ==> Success(()) 48 | } 49 | 50 | "parser.rule[targs].run()" - { 51 | val p = new TestParser("d") 52 | p.D[String].run() ==> Success(()) 53 | } 54 | 55 | "this.rule.run()" - { 56 | val p = new TestParser("b") { 57 | override def go() = B.run() 58 | } 59 | p.go() ==> Success(()) 60 | } 61 | 62 | "rule(B ~ EOI).run()" - { 63 | val p = new TestParser("bb") { 64 | override def go() = rule(B ~ EOI).run() 65 | } 66 | p.go() ==> Success(()) 67 | } 68 | 69 | "namedRule(B ~ EOI).run()" - { 70 | val p = new TestParser("bb") { 71 | override def go() = namedRule("règle")(B ~ EOI).run() 72 | } 73 | p.go() ==> Success(()) 74 | } 75 | } 76 | 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/TailrecSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import utest.* 20 | 21 | // test verifying the effectiveness of our workaround for https://github.com/scala/bug/issues/8657 22 | object TailrecSpec extends TestParserSpec { 23 | 24 | abstract class TailrecParser extends TestParser0 { 25 | 26 | def InputLine = 27 | rule { 28 | oneOrMore('x') ~ EOI | zeroOrMore('x') ~ 'y' ~ EOI 29 | } 30 | } 31 | 32 | val tests = Tests { 33 | 34 | "The TailrecParser parser" - { 35 | "be able to match 100,000 chars without overflowing the stack" - new TailrecParser { 36 | def targetRule = InputLine 37 | 38 | val chars = Array.fill(100000)('x') 39 | chars(99999) = 'y' 40 | new String(chars) must beMatched 41 | } 42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/TestParserSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import org.parboiled2.support.Unpack 20 | import org.parboiled2.support.hlist.* 21 | import utest.* 22 | 23 | abstract class TestParserSpec extends TestSuite { 24 | type TestParser0 = TestParser[HNil, Unit] 25 | type TestParser1[T] = TestParser[T :: HNil, T] 26 | type TestParserN[L <: HList] = TestParser[L, L] 27 | 28 | abstract class TestParser[L <: HList, Out](implicit unpack: Unpack.Aux[L, Out]) extends Parser { 29 | var input: ParserInput = _ 30 | def errorFormatter: ErrorFormatter = new ErrorFormatter(showTraces = true) 31 | 32 | def targetRule: RuleN[L] 33 | 34 | // shadow utests implicit extension on Strings which collides with our `str2CharRangeSupport` 35 | def TestableString: Any = null 36 | 37 | sealed trait MustAssert { 38 | def assert(str: String): Unit 39 | } 40 | 41 | private case class BeMatchedWith(underlying: String => Unit) extends MustAssert { 42 | override def assert(str: String): Unit = underlying(str) 43 | } 44 | def beMatched: MustAssert = BeMatchedWith(assertMatched) 45 | def beMatchedWith(r: Out): MustAssert = BeMatchedWith(assertMatchedWith(r)) 46 | def beMismatched: MustAssert = BeMatchedWith(assertMismatched) 47 | def beMismatchedWithErrorMsg(msg: String): MustAssert = BeMatchedWith(assertMismatchedWithErrorMsg(msg)) 48 | 49 | implicit class StringExt(str: String) { 50 | def must(mustAssert: MustAssert): Unit = mustAssert.assert(str) 51 | } 52 | 53 | def assertMatched(str: String): Unit = assert(parse(str).isRight) 54 | def assertMatchedWith(r: Out)(str: String): Unit = assert(parse(str) == Right(r)) 55 | def assertMismatched(str: String): Unit = assert(parse(str).isLeft) 56 | 57 | // def beMismatchedWithError(pe: ParseError) = parse(_: String).left.toOption.get === pe 58 | def assertMismatchedWithErrorMsg(expected: String)(str: String): Unit = { 59 | val actual = parse(str).left.toOption.map(formatError(_, errorFormatter)).get 60 | val expct = expected.stripMargin 61 | assert(actual == expct) 62 | } 63 | 64 | def parse(input: String): Either[ParseError, Out] = { 65 | this.input = input 66 | import Parser.DeliveryScheme.Either 67 | targetRule.run() 68 | } 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/ValueStackSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
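// Sketch of the result "delivery schemes" used by TestParserSpec above: `run()`
// returns a Try by default, and importing a different Parser.DeliveryScheme
// changes the result type. The parser name is illustrative only.
import org.parboiled2.*

class YesParser(val input: ParserInput) extends Parser {
  def Yes: Rule0 = rule("yes" ~ EOI)
}

val asTry = new YesParser("yes").Yes.run()   // scala.util.Try[Unit] (the default scheme)

val asEither = {
  import Parser.DeliveryScheme.Either
  new YesParser("yes").Yes.run()             // Either[ParseError, Unit]
}

val asValue = {
  import Parser.DeliveryScheme.Throw
  new YesParser("yes").Yes.run()             // Unit; a ParseError is thrown on mismatch
}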
15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import org.parboiled2.support.hlist.{HList, HNil} 20 | import utest.{intercept, *} 21 | 22 | object ValueStackSpec extends TestSuite { 23 | 24 | val tests = Tests { 25 | 26 | "The ValueStack should properly support" - { 27 | 28 | "push, size, toList" - new TestStack(stackSize = 3) { 29 | size ==> 0 30 | push(42) 31 | size ==> 1 32 | toList ==> List(42) 33 | push("yes") 34 | push(3.0) 35 | size ==> 3 36 | toList ==> List(42, "yes", 3.0) 37 | intercept[ValueStackOverflowException] { 38 | push("overflow") 39 | } 40 | } 41 | 42 | "pushAll, toHList" - new TestStack(stackSize = 3) { 43 | pushAll(42 :: "yes" :: 4.5 :: HNil) 44 | size ==> 3 45 | toHList[HList]() ==> 42 :: "yes" :: 4.5 :: HNil 46 | intercept[ValueStackOverflowException] { 47 | pushAll("overflow" :: HNil) 48 | } 49 | intercept[IllegalArgumentException] { 50 | toHList[HList](start = -1) 51 | } 52 | intercept[IllegalArgumentException] { 53 | toHList[HList](start = 1, end = 0) 54 | } 55 | } 56 | 57 | "insert" - new TestStack(stackSize = 4) { 58 | pushAll(1 :: 2 :: 3 :: HNil) 59 | insert(2, 1.5) 60 | toList ==> List(1, 1.5, 2, 3) 61 | intercept[IllegalArgumentException] { 62 | insert(-1, 0) 63 | } 64 | intercept[ValueStackOverflowException] { 65 | insert(2, 0) 66 | } 67 | intercept[ValueStackUnderflowException] { 68 | insert(5, 0) 69 | } 70 | } 71 | 72 | "pop" - new TestStack(stackSize = 8) { 73 | pushAll(1 :: 2 :: 3 :: HNil) 74 | pop() ==> 3 75 | toList ==> List(1, 2) 76 | pop() ==> 2 77 | toList ==> List(1) 78 | pop() ==> 1 79 | assert(isEmpty) 80 | intercept[ValueStackUnderflowException] { 81 | pop() 82 | } 83 | } 84 | 85 | "pullOut" - new TestStack(stackSize = 8) { 86 | pushAll(1 :: 2 :: 3 :: 4 :: HNil) 87 | pullOut(1) ==> 3 88 | toList ==> List(1, 2, 4) 89 | pullOut(2) ==> 1 90 | toList ==> List(2, 4) 91 | intercept[ValueStackUnderflowException] { 92 | pullOut(2) 93 | } 94 | intercept[IllegalArgumentException] { 95 | pullOut(-1) 96 | } 97 | } 98 | 99 | "peek" - new TestStack(stackSize = 8) { 100 | pushAll(1 :: 2 :: 3 :: HNil) 101 | peek ==> 3 102 | peek(1) ==> 2 103 | peek(2) ==> 1 104 | intercept[ValueStackUnderflowException] { 105 | peek(3) 106 | } 107 | intercept[IllegalArgumentException] { 108 | pullOut(-1) 109 | } 110 | } 111 | 112 | "poke" - new TestStack(stackSize = 8) { 113 | pushAll(1 :: 2 :: 3 :: HNil) 114 | poke(0, "3") 115 | toList ==> List(1, 2, "3") 116 | poke(1, "2") 117 | toList ==> List(1, "2", "3") 118 | poke(2, "1") 119 | toList ==> List("1", "2", "3") 120 | intercept[ValueStackUnderflowException] { 121 | poke(3, 0) 122 | } 123 | intercept[IllegalArgumentException] { 124 | poke(-1, 0) 125 | } 126 | } 127 | 128 | "swap" - new TestStack(stackSize = 8) { 129 | pushAll(1 :: 2 :: 3 :: HNil) 130 | swap() 131 | toList ==> List(1, 3, 2) 132 | pop() 133 | pop() 134 | intercept[ValueStackUnderflowException] { 135 | swap() 136 | } 137 | } 138 | 139 | "swap3" - new TestStack(stackSize = 8) { 140 | pushAll(1 :: 2 :: 3 :: HNil) 141 | swap3() 142 | toList ==> List(3, 2, 1) 143 | pop() 144 | intercept[ValueStackUnderflowException] { 145 | swap3() 146 | } 147 | } 148 | 149 | "swap4" - new TestStack(stackSize = 8) { 150 | pushAll(1 :: 2 :: 3 :: 4 :: HNil) 151 | swap4() 152 | toList ==> List(4, 3, 2, 1) 153 | pop() 154 | intercept[ValueStackUnderflowException] { 155 | swap4() 156 | } 157 | } 158 | 159 | "swap5" - new TestStack(stackSize = 8) { 160 | pushAll(1 :: 2 :: 3 :: 4 :: 5 :: HNil) 161 | swap5() 162 | toList ==> List(5, 4, 3, 2, 1) 163 | pop() 164 | 
intercept[ValueStackUnderflowException] { 165 | swap5() 166 | } 167 | } 168 | 169 | } 170 | } 171 | 172 | class TestStack(stackSize: Int) extends ValueStack(stackSize, stackSize) 173 | } 174 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/VarianceSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import org.parboiled2.support.hlist.* 20 | import utest.{compileError, ArrowAssert} 21 | 22 | //// pure compile-time-only test 23 | class VarianceSpec { 24 | 25 | // the Parsing DSL should 26 | { 27 | 28 | // honor contravariance on the 1st type param of the `Rule` type 29 | { 30 | // valid example 31 | test { 32 | abstract class Par extends Parser { 33 | def A: Rule2[String, Int] = ??? 34 | def B: PopRule[Any :: HNil] = ??? 35 | def C: Rule1[String] = rule(A ~ B) 36 | } 37 | () 38 | } 39 | 40 | // TODO: fix https://github.com/sirthias/parboiled2/issues/172 and re-enable 41 | // //invalid example 1 42 | // test { 43 | // abstract class Par extends Parser { 44 | // def A: Rule1[Any] = ??? 45 | // def B: PopRule[Int :: HNil] = ??? 46 | // } 47 | // illTyped("""class P extends Par { def C = rule { A ~ B } }""", "Illegal rule composition") 48 | // } 49 | 50 | // invalid example 2 51 | test { 52 | abstract class Par extends Parser { 53 | def A: Rule2[String, Any] = ??? 54 | def B: PopRule[Int :: HNil] = ??? 55 | } 56 | compileError("""class P extends Par { def C = rule { A ~ B } }""").msg ==> "Illegal rule composition" 57 | } 58 | 59 | // invalid example 3 60 | test { 61 | abstract class Par extends Parser { 62 | def A: Rule1[String] = ??? 63 | def B: PopRule[Int :: HNil] = ??? 64 | } 65 | compileError("""class P extends Par { def C = rule { A ~ B } }""").msg ==> "Illegal rule composition" 66 | } 67 | } 68 | 69 | // honor covariance on the 2nd type param of the `Rule` type 70 | { 71 | // valid example 72 | test { 73 | abstract class Par extends Parser { 74 | def A: Rule0 = ??? 75 | def B: Rule1[Int] = ??? 76 | def C: Rule1[Any] = rule(A ~ B) 77 | } 78 | } 79 | 80 | // invalid example 81 | test { 82 | abstract class Par extends Parser { 83 | def A: Rule0 = ??? 84 | def B: Rule1[Any] = ??? 
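// The stack operations exercised above are usually driven indirectly from rule
// actions; a small sketch (parser and rule names are illustrative only).
import org.parboiled2.*

class StackDslParser(val input: ParserInput) extends Parser {
  // `push` adds a value without consuming input, `capture` pushes the matched
  // text, and `~>` pops as many values as its function takes and pushes the result.
  def Tagged: Rule1[String] = rule(
    push("length") ~ capture(oneOrMore(CharPredicate.Alpha)) ~ EOI ~> ((tag: String, word: String) => s"$tag=${word.length}")
  )
}

// new StackDslParser("abc").Tagged.run() yields Success("length=3")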
85 | } 86 | compileError("""class P extends Par { def C: Rule1[Int] = rule { A ~ B } }""").msg ==> "type mismatch;.*" 87 | } 88 | } 89 | } 90 | 91 | def test(x: Any): Unit = () // prevent "a pure expression does nothing in statement position" warnings 92 | } 93 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/nestedpackage/AlienPackageParserSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2.nestedpackage 18 | 19 | import scala.util.Success 20 | import utest.* 21 | 22 | object AlienPackageParserSpec extends TestSuite { 23 | 24 | abstract class AbstractParser(val input: org.parboiled2.ParserInput) extends org.parboiled2.Parser { 25 | import org.parboiled2.{CharPredicate, Rule1} 26 | 27 | def foo: Rule1[String] = rule(capture("foo" ~ zeroOrMore(CharPredicate.Digit))) 28 | } 29 | 30 | class FooParser(input: String) extends AbstractParser(input) { 31 | def Go = rule(foo ~ EOI) 32 | } 33 | 34 | val tests = Tests { 35 | "Parsers in files that dont explicitly import org.parboiled2._" - { 36 | "compile" - { 37 | new FooParser("foo123").Go.run() ==> Success("foo123") 38 | } 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /parboiled-core/src/test/scala/org/parboiled2/util/Base64Spec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2.util 18 | 19 | import utest.* 20 | import java.nio.charset.StandardCharsets 21 | 22 | object Base64Spec extends TestSuite { 23 | 24 | private val testVectors = Map( 25 | "" -> "", 26 | "f" -> "Zg==", 27 | "fo" -> "Zm8=", 28 | "foo" -> "Zm9v", 29 | "foob" -> "Zm9vYg==", 30 | "fooba" -> "Zm9vYmE=", 31 | "foobar" -> "Zm9vYmFy", 32 | "@@ Hello @@ world @@!" 
-> "QEAgSGVsbG8gQEAgd29ybGQgQEAh" 33 | ) 34 | 35 | val tests = Tests { 36 | "Base64" - { 37 | testVectors.foreach { case (expectedDecoded, expectedEncoded) => 38 | val expectedDecodedBytes = expectedDecoded.getBytes(StandardCharsets.UTF_8) 39 | 40 | val encoded = Base64.rfc2045().encodeToString(expectedDecodedBytes, lineSep = false) 41 | 42 | assert( 43 | expectedEncoded == encoded, 44 | expectedDecodedBytes sameElements Base64.rfc2045().decode(encoded.toCharArray), 45 | expectedDecodedBytes sameElements Base64.rfc2045().decodeFast(encoded.toCharArray) 46 | ) 47 | } 48 | } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /parboiled/src/main/scala-2/org/parboiled2/DynamicRuleDispatchMacro.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import scala.reflect.macros.whitebox.Context 20 | import org.parboiled2.support.hlist.HList 21 | 22 | import scala.collection.immutable 23 | 24 | trait DynamicRuleDispatchMacro { _: DynamicRuleDispatch.type => 25 | 26 | /** Implements efficient runtime dispatch to a predefined set of parser rules. 27 | * Given a number of rule names this macro-supported method creates a `DynamicRuleDispatch` instance along with 28 | * a sequence of the given rule names. 29 | * Note that there is no reflection involved and compilation will fail, if one of the given rule names 30 | * does not constitute a method of parser type `P` or has a type different from `RuleN[L]`. 
31 | */ 32 | def apply[P <: Parser, L <: HList](ruleNames: String*): (DynamicRuleDispatch[P, L], immutable.Seq[String]) = 33 | macro DynamicRuleDispatch.__create[P, L] 34 | 35 | ///////////////////// INTERNAL //////////////////////// 36 | 37 | def __create[P <: Parser, L <: HList](c: Context)( 38 | ruleNames: c.Expr[String]* 39 | )(implicit P: c.WeakTypeTag[P], L: c.WeakTypeTag[L]): c.Expr[(DynamicRuleDispatch[P, L], immutable.Seq[String])] = { 40 | import c.universe._ 41 | val names = ruleNames.map { 42 | _.tree match { 43 | case Literal(Constant(s: String)) => s 44 | case x => c.abort(x.pos, s"Invalid `String` argument `x`, only `String` literals are supported!") 45 | } 46 | }.toArray 47 | java.util.Arrays.sort(names.asInstanceOf[Array[Object]]) 48 | 49 | def rec(start: Int, end: Int): Tree = 50 | if (start <= end) { 51 | val mid = (start + end) >>> 1 52 | val name = names(mid) 53 | q"""val c = $name compare ruleName 54 | if (c < 0) ${rec(mid + 1, end)} 55 | else if (c > 0) ${rec(start, mid - 1)} 56 | else 57 | Some(new RuleRunner[$P, $L] { 58 | def apply(handler: DynamicRuleHandler[$P, $L]): handler.Result = { 59 | val p = handler.parser 60 | p.__run[$L](p.${TermName(name).encodedName.toTermName})(handler) 61 | } 62 | }) 63 | """ 64 | } else q"None" 65 | 66 | c.Expr[(DynamicRuleDispatch[P, L], immutable.Seq[String])] { 67 | q"""val drd = 68 | new org.parboiled2.DynamicRuleDispatch[$P, $L] { 69 | def lookup(ruleName: String): Option[RuleRunner[$P, $L]] = 70 | ${rec(0, names.length - 1)} 71 | } 72 | (drd, scala.collection.immutable.Seq(..$ruleNames))""" 73 | } 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /parboiled/src/main/scala-3/org/parboiled2/DynamicRuleDispatchMacro.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import org.parboiled2.support.hlist.HList 20 | 21 | import scala.collection.immutable 22 | 23 | trait DynamicRuleDispatchMacro { self: DynamicRuleDispatch.type => 24 | 25 | /** Implements efficient runtime dispatch to a predefined set of parser rules. 26 | * Given a number of rule names this macro-supported method creates a `DynamicRuleDispatch` instance along with 27 | * a sequence of the given rule names. 28 | * Note that there is no reflection involved and compilation will fail, if one of the given rule names 29 | * does not constitute a method of parser type `P` or has a type different from `RuleN[L]`. 
30 | */ 31 | inline def apply[P <: Parser, L <: HList]( 32 | inline ruleNames: String* 33 | ): (DynamicRuleDispatch[P, L], immutable.Seq[String]) = 34 | ${ DynamicRuleDispatch.__create[P, L]('ruleNames) } 35 | 36 | import scala.quoted.* 37 | def __create[P <: Parser: Type, L <: HList: Type]( 38 | ruleNames: Expr[Seq[String]] 39 | )(using Quotes): Expr[(DynamicRuleDispatch[P, L], immutable.Seq[String])] = { 40 | import quotes.reflect.* 41 | 42 | val names: Seq[String] = ruleNames match { 43 | case Varargs(Exprs(args)) => args.sorted 44 | } 45 | 46 | def ruleEntry(name: String): Expr[(String, RuleRunner[P, L])] = '{ 47 | val runner = new RuleRunner[P, L] { 48 | def apply(handler: DynamicRuleHandler[P, L]): handler.Result = { 49 | val p = handler.parser 50 | p.__run[L](${ Select.unique('{ handler.parser }.asTerm, name).asExprOf[RuleN[L]] })(handler) 51 | } 52 | } 53 | (${ Expr(name) }, runner) 54 | } 55 | val ruleEntries: Expr[Seq[(String, RuleRunner[P, L])]] = Expr.ofSeq(names.map(ruleEntry(_))) 56 | 57 | '{ 58 | val map: Map[String, RuleRunner[P, L]] = Map($ruleEntries*) 59 | val drd = 60 | new org.parboiled2.DynamicRuleDispatch[P, L] { 61 | def lookup(ruleName: String): Option[RuleRunner[P, L]] = 62 | map.get(ruleName) 63 | } 64 | 65 | (drd, $ruleNames) 66 | } 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /parboiled/src/main/scala/org/parboiled2/Base64Parsing.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import org.parboiled2.util.Base64 20 | 21 | /** Rules for parsing Base-64 encoded strings. 22 | */ 23 | trait Base64Parsing { this: Parser => 24 | import Base64Parsing.* 25 | 26 | /** Parses an RFC4045-encoded string and decodes it onto the value stack. 27 | */ 28 | def rfc2045String: Rule1[Array[Byte]] = base64(rfc2045Alphabet, Base64.rfc2045().fillChar, rfc2045StringDecoder) 29 | 30 | /** Parses an RFC4045-encoded string potentially containing newlines and decodes it onto the value stack. 31 | */ 32 | def rfc2045Block: Rule1[Array[Byte]] = base64(rfc2045Alphabet, Base64.rfc2045().fillChar, rfc2045BlockDecoder) 33 | 34 | /** Parses a org.parboiled2.util.Base64.custom()-encoded string and decodes it onto the value stack. 35 | */ 36 | def base64CustomString: Rule1[Array[Byte]] = base64(customAlphabet, Base64.custom().fillChar, customStringDecoder) 37 | 38 | /** Parses a org.parboiled2.util.Base64.custom()-encoded string potentially containing newlines 39 | * and decodes it onto the value stack. 40 | */ 41 | def base64CustomBlock: Rule1[Array[Byte]] = base64(customAlphabet, Base64.custom().fillChar, customBlockDecoder) 42 | 43 | /** Parses a BASE64-encoded string with the given alphabet and decodes it onto the value 44 | * stack using the given codec. 45 | */ 46 | @deprecated( 47 | "Does not work on padded blocks. 
Does not work on strings with trailing garbage. Use rfc2045String, rfc2045Block, base64CustomString, or base64CustomBlock instead.", 48 | "2.1.7" 49 | ) 50 | def base64StringOrBlock(alphabet: CharPredicate, decoder: Decoder): Rule1[Array[Byte]] = { 51 | val start = cursor 52 | rule { 53 | oneOrMore(alphabet) ~ run { 54 | decoder(input.sliceCharArray(start, cursor)) match { 55 | case null => MISMATCH 56 | case bytes => push(bytes) 57 | } 58 | } 59 | } 60 | } 61 | 62 | private def base64(alphabet: CharPredicate, fillChar: Char, decoder: Decoder): Rule1[Array[Byte]] = { 63 | val start = cursor 64 | rule { 65 | oneOrMore(alphabet) ~ zeroOrMore(ch(fillChar)) ~ run { 66 | decoder(input.sliceCharArray(start, cursor)) match { 67 | case null => MISMATCH 68 | case bytes => push(bytes) 69 | } 70 | } ~ EOI 71 | } 72 | } 73 | } 74 | 75 | object Base64Parsing { 76 | type Decoder = Array[Char] => Array[Byte] 77 | 78 | val rfc2045Alphabet = CharPredicate(Base64.rfc2045().getAlphabet).asMaskBased 79 | val customAlphabet = CharPredicate(Base64.custom().getAlphabet).asMaskBased 80 | 81 | val rfc2045StringDecoder: Decoder = decodeString(Base64.rfc2045()) 82 | val customStringDecoder: Decoder = decodeString(Base64.custom()) 83 | 84 | val rfc2045BlockDecoder: Decoder = decodeBlock(Base64.rfc2045()) 85 | val customBlockDecoder: Decoder = decodeBlock(Base64.custom()) 86 | 87 | def decodeString(codec: Base64)(chars: Array[Char]): Array[Byte] = codec.decodeFast(chars) 88 | def decodeBlock(codec: Base64)(chars: Array[Char]): Array[Byte] = codec.decode(chars) 89 | } 90 | -------------------------------------------------------------------------------- /parboiled/src/main/scala/org/parboiled2/DynamicRuleDispatch.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import org.parboiled2.support.hlist.HList 20 | 21 | /** An application needs to implement this interface to receive the result 22 | * of a dynamic parsing run. 23 | * Often times this interface is directly implemented by the Parser class itself 24 | * (even though this is not a requirement). 25 | */ 26 | trait DynamicRuleHandler[P <: Parser, L <: HList] extends Parser.DeliveryScheme[L] { 27 | def parser: P 28 | def ruleNotFound(ruleName: String): Result 29 | } 30 | 31 | /** Runs one of the rules of a parser instance of type `P` given the rules name. 32 | * The rule must have type `RuleN[L]`. 
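// Sketch of mixing the Base64Parsing trait above into a concrete parser and
// decoding an RFC 2045 string; the parser name is illustrative only.
import org.parboiled2.*
import scala.util.Success

class Base64Reader(val input: ParserInput) extends Parser with Base64Parsing {
  def Payload: Rule1[Array[Byte]] = rule(rfc2045String)   // the library rule above already anchors at EOI
}

new Base64Reader("Zm9vYmFy").Payload.run() match {
  case Success(decoded) => assert(new String(decoded, "UTF-8") == "foobar")
  case other            => sys.error(s"unexpected result: $other")
}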
33 | */ 34 | trait DynamicRuleDispatch[P <: Parser, L <: HList] { 35 | def apply(handler: DynamicRuleHandler[P, L], ruleName: String): handler.Result = 36 | lookup(ruleName).map(_(handler)).getOrElse(handler.ruleNotFound(ruleName)) 37 | 38 | def lookup(ruleName: String): Option[RuleRunner[P, L]] 39 | } 40 | 41 | trait RuleRunner[P <: Parser, L <: HList] { 42 | def apply(handler: DynamicRuleHandler[P, L]): handler.Result 43 | } 44 | 45 | object DynamicRuleDispatch extends DynamicRuleDispatchMacro 46 | -------------------------------------------------------------------------------- /parboiled/src/main/scala/org/parboiled2/StringBuilding.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | /** For certain high-performance use-cases it is better to construct Strings 20 | * that the parser is to produce/extract from the input in a char-by-char fashion. 21 | * 22 | * Mixing this trait into your parser gives you a simple facility to support this. 23 | */ 24 | trait StringBuilding { this: Parser => 25 | protected val sb = new java.lang.StringBuilder 26 | 27 | def clearSB(): Rule0 = rule(run(sb.setLength(0))) 28 | 29 | def appendSB(): Rule0 = rule(run(sb.append(lastChar))) 30 | 31 | def appendSB(offset: Int): Rule0 = rule(run(sb.append(charAt(offset)))) 32 | 33 | def appendSB(c: Char): Rule0 = rule(run(sb.append(c))) 34 | 35 | def appendSB(s: String): Rule0 = rule(run(sb.append(s))) 36 | 37 | def prependSB(): Rule0 = rule(run(doPrepend(lastChar))) 38 | 39 | def prependSB(offset: Int): Rule0 = rule(run(doPrepend(charAt(offset)))) 40 | 41 | def prependSB(c: Char): Rule0 = rule(run(doPrepend(c))) 42 | 43 | def prependSB(s: String): Rule0 = rule(run(doPrepend(s))) 44 | 45 | def setSB(s: String): Rule0 = rule(run(doSet(s))) 46 | 47 | private def doPrepend(c: Char): Unit = { 48 | val saved = sb.toString 49 | sb.setLength(0) 50 | sb.append(c) 51 | sb.append(saved) 52 | } 53 | 54 | private def doPrepend(s: String): Unit = { 55 | val saved = sb.toString 56 | sb.setLength(0) 57 | sb.append(s) 58 | sb.append(saved) 59 | } 60 | 61 | private def doSet(s: String): Unit = { 62 | sb.setLength(0) 63 | sb.append(s) 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /parboiled/src/test/scala/org/parboiled2/Base64ParsingSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import scala.util.Random 20 | import org.parboiled2.util.Base64 21 | import org.parboiled2.support.hlist.* 22 | import utest.* 23 | 24 | object Base64ParsingSpec extends TestSuite { 25 | 26 | class TestParser(val input: ParserInput) extends Parser with Base64Parsing 27 | 28 | val randomChars = { 29 | val random = new Random() 30 | Stream.continually(random.nextPrintableChar()) 31 | } 32 | 33 | val tests = Tests { 34 | 35 | "Base64Parsing" - { 36 | "enable parsing of RFC2045 Strings" - test("rfc2045String", Base64.rfc2045()) 37 | "enable parsing of RFC2045 Blocks" - test("rfc2045Block", Base64.rfc2045()) 38 | "enable parsing of custom-Base64 Strings" - test("base64CustomString", Base64.custom()) 39 | "enable parsing of custom-Base64 Blocks" - test("base64CustomBlock", Base64.custom()) 40 | "reject RFC2045 Strings with trailing garbage" - testTrailingGarbage("rfc2045String", Base64.rfc2045()) 41 | "reject RFC2045 Blocks with trailing garbage" - testTrailingGarbage("rfc2045Block", Base64.rfc2045()) 42 | "reject custom-Base64 Strings with trailing garbage" - testTrailingGarbage("base64CustomString", Base64.custom()) 43 | "reject custom-Base64 Blocks with trailing garbage" - testTrailingGarbage("base64CustomBlock", Base64.custom()) 44 | } 45 | 46 | } 47 | 48 | val (dispatch, rules) = DynamicRuleDispatch[TestParser, Array[Byte] :: HNil]( 49 | "rfc2045String", 50 | "rfc2045Block", 51 | "base64CustomString", 52 | "base64CustomBlock" 53 | ) 54 | 55 | def testParser(encoded: String) = 56 | new TestParser(encoded) with DynamicRuleHandler[TestParser, Array[Byte] :: HNil] { 57 | type Result = String 58 | def parser: TestParser = this 59 | def ruleNotFound(ruleName: String): Result = "n/a" 60 | def success(result: Array[Byte] :: HNil): Result = new String(result.head, UTF8) 61 | def parseError(error: ParseError): Result = throw error 62 | def failure(error: Throwable): Result = throw error 63 | } 64 | 65 | def test(ruleName: String, base64: Base64): Unit = 66 | (1 to 100).foreach { x => 67 | val string = randomChars.take(x).mkString("") 68 | val encoded = base64.encodeToString(string getBytes UTF8, lineSep = false) 69 | val parser = testParser(encoded) 70 | dispatch(parser, ruleName) ==> string 71 | } 72 | 73 | def testTrailingGarbage(ruleName: String, base64: Base64): Unit = 74 | (1 to 100).foreach { x => 75 | val string = randomChars.take(x).mkString("") 76 | val encoded = base64.encodeToString(string getBytes UTF8, lineSep = false) + "!" 77 | val parser = testParser(encoded) 78 | intercept[ParseError](dispatch(parser, ruleName)) 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /parboiled/src/test/scala/org/parboiled2/DynamicRuleDispatchSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package org.parboiled2 18 | 19 | import org.parboiled2.support.hlist.HNil 20 | import utest.* 21 | 22 | object DynamicRuleDispatchSpec extends TestSuite { 23 | 24 | class TestParser(val input: ParserInput) extends Parser with DynamicRuleHandler[TestParser, HNil] { 25 | def A = rule('a' ~ EOI) 26 | def B = rule('b' ~ EOI) 27 | def C = rule('c' ~ EOI) 28 | def D = rule('d' ~ EOI) 29 | def E = rule('e' ~ EOI) 30 | 31 | type Result = String 32 | def parser: TestParser = this 33 | def ruleNotFound(ruleName: String): Result = "n/a" 34 | def success(result: HNil): Result = "ok" 35 | def parseError(error: ParseError): Result = "error" 36 | def failure(error: Throwable): Result = "throw" 37 | } 38 | 39 | val tests = Tests { 40 | 41 | "DynamicRuleDispatch" - { 42 | "work as expected when selecting from 0 rules" - { 43 | val (dispatch, ruleNames) = DynamicRuleDispatch[TestParser, HNil]() 44 | dispatch(new TestParser("a"), "A") ==> "n/a" 45 | ruleNames ==> Seq() 46 | } 47 | "work as expected when selecting from 1 rule" - { 48 | val (dispatch, ruleNames) = DynamicRuleDispatch[TestParser, HNil]("A") 49 | dispatch(new TestParser("a"), "A") ==> "ok" 50 | dispatch(new TestParser("b"), "B") ==> "n/a" 51 | ruleNames ==> Seq("A") 52 | } 53 | "work as expected when selecting from 2 rules" - { 54 | val (dispatch, ruleNames) = DynamicRuleDispatch[TestParser, HNil]("A", "B") 55 | dispatch(new TestParser("a"), "A") ==> "ok" 56 | dispatch(new TestParser("b"), "B") ==> "ok" 57 | dispatch(new TestParser("c"), "C") ==> "n/a" 58 | ruleNames ==> Seq("A", "B") 59 | } 60 | "work as expected when selecting from 3 rules" - { 61 | val (dispatch, ruleNames) = DynamicRuleDispatch[TestParser, HNil]("A", "B", "C") 62 | dispatch(new TestParser("a"), "A") ==> "ok" 63 | dispatch(new TestParser("b"), "B") ==> "ok" 64 | dispatch(new TestParser("c"), "C") ==> "ok" 65 | dispatch(new TestParser("d"), "D") ==> "n/a" 66 | ruleNames ==> Seq("A", "B", "C") 67 | } 68 | "work as expected when selecting from 4 rules" - { 69 | val (dispatch, ruleNames) = DynamicRuleDispatch[TestParser, HNil]("A", "B", "C", "D") 70 | dispatch(new TestParser("a"), "A") ==> "ok" 71 | dispatch(new TestParser("b"), "B") ==> "ok" 72 | dispatch(new TestParser("c"), "C") ==> "ok" 73 | dispatch(new TestParser("d"), "D") ==> "ok" 74 | dispatch(new TestParser("e"), "E") ==> "n/a" 75 | ruleNames ==> Seq("A", "B", "C", "D") 76 | } 77 | "work as expected when selecting from 5 rules" - { 78 | val (dispatch, ruleNames) = DynamicRuleDispatch[TestParser, HNil]("A", "B", "C", "D", "E") 79 | dispatch(new TestParser("a"), "A") ==> "ok" 80 | dispatch(new TestParser("b"), "B") ==> "ok" 81 | dispatch(new TestParser("c"), "C") ==> "ok" 82 | dispatch(new TestParser("d"), "D") ==> "ok" 83 | dispatch(new TestParser("e"), "E") ==> "ok" 84 | dispatch(new TestParser("f"), "F") ==> "n/a" 85 | ruleNames ==> Seq("A", "B", "C", "D", "E") 86 | } 87 | } 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /project/ActionOpsBoilerplate.scala: 
-------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2011-14 Miles Sabin, Mathias Doenitz, Alexander Myltsev 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import sbt.* 18 | 19 | /** Generate action ops boilerplate for consuming/producing up to 22 values from/to the value stack. 20 | * 21 | * @author Mathias Doenitz 22 | * @author Alexander Myltsev 23 | */ 24 | object ActionOpsBoilerplate { 25 | 26 | private val MaxArity = 22 27 | 28 | def apply(srcManagedDir: File, streams: Keys.TaskStreams): Seq[File] = { 29 | val actionOpsManagedSourceFile = srcManagedDir / "ActionOps.scala" 30 | if (!actionOpsManagedSourceFile.exists()) { 31 | streams.log.info("Generating boilerplate ActionOps source file " + actionOpsManagedSourceFile) 32 | val scalaSource = (1 to MaxArity).map(generate).mkString(header + generate0, "\n", "}") 33 | IO.write(actionOpsManagedSourceFile, scalaSource) 34 | } 35 | Seq(actionOpsManagedSourceFile) 36 | } 37 | 38 | def header = 39 | """/* 40 | | * Copyright (C) 2009-2014 Mathias Doenitz, Alexander Myltsev 41 | | * 42 | | * Licensed under the Apache License, Version 2.0 (the "License"); 43 | | * you may not use this file except in compliance with the License. 44 | | * You may obtain a copy of the License at 45 | | * 46 | | * http://www.apache.org/licenses/LICENSE-2.0 47 | | * 48 | | * Unless required by applicable law or agreed to in writing, software 49 | | * distributed under the License is distributed on an "AS IS" BASIS, 50 | | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 51 | | * See the License for the specific language governing permissions and 52 | | * limitations under the License. 53 | | */ 54 | | 55 | |package org.parboiled2.support 56 | | 57 | |import org.parboiled2.support.hlist._ 58 | |import org.parboiled2.Rule 59 | | 60 | |// provides the supported `~>` "overloads" for rules of type `Rule[I, O]` as `Out` 61 | |// as a phantom type, which is only used for rule DSL typing 62 | | 63 | |sealed trait ActionOps[I <: HList, O <: HList] { type Out } 64 | |object ActionOps { 65 | | 66 | |""".stripMargin 67 | 68 | val AtoZ = 'A' to 'Z' 69 | val `A, ...` = Array.tabulate(MaxArity + 1)(AtoZ take _ mkString ", ") 70 | val `A ::...` = Array.tabulate(MaxArity + 1)(AtoZ take _ mkString " :: ") 71 | val `..., Z` = Array.tabulate(MaxArity + 1)(AtoZ takeRight _ mkString ", ") 72 | val `... :: Z` = Array.tabulate(MaxArity + 1)(AtoZ takeRight _ mkString " :: ") 73 | 74 | def generate0 = { 75 | def consumeStack(i: Int) = 76 | s""" def apply[${`..., Z`(i)}, RR](f: (${`..., Z`(i)}) => RR) 77 | | (implicit j: Join[${`... 
:: Z`(i)} :: II, HNil, RR], 78 | | c: FCapture[(${`..., Z`(i)}) => RR]): Rule[j.In, j.Out] 79 | """.stripMargin 80 | 81 | // implicit def ops0[II <: HList, OO <: HNil]: ActionOps[II, OO] { type Out = Ops0[II] } = `n/a` 82 | // sealed trait Ops0[II <: HList] { 83 | // def apply[RR](f: () => RR)(implicit j: Join[II, HNil, RR], c: FCapture[() => RR]): Rule[j.In, j.Out] 84 | // def apply[Z, RR](f: Z => RR)(implicit j: Join[Z :: II, HNil, RR], c: FCapture[Z => RR]): Rule[j.In, j.Out] 85 | // def apply[Y, Z, RR](f: (Y, Z) => RR)(implicit j: Join[Y :: Z :: II, HNil, RR], c: FCapture[(Y, Z) => RR]): Rule[j.In, j.Out] 86 | // def apply[X, Y, Z, RR](f: (X, Y, Z) => RR)(implicit j: Join[X :: Y :: Z :: II, HNil, RR], c: FCapture[(X, Y, Z) => RR]): Rule[j.In, j.Out] 87 | // ... 88 | // } 89 | 90 | s""" implicit def ops0[II <: HList, OO <: HNil]: ActionOps[II, OO] { type Out = Ops0[II] } = `n/a` 91 | | sealed trait Ops0[II <: HList] { 92 | | def apply[RR](f: () => RR)(implicit j: Join[II, HNil, RR], c: FCapture[() => RR]): Rule[j.In, j.Out] 93 | | 94 | |${(1 to MaxArity) map consumeStack mkString "\n"} 95 | | } 96 | """.stripMargin 97 | } 98 | 99 | def generate(i: Int) = { 100 | def consumeOut(j: Int) = { 101 | val consumedTypes = AtoZ take i takeRight j mkString ", " 102 | s""" def apply[RR](f: ($consumedTypes) => RR) 103 | | (implicit j: Join[II, ${`A ::...`(i - j)} :: HNil, RR], 104 | | c: FCapture[($consumedTypes) => RR]): Rule[j.In, j.Out] 105 | """.stripMargin 106 | } 107 | 108 | def consumeStack(j: Int) = 109 | s""" def apply[${`..., Z`(j)}, RR](f: (${`..., Z`(j)}, ${`A, ...`(i)}) => RR) 110 | | (implicit j: Join[${`... :: Z`(j)} :: II, HNil, RR], 111 | | c: FCapture[(${`..., Z`(j)}, ${`A, ...`(i)}) => RR]): Rule[j.In, j.Out] 112 | """.stripMargin 113 | 114 | // e.g. 115 | // implicit def ops3[II <: HList, A, B, C]: ActionOps[II, A :: B :: C :: HNil] { type Out = Ops3[II, A, B, C] } = `n/a` 116 | // sealed trait Ops3[II <: HList, A, B, C] { 117 | // def apply[RR](f: () => RR)(implicit j: Join[II, A :: B :: C :: HNil, RR], c: FCapture[() => RR]): Rule[j.In, j.Out] 118 | // def apply[RR](f: C => RR)(implicit j: Join[II, A :: B :: HNil, RR], c: FCapture[C => RR]): Rule[j.In, j.Out] 119 | // def apply[RR](f: (B, C) => RR)(implicit j: Join[II, A :: HNil, RR], c: FCapture[(B, C) => RR]): Rule[j.In, j.Out] 120 | // def apply[RR](f: (A, B, C) => RR)(implicit j: Join[II, HNil, RR], c: FCapture[(A, B, C) => RR]): Rule[j.In, j.Out] 121 | // def apply[Z, RR](f: (Z, A, B, C) => RR)(implicit j: Join[Z :: II, HNil, RR], c: FCapture[(Z, A, B, C) => RR]): Rule[j.In, j.Out] 122 | // def apply[Y, Z, RR](f: (Y, Z, A, B, C) => RR)(implicit j: Join[Y :: Z :: II, HNil, RR], c: FCapture[(Y, Z, A, B, C) => RR]): Rule[j.In, j.Out] 123 | // def apply[X, Y, Z, RR](f: (X, Y, Z, A, B, C) => RR)(implicit j: Join[X :: Y :: Z :: II, HNil, RR], c: FCapture[(X, Y, Z, A, B, C) => RR]): Rule[j.In, j.Out] 124 | // ... 
125 | // } 126 | 127 | s""" 128 | | implicit def ops$i[II <: HList, ${`A, ...`(i)}]: ActionOps[II, ${`A ::...`( 129 | i 130 | )} :: HNil] { type Out = Ops$i[II, ${`A, ...`( 131 | i 132 | )}] } = `n/a` 133 | | sealed trait Ops$i[II <: HList, ${`A, ...`(i)}] { 134 | | def apply[RR](f: () => RR)(implicit j: Join[II, ${`A ::...`( 135 | i 136 | )} :: HNil, RR], c: FCapture[() => RR]): Rule[j.In, j.Out] 137 | | 138 | |${(1 to i - 1) map consumeOut mkString "\n"} 139 | | def apply[RR](f: (${`A, ...`(i)}) => RR)(implicit j: Join[II, HNil, RR], c: FCapture[(${`A, ...`( 140 | i 141 | )}) => RR]): Rule[j.In, j.Out] 142 | | 143 | |${(1 to 22 - i) map consumeStack mkString "\n"} 144 | | } 145 | """.stripMargin 146 | } 147 | } 148 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.11.0 2 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.4") 2 | addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.9.3") 3 | addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.10.0") 4 | addSbtPlugin("com.github.sbt" % "sbt-boilerplate" % "0.7.0") 5 | addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.19.0") 6 | addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.3.2") 7 | addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.7") 8 | addSbtPlugin("com.github.sbt" % "sbt-osgi" % "0.9.9") 9 | addSbtPlugin("org.scala-native" % "sbt-scala-native" % "0.5.7") 10 | addSbtPlugin("org.portable-scala" % "sbt-scala-native-crossproject" % "1.3.2") 11 | addSbtPlugin("com.github.sbt" % "sbt-github-actions" % "0.25.0") 12 | -------------------------------------------------------------------------------- /scalaParser/src/main/scala/scalaparser/CharacterClasses.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package scalaparser 18 | 19 | import org.parboiled2.CharPredicate 20 | 21 | object CharacterClasses { 22 | 23 | val DIGIT = CharPredicate.Digit 24 | 25 | val HEXDIGIT = CharPredicate.HexDigit 26 | 27 | val `Ee` = CharPredicate("Ee") 28 | 29 | val `Ll` = CharPredicate("Ll") 30 | 31 | val `+-` = CharPredicate("+-") 32 | 33 | val DQLF = CharPredicate("\"\n") 34 | val DQBS = CharPredicate("\"\\") 35 | 36 | val `FfDd` = CharPredicate("FfDd") 37 | 38 | val ESCAPEE = CharPredicate("""btnfr'\"""") 39 | 40 | val WSCHAR = CharPredicate(" \t") 41 | 42 | val OPCHAR = CharPredicate("""!#$%&*+-/:<=>?@\^|~""") 43 | 44 | val $_ = CharPredicate("$_") 45 | 46 | val LOWER = CharPredicate.LowerAlpha 47 | 48 | val LOWER$_ = CharPredicate.LowerAlpha ++ $_ 49 | 50 | val UPPER = CharPredicate.UpperAlpha 51 | 52 | val ALPHA = CharPredicate.Alpha 53 | 54 | val ALPHANUM = CharPredicate.AlphaNum 55 | val ALPHANUM$ = CharPredicate.AlphaNum ++ '$' 56 | val ALPHANUM$_ = CharPredicate.AlphaNum ++ $_ 57 | 58 | val KEYCHAR = CharPredicate(":;=#@\u21d2\u2190") 59 | val KEYCHAR2 = CharPredicate("-:%") 60 | } 61 | -------------------------------------------------------------------------------- /scalaParser/src/main/scala/scalaparser/L0_Basics.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package scalaparser 18 | 19 | import org.parboiled2._ 20 | 21 | trait L0_Basics { this: Parser => 22 | import CharacterClasses._ 23 | 24 | def HexNum = rule("0x" ~ HEXDIGIT.+) 25 | 26 | def DecNum = rule(DIGIT.+) 27 | 28 | def Newline = rule(quiet('\r'.? ~ '\n')) 29 | 30 | def LineEnd = rule(quiet(WL ~ Newline)) 31 | 32 | def OpChar = rule(atomic(OPCHAR | test(isMathOrOtherSymbol(cursorChar)) ~ ANY)) 33 | 34 | def AlphaNum = rule(ALPHANUM | GeneralAlphaNum) 35 | def AlphaNum$ = rule(ALPHANUM$ | GeneralAlphaNum) 36 | def AlphaNum$_ = rule(ALPHANUM$_ | GeneralAlphaNum) 37 | 38 | def GeneralLower = rule(atomic(`LOWER$_` | test(cursorChar.isLower) ~ ANY)) 39 | 40 | /** Whitespace, including newlines. This is the default for most things. 41 | */ 42 | def WL = rule(quiet((WSCHAR | Comment | Newline).*)) 43 | 44 | /** Whitespace, excluding newlines. 45 | * Only really useful in e.g. {} blocks, where we want to avoid 46 | * capturing newlines so semicolon-inference works 47 | */ 48 | def WS = rule(quiet(WSCHAR | Comment).*) 49 | 50 | def Semi = rule(WL ~ ';' | WS ~ Newline.+) 51 | 52 | def Semis = rule(Semi.+) 53 | 54 | def NotNewline: Rule0 = rule(&(WS ~ !Newline)) 55 | 56 | def OneNLMax: Rule0 = rule(quiet(WS ~ Newline.? 
~ CommentLine.* ~ NotNewline)) 57 | 58 | //////////////////////////// PRIVATE /////////////////////////////////// 59 | 60 | private def Comment: Rule0 = rule(BlockComment | "//" ~ (!Newline ~ ANY).*) 61 | 62 | private def BlockComment: Rule0 = rule("/*" ~ (BlockComment | !"*/" ~ ANY).* ~ "*/") 63 | 64 | private def GeneralAlphaNum = rule(test(cursorChar.isLetter | cursorChar.isDigit) ~ ANY) 65 | 66 | private def CommentLine = rule(quiet(WSCHAR.* ~ Comment ~ WSCHAR.* ~ Newline)) 67 | 68 | private def isMathOrOtherSymbol(c: Char) = 69 | Character.getType(c) match { 70 | case Character.OTHER_SYMBOL | Character.MATH_SYMBOL => true 71 | case _ => false 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /scalaParser/src/main/scala/scalaparser/L1_KeywordsAndOperators.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package scalaparser 18 | 19 | import org.parboiled2._ 20 | 21 | import scala.annotation.switch 22 | 23 | trait L1_KeywordsAndOperators { this: Parser with L0_Basics => 24 | import CharacterClasses._ 25 | 26 | def Operator = rule(!SymbolicKeyword ~ OpChar.+) 27 | 28 | def Keyword = rule(AlphaKeyword | SymbolicKeyword) 29 | 30 | def underscore = Key("_") 31 | def `abstract` = Key("abstract") 32 | def `case` = Key("case") 33 | def `catch` = Key("catch") 34 | def `class` = Key("class") 35 | def `def` = Key("def") 36 | def `do` = Key("do") 37 | def `else` = Key("else") 38 | def `extends` = Key("extends") 39 | def `final` = Key("final") 40 | def `finally` = Key("finally") 41 | def `for` = Key("for") 42 | def `forSome` = Key("forSome") 43 | def `if` = Key("if") 44 | def `implicit` = Key("implicit") 45 | def `import` = Key("import") 46 | def `lazy` = Key("lazy") 47 | def `macro` = Key("macro") 48 | def `match` = Key("match") 49 | def `new` = Key("new") 50 | def `object` = Key("object") 51 | def `override` = Key("override") 52 | def `package` = Key("package") 53 | def `private` = Key("private") 54 | def `protected` = Key("protected") 55 | def `return` = Key("return") 56 | def `sealed` = Key("sealed") 57 | def `super` = Key("super") 58 | def `this` = Key("this") 59 | def `throw` = Key("throw") 60 | def `trait` = Key("trait") 61 | def `try` = Key("try") 62 | def `type` = Key("type") 63 | def `val` = Key("val") 64 | def `var` = Key("var") 65 | def `while` = Key("while") 66 | def `with` = Key("with") 67 | def `yield` = Key("yield") 68 | 69 | def `<%` = SymbolicKey("<%") 70 | def `>:` = SymbolicKey(">:") 71 | def `<:` = SymbolicKey("<:") 72 | def `=>` = rule(SymbolicKey("=>") | SymbolicKey('⇒')) 73 | def `<-` = rule(SymbolicKey("<-") | SymbolicKey('←')) 74 | def `:` = SymbolicKey(':') 75 | def `=` = SymbolicKey('=') 76 | def `@` = SymbolicKey('@') 77 | def `#` = SymbolicKey("#") 78 | 79 | def Null = RawKey("null") 80 | def True = RawKey("true") 81 | def False = 
RawKey("false") 82 | 83 | // keyword-like patterns (not really keywords though) 84 | def `_*` = rule(underscore ~ WL ~ "*") 85 | def `}` = rule(Semis.? ~ WL ~ '}') 86 | def `{` = rule(WL ~ '{' ~ Semis.?) 87 | 88 | //////////////////////////// PRIVATE /////////////////////////////////// 89 | 90 | private def Key(s: String) = rule(WL ~ RawKey(s)) 91 | private def RawKey(s: String) = rule(s ~ !AlphaNum$_) 92 | private def SymbolicKey(c: Char) = rule(WL ~ c ~ !OpChar) 93 | private def SymbolicKey(s: String) = rule(WL ~ s ~ !OpChar) 94 | 95 | private def SymbolicKeyword = rule(("=>" | KEYCHAR | '<' ~ KEYCHAR2 | ">:") ~ !OpChar) 96 | 97 | private def AlphaKeyword = rule { 98 | run { 99 | // TODO: simplify when https://github.com/sirthias/parboiled2/issues/115 is done 100 | (cursorChar: @switch) match { 101 | case 'a' => str("abstract") 102 | case 'c' => "case" | "catch" | "class" 103 | case 'd' => "def" | "do" 104 | case 'e' => "else" | "extends" 105 | case 'f' => "false" | "final" ~ "ly".? | "forSome" | "for" 106 | case 'i' => "if" | "implicit" | "import" 107 | case 'l' => str("lazy") 108 | case 'm' => str("match") 109 | case 'n' => "new" | "null" 110 | case 'o' => "object" | "override" 111 | case 'p' => "package" | "private" | "protected" 112 | case 'r' => str("return") 113 | case 's' => "sealed" | "super" 114 | case 't' => "this" | "throw" | "trait" | "try" | "true" | "type" 115 | case 'v' => "val" | "var" 116 | case 'w' => "while" | "with" 117 | case 'y' => str("yield") 118 | case '_' => ANY 119 | case _ => MISMATCH 120 | } 121 | } ~ !AlphaNum$_ 122 | } 123 | } 124 | -------------------------------------------------------------------------------- /scalaParser/src/main/scala/scalaparser/L2_Identifiers.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package scalaparser 18 | 19 | import org.parboiled2._ 20 | 21 | trait L2_Identifiers { self: Parser with L0_Basics with L1_KeywordsAndOperators => 22 | 23 | def VarId = rule(WL ~ !Keyword ~ GeneralLower ~ IdRestWithDollar) 24 | def RawIdNoBackticks = rule(!Keyword ~ AlphaNum$_ ~ IdRestWithDollar | Operator) 25 | def RawIdNoDollarNoBackticks = rule(!Keyword ~ AlphaNum$_ ~ IdRest | Operator) 26 | def RawId = rule(RawIdNoBackticks | '`' ~ (!'`' ~ ANY).+ ~ '`') // FIXME: are newlines in backticks allowed? 27 | def Id = rule(WL ~ RawId) 28 | def Ids = rule(Id.+(WL ~ ',')) 29 | 30 | def StableId: Rule0 = { 31 | def ClassQualifier = rule(WL ~ '[' ~ Id ~ WL ~ ']') 32 | def `.` = rule(WL ~ '.') 33 | def ThisOrSuper = rule(`this` | `super` ~ ClassQualifier.?) 34 | def ThisOrSuperTail = rule(ThisOrSuper ~ (`.` ~ Id).*) 35 | rule(Id.+(`.`) ~ (`.` ~ ThisOrSuperTail).? 
| ThisOrSuperTail) 36 | } 37 | 38 | //////////////////////////// PRIVATE /////////////////////////////////// 39 | 40 | private def IdRest = rule((Underscores ~ AlphaNum.+).* ~ OpSuffix) 41 | private def IdRestWithDollar = rule((Underscores ~ AlphaNum$.+).* ~ OpSuffix) 42 | private def Underscores = rule(ch('_').*) 43 | private def OpSuffix = rule((ch('_').+ ~ OpChar.*).?) 44 | } 45 | -------------------------------------------------------------------------------- /scalaParser/src/main/scala/scalaparser/L3_Literals.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package scalaparser 18 | 19 | import org.parboiled2._ 20 | 21 | trait L3_Literals { self: Parser with L0_Basics with L1_KeywordsAndOperators with L2_Identifiers => 22 | import CharacterClasses._ 23 | 24 | def Block: Rule0 25 | 26 | def Literal = rule(("-".? ~ (Float | Int)) | Bool | Char | String | Symbol | Null) 27 | 28 | def Float = { 29 | def Exp = rule(`Ee` ~ `+-`.? ~ DecNum) 30 | def Decimals = rule('.' ~ DIGIT.+ ~ Exp.? ~ `FfDd`.?) 31 | rule(Decimals | DIGIT.+ ~ (Decimals | Exp ~ `FfDd`.? | `FfDd`)) 32 | } 33 | 34 | def Int = rule((HexNum | DecNum) ~ `Ll`.?) 35 | 36 | def Bool = rule(True | False) 37 | 38 | def Char = rule("'" ~ (UnicodeEscape | EscapedChars | !'\\' ~ test(isPrintableChar(cursorChar)) ~ ANY) ~ "'") 39 | 40 | def Symbol = rule('\'' ~ (RawIdNoBackticks | Keyword)) // symbols can take on the same values as keywords! 41 | 42 | def String = { 43 | def TripleTail = rule('"' ~ '"' ~ ch('"').+) 44 | def Inter = rule('$' ~ (RawIdNoDollarNoBackticks | '{' ~ Block ~ WL ~ '}' | '$')) 45 | def Raw = rule('"'.? ~ '"'.? ~ !'\"' ~ ANY) 46 | def Simple = rule('\\' ~ DQBS | !DQLF ~ ANY) 47 | rule( 48 | RawId ~ '"' ~ ('"' ~ '"' ~ (Inter | Raw).* ~ TripleTail | (Inter | Simple).* ~ '"') 49 | | '"' ~ ('"' ~ '"' ~ Raw.* ~ TripleTail | Simple.* ~ '"') 50 | ) 51 | } 52 | 53 | def WLLiteral = rule(WL ~ Literal) 54 | 55 | //////////////////////////// PRIVATE /////////////////////////////////// 56 | 57 | private def UnicodeEscape = rule("\\u" ~ HEXDIGIT ~ HEXDIGIT ~ HEXDIGIT ~ HEXDIGIT) 58 | 59 | private def EscapedChars = rule('\\' ~ ESCAPEE) 60 | 61 | private def isPrintableChar(c: Char): Boolean = 62 | !Character.isISOControl(c) && 63 | !Character.isSurrogate(c) && { 64 | val block = Character.UnicodeBlock.of(c); block != null && block != Character.UnicodeBlock.SPECIALS 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /scalaParser/src/main/scala/scalaparser/L4_Types.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package scalaparser 18 | 19 | import org.parboiled2._ 20 | 21 | trait L4_Types { 22 | this: Parser 23 | with WhitespaceStringsAndChars with L0_Basics with L1_KeywordsAndOperators with L2_Identifiers with L3_Literals => 24 | 25 | def TypeExpr: Rule0 26 | 27 | def Mod: Rule0 = rule(LocalMod | AccessMod | `override`) 28 | def LocalMod: Rule0 = rule(`abstract` | `final` | `sealed` | `implicit` | `lazy`) 29 | 30 | def AccessMod: Rule0 = { 31 | def AccessQualifier = rule('[' ~ (`this` | Id) ~ ']') 32 | rule((`private` | `protected`) ~ AccessQualifier.?) 33 | } 34 | 35 | def Dcl: Rule0 = { 36 | def VarDcl = rule(`var` ~ Ids ~ `:` ~ Type) 37 | def FunDcl = rule(`def` ~ FunSig ~ (`:` ~ Type).?) 38 | rule(ValDcl | VarDcl | FunDcl | TypeDcl) 39 | } 40 | 41 | def Type: Rule0 = { 42 | def FunctionArgTypes = rule('(' ~ ParamType.+(',').? ~ ')') 43 | def ArrowType = rule(FunctionArgTypes ~ `=>` ~ Type) 44 | def ExistentialClause = rule(`forSome` ~ `{` ~ (TypeDcl | ValDcl).+(Semis) ~ `}`) 45 | def PostfixType = rule(InfixType ~ (`=>` ~ Type | ExistentialClause.?)) 46 | def Unbounded = rule(underscore | ArrowType | PostfixType) 47 | rule(Unbounded ~ TypeBounds) 48 | } 49 | 50 | def InfixType = rule(CompoundType ~ (NotNewline ~ Id ~ OneNLMax ~ CompoundType).*) 51 | 52 | def CompoundType = { 53 | def RefineStat = rule(TypeDef | Dcl) 54 | def Refinement = rule(OneNLMax ~ `{` ~ RefineStat.*(Semis) ~ `}`) 55 | rule(AnnotType.+(`with`) ~ Refinement.? | Refinement) 56 | } 57 | def AnnotType = rule(SimpleType ~ (NotNewline ~ (NotNewline ~ Annot).+).?) 58 | 59 | def SimpleType: Rule0 = { 60 | def BasicType = rule('(' ~ Types ~ ')' | StableId ~ '.' ~ `type` | StableId) 61 | rule(BasicType ~ (TypeArgs | `#` ~ Id).*) 62 | } 63 | 64 | def TypeArgs = rule('[' ~ Types ~ "]") 65 | def Types = rule(Type.+(',')) 66 | 67 | def ValDcl: Rule0 = rule(`val` ~ Ids ~ `:` ~ Type) 68 | def TypeDcl: Rule0 = rule(`type` ~ Id ~ TypeArgList.? ~ TypeBounds) 69 | 70 | def FunSig: Rule0 = { 71 | def FunTypeArgs = rule('[' ~ (Annot.* ~ TypeArg).+(',') ~ ']') 72 | def FunAllArgs = rule(FunArgs.* ~ (OneNLMax ~ '(' ~ `implicit` ~ Args ~ ')').?) 73 | def FunArgs = rule(OneNLMax ~ '(' ~ Args.? ~ ')') 74 | def FunArg = rule(Annot.* ~ Id ~ (`:` ~ ParamType).? ~ (`=` ~ TypeExpr).?) 75 | def Args = rule(FunArg.+(',')) 76 | rule((Id | `this`) ~ FunTypeArgs.? ~ FunAllArgs) 77 | } 78 | def ParamType = rule(`=>` ~ Type | Type ~ "*" | Type) 79 | 80 | def TypeBounds: Rule0 = rule((`>:` ~ Type).? ~ (`<:` ~ Type).?) 81 | 82 | def TypeArg: Rule0 = { 83 | def CtxBounds = rule((`<%` ~ Type).* ~ (`:` ~ Type).*) 84 | rule((Id | underscore) ~ TypeArgList.? ~ TypeBounds ~ CtxBounds) 85 | } 86 | 87 | def Annot: Rule0 = rule(`@` ~ SimpleType ~ ('(' ~ (Exprs ~ (`:` ~ `_*`).?).? ~ ")").*) 88 | 89 | def TypeArgList: Rule0 = { 90 | def Variant: Rule0 = rule(Annot.* ~ (WL ~ anyOf("+-")).? ~ TypeArg) 91 | rule('[' ~ Variant.*(',') ~ ']') 92 | } 93 | def Exprs: Rule0 = rule(TypeExpr.+(',')) 94 | def TypeDef: Rule0 = rule(`type` ~ Id ~ TypeArgList.? 
~ `=` ~ Type) 95 | } 96 | -------------------------------------------------------------------------------- /scalaParser/src/main/scala/scalaparser/L5_Exprs.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package scalaparser 18 | 19 | import org.parboiled2._ 20 | 21 | trait L5_Exprs { 22 | this: Parser 23 | with WhitespaceStringsAndChars with L0_Basics with L1_KeywordsAndOperators with L2_Identifiers with L3_Literals 24 | with L4_Types with L4_Xml => 25 | 26 | def NewBody: Rule0 27 | def BlockDef: Rule0 28 | 29 | def Import: Rule0 = { 30 | def ImportExpr: Rule0 = rule(StableId ~ ('.' ~ (underscore | Selectors)).?) 31 | def Selectors: Rule0 = rule('{' ~ (Selector ~ ',').* ~ (Selector | underscore) ~ "}") 32 | def Selector: Rule0 = rule(Id ~ (`=>` ~ (Id | underscore)).?) 33 | rule(`import` ~ ImportExpr.+(',')) 34 | } 35 | 36 | def Ascription = rule(`:` ~ (`_*` | Type | Annot.+)) 37 | 38 | def LambdaHead: Rule0 = { 39 | def Binding = rule((Id | underscore) ~ (`:` ~ Type).?) 40 | def Bindings = rule('(' ~ Binding.*(',') ~ ')') 41 | def Implicit = rule(`implicit`.? ~ Id ~ (`:` ~ InfixType).?) 42 | rule((Bindings | Implicit | underscore ~ Ascription.?) ~ `=>`) 43 | } 44 | object StatCtx extends WsCtx(true) 45 | object ExprCtx extends WsCtx(false) 46 | def TypeExpr = ExprCtx.Expr 47 | 48 | class WsCtx(injectSemicolons: Boolean) { 49 | 50 | def OneSemiMax = if (injectSemicolons) OneNLMax else MATCH 51 | def NoSemis = if (injectSemicolons) NotNewline else MATCH 52 | 53 | def Enumerators = { 54 | def Generator = rule(Pat1 ~ `<-` ~ Expr ~ Guard.?) 55 | def Assign = rule(Pat1 ~ `=` ~ Expr) 56 | def Enumerator = rule(Semis ~ Generator | optional(Semis) ~ Guard | Semis ~ Assign) 57 | rule(Generator ~ Enumerator.* ~ WL) 58 | } 59 | 60 | def Expr: Rule0 = { 61 | def If = { 62 | def Else = rule(Semi.? ~ `else` ~ Expr) 63 | rule(`if` ~ '(' ~ ExprCtx.Expr ~ ')' ~ Expr ~ Else.?) 64 | } 65 | def While = rule(`while` ~ '(' ~ Expr ~ ')' ~ Expr) 66 | def Try = { 67 | def Catch = rule(`catch` ~ Expr) 68 | def Finally = rule(`finally` ~ Expr) 69 | rule(`try` ~ Expr ~ Catch.? ~ Finally.?) 70 | } 71 | def DoWhile = rule(`do` ~ Expr ~ Semi.? ~ `while` ~ '(' ~ Expr ~ ")") 72 | 73 | def For = { 74 | def Body = rule('(' ~ ExprCtx.Enumerators ~ ')' | '{' ~ StatCtx.Enumerators ~ '}') 75 | rule(`for` ~ Body ~ `yield`.? ~ Expr) 76 | } 77 | def Throw = rule(`throw` ~ Expr) 78 | def Return = rule(`return` ~ Expr.?) 79 | 80 | def SmallerExpr = rule(PostfixExpr ~ (`match` ~ '{' ~ CaseClauses ~ "}" | Ascription).?) 81 | def LambdaBody = rule(If | While | Try | DoWhile | For | Throw | Return | SmallerExpr) 82 | rule(LambdaHead.* ~ LambdaBody) 83 | } 84 | 85 | def PostfixExpr: Rule0 = { 86 | def Prefixed = rule((WL ~ anyOf("-+~!") ~ WS ~ !OpChar) ~ SimpleExpr) 87 | def Assign = rule(SimpleExpr ~ (`=` ~ Expr).?) 
88 | def PrefixExpr = rule(Prefixed | Assign) 89 | 90 | def InfixExpr = rule(PrefixExpr ~ (NoSemis ~ Id ~ TypeArgs.? ~ OneSemiMax ~ PrefixExpr).*) 91 | rule(InfixExpr ~ (NotNewline ~ Id ~ LineEnd.?).?) 92 | } 93 | 94 | def SimpleExpr: Rule0 = { 95 | def Path = rule((Id ~ '.').* ~ `this` ~ ('.' ~ Id).* | StableId) 96 | def New = rule(`new` ~ NewBody) 97 | def Parened = rule('(' ~ Exprs.? ~ ")") 98 | def SimpleExpr1 = rule(XmlExpr | New | BlockExpr | WLLiteral | Path | underscore | Parened) 99 | rule(SimpleExpr1 ~ ('.' ~ Id | TypeArgs | NoSemis ~ ArgList).* ~ (NoSemis ~ underscore).?) 100 | } 101 | def Guard: Rule0 = rule(`if` ~ PostfixExpr) 102 | } 103 | 104 | def SimplePat: Rule0 = { 105 | def ExtractorArgs = rule(Pat.*(',')) 106 | def Extractor = rule(StableId ~ ('(' ~ ExtractorArgs ~ ')').?) 107 | def TupleEx = rule('(' ~ ExtractorArgs.? ~ ')') 108 | def Thingy = rule(underscore ~ (`:` ~ TypePat).? ~ !"*") 109 | rule(XmlPattern | Thingy | WLLiteral | TupleEx | Extractor | VarId) 110 | } 111 | 112 | def BlockExpr: Rule0 = rule('{' ~ (CaseClauses | Block) ~ `}`) 113 | 114 | def BlockStats: Rule0 = { 115 | def Prelude = rule(Annot.* ~ `implicit`.? ~ `lazy`.? ~ LocalMod.*) 116 | def Tmpl = rule(Prelude ~ BlockDef) 117 | def BlockStat = rule(Import | Tmpl | StatCtx.Expr) 118 | rule(BlockStat.+(Semis)) 119 | } 120 | 121 | def Block: Rule0 = { 122 | def End = rule(Semis.? ~ &("}" | `case`)) 123 | def ResultExpr = rule(StatCtx.Expr | LambdaHead ~ Block) 124 | def Body = rule(ResultExpr ~ End | BlockStats ~ (Semis ~ ResultExpr).? ~ End | End) 125 | rule(LambdaHead.* ~ Semis.? ~ Body) 126 | } 127 | 128 | def Patterns: Rule0 = rule(Pat.+(",")) 129 | def Pat: Rule0 = rule(Pat1.+('|')) 130 | def Pat1: Rule0 = rule(underscore ~ `:` ~ TypePat | VarId ~ `:` ~ TypePat | Pat2) 131 | 132 | def Pat2: Rule0 = { 133 | def Pat3 = rule(`_*` | SimplePat ~ (Id ~ SimplePat).*) 134 | rule(VarId ~ `@` ~ Pat3 | Pat3 | VarId) 135 | } 136 | 137 | def TypePat = rule(CompoundType) 138 | 139 | def ArgList: Rule0 = rule('(' ~ (Exprs ~ (`:` ~ `_*`).?).? ~ ")" | OneNLMax ~ BlockExpr) 140 | 141 | def CaseClauses: Rule0 = { 142 | def CaseClause: Rule0 = rule(`case` ~ Pat ~ ExprCtx.Guard.? ~ `=>` ~ Block) 143 | rule(CaseClause.+) 144 | } 145 | } 146 | -------------------------------------------------------------------------------- /scalaParser/src/main/scala/scalaparser/L6_TopLevel.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package scalaparser 18 | 19 | import org.parboiled2._ 20 | 21 | trait L6_TopLevel { 22 | this: Parser 23 | with WhitespaceStringsAndChars with L0_Basics with L1_KeywordsAndOperators with L2_Identifiers with L3_Literals 24 | with L4_Types with L4_Xml with L5_Exprs => 25 | 26 | def CompilationUnit: Rule0 = { 27 | def TopPackageSeq = rule((`package` ~ QualId ~ !(WS ~ "{")).+(Semis)) 28 | def Body = rule(TopPackageSeq ~ (Semis ~ TopStatSeq).? | TopStatSeq | MATCH) 29 | rule(Semis.? ~ Body ~ Semis.? ~ WL ~ EOI) 30 | } 31 | 32 | def TmplBody: Rule0 = { 33 | def Prelude = rule((Annot ~ OneNLMax).* ~ Mod.*) 34 | def TmplStat = rule(Import | Prelude ~ (Def | Dcl) | StatCtx.Expr) 35 | def SelfType = rule((`this` | Id | underscore) ~ (`:` ~ InfixType).? ~ `=>`) 36 | rule('{' ~ SelfType.? ~ Semis.? ~ TmplStat.*(Semis) ~ `}`) 37 | } 38 | 39 | def NewBody = rule(ClsTmpl | TmplBody) 40 | 41 | def BlockDef = rule(Def | TmplDef) 42 | 43 | def ValVarDef: Rule0 = { 44 | def Val = rule(Pat2.+(',') ~ (`:` ~ Type).? ~ `=` ~ StatCtx.Expr) 45 | def Var = rule(Ids ~ `:` ~ Type ~ `=` ~ underscore | Val) 46 | rule(`val` ~ Val | `var` ~ Var) 47 | } 48 | 49 | def Def: Rule0 = { 50 | def Body = rule(`=` ~ `macro`.? ~ StatCtx.Expr | OneNLMax ~ '{' ~ Block ~ "}") 51 | def FunDef = rule(`def` ~ FunSig ~ (`:` ~ Type).? ~ Body) 52 | rule(FunDef | TypeDef | ValVarDef | TmplDef) 53 | } 54 | 55 | def TmplDef: Rule0 = { 56 | def ClsDef = { 57 | def ClsAnnot = rule(`@` ~ SimpleType ~ ArgList) 58 | def Prelude = rule(NotNewline ~ (ClsAnnot.+ ~ AccessMod.? | ClsAnnot.* ~ AccessMod)) 59 | def ClsArgMod = rule((Mod.* ~ (`val` | `var`)).?) 60 | def ClsArg = rule(Annot.* ~ ClsArgMod ~ Id ~ `:` ~ ParamType ~ (`=` ~ ExprCtx.Expr).?) 61 | 62 | def Implicit = rule(OneNLMax ~ '(' ~ `implicit` ~ ClsArg.+(",") ~ ")") 63 | def ClsArgs = rule(OneNLMax ~ '(' ~ ClsArg.*(',') ~ ")") 64 | def AllArgs = rule(ClsArgs.+ ~ Implicit.? | Implicit) 65 | rule(`class` ~ Id ~ TypeArgList.? ~ Prelude.? ~ AllArgs.? ~ ClsTmplOpt) 66 | } 67 | def TraitTmplOpt = { 68 | def TraitParents = rule(AnnotType ~ (`with` ~ AnnotType).*) 69 | def TraitTmpl = rule(EarlyDefs.? ~ TraitParents ~ TmplBody.?) 70 | rule(`extends` ~ TraitTmpl | (`extends`.? ~ TmplBody).?) 71 | } 72 | def TraitDef = rule(`trait` ~ Id ~ TypeArgList.? ~ TraitTmplOpt) 73 | rule(TraitDef | `case`.? ~ (ClsDef | ObjDef)) 74 | } 75 | 76 | def ObjDef: Rule0 = rule(`object` ~ Id ~ ClsTmplOpt) 77 | def ClsTmplOpt: Rule0 = rule(`extends` ~ ClsTmpl | (`extends`.? ~ TmplBody).?) 78 | 79 | def ClsTmpl: Rule0 = { 80 | def Constr = rule(AnnotType ~ (NotNewline ~ ArgList).*) 81 | def ClsParents = rule(Constr ~ (`with` ~ AnnotType).*) 82 | rule(EarlyDefs.? ~ ClsParents ~ TmplBody.?) 83 | } 84 | 85 | def EarlyDefs: Rule0 = { 86 | def EarlyDef = rule((Annot ~ OneNLMax).* ~ Mod.* ~ ValVarDef) 87 | rule(`{` ~ EarlyDef.*(Semis) ~ `}` ~ `with`) 88 | } 89 | 90 | def TopStatSeq: Rule0 = { 91 | def PkgObj = rule(`package` ~ ObjDef) 92 | def PkgBlock = rule(`package` ~ QualId ~ `{` ~ TopStatSeq.? 
~ `}`) 93 | def Tmpl = rule((Annot ~ OneNLMax).* ~ Mod.* ~ TmplDef) 94 | def TopStat = rule(PkgBlock | PkgObj | Import | Tmpl) 95 | rule(TopStat.+(Semis)) 96 | } 97 | 98 | //////////////////////////// PRIVATE /////////////////////////////////// 99 | 100 | private def QualId = rule(WL ~ Id.+('.')) 101 | } 102 | -------------------------------------------------------------------------------- /scalaParser/src/main/scala/scalaparser/ScalaParser.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package scalaparser 18 | 19 | import org.parboiled2._ 20 | 21 | class ScalaParser(val input: ParserInput) 22 | extends Parser with WhitespaceStringsAndChars with L0_Basics with L1_KeywordsAndOperators with L2_Identifiers 23 | with L3_Literals with L4_Types with L4_Xml with L5_Exprs with L6_TopLevel 24 | -------------------------------------------------------------------------------- /scalaParser/src/main/scala/scalaparser/WhitespaceStringsAndChars.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package scalaparser 18 | 19 | import org.parboiled2._ 20 | 21 | trait WhitespaceStringsAndChars extends Parser { 22 | 23 | def WL: Rule0 24 | 25 | implicit def wlStr(s: String): Rule0 = rule(WL ~ str(s)) 26 | implicit def wlCh(s: Char): Rule0 = rule(WL ~ ch(s)) 27 | } 28 | -------------------------------------------------------------------------------- /scalaParser/src/test/scala/scalaparser/RealSourcesSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2009-2019 Mathias Doenitz 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package scalaparser 18 | 19 | import java.io.{File, FileInputStream} 20 | import java.nio.ByteBuffer 21 | import java.nio.charset.Charset 22 | import utest._ 23 | import org.parboiled2._ 24 | 25 | object RealSourcesSpec extends TestSuite { 26 | 27 | val tests = Tests { 28 | 29 | "The ScalaParser should successfully parse the following project sources" - { 30 | 31 | "parboiled2" - checkDir(".") 32 | 33 | "akka" - checkDir("~/Documents/projects/Akka") 34 | 35 | "shapeless" - checkDir("~/Documents/forks/shapeless") 36 | 37 | "spray" - checkDir("~/Documents/projects/spray") 38 | 39 | "scalaz" - checkDir("~/Documents/forks/scalaz") 40 | 41 | "spire" - checkDir("~/Documents/forks/spire") 42 | 43 | "sbt" - checkDir( 44 | "~/Documents/forks/xsbt", 45 | "sbt/std/InputWrapper.scala", // unicode escapes 46 | "sbt/src/sbt-test", 47 | "util/cross/src/main/input_sources" 48 | ) 49 | 50 | "scala" - checkDir( 51 | "~/Documents/forks/scala", 52 | // Not real Scala files 53 | "dbuild-meta-json-gen.scala", 54 | "genprod.scala", 55 | "disabled", // don't bother parsing disabled tests 56 | "neg", // or neg tests 57 | "deprecate-early-type-defs.scala", // or deprecated tests 58 | // or unicode escapes 59 | "test/files/run/literals.scala", 60 | "test/files/run/t3835.scala", 61 | "test/files/run/richs.scala", 62 | // Lots of guys in these folders seem to be borked, skip all of them 63 | "test/files/positions", 64 | "test/files/presentation", 65 | "test/pending" 66 | ) 67 | } 68 | } 69 | 70 | val utf8 = Charset.forName("UTF-8") 71 | val formatter = new ErrorFormatter(showTraces = true) 72 | 73 | def checkDir(path: String, blackList: String*): String => Boolean = { exampleName => 74 | def checkFile(path: String): Int = { 75 | val inputStream = new FileInputStream(path) 76 | val utf8Bytes = Array.ofDim[Byte](inputStream.available) 77 | inputStream.read(utf8Bytes) 78 | inputStream.close() 79 | val charBuffer = utf8.decode(ByteBuffer.wrap(utf8Bytes)) 80 | val parser = new ScalaParser(ParserInput(charBuffer.array(), charBuffer.remaining())) 81 | def fail(msg: String) = Predef.assert(false, msg) 82 | parser.CompilationUnit.run().failed foreach { 83 | case error: ParseError => fail(s"Error in file `$path`:\n" + error.format(parser, formatter)) 84 | case error => fail(s"Exception in file `$path`:\n$error") 85 | } 86 | parser.input.length 87 | } 88 | def listFiles(file: File): Iterator[String] = { 89 | val (dirs, files) = file.listFiles().toIterator.partition(_.isDirectory) 90 | files.map(_.getPath) ++ dirs.flatMap(listFiles) 91 | } 92 | 93 | val startTime = System.nanoTime() 94 | val fileChars = 95 | for { 96 | fileName <- listFiles(new File(if (path startsWith "~") System.getProperty("user.home") + path.tail else path)) 97 | if fileName endsWith ".scala" 98 | if !blackList.exists(fileName.contains) 99 | } yield checkFile(fileName) 100 | val totalChars = fileChars.sum / 1000 101 | val millis = (System.nanoTime() - startTime) / 1000000 102 | println(s"$exampleName:\n ${totalChars}K chars in $millis ms (${totalChars * 1000 / millis}K chars/sec})") 103 | true 104 | } 105 | } 106 | --------------------------------------------------------------------------------
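Usage note (not part of the repository sources): the sketch below shows how the pieces listed above are typically wired together — a parser mixing in DynamicRuleHandler plus a dispatcher generated by DynamicRuleDispatch — modeled directly on DynamicRuleDispatchSpec; the parser name, rule names, and result strings are illustrative assumptions, not library API.

    import org.parboiled2._
    import org.parboiled2.support.hlist.HNil

    // Hypothetical example parser: the rules "Hello"/"Bye" and their inputs are made up for illustration.
    class GreetingParser(val input: ParserInput) extends Parser with DynamicRuleHandler[GreetingParser, HNil] {
      def Hello = rule("hello" ~ EOI)
      def Bye   = rule("bye" ~ EOI)

      // DynamicRuleHandler implementation: turn every possible outcome into a String
      type Result = String
      def parser: GreetingParser                 = this
      def ruleNotFound(ruleName: String): Result = "n/a"
      def success(result: HNil): Result          = "ok"
      def parseError(error: ParseError): Result  = "error"
      def failure(error: Throwable): Result      = "throw"
    }

    object GreetingExample {
      // The macro builds a name-based dispatcher for the listed rules and also returns the rule-name Seq.
      val (dispatch, ruleNames) = DynamicRuleDispatch[GreetingParser, HNil]("Hello", "Bye")

      def run(input: String, ruleName: String): String =
        dispatch(new GreetingParser(input), ruleName) // e.g. run("hello", "Hello") == "ok"; run("x", "Nope") == "n/a"
    }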