├── .github └── workflows │ └── haskell-ci.yml ├── .gitignore ├── Grampa.lhs.pdf ├── LICENSE ├── README.md ├── cabal.project ├── deep-transformations ├── CHANGELOG.md ├── LICENSE ├── README.md ├── Setup.hs ├── deep-transformations.cabal ├── src │ ├── Transformation.hs │ └── Transformation │ │ ├── AG.hs │ │ ├── AG │ │ ├── Dimorphic.hs │ │ ├── Generics.hs │ │ └── Monomorphic.hs │ │ ├── Deep.hs │ │ ├── Deep.hs-boot │ │ ├── Deep │ │ └── TH.hs │ │ ├── Full.hs │ │ ├── Full │ │ └── TH.hs │ │ ├── Rank2.hs │ │ ├── Shallow.hs │ │ └── Shallow │ │ └── TH.hs └── test │ ├── Doctest.hs │ ├── README.lhs │ ├── RepMin.hs │ └── RepMinAuto.hs ├── grammatical-parsers ├── CHANGELOG.md ├── LICENSE ├── README.md ├── Setup.hs ├── examples │ ├── Arithmetic.hs │ ├── Boolean.hs │ ├── BooleanTransformations.hs │ ├── Combined.hs │ ├── Comparisons.hs │ ├── Conditionals.hs │ ├── Lambda.hs │ ├── Main.hs │ └── Utilities.hs ├── grammatical-parsers.cabal ├── src │ └── Text │ │ ├── Grampa.hs │ │ └── Grampa │ │ ├── Class.hs │ │ ├── Combinators.hs │ │ ├── ContextFree │ │ ├── Continued.hs │ │ ├── Continued │ │ │ └── Measured.hs │ │ ├── LeftRecursive.hs │ │ ├── LeftRecursive │ │ │ └── Transformer.hs │ │ ├── Memoizing.hs │ │ ├── Memoizing │ │ │ └── LeftRecursive.hs │ │ ├── Parallel.hs │ │ ├── SortedMemoizing.hs │ │ └── SortedMemoizing │ │ │ ├── LeftRecursive.hs │ │ │ ├── Transformer.hs │ │ │ └── Transformer │ │ │ └── LeftRecursive.hs │ │ ├── Internal.hs │ │ ├── Internal │ │ ├── LeftRecursive.hs │ │ └── Storable.hs │ │ └── PEG │ │ ├── Backtrack.hs │ │ ├── Backtrack │ │ └── Measured.hs │ │ ├── Continued.hs │ │ ├── Continued │ │ └── Measured.hs │ │ └── Packrat.hs └── test │ ├── Benchmark.hs │ ├── Doctest.hs │ ├── README.lhs │ ├── Test.hs │ └── Test │ ├── Ambiguous.hs │ └── Examples.hs ├── rank2classes ├── CHANGELOG.md ├── LICENSE ├── README.md ├── Setup.hs ├── Tutorial.lhs ├── rank2classes.cabal ├── src │ ├── Rank2.hs │ └── Rank2 │ │ └── TH.hs └── test │ ├── Doctest.hs │ ├── Issue23.hs │ ├── MyModule.lhs │ └── TH.hs └── stack.yaml /.github/workflows/haskell-ci.yml: -------------------------------------------------------------------------------- 1 | # This GitHub workflow config has been generated by a script via 2 | # 3 | # haskell-ci 'github' '--no-cabal-check' '--distribution' 'jammy' 'cabal.project' 4 | # 5 | # To regenerate the script (for example after adjusting tested-with) run 6 | # 7 | # haskell-ci regenerate 8 | # 9 | # For more information, see https://github.com/haskell-CI/haskell-ci 10 | # 11 | # version: 0.19.20250115 12 | # 13 | # REGENDATA ("0.19.20250115",["github","--no-cabal-check","--distribution","jammy","cabal.project"]) 14 | # 15 | name: Haskell-CI 16 | on: 17 | - push 18 | - pull_request 19 | jobs: 20 | linux: 21 | name: Haskell-CI - Linux - ${{ matrix.compiler }} 22 | runs-on: ubuntu-20.04 23 | timeout-minutes: 24 | 60 25 | container: 26 | image: buildpack-deps:jammy 27 | continue-on-error: ${{ matrix.allow-failure }} 28 | strategy: 29 | matrix: 30 | include: 31 | - compiler: ghc-9.12.1 32 | compilerKind: ghc 33 | compilerVersion: 9.12.1 34 | setup-method: ghcup 35 | allow-failure: false 36 | - compiler: ghc-9.10.1 37 | compilerKind: ghc 38 | compilerVersion: 9.10.1 39 | setup-method: ghcup 40 | allow-failure: false 41 | - compiler: ghc-9.8.2 42 | compilerKind: ghc 43 | compilerVersion: 9.8.2 44 | setup-method: ghcup 45 | allow-failure: false 46 | - compiler: ghc-9.6.4 47 | compilerKind: ghc 48 | compilerVersion: 9.6.4 49 | setup-method: ghcup 50 | allow-failure: false 51 | - compiler: ghc-9.4.8 52 | 
compilerKind: ghc 53 | compilerVersion: 9.4.8 54 | setup-method: ghcup 55 | allow-failure: false 56 | - compiler: ghc-9.2.8 57 | compilerKind: ghc 58 | compilerVersion: 9.2.8 59 | setup-method: ghcup 60 | allow-failure: false 61 | - compiler: ghc-9.0.2 62 | compilerKind: ghc 63 | compilerVersion: 9.0.2 64 | setup-method: ghcup 65 | allow-failure: false 66 | - compiler: ghc-8.10.7 67 | compilerKind: ghc 68 | compilerVersion: 8.10.7 69 | setup-method: ghcup 70 | allow-failure: false 71 | fail-fast: false 72 | steps: 73 | - name: apt-get install 74 | run: | 75 | apt-get update 76 | apt-get install -y --no-install-recommends gnupg ca-certificates dirmngr curl git software-properties-common libtinfo5 77 | - name: Install GHCup 78 | run: | 79 | mkdir -p "$HOME/.ghcup/bin" 80 | curl -sL https://downloads.haskell.org/ghcup/0.1.30.0/x86_64-linux-ghcup-0.1.30.0 > "$HOME/.ghcup/bin/ghcup" 81 | chmod a+x "$HOME/.ghcup/bin/ghcup" 82 | - name: Install cabal-install 83 | run: | 84 | "$HOME/.ghcup/bin/ghcup" install cabal 3.12.1.0 || (cat "$HOME"/.ghcup/logs/*.* && false) 85 | echo "CABAL=$HOME/.ghcup/bin/cabal-3.12.1.0 -vnormal+nowrap" >> "$GITHUB_ENV" 86 | - name: Install GHC (GHCup) 87 | if: matrix.setup-method == 'ghcup' 88 | run: | 89 | "$HOME/.ghcup/bin/ghcup" install ghc "$HCVER" || (cat "$HOME"/.ghcup/logs/*.* && false) 90 | HC=$("$HOME/.ghcup/bin/ghcup" whereis ghc "$HCVER") 91 | HCPKG=$(echo "$HC" | sed 's#ghc$#ghc-pkg#') 92 | HADDOCK=$(echo "$HC" | sed 's#ghc$#haddock#') 93 | echo "HC=$HC" >> "$GITHUB_ENV" 94 | echo "HCPKG=$HCPKG" >> "$GITHUB_ENV" 95 | echo "HADDOCK=$HADDOCK" >> "$GITHUB_ENV" 96 | env: 97 | HCKIND: ${{ matrix.compilerKind }} 98 | HCNAME: ${{ matrix.compiler }} 99 | HCVER: ${{ matrix.compilerVersion }} 100 | - name: Set PATH and environment variables 101 | run: | 102 | echo "$HOME/.cabal/bin" >> $GITHUB_PATH 103 | echo "LANG=C.UTF-8" >> "$GITHUB_ENV" 104 | echo "CABAL_DIR=$HOME/.cabal" >> "$GITHUB_ENV" 105 | echo "CABAL_CONFIG=$HOME/.cabal/config" >> "$GITHUB_ENV" 106 | HCNUMVER=$(${HC} --numeric-version|perl -ne '/^(\d+)\.(\d+)\.(\d+)(\.(\d+))?$/; print(10000 * $1 + 100 * $2 + ($3 == 0 ? 
$5 != 1 : $3))') 107 | echo "HCNUMVER=$HCNUMVER" >> "$GITHUB_ENV" 108 | echo "ARG_TESTS=--enable-tests" >> "$GITHUB_ENV" 109 | echo "ARG_BENCH=--enable-benchmarks" >> "$GITHUB_ENV" 110 | echo "HEADHACKAGE=false" >> "$GITHUB_ENV" 111 | echo "ARG_COMPILER=--$HCKIND --with-compiler=$HC" >> "$GITHUB_ENV" 112 | env: 113 | HCKIND: ${{ matrix.compilerKind }} 114 | HCNAME: ${{ matrix.compiler }} 115 | HCVER: ${{ matrix.compilerVersion }} 116 | - name: env 117 | run: | 118 | env 119 | - name: write cabal config 120 | run: | 121 | mkdir -p $CABAL_DIR 122 | cat >> $CABAL_CONFIG <> $CABAL_CONFIG < cabal-plan.xz 155 | echo 'f62ccb2971567a5f638f2005ad3173dba14693a45154c1508645c52289714cb2 cabal-plan.xz' | sha256sum -c - 156 | xz -d < cabal-plan.xz > $HOME/.cabal/bin/cabal-plan 157 | rm -f cabal-plan.xz 158 | chmod a+x $HOME/.cabal/bin/cabal-plan 159 | cabal-plan --version 160 | - name: checkout 161 | uses: actions/checkout@v4 162 | with: 163 | path: source 164 | - name: initial cabal.project for sdist 165 | run: | 166 | touch cabal.project 167 | echo "packages: $GITHUB_WORKSPACE/source/rank2classes" >> cabal.project 168 | echo "packages: $GITHUB_WORKSPACE/source/grammatical-parsers" >> cabal.project 169 | echo "packages: $GITHUB_WORKSPACE/source/deep-transformations" >> cabal.project 170 | cat cabal.project 171 | - name: sdist 172 | run: | 173 | mkdir -p sdist 174 | $CABAL sdist all --output-dir $GITHUB_WORKSPACE/sdist 175 | - name: unpack 176 | run: | 177 | mkdir -p unpacked 178 | find sdist -maxdepth 1 -type f -name '*.tar.gz' -exec tar -C $GITHUB_WORKSPACE/unpacked -xzvf {} \; 179 | - name: generate cabal.project 180 | run: | 181 | PKGDIR_rank2classes="$(find "$GITHUB_WORKSPACE/unpacked" -maxdepth 1 -type d -regex '.*/rank2classes-[0-9.]*')" 182 | echo "PKGDIR_rank2classes=${PKGDIR_rank2classes}" >> "$GITHUB_ENV" 183 | PKGDIR_grammatical_parsers="$(find "$GITHUB_WORKSPACE/unpacked" -maxdepth 1 -type d -regex '.*/grammatical-parsers-[0-9.]*')" 184 | echo "PKGDIR_grammatical_parsers=${PKGDIR_grammatical_parsers}" >> "$GITHUB_ENV" 185 | PKGDIR_deep_transformations="$(find "$GITHUB_WORKSPACE/unpacked" -maxdepth 1 -type d -regex '.*/deep-transformations-[0-9.]*')" 186 | echo "PKGDIR_deep_transformations=${PKGDIR_deep_transformations}" >> "$GITHUB_ENV" 187 | rm -f cabal.project cabal.project.local 188 | touch cabal.project 189 | touch cabal.project.local 190 | echo "packages: ${PKGDIR_rank2classes}" >> cabal.project 191 | echo "packages: ${PKGDIR_grammatical_parsers}" >> cabal.project 192 | echo "packages: ${PKGDIR_deep_transformations}" >> cabal.project 193 | echo "package rank2classes" >> cabal.project 194 | echo " ghc-options: -Werror=missing-methods" >> cabal.project 195 | echo "package grammatical-parsers" >> cabal.project 196 | echo " ghc-options: -Werror=missing-methods" >> cabal.project 197 | echo "package deep-transformations" >> cabal.project 198 | echo " ghc-options: -Werror=missing-methods" >> cabal.project 199 | cat >> cabal.project <> cabal.project.local 205 | cat cabal.project 206 | cat cabal.project.local 207 | - name: dump install plan 208 | run: | 209 | $CABAL v2-build $ARG_COMPILER $ARG_TESTS $ARG_BENCH --dry-run all 210 | cabal-plan 211 | - name: restore cache 212 | uses: actions/cache/restore@v4 213 | with: 214 | key: ${{ runner.os }}-${{ matrix.compiler }}-${{ github.sha }} 215 | path: ~/.cabal/store 216 | restore-keys: ${{ runner.os }}-${{ matrix.compiler }}- 217 | - name: install dependencies 218 | run: | 219 | $CABAL v2-build $ARG_COMPILER --disable-tests --disable-benchmarks 
--dependencies-only -j2 all 220 | $CABAL v2-build $ARG_COMPILER $ARG_TESTS $ARG_BENCH --dependencies-only -j2 all 221 | - name: build w/o tests 222 | run: | 223 | $CABAL v2-build $ARG_COMPILER --disable-tests --disable-benchmarks all 224 | - name: build 225 | run: | 226 | $CABAL v2-build $ARG_COMPILER $ARG_TESTS $ARG_BENCH all --write-ghc-environment-files=always 227 | - name: tests 228 | run: | 229 | $CABAL v2-test $ARG_COMPILER $ARG_TESTS $ARG_BENCH all --test-show-details=direct 230 | - name: haddock 231 | run: | 232 | $CABAL v2-haddock --disable-documentation --haddock-all $ARG_COMPILER --with-haddock $HADDOCK $ARG_TESTS $ARG_BENCH all 233 | - name: unconstrained build 234 | run: | 235 | rm -f cabal.project.local 236 | $CABAL v2-build $ARG_COMPILER --disable-tests --disable-benchmarks all 237 | - name: save cache 238 | if: always() 239 | uses: actions/cache/save@v4 240 | with: 241 | key: ${{ runner.os }}-${{ matrix.compiler }}-${{ github.sha }} 242 | path: ~/.cabal/store 243 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | stack.yaml 2 | .stack-work/ 3 | dist/ 4 | dist-newstyle/ 5 | tmp/ 6 | TAGS 7 | -------------------------------------------------------------------------------- /Grampa.lhs.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/blamario/grampa/bbb34f5460f0741e297b07b6718a00dddc4b57e2/Grampa.lhs.pdf -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016, Mario Blažević 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are 6 | met: 7 | 8 | 1. Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | 2. Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the 14 | distribution. 15 | 16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | The Grampa project consists of three related Haskell libraries: 2 | 3 | - [`rank2classes`](http://github.com/blamario/grampa/tree/master/rank2classes) is at the lowest level. 
It provides a 4 | set of type classes that mirror `Functor` and related type classes but for records. 5 | - [`grammatical-parsers`](http://github.com/blamario/grampa/tree/master/grammatical-parsers) is a parser combinator 6 | and grammar combinator library that depends on `rank2classes`. 7 | - [`deep-transformations`](http://github.com/blamario/grampa/tree/master/deep-transformations) depends on and extends 8 | `rank2classes` to operate on trees of mutually-recursive types. This library is independent of grammatical-parsers, 9 | but can be used to manipulate the parse trees produced by it. 10 | -------------------------------------------------------------------------------- /cabal.project: -------------------------------------------------------------------------------- 1 | packages: 2 | rank2classes/ 3 | grammatical-parsers/ 4 | deep-transformations/ 5 | 6 | allow-newer: data-functor-logistic:base, indexed-traversable:base, indexed-traversable-instances:base 7 | -------------------------------------------------------------------------------- /deep-transformations/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Revision history for deep-transformations 2 | 3 | ## 0.3 -- 2025-01-01 4 | 5 | * **BREAKING**: Changed the definitions of `Deep.Product` and `Deep.Sum` 6 | * Added `Shallow` class instances for all data types declared in the `Rank2` module 7 | * Added `Shallow` class instances for `Proxy`, `Const`, `Product`, and `Sum` 8 | * Bumped the upper bound of the template-haskell dependency to compile with GHC 9.12.1 9 | * Fixed the PolyKinds-related test errors 10 | * Added `Deep.Only` and `Deep.Flip` data types to mirror `Rank2.Only` and `Rank2.Flip` 11 | 12 | ## 0.2.3 -- 2024-05-18 13 | 14 | * Bumped the upper bound of the template-haskell dependency 15 | * Generalized the TH generation code 16 | * Fixed the loopy superclass constraints in instance declarations 17 | 18 | ## 0.2.2 -- 2023-06-25 19 | 20 | * Updated for GHC 9.8.1 and TH 2.22 21 | * Updated TH code to use `DuplicateRecordFields` and `OverloadedRecordDot` when enabled 22 | * Fixed warnings in tests 23 | 24 | ## 0.2.1.2 -- 2023-06-25 25 | 26 | * Bumped the upper bound of the `template-haskell` dependency 27 | 28 | ## 0.2.1.1 -- 2023-04-02 29 | 30 | * Bumped the upper bound of the `rank2classes` dependency 31 | 32 | ## 0.2.1 -- 2023-01-07 33 | 34 | * Added AG.Dimorphic 35 | * Added combinators `Transformation.Mapped`, `Folded`, and `Traversed` 36 | * Compiling with GHC 9.4 37 | 38 | ## 0.2 -- 2022-03-27 39 | 40 | * Changes necessary to compile with GHC 9.2.2 41 | * Excluded GHC 8.2.2 from `deep-transformations` and GitHub CI 42 | * Increased the `deep-transformations`' bottom bound of base dependency 43 | * Relaxed the bounds of the `generic-lens` dependency 44 | * Fixed `deep-transformations` compilation with GHC 9.0.1 45 | * Added an explicit implementation `mappend = (<>)` 46 | * Used haskell-ci to generate GitHub CI 47 | * Incremented upper dependency bounds 48 | * Added `AG.Generics.Keep` 49 | * Added `knitKeeping` and `applyDefaultWithAttributes` to `AG` 50 | * Dropped `fullMapDefault` 51 | * Switch the README's attribute grammar functor to map upwards 52 | * Removed unused code 53 | * Added `infixl 4` declarations for all `<$>` methods 54 | * Added the `AG.Monomorphic` module 55 | * Fixed `Transformation.Shallow.TH` for repeated type parameters 56 | * Added `Transformation.Deep.Sum` 57 | 58 | ## 0.1 -- 2020-11-11 59 | 60 | First version 61 |
-------------------------------------------------------------------------------- /deep-transformations/LICENSE: -------------------------------------------------------------------------------- 1 | ../LICENSE -------------------------------------------------------------------------------- /deep-transformations/Setup.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Distribution.Extra.Doctest (defaultMainWithDoctests) 4 | 5 | main :: IO () 6 | main = defaultMainWithDoctests "doctests" 7 | -------------------------------------------------------------------------------- /deep-transformations/deep-transformations.cabal: -------------------------------------------------------------------------------- 1 | -- Initial language-oberon.cabal generated by cabal init. For further 2 | -- documentation, see http://haskell.org/cabal/users-guide/ 3 | 4 | name: deep-transformations 5 | version: 0.3 6 | synopsis: Deep natural and unnatural tree transformations, including attribute grammars 7 | description: 8 | 9 | This library builds on the rank2classes package to provide the 10 | equivalents of 'Functor' and related classes for heterogeneous trees, including complex abstract syntax trees of 11 | real-world programming languages. 12 | . 13 | The functionality includes attribute grammars in "Transformation.AG". 14 | 15 | homepage: https://github.com/blamario/grampa/tree/master/deep-transformations 16 | bug-reports: https://github.com/blamario/grampa/issues 17 | license: BSD3 18 | license-file: LICENSE 19 | author: Mario Blažević 20 | maintainer: blamario@protonmail.com 21 | copyright: (c) 2019 Mario Blažević 22 | category: Control, Generics 23 | build-type: Custom 24 | cabal-version: >=1.10 25 | tested-with: GHC==9.12.1, GHC==9.10.1, GHC==9.8.2, GHC==9.6.4, GHC==9.4.8, GHC==9.2.8, GHC==9.0.2, GHC==8.10.7 26 | extra-source-files: README.md, CHANGELOG.md 27 | source-repository head 28 | type: git 29 | location: https://github.com/blamario/grampa 30 | custom-setup 31 | setup-depends: 32 | base >= 4 && <5, 33 | Cabal < 4, 34 | cabal-doctest >= 1 && <1.1 35 | 36 | library 37 | hs-source-dirs: src 38 | exposed-modules: Transformation, 39 | Transformation.Shallow, Transformation.Shallow.TH, 40 | Transformation.Deep, Transformation.Deep.TH, 41 | Transformation.Full, Transformation.Full.TH, 42 | Transformation.Rank2, 43 | Transformation.AG, Transformation.AG.Generics, 44 | Transformation.AG.Monomorphic, Transformation.AG.Dimorphic 45 | ghc-options: -Wall 46 | build-depends: base >= 4.11 && < 5, rank2classes >= 1.4.1 && < 1.6, 47 | transformers >= 0.5 && < 0.7, 48 | template-haskell >= 2.11 && < 2.24, generic-lens >= 1.2 && < 2.3 49 | default-language: Haskell2010 50 | 51 | test-suite doctests 52 | type: exitcode-stdio-1.0 53 | hs-source-dirs: test 54 | default-language: Haskell2010 55 | main-is: Doctest.hs 56 | other-modules: README, RepMin, RepMinAuto 57 | ghc-options: -threaded -pgmL markdown-unlit 58 | build-depends: base, rank2classes, deep-transformations, doctest >= 0.8 59 | build-tool-depends: markdown-unlit:markdown-unlit >= 0.5 && < 0.6 60 | x-doctest-options: --fast 61 | -------------------------------------------------------------------------------- /deep-transformations/src/Transformation.hs: -------------------------------------------------------------------------------- 1 | {-# Language DataKinds, FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, ScopedTypeVariables, 2 | TypeFamilies, TypeOperators, UndecidableInstances #-} 3 | 4 | -- | 
A /natural transformation/ is a concept from category theory for a mapping between two functors and their objects 5 | -- that preserves a naturality condition. In Haskell the naturality condition boils down to parametricity, so a 6 | -- natural transformation between two functors @f@ and @g@ is represented as 7 | -- 8 | -- > type NaturalTransformation f g = ∀a. f a → g a 9 | -- 10 | -- This type appears in several Haskell libraries, most obviously in 11 | -- [natural-transformations](https://hackage.haskell.org/package/natural-transformation). There are times, however, 12 | -- when we crave more control. Sometimes what we want to do depends on which type @a@ is hiding in that @f a@ we're 13 | -- given. Sometimes, in other words, we need an /unnatural/ transformation. 14 | -- 15 | -- This means we have to abandon parametricity for ad-hoc polymorphism, and that means type classes. There are two 16 | -- steps to defining a transformation: 17 | -- 18 | -- * an instance of the base class 'Transformation' declares the two functors being mapped, much like a function type 19 | -- signature, 20 | -- * while the actual mapping of values is performed by an arbitrary number of instances of the method '$', a bit like 21 | -- multiple equation clauses that make up a single function definition. 22 | -- 23 | -- The module is meant to be imported qualified, and the importing module will require at least the 24 | -- @FlexibleInstances@, @MultiParamTypeClasses@, and @TypeFamilies@ language extensions to declare the appropriate 25 | -- instances. 26 | 27 | module Transformation where 28 | 29 | import qualified Data.Functor.Compose as Functor 30 | import Data.Functor.Const (Const) 31 | import Data.Functor.Product (Product(Pair)) 32 | import Data.Functor.Sum (Sum(InL, InR)) 33 | import Data.Kind (Type) 34 | import qualified Rank2 35 | 36 | import Prelude hiding (($)) 37 | 38 | -- $setup 39 | -- >>> {-# Language FlexibleInstances, MultiParamTypeClasses, TypeFamilies, TypeOperators #-} 40 | -- >>> import Transformation (Transformation) 41 | -- >>> import qualified Transformation 42 | 43 | -- | A 'Transformation', natural or not, maps one functor to another. 44 | -- For example, here's the declaration for a transformation that maps `Maybe` to `[]`: 45 | -- 46 | -- >>> :{ 47 | -- data MaybeToList = MaybeToList 48 | -- instance Transformation MaybeToList where 49 | -- type Domain MaybeToList = Maybe 50 | -- type Codomain MaybeToList = [] 51 | -- :} 52 | class Transformation t where 53 | type Domain t :: Type -> Type 54 | type Codomain t :: Type -> Type 55 | 56 | -- | Before we can apply a 'Transformation', we also need to declare 'At' which base types it is applicable and how 57 | -- it works. If the transformation is natural, we'll need only one instance declaration. 58 | -- 59 | -- >>> :{ 60 | -- instance MaybeToList `Transformation.At` a where 61 | -- MaybeToList $ Just x = [x] 62 | -- MaybeToList $ Nothing = [] 63 | -- :} 64 | -- 65 | -- >>> MaybeToList Transformation.$ (Just True) 66 | -- [True] 67 | -- 68 | -- An unnatural 'Transformation' can behave differently depending on the base type and even on its value. 69 | -- 70 | -- >>> :{ 71 | -- instance {-# OVERLAPS #-} MaybeToList `At` Char where 72 | -- MaybeToList $ Just '\0' = [] 73 | -- MaybeToList $ Just c = [c] 74 | -- MaybeToList $ Nothing = [] 75 | -- :} 76 | class Transformation t => At t x where 77 | -- | Apply the transformation @t@ at type @x@ to map 'Domain' to the 'Codomain' functor. 
78 | ($) :: t -> Domain t x -> Codomain t x 79 | infixr 0 $ 80 | 81 | -- | Alphabetical synonym for '$' 82 | apply :: t `At` x => t -> Domain t x -> Codomain t x 83 | apply = ($) 84 | 85 | -- | Composition of two transformations 86 | data Compose t u = Compose t u 87 | 88 | -- | Transformation under a 'Data.Functor.Functor' 89 | newtype Mapped (f :: Type -> Type) t = Mapped t 90 | 91 | -- | Transformation under a 'Foldable' 92 | newtype Folded (f :: Type -> Type) t = Folded t 93 | 94 | -- | Transformation under a 'Traversable' 95 | newtype Traversed (f :: Type -> Type) t = Traversed t 96 | 97 | instance (Transformation t, Transformation u, Domain t ~ Codomain u) => Transformation (Compose t u) where 98 | type Domain (Compose t u) = Domain u 99 | type Codomain (Compose t u) = Codomain t 100 | 101 | instance Transformation t => Transformation (Mapped f t) where 102 | type Domain (Mapped f t) = Functor.Compose f (Domain t) 103 | type Codomain (Mapped f t) = Functor.Compose f (Codomain t) 104 | 105 | instance Transformation t => Transformation (Folded f t) where 106 | type Domain (Folded f t) = Functor.Compose f (Domain t) 107 | type Codomain (Folded f t) = Codomain t 108 | 109 | instance (Transformation t, Codomain t ~ Functor.Compose m n) => Transformation (Traversed f t) where 110 | type Domain (Traversed f t) = Functor.Compose f (Domain t) 111 | type Codomain (Traversed f t) = 112 | Functor.Compose (ComposeOuter (Codomain t)) (Functor.Compose f (ComposeInner (Codomain t))) 113 | 114 | type family ComposeOuter (c :: Type -> Type) :: Type -> Type where 115 | ComposeOuter (Functor.Compose p q) = p 116 | 117 | type family ComposeInner (c :: Type -> Type) :: Type -> Type where 118 | ComposeInner (Functor.Compose p q) = q 119 | 120 | instance (t `At` x, u `At` x, Domain t ~ Codomain u) => Compose t u `At` x where 121 | Compose t u $ x = t $ u $ x 122 | 123 | instance (t `At` x, Functor f) => Mapped f t `At` x where 124 | Mapped t $ Functor.Compose x = Functor.Compose ((t $) <$> x) 125 | 126 | instance (t `At` x, Foldable f, Codomain t ~ Const m, Monoid m) => Folded f t `At` x where 127 | Folded t $ Functor.Compose x = foldMap (t $) x 128 | 129 | instance (t `At` x, Traversable f, Codomain t ~ Functor.Compose m n, Applicative m) => Traversed f t `At` x where 130 | Traversed t $ Functor.Compose x = Functor.Compose (Functor.Compose <$> traverse (Functor.getCompose . 
(t $)) x) 131 | 132 | instance Transformation (Rank2.Arrow (p :: Type -> Type) q x) where 133 | type Domain (Rank2.Arrow p q x) = p 134 | type Codomain (Rank2.Arrow p q x) = q 135 | 136 | instance Rank2.Arrow p q x `At` x where 137 | ($) = Rank2.apply 138 | 139 | instance (Transformation t1, Transformation t2, Domain t1 ~ Domain t2) => Transformation (t1, t2) where 140 | type Domain (t1, t2) = Domain t1 141 | type Codomain (t1, t2) = Product (Codomain t1) (Codomain t2) 142 | 143 | instance (t `At` x, u `At` x, Domain t ~ Domain u) => (t, u) `At` x where 144 | (t, u) $ x = Pair (t $ x) (u $ x) 145 | 146 | instance (Transformation t1, Transformation t2, Domain t1 ~ Domain t2) => Transformation (Either t1 t2) where 147 | type Domain (Either t1 t2) = Domain t1 148 | type Codomain (Either t1 t2) = Sum (Codomain t1) (Codomain t2) 149 | 150 | instance (t `At` x, u `At` x, Domain t ~ Domain u) => Either t u `At` x where 151 | Left t $ x = InL (t $ x) 152 | Right t $ x = InR (t $ x) 153 | -------------------------------------------------------------------------------- /deep-transformations/src/Transformation/AG.hs: -------------------------------------------------------------------------------- 1 | {-# Language FlexibleContexts, FlexibleInstances, 2 | MultiParamTypeClasses, RankNTypes, ScopedTypeVariables, StandaloneDeriving, 3 | TypeFamilies, TypeOperators, UndecidableInstances #-} 4 | 5 | -- | An attribute grammar is a particular kind of 'Transformation' that assigns attributes to nodes in a 6 | -- tree. Different node types may have different types of attributes, so the transformation is not natural. All 7 | -- attributes are divided into 'Inherited' and 'Synthesized' attributes. 8 | 9 | module Transformation.AG where 10 | 11 | import Data.Kind (Type) 12 | import Unsafe.Coerce (unsafeCoerce) 13 | 14 | import qualified Rank2 15 | import qualified Transformation 16 | 17 | -- | Type family that maps a node type to the type of its attributes, indexed per type constructor. 18 | type family Atts (f :: Type -> Type) a 19 | 20 | -- | Type constructor wrapping the inherited attributes for the given transformation. 21 | newtype Inherited t a = Inherited{inh :: Atts (Inherited t) a} 22 | -- | Type constructor wrapping the synthesized attributes for the given transformation. 23 | newtype Synthesized t a = Synthesized{syn :: Atts (Synthesized t) a} 24 | 25 | deriving instance (Show (Atts (Inherited t) a)) => Show (Inherited t a) 26 | deriving instance (Show (Atts (Synthesized t) a)) => Show (Synthesized t a) 27 | 28 | -- | A node's 'Semantics' is a natural transformation from the node's inherited attributes to its synthesized 29 | -- attributes. 30 | type Semantics t = Inherited t Rank2.~> Synthesized t 31 | 32 | -- | A node's 'PreservingSemantics' is a natural transformation from the node's inherited attributes to all its 33 | -- attributes paired with the preserved node. 34 | type PreservingSemantics t f = Rank2.Arrow (Inherited t) (Rank2.Product (AllAtts t) f) 35 | 36 | -- | All inherited and synthesized attributes 37 | data AllAtts t a = AllAtts{allInh :: Atts (Inherited t) a, 38 | allSyn :: Atts (Synthesized t) a} 39 | 40 | -- | An attribution rule maps a node's inherited attributes and its child nodes' synthesized attributes to the node's 41 | -- synthesized attributes and the children nodes' inherited attributes. 42 | type Rule t g = forall sem . 
sem ~ Semantics t 43 | => (Inherited t (g sem (Semantics t)), g sem (Synthesized t)) 44 | -> (Synthesized t (g sem (Semantics t)), g sem (Inherited t)) 45 | 46 | -- | The core function to tie the recursive knot, turning a 'Rule' for a node into its 'Semantics'. 47 | knit :: (Rank2.Apply (g sem), sem ~ Semantics t) => Rule t g -> g sem sem -> sem (g sem sem) 48 | knit r chSem = Rank2.Arrow knit' 49 | where knit' inherited = synthesized 50 | where (synthesized, chInh) = r (inherited, chSyn) 51 | chSyn = chSem Rank2.<*> chInh 52 | 53 | -- | Another way to tie the recursive knot, using a 'Rule' to add 'AllAtts' information to every node 54 | knitKeeping :: forall t f g sem. (sem ~ PreservingSemantics t f, Rank2.Apply (g sem), 55 | Atts (Inherited t) (g sem sem) ~ Atts (Inherited t) (g (Semantics t) (Semantics t)), 56 | Atts (Synthesized t) (g sem sem) ~ Atts (Synthesized t) (g (Semantics t) (Semantics t)), 57 | g sem (Synthesized t) ~ g (Semantics t) (Synthesized t), 58 | g sem (Inherited t) ~ g (Semantics t) (Inherited t)) 59 | => (forall a. f a -> a) -> Rule t g -> f (g (PreservingSemantics t f) (PreservingSemantics t f)) 60 | -> PreservingSemantics t f (g (PreservingSemantics t f) (PreservingSemantics t f)) 61 | knitKeeping extract rule x = Rank2.Arrow knitted 62 | where knitted :: Inherited t (g (PreservingSemantics t f) (PreservingSemantics t f)) 63 | -> Rank2.Product (AllAtts t) f (g (PreservingSemantics t f) (PreservingSemantics t f)) 64 | chSem :: g (PreservingSemantics t f) (PreservingSemantics t f) 65 | knitted inherited = Rank2.Pair AllAtts{allInh= inh inherited, allSyn= syn synthesized} x 66 | where chInh :: g (PreservingSemantics t f) (Inherited t) 67 | chSyn :: g (PreservingSemantics t f) (Synthesized t) 68 | chKept :: g (PreservingSemantics t f) (Rank2.Product (AllAtts t) f) 69 | synthesized :: Synthesized t (g (PreservingSemantics t f) (PreservingSemantics t f)) 70 | (synthesized, chInh) = unsafeCoerce (rule (unsafeCoerce inherited, unsafeCoerce chSyn)) 71 | chSyn = Synthesized . allSyn . Rank2.fst Rank2.<$> chKept 72 | chKept = chSem Rank2.<*> chInh 73 | chSem = extract x 74 | 75 | -- | The core type class for defining the attribute grammar. The instances of this class typically have a form like 76 | -- 77 | -- > instance Attribution MyAttGrammar MyNode (Semantics MyAttGrammar) Identity where 78 | -- > attribution MyAttGrammar{} (Identity MyNode{}) 79 | -- > (Inherited fromParent, 80 | -- > Synthesized MyNode{firstChild= fromFirstChild, ...}) 81 | -- > = (Synthesized _forMyself, 82 | -- > Inherited MyNode{firstChild= _forFirstChild, ...}) 83 | -- 84 | -- If you prefer to separate the calculation of different attributes, you can split the above instance into two 85 | -- instances of the 'Transformation.AG.Generics.Bequether' and 'Transformation.AG.Generics.Synthesizer' classes 86 | -- instead. If you derive 'GHC.Generics.Generic' instances for your attributes, you can even define each synthesized 87 | -- attribute individually with a 'Transformation.AG.Generics.SynthesizedField' instance. 88 | class Attribution t g deep shallow where 89 | -- | The attribution rule for a given transformation and node. 90 | attribution :: t -> shallow (g deep deep) -> Rule t g 91 | 92 | -- | Drop-in implementation of 'Transformation.$' 93 | applyDefault :: (q ~ Semantics t, x ~ g q q, Rank2.Apply (g q), Attribution t g q p) 94 | => (forall a. 
p a -> a) -> t -> p x -> q x 95 | applyDefault extract t x = knit (attribution t x) (extract x) 96 | {-# INLINE applyDefault #-} 97 | 98 | -- | Drop-in implementation of 'Transformation.$' that preserves all attributes with every original node 99 | applyDefaultWithAttributes :: (p ~ Transformation.Domain t, q ~ PreservingSemantics t p, x ~ g q q, Rank2.Apply (g q), 100 | Atts (Inherited t) (g q q) ~ Atts (Inherited t) (g (Semantics t) (Semantics t)), 101 | Atts (Synthesized t) (g q q) ~ Atts (Synthesized t) (g (Semantics t) (Semantics t)), 102 | g q (Synthesized t) ~ g (Semantics t) (Synthesized t), 103 | g q (Inherited t) ~ g (Semantics t) (Inherited t), 104 | Attribution t g (PreservingSemantics t p) p) 105 | => (forall a. p a -> a) -> t -> p (g (PreservingSemantics t p) (PreservingSemantics t p)) 106 | -> PreservingSemantics t p (g (PreservingSemantics t p) (PreservingSemantics t p)) 107 | applyDefaultWithAttributes extract t x = knitKeeping extract (attribution t x) x 108 | {-# INLINE applyDefaultWithAttributes #-} 109 | -------------------------------------------------------------------------------- /deep-transformations/src/Transformation/AG/Dimorphic.hs: -------------------------------------------------------------------------------- 1 | {-# Language Haskell2010, DeriveDataTypeable, FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, RankNTypes, 2 | ScopedTypeVariables, TypeFamilies, TypeOperators, UndecidableInstances #-} 3 | 4 | -- | A special case of an attribute grammar where every node has only a single inherited and a single synthesized 5 | -- attribute of the same monoidal type. The synthesized attributes of child nodes are all 'mconcat`ted together. 6 | 7 | module Transformation.AG.Dimorphic where 8 | 9 | import Data.Data (Data, Typeable) 10 | import Data.Functor.Compose (Compose(..)) 11 | import Data.Functor.Const (Const(..)) 12 | import Data.Kind (Type) 13 | import Data.Semigroup (Semigroup(..)) 14 | import qualified Rank2 15 | import Transformation (Transformation, Domain, Codomain, At) 16 | import qualified Transformation 17 | import qualified Transformation.Deep as Deep 18 | import qualified Transformation.Full as Full 19 | 20 | -- | Transformation wrapper that allows automatic inference of attribute rules. 21 | newtype Auto t = Auto t 22 | 23 | -- | Transformation wrapper that allows automatic inference of attribute rules and preservation of the attribute with 24 | -- the original nodes. 25 | newtype Keep t = Keep t 26 | 27 | data Atts a b = Atts{ 28 | inh :: a, 29 | syn :: b} 30 | deriving (Data, Typeable, Show) 31 | 32 | instance (Semigroup a, Semigroup b) => Semigroup (Atts a b) where 33 | Atts i1 s1 <> Atts i2 s2 = Atts (i1 <> i2) (s1 <> s2) 34 | 35 | instance (Monoid a, Monoid b) => Monoid (Atts a b) where 36 | mappend = (<>) 37 | mempty = Atts mempty mempty 38 | 39 | -- | A node's 'Semantics' maps its inherited attribute to its synthesized attribute. 40 | type Semantics a b = Const (a -> b) 41 | 42 | -- | A node's 'PreservingSemantics' maps its inherited attribute to its synthesized attribute. 43 | type PreservingSemantics f a b = Compose ((->) a) (Compose ((,) (Atts a b)) f) 44 | 45 | -- | An attribution rule maps a node's inherited attribute and its child nodes' synthesized attribute to the node's 46 | -- synthesized attribute and the children nodes' inherited attributes. 
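--
-- As an illustrative sketch (not part of the library), a rule that passes each node's depth down as the
-- inherited attribute and reports the depth of the deepest descendant back up as the synthesized
-- attribute could be written, using @Sum@ and @Max@ from "Data.Semigroup":
--
-- > depths :: Rule (Sum Int) (Max Int)
-- > depths Atts{inh= depth, syn= deepest} =
-- >    Atts{inh= depth <> Sum 1, syn= Max (getSum depth) <> deepest}
--
-- The incoming 'syn' field carries the combined synthesized attributes of the node's children, and the
-- outgoing 'inh' field is the inherited attribute distributed to every child.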
47 | type Rule a b = Atts a b -> Atts a b 48 | 49 | instance {-# overlappable #-} Attribution t a b g deep shallow where 50 | attribution = const (const id) 51 | 52 | instance {-# overlappable #-} (Transformation (Auto t), p ~ Domain (Auto t), q ~ Codomain (Auto t), q ~ Semantics a b, 53 | Rank2.Foldable (g q), Monoid a, Monoid b, Foldable p, Attribution (Auto t) a b g q p) => 54 | (Auto t) `At` g (Semantics a b) (Semantics a b) where 55 | ($) = applyDefault (foldr const $ error "Missing node") 56 | {-# INLINE ($) #-} 57 | 58 | instance {-# overlappable #-} (Transformation (Keep t), p ~ Domain (Keep t), q ~ Codomain (Keep t), 59 | q ~ PreservingSemantics p a b, Rank2.Foldable (g q), Monoid a, Monoid b, 60 | Foldable p, Functor p, Attribution (Keep t) a b g q p) => 61 | (Keep t) `At` g (PreservingSemantics p a b) (PreservingSemantics p a b) where 62 | ($) = applyDefaultWithAttributes 63 | {-# INLINE ($) #-} 64 | 65 | instance (Transformation (Auto t), Domain (Auto t) ~ f, Functor f, Codomain (Auto t) ~ Semantics a b, 66 | Rank2.Functor (g f), Deep.Functor (Auto t) g, Auto t `At` g (Semantics a b) (Semantics a b)) => 67 | Full.Functor (Auto t) g where 68 | (<$>) = Full.mapUpDefault 69 | 70 | instance (Transformation (Keep t), Domain (Keep t) ~ f, Functor f, Codomain (Keep t) ~ PreservingSemantics f a b, 71 | Rank2.Functor (g f), Deep.Functor (Keep t) g, 72 | Keep t `At` g (PreservingSemantics f a b) (PreservingSemantics f a b)) => 73 | Full.Functor (Keep t) g where 74 | (<$>) = Full.mapUpDefault 75 | 76 | instance (Transformation (Keep t), Domain (Keep t) ~ f, Traversable f, Rank2.Traversable (g f), 77 | Codomain (Keep t) ~ PreservingSemantics f a b, Deep.Traversable (Feeder a b f) g, Full.Functor (Keep t) g, 78 | Keep t `At` g (PreservingSemantics f a b) (PreservingSemantics f a b)) => 79 | Full.Traversable (Keep t) g where 80 | traverse = traverseDefaultWithAttributes 81 | 82 | -- | The core function to tie the recursive knot, turning a 'Rule' for a node into its 'Semantics'. 83 | knit :: (Rank2.Foldable (g sem), sem ~ Semantics a b, Monoid a, Monoid b) 84 | => Rule a b -> g sem sem -> sem (g sem sem) 85 | knit r chSem = Const knitted 86 | where knitted inherited = synthesized 87 | where Atts{syn= synthesized, inh= chInh} = r Atts{inh= inherited, syn= chSyn} 88 | chSyn = Rank2.foldMap (($ chInh) . getConst) chSem 89 | 90 | -- | Another way to tie the recursive knot, using a 'Rule' to add attributes to every node througha stateful calculation 91 | knitKeeping :: forall a b f g sem. (Rank2.Foldable (g sem), sem ~ PreservingSemantics f a b, 92 | Monoid a, Monoid b, Foldable f, Functor f) 93 | => Rule a b -> f (g sem sem) -> sem (g sem sem) 94 | knitKeeping r x = Compose knitted 95 | where knitted :: a -> Compose ((,) (Atts a b)) f (g sem sem) 96 | knitted inherited = Compose (results, x) 97 | where results@Atts{inh= chInh} = r Atts{inh= inherited, syn= chSyn} 98 | chSyn = foldMap (Rank2.foldMap (syn . fst . getCompose . ($ chInh) . getCompose)) x 99 | 100 | -- | The core type class for defining the attribute grammar. The instances of this class typically have a form like 101 | -- 102 | -- > instance Attribution MyAttGrammar MyMonoid MyNode (Semantics MyAttGrammar) Identity where 103 | -- > attribution MyAttGrammar{} (Identity MyNode{}) 104 | -- > Atts{inh= fromParent, 105 | -- > syn= fromChildren} 106 | -- > = Atts{syn= toParent, 107 | -- > inh= toChildren} 108 | class Attribution t a b g (deep :: Type -> Type) shallow where 109 | -- | The attribution rule for a given transformation and node. 
110 | attribution :: t -> shallow (g deep deep) -> Rule a b 111 | 112 | -- | Drop-in implementation of 'Transformation.$' 113 | applyDefault :: (p ~ Domain t, q ~ Semantics a b, x ~ g q q, 114 | Rank2.Foldable (g q), Attribution t a b g q p, Monoid a, Monoid b) 115 | => (forall y. p y -> y) -> t -> p x -> q x 116 | applyDefault extract t x = knit (attribution t x) (extract x) 117 | {-# INLINE applyDefault #-} 118 | 119 | -- | Drop-in implementation of 'Full.<$>' 120 | fullMapDefault :: (p ~ Domain t, q ~ Semantics a b, q ~ Codomain t, x ~ g q q, Rank2.Foldable (g q), 121 | Deep.Functor t g, Attribution t a b g p p, Monoid a, Monoid b) 122 | => (forall y. p y -> y) -> t -> p (g p p) -> q (g q q) 123 | fullMapDefault extract t local = knit (attribution t local) (t Deep.<$> extract local) 124 | {-# INLINE fullMapDefault #-} 125 | 126 | -- | Drop-in implementation of 'Transformation.$' that stores all attributes with every original node 127 | applyDefaultWithAttributes :: (p ~ Domain t, q ~ PreservingSemantics p a b, x ~ g q q, 128 | Attribution t a b g q p, Rank2.Foldable (g q), Monoid a, Monoid b, Foldable p, Functor p) 129 | => t -> p x -> q x 130 | applyDefaultWithAttributes t x = knitKeeping (attribution t x) x 131 | {-# INLINE applyDefaultWithAttributes #-} 132 | 133 | -- | Drop-in implementation of 'Full.traverse' that stores all attributes with every original node 134 | traverseDefaultWithAttributes :: forall t p q r a b g. 135 | (Transformation t, Domain t ~ p, Codomain t ~ Compose ((->) a) q, 136 | q ~ Compose ((,) (Atts a b)) p, r ~ Compose ((->) a) q, 137 | Traversable p, Full.Functor t g, Deep.Traversable (Feeder a b p) g, 138 | Transformation.At t (g r r)) 139 | => t -> p (g p p) -> a -> q (g q q) 140 | traverseDefaultWithAttributes t x rootInheritance = Full.traverse Feeder (t Full.<$> x) rootInheritance 141 | {-# INLINE traverseDefaultWithAttributes #-} 142 | 143 | data Feeder (a :: Type) (b :: Type) (f :: Type -> Type) = Feeder 144 | 145 | type FeederDomain (a :: Type) (b :: Type) f = Compose ((->) a) (Compose ((,) (Atts a b)) f) 146 | 147 | instance Transformation (Feeder a b f) where 148 | type Domain (Feeder a b f) = FeederDomain a b f 149 | type Codomain (Feeder a b f) = FeederDomain a b f 150 | 151 | instance Transformation.At (Feeder a b f) g where 152 | Feeder $ x = x 153 | 154 | instance (Traversable f, Rank2.Traversable (g (FeederDomain a b f)), Deep.Traversable (Feeder a b f) g) => 155 | Full.Traversable (Feeder a b f) g where 156 | traverse t x inheritance = Compose (atts{inh= inheritance}, traverse (Deep.traverse t) y (inh atts)) 157 | where Compose (atts, y) = getCompose x inheritance 158 | -------------------------------------------------------------------------------- /deep-transformations/src/Transformation/AG/Monomorphic.hs: -------------------------------------------------------------------------------- 1 | {-# Language FlexibleContexts, FlexibleInstances, KindSignatures, MultiParamTypeClasses, PatternSynonyms, RankNTypes, 2 | TypeFamilies, TypeOperators, UndecidableInstances #-} 3 | 4 | -- | A special case of an attribute grammar where every node has only a single inherited and a single synthesized 5 | -- attribute of the same monoidal type. The synthesized attributes of child nodes are all 'mconcat`ted together. 
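--
-- As a rough sketch of typical usage (the transformation @NodeCount@ and the node type @MyNode@ are
-- hypothetical, and @MyNode@ is assumed to already have the necessary rank-2 and deep-transformation
-- instances), a grammar that counts the nodes of an 'Data.Functor.Identity.Identity'-wrapped tree needs
-- only a 'Transformation.Transformation' instance and a single 'Attribution' rule; the overlappable
-- defaults in this module supply the rest:
--
-- > import Data.Monoid (Sum(Sum))
-- >
-- > data NodeCount = NodeCount
-- > instance Transformation (Auto NodeCount) where
-- >    type Domain (Auto NodeCount) = Identity
-- >    type Codomain (Auto NodeCount) = Semantics (Sum Int)
-- > instance Attribution (Auto NodeCount) (Sum Int) MyNode deep shallow where
-- >    attribution _ _ Atts{inh= i, syn= s} = Atts{inh= i, syn= Sum 1 <> s}
--
-- The @'Semantics' (Sum Int)@ value obtained by mapping @Auto NodeCount@ over a tree is then a function
-- from the root's inherited attribute to the synthesized total.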
6 | 7 | module Transformation.AG.Monomorphic ( 8 | Auto (Auto), Keep (Keep), Atts, pattern Atts, inh, syn, 9 | Semantics, PreservingSemantics, Rule, Attribution (attribution), Feeder, 10 | Dimorphic.knit, Dimorphic.knitKeeping, 11 | applyDefault, applyDefaultWithAttributes, 12 | fullMapDefault, Dimorphic.traverseDefaultWithAttributes) where 13 | 14 | import Data.Functor.Compose (Compose(..)) 15 | import Data.Functor.Const (Const(..)) 16 | import Data.Kind (Type) 17 | import qualified Rank2 18 | import Transformation (Transformation, Domain, Codomain, At) 19 | import qualified Transformation 20 | import qualified Transformation.Deep as Deep 21 | import qualified Transformation.Full as Full 22 | 23 | import qualified Transformation.AG.Dimorphic as Dimorphic 24 | import Transformation.AG.Dimorphic (knit, knitKeeping) 25 | 26 | 27 | -- | Transformation wrapper that allows automatic inference of attribute rules. 28 | newtype Auto t = Auto t 29 | 30 | -- | Transformation wrapper that allows automatic inference of attribute rules and preservation of the attribute with 31 | -- the original nodes. 32 | newtype Keep t = Keep t 33 | 34 | type Atts a = Dimorphic.Atts a a 35 | 36 | pattern Atts :: a -> a -> Atts a 37 | pattern Atts{inh, syn} = Dimorphic.Atts inh syn 38 | 39 | -- | A node's 'Semantics' maps its inherited attribute to its synthesized attribute. 40 | type Semantics a = Const (a -> a) 41 | 42 | -- | A node's 'PreservingSemantics' maps its inherited attribute to its synthesized attribute. 43 | type PreservingSemantics f a = Compose ((->) a) (Compose ((,) (Atts a)) f) 44 | 45 | -- | An attribution rule maps a node's inherited attribute and its child nodes' synthesized attribute to the node's 46 | -- synthesized attribute and the children nodes' inherited attributes. 
47 | type Rule a = Atts a -> Atts a 48 | 49 | instance {-# overlappable #-} Attribution t a g deep shallow where 50 | attribution = const (const id) 51 | 52 | instance {-# overlappable #-} (Transformation (Auto t), p ~ Domain (Auto t), q ~ Codomain (Auto t), q ~ Semantics a, 53 | Rank2.Foldable (g q), Monoid a, Foldable p, Attribution (Auto t) a g q p) => 54 | (Auto t) `At` g (Semantics a) (Semantics a) where 55 | ($) = applyDefault (foldr const $ error "Missing node") 56 | {-# INLINE ($) #-} 57 | 58 | instance {-# overlappable #-} (Transformation (Keep t), p ~ Domain (Keep t), q ~ Codomain (Keep t), 59 | q ~ PreservingSemantics p a, Rank2.Foldable (g q), Monoid a, 60 | Foldable p, Functor p, Attribution (Keep t) a g q p) => 61 | (Keep t) `At` g (PreservingSemantics p a) (PreservingSemantics p a) where 62 | ($) = applyDefaultWithAttributes 63 | {-# INLINE ($) #-} 64 | 65 | instance (Transformation (Auto t), Domain (Auto t) ~ f, Functor f, Codomain (Auto t) ~ Semantics a, 66 | Rank2.Functor (g f), Deep.Functor (Auto t) g, Auto t `At` g (Semantics a) (Semantics a)) => 67 | Full.Functor (Auto t) g where 68 | (<$>) = Full.mapUpDefault 69 | 70 | instance (Transformation (Keep t), Domain (Keep t) ~ f, Functor f, Codomain (Keep t) ~ PreservingSemantics f a, 71 | Functor f, Rank2.Functor (g f), Deep.Functor (Keep t) g, 72 | Keep t `At` g (PreservingSemantics f a) (PreservingSemantics f a)) => 73 | Full.Functor (Keep t) g where 74 | (<$>) = Full.mapUpDefault 75 | 76 | instance (Transformation (Keep t), Domain (Keep t) ~ f, Traversable f, Rank2.Traversable (g f), 77 | Codomain (Keep t) ~ PreservingSemantics f a, Deep.Traversable (Feeder a f) g, Full.Functor (Keep t) g, 78 | Keep t `At` g (PreservingSemantics f a) (PreservingSemantics f a)) => 79 | Full.Traversable (Keep t) g where 80 | traverse = Dimorphic.traverseDefaultWithAttributes 81 | 82 | -- | The core type class for defining the attribute grammar. The instances of this class typically have a form like 83 | -- 84 | -- > instance Attribution MyAttGrammar MyMonoid MyNode (Semantics MyAttGrammar) Identity where 85 | -- > attribution MyAttGrammar{} (Identity MyNode{}) 86 | -- > Atts{inh= fromParent, 87 | -- > syn= fromChildren} 88 | -- > = Atts{syn= toParent, 89 | -- > inh= toChildren} 90 | class Attribution t a g (deep :: Type -> Type) shallow where 91 | -- | The attribution rule for a given transformation and node. 92 | attribution :: t -> shallow (g deep deep) -> Rule a 93 | 94 | -- | Drop-in implementation of 'Transformation.$' 95 | applyDefault :: (p ~ Domain t, q ~ Semantics a, x ~ g q q, Rank2.Foldable (g q), Attribution t a g q p, Monoid a) 96 | => (forall y. p y -> y) -> t -> p x -> q x 97 | applyDefault extract t x = knit (attribution t x) (extract x) 98 | {-# INLINE applyDefault #-} 99 | 100 | -- | Drop-in implementation of 'Full.<$>' 101 | fullMapDefault :: (p ~ Domain t, q ~ Semantics a, q ~ Codomain t, x ~ g q q, Rank2.Foldable (g q), 102 | Deep.Functor t g, Attribution t a g p p, Monoid a) 103 | => (forall y. 
p y -> y) -> t -> p (g p p) -> q (g q q) 104 | fullMapDefault extract t local = knit (attribution t local) (t Deep.<$> extract local) 105 | {-# INLINE fullMapDefault #-} 106 | 107 | -- | Drop-in implementation of 'Transformation.$' that stores all attributes with every original node 108 | applyDefaultWithAttributes :: (p ~ Domain t, q ~ PreservingSemantics p a, x ~ g q q, 109 | Attribution t a g q p, Rank2.Foldable (g q), Monoid a, Foldable p, Functor p) 110 | => t -> p x -> q x 111 | applyDefaultWithAttributes t x = knitKeeping (attribution t x) x 112 | {-# INLINE applyDefaultWithAttributes #-} 113 | 114 | type Feeder a = Dimorphic.Feeder a a 115 | -------------------------------------------------------------------------------- /deep-transformations/src/Transformation/Deep.hs: -------------------------------------------------------------------------------- 1 | {-# Language Haskell2010, DeriveDataTypeable, FlexibleInstances, KindSignatures, MultiParamTypeClasses, RankNTypes, 2 | StandaloneDeriving, TypeFamilies, TypeOperators, UndecidableInstances #-} 3 | 4 | -- | Type classes 'Functor', 'Foldable', and 'Traversable' that correspond to the standard type classes of the same 5 | -- name, but applying the given transformation to every descendant of the given tree node. The corresponding classes 6 | -- in the "Transformation.Shallow" module operate only on the immediate children, while those from the 7 | -- "Transformation.Full" module include the argument node itself. 8 | 9 | module Transformation.Deep where 10 | 11 | import Data.Data (Data, Typeable) 12 | import Data.Functor.Compose (Compose) 13 | import Data.Functor.Const (Const) 14 | import qualified Control.Applicative as Rank1 15 | import qualified Data.Foldable as Rank1 16 | import qualified Data.Functor as Rank1 17 | import qualified Data.Traversable as Rank1 18 | import Data.Kind (Type) 19 | import qualified Rank2 20 | import Transformation (Transformation, Domain, Codomain) 21 | import qualified Transformation.Full as Full 22 | 23 | import Prelude hiding (Foldable(..), Traversable(..), Functor(..), Applicative(..), (<$>), fst, snd) 24 | 25 | -- | Like "Transformation.Shallow".'Transformation.Shallow.Functor' except it maps all descendants and not only immediate children 26 | class (Transformation t, Rank2.Functor (g (Domain t))) => Functor t g where 27 | (<$>) :: t -> g (Domain t) (Domain t) -> g (Codomain t) (Codomain t) 28 | infixl 4 <$> 29 | 30 | -- | Like "Transformation.Shallow".'Transformation.Shallow.Foldable' except it folds all descendants and not only immediate children 31 | class (Transformation t, Rank2.Foldable (g (Domain t))) => Foldable t g where 32 | foldMap :: (Codomain t ~ Const m, Monoid m) => t -> g (Domain t) (Domain t) -> m 33 | 34 | -- | Like "Transformation.Shallow".'Transformation.Shallow.Traversable' except it folds all descendants and not only immediate children 35 | class (Transformation t, Rank2.Traversable (g (Domain t))) => Traversable t g where 36 | traverse :: Codomain t ~ Compose m f => t -> g (Domain t) (Domain t) -> m (g f f) 37 | 38 | -- | A tuple of only one element 39 | newtype Only g (d :: Type -> Type) (s :: Type -> Type) = 40 | Only {fromOnly :: s (g d d)} 41 | 42 | -- | Compose a regular type constructor with a data type with two type constructor parameters 43 | newtype Nest (f :: Type -> Type) g (d :: Type -> Type) (s :: Type -> Type) = 44 | Nest {unNest :: f (g d s)} 45 | 46 | -- | Like 'Data.Functor.Product.Product' for data types with two type constructor parameters 47 | data Product g h 
(d :: Type -> Type) (s :: Type -> Type) = 48 | Pair{fst :: g d s, 49 | snd :: h d s} 50 | 51 | -- | Like 'Data.Functor.Sum.Sum' for data types with two type constructor parameters 52 | data Sum g h (d :: Type -> Type) (s :: Type -> Type) = 53 | InL (g d s) 54 | | InR (h d s) 55 | 56 | -- Instances 57 | 58 | instance Rank2.Functor (Only g d) where 59 | f <$> Only x = Only (f x) 60 | 61 | instance Rank2.Foldable (Only g d) where 62 | foldMap f (Only x) = f x 63 | 64 | instance Rank2.Traversable (Only g d) where 65 | traverse f (Only x) = Only Rank1.<$> f x 66 | 67 | instance Rank2.Apply (Only g d) where 68 | Only f <*> Only x = Only (Rank2.apply f x) 69 | liftA2 f (Only x) (Only y) = Only (f x y) 70 | 71 | instance Rank2.Applicative (Only g d) where 72 | pure f = Only f 73 | 74 | instance Rank2.DistributiveTraversable (Only g d) 75 | 76 | instance Rank2.Distributive (Only g d) where 77 | cotraverse w f = Only (w (Rank1.fmap fromOnly f)) 78 | 79 | instance Full.Functor t g => Functor t (Only g) where 80 | t <$> Only x = Only (t Full.<$> x) 81 | 82 | instance Full.Foldable t g => Foldable t (Only g) where 83 | foldMap t (Only x) = Full.foldMap t x 84 | 85 | instance (Full.Traversable t g, Codomain t ~ Compose m f, Rank1.Functor m) => Traversable t (Only g) where 86 | traverse t (Only x) = Only Rank1.<$> Full.traverse t x 87 | 88 | deriving instance (Typeable s, Typeable d, Typeable g, Data (s (g d d))) => Data (Only g d s) 89 | deriving instance Eq (s (g d d)) => Eq (Only g d s) 90 | deriving instance Ord (s (g d d)) => Ord (Only g d s) 91 | deriving instance Show (s (g d d)) => Show (Only g d s) 92 | 93 | instance (Rank1.Functor f, Rank2.Functor (g d)) => Rank2.Functor (Nest f g d) where 94 | f <$> Nest x = Nest ((f Rank2.<$>) Rank1.<$> x) 95 | 96 | instance (Rank1.Applicative f, Rank2.Apply (g d)) => Rank2.Apply (Nest f g d) where 97 | Nest x <*> Nest y = Nest (Rank1.liftA2 (Rank2.<*>) x y) 98 | 99 | instance (Rank1.Applicative f, Rank2.Applicative (g d)) => Rank2.Applicative (Nest f g d) where 100 | pure f = Nest (Rank1.pure (Rank2.pure f)) 101 | 102 | instance (Rank1.Foldable f, Rank2.Foldable (g d)) => Rank2.Foldable (Nest f g d) where 103 | foldMap f (Nest x) = Rank1.foldMap (Rank2.foldMap f) x 104 | 105 | instance (Rank1.Traversable f, Rank2.Traversable (g d)) => Rank2.Traversable (Nest f g d) where 106 | traverse f (Nest x) = Nest Rank1.<$> Rank1.traverse (Rank2.traverse f) x 107 | 108 | instance (Rank1.Functor f, Functor t g) => Functor t (Nest f g) where 109 | t <$> Nest x = Nest ((t <$>) Rank1.<$> x) 110 | 111 | instance (Rank1.Foldable f, Foldable t g) => Foldable t (Nest f g) where 112 | foldMap t (Nest x) = Rank1.foldMap (foldMap t) x 113 | 114 | instance (Rank1.Traversable f, Traversable t g, Codomain t ~ Compose m f, Rank1.Applicative m) => 115 | Traversable t (Nest f g) where 116 | traverse t (Nest x) = Nest Rank1.<$> Rank1.traverse (traverse t) x 117 | 118 | deriving instance (Typeable s, Typeable d, Typeable f, Typeable g, 119 | Data (f (g d s))) => Data (Nest f g d s) 120 | deriving instance Eq (f (g d s)) => Eq (Nest f g d s) 121 | deriving instance Ord (f (g d s)) => Ord (Nest f g d s) 122 | deriving instance Show (f (g d s)) => Show (Nest f g d s) 123 | 124 | instance (Rank2.Functor (g d), Rank2.Functor (h d)) => Rank2.Functor (Product g h d) where 125 | f <$> (Pair left right) = Pair (f Rank2.<$> left) (f Rank2.<$> right) 126 | 127 | instance (Rank2.Apply (g d), Rank2.Apply (h d)) => Rank2.Apply (Product g h d) where 128 | Pair g1 h1 <*> ~(Pair g2 h2) = Pair (g1 
Rank2.<*> g2) (h1 Rank2.<*> h2) 129 | liftA2 f (Pair g1 h1) ~(Pair g2 h2) = Pair (Rank2.liftA2 f g1 g2) (Rank2.liftA2 f h1 h2) 130 | liftA3 f (Pair g1 h1) ~(Pair g2 h2) ~(Pair g3 h3) = Pair (Rank2.liftA3 f g1 g2 g3) (Rank2.liftA3 f h1 h2 h3) 131 | 132 | instance (Rank2.Applicative (g d), Rank2.Applicative (h d)) => Rank2.Applicative (Product g h d) where 133 | pure f = Pair (Rank2.pure f) (Rank2.pure f) 134 | 135 | instance (Rank2.Foldable (g d), Rank2.Foldable (h d)) => Rank2.Foldable (Product g h d) where 136 | foldMap f (Pair g h) = Rank2.foldMap f g `mappend` Rank2.foldMap f h 137 | 138 | instance (Rank2.Traversable (g d), Rank2.Traversable (h d)) => Rank2.Traversable (Product g h d) where 139 | traverse f (Pair g h) = Rank1.liftA2 Pair (Rank2.traverse f g) (Rank2.traverse f h) 140 | 141 | instance (Rank2.Distributive (g d), Rank2.Distributive (h d)) => Rank2.DistributiveTraversable (Product g h d) 142 | 143 | instance (Rank2.Distributive (g d), Rank2.Distributive (h d)) => Rank2.Distributive (Product g h d) where 144 | cotraverse w f = Pair{fst= Rank2.cotraverse w (fst Rank1.<$> f), 145 | snd= Rank2.cotraverse w (snd Rank1.<$> f)} 146 | 147 | instance (Functor t g, Functor t h) => Functor t (Product g h) where 148 | t <$> Pair left right = Pair (t <$> left) (t <$> right) 149 | 150 | instance (Foldable t g, Foldable t h) => Foldable t (Product g h) where 151 | foldMap t (Pair g h) = foldMap t g `mappend` foldMap t h 152 | 153 | instance (Traversable t g, Traversable t h, Codomain t ~ Compose m f, Rank1.Applicative m) => 154 | Traversable t (Product g h) where 155 | traverse t (Pair left right) = Rank1.liftA2 Pair (traverse t left) (traverse t right) 156 | 157 | deriving instance (Typeable d, Typeable s, Typeable g1, Typeable g2, 158 | Data (g1 d s), Data (g2 d s)) => Data (Product g1 g2 d s) 159 | deriving instance (Show (g1 d s), Show (g2 d s)) => Show (Product g1 g2 d s) 160 | deriving instance (Eq (g d s), Eq (h d s)) => Eq (Product g h d s) 161 | deriving instance (Ord (g d s), Ord (h d s)) => Ord (Product g h d s) 162 | 163 | instance (Rank2.Functor (g d), Rank2.Functor (h d)) => Rank2.Functor (Sum g h d) where 164 | f <$> InL left = InL (f Rank2.<$> left) 165 | f <$> InR right = InR (f Rank2.<$> right) 166 | 167 | instance (Rank2.Foldable (g d), Rank2.Foldable (h d)) => Rank2.Foldable (Sum g h d) where 168 | foldMap f (InL left) = Rank2.foldMap f left 169 | foldMap f (InR right) = Rank2.foldMap f right 170 | 171 | instance (Rank2.Traversable (g d), Rank2.Traversable (h d)) => Rank2.Traversable (Sum g h d) where 172 | traverse f (InL left) = InL Rank1.<$> Rank2.traverse f left 173 | traverse f (InR right) = InR Rank1.<$> Rank2.traverse f right 174 | 175 | instance (Functor t g, Functor t h) => Functor t (Sum g h) where 176 | t <$> InL left = InL (t <$> left) 177 | t <$> InR right = InR (t <$> right) 178 | 179 | instance (Foldable t g, Foldable t h, Codomain t ~ Const m) => Foldable t (Sum g h) where 180 | foldMap t (InL left) = foldMap t left 181 | foldMap t (InR right) = foldMap t right 182 | 183 | instance (Traversable t g, Traversable t h, Codomain t ~ Compose m f, Rank1.Applicative m) => 184 | Traversable t (Sum g h) where 185 | traverse t (InL left) = InL Rank1.<$> traverse t left 186 | traverse t (InR right) = InR Rank1.<$> traverse t right 187 | 188 | deriving instance (Typeable d, Typeable s, Typeable g1, Typeable g2, 189 | Data (g1 d s), Data (g2 d s)) => Data (Sum g1 g2 d s) 190 | deriving instance (Show (g1 d s), Show (g2 d s)) => Show (Sum g1 g2 d s) 191 | deriving 
instance (Eq (g d s), Eq (h d s)) => Eq (Sum g h d s) 192 | deriving instance (Ord (g d s), Ord (h d s)) => Ord (Sum g h d s) 193 | 194 | -- | Alphabetical synonym for '<$>' 195 | fmap :: Functor t g => t -> g (Domain t) (Domain t) -> g (Codomain t) (Codomain t) 196 | fmap = (<$>) 197 | 198 | -- | Equivalent of 'Data.Either.either' 199 | eitherFromSum :: Sum g h d s -> Either (g d s) (h d s) 200 | eitherFromSum (InL left) = Left left 201 | eitherFromSum (InR right) = Right right 202 | -------------------------------------------------------------------------------- /deep-transformations/src/Transformation/Deep.hs-boot: -------------------------------------------------------------------------------- 1 | {-# Language MultiParamTypeClasses, RankNTypes, TypeFamilies, TypeOperators #-} 2 | 3 | module Transformation.Deep where 4 | 5 | import Data.Functor.Compose (Compose) 6 | import Data.Functor.Const (Const) 7 | import qualified Rank2 8 | import Transformation (Transformation, Domain, Codomain) 9 | 10 | import Prelude hiding (Functor, Foldable, Traversable, (<$>), foldMap, traverse) 11 | 12 | class (Transformation t, Rank2.Functor (g (Domain t))) => Functor t g where 13 | (<$>) :: t -> g (Domain t) (Domain t) -> g (Codomain t) (Codomain t) 14 | 15 | class (Transformation t, Rank2.Foldable (g (Domain t))) => Foldable t g where 16 | foldMap :: (Codomain t ~ Const m, Monoid m) => t -> g (Domain t) (Domain t) -> m 17 | 18 | class (Transformation t, Rank2.Traversable (g (Domain t))) => Traversable t g where 19 | traverse :: Codomain t ~ Compose m f => t -> g (Domain t) (Domain t) -> m (g f f) 20 | -------------------------------------------------------------------------------- /deep-transformations/src/Transformation/Full.hs: -------------------------------------------------------------------------------- 1 | {-# Language FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, RankNTypes, TypeFamilies, TypeOperators #-} 2 | 3 | -- | Type classes 'Functor', 'Foldable', and 'Traversable' that correspond to the standard type classes of the same 4 | -- name, but applying the given transformation to the given tree node and all its descendants. The corresponding classes 5 | -- in the "Transformation.Shallow" module operate only on the immediate children, while those from the 6 | -- "Transformation.Deep" module exclude the argument node itself.
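-- For comparison, the mapping methods of the three flavours differ only in how much of the tree they
-- receive and rebuild (the signatures below are collected from the class declarations in
-- "Transformation.Shallow", "Transformation.Deep", and this module):
--
-- > Shallow.<$> :: t -> g (Domain t)                        -> g (Codomain t)
-- > Deep.<$>    :: t -> g (Domain t) (Domain t)             -> g (Codomain t) (Codomain t)
-- > Full.<$>    :: t -> Domain t (g (Domain t) (Domain t))  -> Codomain t (g (Codomain t) (Codomain t))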
7 | 8 | module Transformation.Full where 9 | 10 | import qualified Data.Functor 11 | import Data.Functor.Compose (Compose(getCompose)) 12 | import Data.Functor.Const (Const(getConst)) 13 | import qualified Data.Foldable 14 | import qualified Data.Traversable 15 | import qualified Rank2 16 | import qualified Transformation 17 | import Transformation (Transformation, Domain, Codomain) 18 | import {-# SOURCE #-} qualified Transformation.Deep as Deep 19 | 20 | import Prelude hiding (Foldable(..), Traversable(..), Functor(..), Applicative(..), (<$>), fst, snd) 21 | 22 | -- | Like "Transformation.Deep".'Deep.Functor' except it maps an additional wrapper around the entire tree 23 | class (Transformation t, Rank2.Functor (g (Domain t))) => Functor t g where 24 | (<$>) :: t -> Domain t (g (Domain t) (Domain t)) -> Codomain t (g (Codomain t) (Codomain t)) 25 | infixl 4 <$> 26 | 27 | -- | Like "Transformation.Deep".'Deep.Foldable' except the entire tree is also wrapped 28 | class (Transformation t, Rank2.Foldable (g (Domain t))) => Foldable t g where 29 | foldMap :: (Codomain t ~ Const m, Monoid m) => t -> Domain t (g (Domain t) (Domain t)) -> m 30 | 31 | -- | Like "Transformation.Deep".'Deep.Traversable' except it traverses an additional wrapper around the entire tree 32 | class (Transformation t, Rank2.Traversable (g (Domain t))) => Traversable t g where 33 | traverse :: Codomain t ~ Compose m f => t -> Domain t (g (Domain t) (Domain t)) -> m (f (g f f)) 34 | 35 | -- | Alphabetical synonym for '<$>' 36 | fmap :: Functor t g => t -> Domain t (g (Domain t) (Domain t)) -> Codomain t (g (Codomain t) (Codomain t)) 37 | fmap = (<$>) 38 | 39 | -- | Default implementation for '<$>' that maps the wrapper and then the tree 40 | mapDownDefault :: (Deep.Functor t g, t `Transformation.At` g (Domain t) (Domain t), Data.Functor.Functor (Codomain t)) 41 | => t -> Domain t (g (Domain t) (Domain t)) -> Codomain t (g (Codomain t) (Codomain t)) 42 | mapDownDefault t x = (t Deep.<$>) Data.Functor.<$> (t Transformation.$ x) 43 | 44 | -- | Default implementation for '<$>' that maps the tree and then the wrapper 45 | mapUpDefault :: (Deep.Functor t g, t `Transformation.At` g (Codomain t) (Codomain t), Data.Functor.Functor (Domain t)) 46 | => t -> Domain t (g (Domain t) (Domain t)) -> Codomain t (g (Codomain t) (Codomain t)) 47 | mapUpDefault t x = t Transformation.$ ((t Deep.<$>) Data.Functor.<$> x) 48 | 49 | foldMapDownDefault, foldMapUpDefault :: (t `Transformation.At` g (Domain t) (Domain t), Deep.Foldable t g, 50 | Codomain t ~ Const m, Data.Foldable.Foldable (Domain t), Monoid m) 51 | => t -> Domain t (g (Domain t) (Domain t)) -> m 52 | -- | Default implementation for 'foldMap' that folds the wrapper and then the tree 53 | foldMapDownDefault t x = getConst (t Transformation.$ x) `mappend` Data.Foldable.foldMap (Deep.foldMap t) x 54 | -- | Default implementation for 'foldMap' that folds the tree and then the wrapper 55 | foldMapUpDefault t x = Data.Foldable.foldMap (Deep.foldMap t) x `mappend` getConst (t Transformation.$ x) 56 | 57 | -- | Default implementation for 'traverse' that traverses the wrapper and then the tree 58 | traverseDownDefault :: (Deep.Traversable t g, t `Transformation.At` g (Domain t) (Domain t), 59 | Codomain t ~ Compose m f, Data.Traversable.Traversable f, Monad m) 60 | => t -> Domain t (g (Domain t) (Domain t)) -> m (f (g f f)) 61 | traverseDownDefault t x = getCompose (t Transformation.$ x) >>= Data.Traversable.traverse (Deep.traverse t) 62 | 63 | -- | Default implementation for 'traverse' 
that traverses the tree and then the wrapper 64 | traverseUpDefault :: (Deep.Traversable t g, Codomain t ~ Compose m f, t `Transformation.At` g f f, 65 | Data.Traversable.Traversable (Domain t), Monad m) 66 | => t -> Domain t (g (Domain t) (Domain t)) -> m (f (g f f)) 67 | traverseUpDefault t x = Data.Traversable.traverse (Deep.traverse t) x >>= getCompose . (t Transformation.$) 68 | -------------------------------------------------------------------------------- /deep-transformations/src/Transformation/Full/TH.hs: -------------------------------------------------------------------------------- 1 | -- | This module exports the templates for automatic instance deriving of "Transformation.Full" type classes. The most 2 | -- common way to use it would be 3 | -- 4 | -- > import qualified Transformation.Full.TH 5 | -- > data MyDataType f' f = ... 6 | -- > $(Transformation.Full.TH.deriveUpFunctor (conT ''MyTransformation) (conT ''MyDataType)) 7 | -- 8 | 9 | {-# Language TemplateHaskell #-} 10 | 11 | module Transformation.Full.TH (deriveDownFunctor, deriveDownFoldable, deriveDownTraversable, 12 | deriveUpFunctor, deriveUpFoldable, deriveUpTraversable) 13 | where 14 | 15 | import Language.Haskell.TH 16 | 17 | import qualified Transformation 18 | import qualified Transformation.Deep 19 | import qualified Transformation.Full 20 | 21 | deriveDownFunctor :: Q Type -> Q Type -> Q [Dec] 22 | deriveDownFunctor transformation node = do 23 | let domain = conT ''Transformation.Domain `appT` transformation 24 | deepConstraint = conT ''Transformation.Deep.Functor `appT` transformation `appT` node 25 | fullConstraint = conT ''Transformation.Full.Functor `appT` transformation `appT` node 26 | shallowConstraint = conT ''Transformation.At `appT` transformation `appT` (node `appT` domain `appT` domain) 27 | sequence [instanceD (cxt [deepConstraint, shallowConstraint]) 28 | fullConstraint 29 | [funD '(Transformation.Full.<$>) [clause [] (normalB $ varE 'Transformation.Full.mapDownDefault) []]]] 30 | 31 | deriveUpFunctor :: Q Type -> Q Type -> Q [Dec] 32 | deriveUpFunctor transformation node = do 33 | let codomain = conT ''Transformation.Codomain `appT` transformation 34 | deepConstraint = conT ''Transformation.Deep.Functor `appT` transformation `appT` node 35 | fullConstraint = conT ''Transformation.Full.Functor `appT` transformation `appT` node 36 | shallowConstraint = conT ''Transformation.At `appT` transformation `appT` (node `appT` codomain `appT` codomain) 37 | sequence [instanceD (cxt [deepConstraint, shallowConstraint]) 38 | fullConstraint 39 | [funD '(Transformation.Full.<$>) [clause [] (normalB $ varE 'Transformation.Full.mapUpDefault) []]]] 40 | 41 | deriveDownFoldable :: Q Type -> Q Type -> Q [Dec] 42 | deriveDownFoldable transformation node = do 43 | let domain = conT ''Transformation.Domain `appT` transformation 44 | deepConstraint = conT ''Transformation.Deep.Foldable `appT` transformation `appT` node 45 | fullConstraint = conT ''Transformation.Full.Foldable `appT` transformation `appT` node 46 | shallowConstraint = conT ''Transformation.At `appT` transformation `appT` (node `appT` domain `appT` domain) 47 | sequence [instanceD (cxt [deepConstraint, shallowConstraint]) 48 | fullConstraint 49 | [funD 'Transformation.Full.foldMap [clause [] (normalB $ varE 'Transformation.Full.foldMapDownDefault) []]]] 50 | 51 | deriveUpFoldable :: Q Type -> Q Type -> Q [Dec] 52 | deriveUpFoldable transformation node = do 53 | let codomain = conT ''Transformation.Codomain `appT` transformation 54 | deepConstraint = conT 
''Transformation.Deep.Foldable `appT` transformation `appT` node 55 | fullConstraint = conT ''Transformation.Full.Foldable `appT` transformation `appT` node 56 | shallowConstraint = conT ''Transformation.At `appT` transformation `appT` (node `appT` codomain `appT` codomain) 57 | sequence [instanceD (cxt [deepConstraint, shallowConstraint]) 58 | fullConstraint 59 | [funD 'Transformation.Full.foldMap [clause [] (normalB $ varE 'Transformation.Full.foldMapUpDefault) []]]] 60 | 61 | deriveDownTraversable :: Q Type -> Q Type -> Q [Dec] 62 | deriveDownTraversable transformation node = do 63 | let domain = conT ''Transformation.Domain `appT` transformation 64 | deepConstraint = conT ''Transformation.Deep.Traversable `appT` transformation `appT` node 65 | fullConstraint = conT ''Transformation.Full.Traversable `appT` transformation `appT` node 66 | shallowConstraint = conT ''Transformation.At `appT` transformation `appT` (node `appT` domain `appT` domain) 67 | sequence [instanceD (cxt [deepConstraint, shallowConstraint]) 68 | fullConstraint 69 | [funD 'Transformation.Full.traverse [clause [] (normalB $ varE 'Transformation.Full.traverseDownDefault) []]]] 70 | 71 | deriveUpTraversable :: Q Type -> Q Type -> Q [Dec] 72 | deriveUpTraversable transformation node = do 73 | let codomain = conT ''Transformation.Codomain `appT` transformation 74 | deepConstraint = conT ''Transformation.Deep.Traversable `appT` transformation `appT` node 75 | fullConstraint = conT ''Transformation.Full.Traversable `appT` transformation `appT` node 76 | shallowConstraint = conT ''Transformation.At `appT` transformation `appT` (node `appT` codomain `appT` codomain) 77 | sequence [instanceD (cxt [deepConstraint, shallowConstraint]) 78 | fullConstraint 79 | [funD 'Transformation.Full.traverse [clause [] (normalB $ varE 'Transformation.Full.traverseUpDefault) []]]] 80 | -------------------------------------------------------------------------------- /deep-transformations/src/Transformation/Rank2.hs: -------------------------------------------------------------------------------- 1 | {-# Language FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, RankNTypes, TypeFamilies, UndecidableInstances #-} 2 | 3 | -- | This module provides natural transformations 'Map', 'Fold', and 'Traversal', as well as three rank-2 functions 4 | -- that wrap them in a convenient interface. 5 | 6 | module Transformation.Rank2 where 7 | 8 | import Data.Functor.Compose (Compose(Compose)) 9 | import Data.Functor.Const (Const(Const)) 10 | import Data.Kind (Type) 11 | import qualified Rank2 12 | import Transformation (Transformation, Domain, Codomain) 13 | import qualified Transformation 14 | import qualified Transformation.Deep as Deep 15 | import qualified Transformation.Full as Full 16 | 17 | -- | Transform (naturally) the containing functor of every node in the given tree. 18 | (<$>) :: Deep.Functor (Map p q) g => (forall a. p a -> q a) -> g p p -> g q q 19 | f <$> x = Map f Deep.<$> x 20 | infixl 4 <$> 21 | 22 | -- | Fold the containing functor of every node in the given tree. 23 | foldMap :: (Deep.Foldable (Fold p m) g, Monoid m) => (forall a. p a -> m) -> g p p -> m 24 | foldMap f = Deep.foldMap (Fold f) 25 | 26 | -- | Traverse the containing functors of all nodes in the given tree. 27 | traverse :: Deep.Traversable (Traversal p q m) g => (forall a. p a -> m (q a)) -> g p p -> m (g q q) 28 | traverse f = Deep.traverse (Traversal f) 29 | 30 | newtype Map (p :: Type -> Type) (q :: Type -> Type) = Map (forall x. 
p x -> q x) 31 | 32 | newtype Fold (p :: Type -> Type) m = Fold (forall x. p x -> m) 33 | 34 | newtype Traversal (p :: Type -> Type) (q :: Type -> Type) m = Traversal (forall x. p x -> m (q x)) 35 | 36 | instance Transformation (Map p q) where 37 | type Domain (Map p q) = p 38 | type Codomain (Map p q) = q 39 | 40 | instance Transformation (Fold p m) where 41 | type Domain (Fold p m) = p 42 | type Codomain (Fold p m) = Const m 43 | 44 | instance Transformation (Traversal p q m) where 45 | type Domain (Traversal p q m) = p 46 | type Codomain (Traversal p q m) = Compose m q 47 | 48 | instance Transformation.At (Map p q) x where 49 | ($) (Map f) = f 50 | 51 | instance Transformation.At (Fold p m) x where 52 | ($) (Fold f) = Const . f 53 | 54 | instance Transformation.At (Traversal p q m) x where 55 | ($) (Traversal f) = Compose . f 56 | 57 | instance (Rank2.Functor (g p), Deep.Functor (Map p q) g, Functor p) => Full.Functor (Map p q) g where 58 | (<$>) = Full.mapUpDefault 59 | -------------------------------------------------------------------------------- /deep-transformations/src/Transformation/Shallow.hs: -------------------------------------------------------------------------------- 1 | {-# Language DeriveDataTypeable, FlexibleContexts, FlexibleInstances, KindSignatures, MultiParamTypeClasses, RankNTypes, 2 | StandaloneDeriving, TypeFamilies, TypeOperators, UndecidableInstances #-} 3 | 4 | -- | Type classes 'Functor', 'Foldable', and 'Traversable' that correspond to the standard type classes of the same 5 | -- name. The [rank2classes](https://hackage.haskell.org/package/rank2classes) package provides the equivalent set 6 | -- of classes for natural transformations. This module extends the functionality to unnatural transformations. 7 | 8 | module Transformation.Shallow (Functor(..), Foldable(..), Traversable(..), fmap) where 9 | 10 | import Control.Applicative (Applicative, liftA2, pure) 11 | import qualified Data.Functor as Rank1 (Functor, (<$>)) 12 | import qualified Data.Foldable as Rank1 (Foldable, foldMap) 13 | import qualified Data.Traversable as Rank1 (Traversable, traverse) 14 | import Data.Functor.Compose (Compose(Compose, getCompose)) 15 | import Data.Functor.Const (Const(Const, getConst)) 16 | import Data.Functor.Product (Product(Pair)) 17 | import Data.Functor.Sum (Sum(InL, InR)) 18 | import Data.Kind (Type) 19 | import Data.Proxy (Proxy(Proxy)) 20 | import qualified Rank2 21 | import Transformation (Transformation, Domain, Codomain, At) 22 | import qualified Transformation 23 | 24 | import Prelude hiding (Foldable(..), Traversable(..), Functor(..), Applicative(..), (<$>), fst, snd) 25 | 26 | -- | Like Rank2.'Rank2.Functor' except it takes a 'Transformation' instead of a polymorphic function 27 | class (Transformation t, Rank2.Functor g) => Functor t g where 28 | (<$>) :: t -> g (Domain t) -> g (Codomain t) 29 | infixl 4 <$> 30 | 31 | -- | Like Rank2.'Rank2.Foldable' except it takes a 'Transformation' instead of a polymorphic function 32 | class (Transformation t, Rank2.Foldable g) => Foldable t g where 33 | foldMap :: (Codomain t ~ Const m, Monoid m) => t -> g (Domain t) -> m 34 | 35 | -- | Like Rank2.'Rank2.Traversable' except it takes a 'Transformation' instead of a polymorphic function 36 | class (Transformation t, Rank2.Traversable g) => Traversable t g where 37 | traverse :: Codomain t ~ Compose m f => t -> g (Domain t) -> m (g f) 38 | 39 | newtype FunctorCompose (p :: Type -> Type) t = FunctorCompose t 40 | newtype FoldableCompose (p :: Type -> Type) t = 
FoldableCompose t 41 | newtype TraversableCompose (p :: Type -> Type) t = TraversableCompose t 42 | 43 | instance Transformation t => Transformation (FunctorCompose p t) where 44 | type Domain (FunctorCompose p t) = Compose p (Domain t) 45 | type Codomain (FunctorCompose p t) = Compose p (Codomain t) 46 | instance Transformation t => Transformation (FoldableCompose p t) where 47 | type Domain (FoldableCompose p t) = Compose p (Domain t) 48 | type Codomain (FoldableCompose p t) = Codomain t 49 | instance (Transformation t, Codomain t ~ Compose q r) => Transformation (TraversableCompose p t) where 50 | type Domain (TraversableCompose p t) = Compose p (Domain t) 51 | type Codomain (TraversableCompose p t) = Compose (Outer (Codomain t)) (Compose p (Inner (Codomain t))) 52 | 53 | type family Outer f where 54 | Outer (Compose p q) = p 55 | type family Inner f where 56 | Inner (Compose p q) = q 57 | 58 | instance (Rank1.Functor p, t `At` a) => FunctorCompose p t `At` a where 59 | FunctorCompose t $ Compose x = Compose ((t Transformation.$) Rank1.<$> x) 60 | instance (Rank1.Foldable p, Codomain t ~ Const m, Monoid m, t `At` a) => FoldableCompose p t `At` a where 61 | FoldableCompose t $ Compose x = Const $ Rank1.foldMap (getConst . (t Transformation.$)) x 62 | instance (Rank1.Traversable p, Applicative q, Codomain t ~ Compose q r, t `At` a) => TraversableCompose p t `At` a where 63 | TraversableCompose t $ Compose x = Compose $ Compose Rank1.<$> Rank1.traverse (getCompose . (t Transformation.$)) x 64 | 65 | instance Transformation t => Functor t Rank2.Empty where 66 | _ <$> Rank2.Empty = Rank2.Empty 67 | 68 | instance Transformation t => Functor t Proxy where 69 | _ <$> _ = Proxy 70 | 71 | instance Transformation t => Functor t (Const a) where 72 | _ <$> Const a = Const a 73 | 74 | instance (Transformation t, t `At` a) => Functor t (Rank2.Only a) where 75 | t <$> Rank2.Only x = Rank2.Only (t Transformation.$ x) 76 | 77 | instance Functor t g => Functor t (Rank2.Identity g) where 78 | f <$> Rank2.Identity g = Rank2.Identity (f <$> g) 79 | 80 | instance (Transformation t, Functor (FunctorCompose p t) g, Rank1.Functor p) => Functor t (Rank2.Compose g p) where 81 | t <$> Rank2.Compose g = Rank2.Compose (FunctorCompose t <$> g) 82 | 83 | instance (Transformation t, t `At` a, Rank1.Functor g) => Functor t (Rank2.Flip g a) where 84 | t <$> Rank2.Flip g = Rank2.Flip ((t Transformation.$) Rank1.<$> g) 85 | 86 | instance (Functor t g, Functor t h) => Functor t (Product g h) where 87 | t <$> Pair left right = Pair (t <$> left) (t <$> right) 88 | 89 | instance (Functor t g, Functor t h) => Functor t (Sum g h) where 90 | t <$> InL g = InL (t <$> g) 91 | t <$> InR h = InR (t <$> h) 92 | 93 | instance Transformation t => Foldable t Rank2.Empty where 94 | foldMap _ _ = mempty 95 | 96 | instance Transformation t => Foldable t Proxy where 97 | foldMap _ _ = mempty 98 | 99 | instance Transformation t => Foldable t (Const x) where 100 | foldMap _ _ = mempty 101 | 102 | instance (Transformation t, t `At` a, Codomain t ~ Const m) => Foldable t (Rank2.Only a) where 103 | foldMap t (Rank2.Only x) = getConst (t Transformation.$ x) 104 | 105 | instance Foldable t g => Foldable t (Rank2.Identity g) where 106 | foldMap t (Rank2.Identity g) = foldMap t g 107 | 108 | instance (Transformation t, Foldable (FoldableCompose p t) g, Rank1.Foldable p) => Foldable t (Rank2.Compose g p) where 109 | foldMap t (Rank2.Compose g) = foldMap (FoldableCompose t) g 110 | 111 | instance (Transformation t, t `At` a, Codomain t ~ Const m, 
Rank1.Foldable g) => Foldable t (Rank2.Flip g a) where 112 | foldMap t (Rank2.Flip g) = Rank1.foldMap (getConst . (t Transformation.$)) g 113 | 114 | instance (Foldable t g, Foldable t h, Codomain t ~ Const m, Monoid m) => Foldable t (Product g h) where 115 | foldMap t (Pair left right) = foldMap t left `mappend` foldMap t right 116 | 117 | instance (Foldable t g, Foldable t h) => Foldable t (Sum g h) where 118 | foldMap t (InL g) = foldMap t g 119 | foldMap t (InR h) = foldMap t h 120 | 121 | instance (Transformation t, Codomain t ~ Compose m f, Applicative m) => Traversable t Rank2.Empty where 122 | traverse _ _ = pure Rank2.Empty 123 | 124 | instance (Transformation t, Codomain t ~ Compose m f, Applicative m) => Traversable t Proxy where 125 | traverse _ _ = pure Proxy 126 | 127 | instance (Transformation t, Codomain t ~ Compose m f, Applicative m) => Traversable t (Const x) where 128 | traverse _ (Const x) = pure (Const x) 129 | 130 | instance (Transformation t, t `At` a, Codomain t ~ Compose m f, Rank1.Functor m) => Traversable t (Rank2.Only a) where 131 | traverse t (Rank2.Only x) = Rank2.Only Rank1.<$> getCompose (t Transformation.$ x) 132 | 133 | instance (Traversable t g, Codomain t ~ Compose m f, Rank1.Functor m) => Traversable t (Rank2.Identity g) where 134 | traverse t (Rank2.Identity g) = Rank2.Identity Rank1.<$> traverse t g 135 | 136 | instance (Transformation t, Traversable (TraversableCompose p t) g, 137 | Rank1.Traversable p, Codomain t ~ Compose q r, Rank1.Functor q) => Traversable t (Rank2.Compose g p) where 138 | traverse t (Rank2.Compose g) = Rank2.Compose Rank1.<$> traverse (TraversableCompose t) g 139 | 140 | instance (Transformation t, t `At` a, 141 | Codomain t ~ Compose m f, Applicative m, Rank1.Traversable g) => Traversable t (Rank2.Flip g a) where 142 | traverse t (Rank2.Flip g) = Rank2.Flip Rank1.<$> Rank1.traverse (getCompose . 
(t Transformation.$)) g 143 | 144 | instance (Traversable t g, Traversable t h, Codomain t ~ Compose m f, Applicative m) => Traversable t (Product g h) where 145 | traverse t (Pair left right) = liftA2 Pair (traverse t left) (traverse t right) 146 | 147 | instance (Traversable t g, Traversable t h, Codomain t ~ Compose m f, Rank1.Functor m) => Traversable t (Sum g h) where 148 | traverse t (InL g) = InL Rank1.<$> traverse t g 149 | traverse t (InR h) = InR Rank1.<$> traverse t h 150 | 151 | -- | Alphabetical synonym for '<$>' 152 | fmap :: Functor t g => t -> g (Domain t) -> g (Codomain t) 153 | fmap = (<$>) 154 | -------------------------------------------------------------------------------- /deep-transformations/test/Doctest.hs: -------------------------------------------------------------------------------- 1 | import Build_doctests (flags, pkgs, module_sources) 2 | import Test.DocTest (doctest) 3 | 4 | main :: IO () 5 | main = do 6 | doctest (flags ++ pkgs ++ module_sources) 7 | doctest (flags ++ pkgs ++ ["-pgmL", "markdown-unlit", "-isrc", "test/README.lhs"]) 8 | doctest (flags ++ pkgs ++ ["-isrc", "test/RepMin.hs", "test/RepMinAuto.hs"]) 9 | -------------------------------------------------------------------------------- /deep-transformations/test/README.lhs: -------------------------------------------------------------------------------- 1 | ../README.md -------------------------------------------------------------------------------- /deep-transformations/test/RepMin.hs: -------------------------------------------------------------------------------- 1 | {-# Language FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, RankNTypes, StandaloneDeriving, 2 | TypeFamilies, TypeOperators, UndecidableInstances #-} 3 | 4 | -- | The RepMin example - replicate a binary tree with all leaves replaced by the minimal leaf value. 
5 | module RepMin where 6 | 7 | import Data.Functor.Identity 8 | import Data.Kind (Type) 9 | import qualified Rank2 10 | import Transformation (Transformation(..)) 11 | import Transformation.AG (Inherited(..), Synthesized(..)) 12 | import qualified Transformation 13 | import qualified Transformation.AG as AG 14 | import qualified Transformation.Deep as Deep 15 | import qualified Transformation.Full as Full 16 | 17 | -- | tree data type 18 | data Tree a (f' :: Type -> Type) (f :: Type -> Type) = Fork{left :: f (Tree a f' f'), 19 | right:: f (Tree a f' f')} 20 | | Leaf{leafValue :: f a} 21 | -- | tree root 22 | data Root a f' f = Root{root :: f (Tree a f' f')} 23 | 24 | deriving instance (Show (f (Tree a f' f')), Show (f a)) => Show (Tree a f' f) 25 | deriving instance (Show (f (Tree a f' f'))) => Show (Root a f' f) 26 | 27 | instance Rank2.Functor (Tree a f') where 28 | f <$> Fork l r = Fork (f l) (f r) 29 | f <$> Leaf x = Leaf (f x) 30 | 31 | instance Rank2.Functor (Root a f') where 32 | f <$> Root x = Root (f x) 33 | 34 | instance Rank2.Apply (Tree a f') where 35 | Fork fl fr <*> ~(Fork l r) = Fork (Rank2.apply fl l) (Rank2.apply fr r) 36 | Leaf f <*> ~(Leaf x) = Leaf (Rank2.apply f x) 37 | 38 | instance Rank2.Applicative (Tree a f') where 39 | pure x = Leaf x 40 | 41 | instance Rank2.Apply (Root a f') where 42 | Root f <*> ~(Root x) = Root (Rank2.apply f x) 43 | 44 | instance (Transformation t, Transformation.At t a, Full.Functor t (Tree a)) => Deep.Functor t (Tree a) where 45 | t <$> Fork l r = Fork (t Full.<$> l) (t Full.<$> r) 46 | t <$> Leaf x = Leaf (t Transformation.$ x) 47 | 48 | instance (Transformation t, Full.Functor t (Tree a)) => Deep.Functor t (Root a) where 49 | t <$> Root x = Root (t Full.<$> x) 50 | 51 | -- | The transformation type 52 | data RepMin = RepMin 53 | 54 | type Sem = AG.Semantics RepMin 55 | 56 | instance Transformation RepMin where 57 | type Domain RepMin = Identity 58 | type Codomain RepMin = Sem 59 | 60 | -- | Inherited attributes' type 61 | data InhRepMin = InhRepMin{global :: Int} 62 | deriving Show 63 | 64 | -- | Synthesized attributes' type 65 | data SynRepMin = SynRepMin{local :: Int, 66 | tree :: Tree Int Identity Identity} 67 | deriving Show 68 | 69 | type instance AG.Atts (Inherited RepMin) (Tree Int f' f) = InhRepMin 70 | type instance AG.Atts (Synthesized RepMin) (Tree Int f' f) = SynRepMin 71 | type instance AG.Atts (Inherited RepMin) (Root Int f' f) = () 72 | type instance AG.Atts (Synthesized RepMin) (Root Int f' f) = SynRepMin 73 | 74 | type instance AG.Atts (Inherited RepMin) Int = () 75 | type instance AG.Atts (Synthesized RepMin) Int = Int 76 | 77 | instance Transformation.At RepMin (Tree Int Sem Sem) where 78 | ($) = AG.applyDefault runIdentity 79 | instance Transformation.At RepMin (Root Int Sem Sem) where 80 | ($) = AG.applyDefault runIdentity 81 | 82 | instance Full.Functor RepMin (Tree Int) where 83 | (<$>) = Full.mapUpDefault 84 | instance Full.Functor RepMin (Root Int) where 85 | (<$>) = Full.mapUpDefault 86 | 87 | -- | The semantics of the primitive 'Int' type must be defined manually. 
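-- A 'Sem' value is an arrow from a node's inherited attributes to its synthesized attributes. An 'Int'
-- leaf has no children to pass anything down to, so the instance below simply ignores its unit inherited
-- attribute and synthesizes the wrapped number itself, e.g.
--
-- > Rank2.apply (RepMin Transformation.$ Identity 42) (Inherited ())   -- yields Synthesized 42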
88 | instance Transformation.At RepMin Int where 89 | RepMin $ Identity n = Rank2.Arrow (const $ Synthesized n) 90 | 91 | instance AG.Attribution RepMin (Root Int) Sem Identity where 92 | attribution RepMin self (inherited, Root root) = (Synthesized SynRepMin{local= local (syn root), 93 | tree= tree (syn root)}, 94 | Root{root= Inherited InhRepMin{global= local (syn root)}}) 95 | 96 | instance AG.Attribution RepMin (Tree Int) Sem Identity where 97 | attribution _ _ (inherited, Fork left right) = (Synthesized SynRepMin{local= local (syn left) 98 | `min` local (syn right), 99 | tree= tree (syn left) `fork` tree (syn right)}, 100 | Fork{left= Inherited InhRepMin{global= global $ inh inherited}, 101 | right= Inherited InhRepMin{global= global $ inh inherited}}) 102 | attribution _ _ (inherited, Leaf value) = (Synthesized SynRepMin{local= syn value, 103 | tree= Leaf{leafValue= Identity $ global 104 | $ inh inherited}}, 105 | Leaf{leafValue= Inherited ()}) 106 | 107 | -- * Helper functions 108 | fork l r = Fork (Identity l) (Identity r) 109 | leaf = Leaf . Identity 110 | 111 | -- | The example tree 112 | exampleTree :: Root Int Identity Identity 113 | exampleTree = Root (Identity $ leaf 7 `fork` (leaf 4 `fork` leaf 1) `fork` leaf 3) 114 | 115 | -- | 116 | -- >>> Rank2.apply (Full.fmap RepMin $ Identity exampleTree) (Inherited ()) 117 | -- Synthesized {syn = SynRepMin {local = 1, tree = Fork {left = Identity (Fork {left = Identity (Leaf {leafValue = Identity 1}), right = Identity (Fork {left = Identity (Leaf {leafValue = Identity 1}), right = Identity (Leaf {leafValue = Identity 1})})}), right = Identity (Leaf {leafValue = Identity 1})}}} 118 | -------------------------------------------------------------------------------- /deep-transformations/test/RepMinAuto.hs: -------------------------------------------------------------------------------- 1 | {-# Language DataKinds, DeriveGeneric, DuplicateRecordFields, FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, RankNTypes, 2 | StandaloneDeriving, TemplateHaskell, TypeFamilies, TypeOperators, UndecidableInstances #-} 3 | 4 | -- | The RepMin example with automatic derivation of attributes. 
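-- Unlike the "RepMin" module, which spells out every attribution rule by hand, this version derives the
-- synthesized-attribute rules generically from the 'Generic' instances: the 'AG.Folded' wrapper stands
-- for an attribute folded together from the children's attributes, and the 'AG.Mapped' wrapper for a
-- tree rebuilt from theirs, so only the primitive 'Int' semantics and the root's bequest of the
-- inherited attribute still have to be written manually below.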
5 | 6 | module RepMinAuto where 7 | 8 | import Data.Functor.Identity 9 | import Data.Kind (Type) 10 | import Data.Semigroup (Min(Min, getMin)) 11 | import GHC.Generics (Generic) 12 | import qualified Rank2 13 | import qualified Rank2.TH 14 | import Transformation (Transformation(..)) 15 | import Transformation.AG (Inherited(..), Synthesized(..)) 16 | import qualified Transformation 17 | import qualified Transformation.AG as AG 18 | import qualified Transformation.AG.Generics as AG 19 | import Transformation.AG.Generics (Auto(Auto)) 20 | import qualified Transformation.Deep as Deep 21 | import qualified Transformation.Full as Full 22 | import qualified Transformation.Deep.TH 23 | import qualified Transformation.Shallow.TH 24 | 25 | -- | tree data type 26 | data Tree a (f' :: Type -> Type) (f :: Type -> Type) = Fork{left :: f (Tree a f' f'), 27 | right:: f (Tree a f' f')} 28 | | Leaf{leafValue :: f a} 29 | -- | tree root 30 | data Root a f' f = Root{root :: f (Tree a f' f')} 31 | 32 | deriving instance (Show (f (Tree a f' f')), Show (f a)) => Show (Tree a f' f) 33 | deriving instance (Show (f (Tree a f' f'))) => Show (Root a f' f) 34 | 35 | $(concat <$> 36 | (mapM (\derive-> mconcat <$> mapM derive [''Tree, ''Root]) 37 | [Rank2.TH.deriveFunctor, Rank2.TH.deriveFoldable, Rank2.TH.deriveTraversable, Rank2.TH.unsafeDeriveApply, 38 | Transformation.Shallow.TH.deriveAll, Transformation.Deep.TH.deriveAll])) 39 | 40 | -- | The transformation type. It will always appear wrapped in 'Auto' to enable automatic attribute derivation. 41 | data RepMin = RepMin 42 | 43 | -- | The semantics type synonym for convenience 44 | type Sem = AG.Semantics (Auto RepMin) 45 | 46 | instance Transformation (Auto RepMin) where 47 | type Domain (Auto RepMin) = Identity 48 | type Codomain (Auto RepMin) = Sem 49 | 50 | instance AG.Revelation (Auto RepMin) where 51 | reveal (Auto RepMin) = runIdentity 52 | 53 | -- | Inherited attributes' type 54 | data InhRepMin = InhRepMin{global :: Int} 55 | deriving (Generic, Show) 56 | 57 | -- | Synthesized attributes' types rely on the 'AG.Folded' and 'AG.Mapped' wrappers, whose rules can be automatically 58 | -- | derived. 59 | data SynRepMin g = SynRepMin{local :: AG.Folded (Min Int), 60 | tree :: AG.Mapped Identity (g Int Identity Identity)} 61 | deriving Generic 62 | deriving instance Show (g Int Identity Identity) => Show (SynRepMin g) 63 | 64 | -- | Synthesized attributes' type for the integer leaf. 65 | data SynRepLeaf = SynRepLeaf{local :: AG.Folded (Min Int), 66 | tree :: AG.Mapped Identity Int} 67 | deriving (Generic, Show) 68 | 69 | type instance AG.Atts (Inherited RepMin) (Tree Int f' f) = InhRepMin 70 | type instance AG.Atts (Synthesized RepMin) (Tree Int f' f) = SynRepMin Tree 71 | type instance AG.Atts (Inherited RepMin) (Root Int f' f) = () 72 | type instance AG.Atts (Synthesized RepMin) (Root Int f' f) = SynRepMin Root 73 | 74 | type instance AG.Atts (Inherited RepMin) Int = InhRepMin 75 | type instance AG.Atts (Synthesized RepMin) Int = SynRepLeaf 76 | 77 | -- | The semantics of the primitive 'Int' type must be defined manually. 78 | instance Transformation.At (Auto RepMin) Int where 79 | Auto RepMin $ Identity n = Rank2.Arrow f 80 | where f (Inherited InhRepMin{global= n'}) = 81 | Synthesized SynRepLeaf{local= AG.Folded (Min n), 82 | tree= AG.Mapped (Identity n')} 83 | 84 | -- | The only required attribute rule is the only non-trivial one, where we set the 'global' inherited attribute to 85 | -- | the 'local' minimum synthesized attribute at the tree root. 
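-- In attribute-grammar terms, 'AG.bequest' computes the inherited attributes handed down to a node's
-- children; here the root passes the folded minimum down as the 'global' attribute that every leaf gets
-- replaced with.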
86 | instance AG.Bequether (Auto RepMin) (Root Int) Sem Identity where 87 | bequest (Auto RepMin) self inherited (Root (Synthesized SynRepMin{local= rootLocal})) = 88 | Root{root= Inherited InhRepMin{global= getMin (AG.getFolded rootLocal)}} 89 | 90 | -- * Helper functions 91 | fork l r = Fork (Identity l) (Identity r) 92 | leaf = Leaf . Identity 93 | 94 | -- | The example tree 95 | exampleTree :: Root Int Identity Identity 96 | exampleTree = Root (Identity $ leaf 7 `fork` (leaf 4 `fork` leaf 1) `fork` leaf 3) 97 | 98 | -- | 99 | -- >>> syn $ Rank2.apply (Auto RepMin Transformation.$ Identity (Auto RepMin Deep.<$> exampleTree)) (Inherited ()) 100 | -- SynRepMin {local = Folded {getFolded = Min {getMin = 1}}, tree = Mapped {getMapped = Identity (Root {root = Identity (Fork {left = Identity (Fork {left = Identity (Leaf {leafValue = Identity 1}), right = Identity (Fork {left = Identity (Leaf {leafValue = Identity 1}), right = Identity (Leaf {leafValue = Identity 1})})}), right = Identity (Leaf {leafValue = Identity 1})})})}} 101 | -------------------------------------------------------------------------------- /grammatical-parsers/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | Version 0.7.2.1 2 | --------------- 3 | * Updated CI configuration 4 | * Improved Haddock documentation 5 | * Removed the unused containers dependency from library and bumped it in tests and executables 6 | 7 | Version 0.7.2 8 | --------------- 9 | * Added `Combinators.takeSomeNonEmpty` 10 | * Updated CI configuration 11 | * Bumped the upper bounds of the `witherable` and `containers` dependencies 12 | * Slightly improved documentation 13 | 14 | Version 0.7.1 15 | --------------- 16 | * Added a Show instance for every Show1 to satisfy the new class declaration 17 | * Bumped the upper bound of the `template-haskell` dependency 18 | * Eliminated a bunch of compiler warnings 19 | 20 | Version 0.7.0.1 21 | --------------- 22 | * Bumped the upper bound of the `rank2classes` dependency 23 | 24 | Version 0.7 25 | --------------- 26 | 27 | * Reorganized the `LeftRecursive` modules and deprecated the old module names 28 | * Re-exposed the `Text.Grampa.ContextFree.Memoizing` module 29 | * Floated the `Rank2.Apply` constraint, which now may be necessary in calling contexts 30 | * Turned `FailureDescription` from sum to product 31 | * Added the `chainRecursive` & `chainLongestRecursive` methods 32 | * Added `autochain` 33 | * Improved performance by making `instance Semigroup ParseFailure` lazier 34 | * Improved performance by shortcutting non-left-recursive grammars 35 | * Improved the output of `TraceableParsing` instances 36 | * Fixed comments 37 | * Incremented dependency bounds 38 | * Factored out several internal utility functions 39 | * Updated code to compile with GHC 9.4.2 40 | * Updated GitHub actions 41 | 42 | Version 0.6 43 | --------------- 44 | * Updated code to compile with GHC 9.2.2 45 | * Added type `GrammarOverlay` and function `overlay` 46 | * Added the `someNonEmpty` combinator 47 | * Added the `CommittedParsing` class 48 | * The failure messages are now sorted 49 | * `` preserves the erroneous messages 50 | * Fixed the `parsingResult` in Packrat 51 | * Fixed the use of `maxBound` on `Down` which flipped meaning in `base` 52 | * Turned `ParseFailure` into a record to work around an old Haddock bug 53 | * Unified the `FailureInfo` type with `ParseFailure` 54 | * Parameterized `ParseFailure` with a position type 55 | * Eliminated the `size-based` and 
`testing-feat` dependencies 56 | * Hid the `Text.Grampa.ContextFree.Memoizing` module 57 | 58 | Version 0.5.2 59 | --------------- 60 | * Switched from the deprecated `witherable-class` package to `witherable` 61 | * Deprecated the `ContextFree.Memoizing` module 62 | * Fixed and tested the `<<|>` instance of the `LeftRecursive` parser 63 | * Fixed and tested a bug with left-recursive monadic empty match 64 | * Fixed an infinite loop in the expected set closure calculation 65 | * Improved documentation 66 | * Added the `TraceableParsing` class for easier debugging, not exposed 67 | 68 | Version 0.5.1 69 | --------------- 70 | * Fixed the `skipAll` implementation for the `SortedMemoizing.Transformer` parser 71 | * Added the `Filterable` and `MonadFail` instances to all parser types 72 | * Added instances `Monad Ambiguous` and `Functor ParseFailure` 73 | * Generalized the types of `LeftRecursive.Transformer.tmap` and `tbind` 74 | * Incremented dependencies' upper bounds 75 | 76 | Version 0.5 77 | --------------- 78 | * Replaced `MonoidParsing` by `InputParsing` 79 | * Moved the `InputParsing` and `InputCharParser` classes into the `input-parsers` package 80 | * Added the `Expected` data type to eliminate the `Show` constraint on `string` 81 | * Fixed the signature of `scan` and `scanChars` 82 | * Deprecated `endOfInput` and `satisfyChar` 83 | * Replaced `Lexical g` with `LexicalParsing m` 84 | * Added modules `SortedMemoizing.Transformer` and `LeftRecursive.Transformer` 85 | * Added the `getAmbiguous` destructor 86 | 87 | Version 0.4.1.2 88 | --------------- 89 | * Fixed the doctests using cabal-doctest 90 | * Fixed a QuickCheck timeout, issue #20 91 | 92 | Version 0.4.1.1 93 | --------------- 94 | * Fixed the doctests after cabal get 95 | 96 | Version 0.4.1 97 | --------------- 98 | * Adjustments for monoid-subclasses-1.0 99 | 100 | Version 0.4.0.1 101 | --------------- 102 | * Added missing markdown-unlit dependency 103 | 104 | Version 0.4 105 | --------------- 106 | * Added `Position` and related functions 107 | * Renamed `showFailure` to `failureDescription` 108 | * Faster parsing at the cost of slower compilation 109 | * Replaced Word64 source positions by plain Int 110 | * Fixed Haddock-related compilation warnings 111 | 112 | Version 0.3.2 113 | --------------- 114 | * Improved error reporting 115 | * Updated test suite to work with testing-feat >= 1.1 116 | * Fixed the construction of `Ambiguous` results 117 | * Added Applicative and Traversable instances for Ambiguous 118 | 119 | Version 0.3.1 120 | --------------- 121 | * Added `Text.Parser.Combinators` 122 | * Improved `try/(<?>)` error reporting 123 | * Added `showFailure` 124 | 125 | Version 0.3 126 | --------------- 127 | * Eliminated `token` and `whiteSpace` 128 | * Added the `Lexical` class of grammars 129 | * Added `Semigroup` instances to fix compilation with GHC 8.4.1 130 | * More precise calculation of `(>>=)` descendants 131 | * Added the `Ambiguous` results and the `AmbiguousParsing` class 132 | * Added the `SortedMemoizing` module 133 | 134 | Version 0.2.2 135 | --------------- 136 | * Incremented dependency version bounds 137 | 138 | Version 0.2.1 139 | --------------- 140 | * Added the `ContextFree.Continued` module 141 | * Fixed `LeftRecursive.Parallel.concatMany` 142 | 143 | Version 0.2 144 | --------------- 145 | * Numerous performance and documentation improvements 146 | * Fixed the `endOfInput` implementation in `PEG.Backtrack` 147 | * Made `LeftRecursive.Parser` a type synonym, introduced `peg` and `longest` 148 | *
Added the `notSatisfy[Char]` methods 149 | * Added `satisfyCharInput` 150 | -------------------------------------------------------------------------------- /grammatical-parsers/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016, Mario Blažević 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are 6 | met: 7 | 8 | 1. Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | 2. Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the 14 | distribution. 15 | 16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 | -------------------------------------------------------------------------------- /grammatical-parsers/README.md: -------------------------------------------------------------------------------- 1 | Grammatical Parsers 2 | =================== 3 | 4 | Behold, yet another parser combinator library in Haskell. Except this one is capable of working with grammars rather than mere parsers. A more in-depth description is available in the [paper](../Grampa.lhs.pdf) from Haskell Symposium 2017, what follows is a short tutorial. 5 | 6 | You can apply the usual 7 | [Applicative](http://hackage.haskell.org/package/base/docs/Control-Applicative.html#t:Applicative), 8 | [Alternative](http://hackage.haskell.org/package/base/docs/Control-Applicative.html#t:Alternative), and 9 | [Monad](http://hackage.haskell.org/package/base/docs/Control-Monad.html#t:Monad) operators to combine primitive parsers 10 | into larger ones. The combinators from the [parsers](http://hackage.haskell.org/package/parsers) library type classes 11 | are also available. Here are some typical imports you may need: 12 | 13 | ~~~ {.haskell} 14 | {-# LANGUAGE RecordWildCards, ScopedTypeVariables, TemplateHaskell #-} 15 | module README where 16 | import Control.Applicative 17 | import Data.Char (isDigit) 18 | import Data.Functor.Classes (Show1, showsPrec1) 19 | import Text.Grampa 20 | import Text.Grampa.ContextFree.Memoizing.LeftRecursive (Parser) 21 | import qualified Rank2 22 | import qualified Rank2.TH 23 | ~~~ 24 | 25 | What puts this library apart from most is that the parsers it allows you to construct are *grammatical*, just as the 26 | library name says. 
Instead of writing the parser definitions as top-level bindings, you can and should group them into 27 | a grammar record definition, like this: 28 | 29 | ~~~ {.haskell} 30 | arithmetic :: Rank2.Apply g => GrammarBuilder Arithmetic g Parser String 31 | arithmetic Arithmetic{..} = Arithmetic{ 32 | sum= product 33 | <|> string "-" *> (negate <$> product) 34 | <|> (+) <$> sum <* string "+" <*> product 35 | <|> (-) <$> sum <* string "-" <*> product, 36 | product= factor 37 | <|> (*) <$> product <* string "*" <*> factor 38 | <|> div <$> product <* string "/" <*> factor, 39 | factor= read <$> number 40 | <|> string "(" *> sum <* string ")", 41 | number= takeCharsWhile1 isDigit <?> "number"} 42 | ~~~ 43 | 44 | What on Earth for? One good reason is that these parser definitions can then be left-recursive, which is usually 45 | deadly for parser combinator libraries. There are other benefits like memoization and grammar composability, and the 46 | main downside is the obligation to declare the grammar record: 47 | 48 | ~~~ {.haskell} 49 | data Arithmetic f = Arithmetic{sum :: f Int, 50 | product :: f Int, 51 | factor :: f Int, 52 | number :: f String} 53 | ~~~ 54 | 55 | and to make it an instance of several rank 2 type classes: 56 | 57 | ~~~ {.haskell} 58 | $(Rank2.TH.deriveAll ''Arithmetic) 59 | ~~~ 60 | 61 | Optionally, you may also be inclined to declare a proper ``Show`` instance, as it's often handy: 62 | 63 | ~~~ {.haskell} 64 | instance Show1 f => Show (Arithmetic f) where 65 | show Arithmetic{..} = 66 | "Arithmetic{\n sum=" ++ showsPrec1 0 sum 67 | (",\n product=" ++ showsPrec1 0 product 68 | (",\n factor=" ++ showsPrec1 0 factor 69 | (",\n number=" ++ showsPrec1 0 number "}"))) 70 | ~~~ 71 | 72 | Once that's done, use [fixGrammar](http://hackage.haskell.org/package/grammatical-parsers/docs/Text-Grampa.html#v:fixGrammar) to, well, fix the grammar 73 | 74 | ~~~ {.haskell} 75 | grammar = fixGrammar arithmetic 76 | ~~~ 77 | 78 | and then [parseComplete](http://hackage.haskell.org/package/grammatical-parsers/docs/Text-Grampa.html#v:parseComplete) 79 | or [parsePrefix](http://hackage.haskell.org/package/grammatical-parsers/docs/Text-Grampa.html#v:parsePrefix) to parse 80 | some input.
81 | 82 | ~~~ {.haskell} 83 | -- | 84 | -- >>> parseComplete grammar "42" 85 | -- Arithmetic{ 86 | -- sum=Compose (Right [42]), 87 | -- product=Compose (Right [42]), 88 | -- factor=Compose (Right [42]), 89 | -- number=Compose (Right ["42"])} 90 | -- >>> parseComplete grammar "1+2*3" 91 | -- Arithmetic{ 92 | -- sum=Compose (Right [7]), 93 | -- product=Compose (Left (ParseFailure {failurePosition = Down 4, expectedAlternatives = FailureDescription {staticDescriptions = ["end of input"], literalDescriptions = []}, errorAlternatives = []})), 94 | -- factor=Compose (Left (ParseFailure {failurePosition = Down 4, expectedAlternatives = FailureDescription {staticDescriptions = ["end of input"], literalDescriptions = []}, errorAlternatives = []})), 95 | -- number=Compose (Left (ParseFailure {failurePosition = Down 4, expectedAlternatives = FailureDescription {staticDescriptions = ["end of input"], literalDescriptions = []}, errorAlternatives = []}))} 96 | -- >>> parsePrefix grammar "1+2*3 apples" 97 | -- Arithmetic{ 98 | -- sum=Compose (Compose (Right [("+2*3 apples",1),("*3 apples",3),(" apples",7)])), 99 | -- product=Compose (Compose (Right [("+2*3 apples",1)])), 100 | -- factor=Compose (Compose (Right [("+2*3 apples",1)])), 101 | -- number=Compose (Compose (Right [("+2*3 apples","1")]))} 102 | ~~~ 103 | 104 | To see more grammar examples, go straight to the 105 | [examples](https://github.com/blamario/grampa/tree/master/grammatical-parsers/examples) directory that builds up several 106 | smaller grammars and combines them all together in the 107 | [Combined](https://github.com/blamario/grampa/blob/master/grammatical-parsers/examples/Combined.hs) module. 108 | 109 | For more conventional tastes there are monolithic examples of 110 | [Lua](https://github.com/blamario/language-lua2/blob/master/src/Language/Lua/Grammar.hs), 111 | [Modula-2](https://hackage.haskell.org/package/language-Modula2), and 112 | [Oberon](http://hackage.haskell.org/package/language-oberon) grammars as well. 
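Back in the arithmetic example, every field of the record returned by `parseComplete` is wrapped in `Compose`, so a single production's result can be projected out and unwrapped. Here is a minimal sketch (the `evalSum` name is ours; it assumes an additional `import Data.Functor.Compose (getCompose)` next to the imports above, and qualifies the field accessor with the module name to avoid the clash with `Prelude.sum`):

~~~ {.haskell}
evalSum :: String -> ParseResults String [Int]
evalSum = getCompose . README.sum . parseComplete grammar
~~~

Applied to `"1+2*3"` this should yield `Right [7]`, matching the `sum` field in the doctest above.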
113 | -------------------------------------------------------------------------------- /grammatical-parsers/Setup.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Distribution.Extra.Doctest (defaultMainWithDoctests) 4 | 5 | main :: IO () 6 | main = defaultMainWithDoctests "doctests" 7 | 8 | -------------------------------------------------------------------------------- /grammatical-parsers/examples/Arithmetic.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE FlexibleContexts, FlexibleInstances, RecordWildCards, ScopedTypeVariables #-} 2 | module Arithmetic where 3 | 4 | import Control.Applicative 5 | import Data.Char (isDigit) 6 | import Data.Functor.Compose (Compose(..)) 7 | import Data.Monoid ((<>)) 8 | import Text.Parser.Token (symbol) 9 | 10 | import Text.Grampa 11 | import Text.Grampa.ContextFree.SortedMemoizing.LeftRecursive (Parser) 12 | import Utilities (infixJoin) 13 | 14 | import qualified Rank2 15 | import Prelude hiding (negate, product, subtract, sum) 16 | 17 | class ArithmeticDomain e where 18 | number :: Int -> e 19 | add :: e -> e -> e 20 | multiply :: e -> e -> e 21 | negate :: e -> e 22 | subtract :: e -> e -> e 23 | divide :: e -> e -> e 24 | 25 | instance ArithmeticDomain Int where 26 | number = id 27 | add = (+) 28 | multiply = (*) 29 | negate = (0 -) 30 | subtract = (-) 31 | divide = div 32 | 33 | instance ArithmeticDomain [Char] where 34 | number = show 35 | add = infixJoin "+" 36 | multiply = infixJoin "*" 37 | negate = ("-" <>) 38 | subtract = infixJoin "-" 39 | divide = infixJoin "/" 40 | 41 | data Arithmetic e f = Arithmetic{expr :: f e, 42 | sum :: f e, 43 | product :: f e, 44 | factor :: f e, 45 | primary :: f e} 46 | 47 | instance Show (f e) => Show (Arithmetic e f) where 48 | showsPrec prec a rest = "Arithmetic{expr=" ++ showsPrec prec (expr a) 49 | (", sum=" ++ showsPrec prec (sum a) 50 | (", product=" ++ showsPrec prec (product a) 51 | (", factor=" ++ showsPrec prec (factor a) 52 | (", primary=" ++ showsPrec prec (primary a) ("}" ++ rest))))) 53 | 54 | instance Rank2.Functor (Arithmetic e) where 55 | f <$> a = a{expr= f (expr a), 56 | sum= f (sum a), 57 | product= f (product a), 58 | factor= f (factor a), 59 | primary= f (primary a)} 60 | 61 | instance Rank2.Apply (Arithmetic e) where 62 | a <*> a' = Arithmetic (expr a `Rank2.apply` expr a') 63 | (sum a `Rank2.apply` sum a') 64 | (product a `Rank2.apply` product a') 65 | (factor a `Rank2.apply` factor a') 66 | (primary a `Rank2.apply` primary a') 67 | 68 | instance Rank2.Applicative (Arithmetic e) where 69 | pure f = Arithmetic f f f f f 70 | 71 | instance Rank2.DistributiveTraversable (Arithmetic e) 72 | 73 | instance Rank2.Distributive (Arithmetic e) where 74 | cotraverse w f = Arithmetic{expr= w (expr <$> f), 75 | sum= w (sum <$> f), 76 | product= w (product <$> f), 77 | factor= w (factor <$> f), 78 | primary= w (primary <$> f)} 79 | 80 | instance Rank2.Foldable (Arithmetic e) where 81 | foldMap f a = f (expr a) <> f (sum a) <> f (product a) <> f (factor a) <> f (primary a) 82 | 83 | instance Rank2.Traversable (Arithmetic e) where 84 | traverse f a = Arithmetic 85 | <$> f (expr a) 86 | <*> f (sum a) 87 | <*> f (product a) 88 | <*> f (factor a) 89 | <*> f (primary a) 90 | 91 | instance TokenParsing (Parser (Arithmetic e) String) where 92 | token = lexicalToken 93 | instance LexicalParsing (Parser (Arithmetic e) String) 94 | 95 | arithmetic :: (LexicalParsing (Parser g String), 
ArithmeticDomain e) => GrammarBuilder (Arithmetic e) g Parser String 96 | arithmetic Arithmetic{..} = Arithmetic{ 97 | expr= lexicalWhiteSpace *> sum, 98 | sum= product 99 | <|> symbol "-" *> (negate <$> product) 100 | <|> add <$> sum <* symbol "+" <*> product 101 | <|> subtract <$> sum <* symbol "-" <*> product, 102 | product= factor 103 | <|> multiply <$> product <* symbol "*" <*> factor 104 | <|> divide <$> product <* symbol "/" <*> factor, 105 | factor= primary 106 | <|> symbol "(" *> expr <* symbol ")", 107 | primary= lexicalToken ((number . read) <$> takeCharsWhile1 isDigit) <?> "digits"} 108 | 109 | main :: IO () 110 | main = getContents >>= 111 | print . (getCompose . expr . parseComplete (fixGrammar arithmetic) :: String -> ParseResults String [Int]) 112 | -------------------------------------------------------------------------------- /grammatical-parsers/examples/Boolean.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE FlexibleContexts, FlexibleInstances, KindSignatures, RecordWildCards, ScopedTypeVariables, 2 | TemplateHaskell, TypeFamilies, TypeOperators, UndecidableInstances #-} 3 | module Boolean where 4 | 5 | import Control.Applicative 6 | import qualified Data.Bool 7 | import Data.Char (isSpace) 8 | import Data.Kind (Type) 9 | import Data.Monoid ((<>)) 10 | import Text.Parser.Token (TokenParsing, symbol) 11 | 12 | import qualified Rank2.TH 13 | 14 | import Text.Grampa 15 | import Utilities (infixJoin) 16 | 17 | import Prelude hiding (and, or, not) 18 | 19 | class BooleanDomain e where 20 | and :: e -> e -> e 21 | or :: e -> e -> e 22 | not :: e -> e 23 | true :: e 24 | false :: e 25 | 26 | instance BooleanDomain Bool where 27 | true = True 28 | false = False 29 | and = (&&) 30 | or = (||) 31 | not = Data.Bool.not 32 | 33 | instance BooleanDomain [Char] where 34 | true = "True" 35 | false = "False" 36 | and = infixJoin "&&" 37 | or = infixJoin "||" 38 | not = ("not " <> ) 39 | 40 | data Boolean e f = 41 | Boolean{ 42 | expr :: f e, 43 | term :: f e, 44 | factor :: f e} 45 | deriving Show 46 | 47 | instance CharParsing (p (Boolean e) String) => TokenParsing (p (Boolean e) String) 48 | 49 | instance (DeterministicParsing (p (Boolean e) String), 50 | InputCharParsing (p (Boolean e) String), ParserInput (p (Boolean e) String) ~ String) => 51 | LexicalParsing (p (Boolean e) String) 52 | 53 | $(Rank2.TH.deriveAll ''Boolean) 54 | 55 | boolean :: forall e p (g :: (Type -> Type) -> Type).
56 | (BooleanDomain e, LexicalParsing (p g String), ParserInput (p g String) ~ String) => 57 | p g String e -> Boolean e (p g String) -> Boolean e (p g String) 58 | boolean p Boolean{..} = Boolean{ 59 | expr= term 60 | <|> or <$> term <* symbol "||" <*> expr, 61 | term= factor 62 | <|> and <$> factor <* symbol "&&" <*> term, 63 | factor= keyword "True" *> pure true 64 | <|> keyword "False" *> pure false 65 | <|> keyword "not" *> takeCharsWhile isSpace *> (not <$> factor) 66 | <|> p 67 | <|> symbol "(" *> expr <* symbol ")"} 68 | -------------------------------------------------------------------------------- /grammatical-parsers/examples/Combined.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, RecordWildCards, 2 | TemplateHaskell, UndecidableInstances #-} 3 | {-# OPTIONS_GHC -Wno-simplifiable-class-constraints #-} 4 | 5 | module Combined where 6 | 7 | import Control.Applicative 8 | import qualified Data.Bool 9 | import Data.Map (Map) 10 | import qualified Data.Map as Map 11 | import qualified Rank2 12 | import qualified Rank2.TH 13 | import Text.Grampa (TokenParsing, LexicalParsing, GrammarBuilder) 14 | import Text.Grampa.ContextFree.SortedMemoizing.LeftRecursive (Parser) 15 | import qualified Arithmetic 16 | import qualified Boolean 17 | import qualified Comparisons 18 | import qualified Conditionals 19 | import qualified Lambda 20 | 21 | -- | Grammar that combines arithmetic, boolean, comparison, conditional, and lambda expression grammars. The first 22 | -- three productions are used to bind them all together, see 'expression' below. 23 | data Expression f = 24 | Expression{ 25 | expr :: f Domain, 26 | term :: f Domain, 27 | primary :: f Domain, 28 | arithmeticGrammar :: Arithmetic.Arithmetic Domain f, 29 | booleanGrammar :: Boolean.Boolean Domain f, 30 | comparisonGrammar :: Comparisons.Comparisons Domain Domain f, 31 | conditionalGrammar :: Conditionals.Conditionals Domain Domain f, 32 | lambdaGrammar :: Lambda.Lambda Domain f} 33 | 34 | -- | The combined domain result type tags different component results with their type. 
35 | data Tagged = IntExpression {intFromExpression :: Int} 36 | | BoolExpression {boolFromExpression :: Bool} 37 | | FunctionExpression {functionFromExpression :: Tagged -> Tagged} 38 | | TypeError String 39 | deriving (Eq, Ord, Show) 40 | 41 | type Env = Map String Tagged 42 | 43 | -- | The semantic domain of the parsed expression 44 | type Domain = Env -> Tagged 45 | 46 | instance Eq (Tagged -> Tagged) where 47 | (==) = error "Can't compare functions" 48 | 49 | instance Ord (Tagged -> Tagged) where 50 | (<=) = error "Can't compare functions" 51 | 52 | instance Show (Tagged -> Tagged) where 53 | show _ = "function" 54 | 55 | instance Arithmetic.ArithmeticDomain Tagged where 56 | number = IntExpression 57 | IntExpression a `add` IntExpression b = IntExpression (a+b) 58 | _ `add` _ = TypeError "type error: add expects numbers" 59 | IntExpression a `multiply` IntExpression b = IntExpression (a*b) 60 | _ `multiply` _ = TypeError "type error: multiply expects numbers" 61 | negate (IntExpression a) = IntExpression (Prelude.negate a) 62 | negate _ = TypeError "type error: negate expects a number" 63 | IntExpression a `subtract` IntExpression b = IntExpression (a-b) 64 | _ `subtract` _ = TypeError "type error: subtract expects numbers" 65 | IntExpression a `divide` IntExpression b = IntExpression (div a b) 66 | _ `divide` _ = TypeError "type error: divide expects numbers" 67 | 68 | instance Arithmetic.ArithmeticDomain (Env -> Tagged) where 69 | number n _ = IntExpression n 70 | (a `add` b) env = case (a env, b env) 71 | of (IntExpression a', IntExpression b') -> IntExpression (a' + b') 72 | _ -> TypeError "type error: add expects numbers" 73 | (a `multiply` b) env = case (a env, b env) 74 | of (IntExpression a', IntExpression b') -> IntExpression (a' * b') 75 | _ -> TypeError "type error: multiply expects numbers" 76 | negate a env = case a env 77 | of IntExpression a' -> IntExpression (Prelude.negate a') 78 | _ -> TypeError "type error: negate expects a number" 79 | (a `subtract` b) env = case (a env, b env) 80 | of (IntExpression a', IntExpression b') -> IntExpression (a' - b') 81 | _ -> TypeError "type error: subtract expects numbers" 82 | (a `divide` b) env = case (a env, b env) 83 | of (IntExpression a', IntExpression b') -> IntExpression (div a' b') 84 | _ -> TypeError "type error: divide expects numbers" 85 | 86 | instance Boolean.BooleanDomain (Env -> Tagged) where 87 | true _ = BoolExpression True 88 | false _ = BoolExpression False 89 | (a `and` b) env = case (a env, b env) 90 | of (BoolExpression a', BoolExpression b') -> BoolExpression (a' && b') 91 | _ -> TypeError "type error: and expects booleans" 92 | (a `or` b) env = case (a env, b env) 93 | of (BoolExpression a', BoolExpression b') -> BoolExpression (a' || b') 94 | _ -> TypeError "type error: or expects booleans" 95 | not a env = case a env 96 | of BoolExpression a' -> BoolExpression (Data.Bool.not a') 97 | _ -> TypeError "type error: not expects a boolean" 98 | 99 | instance Comparisons.ComparisonDomain Domain Domain where 100 | greaterThan a b env = BoolExpression (a env > b env) 101 | lessThan a b env = BoolExpression (a env < b env) 102 | equal a b env = BoolExpression (a env == b env) 103 | greaterOrEqual a b env = BoolExpression (a env >= b env) 104 | lessOrEqual a b env = BoolExpression (a env <= b env) 105 | 106 | instance Conditionals.ConditionalDomain Domain Domain where 107 | ifThenElse test t f env = case test env 108 | of BoolExpression True -> t env 109 | BoolExpression False -> f env 110 | _ -> TypeError "type
error: if expects a boolean" 111 | 112 | instance Lambda.LambdaDomain (Env -> Tagged) where 113 | apply f arg env = case (f env, arg env) 114 | of (FunctionExpression f', x) -> f' x 115 | (f', _) -> TypeError ("Applying a non-function " ++ show f') 116 | lambda v body env = FunctionExpression (\arg-> body (Map.insert v arg env)) 117 | var v env = Map.findWithDefault (TypeError $ "Free variable " ++ show v) v env 118 | 119 | instance (Show (f Domain), Show (f String)) => Show (Expression f) where 120 | showsPrec prec g rest = "Expression{expr=" ++ showsPrec prec (expr g) 121 | (", arithmeticGrammar=" ++ showsPrec prec (arithmeticGrammar g) 122 | (", booleanGrammar=" ++ showsPrec prec (booleanGrammar g) 123 | (", comparisonGrammar=" ++ showsPrec prec (comparisonGrammar g) 124 | (", conditionalGrammar=" ++ showsPrec prec (conditionalGrammar g) 125 | (", lambdaGrammar=" ++ showsPrec prec (lambdaGrammar g) ("}" ++ rest)))))) 126 | 127 | $(Rank2.TH.deriveAll ''Expression) 128 | 129 | instance TokenParsing (Parser Expression String) 130 | instance LexicalParsing (Parser Expression String) 131 | 132 | {- The Rank.TH.deriveAll splice above inserts the following declarations: 133 | 134 | instance Rank2.Functor Expression where 135 | f <$> g = g{expr= f (expr g), 136 | term= f (term g), 137 | primary= f (primary g), 138 | arithmeticGrammar= Rank2.fmap f (arithmeticGrammar g), 139 | booleanGrammar= Rank2.fmap f (booleanGrammar g), 140 | comparisonGrammar= Rank2.fmap f (comparisonGrammar g), 141 | conditionalGrammar= Rank2.fmap f (conditionalGrammar g), 142 | lambdaGrammar= Rank2.fmap f (lambdaGrammar g)} 143 | 144 | instance Rank2.Apply Expression where 145 | a <*> b = Expression{expr= expr a `Rank2.apply` expr b, 146 | term= term a `Rank2.apply` term b, 147 | primary= primary a `Rank2.apply` primary b, 148 | arithmeticGrammar= arithmeticGrammar a `Rank2.ap` arithmeticGrammar b, 149 | booleanGrammar= booleanGrammar a `Rank2.ap` booleanGrammar b, 150 | comparisonGrammar= comparisonGrammar a `Rank2.ap` comparisonGrammar b, 151 | conditionalGrammar= conditionalGrammar a `Rank2.ap` conditionalGrammar b, 152 | lambdaGrammar= lambdaGrammar a `Rank2.ap` lambdaGrammar b} 153 | 154 | instance Rank2.Applicative Expression where 155 | pure f = Expression{expr= f, 156 | term= f, 157 | primary= f, 158 | arithmeticGrammar= Rank2.pure f, 159 | booleanGrammar= Rank2.pure f, 160 | comparisonGrammar= Rank2.pure f, 161 | conditionalGrammar= Rank2.pure f, 162 | lambdaGrammar= Rank2.pure f} 163 | 164 | instance Rank2.Distributive Expression where 165 | distributeM f = Expression{expr= f >>= expr, 166 | term= f >>= term, 167 | primary= f >>= primary, 168 | arithmeticGrammar= Rank2.distributeM (arithmeticGrammar <$> f), 169 | booleanGrammar= Rank2.distributeM (booleanGrammar <$> f), 170 | comparisonGrammar= Rank2.distributeM (comparisonGrammar <$> f), 171 | conditionalGrammar= Rank2.distributeM (conditionalGrammar <$> f), 172 | lambdaGrammar= Rank2.distributeM (lambdaGrammar <$> f)} 173 | cotraverse w f = Expression{expr= w (expr <$> f), 174 | term= w (term <$> f), 175 | primary= w (primary <$> f), 176 | arithmeticGrammar= Rank2.cotraverse w (arithmeticGrammar <$> f), 177 | booleanGrammar= Rank2.cotraverse w (booleanGrammar <$> f), 178 | comparisonGrammar= Rank2.cotraverse w (comparisonGrammar <$> f), 179 | conditionalGrammar= Rank2.cotraverse w (conditionalGrammar <$> f), 180 | lambdaGrammar= Rank2.cotraverse w (lambdaGrammar <$> f)} 181 | 182 | instance Rank2.Foldable Expression where 183 | foldMap f g = f (expr g) <> f 
(term g) <> f (primary g) 184 | <> Rank2.foldMap f (arithmeticGrammar g) <> Rank2.foldMap f (booleanGrammar g) 185 | <> Rank2.foldMap f (comparisonGrammar g) <> Rank2.foldMap f (conditionalGrammar g) 186 | <> Rank2.foldMap f (lambdaGrammar g) 187 | 188 | instance Rank2.Traversable Expression where 189 | traverse f g = Expression 190 | <$> f (expr g) 191 | <*> f (term g) 192 | <*> f (primary g) 193 | <*> Rank2.traverse f (arithmeticGrammar g) 194 | <*> Rank2.traverse f (booleanGrammar g) 195 | <*> Rank2.traverse f (comparisonGrammar g) 196 | <*> Rank2.traverse f (conditionalGrammar g) 197 | <*> Rank2.traverse f (lambdaGrammar g) 198 | -} 199 | 200 | expression :: (Rank2.Apply g, LexicalParsing (Parser g String)) => GrammarBuilder Expression g Parser String 201 | expression Expression{..} = 202 | let combinedExpr = Arithmetic.expr arithmeticGrammar 203 | <|> Boolean.expr booleanGrammar 204 | <|> Conditionals.expr conditionalGrammar 205 | <|> Lambda.expr lambdaGrammar 206 | combinedTerm = Lambda.application lambdaGrammar 207 | <|> Arithmetic.sum arithmeticGrammar 208 | combinedPrimary = Arithmetic.primary arithmeticGrammar 209 | <|> Boolean.factor booleanGrammar 210 | <|> Lambda.primary lambdaGrammar 211 | in Expression{expr= combinedExpr, 212 | term= combinedTerm, 213 | primary= combinedPrimary, 214 | arithmeticGrammar= Arithmetic.arithmetic arithmeticGrammar{ 215 | Arithmetic.expr= expr, 216 | Arithmetic.primary= primary}, 217 | booleanGrammar= Boolean.boolean (Comparisons.test comparisonGrammar) booleanGrammar, 218 | comparisonGrammar= Comparisons.comparisons comparisonGrammar{ 219 | Comparisons.term= Arithmetic.expr arithmeticGrammar}, 220 | conditionalGrammar= Conditionals.conditionals conditionalGrammar{ 221 | Conditionals.test= Boolean.expr booleanGrammar, 222 | Conditionals.term= expr}, 223 | lambdaGrammar= Lambda.lambdaCalculus lambdaGrammar{ 224 | Lambda.expr= expr, 225 | Lambda.application= term, 226 | Lambda.primary= primary}} 227 | -------------------------------------------------------------------------------- /grammatical-parsers/examples/Comparisons.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, RecordWildCards, ScopedTypeVariables #-} 2 | module Comparisons where 3 | 4 | import Control.Applicative 5 | import Data.Monoid ((<>)) 6 | import Text.Parser.Token (TokenParsing, symbol) 7 | 8 | import qualified Rank2 9 | import Text.Grampa 10 | 11 | class ComparisonDomain c e where 12 | greaterThan :: c -> c -> e 13 | lessThan :: c -> c -> e 14 | equal :: c -> c -> e 15 | greaterOrEqual :: c -> c -> e 16 | lessOrEqual :: c -> c -> e 17 | 18 | instance Ord c => ComparisonDomain c Bool where 19 | greaterThan a b = a > b 20 | lessThan a b = a < b 21 | equal a b = a == b 22 | greaterOrEqual a b = a >= b 23 | lessOrEqual a b = a <= b 24 | 25 | instance ComparisonDomain [Char] [Char] where 26 | lessThan = infixJoin "<" 27 | lessOrEqual = infixJoin "<=" 28 | equal = infixJoin "==" 29 | greaterOrEqual = infixJoin ">=" 30 | greaterThan = infixJoin ">" 31 | 32 | infixJoin :: String -> String -> String -> String 33 | infixJoin rel a b = a <> rel <> b 34 | 35 | data Comparisons c e f = 36 | Comparisons{test :: f e, 37 | term :: f c} 38 | deriving (Show) 39 | 40 | instance Rank2.Functor (Comparisons c e) where 41 | f <$> g = g{test= f (test g), 42 | term= f (term g)} 43 | 44 | instance Rank2.Apply (Comparisons c e) where 45 | g <*> h = Comparisons{test= test g `Rank2.apply` test h, 46 
| term= term g `Rank2.apply` term h} 47 | 48 | instance Rank2.Applicative (Comparisons c e) where 49 | pure f = Comparisons f f 50 | 51 | instance Rank2.DistributiveTraversable (Comparisons c e) 52 | 53 | instance Rank2.Distributive (Comparisons c e) where 54 | cotraverse w f = Comparisons{test= w (test <$> f), 55 | term= w (term <$> f)} 56 | 57 | instance Rank2.Foldable (Comparisons c e) where 58 | foldMap f g = f (test g) <> f (term g) 59 | 60 | instance Rank2.Traversable (Comparisons c e) where 61 | traverse f g = Comparisons 62 | <$> f (test g) 63 | <*> f (term g) 64 | 65 | comparisons :: (ComparisonDomain c e, LexicalParsing (p g String)) => GrammarBuilder (Comparisons c e) g p String 66 | comparisons Comparisons{..} = 67 | Comparisons{ 68 | test= lessThan <$> term <* symbol "<" <*> term 69 | <|> lessOrEqual <$> term <* symbol "<=" <*> term 70 | <|> equal <$> term <* symbol "==" <*> term 71 | <|> greaterOrEqual <$> term <* symbol ">=" <*> term 72 | <|> greaterThan <$> term <* symbol ">" <*> term, 73 | term= empty} 74 | -------------------------------------------------------------------------------- /grammatical-parsers/examples/Conditionals.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, RecordWildCards, 2 | TypeFamilies, TemplateHaskell #-} 3 | module Conditionals where 4 | 5 | import Control.Applicative 6 | import Data.Monoid ((<>)) 7 | 8 | import qualified Rank2.TH 9 | 10 | import Text.Grampa 11 | import Text.Grampa.ContextFree.SortedMemoizing.LeftRecursive (Parser) 12 | 13 | class ConditionalDomain c e where 14 | ifThenElse :: c -> e -> e -> e 15 | 16 | instance ConditionalDomain Bool e where 17 | ifThenElse True t _ = t 18 | ifThenElse False _ f = f 19 | 20 | instance ConditionalDomain [Char] [Char] where 21 | ifThenElse cond t f = "if " <> cond <> " then " <> t <> " else " <> f 22 | 23 | data Conditionals t e f = Conditionals{expr :: f e, 24 | test :: f t, 25 | term :: f e} 26 | 27 | instance (Show (f t), Show (f e)) => Show (Conditionals t e f) where 28 | showsPrec prec a rest = "Conditionals{expr=" ++ showsPrec prec (expr a) 29 | (", test= " ++ showsPrec prec (test a) 30 | (", term= " ++ showsPrec prec (term a) ("}" ++ rest))) 31 | 32 | $(Rank2.TH.deriveAll ''Conditionals) 33 | 34 | instance TokenParsing (Parser (Conditionals t e) String) 35 | instance LexicalParsing (Parser (Conditionals t e) String) 36 | 37 | conditionals :: (ConditionalDomain t e, LexicalParsing (Parser g String)) 38 | => GrammarBuilder (Conditionals t e) g Parser String 39 | conditionals Conditionals{..} = 40 | Conditionals{expr= ifThenElse <$> (keyword "if" *> test) <*> (keyword "then" *> term) <*> (keyword "else" *> term), 41 | test= empty, 42 | term= empty} 43 | -------------------------------------------------------------------------------- /grammatical-parsers/examples/Lambda.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE FlexibleContexts, FlexibleInstances, UndecidableInstances #-} 2 | {-# LANGUAGE RecordWildCards, ScopedTypeVariables, TemplateHaskell #-} 3 | module Lambda where 4 | 5 | import Control.Applicative 6 | import Data.Char (isAlphaNum, isLetter) 7 | import Data.Map (Map, insert, (!)) 8 | import Data.Monoid ((<>)) 9 | import Text.Parser.Token (symbol, whiteSpace) 10 | 11 | import qualified Rank2 12 | import qualified Rank2.TH 13 | 14 | import Text.Grampa 15 | import Text.Grampa.ContextFree.SortedMemoizing.LeftRecursive 
(Parser) 16 | 17 | class LambdaDomain e where 18 | apply :: e -> e -> e 19 | lambda :: String -> e -> e 20 | var :: String -> e 21 | 22 | data LambdaInitial = ApplyI LambdaInitial LambdaInitial 23 | | LambdaI String LambdaInitial 24 | | VarI String 25 | deriving Show 26 | 27 | data LambdaDeBruin = ApplyB LambdaDeBruin LambdaDeBruin 28 | | LambdaB LambdaDeBruin 29 | | VarB Int 30 | deriving Show 31 | 32 | data LambdaPHOAS a = ApplyP (LambdaPHOAS a) (LambdaPHOAS a) 33 | | LambdaP (a -> LambdaPHOAS a) 34 | | VarP a 35 | 36 | instance LambdaDomain (Map String a -> [a] -> a) where 37 | apply f arg env stack = f env (arg env [] : stack) 38 | lambda v body env (arg:stack) = body (insert v arg env) stack 39 | var v env _stack = env ! v 40 | 41 | reduceNormallyI :: Map String a -> [a] -> LambdaInitial -> a 42 | reduceNormallyI env stack (ApplyI f arg) = reduceNormallyI env (reduceNormallyI env stack arg : stack) f 43 | reduceNormallyI env (arg:stack) (LambdaI v body) = reduceNormallyI (insert v arg env) stack body 44 | reduceNormallyI env _stack (VarI v) = env ! v 45 | 46 | reduceNormallyP :: (a -> LambdaPHOAS a) -> LambdaPHOAS a -> LambdaPHOAS a 47 | reduceNormallyP use (ApplyP f arg) = case reduceNormallyP use f 48 | of LambdaP f' -> reduceNormallyP use (reduceNormallyP f' arg) 49 | x -> ApplyP x arg 50 | reduceNormallyP use (VarP x) = use x 51 | reduceNormallyP _ x@LambdaP{} = x 52 | 53 | instance LambdaDomain LambdaInitial where 54 | apply = ApplyI 55 | lambda = LambdaI 56 | var = VarI 57 | 58 | instance LambdaDomain (Map String a -> LambdaPHOAS a) where 59 | apply f arg env = ApplyP (f env) (arg env) 60 | lambda v body env = LambdaP (\x-> body $ insert v x env) 61 | var v env = VarP (env ! v) 62 | 63 | instance LambdaDomain (Map String Int -> Int -> LambdaDeBruin) where 64 | apply f arg env depth = ApplyB (f env depth) (arg env depth) 65 | lambda v body env depth = LambdaB (body (insert v (succ depth) env) (succ depth)) 66 | var v env depth = VarB (depth - env ! v) 67 | 68 | {- 69 | instance LambdaDomain (Map String e -> [e] -> e) where 70 | apply f arg env stack = f env (arg env stack : stack) 71 | lambda v body env (arg : stack) = body (insert v arg env) stack 72 | var v map = (map ! v) map 73 | -} 74 | 75 | instance LambdaDomain String where 76 | apply f arg = f ++ " " ++ arg 77 | lambda v body = "(\\" ++ v ++ ". 
" ++ body ++ ")" 78 | var v = v 79 | 80 | data Lambda e f = 81 | Lambda{ 82 | expr :: f e, 83 | abstraction :: f e, 84 | application :: f e, 85 | argument :: f e, 86 | primary :: f e, 87 | varName :: f String} 88 | 89 | instance (Show (f e), Show (f String)) => Show (Lambda e f) where 90 | showsPrec prec g rest = "Lambda{expr=" ++ showsPrec prec (expr g) 91 | (", abstraction=" ++ showsPrec prec (abstraction g) 92 | (", application=" ++ showsPrec prec (application g) 93 | (", argument=" ++ showsPrec prec (application g) 94 | (", primary=" ++ showsPrec prec (primary g) 95 | (", varName=" ++ showsPrec prec (varName g) ("}" ++ rest)))))) 96 | 97 | $(Rank2.TH.deriveAll ''Lambda) 98 | 99 | lambdaCalculus :: (Rank2.Apply g, LexicalParsing (Parser g String), LambdaDomain e) => 100 | GrammarBuilder (Lambda e) g Parser String 101 | lambdaCalculus Lambda{..} = Lambda{ 102 | expr= abstraction, 103 | abstraction= lambda <$> (symbol "\\" *> varName <* symbol "->") <*> abstraction 104 | <|> application, 105 | application= apply <$> application <*> argument 106 | <|> argument, 107 | argument= symbol "(" *> expr <* symbol ")" 108 | <|> primary, 109 | primary= var <$> varName, 110 | varName= whiteSpace *> takeCharsWhile1 isLetter <> takeCharsWhile isAlphaNum} 111 | -------------------------------------------------------------------------------- /grammatical-parsers/examples/Main.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE FlexibleContexts, FlexibleInstances, RankNTypes, KindSignatures, UndecidableInstances #-} 2 | module Main (main, arithmetic, comparisons, boolean, conditionals) where 3 | 4 | import System.Environment (getArgs) 5 | import Data.Functor.Compose (Compose(..)) 6 | import Data.Map (Map) 7 | import qualified Rank2 8 | import Text.Grampa (TokenParsing, LexicalParsing, GrammarBuilder, ParseResults, fixGrammar, parseComplete) 9 | import Text.Grampa.ContextFree.SortedMemoizing.LeftRecursive (Parser) 10 | import Arithmetic (Arithmetic, arithmetic) 11 | import qualified Arithmetic 12 | import qualified Boolean 13 | import qualified Comparisons 14 | import qualified Conditionals 15 | import qualified Combined 16 | import qualified Lambda 17 | 18 | type ArithmeticComparisons = Rank2.Product (Arithmetic.Arithmetic Int) (Comparisons.Comparisons Int Bool) 19 | type ArithmeticComparisonsBoolean = Rank2.Product ArithmeticComparisons (Boolean.Boolean Bool) 20 | type ACBC = Rank2.Product ArithmeticComparisonsBoolean (Conditionals.Conditionals Bool Int) 21 | 22 | main :: IO () 23 | main = do args <- concat <$> getArgs 24 | -- let a = fixGrammar (Arithmetic.arithmetic (production id Arithmetic.expr a)) 25 | -- let a = fixGrammar (Arithmetic.arithmetic (recursive $ Arithmetic.expr a)) 26 | print (getCompose . Lambda.expr $ parseComplete (fixGrammar Lambda.lambdaCalculus) args 27 | :: ParseResults String [Lambda.LambdaInitial]) 28 | -- print (((\f-> f (mempty :: Map String Int) [1 :: Int]) <$>) <$> parse (fixGrammar Lambda.lambdaCalculus) Lambda.expr args :: ParseResults String Int) 29 | print (getCompose . Arithmetic.expr $ parseComplete (fixGrammar arithmetic) args :: ParseResults String [Int]) 30 | print (getCompose . Comparisons.test . Rank2.snd $ parseComplete (fixGrammar comparisons) args 31 | :: ParseResults String [Bool]) 32 | print (getCompose . Boolean.expr . Rank2.snd $ 33 | parseComplete (fixGrammar boolean) args :: ParseResults String [Bool]) 34 | print (getCompose . Conditionals.expr . 
Rank2.snd $ parseComplete (fixGrammar conditionals) args 35 | :: ParseResults String [Int]) 36 | print (((\f-> f (mempty :: Map String Combined.Tagged)) <$>) 37 | <$> (getCompose . Combined.expr $ parseComplete (fixGrammar Combined.expression) args) 38 | :: ParseResults String [Combined.Tagged]) 39 | 40 | comparisons :: (LexicalParsing (Parser g String)) => GrammarBuilder ArithmeticComparisons g Parser String 41 | comparisons (Rank2.Pair a c) = 42 | Rank2.Pair (Arithmetic.arithmetic a) (Comparisons.comparisons c{Comparisons.term= Arithmetic.expr a}) 43 | 44 | boolean :: (LexicalParsing (Parser g String)) => GrammarBuilder ArithmeticComparisonsBoolean g Parser String 45 | boolean (Rank2.Pair ac b) = Rank2.Pair (comparisons ac) (Boolean.boolean (Comparisons.test $ Rank2.snd ac) b) 46 | 47 | conditionals :: (LexicalParsing (Parser g String)) => GrammarBuilder ACBC g Parser String 48 | conditionals (Rank2.Pair acb c) = 49 | Rank2.Pair 50 | (boolean acb) 51 | (Conditionals.conditionals c{Conditionals.test= Boolean.expr (Rank2.snd acb), 52 | Conditionals.term= Arithmetic.expr (Rank2.fst $ Rank2.fst acb)}) 53 | 54 | instance TokenParsing (Parser ArithmeticComparisons String) 55 | instance TokenParsing (Parser ArithmeticComparisonsBoolean String) 56 | instance TokenParsing (Parser ACBC String) 57 | instance TokenParsing (Parser (Lambda.Lambda Lambda.LambdaInitial) String) 58 | 59 | instance LexicalParsing (Parser ArithmeticComparisons String) 60 | instance LexicalParsing (Parser ArithmeticComparisonsBoolean String) 61 | instance LexicalParsing (Parser ACBC String) 62 | instance LexicalParsing (Parser (Lambda.Lambda Lambda.LambdaInitial) String) 63 | 64 | -------------------------------------------------------------------------------- /grammatical-parsers/examples/Utilities.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE FlexibleContexts, FlexibleInstances, KindSignatures, RankNTypes, ScopedTypeVariables #-} 2 | module Utilities where 3 | 4 | import Data.Functor.Compose (Compose(..)) 5 | import Data.Monoid ((<>)) 6 | import Data.Monoid.Textual (TextualMonoid, toString) 7 | 8 | import Text.Grampa 9 | import Text.Grampa.ContextFree.Memoizing.LeftRecursive 10 | import qualified Rank2 11 | 12 | parseUnique :: (Ord s, TextualMonoid s, Rank2.Traversable g, Rank2.Distributive g, Rank2.Apply g) => 13 | Grammar g Parser s -> (forall f. g f -> f r) -> s -> r 14 | parseUnique g prod s = 15 | case getCompose (prod $ parseComplete g s) 16 | of Left failure -> error ("Parse failure: " ++ toString (error "non-character") (failureDescription s failure 3)) 17 | Right [x] -> x 18 | 19 | infixJoin :: String -> String -> String -> String 20 | infixJoin op a b = "(" <> a <> op <> b <> ")" 21 | -------------------------------------------------------------------------------- /grammatical-parsers/grammatical-parsers.cabal: -------------------------------------------------------------------------------- 1 | name: grammatical-parsers 2 | version: 0.7.2.1 3 | synopsis: parsers that combine into grammars 4 | description: 5 | /Gram/matical-/pa/rsers, or Grampa for short, is a library of parser types whose values are meant to be assigned 6 | to grammar record fields. All parser types support the same set of parser combinators, but have different semantics 7 | and performance characteristics. 
8 | 9 | homepage: https://github.com/blamario/grampa/tree/master/grammatical-parsers 10 | bug-reports: https://github.com/blamario/grampa/issues 11 | license: BSD3 12 | license-file: LICENSE 13 | author: Mario Blažević 14 | maintainer: Mario Blažević 15 | copyright: (c) 2017 Mario Blažević 16 | category: Text, Parsing 17 | build-type: Custom 18 | cabal-version: >=1.10 19 | tested-with: GHC==9.12.1, GHC==9.10.1, GHC==9.8.2, GHC==9.6.4, GHC==9.4.8, GHC==9.2.8, GHC==9.0.2, GHC==8.10.7 20 | extra-source-files: README.md, CHANGELOG.md 21 | source-repository head 22 | type: git 23 | location: https://github.com/blamario/grampa 24 | custom-setup 25 | setup-depends: 26 | base >= 4 && <5, 27 | Cabal < 4, 28 | cabal-doctest >= 1 && <1.1 29 | 30 | library 31 | hs-source-dirs: src 32 | exposed-modules: Text.Grampa, 33 | Text.Grampa.Combinators, 34 | Text.Grampa.PEG.Backtrack, Text.Grampa.PEG.Packrat, 35 | Text.Grampa.ContextFree.Continued, Text.Grampa.ContextFree.Parallel, 36 | Text.Grampa.ContextFree.Memoizing, 37 | Text.Grampa.ContextFree.Memoizing.LeftRecursive, 38 | Text.Grampa.ContextFree.SortedMemoizing, 39 | Text.Grampa.ContextFree.SortedMemoizing.LeftRecursive, 40 | Text.Grampa.ContextFree.SortedMemoizing.Transformer, 41 | Text.Grampa.ContextFree.SortedMemoizing.Transformer.LeftRecursive, 42 | Text.Grampa.ContextFree.LeftRecursive, 43 | Text.Grampa.ContextFree.LeftRecursive.Transformer 44 | other-modules: Text.Grampa.Class, 45 | Text.Grampa.Internal, Text.Grampa.Internal.LeftRecursive, Text.Grampa.Internal.Storable, 46 | Text.Grampa.PEG.Backtrack.Measured, 47 | Text.Grampa.PEG.Continued, Text.Grampa.PEG.Continued.Measured, 48 | Text.Grampa.ContextFree.Continued.Measured 49 | default-language: Haskell2010 50 | ghc-options: -Wall 51 | build-depends: base >=4.9 && <5, 52 | transformers >= 0.5 && < 0.7, 53 | monoid-subclasses >=1.0 && <1.3, 54 | parsers < 0.13, 55 | input-parsers < 0.4, 56 | attoparsec >= 0.13 && < 0.15, 57 | witherable >= 0.4 && < 0.6, 58 | rank2classes >= 1.4.6 && < 1.6 59 | 60 | executable arithmetic 61 | hs-source-dirs: examples 62 | main-is: Main.hs 63 | other-modules: Arithmetic, Boolean, Combined, Comparisons, Conditionals, Lambda, Utilities 64 | default-language: Haskell2010 65 | build-depends: base >=4.9 && <5, containers >= 0.5.7.0 && < 0.9, 66 | parsers < 0.13, 67 | rank2classes >= 1.0.2 && < 1.6, grammatical-parsers, 68 | monoid-subclasses 69 | 70 | executable boolean-transformations 71 | hs-source-dirs: examples 72 | main-is: BooleanTransformations.hs 73 | other-modules: Boolean, Utilities 74 | default-language: Haskell2010 75 | build-depends: base >=4.9 && <5, containers >= 0.5.7.0 && < 0.9, 76 | parsers < 0.13, 77 | rank2classes >= 1.0.2 && < 1.6, grammatical-parsers, 78 | monoid-subclasses 79 | 80 | test-suite quicktests 81 | type: exitcode-stdio-1.0 82 | hs-source-dirs: test, examples 83 | x-uses-tf: true 84 | build-depends: base >=4.9 && < 5, containers >= 0.5.7.0 && < 0.9, 85 | monoid-subclasses, parsers < 0.13, 86 | witherable >= 0.4 && < 0.6, 87 | rank2classes >= 1.0.2 && < 1.6, grammatical-parsers, 88 | QuickCheck >= 2 && < 3, checkers >= 0.4.6 && < 0.6, 89 | tasty >= 0.7, tasty-quickcheck >= 0.7 90 | main-is: Test.hs 91 | other-modules: Test.Ambiguous, Test.Examples, 92 | Arithmetic, Boolean, Combined, Comparisons, Conditionals, Lambda, Utilities 93 | default-language: Haskell2010 94 | 95 | test-suite doctests 96 | type: exitcode-stdio-1.0 97 | hs-source-dirs: test 98 | default-language: Haskell2010 99 | main-is: Doctest.hs 100 | other-modules: README 101 | 
ghc-options: -threaded -pgmL markdown-unlit 102 | build-depends: base, rank2classes, grammatical-parsers, doctest >= 0.8 103 | build-tool-depends: markdown-unlit:markdown-unlit >= 0.5 && < 0.6 104 | 105 | benchmark benchmarks 106 | type: exitcode-stdio-1.0 107 | hs-source-dirs: test, examples 108 | ghc-options: -O2 -Wall -rtsopts -main-is Benchmark.main 109 | Build-Depends: base >=4.9 && < 5, rank2classes >= 1.0.2 && < 1.6, grammatical-parsers, 110 | monoid-subclasses, parsers < 0.13, 111 | criterion >= 1.0, deepseq >= 1.1, containers >= 0.5.7.0 && < 0.9, text >= 1.1 112 | main-is: Benchmark.hs 113 | other-modules: Main, Arithmetic, Boolean, Combined, Comparisons, Conditionals, Lambda, Utilities 114 | default-language: Haskell2010 115 | -------------------------------------------------------------------------------- /grammatical-parsers/src/Text/Grampa.hs: -------------------------------------------------------------------------------- 1 | -- | This library consists of a collection of parsing algorithms and a common interface for representing grammars as 2 | -- records with rank-2 field types. 3 | -- 4 | -- To implement a grammar, first determine if it is a context-free grammar or perhaps a parsing expression grammar. In 5 | -- the latter case, you should import your parser type from either "Text.Grampa.PEG.Backtrack" or the 6 | -- "Text.Grampa.PEG.Packrat" module. The former is faster on simple grammars but may require exponential time on more 7 | -- complex cases. The Packrat parser, on the other hand, guarantees linear time complexity but has more overhead and 8 | -- consumes more memory. 9 | -- 10 | -- If your grammar is context-free, there are more possibilities to choose from: 11 | -- 12 | -- * If the grammar is neither left-recursive nor ambiguous, you can import your parser type from 13 | -- "Text.Grampa.ContextFree.Continued". 14 | -- * If the grammar is ambiguous and you need to see all the results, there's "Text.Grampa.ContextFree.Parallel". 15 | -- * For a complex but non-left-recursive grammar, you can use "Text.Grampa.ContextFree.SortedMemoizing". 16 | -- * If you need to carry a monadic computation, there's "Text.Grampa.ContextFree.SortedMemoizing.Transformer". 17 | -- * If the grammar is left-recursive, "Text.Grampa.ContextFree.SortedMemoizing.LeftRecursive" is the ticket. 18 | -- * If the grammar is left-recursive /and/ you require monadic context, the final option is 19 | -- "Text.Grampa.ContextFree.SortedMemoizing.Transformer.LeftRecursive". 20 | -- 21 | -- Regardless of the chosen parser type, you'll construct your grammar the same way. A grammar is a set of productions 22 | -- using the same parser type, collected and abstracted inside a rank-2 record type. Each production is built using 23 | -- the standard parser combinators from the usual 'Applicative' and 'Alternative' classes, plus some additional 24 | -- [classes](#g:classes) provided by this library. The 'Monad' operations are available as well, but should not be 25 | -- used in left-recursive positions. 26 | -- 27 | -- Once the grammar is complete, you can use 'parseComplete' or 'parsePrefix' to apply it to your input. 
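--
-- A minimal sketch of that workflow, adapted from the bundled @examples@ directory (the @Arithmetic@ grammar and
-- its @expr@ production are defined there, not in this module, and @parseSum@ is just an illustrative name):
--
-- > import Data.Functor.Compose (Compose(..))
-- > import qualified Arithmetic
-- >
-- > parseSum :: String -> ParseResults String [Int]
-- > parseSum = getCompose . Arithmetic.expr . parseComplete (fixGrammar Arithmetic.arithmetic)
-- >
-- > -- parseSum "1+2*3" should evaluate to Right [7]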
28 | 29 | {-# LANGUAGE FlexibleContexts, KindSignatures, OverloadedStrings, RankNTypes, ScopedTypeVariables, 30 | TypeFamilies, TypeOperators #-} 31 | module Text.Grampa ( 32 | -- * Applying parsers 33 | failureDescription, simply, 34 | -- * Types 35 | Grammar, GrammarBuilder, GrammarOverlay, ParseResults, ParseFailure(..), FailureDescription(..), Ambiguous(..), Pos, 36 | -- * Classes #classes# 37 | -- ** Parsing 38 | DeterministicParsing(..), AmbiguousParsing(..), CommittedParsing(..), TraceableParsing(..), 39 | LexicalParsing(..), 40 | -- ** Grammars 41 | MultiParsing(..), GrammarParsing(..), overlay, 42 | -- ** From the [input-parsers](http://hackage.haskell.org/package/input-parsers) library 43 | InputParsing(..), InputCharParsing(..), ConsumedInputParsing(..), Position(..), 44 | -- ** From the [parsers](http://hackage.haskell.org/package/parsers) library 45 | module Text.Parser.Char, 46 | module Text.Parser.Combinators, 47 | module Text.Parser.LookAhead, 48 | TokenParsing(..), 49 | -- * Other combinators 50 | module Text.Grampa.Combinators) 51 | where 52 | 53 | import Data.List (intersperse) 54 | import Data.Kind (Type) 55 | import Data.Monoid ((<>), Endo (Endo, appEndo)) 56 | import Data.Monoid.Factorial (drop) 57 | import Data.Monoid.Null (null) 58 | import Data.Monoid.Textual (TextualMonoid) 59 | import Data.String (IsString(fromString)) 60 | import Text.Parser.Char (CharParsing(char, notChar, anyChar)) 61 | import Text.Parser.Combinators (Parsing((<?>), notFollowedBy, skipMany, skipSome, unexpected)) 62 | import Text.Parser.LookAhead (LookAheadParsing(lookAhead)) 63 | import Text.Parser.Token (TokenParsing(..)) 64 | import Text.Parser.Input.Position (Position) 65 | import qualified Text.Parser.Input.Position as Position 66 | import Text.Grampa.Combinators (concatMany, concatSome) 67 | 68 | import qualified Rank2 69 | import Text.Grampa.Class (MultiParsing(..), GrammarParsing(..), 70 | InputParsing(..), InputCharParsing(..), 71 | ConsumedInputParsing(..), LexicalParsing(..), 72 | CommittedParsing(..), DeterministicParsing(..), 73 | AmbiguousParsing(..), Ambiguous(..), 74 | ParseResults, ParseFailure(..), FailureDescription(..), Pos) 75 | import Text.Grampa.Internal (TraceableParsing(..)) 76 | 77 | import Prelude hiding (drop, null) 78 | 79 | -- | A grammar is a record type @g@ whose fields are parsers of type @p@ on input streams of type @s@. A value of a 80 | -- @Grammar@ type is typically produced by applying 'fixGrammar' or 'overlay' to a 'GrammarBuilder'. 81 | type Grammar (g :: (Type -> Type) -> Type) p s = g (p g s) 82 | 83 | -- | A @GrammarBuilder g g' p s@ is an endomorphic function on a grammar @g@, whose parsers of type @p@ build on 84 | -- grammars of type @g'@ and parse an input stream of type @s@. Grammar parameters @g@ and @g'@ are typically 85 | -- identical in simple monolithic grammars, but when composing complex grammars the first grammar parameter @g@ would 86 | -- be just a building block for the final grammar @g'@. 87 | type GrammarBuilder (g :: (Type -> Type) -> Type) 88 | (g' :: (Type -> Type) -> Type) 89 | (p :: ((Type -> Type) -> Type) -> Type -> Type -> Type) 90 | (s :: Type) 91 | = g (p g' s) -> g (p g' s) 92 | 93 | -- | A grammar overlay is a function that takes a final grammar @self@ and the parent grammar @super@ and builds a new 94 | -- grammar from them. Use 'overlay' to apply a collection of overlays on top of a base grammar. 
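--
-- As a rough sketch of the idea (the field @expr@, the combinator @symbol@, and the base builder @baseGrammar@ are
-- all hypothetical here), an overlay receives the final grammar @self@ and its parent @super@, so it can extend an
-- inherited production while still referring to the fully overlaid one:
--
-- > addParentheses self super = super{expr= expr super <|> symbol "(" *> expr self <* symbol ")"}
-- > extendedGrammar = overlay baseGrammar [addParentheses]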
95 | type GrammarOverlay (g :: (Type -> Type) -> Type) 96 | (m :: Type -> Type) 97 | = g m -> g m -> g m 98 | 99 | -- | Layers a sequence of 'GrammarOverlay' on top of a base 'GrammarBuilder' to produce a new grammar. 100 | overlay :: (GrammarParsing m, g ~ ParserGrammar m, GrammarConstraint m g, Rank2.Distributive g, Foldable f) 101 | => (g m -> g m) -> f (GrammarOverlay g m) -> g m 102 | overlay base layers = appEndo (foldMap (Endo . ($ self)) layers) (base self) 103 | where self = selfReferring 104 | 105 | -- | Apply the given parsing function (typically `parseComplete` or `parsePrefix`) to the given grammar-agnostic 106 | -- parser and its input. A typical invocation might be 107 | -- 108 | -- > getCompose $ simply parsePrefix myParser myInput 109 | simply :: (Rank2.Only r (p (Rank2.Only r) s) -> s -> Rank2.Only r f) -> p (Rank2.Only r) s r -> s -> f r 110 | simply parseGrammar p input = Rank2.fromOnly (parseGrammar (Rank2.Only p) input) 111 | 112 | -- | Given the textual parse input, the parse failure on the input, and the number of preceding lines of context you 113 | -- want to show, produce a human-readable failure description. 114 | failureDescription :: forall s pos. (Ord s, TextualMonoid s, Position pos) => s -> ParseFailure pos s -> Int -> s 115 | failureDescription input (ParseFailure pos (FailureDescription expected inputs) erroneous) contextLineCount = 116 | Position.context input pos contextLineCount 117 | <> mconcat 118 | (intersperse ", but " $ filter (not . null) 119 | [onNonEmpty ("expected " <>) $ oxfordComma " or " ((fromString <$> expected) <> (fromLiteral <$> inputs)), 120 | oxfordComma " and " (fromString <$> erroneous)]) 121 | where oxfordComma :: s -> [s] -> s 122 | oxfordComma _ [] = "" 123 | oxfordComma _ [x] = x 124 | oxfordComma conjunction [x, y] = x <> conjunction <> y 125 | oxfordComma conjunction (x:y:rest) = mconcat (intersperse ", " (x : y : onLast (drop 1 conjunction <>) rest)) 126 | onNonEmpty f x = if null x then x else f x 127 | onLast _ [] = [] 128 | onLast f [x] = [f x] 129 | onLast f (x:xs) = x : onLast f xs 130 | fromLiteral s = "string \"" <> s <> "\"" 131 | -------------------------------------------------------------------------------- /grammatical-parsers/src/Text/Grampa/Combinators.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE TypeFamilies, TypeOperators #-} 2 | -- | A collection of useful parsing combinators not found in dependent libraries. 3 | module Text.Grampa.Combinators (moptional, concatMany, concatSome, someNonEmpty, takeSomeNonEmpty, 4 | flag, count, upto, 5 | delimiter, operator, keyword) where 6 | 7 | import Control.Applicative(Alternative(..)) 8 | import Data.List.NonEmpty (NonEmpty((:|))) 9 | import Data.Monoid (Monoid, (<>)) 10 | import Data.Monoid.Factorial (FactorialMonoid) 11 | import Data.Semigroup (Semigroup(sconcat)) 12 | import Data.Semigroup.Cancellative (LeftReductive) 13 | 14 | import Text.Grampa.Class (InputParsing(ParserInput, string), LexicalParsing(lexicalToken, keyword), 15 | DeterministicParsing(takeMany)) 16 | import Text.Parser.Combinators (Parsing((<?>)), count) 17 | 18 | -- | Attempts to parse a monoidal value; if the argument parser fails, returns 'mempty'. 19 | moptional :: (Alternative p, Monoid a) => p a -> p a 20 | moptional p = p <|> pure mempty 21 | 22 | -- | Zero or more argument occurrences like 'many', with concatenated monoidal results. 
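-- For instance, a hypothetical snake_case identifier parser in the style of the bundled examples concatenates
-- every matched piece into a single result:
--
-- > identifier = takeCharsWhile1 isLetter <> concatMany (string "_" <> takeCharsWhile1 isLetter)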
23 | concatMany :: (Alternative p, Monoid a) => p a -> p a 24 | concatMany p = mconcat <$> many p 25 | 26 | -- | One or more argument occurrences like 'some', with concatenated monoidal results. 27 | concatSome :: (Alternative p, Semigroup a) => p a -> p a 28 | concatSome p = sconcat <$> someNonEmpty p 29 | 30 | -- | One or more argument occurrences like 'some', returned in a 'NonEmpty' list. 31 | someNonEmpty :: Alternative p => p a -> p (NonEmpty a) 32 | someNonEmpty p = (:|) <$> p <*> many p 33 | 34 | -- | The longest sequence of one or more argument occurrences like 'takeSome', returned in a 'NonEmpty' list. 35 | takeSomeNonEmpty :: DeterministicParsing p => p a -> p (NonEmpty a) 36 | takeSomeNonEmpty p = (:|) <$> p <*> takeMany p 37 | 38 | -- | Returns 'True' if the argument parser succeeds and 'False' otherwise. 39 | flag :: Alternative p => p a -> p Bool 40 | flag p = True <$ p <|> pure False 41 | 42 | -- | Parses between 0 and N occurrences of the argument parser in sequence and returns the list of results. 43 | upto :: Alternative p => Int -> p a -> p [a] 44 | upto n p 45 | | n > 0 = (:) <$> p <*> upto (pred n) p 46 | <|> pure [] 47 | | otherwise = pure [] 48 | 49 | -- | Parses the given delimiter, such as a comma or a brace 50 | delimiter :: (Show s, FactorialMonoid s, LeftReductive s, s ~ ParserInput m, LexicalParsing m) => s -> m s 51 | delimiter s = lexicalToken (string s) <?> ("delimiter " <> show s) 52 | 53 | -- | Parses the given operator symbol 54 | operator :: (Show s, FactorialMonoid s, LeftReductive s, s ~ ParserInput m, LexicalParsing m) => s -> m s 55 | operator s = lexicalToken (string s) <?> ("operator " <> show s) 56 | -------------------------------------------------------------------------------- /grammatical-parsers/src/Text/Grampa/ContextFree/LeftRecursive.hs: -------------------------------------------------------------------------------- 1 | -- | A context-free memoizing parser that can handle left-recursive grammars. 2 | module Text.Grampa.ContextFree.LeftRecursive 3 | {-# DEPRECATED "Use Text.Grampa.ContextFree.SortedMemoizing.LeftRecursive instead" #-} 4 | (module Text.Grampa.ContextFree.SortedMemoizing.LeftRecursive) 5 | where 6 | 7 | import Text.Grampa.ContextFree.SortedMemoizing.LeftRecursive 8 | -------------------------------------------------------------------------------- /grammatical-parsers/src/Text/Grampa/ContextFree/LeftRecursive/Transformer.hs: -------------------------------------------------------------------------------- 1 | -- | A context-free parser that can handle left-recursive grammars and carry a 2 | -- monadic computation with each parsing result. 
3 | module Text.Grampa.ContextFree.LeftRecursive.Transformer 4 | {-# DEPRECATED "Use Text.Grampa.ContextFree.SortedMemoizing.Transformer.LeftRecursive instead" #-} 5 | (module Text.Grampa.ContextFree.SortedMemoizing.Transformer.LeftRecursive) 6 | where 7 | 8 | import Text.Grampa.ContextFree.SortedMemoizing.Transformer.LeftRecursive 9 | -------------------------------------------------------------------------------- /grammatical-parsers/src/Text/Grampa/ContextFree/Memoizing/LeftRecursive.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE ConstraintKinds, CPP, FlexibleContexts, FlexibleInstances, GADTs, GeneralizedNewtypeDeriving, InstanceSigs, 2 | RankNTypes, ScopedTypeVariables, StandaloneDeriving, TypeApplications, TypeFamilies, TypeOperators, 3 | UndecidableInstances #-} 4 | -- | A context-free memoizing parser that can handle left-recursive grammars. 5 | module Text.Grampa.ContextFree.Memoizing.LeftRecursive ( 6 | Fixed, Parser, SeparatedParser(..), 7 | autochain, liftPositive, liftPure, mapPrimitive, 8 | longest, peg, terminalPEG, 9 | parseSeparated, separated) 10 | where 11 | 12 | import Text.Grampa.Internal.LeftRecursive (Fixed(..), SeparatedParser(..), 13 | autochain, asLeaf, liftPositive, liftPure, mapPrimitive, 14 | parseSeparated, separated) 15 | import Text.Grampa.ContextFree.Memoizing (ResultList (..)) 16 | import qualified Text.Grampa.ContextFree.Memoizing as Memoizing 17 | import qualified Text.Grampa.PEG.Backtrack.Measured as Backtrack 18 | 19 | -- | A parser for left-recursive grammars on top of the memoizing 'Memoizing.Parser' 20 | type Parser = Fixed Memoizing.Parser 21 | 22 | -- | Turns a context-free parser into a backtracking PEG parser that consumes the longest possible prefix of the list 23 | -- of input tails, opposite of 'peg' 24 | longest :: Fixed Memoizing.Parser g s a -> Fixed Backtrack.Parser g [(s, g (ResultList g s))] a 25 | longest (PositiveDirectParser p) = PositiveDirectParser (Memoizing.longest p) 26 | longest p@DirectParser{} = DirectParser{complete= Memoizing.longest (complete p), 27 | direct0= Memoizing.longest (direct0 p), 28 | direct1= Memoizing.longest (direct1 p)} 29 | longest p@Parser{} = asLeaf Parser{ 30 | complete= Memoizing.longest (complete p), 31 | direct= Memoizing.longest (direct p), 32 | direct0= Memoizing.longest (direct0 p), 33 | direct1= Memoizing.longest (direct1 p), 34 | indirect= Memoizing.longest (indirect p), 35 | choices= undefined, 36 | isAmbiguous= Nothing, 37 | cyclicDescendants= cyclicDescendants p} 38 | 39 | -- | Turns a backtracking PEG parser of the list of input tails into a context-free parser, opposite of 'longest' 40 | peg :: Ord s => Fixed Backtrack.Parser g [(s, g (ResultList g s))] a -> Fixed Memoizing.Parser g s a 41 | peg (PositiveDirectParser p) = PositiveDirectParser (Memoizing.peg p) 42 | peg p@DirectParser{} = DirectParser{complete= Memoizing.peg (complete p), 43 | direct0= Memoizing.peg (direct0 p), 44 | direct1= Memoizing.peg (direct1 p)} 45 | peg p@Parser{} = asLeaf Parser{ 46 | complete= Memoizing.peg (complete p), 47 | direct= Memoizing.peg (direct p), 48 | direct0= Memoizing.peg (direct0 p), 49 | direct1= Memoizing.peg (direct1 p), 50 | indirect= Memoizing.peg (indirect p), 51 | choices= undefined, 52 | isAmbiguous= Nothing, 53 | cyclicDescendants= cyclicDescendants p} 54 | 55 | -- | Turns a backtracking PEG parser into a context-free parser 56 | terminalPEG :: (Monoid s, Ord s) => Fixed Backtrack.Parser g s a -> Fixed Memoizing.Parser g s a 57 | 
terminalPEG (PositiveDirectParser p) = PositiveDirectParser (Memoizing.terminalPEG p) 58 | terminalPEG p@DirectParser{} = DirectParser{complete= Memoizing.terminalPEG (complete p), 59 | direct0= Memoizing.terminalPEG (direct0 p), 60 | direct1= Memoizing.terminalPEG (direct1 p)} 61 | terminalPEG p@Parser{} = asLeaf Parser{ 62 | complete= Memoizing.terminalPEG (complete p), 63 | direct= Memoizing.terminalPEG (direct p), 64 | direct0= Memoizing.terminalPEG (direct0 p), 65 | direct1= Memoizing.terminalPEG (direct1 p), 66 | indirect= Memoizing.terminalPEG (indirect p), 67 | choices= undefined, 68 | isAmbiguous= Nothing, 69 | cyclicDescendants= cyclicDescendants p} 70 | -------------------------------------------------------------------------------- /grammatical-parsers/src/Text/Grampa/ContextFree/SortedMemoizing/LeftRecursive.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE ConstraintKinds, CPP, FlexibleContexts, FlexibleInstances, GADTs, GeneralizedNewtypeDeriving, InstanceSigs, 2 | RankNTypes, ScopedTypeVariables, StandaloneDeriving, TypeApplications, TypeFamilies, TypeOperators, 3 | UndecidableInstances #-} 4 | -- | A context-free memoizing parser that can handle ambiguous left-recursive grammars. 5 | module Text.Grampa.ContextFree.SortedMemoizing.LeftRecursive ( 6 | Fixed, Parser, SeparatedParser(..), 7 | autochain, liftPositive, liftPure, mapPrimitive, 8 | longest, peg, terminalPEG, 9 | parseSeparated, separated) 10 | where 11 | 12 | import Text.Grampa.Internal.LeftRecursive (Fixed(..), SeparatedParser(..), 13 | autochain, asLeaf, liftPositive, liftPure, mapPrimitive, 14 | parseSeparated, separated) 15 | import Text.Grampa.ContextFree.SortedMemoizing (ResultList (..)) 16 | import qualified Text.Grampa.ContextFree.SortedMemoizing as Memoizing 17 | import qualified Text.Grampa.PEG.Backtrack.Measured as Backtrack 18 | 19 | -- | A parser for left-recursive grammars on top of the sorted memoizing 'Memoizing.Parser'. It's slightly slower than 20 | -- the parser from "Text.Grampa.ContextFree.Memoizing.LeftRecursive" but provides more features. 
21 | type Parser = Fixed Memoizing.Parser 22 | 23 | -- | Turns a context-free parser into a backtracking PEG parser that consumes the longest possible prefix of the list 24 | -- of input tails, opposite of 'peg' 25 | longest :: Fixed Memoizing.Parser g s a -> Fixed Backtrack.Parser g [(s, g (ResultList g s))] a 26 | longest (PositiveDirectParser p) = PositiveDirectParser (Memoizing.longest p) 27 | longest p@DirectParser{} = DirectParser{complete= Memoizing.longest (complete p), 28 | direct0= Memoizing.longest (direct0 p), 29 | direct1= Memoizing.longest (direct1 p)} 30 | longest p@Parser{} = asLeaf Parser{ 31 | complete= Memoizing.longest (complete p), 32 | direct= Memoizing.longest (direct p), 33 | direct0= Memoizing.longest (direct0 p), 34 | direct1= Memoizing.longest (direct1 p), 35 | indirect= Memoizing.longest (indirect p), 36 | choices= undefined, 37 | isAmbiguous= Nothing, 38 | cyclicDescendants= cyclicDescendants p} 39 | 40 | -- | Turns a backtracking PEG parser of the list of input tails into a context-free parser, opposite of 'longest' 41 | peg :: Ord s => Fixed Backtrack.Parser g [(s, g (ResultList g s))] a -> Fixed Memoizing.Parser g s a 42 | peg (PositiveDirectParser p) = PositiveDirectParser (Memoizing.peg p) 43 | peg p@DirectParser{} = DirectParser{complete= Memoizing.peg (complete p), 44 | direct0= Memoizing.peg (direct0 p), 45 | direct1= Memoizing.peg (direct1 p)} 46 | peg p@Parser{} = asLeaf Parser{ 47 | complete= Memoizing.peg (complete p), 48 | direct= Memoizing.peg (direct p), 49 | direct0= Memoizing.peg (direct0 p), 50 | direct1= Memoizing.peg (direct1 p), 51 | indirect= Memoizing.peg (indirect p), 52 | choices= undefined, 53 | isAmbiguous= Nothing, 54 | cyclicDescendants= cyclicDescendants p} 55 | 56 | -- | Turns a backtracking PEG parser into a context-free parser 57 | terminalPEG :: (Monoid s, Ord s) => Fixed Backtrack.Parser g s a -> Fixed Memoizing.Parser g s a 58 | terminalPEG (PositiveDirectParser p) = PositiveDirectParser (Memoizing.terminalPEG p) 59 | terminalPEG p@DirectParser{} = DirectParser{complete= Memoizing.terminalPEG (complete p), 60 | direct0= Memoizing.terminalPEG (direct0 p), 61 | direct1= Memoizing.terminalPEG (direct1 p)} 62 | terminalPEG p@Parser{} = asLeaf Parser{ 63 | complete= Memoizing.terminalPEG (complete p), 64 | direct= Memoizing.terminalPEG (direct p), 65 | direct0= Memoizing.terminalPEG (direct0 p), 66 | direct1= Memoizing.terminalPEG (direct1 p), 67 | indirect= Memoizing.terminalPEG (indirect p), 68 | choices= undefined, 69 | isAmbiguous= Nothing, 70 | cyclicDescendants= cyclicDescendants p} 71 | -------------------------------------------------------------------------------- /grammatical-parsers/src/Text/Grampa/ContextFree/SortedMemoizing/Transformer/LeftRecursive.hs: -------------------------------------------------------------------------------- 1 | {-# Language GADTs #-} 2 | -- | A context-free parser that can handle ambiguous left-recursive grammars and carry a monadic computation with each 3 | -- parsing result. 
4 | module Text.Grampa.ContextFree.SortedMemoizing.Transformer.LeftRecursive ( 5 | ParserT, SeparatedParser(..), AmbiguityDecidable, 6 | lift, liftPositive, tbind, tmap, 7 | autochain, parseSeparated, separated) 8 | where 9 | 10 | import Text.Grampa.Internal.LeftRecursive (Fixed, SeparatedParser(..), 11 | liftPositive, liftPure, mapPrimitive, 12 | autochain, parseSeparated, separated) 13 | import qualified Text.Grampa.ContextFree.SortedMemoizing.Transformer as Transformer 14 | import Text.Grampa.Internal (AmbiguityDecidable) 15 | 16 | -- | Parser transformer for left-recursive grammars on top of 'Transformer.ParserT'. 17 | type ParserT m = Fixed (Transformer.ParserT m) 18 | 19 | -- | Lift a parse-free computation into the parser. 20 | lift :: (Applicative m, Ord s) => m a -> ParserT m g s a 21 | lift = liftPure . Transformer.lift 22 | 23 | -- | Transform the computation carried by the parser using the monadic bind ('>>='). 24 | tbind :: (Monad m, AmbiguityDecidable b) => ParserT m g s a -> (a -> m b) -> ParserT m g s b 25 | tbind p f = mapPrimitive (`Transformer.tbind` f) p 26 | 27 | -- | Transform the computation carried by the parser. 28 | tmap :: AmbiguityDecidable b => (m a -> m b) -> ParserT m g s a -> ParserT m g s b 29 | tmap = mapPrimitive . Transformer.tmap 30 | -------------------------------------------------------------------------------- /grammatical-parsers/src/Text/Grampa/Internal.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE ConstrainedClassMethods, FlexibleContexts, FlexibleInstances, GADTs, 2 | RankNTypes, StandaloneDeriving, TypeOperators, UndecidableInstances #-} 3 | 4 | module Text.Grampa.Internal (BinTree(..), ResultList(..), ResultsOfLength(..), FallibleResults(..), 5 | AmbiguousAlternative(..), AmbiguityDecidable(..), AmbiguityWitness(..), 6 | ParserFlags (ParserFlags, nullable, dependsOn), 7 | Dependencies (DynamicDependencies, StaticDependencies), 8 | TraceableParsing(..), 9 | emptyFailure, erroneous, expected, expectedInput, replaceExpected, noFailure) where 10 | 11 | import Control.Applicative (Applicative(..), Alternative(..)) 12 | import Data.Foldable (toList) 13 | import Data.Functor.Classes (Show1(..)) 14 | import Data.Functor.Const (Const) 15 | import Data.List.NonEmpty (NonEmpty, nonEmpty) 16 | import Data.Monoid (Monoid(mappend, mempty)) 17 | import Data.Ord (Down(Down)) 18 | import Data.Semigroup (Semigroup((<>))) 19 | import Data.Type.Equality ((:~:)(Refl)) 20 | import Witherable (Filterable(mapMaybe)) 21 | 22 | import Text.Grampa.Class (Ambiguous(..), FailureDescription(..), ParseFailure(..), InputParsing(..), Pos) 23 | 24 | import Prelude hiding (length, showList) 25 | 26 | data ResultsOfLength g s r = ResultsOfLength !Int ![(s, g (ResultList g s))] {-# UNPACK #-} !(NonEmpty r) 27 | 28 | data ResultList g s r = ResultList ![ResultsOfLength g s r] (ParseFailure Pos s) 29 | 30 | data BinTree a = Fork !(BinTree a) !(BinTree a) 31 | | Leaf !a 32 | | EmptyTree 33 | deriving (Show) 34 | 35 | data ParserFlags g = ParserFlags { 36 | nullable :: Bool, 37 | dependsOn :: Dependencies g} 38 | 39 | data Dependencies g = DynamicDependencies 40 | | StaticDependencies (g (Const Bool)) 41 | 42 | deriving instance Show (g (Const Bool)) => Show (Dependencies g) 43 | 44 | data AmbiguityWitness a where 45 | AmbiguityWitness :: (a :~: Ambiguous b) -> AmbiguityWitness a 46 | 47 | class AmbiguityDecidable a where 48 | ambiguityWitness :: Maybe (AmbiguityWitness a) 49 | 50 | instance {-# overlappable #-} 
AmbiguityDecidable a where 51 | ambiguityWitness = Nothing 52 | 53 | instance AmbiguityDecidable (Ambiguous a) where 54 | ambiguityWitness = Just (AmbiguityWitness Refl) 55 | 56 | noFailure :: ParseFailure Pos s 57 | noFailure = emptyFailure (Down maxBound) 58 | 59 | emptyFailure :: Pos -> ParseFailure Pos s 60 | emptyFailure pos = ParseFailure pos (FailureDescription [] []) [] 61 | 62 | expected :: Pos -> String -> ParseFailure Pos s 63 | expected pos msg = ParseFailure pos (FailureDescription [msg] []) [] 64 | 65 | expectedInput :: Pos -> s -> ParseFailure Pos s 66 | expectedInput pos s = ParseFailure pos (FailureDescription [] [s]) [] 67 | 68 | erroneous :: Pos -> String -> ParseFailure Pos s 69 | erroneous pos msg = ParseFailure pos (FailureDescription [] []) [msg] 70 | 71 | replaceExpected :: Pos -> String -> ParseFailure Pos s -> ParseFailure Pos s 72 | replaceExpected pos msg (ParseFailure pos' msgs errs) = ParseFailure pos' msgs' errs 73 | where msgs' | pos == pos' = FailureDescription [msg] [] 74 | | otherwise = msgs 75 | 76 | instance (Show s, Show r) => Show (ResultList g s r) where 77 | show (ResultList l f) = "ResultList (" ++ shows l (") (" ++ shows f ")") 78 | 79 | instance Show s => Show1 (ResultList g s) where 80 | liftShowsPrec _sp showList _prec (ResultList rol f) rest = 81 | "ResultList " ++ shows (simplify <$> toList rol) (shows f rest) 82 | where simplify (ResultsOfLength l _ r) = "ResultsOfLength " <> show l <> " _ " <> showList (toList r) "" 83 | 84 | instance Show r => Show (ResultsOfLength g s r) where 85 | show (ResultsOfLength l _ r) = "(ResultsOfLength @" ++ show l ++ " " ++ shows r ")" 86 | 87 | instance Functor (ResultsOfLength g s) where 88 | fmap f (ResultsOfLength l t r) = ResultsOfLength l t (f <$> r) 89 | {-# INLINE fmap #-} 90 | 91 | instance Functor (ResultList g s) where 92 | fmap f (ResultList l failure) = ResultList ((f <$>) <$> l) failure 93 | {-# INLINE fmap #-} 94 | 95 | instance Ord s => Applicative (ResultsOfLength g s) where 96 | pure = ResultsOfLength 0 mempty . 
pure 97 | ResultsOfLength l1 _ fs <*> ResultsOfLength l2 t2 xs = ResultsOfLength (l1 + l2) t2 (fs <*> xs) 98 | 99 | instance Ord s => Applicative (ResultList g s) where 100 | pure a = ResultList [pure a] mempty 101 | ResultList rl1 f1 <*> ResultList rl2 f2 = ResultList ((<*>) <$> rl1 <*> rl2) (f1 <> f2) 102 | 103 | instance Ord s => Alternative (ResultList g s) where 104 | empty = ResultList mempty mempty 105 | (<|>) = (<>) 106 | 107 | instance Filterable (ResultList g s) where 108 | mapMaybe f (ResultList rols failure) = ResultList (mapMaybe maybeROL rols) failure 109 | where maybeROL (ResultsOfLength l t rs) = ResultsOfLength l t <$> nonEmpty (mapMaybe f $ toList rs) 110 | {-# INLINE mapMaybe #-} 111 | 112 | instance Ord s => Semigroup (ResultList g s r) where 113 | ResultList rl1 f1 <> ResultList rl2 f2 = ResultList (merge rl1 rl2) (f1 <> f2) 114 | where merge [] rl = rl 115 | merge rl [] = rl 116 | merge rl1'@(rol1@(ResultsOfLength l1 s1 r1) : rest1) rl2'@(rol2@(ResultsOfLength l2 _ r2) : rest2) 117 | | l1 < l2 = rol1 : merge rest1 rl2' 118 | | l1 > l2 = rol2 : merge rl1' rest2 119 | | otherwise = ResultsOfLength l1 s1 (r1 <> r2) : merge rest1 rest2 120 | 121 | instance Ord s => AmbiguousAlternative (ResultList g s) where 122 | ambiguousOr (ResultList rl1 f1) (ResultList rl2 f2) = ResultList (merge rl1 rl2) (f1 <> f2) 123 | where merge [] rl = rl 124 | merge rl [] = rl 125 | merge rl1'@(rol1@(ResultsOfLength l1 s1 r1) : rest1) rl2'@(rol2@(ResultsOfLength l2 _ r2) : rest2) 126 | | l1 < l2 = rol1 : merge rest1 rl2' 127 | | l1 > l2 = rol2 : merge rl1' rest2 128 | | otherwise = ResultsOfLength l1 s1 (liftA2 collect r1 r2) : merge rest1 rest2 129 | collect (Ambiguous xs) (Ambiguous ys) = Ambiguous (xs <> ys) 130 | 131 | class Alternative f => AmbiguousAlternative f where 132 | ambiguousOr :: f (Ambiguous a) -> f (Ambiguous a) -> f (Ambiguous a) 133 | 134 | instance Ord s => Monoid (ResultList g s r) where 135 | mempty = ResultList mempty mempty 136 | mappend = (<>) 137 | 138 | instance Functor BinTree where 139 | fmap f (Fork left right) = Fork (fmap f left) (fmap f right) 140 | fmap f (Leaf a) = Leaf (f a) 141 | fmap _ EmptyTree = EmptyTree 142 | 143 | instance Applicative BinTree where 144 | pure = Leaf 145 | EmptyTree <*> _ = EmptyTree 146 | Leaf f <*> t = f <$> t 147 | Fork f1 f2 <*> t = Fork (f1 <*> t) (f2 <*> t) 148 | 149 | instance Foldable BinTree where 150 | foldMap f (Fork left right) = foldMap f left `mappend` foldMap f right 151 | foldMap f (Leaf a) = f a 152 | foldMap _ EmptyTree = mempty 153 | 154 | instance Traversable BinTree where 155 | traverse f (Fork left right) = Fork <$> traverse f left <*> traverse f right 156 | traverse f (Leaf a) = Leaf <$> f a 157 | traverse _ EmptyTree = pure EmptyTree 158 | 159 | instance Filterable BinTree where 160 | mapMaybe f (Fork left right) = mapMaybe f left <> mapMaybe f right 161 | mapMaybe f (Leaf a) = maybe EmptyTree Leaf (f a) 162 | mapMaybe _ EmptyTree = EmptyTree 163 | 164 | instance Semigroup (BinTree a) where 165 | EmptyTree <> t = t 166 | t <> EmptyTree = t 167 | l <> r = Fork l r 168 | 169 | instance Monoid (BinTree a) where 170 | mempty = EmptyTree 171 | mappend = (<>) 172 | 173 | class FallibleResults f where 174 | hasSuccess :: f s a -> Bool 175 | failureOf :: f s a -> ParseFailure Pos s 176 | failWith :: ParseFailure Pos s -> f s a 177 | 178 | instance FallibleResults (ResultList g) where 179 | hasSuccess (ResultList [] _) = False 180 | hasSuccess _ = True 181 | failureOf (ResultList _ failure) = failure 182 | failWith = 
ResultList [] 183 | 184 | -- | The class of parsers whose execution can be traced for debugging purposes 185 | class InputParsing m => TraceableParsing m where 186 | -- | Modify the argument parser to log its input whenever invoked. 187 | traceInput :: (ParserInput m -> String) -> m a -> m a 188 | -- | Modify the argument parser to log the given description and its input whenever invoked. 189 | traceAs :: Show (ParserInput m) => String -> m a -> m a 190 | traceAs description = traceInput (\input-> description <> " @ " <> show input) 191 | 192 | -------------------------------------------------------------------------------- /grammatical-parsers/src/Text/Grampa/Internal/Storable.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-} 2 | 3 | module Text.Grampa.Internal.Storable (Storable(..), Storable1(..), Storable11(..), 4 | Dependencies(..), ParserFlags(..)) where 5 | 6 | import Data.Functor.Const (Const(Const, getConst)) 7 | import qualified Rank2 8 | import Text.Grampa.Class (ParseFailure(ParseFailure)) 9 | import Text.Grampa.Internal (ResultList(ResultList), ResultsOfLength(ResultsOfLength), 10 | ParserFlags (ParserFlags, nullable, dependsOn), 11 | Dependencies (DynamicDependencies, StaticDependencies)) 12 | import qualified Text.Grampa.ContextFree.SortedMemoizing.Transformer as Transformer 13 | 14 | class Storable s a where 15 | store :: a -> s 16 | reuse :: s -> a 17 | 18 | class Storable1 s a where 19 | store1 :: a -> s b 20 | reuse1 :: s b -> a 21 | 22 | class Storable11 s t where 23 | store11 :: t a -> s b 24 | reuse11 :: s b -> t a 25 | 26 | instance Storable a a where 27 | store = id 28 | reuse = id 29 | 30 | instance Storable1 (Const a) a where 31 | store1 = Const 32 | reuse1 = getConst 33 | 34 | instance Storable1 s a => Storable11 s (Const a) where 35 | store11 = store1 . getConst 36 | reuse11 = Const . reuse1 37 | 38 | instance (Storable1 f a, Rank2.Functor g) => Storable (g f) (g (Const a)) where 39 | store = Rank2.fmap (store1 . getConst) 40 | reuse = Rank2.fmap (Const . 
reuse1) 41 | 42 | instance Ord s => Storable1 (ResultList g s) Bool where 43 | store1 bit = ResultList [] (ParseFailure (if bit then 1 else 0) mempty []) 44 | reuse1 (ResultList _ (ParseFailure pos _ _)) = pos /= 0 45 | 46 | instance (Rank2.Functor g, Monoid s, Ord s) => Storable1 (ResultList g s) (ParserFlags g) where 47 | store1 a = ResultList [store a] mempty 48 | reuse1 (ResultList [s] _) = reuse s 49 | 50 | instance (Rank2.Functor g, Monoid s, Ord s) => Storable (ResultsOfLength g s r) (ParserFlags g) where 51 | store (ParserFlags n d) = ResultsOfLength (if n then 1 else 0) (store d) (pure $ error "unused") 52 | reuse (ResultsOfLength n d _) = ParserFlags (n /= 0) (reuse d) 53 | 54 | instance (Rank2.Functor g, Monoid s, Ord s) => Storable [(s, g (ResultList g s))] (Dependencies g) where 55 | store DynamicDependencies = [] 56 | store (StaticDependencies deps) = [(mempty, store deps)] 57 | reuse [] = DynamicDependencies 58 | reuse [(_, deps)] = StaticDependencies (reuse deps) 59 | 60 | instance Ord s => Storable1 (Transformer.ResultListT m g s) Bool where 61 | store1 bit = Transformer.ResultList [] (ParseFailure (if bit then 1 else 0) mempty []) 62 | reuse1 (Transformer.ResultList _ (ParseFailure pos _ _)) = pos /= 0 63 | 64 | instance (Rank2.Functor g, Monoid s, Ord s) => Storable1 (Transformer.ResultListT m g s) (ParserFlags g) where 65 | store1 a = Transformer.ResultList [store a] mempty 66 | reuse1 (Transformer.ResultList [s] _) = reuse s 67 | 68 | instance (Rank2.Functor g, Monoid s, Ord s) => Storable (Transformer.ResultsOfLengthT m g s r) (ParserFlags g) where 69 | store = Transformer.ResultsOfLengthT . store 70 | reuse = reuse . Transformer.getResultsOfLength 71 | 72 | instance (Rank2.Functor g, Monoid s, Ord s) => Storable (Transformer.ResultsOfLength m g s r) (ParserFlags g) where 73 | store (ParserFlags n d) = Transformer.ROL (if n then 1 else 0) (store d) (pure $ error "unused") 74 | reuse (Transformer.ROL n d _) = ParserFlags (n /= 0) (reuse d) 75 | 76 | instance (Rank2.Functor g, Monoid s, Ord s) => Storable [(s, g (Transformer.ResultListT m g s))] (Dependencies g) where 77 | store DynamicDependencies = [] 78 | store (StaticDependencies deps) = [(mempty, store deps)] 79 | reuse [] = DynamicDependencies 80 | reuse [(_, deps)] = StaticDependencies (reuse deps) 81 | -------------------------------------------------------------------------------- /grammatical-parsers/test/Benchmark.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE Haskell2010, BangPatterns, ExistentialQuantification, FlexibleContexts, OverloadedStrings, 2 | RecordWildCards, ScopedTypeVariables, TemplateHaskell #-} 3 | 4 | module Benchmark where 5 | 6 | import Control.Applicative 7 | import Data.Functor.Compose (Compose(..)) 8 | import Data.Monoid ((<>)) 9 | 10 | import Control.DeepSeq (deepseq) 11 | import Criterion.Main (bench, bgroup, defaultMain, nf) 12 | 13 | import qualified Rank2 14 | import qualified Rank2.TH 15 | 16 | import Text.Parser.Combinators (eof) 17 | import Text.Grampa 18 | import Text.Grampa.ContextFree.Parallel (Parser) 19 | import qualified Arithmetic 20 | import qualified Boolean 21 | import Main (arithmetic, boolean) 22 | 23 | data Recursive f = Recursive{start :: f String, 24 | rec :: f String, 25 | next :: f String} 26 | 27 | $(Rank2.TH.deriveAll ''Recursive) 28 | 29 | recursiveManyGrammar :: Recursive (Parser g String) -> Recursive (Parser g String) 30 | recursiveManyGrammar Recursive{..} = Recursive{ 31 | start= rec <* eof, 32 | rec= 
many (char ';') <* optional next, 33 | next= string "END"} 34 | 35 | parseInt :: String -> [Int] 36 | parseInt s = case Arithmetic.expr (parseComplete (fixGrammar arithmetic) s) 37 | of Compose (Right [r]) -> [r] 38 | r -> error ("Unexpected " <> show r) 39 | 40 | parseBoolean :: String -> [Bool] 41 | parseBoolean s = case (Boolean.expr . Rank2.snd) (parseComplete (fixGrammar boolean) s) 42 | of Compose (Right [r]) -> [r] 43 | r -> error ("Unexpected " <> show r) 44 | 45 | zeroes, ones, falsehoods, truths, groupedLeft, groupedRight :: Int -> String 46 | zeroes n = "0" <> concat (replicate n "+0") 47 | ones n = "1" <> concat (replicate n "*1") 48 | falsehoods n = "False" <> concat (replicate n " || False") 49 | truths n = "True" <> concat (replicate n " && True") 50 | 51 | groupedLeft n = replicate n '(' <> "0" <> concat (replicate n "+0)") 52 | groupedRight n = concat (replicate n "(0+") <> "0" <> replicate n ')' 53 | 54 | main :: IO () 55 | main = do 56 | let zeroes100 = zeroes 100 57 | zeroes200 = zeroes 200 58 | zeroes300 = zeroes 300 59 | groupedLeft100 = groupedLeft 100 60 | groupedLeft200 = groupedLeft 200 61 | groupedLeft300 = groupedLeft 300 62 | groupedRight100 = groupedRight 100 63 | groupedRight200 = groupedRight 200 64 | groupedRight300 = groupedRight 300 65 | ones100 = ones 100 66 | ones200 = ones 200 67 | ones300 = ones 300 68 | falsehoods80 = falsehoods 80 69 | falsehoods160 = falsehoods 160 70 | falsehoods240 = falsehoods 240 71 | deepseq (zeroes100, zeroes200, zeroes300, 72 | groupedLeft100, groupedLeft200, groupedLeft300, 73 | groupedRight100, groupedRight200, groupedRight300) $ 74 | defaultMain [ 75 | {- 76 | bgroup "many" [ 77 | bench "simple" $ nf (simpleParse $ many (string ";") <* endOfInput) (replicate 400 ';'), 78 | bench "recursive" $ nf (parse (fixGrammar recursiveManyGrammar) start) (replicate 400 ';')], 79 | -} 80 | bgroup "zero sum" [ 81 | bench "100" $ nf parseInt zeroes100, 82 | bench "200" $ nf parseInt zeroes200, 83 | bench "300" $ nf parseInt zeroes300], 84 | bgroup "grouped left" [ 85 | bench "100" $ nf parseInt groupedLeft100, 86 | bench "200" $ nf parseInt groupedLeft200, 87 | bench "300" $ nf parseInt groupedLeft300], 88 | {- 89 | bgroup "grouped right" [ 90 | bench "100" $ nf parseInt groupedRight100, 91 | bench "200" $ nf parseInt groupedRight200, 92 | bench "300" $ nf parseInt groupedRight300], 93 | -} 94 | bgroup "one product" [ 95 | bench "100" $ nf parseInt ones100, 96 | bench "200" $ nf parseInt ones200, 97 | bench "300" $ nf parseInt ones300], 98 | bgroup "false disjunction" [ 99 | bench "80" $ nf parseBoolean falsehoods80, 100 | bench "160" $ nf parseBoolean falsehoods160, 101 | bench "240" $ nf parseBoolean falsehoods240] 102 | ] 103 | 104 | -------------------------------------------------------------------------------- /grammatical-parsers/test/Doctest.hs: -------------------------------------------------------------------------------- 1 | import Build_doctests 2 | import Test.DocTest (doctest) 3 | 4 | main :: IO () 5 | main = do 6 | doctest (flags ++ pkgs ++ module_sources) 7 | doctest (flags_exe_arithmetic ++ pkgs_exe_arithmetic ++ module_sources_exe_arithmetic) 8 | doctest (flags_exe_boolean_transformations ++ pkgs_exe_boolean_transformations 9 | ++ module_sources_exe_boolean_transformations) 10 | doctest (flags ++ pkgs ++ ["-pgmL", "markdown-unlit", "-isrc", "test/README.lhs"]) 11 | -------------------------------------------------------------------------------- /grammatical-parsers/test/README.lhs: 
-------------------------------------------------------------------------------- 1 | ../README.md -------------------------------------------------------------------------------- /grammatical-parsers/test/Test/Ambiguous.hs: -------------------------------------------------------------------------------- 1 | {-# Language FlexibleInstances, RankNTypes, RecordWildCards, TemplateHaskell #-} 2 | module Test.Ambiguous where 3 | 4 | import Control.Applicative ((<|>), empty, liftA2) 5 | import Data.Foldable (fold) 6 | import Data.Semigroup ((<>)) 7 | 8 | import qualified Rank2.TH 9 | import Text.Grampa 10 | import Text.Grampa.Combinators 11 | import Text.Grampa.ContextFree.SortedMemoizing.LeftRecursive (Parser) 12 | 13 | import Debug.Trace 14 | 15 | data Amb = Xy1 String String 16 | | Xy2 (Ambiguous Amb) String 17 | | Xyz (Ambiguous Amb) String 18 | | Xyzw (Ambiguous Amb) String 19 | deriving (Eq, Show) 20 | 21 | data Test p = Test{ 22 | amb :: p (Ambiguous Amb) 23 | } 24 | 25 | $(Rank2.TH.deriveAll ''Test) 26 | 27 | grammar :: Test (Parser Test String) -> Test (Parser Test String) 28 | grammar Test{..} = Test{ 29 | amb = ambiguous (Xy1 <$> string "x" <*> moptional (string "y") 30 | <|> Xy2 <$> amb <*> string "y" 31 | <|> Xyz <$> amb <*> string "z" 32 | <|> Xyzw <$> amb <*> string "w") 33 | } 34 | -------------------------------------------------------------------------------- /grammatical-parsers/test/Test/Examples.hs: -------------------------------------------------------------------------------- 1 | {-# Language FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, RankNTypes, ScopedTypeVariables #-} 2 | module Test.Examples where 3 | 4 | import Control.Applicative (empty, liftA2, liftA3, (<|>)) 5 | import Data.Functor.Compose (Compose(..)) 6 | import Data.Monoid (Monoid(..), (<>)) 7 | import Data.Monoid.Textual (TextualMonoid, toString) 8 | import Text.Parser.Combinators (choice) 9 | 10 | import Test.Tasty.QuickCheck (Arbitrary(..), Gen, NonNegative(..), Property, testProperty, (===), (==>), (.&&.), 11 | elements, forAll, mapSize, oneof, resize, sized, whenFail) 12 | import Data.Word (Word8) 13 | 14 | import qualified Rank2 15 | import Text.Grampa 16 | import Text.Grampa.ContextFree.SortedMemoizing.LeftRecursive (Parser) 17 | import qualified Arithmetic 18 | import qualified Comparisons 19 | import qualified Boolean 20 | import qualified Conditionals 21 | 22 | parseArithmetical :: String -> Either String ArithmeticTree 23 | parseArithmetical = uniqueParse (fixGrammar Arithmetic.arithmetic) Arithmetic.expr 24 | 25 | parseBoolean :: String -> Either String BooleanTree 26 | parseBoolean = uniqueParse (fixGrammar boolean) (Boolean.expr . Rank2.snd) 27 | 28 | comparisons :: (Rank2.Functor g, LexicalParsing (Parser g String)) => 29 | GrammarBuilder ArithmeticComparisons g Parser String 30 | comparisons (Rank2.Pair a c) = 31 | Rank2.Pair (Arithmetic.arithmetic a) (Comparisons.comparisons c){Comparisons.term= Arithmetic.expr a} 32 | 33 | boolean :: (Rank2.Functor g, LexicalParsing (Parser g String)) => 34 | GrammarBuilder ArithmeticComparisonsBoolean g Parser String 35 | boolean (Rank2.Pair ac b) = Rank2.Pair (comparisons ac) (Boolean.boolean (Comparisons.test $ Rank2.snd ac) b) 36 | 37 | parseConditional :: String -> Either String (ConditionalTree ArithmeticTree) 38 | parseConditional = uniqueParse (fixGrammar conditionals) (Conditionals.expr . 
Rank2.snd) 39 | 40 | conditionals :: (Rank2.Functor g, LexicalParsing (Parser g String)) => GrammarBuilder ACBC g Parser String 41 | conditionals (Rank2.Pair acb c) = 42 | boolean acb `Rank2.Pair` 43 | Conditionals.conditionals c{Conditionals.test= Boolean.expr (Rank2.snd acb), 44 | Conditionals.term= Unconditional <$> Arithmetic.expr (Rank2.fst $ Rank2.fst acb)} 45 | 46 | type ArithmeticComparisons = Rank2.Product (Arithmetic.Arithmetic ArithmeticTree) (Comparisons.Comparisons ArithmeticTree BooleanTree) 47 | type ArithmeticComparisonsBoolean = Rank2.Product ArithmeticComparisons (Boolean.Boolean BooleanTree) 48 | type ACBC = Rank2.Product ArithmeticComparisonsBoolean (Conditionals.Conditionals BooleanTree 49 | (ConditionalTree ArithmeticTree)) 50 | 51 | data ArithmeticTree = Number (NonNegative Int) 52 | | Add ArithmeticTree ArithmeticTree 53 | | Multiply ArithmeticTree ArithmeticTree 54 | | Negate ArithmeticTree 55 | | Subtract ArithmeticTree ArithmeticTree 56 | | Divide ArithmeticTree ArithmeticTree 57 | deriving Eq 58 | 59 | data BooleanTree = BooleanConstant Bool 60 | | Comparison ArithmeticTree Relation ArithmeticTree 61 | | Not BooleanTree 62 | | And BooleanTree BooleanTree 63 | | Or BooleanTree BooleanTree 64 | deriving Eq 65 | 66 | data ConditionalTree a = If BooleanTree (ConditionalTree a) (ConditionalTree a) 67 | | Unconditional a 68 | deriving Eq 69 | 70 | newtype Relation = Relation String deriving Eq 71 | 72 | instance Show ArithmeticTree where 73 | showsPrec p (Add l r) rest | p < 1 = showsPrec 0 l (" + " <> showsPrec 1 r rest) 74 | showsPrec p (Subtract l r) rest | p < 1 = showsPrec 0 l (" - " <> showsPrec 1 r rest) 75 | showsPrec p (Negate e) rest | p < 1 = "- " <> showsPrec 1 e rest 76 | showsPrec p (Multiply l r) rest | p < 2 = showsPrec 1 l (" * " <> showsPrec 2 r rest) 77 | showsPrec p (Divide l r) rest | p < 2 = showsPrec 1 l (" / " <> showsPrec 2 r rest) 78 | showsPrec _ (Number (NonNegative n)) rest = shows n rest 79 | showsPrec p e rest = "(" <> showsPrec 0 e (")" <> rest) 80 | 81 | instance Show BooleanTree where 82 | showsPrec p (Or l r) rest | p < 1 = showsPrec 1 l (" || " <> showsPrec 0 r rest) 83 | showsPrec p (And l r) rest | p < 2 = showsPrec 2 l (" && " <> showsPrec 1 r rest) 84 | showsPrec p (Not e) rest | p < 3 = "not " <> showsPrec 3 e rest 85 | showsPrec p (Comparison l rel r) rest | p < 3 = showsPrec 0 l (" " <> show rel <> " " <> showsPrec 0 r rest) 86 | showsPrec _ (BooleanConstant b) rest = shows b rest 87 | showsPrec p e rest = "(" <> showsPrec 0 e (")" <> rest) 88 | 89 | instance Show a => Show (ConditionalTree a) where 90 | show (Unconditional a) = show a 91 | show (If test true false) = "if " <> show test <> " then " <> show true <> " else " <> show false 92 | 93 | instance Show Relation where 94 | show (Relation rel) = rel 95 | 96 | instance Arithmetic.ArithmeticDomain ArithmeticTree where 97 | number = Number . 
NonNegative 98 | add = Add 99 | multiply = Multiply 100 | negate = Negate 101 | subtract = Subtract 102 | divide = Divide 103 | 104 | instance Boolean.BooleanDomain BooleanTree where 105 | true = BooleanConstant True 106 | false = BooleanConstant False 107 | and = And 108 | or = Or 109 | not = Not 110 | 111 | instance Comparisons.ComparisonDomain ArithmeticTree BooleanTree where 112 | lessThan = flip Comparison (Relation "<") 113 | lessOrEqual = flip Comparison (Relation "<=") 114 | equal = flip Comparison (Relation "==") 115 | greaterOrEqual = flip Comparison (Relation ">=") 116 | greaterThan = flip Comparison (Relation ">") 117 | 118 | instance Conditionals.ConditionalDomain BooleanTree (ConditionalTree ArithmeticTree) where 119 | ifThenElse = If 120 | 121 | instance Arbitrary ArithmeticTree where 122 | arbitrary = sized tree 123 | where tree n | n < 1 = Number <$> arbitrary 124 | | otherwise = oneof [Number <$> arbitrary, 125 | Negate <$> tree (n - 1), 126 | liftA2 Add branch branch, 127 | liftA2 Multiply branch branch, 128 | liftA2 Subtract branch branch, 129 | liftA2 Divide branch branch] 130 | where branch = tree (n `div` 2) 131 | 132 | instance Arbitrary BooleanTree where 133 | arbitrary = sized tree 134 | where tree n | n < 1 = BooleanConstant <$> arbitrary 135 | | otherwise = oneof [BooleanConstant <$> resize (n - 1) arbitrary, 136 | Not <$> tree (n - 1), 137 | liftA3 Comparison arbitrary' (elements relations) arbitrary', 138 | liftA2 And branch branch, 139 | liftA2 Or branch branch] 140 | where branch = tree (n `div` 2) 141 | relations = Relation <$> ["<", ">", "==", "<=", ">="] 142 | arbitrary' = resize (n `div` 2) arbitrary 143 | 144 | instance Arbitrary (ConditionalTree ArithmeticTree) where 145 | arbitrary = sized tree 146 | where tree n = oneof [--Unconditional <$> resize (n - 1) arbitrary, 147 | liftA3 If (resize (n `div` 3) arbitrary) 148 | (Unconditional <$> resize(n `div` 3) arbitrary) 149 | (Unconditional <$> resize(n `div` 3) arbitrary)] 150 | 151 | uniqueParse :: (Ord s, TextualMonoid s, Show r, Rank2.Apply g, Rank2.Traversable g, Rank2.Distributive g) => 152 | Grammar g Parser s -> (forall f. g f -> f r) -> s -> Either String r 153 | uniqueParse g p s = case getCompose (p $ parseComplete g s) 154 | of Right [r] -> Right r 155 | Right [] -> Left "Unparseable" 156 | Right rs -> Left ("Ambiguous: " ++ show rs) 157 | Left err -> Left (toString mempty $ failureDescription s err 3) 158 | 159 | instance TokenParsing (Parser ArithmeticComparisons String) where 160 | token = lexicalToken 161 | instance TokenParsing (Parser ArithmeticComparisonsBoolean String) where 162 | token = lexicalToken 163 | instance TokenParsing (Parser ACBC String) where 164 | token = lexicalToken 165 | 166 | instance LexicalParsing (Parser ArithmeticComparisons String) 167 | instance LexicalParsing (Parser ArithmeticComparisonsBoolean String) 168 | instance LexicalParsing (Parser ACBC String) 169 | 170 | -------------------------------------------------------------------------------- /rank2classes/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | Version 1.5.4 2 | --------------- 3 | * Deriving `Data` and `Typeable` for all declared data types. 4 | * Bumped the upper bound of the `template-haskell` dependency. 5 | 6 | Version 1.5.3.1 7 | --------------- 8 | * Bumped the upper bound of the `template-haskell` dependency. 
9 | 10 | Version 1.5.3 11 | --------------- 12 | * Fixed compilation with GHC 9.8.1 and `template-haskell` 2.22 13 | 14 | Version 1.5.2 15 | --------------- 16 | * Fixed the generated TH instance contexts for GADTs. 17 | * Fixed the generated signature of the `deliver` instance method in presence of `InstanceSigs`. 18 | * Bumped the upper bound of the `template-haskell` dependency. 19 | 20 | Version 1.5.1 21 | --------------- 22 | * Fixed the `Rank2.TH` templates on GHC < 9.2 with no `OverloadedRecordDot` support to revert to their 23 | 1.4.6 behaviour. 24 | 25 | Version 1.5 26 | --------------- 27 | * The `Rank2.TH` templates have changed, are now applicable with `DuplicateRecordFields` provided that 28 | `OverloadedRecordDot` is enabled. 29 | * `Rank2.TH.deriveLogistic` also needs `ScopedTypeVariables` and `InstanceSigs` extensions to generate 30 | proper record updates. 31 | 32 | Version 1.4.6 33 | --------------- 34 | * Added the `Logistic` type class, `getters` and `setters` 35 | * Added `Rank2.TH.deriveLogistic`, included it in `deriveAll` 36 | * Compiling with GHC 9.4.2 37 | * Forward compatibility with `TypeFamilies` 38 | 39 | Version 1.4.4 40 | --------------- 41 | * Tested with GHC 9.2.1, incremented the upper `template-haskell` dependency bound 42 | * Generalized the TH generation to handle PolyRec types 43 | * Incremented the lower bound of rank2classes' `base` dependency, thanks to phadej 44 | 45 | Version 1.4.3 46 | --------------- 47 | * Fixed links to standard rank-1 classes in Haddock documentation 48 | * Fixed issue #23 with the `traverse` template generated for sum types with a fieldless constructor 49 | * Incremented upper dependency bounds 50 | 51 | Version 1.4.2 52 | --------------- 53 | * Fixed compatibility with GHC 9 - PR by Felix Yan 54 | 55 | Version 1.4.1 56 | --------------- 57 | * Fixed the templates for multi-constructor records 58 | * Made Rank2.TH.unsafeDeriveApply even more unsafe 59 | 60 | Version 1.4 61 | --------------- 62 | * Added Rank2.Compose :: ((* -> *) -> *) -> (* -> *) -> ((* -> *) -> *) 63 | * Matched the precedence of <$> and <*> operators with Prelude 64 | * Relaxed the lower bound of base dependency to 4.10 65 | 66 | Version 1.3.2.1 67 | --------------- 68 | * Incremented the upper bound of the template-haskell dependency 69 | 70 | Version 1.3.2 71 | --------------- 72 | * Exported the `$` synonym for `apply` 73 | 74 | Version 1.3.1.2 75 | --------------- 76 | * Fixed doctest module name issue 77 | * Incremented the lower bound of base dependency 78 | 79 | Version 1.3.1.1 80 | --------------- 81 | * Fixed the doctests after cabal get 82 | 83 | Version 1.3.1 84 | --------------- 85 | * Added missing markdown-unlit dependency 86 | * Strictified one argument of Rank2.<$> and Rank2.<*> 87 | 88 | Version 1.3 89 | --------------- 90 | * Added `newtype Flip` to exports - PR by Jeremy List 91 | * Generating INLINE pragmas from Rank2.TH 92 | * Generating the proper constraints on derived instances where needed 93 | 94 | Version 1.2.1 95 | --------------- 96 | * Added unsafeDeriveApply 97 | 98 | Version 1.2 99 | --------------- 100 | * Added the class instances for Data.Functor.Const 101 | * Fixed and optimized the Foldable/Traversable instance code generated for bare fields in Rank2.TH 102 | 103 | Version 1.1 104 | --------------- 105 | * Replaced own `Product` data type by the one from `Data.Functor.Product` 106 | * Added instances of `Data.Functor.Sum` 107 | * Removed the TH generation of partial Apply and Distributive instances 108 | * Covered 
more constructor cases in TH code 109 | * Added use-template-haskell flag, true by default - PR by Dridus 110 | 111 | Version 1.0.2 112 | --------------- 113 | * Fixed the bounds and `Semigroup` to compile with GHC 8.4.1 114 | * Added the ~> type synonym 115 | * Fixed `deriveFunctor` for record fields with concrete types - PR by Tom Smalley 116 | 117 | Version 1.0.1 118 | --------------- 119 | * Fixed the doctests 120 | 121 | Version 1.0 122 | --------------- 123 | * Swapped `distributeWith` with `cotraverse` 124 | * Documentation improvements 125 | 126 | Version 0.2.1.1 127 | --------------- 128 | * Corrected the README 129 | 130 | Version 0.2.1 131 | --------------- 132 | * Incremented the dependency bounds for GHC 8.2.1 133 | 134 | Version 0.2 135 | --------------- 136 | * Introduced `DistributiveTraversable` as a generalization of `Distributive` 137 | * Export "cotraverse" and "cotraverseTraversable" 138 | * Added `liftA3`, `liftA4`, `liftA5` 139 | * Added more convenience functions 140 | * Fixed grammatical errors and overlong lines 141 | 142 | Version 0.1.1 143 | --------------- 144 | * Generalized the classes with `{-# LANGUAGE PolyKinds #-}` 145 | 146 | Version 0.1 147 | --------------- 148 | * Initial release 149 | -------------------------------------------------------------------------------- /rank2classes/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016, Mario Blažević 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are 6 | met: 7 | 8 | 1. Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | 2. Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the 14 | distribution. 15 | 16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 | -------------------------------------------------------------------------------- /rank2classes/README.md: -------------------------------------------------------------------------------- 1 | Rank 2 Classes 2 | ============== 3 | 4 | ### The standard constructor type classes in the parallel rank-2 universe ### 5 | 6 | The rank2 package exports module `Rank2`, meant to be imported qualified like this: 7 | 8 | ~~~ {.haskell} 9 | {-# LANGUAGE RankNTypes, TemplateHaskell, TypeOperators #-} 10 | module MyModule where 11 | import qualified Rank2 12 | import qualified Rank2.TH 13 | ~~~ 14 | 15 | Several more imports for the examples... 
16 | 17 | ~~~ {.haskell} 18 | import Data.Functor.Classes (Show1, showsPrec1) 19 | import Data.Functor.Identity (Identity(..)) 20 | import Data.Functor.Const (Const(..)) 21 | import Data.List (find) 22 | ~~~ 23 | 24 | The `Rank2` import will make available the following type classes: 25 | 26 | * [Rank2.Functor](http://hackage.haskell.org/package/rank2classes/docs/Rank2.html#t:Functor) 27 | * [Rank2.Apply](http://hackage.haskell.org/package/rank2classes/docs/Rank2.html#t:Apply) 28 | * [Rank2.Applicative](http://hackage.haskell.org/package/rank2classes/docs/Rank2.html#t:Applicative) 29 | * [Rank2.Foldable](http://hackage.haskell.org/package/rank2classes/docs/Rank2.html#t:Foldable) 30 | * [Rank2.Traversable](http://hackage.haskell.org/package/rank2classes/docs/Rank2.html#t:Traversable) 31 | * [Rank2.Distributive](http://hackage.haskell.org/package/rank2classes/docs/Rank2.html#t:Distributive) 32 | * [Rank2.Logistic](http://hackage.haskell.org/package/rank2classes/docs/Rank2.html#t:Logistic) 33 | 34 | The methods of these type classes all have rank-2 types. The class instances are data types of kind `(k -> *) -> *`, 35 | one example of which would be a database record with different field types but all wrapped by the same type 36 | constructor: 37 | 38 | ~~~ {.haskell} 39 | data Person f = Person{ 40 | name :: f String, 41 | age :: f Int, 42 | mother, father :: f (Maybe PersonVerified) 43 | } 44 | ~~~ 45 | 46 | By wrapping each field we have declared a generalized record type. It can be made to play different roles by switching the 47 | value of the parameter `f`. Some examples would be 48 | 49 | ~~~ {.haskell} 50 | type PersonVerified = Person Identity 51 | type PersonText = Person (Const String) 52 | type PersonWithErrors = Person (Either String) 53 | type PersonDatabase = [PersonVerified] 54 | type PersonDatabaseByColumns = Person [] 55 | ~~~ 56 | 57 | If you wish to have the standard [Eq](http://hackage.haskell.org/package/base/docs/Data-Eq.html#t:Eq) and 58 | [Show](http://hackage.haskell.org/package/base/docs/Text-Show.html#t:Show) instances for a record type like `Person`, 59 | it's best if they refer to the 60 | [Eq1](http://hackage.haskell.org/package/base-4.9.1.0/docs/Data-Functor-Classes.html#t:Eq1) and 61 | [Show1](http://hackage.haskell.org/package/base-4.9.1.0/docs/Data-Functor-Classes.html#t:Show1) instances for its 62 | parameter `f`: 63 | 64 | ~~~ {.haskell} 65 | instance Show1 f => Show (Person f) where 66 | showsPrec prec person rest = 67 | "Person{" ++ separator ++ "name=" ++ showsPrec1 prec' (name person) 68 | ("," ++ separator ++ "age=" ++ showsPrec1 prec' (age person) 69 | ("," ++ separator ++ "mother=" ++ showsPrec1 prec' (mother person) 70 | ("," ++ separator ++ "father=" ++ showsPrec1 prec' (father person) 71 | ("}" ++ rest)))) 72 | where prec' = succ prec 73 | separator = "\n" ++ replicate prec' ' ' 74 | ~~~ 75 | 76 | You can create the rank-2 class instances for your data types manually, or you can generate the instances using the 77 | templates imported from the `Rank2.TH` module with a single line of code per data type: 78 | 79 | ~~~ {.haskell} 80 | $(Rank2.TH.deriveAll ''Person) 81 | ~~~ 82 | 83 | Either way, once you have the rank-2 type class instances, you can use them to easily convert between records with 84 | different parameters `f`. 
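For instance, a single natural transformation between the field wrappers converts a whole record at once. The following is a minimal sketch assuming the `Person` declarations above; the `relax` name is illustrative and not part of the library:

~~~ {.haskell}
-- Loosen a fully verified record into one with optional fields by
-- applying the same wrapper-to-wrapper function to every field.
relax :: PersonVerified -> Person Maybe
relax = Rank2.fmap (Just . runIdentity)
~~~

The argument to `Rank2.fmap` must work uniformly for every field type, which is exactly what its rank-2 signature enforces.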
85 | 86 | ### Record construction and modification examples ### 87 | 88 | In case of our `Person` record, a couple of helper functions will prove handy: 89 | 90 | ~~~ {.haskell} 91 | findPerson :: PersonDatabase -> String -> Maybe PersonVerified 92 | findPerson db nameToFind = find ((nameToFind ==) . runIdentity . name) db 93 | 94 | personByName :: PersonDatabase -> String -> Either String (Maybe PersonVerified) 95 | personByName db personName 96 | | null personName = Right Nothing 97 | | p@Just{} <- findPerson db personName = Right p 98 | | otherwise = Left ("Nobody by name of " ++ personName) 99 | ~~~ 100 | 101 | Now we can start by constructing a `Person` record with rank-2 functions for fields. This record is not so much a person 102 | as a field-by-field person verifier: 103 | 104 | ~~~ {.haskell} 105 | personChecker :: PersonDatabase -> Person (Const String Rank2.~> Either String) 106 | personChecker db = 107 | Person{name= Rank2.Arrow (Right . getConst), 108 | age= Rank2.Arrow $ \(Const age)-> 109 | case reads age 110 | of [(n, "")] -> Right n 111 | _ -> Left (age ++ " is not an integer"), 112 | mother= Rank2.Arrow (personByName db . getConst), 113 | father= Rank2.Arrow (personByName db . getConst)} 114 | ~~~ 115 | 116 | We can apply it using the [Rank2.<*>](http://hackage.haskell.org/package/rank2classes/docs/Rank2.html#v:-60--42--62-) 117 | method of the [Rank2.Apply](http://hackage.haskell.org/package/rank2classes/docs/Rank2.html#t:Apply) type class to a bunch 118 | of textual fields for `Person`, and get back either errors or proper field values: 119 | 120 | ~~~ {.haskell} 121 | verify :: PersonDatabase -> PersonText -> PersonWithErrors 122 | verify db person = personChecker db Rank2.<*> person 123 | ~~~ 124 | 125 | If there are no errors, we can get a fully verified record by applying 126 | [Rank2.traverse](http://hackage.haskell.org/package/rank2classes/docs/Rank2.html#v:traverse) to the result: 127 | 128 | ~~~ {.haskell} 129 | completeVerified :: PersonWithErrors -> Either String PersonVerified 130 | completeVerified = Rank2.traverse (Identity <$>) 131 | ~~~ 132 | 133 | or we can go in the opposite direction with 134 | [Rank2.<$>](http://hackage.haskell.org/package/rank2classes/docs/Rank2.html#v:-60--36--62-): 135 | 136 | ~~~ {.haskell} 137 | uncompleteVerified :: PersonVerified -> PersonWithErrors 138 | uncompleteVerified = Rank2.fmap (Right . 
runIdentity) 139 | ~~~ 140 | 141 | If on the other hand there *are* errors, we can collect them using 142 | [Rank2.foldMap](http://hackage.haskell.org/package/rank2classes/docs/Rank2.html#v:foldMap): 143 | 144 | ~~~ {.haskell} 145 | verificationErrors :: PersonWithErrors -> [String] 146 | verificationErrors = Rank2.foldMap (either (:[]) (const [])) 147 | ~~~ 148 | 149 | Here is an example GHCi session: 150 | 151 | ~~~ {.haskell} 152 | -- | 153 | -- >>> :{ 154 | --let Right alice = completeVerified $ 155 | -- verify [] Person{name= Const "Alice", age= Const "44", 156 | -- mother= Const "", father= Const ""} 157 | -- Right bob = completeVerified $ 158 | -- verify [] Person{name= Const "Bob", age= Const "45", 159 | -- mother= Const "", father= Const ""} 160 | -- Right charlie = completeVerified $ 161 | -- verify [alice, bob] Person{name= Const "Charlie", age= Const "19", 162 | -- mother= Const "Alice", father= Const "Bob"} 163 | -- :} 164 | -- 165 | -- >>> charlie 166 | -- Person{ 167 | -- name=Identity "Charlie", 168 | -- age=Identity 19, 169 | -- mother=Identity (Just Person{ 170 | -- name=(Identity "Alice"), 171 | -- age=(Identity 44), 172 | -- mother=(Identity Nothing), 173 | -- father=(Identity Nothing)}), 174 | -- father=Identity (Just Person{ 175 | -- name=(Identity "Bob"), 176 | -- age=(Identity 45), 177 | -- mother=(Identity Nothing), 178 | -- father=(Identity Nothing)})} 179 | -- >>> :{ 180 | --let dave = verify [alice, bob, charlie] 181 | -- Person{name= Const "Dave", age= Const "young", 182 | -- mother= Const "Lise", father= Const "Mike"} 183 | -- :} 184 | -- 185 | -- >>> dave 186 | -- Person{ 187 | -- name=Right "Dave", 188 | -- age=Left "young is not an integer", 189 | -- mother=Left "Nobody by name of Lise", 190 | -- father=Left "Nobody by name of Mike"} 191 | -- >>> completeVerified dave 192 | -- Left "young is not an integer" 193 | -- >>> verificationErrors dave 194 | -- ["young is not an integer","Nobody by name of Lise","Nobody by name of Mike"] 195 | -- >>> Rank2.distribute [alice, bob, charlie] 196 | -- Person{ 197 | -- name=Compose [Identity "Alice",Identity "Bob",Identity "Charlie"], 198 | -- age=Compose [Identity 44,Identity 45,Identity 19], 199 | -- mother=Compose [Identity Nothing,Identity Nothing,Identity (Just Person{ 200 | -- name=(Identity "Alice"), 201 | -- age=(Identity 44), 202 | -- mother=(Identity Nothing), 203 | -- father=(Identity Nothing)})], 204 | -- father=Compose [Identity Nothing,Identity Nothing,Identity (Just Person{ 205 | -- name=(Identity "Bob"), 206 | -- age=(Identity 45), 207 | -- mother=(Identity Nothing), 208 | -- father=(Identity Nothing)})]} 209 | ~~~ 210 | 211 | ### Related works ### 212 | 213 | This package is one of several implementations of a pattern that is often called *Higher-Kinded Data*. Other examples 214 | include [hkd-lens](https://hackage.haskell.org/package/hkd-lens), 215 | [barbies](https://hackage.haskell.org/package/barbies), and [higgledy](https://hackage.haskell.org/package/higgledy). 216 | 217 | Grammars are another use case that is almost, but not quite, entirely unlike database records. See 218 | [grammatical-parsers](https://github.com/blamario/grampa/tree/master/grammatical-parsers) or 219 | [construct](https://hackage.haskell.org/package/construct) for examples. 220 | 221 | Both database records and grammars are flat structures. If your use case involves trees of rank-2 records, this 222 | package will probably not suffice. 
Consider using 223 | [deep-transformations](https://hackage.haskell.org/package/deep-transformations) instead. 224 | -------------------------------------------------------------------------------- /rank2classes/Setup.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Distribution.Extra.Doctest (defaultMainWithDoctests) 4 | 5 | main :: IO () 6 | main = defaultMainWithDoctests "doctests" 7 | -------------------------------------------------------------------------------- /rank2classes/Tutorial.lhs: -------------------------------------------------------------------------------- 1 | > {-# LANGUAGE TemplateHaskell #-} 2 | > module Doctest where 3 | > 4 | > import qualified Rank2 5 | > import qualified Rank2.TH 6 | > import Data.Functor.Classes (Show1, showsPrec1) 7 | > import Data.Functor.Const (Const(..)) 8 | > import Data.Functor.Identity (Identity(..)) 9 | > import Data.List (find) 10 | > 11 | > data Person f = Person{ 12 | > name :: f String, 13 | > age :: f Int, 14 | > mother, father :: f (Maybe PersonVerified) 15 | > } 16 | > 17 | > instance Show1 f => Show (Person f) where 18 | > show person = "Person{name=" ++ showsPrec1 0 (name person) 19 | > (", age=" ++ showsPrec1 0 (age person) 20 | > (", mother=" ++ showsPrec1 0 (mother person) 21 | > (", father=" ++ showsPrec1 0 (father person) "}"))) 22 | > 23 | > type PersonText = Person (Const String) 24 | > type PersonWithErrors = Person (Either String) 25 | > type PersonVerified = Person Identity 26 | > type PersonDatabase = [PersonVerified] 27 | > 28 | > $(Rank2.TH.deriveAll ''Person) 29 | > 30 | > findPerson :: PersonDatabase -> String -> Maybe PersonVerified 31 | > findPerson db nameToFind = find ((nameToFind ==) . runIdentity . name) db 32 | > 33 | > personByName :: PersonDatabase -> String -> Either String (Maybe PersonVerified) 34 | > personByName db personName 35 | > | null personName = Right Nothing 36 | > | otherwise = maybe (Left $ "Nobody by name of " ++ personName) (Right . Just) 37 | > (findPerson db personName) 38 | > 39 | > personChecker :: PersonDatabase -> Person (Rank2.Arrow (Const String) (Either String)) 40 | > personChecker db = 41 | > Person{name= Rank2.Arrow (Right . getConst), 42 | > age= Rank2.Arrow $ \(Const age)-> 43 | > case reads age 44 | > of [(n, "")] -> Right n 45 | > _ -> Left (age ++ " is not an integer"), 46 | > mother= Rank2.Arrow (personByName db . getConst), 47 | > father= Rank2.Arrow (personByName db . getConst)} 48 | > 49 | > verify :: PersonDatabase -> PersonText -> PersonWithErrors 50 | > verify db person = personChecker db Rank2.<*> person 51 | > 52 | > completeVerified :: PersonWithErrors -> Either String PersonVerified 53 | > completeVerified = Rank2.traverse (Identity <$>) 54 | > 55 | > uncompleteVerified :: PersonVerified -> PersonWithErrors 56 | > uncompleteVerified = Rank2.fmap (Right . 
runIdentity) 57 | > 58 | > verificationErrors :: PersonWithErrors -> [String] 59 | > verificationErrors = Rank2.foldMap (either (:[]) (const [])) 60 | > 61 | > -- | 62 | > -- >>> let Right alice = completeVerified $ verify [] Person{name= Const "Alice", age= Const "44", mother= Const "", father= Const ""} 63 | > -- >>> let Right bob = completeVerified $ verify [] Person{name= Const "Bob", age= Const "45", mother= Const "", father= Const ""} 64 | > -- >>> let Right charlie = completeVerified $ verify [alice, bob] Person{name= Const "Charlie", age= Const "19", mother= Const "Alice", father= Const "Bob"} 65 | > -- >>> charlie 66 | > -- Person{name=Identity "Charlie", age=Identity 19, mother=Identity (Just Person{name=Identity "Alice", age=Identity 44, mother=Identity Nothing, father=Identity Nothing}), father=Identity (Just Person{name=Identity "Bob", age=Identity 45, mother=Identity Nothing, father=Identity Nothing})} 67 | > -- >>> let dave = verify [alice, bob, charlie] Person{name= Const "Eve", age= Const "young", mother= Const "Lise", father= Const "Mike"} 68 | > -- >>> dave 69 | > -- Person{name=Right "Eve", age=Left "young is not an integer", mother=Left "Nobody by name of Lise", father=Left "Nobody by name of Mike"} 70 | > -- >>> completeVerified dave 71 | > -- Left "young is not an integer" 72 | > -- >>> verificationErrors dave 73 | > -- ["young is not an integer","Nobody by name of Lise","Nobody by name of Mike"] 74 | > -- >>> Rank2.distribute [alice, bob, charlie] 75 | > -- Person{name=Compose [Identity "Alice",Identity "Bob",Identity "Charlie"], age=Compose [Identity 44,Identity 45,Identity 19], mother=Compose [Identity Nothing,Identity Nothing,Identity (Just Person{name=Identity "Alice", age=Identity 44, mother=Identity Nothing, father=Identity Nothing})], father=Compose [Identity Nothing,Identity Nothing,Identity (Just Person{name=Identity "Bob", age=Identity 45, mother=Identity Nothing, father=Identity Nothing})]} 76 | -------------------------------------------------------------------------------- /rank2classes/rank2classes.cabal: -------------------------------------------------------------------------------- 1 | name: rank2classes 2 | version: 1.5.4 3 | synopsis: standard type constructor class hierarchy, only with methods of rank 2 types 4 | description: 5 | A mirror image of the standard type constructor class hierarchy rooted in 'Functor', except with methods of rank 2 6 | types and class instances of kind @(k->*)->*@. The classes enable generic handling of heterogeneously typed data 7 | structures and other neat tricks. 
8 | 9 | homepage: https://github.com/blamario/grampa/tree/master/rank2classes 10 | bug-reports: https://github.com/blamario/grampa/issues 11 | license: BSD3 12 | license-file: LICENSE 13 | author: Mario Blažević 14 | maintainer: Mario Blažević 15 | copyright: (c) 2017 Mario Blažević 16 | category: Control, Data, Generics 17 | build-type: Custom 18 | cabal-version: >=1.10 19 | tested-with: GHC==9.12.1, GHC==9.10.1, GHC==9.8.2, GHC==9.6.4, GHC==9.4.8, GHC==9.2.8, GHC==9.0.2, GHC==8.10.7 20 | extra-source-files: README.md, CHANGELOG.md, test/MyModule.lhs 21 | source-repository head 22 | type: git 23 | location: https://github.com/blamario/grampa 24 | custom-setup 25 | setup-depends: 26 | base >= 4 && <5, 27 | Cabal < 4, 28 | cabal-doctest >= 1 && <1.1 29 | 30 | flag use-template-haskell 31 | description: Enable the compilation of the Rank2.TH module 32 | default: True 33 | manual: True 34 | 35 | library 36 | hs-source-dirs: src 37 | exposed-modules: Rank2 38 | default-language: Haskell2010 39 | -- other-modules: 40 | ghc-options: -Wall 41 | build-depends: base >=4.10 && <5, 42 | transformers >= 0.5 && < 0.7, 43 | distributive < 0.7, 44 | data-functor-logistic < 0.1 45 | 46 | if flag(use-template-haskell) 47 | build-depends: template-haskell >= 2.11 && < 2.24 48 | exposed-modules: Rank2.TH 49 | 50 | test-suite doctests 51 | type: exitcode-stdio-1.0 52 | hs-source-dirs: test 53 | default-language: Haskell2010 54 | main-is: Doctest.hs 55 | other-modules: MyModule 56 | ghc-options: -threaded -pgmL markdown-unlit 57 | build-depends: base, rank2classes, doctest >= 0.8 58 | build-tool-depends: markdown-unlit:markdown-unlit >= 0.5 && < 0.6 59 | 60 | test-suite TH 61 | if !flag(use-template-haskell) 62 | buildable: False 63 | type: exitcode-stdio-1.0 64 | hs-source-dirs: test 65 | default-language: Haskell2010 66 | main-is: TH.hs 67 | other-modules: Issue23 68 | ghc-options: -threaded -pgmL markdown-unlit 69 | build-depends: base, rank2classes, distributive < 0.7, 70 | tasty < 2, tasty-hunit < 1, 71 | data-functor-logistic < 0.1 72 | build-tool-depends: markdown-unlit:markdown-unlit >= 0.5 && < 0.6 73 | -------------------------------------------------------------------------------- /rank2classes/test/Doctest.hs: -------------------------------------------------------------------------------- 1 | import Build_doctests (flags, pkgs, module_sources) 2 | import Test.DocTest (doctest) 3 | 4 | main = do doctest (flags ++ pkgs ++ module_sources) 5 | doctest (flags ++ pkgs ++ ["-pgmL", "markdown-unlit", "test/MyModule.lhs"]) 6 | -------------------------------------------------------------------------------- /rank2classes/test/Issue23.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE TemplateHaskell #-} 2 | 3 | module Issue23 (test) where 4 | 5 | import Data.Functor.Identity 6 | import Data.Functor.Classes 7 | import qualified Rank2 8 | import qualified Rank2.TH 9 | 10 | import Test.Tasty (TestTree) 11 | import Test.Tasty.HUnit (testCase, assertEqual) 12 | 13 | data Stm r = Unit | ExpStmt (r Int) (Exp r) 14 | data Exp r = Nil | Cons (r Bool) (Exp r) (Stm r) 15 | 16 | instance Show1 r => Show (Stm r) where 17 | show Unit = "Unit" 18 | show (ExpStmt r e) = "(Stmt (" ++ showsPrec1 0 r (") " ++ show e ++ ")") 19 | instance Show1 r => Show (Exp r) where 20 | show Nil = "Nil" 21 | show (Cons r e s) = 22 | "(Cons (" ++ showsPrec1 0 r (") " ++ show e ++ " " ++ show s ++ ")") 23 | 24 | $(mconcat <$> traverse 25 | (\derive -> mconcat <$> traverse derive [''Stm, 
''Exp]) 26 | [ Rank2.TH.deriveFunctor 27 | , Rank2.TH.deriveFoldable 28 | , Rank2.TH.deriveTraversable 29 | ]) 30 | 31 | expToMaybe :: Exp Identity -> Exp Maybe 32 | expToMaybe = Rank2.fmap (Just . runIdentity) 33 | 34 | maybeToExp :: Exp Maybe -> Maybe (Exp Identity) 35 | maybeToExp = Rank2.traverse (fmap Identity) 36 | 37 | myExp :: Exp Identity 38 | myExp = Cons 39 | (Identity True) 40 | (Cons (Identity False) Nil (ExpStmt (Identity 2) Nil)) 41 | (ExpStmt (Identity 3) (Cons (Identity True) Nil Unit)) 42 | 43 | test :: TestTree 44 | test = testCase "Issue #23" $ do 45 | print myExp 46 | let myExp' = expToMaybe myExp 47 | assertEqual "" (show $ Just myExp) (show $ maybeToExp myExp') 48 | -------------------------------------------------------------------------------- /rank2classes/test/MyModule.lhs: -------------------------------------------------------------------------------- 1 | ../README.md -------------------------------------------------------------------------------- /rank2classes/test/TH.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE KindSignatures, RankNTypes, TemplateHaskell #-} 2 | 3 | import Control.Applicative (liftA2) 4 | import Data.Foldable (fold, foldMap) 5 | import Data.Traversable (traverse) 6 | import Data.Distributive (cotraverse) 7 | import Data.Monoid (Dual, Sum(Sum), getDual) 8 | import Data.Functor.Classes (Eq1, Show1, eq1, showsPrec1) 9 | import Data.Functor.Compose (Compose(Compose)) 10 | import Data.Functor.Identity (Identity(Identity, runIdentity)) 11 | import qualified Issue23 12 | import qualified Rank2 13 | import qualified Rank2.TH 14 | import Test.Tasty 15 | import Test.Tasty.HUnit 16 | 17 | data Test0 (p :: * -> *) = Test0{} deriving (Eq, Show) 18 | 19 | data Test1 p = Test1{single :: p Int, 20 | whole :: Test0 p, 21 | wrapSingle :: Dual (Identity (p String)), 22 | wrapWhole :: Sum (Identity (Test0 p))} 23 | 24 | instance Eq1 p => Eq (Test1 p) where 25 | a == b = single a `eq1` single b 26 | && whole a == whole b 27 | && all (all id) (liftA2 (liftA2 eq1) (wrapSingle a) (wrapSingle b)) 28 | && wrapWhole a == wrapWhole b 29 | 30 | instance Show1 p => Show (Test1 p) where 31 | showsPrec p t s = "Test1{single= " ++ showsPrec1 p (single t) 32 | (", whole= " ++ showsPrec p (whole t) 33 | (", wrapSingle= Dual (Identity (" ++ showsPrec1 p (runIdentity $ getDual $ wrapSingle t) 34 | (")), wrapWhole= " ++ showsPrec p (wrapWhole t) s))) 35 | 36 | $(Rank2.TH.deriveAll ''Test0) 37 | $(Rank2.TH.deriveAll ''Test1) 38 | 39 | main = defaultMain $ testGroup "Template tests" 40 | [ testCase "Simple template test" $ 41 | do let test = Test1{single= [3, 4, 5], 42 | whole= Test0, 43 | wrapSingle= pure (pure ["a", "b", "ab"]), 44 | wrapWhole= pure (pure Test0)} 45 | id Rank2.<$> test @?= test 46 | Rank2.pure (Rank2.Arrow id) Rank2.<*> test @?= test 47 | Rank2.liftA2 (++) test test @?= Test1{single= [3, 4, 5, 3, 4, 5], 48 | whole= Test0, 49 | wrapSingle= pure (pure ["a", "b", "ab", "a", "b", "ab"]), 50 | wrapWhole= pure (pure Test0)} 51 | Rank2.foldMap (Sum . 
length) test @?= Sum 6 52 | Rank2.traverse (map Identity) test @?= [Test1{single= Identity 3, 53 | whole= Test0, 54 | wrapSingle= pure (pure $ Identity "a"), 55 | wrapWhole= pure (pure Test0)}, 56 | Test1{single= Identity 3, 57 | whole= Test0, 58 | wrapSingle= pure (pure $ Identity "b"), 59 | wrapWhole= pure (pure Test0)}, 60 | Test1{single= Identity 3, 61 | whole= Test0, 62 | wrapSingle= pure (pure $ Identity "ab"), 63 | wrapWhole= pure (pure Test0)}, 64 | Test1{single= Identity 4, 65 | whole= Test0, 66 | wrapSingle= pure (pure $ Identity "a"), 67 | wrapWhole= pure (pure Test0)}, 68 | Test1{single= Identity 4, 69 | whole= Test0, 70 | wrapSingle= pure (pure $ Identity "b"), 71 | wrapWhole= pure (pure Test0)}, 72 | Test1{single= Identity 4, 73 | whole= Test0, 74 | wrapSingle= pure (pure $ Identity "ab"), 75 | wrapWhole= pure (pure Test0)}, 76 | Test1{single= Identity 5, 77 | whole= Test0, 78 | wrapSingle= pure (pure $ Identity "a"), 79 | wrapWhole= pure (pure Test0)}, 80 | Test1{single= Identity 5, 81 | whole= Test0, 82 | wrapSingle= pure (pure $ Identity "b"), 83 | wrapWhole= pure (pure Test0)}, 84 | Test1{single= Identity 5, 85 | whole= Test0, 86 | wrapSingle= pure (pure $ Identity "ab"), 87 | wrapWhole= pure (pure Test0)} 88 | ] 89 | Rank2.distribute (Identity test) @?= Test1{single= Compose (Identity [3, 4, 5]), 90 | whole= Test0, 91 | wrapSingle= pure (pure $ Compose $ Identity ["a", "b", "ab"]), 92 | wrapWhole= pure (pure Test0)} 93 | Rank2.cotraverse (take 1 . map runIdentity) (Rank2.traverse (map Identity) test) @?= take 1 Rank2.<$> test, 94 | Issue23.test] 95 | -------------------------------------------------------------------------------- /stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # http://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. For example: 10 | # 11 | # resolver: lts-7.24 12 | # resolver: nightly-2015-09-21 13 | # resolver: ghc-7.10.2 14 | # resolver: ghcjs-0.1.0_ghc-7.10.2 15 | # resolver: 16 | # name: custom-snapshot 17 | # location: "./custom-snapshot.yaml" 18 | # resolver: nightly-2016-09-26 19 | # resolver: ghc-8.0.2 20 | resolver: lts-16.11 21 | 22 | # User packages to be built. 23 | # Various formats can be used as shown in the example below. 24 | # 25 | # packages: 26 | # - some-directory 27 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 28 | # - location: 29 | # git: https://github.com/commercialhaskell/stack.git 30 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 31 | # - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a 32 | # extra-dep: true 33 | # subdirs: 34 | # - auto-update 35 | # - wai 36 | # 37 | # A package marked 'extra-dep: true' will only be built if demanded by a 38 | # non-dependency (i.e. a user package), and its test suites and benchmarks 39 | # will not be run. This is useful for tweaking upstream packages. 
40 | 41 | packages: 42 | - rank2classes/ 43 | - deep-transformations/ 44 | - grammatical-parsers/ 45 | 46 | # Dependency packages to be pulled from upstream that are not in the resolver 47 | # (e.g., acme-missiles-0.3) 48 | extra-deps: 49 | - either-5 50 | - input-parsers-0.1.0.1 51 | - repr-tree-syb-0.1.1 52 | 53 | # Override default flag values for local packages and extra-deps 54 | flags: {} 55 | 56 | # Extra package databases containing global packages 57 | extra-package-dbs: [] 58 | 59 | # Control whether we use the GHC we find on the path 60 | system-ghc: true 61 | 62 | # Require a specific version of stack, using version ranges 63 | # require-stack-version: -any # Default 64 | # require-stack-version: ">=1.1" 65 | # 66 | # Override the architecture used by stack, especially useful on Windows 67 | # arch: i386 68 | # arch: x86_64 69 | # 70 | # Extra directories used by stack for building 71 | # extra-include-dirs: [/path/to/dir] 72 | # extra-lib-dirs: [/path/to/dir] 73 | # 74 | # Allow a newer minor version of GHC than the snapshot specifies 75 | compiler-check: newer-minor --------------------------------------------------------------------------------