├── .github ├── dependabot.yml └── workflows │ └── main.yml ├── .gitignore ├── CHANGES.md ├── LICENSE ├── Makefile ├── README.md ├── dune-project ├── misc ├── backnaur.png ├── html.png ├── htmlcss.png ├── reco.mly ├── simplebnf.png ├── syntax.png └── tabular.png ├── obelisk.opam ├── src ├── ast.mli ├── common.ml ├── dune ├── extendedAst.ml ├── genericPrinter.ml ├── helper.mli ├── helpers │ ├── default.ml │ ├── ebnf.ml │ ├── html.ml │ ├── htmlCss.ml │ ├── latexBacknaur.ml │ ├── latexSimplebnf.ml │ ├── latexSyntax.ml │ ├── latexTabular.ml │ ├── miniHelper.ml │ ├── miniHtml.ml │ └── miniLatex.ml ├── lexer.mll ├── main.ml ├── normalize.ml ├── options.ml ├── parser.mly ├── position.ml ├── printer.mli ├── printers.ml ├── reduce.ml ├── scan.ml ├── specialize.ml ├── subst.ml └── transform.ml └── tests ├── JSParse.mly ├── K3Parser.mly ├── KaSim.4.0.0-kappaParser.mly ├── KaSim.4.0.0-kparser4.mly ├── albatross.mly ├── alphaCaml-demos-interactive.mly ├── alphaCaml-demos-mixins.mly ├── alphaCaml-demos-poplmark.mly ├── alphaCaml.mly ├── alphaprolog.mly ├── amalthea.mly ├── andromeda.mly ├── anonymous-nested.mly ├── anonymous-param-redundant.mly ├── anonymous-param.mly ├── anonymous.mly ├── apron.20160125-apron_parser.mly ├── atd_parser.mly ├── attapl-deptypes.mly ├── attapl-mini.mly ├── attributes-expansion.mly ├── attributes.mly ├── bamboo.0.0.03-parser.mly ├── bare-bones.mly ├── batsh.0.0.6-parser_yacc.mly ├── bauer.mly ├── belloeil.mly ├── bibtex2html.1.99-bibtex_parser.mly ├── bibtex_parser.mly ├── bison-mysterious-conflict.mly ├── bodin.mly ├── bodin_inlined.mly ├── boomerang-bparser.mly ├── boris-mini-no-eos.mly ├── boris-mini.mly ├── bran.mly ├── c.mly ├── c_unambiguous.mly ├── calc-anonymous.mly ├── calc-ast.mly ├── calc-locations.mly ├── calc-never-useful.mly ├── calc-new-syntax.mly ├── calc.mly ├── camltemplate.1.0.2-ctParser.mly ├── camomile.1.0.1-colParser.mly ├── ccss.1.6-parser.mly ├── charrua-client.0.9-dhcp_parser.mly ├── christian.mly ├── cigen-cparser.mly ├── cil-cparser.mly ├── cil-formatparse.mly ├── cime-coq.mly ├── cime-genpoly.mly ├── cime-parameterized-signatures.mly ├── cime-poly-interp.mly ├── cime-poly.mly ├── cime-signature.mly ├── cime-syntax.mly ├── cime-term.mly ├── cime-terms-signature.mly ├── cime-terms.mly ├── cime-theory.mly ├── cime-toplevel.mly ├── cime-word.mly ├── cminor.mly ├── cocci.mly ├── coccinelle.1.0.2-parser_c.mly ├── coccinelle.1.0.2-parser_cocci_menhir.mly ├── coercion-constraint.mly ├── coercion-mini.mly ├── cohttp.1.2.0-accept_parser.mly ├── compcert.mly ├── compcert_pre_parser.mly ├── compcert_pre_parser_canonical.mly ├── compcert_pre_parser_new.mly ├── compsyn.mly ├── condition_parser.mly ├── confluence-fnf.mly ├── confluence-psl.mly ├── confluence.mly ├── couac.mly ├── cppo_ocamlbuild.1.6.0-cppo_parser.mly ├── cubicle.1.1.2-parser.mly ├── cudf.0.9-cudf_type_parser.mly ├── cutdown.mly ├── dario.mly ├── datalog.0.5.2-topDownParser.mly ├── dbforge.2.0.1-sqml_parser.mly ├── debian-formats.0.1.1-DF822_parser.mly ├── dedukti.2.6.0-menhir_parser.mly ├── diy.5.01-ARMParser.mly ├── dml.mly ├── dns_zone_parser.mly ├── doc-ock-xml.1.2.1-docOckXmlParser.mly ├── docOckXmlParser.mly ├── dolmen.0.2-parseDimacs.mly ├── dolmen.0.2-parseDimacs01.mly ├── dolmen.0.2-parseTptp.mly ├── dolmen.0.2-parseTptp01.mly ├── dule.mly ├── dune ├── dune.1.4.0-opamBaseParser.mly ├── duplicated_prec.mly ├── efuns.mly ├── electrod.0.1.7-Parser.mly ├── electrum00.mly ├── electrum01.mly ├── electrum02.mly ├── electrum03.mly ├── electrum04.mly ├── empty-action.mly ├── 
end-of-stream-conflict.mly ├── execparser.mly ├── expansion_ok.mly ├── expansion_unused.mly ├── featherweight.mly ├── fibonacci.mly ├── filiot.mly ├── flowcaml-docgen.mly ├── flowcaml.mly ├── focc-pure-def.mly ├── foo.mly ├── forbid_end.mly ├── fork.mly ├── fp.mly ├── frama-c-base.20170501-logic_parser.mly ├── framac-cparser.mly ├── framac-print_api-grammar.mly ├── fsharp.mly ├── fslexpars.mly ├── fstar.0.9.6.0~alpha1-parse.mly ├── fstar.mly ├── fsyaccpars.mly ├── gdb.0.3-gdbmi_parser.mly ├── gettext.0.3.8-gettextPo_parser.mly ├── godiva.mly ├── gromit.mly ├── grune924.mly ├── heptagon.1.05.00-hept_parser.mly ├── herdtools7.7.51-AArch64Parser.mly ├── herdtools7.7.51-ARMParser.mly ├── herdtools7.7.51-CParser.mly ├── herdtools7.7.51-LISAParser.mly ├── herdtools7.7.51-PPCParser.mly ├── herdtools7.7.51-RISCVParser.mly ├── herdtools7.7.51-X86Parser.mly ├── herdtools7.7.51-modelParser.mly ├── herdtools7.7.51-stateParser.mly ├── hmx-multi.mly ├── hmx-sets.mly ├── ho.mly ├── htparser.mly ├── hydro.0.7.1-hgen_parser.mly ├── ibal.mly ├── ics.mly ├── idl_parser.mly ├── ilpars.mly ├── imaplet-lwt.0.1.9-parser.mly ├── infinite.mly ├── inline-multi-level.mly ├── inline-multi-sons.mly ├── inline-position.mly ├── inline-rename.mly ├── inline-shift.mly ├── inline-test.mly ├── inline-with-dollar.mly ├── inlined-dollar.mly ├── inlining-capture.mly ├── inliningWithSideEffects.mly ├── issue21_longer.mly ├── issue21_shorter.mly ├── jasmc.mly ├── java.mly ├── jingoo.1.2.9-jg_parser.mly ├── jml.mly ├── jocaml-ambients.mly ├── jocaml-new-parser.mly ├── jocaml.mly ├── jparser.mly ├── js_of_ocaml.3.2.0-js_parser.mly ├── judicael.mly ├── julia.mly ├── ketti.mly ├── kimmit.mly ├── kremlin.0.9.6.0-parser.mly ├── labltk-compiler.mly ├── labltk-ppyac.mly ├── labrys.0.1-parser.mly ├── lambdapi.1.0-menhir_parser.mly ├── lambdoc_rlambtex_parser.mly ├── ldap.2.4.0-ldap_filterparser.mly ├── lem.mly ├── liberty_parser.mly ├── link.mly ├── links.0.8-jsonparse.mly ├── links.0.8-parser.mly ├── links.0.8-xmlParser.mly ├── links.mly ├── liquidsoap.1.3.4-lang_parser.mly ├── llparse.mly ├── logic_parser.mly ├── logtk.0.8.1-parse_theory.mly ├── loop.mly ├── lr-but-not-lalr.mly ├── ltlparser.mly ├── lustre-v6.1.737-lv6parser.mly ├── lustreParser.mly ├── lutin.2.56-lutParser.mly ├── lutin.mly ├── macrodown.mly ├── maple.mly ├── mcc-fc.mly ├── mcc-fj.mly ├── mcc-frontends.mly ├── mcc-mc.mly ├── mcc-moogle.mly ├── mcc-naml.mly ├── mcc-pascal.mly ├── mcc-pasqual.mly ├── mcc-phobos.mly ├── mcc-prof.mly ├── mcc-python.mly ├── memcad.1.0.0-mc_parser.mly ├── menhir.mly ├── metaocaml.mly ├── mezzo.mly ├── mezzo_canonical.mly ├── mezzo_inclusion_only.mly ├── miniCparser.mly ├── miniMLparser.mly ├── mlpost.0.8.2-pfb_parser.mly ├── modelica.mly ├── modelyze.mly ├── modulo.mly ├── morbig.0.9.1-parser.mly ├── mpri.mly ├── multi-token-alias-0.mly ├── multi-token-alias-1.mly ├── multi-token-alias-2.mly ├── multi-token-alias-3.mly ├── multiple-functor.mly ├── name-clash-1.mly ├── name-clash-2.mly ├── netsem.mly ├── nexus.mly ├── nml-ip-parser.mly ├── no-end-of-stream-conflict.mly ├── no_future.mly ├── nunchaku.0.6-Parser.mly ├── nunchaku.0.6-TPTP_parser.mly ├── nunchaku.0.6-Tip_parser.mly ├── obc-sym.mly ├── obc.mly ├── obelisk.0.3.2-zelus.mly ├── ocaml-base-compiler.4.07.1-parsecmm.mly ├── ocaml-debugger.mly ├── ocaml-lex.mly ├── ocaml-protoc.1.2.0-pb_parsing_parser.mly ├── ocaml-test-Lex.mly ├── ocaml-testasmcomp-cmm.mly ├── ocaml.mly ├── ocaml_parser_menhir.mly ├── ocamldoc-odoc.mly ├── ocamldoc-odoc_text.mly ├── ocamlweb-yacc.mly ├── 
ocapic.3.3-parser.mly ├── oclisp.mly ├── octavius.1.2.0-octParser.mly ├── odds.1.0-parser.mly ├── oflux.mly ├── ojacare.mly ├── ollvm.0.99-ollvm_parser.mly ├── omake.0.9.8.7-omake_shell_parse.mly ├── omake_ast_parse.mly ├── omake_shell_parse.mly ├── on-error-reduce-inlined.mly ├── on-error-reduce-unreachable.mly ├── opam-doc.0.9.3-info_parser.mly ├── option2.mly ├── options.mly ├── orpie.1.6.0-txtin_parser.mly ├── osdp.0.6.0-pa_parser.mly ├── pager.mly ├── pair-new-syntax.mly ├── parameterized-nonterminal.mly ├── parse_xquery.mly ├── parser_cocci_menhir.mly ├── parser_cpp.mly ├── parser_css.mly ├── parser_java.mly ├── parser_js.mly ├── parser_php.mly ├── parser_raw.mly ├── partest.mly ├── permutation-growth.mly ├── permutation.mly ├── petit-1.mly ├── petit-2.mly ├── petit_kotlin.mly ├── photos.mly ├── phpparser.mly ├── pippijn.mly ├── pre_parser.mly ├── prec_inline.mly ├── private-and-public-1.mly ├── private-and-public-2.mly ├── private-and-public-3.mly ├── promelaparser.mly ├── promelaparser_withexps.mly ├── prooflang.mly ├── proverif.2.00-parser.mly ├── proverif.2.00-pitparser.mly ├── psmt2-frontend.0.1-smtlib_parser.mly ├── public-1.mly ├── public-2.mly ├── public-inline-1.mly ├── public-inline-2.mly ├── pxp-m2parsergen.mly ├── pxp-ucs2_to_utf8.mly ├── rdf_sparql_parser.mly ├── reason.3.3.7-reason_parser.mly ├── reason_parser.mly ├── regstab.2.0.0-parser.mly ├── rfsm.1.0-main_parser.mly ├── rml.1.09.05-parser.mly ├── ruby18_parser.mly ├── sage.mly ├── scilabParser.mly ├── self.mly ├── self_ground.mly ├── sibylfs-lem.0.4.0-parser.mly ├── simnml.mly ├── simple-if-conflict-no-eos.mly ├── simple-if-conflict.mly ├── spike.mly ├── split-public-symbol-with-renaming.mly ├── split-public-symbol.mly ├── sqlgg.0.4.5-sql_parser.mly ├── statverif.1.97pl1.1-piparser.mly ├── subiso.mly ├── subtle-conflict-pager.mly ├── swdogen.0.1.0-swgparser.mly ├── symbolstartofs.mly ├── symbolstartpos.mly ├── sysver.mly ├── talparser.mly ├── test.ml ├── tilde-used-warning.mly ├── tony-rc_parse.mly ├── tony-xmlparse.mly ├── touist.3.5.0-parser.mly ├── toy.mly ├── tptp2cime.mly ├── typage.mly ├── typed-freshml.mly ├── undetermined_sort.mly ├── unigram.mly ├── unreachable-symbol.mly ├── unused-attribute.mly ├── unused-pseudo-token.mly ├── unused-token1.mly ├── unused-value.mly ├── uppercase.mly ├── useless-priorities.mly ├── useless-pseudo-token.mly ├── verilog.mly ├── wallace.mly ├── wasm.1.0-parser.mly ├── webidl.1.4-parser00.mly ├── webidl.1.4-parser01.mly ├── webidl.1.4-parser03.mly ├── webidl.1.4-parser04.mly ├── why-clparser.mly ├── why-cparser.mly ├── why3.1.1.0-py_parser.mly ├── why3.mly ├── xpath_parser.mly ├── yann.mly ├── yaparser.mly ├── zelus.mly ├── zenon.0.8.4-parsecoq.mly ├── zenon.mly ├── zipperposition.1.5-Parse_dk.mly ├── zipperposition.1.5-Parse_tptp.mly ├── zipperposition.1.5-Parse_zf.mly ├── zipperposition.1.5-Tip_parser.mly ├── zyvaall.mly └── zyvaflo.mly /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: github-actions 4 | directory: / 5 | schedule: 6 | interval: weekly 7 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: Build and test 2 | 3 | on: 4 | - pull_request 5 | - push 6 | 7 | jobs: 8 | build: 9 | strategy: 10 | fail-fast: false 11 | matrix: 12 | ocaml-compiler: 13 | - "5" 14 | - "4" 15 | - "4.08" 16 | 17 | runs-on: ubuntu-latest 
18 | 19 | steps: 20 | - name: Install LaTeX 21 | run: | 22 | sudo apt-get update 23 | sudo apt-get install texlive texlive-latex-extra texlive-science 24 | 25 | - name: Checkout tree 26 | uses: actions/checkout@v4 27 | 28 | - name: Set-up OCaml 29 | uses: ocaml/setup-ocaml@v3 30 | with: 31 | ocaml-compiler: ${{ matrix.ocaml-compiler }} 32 | 33 | - run: opam install . --deps-only --with-test 34 | - run: opam exec -- dune build 35 | - run: opam exec -- dune runtest 36 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | _build/ 2 | *.install 3 | *.merlin 4 | 5 | # latex 6 | *.tex 7 | *.aux 8 | *.fdb_latexmk 9 | *.fls 10 | *.log 11 | *.pdf 12 | auto 13 | -------------------------------------------------------------------------------- /CHANGES.md: -------------------------------------------------------------------------------- 1 | ## v0.8.1 - 2025-03-24 2 | - support attributes in semantic actions to comply with Menhir v20231231 ([#15](https://github.com/Lelio-Brun/Obelisk/issues/15)) 3 | 4 | ## v0.8.0 - 2025-03-08 5 | - add lower bound for Menhir version 6 | - update LaTeX backends 7 | - add a new LaTeX mode using `simplebnf` 8 | - remove `suffix` dependency in favor of `xparse` 9 | 10 | ## v0.7.0 - 2024-07-03 11 | - support for EBNF output ([#14](https://github.com/Lelio-Brun/Obelisk/issues/14)) 12 | - rewriting of the printers 13 | - switch to Github Actions for CI 14 | 15 | ## v0.6.0 - 2021-02-09 16 | This version adds support for the Menhir standard rules `endrule`, `midrule`, `rev`, `flatten` and `append`. 17 | 18 | ## v0.5.2 - 2020-05-30 19 | This patch fixes a mismatch in version number. 20 | 21 | ## v0.5.1 - 2020-05-30 22 | This patch fixes [#10](https://github.com/Lelio-Brun/Obelisk/issues/10) (`noempty_list` was not recognized) and [#12](https://github.com/Lelio-Brun/Obelisk/issues/12), thanks to the contribution of @zapashcanon (add a switch to disable the use of CSS content properties in HTML mode, to make e.g., content copy-pastable). 23 | 24 | ## v0.5.0 - 2020-04-28 25 | This version implements several important changes: 26 | - drop `ocamlbuild` in favor of `dune` 27 | - drop API-doc style documentation (irrelevant) 28 | - fix break hints after epsilons 29 | - use `\lit` command for literals in `syntax` mode 30 | - change the name of the grammar environment to `obeliskgrammar` in LaTeX modes 31 | - use `re` library instead of `str` 32 | - add support for token aliases, with a dedicated option `-noaliases` 33 | - add support for the new syntax of Menhir rules (fixes issue [#9](https://github.com/Lelio-Brun/Obelisk/issues/9)) 34 | - fix some lexing and parsing bugs (in particular with Ocaml code and strings in prologue and semantic actions) thanks to the added test benches of Menhir 35 | 36 | ## v0.4.0 - 2019-03-01 37 | This version fixes issue [#8](https://github.com/Lelio-Brun/Obelisk/issues/8), corrects some parentheses related additional bugs and uses the new OPAM 2.0 format. 38 | 39 | ## v0.3.2 - 2018-04-25 40 | This patch is a minor fix to 0.3.1: tests in LaTeX mode are now conditionally guarded on the presence of `pdflatex`` in the PATH. 41 | 42 | ## v0.3.1 - 2018-04-23 43 | This patch fixes [#7](https://github.com/Lelio-Brun/Obelisk/issues/7): in LaTeX mode, if no prefix is specified throug `-prefix` option, `end`-beginning commands are automatically prefixed with `zzz`. 
44 | 45 | ## v0.3.0 - 2017-08-17 46 | This release fixes [#4](https://github.com/Lelio-Brun/Obelisk/issues/4), [#5](https://github.com/Lelio-Brun/Obelisk/issues/5) and [#6](https://github.com/Lelio-Brun/Obelisk/issues/6). 47 | See [#2](https://github.com/Lelio-Brun/Obelisk/issues/2) (comments). 48 | 49 | 1. in LaTeX modes, macros are now generated for terminals, non-terminals and functionals ; 50 | 2. those macros are now defined using the generated generic macros for each class of syntactic construction ; 51 | 3. `longtabu` replaces `tabu` in tabular mode. 52 | 53 | Also functionals are now surrounded by `<` and `>` in all output formats. 54 | 55 | ## v0.2.0 - 2017-07-21 56 | This release fixes [#2](https://github.com/Lelio-Brun/Obelisk/issues/2): 57 | 58 | 1. the option `-package` in LaTeX mode allows the user to specify an additional output file (.sty) to gather the created macros ; 59 | 2. all created macros in LaTeX mode can be prefixed with a prefix specified with the option `-prefix`. 60 | 61 | ## v0.1.1 - 2017-07-06 62 | Remove remaining unneeded *Roman* parts. 63 | 64 | ## v0.1.0 - 2017-07-06 65 | First release. 66 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Lélio Brun 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | SRC=src 2 | MISC=misc 3 | PARSER=$(SRC)/parser.mly 4 | RECO=$(MISC)/reco.mly 5 | IMAGES=tabular simplebnf syntax backnaur 6 | PREFIX=my 7 | MAIN=main 8 | EXE=dune exec $(SRC)/main.exe -- 9 | 10 | .PHONY: publish all latex htmlcss html default ebnf reco readme tests clean 11 | 12 | all: 13 | @dune build 14 | 15 | publish: 16 | @dune-release tag 17 | @dune-release distrib 18 | @dune-release publish 19 | @dune-release opam pkg 20 | @dune-release opam submit 21 | 22 | %.tex: 23 | @$(EXE) latex -prefix $(PREFIX) -$* $(PARSER) -o $@ 24 | 25 | %.pdf: %.tex 26 | pdflatex -interaction batchmode $< 27 | 28 | %.png: %.pdf 29 | @magick convert -quiet -density 150 $< -format png $(MISC)/$@ 30 | @rm -f $*.tex $< $*.aux $*.log 31 | 32 | latex: $(IMAGES:%=%.png) 33 | 34 | htmlcss: 35 | @$(EXE) html $(PARSER) -o test.html 36 | @wkhtmltoimage -f png --width 800 test.html $(MISC)/htmlcss.png 37 | @rm -f test.html 38 | 39 | html: 40 | @$(EXE) html -nocss $(PARSER) -o test.html 41 | @wkhtmltoimage -f png --width 800 test.html $(MISC)/html.png 42 | @rm -f test.html 43 | 44 | default: 45 | @printf "\nDefault output on $(PARSER):\n" 46 | @$(EXE) $(PARSER) 47 | 48 | ebnf: 49 | @printf "\nEBNF output on $(PARSER):\n" 50 | @$(EXE) ebnf $(PARSER) 51 | 52 | reco: 53 | @printf "Default output on $(RECO):\n" 54 | @$(EXE) $(RECO) 55 | @printf "Default output on $(RECO) with '-i' switch:\n" 56 | @$(EXE) -i $(RECO) 57 | @printf "EBNF output on $(RECO):\n" 58 | @$(EXE) ebnf $(RECO) 59 | @printf "EBNF output on $(RECO) with '-i' switch:\n" 60 | @$(EXE) ebnf -i $(RECO) 61 | 62 | readme: latex htmlcss html default ebnf reco 63 | 64 | tests: 65 | @dune test 66 | 67 | clean: 68 | @dune clean 69 | -------------------------------------------------------------------------------- /dune-project: -------------------------------------------------------------------------------- 1 | (lang dune 2.7) 2 | 3 | (name obelisk) 4 | (version 0.8.1) 5 | 6 | (using menhir 2.0) 7 | (generate_opam_files true) 8 | 9 | (source (github Lelio-Brun/obelisk)) 10 | (homepage https://github.com/Lelio-Brun/Obelisk) 11 | (documentation https://github.com/Lelio-Brun/Obelisk/blob/master/README.md) 12 | (license MIT) 13 | (authors "Lélio Brun") 14 | (maintainers "Lélio Brun ") 15 | 16 | (package 17 | (name obelisk) 18 | (synopsis "Pretty-printing for Menhir files") 19 | (description "Obelisk is a simple tool which produces pretty-printed output from a Menhir parser file (.mly).\n\ 20 | It is inspired from yacc2latex and is also written in OCaml, but is aimed at supporting features from Menhir instead of only those of ocamlyacc.") 21 | (depends 22 | (ocaml (>= 4.08)) 23 | (menhir (>= "20190613")) 24 | (re (>= 1.7.2)))) 25 | -------------------------------------------------------------------------------- /misc/backnaur.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/misc/backnaur.png -------------------------------------------------------------------------------- /misc/html.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/misc/html.png -------------------------------------------------------------------------------- 
/misc/htmlcss.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/misc/htmlcss.png -------------------------------------------------------------------------------- /misc/reco.mly: -------------------------------------------------------------------------------- 1 | %% 2 | my_option(X, Y): 3 | | {} 4 | | Y X {} 5 | 6 | my_list(A): 7 | | {} 8 | | A my_list(A) {} 9 | 10 | my_nonempty_list(C): 11 | | C {} 12 | | C my_nonempty_list(C) {} 13 | 14 | my_separated_nonempty_list(X,Y): 15 | | X {} 16 | | X Y my_separated_nonempty_list(X,Y) {} 17 | 18 | my_separated_list(X,S): 19 | | {} 20 | | my_separated_nonempty_list(X,S) {} 21 | 22 | my_rule: 23 | | my_option(E, F) {} 24 | | my_list(E) {} 25 | | my_nonempty_list(F) {} 26 | | my_separated_nonempty_list(E,S1) {} 27 | | my_separated_list(F,S2) {} 28 | -------------------------------------------------------------------------------- /misc/simplebnf.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/misc/simplebnf.png -------------------------------------------------------------------------------- /misc/syntax.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/misc/syntax.png -------------------------------------------------------------------------------- /misc/tabular.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/misc/tabular.png -------------------------------------------------------------------------------- /obelisk.opam: -------------------------------------------------------------------------------- 1 | # This file is generated by dune, edit dune-project instead 2 | opam-version: "2.0" 3 | version: "0.8.1" 4 | synopsis: "Pretty-printing for Menhir files" 5 | description: """ 6 | Obelisk is a simple tool which produces pretty-printed output from a Menhir parser file (.mly). 7 | It is inspired from yacc2latex and is also written in OCaml, but is aimed at supporting features from Menhir instead of only those of ocamlyacc.""" 8 | maintainer: ["Lélio Brun "] 9 | authors: ["Lélio Brun"] 10 | license: "MIT" 11 | homepage: "https://github.com/Lelio-Brun/Obelisk" 12 | doc: "https://github.com/Lelio-Brun/Obelisk/blob/master/README.md" 13 | bug-reports: "https://github.com/Lelio-Brun/obelisk/issues" 14 | depends: [ 15 | "dune" {>= "2.7"} 16 | "ocaml" {>= "4.08"} 17 | "menhir" {>= "20190613"} 18 | "re" {>= "1.7.2"} 19 | "odoc" {with-doc} 20 | ] 21 | build: [ 22 | ["dune" "subst"] {dev} 23 | [ 24 | "dune" 25 | "build" 26 | "-p" 27 | name 28 | "-j" 29 | jobs 30 | "@install" 31 | "@runtest" {with-test} 32 | "@doc" {with-doc} 33 | ] 34 | ] 35 | dev-repo: "git+https://github.com/Lelio-Brun/obelisk.git" 36 | -------------------------------------------------------------------------------- /src/ast.mli: -------------------------------------------------------------------------------- 1 | (** The raw Abstract Syntax Tree for grammars. *) 2 | 3 | (** The grammar. *) 4 | type spec = rule list 5 | 6 | (** The rules. *) 7 | and rule = { 8 | name: string; (** The left-hand side of the rule. *) 9 | params: string list; (** The possible list of parameters. 
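It is empty when the rule takes no parameter.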
*) 10 | groups: group list (** The right-hand side of the rule. *) 11 | } 12 | 13 | (** The group of productions. *) 14 | and group = production list 15 | 16 | (** The productions. *) 17 | and production = actual list 18 | 19 | (** The actuals. *) 20 | and actual = 21 | | Symbol of string * actual list (** A possibly applied symbol. *) 22 | | Modifier of actual * modifier (** A "modified" actual. *) 23 | | Anonymous of group list (** An anonymous rule. *) 24 | 25 | (** The modifiers. *) 26 | and modifier = 27 | | Opt (** optionnal *) 28 | | Plus (** non-empty list *) 29 | | Star (** list *) 30 | -------------------------------------------------------------------------------- /src/dune: -------------------------------------------------------------------------------- 1 | (executable 2 | (name main) 3 | (public_name obelisk) 4 | (package obelisk) 5 | (modules_without_implementation ast helper printer) 6 | (libraries re)) 7 | 8 | (include_subdirs unqualified) 9 | 10 | (ocamllex lexer) 11 | 12 | (menhir 13 | (modules parser)) 14 | -------------------------------------------------------------------------------- /src/extendedAst.ml: -------------------------------------------------------------------------------- 1 | (** The simplified Abstract Syntax Tree for grammars. 2 | 3 | Compared to {!Ast}, two differences: 4 | + groups are sort of "flattened", leaving only productions. 5 | + Patterns of the standard library of Menhir are explicit. *) 6 | 7 | (** The grammar. *) 8 | type spec = rule list 9 | 10 | (** The rules. *) 11 | and rule = { 12 | name: string; (** The left-hand side of the rule.*) 13 | params: string list; (** The possible list of parameters. *) 14 | prods: production list (** The right-hand side of the rule. *) 15 | } 16 | 17 | (** The productions. *) 18 | and production = actual list 19 | 20 | (** The actuals. *) 21 | and actual = 22 | | Symbol of string * actual list (** A possibly applied symbol. *) 23 | | Pattern of pattern (** A pattern from the standard library. *) 24 | | Modifier of actual * modifier (** A "modified" actual. *) 25 | | Anonymous of production list (** An anonymous rule. *) 26 | 27 | (** The modifiers. *) 28 | and modifier = 29 | | Opt (** optionnal *) 30 | | Plus (** non-empty list *) 31 | | Star (** list *) 32 | 33 | (** The patterns from the standard library. 
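Each constructor records the actuals the corresponding Menhir symbol is applied to; for example [SepNEList (sep, x)] stands for [separated_nonempty_list(sep, x)].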
*) 34 | and pattern = 35 | | Option of actual (** [option(x)] *) 36 | | Pair of actual * actual (** [pair(x, y)] *) 37 | | SepPair of actual * actual * actual (** [separated_pair(x, sep, y)] *) 38 | | Preceded of actual * actual (** [preceded(opening, x)] *) 39 | | Terminated of actual * actual (** [terminated(x, closing)] *) 40 | | Delimited of actual * actual * actual (** [delimited(opening, x, closing)] *) 41 | | List of actual (** [list(x)] *) 42 | | NEList of actual (** [nonempty_list(x)] *) 43 | | SepList of actual * actual (** [separated_list(x)] *) 44 | | SepNEList of actual * actual (** [separated_nonempty_list(sep, x)] *) 45 | 46 | let fold_map_pattern f acc = function 47 | | Option x -> 48 | let acc, x = f acc x in 49 | acc, Option x 50 | | Pair (x, y) -> 51 | let acc, x = f acc x in 52 | let acc, y = f acc y in 53 | acc, Pair (x, y) 54 | | SepPair (x, sep, y) -> 55 | let acc, x = f acc x in 56 | let acc, sep = f acc sep in 57 | let acc, y = f acc y in 58 | acc, SepPair (x, sep, y) 59 | | Preceded (o, x) -> 60 | let acc, o = f acc o in 61 | let acc, x = f acc x in 62 | acc, Preceded (o, x) 63 | | Terminated (x, c) -> 64 | let acc, x = f acc x in 65 | let acc, c = f acc c in 66 | acc, Terminated (x, c) 67 | | Delimited (o, x, c) -> 68 | let acc, o = f acc o in 69 | let acc, x = f acc x in 70 | let acc, c = f acc c in 71 | acc, Delimited (o, x, c) 72 | | List x -> 73 | let acc, x = f acc x in 74 | acc, List x 75 | | NEList x -> 76 | let acc, x = f acc x in 77 | acc, NEList x 78 | | SepList (sep, x) -> 79 | let acc, sep = f acc sep in 80 | let acc, x = f acc x in 81 | acc, SepList (sep, x) 82 | | SepNEList (sep, x) -> 83 | let acc, sep = f acc sep in 84 | let acc, x = f acc x in 85 | acc, SepNEList (sep, x) 86 | 87 | let map_pattern f p = fold_map_pattern (fun o p -> o, f p) None p |> snd 88 | -------------------------------------------------------------------------------- /src/helper.mli: -------------------------------------------------------------------------------- 1 | (** The generic signature for the printer helpers. *) 2 | 3 | open Common 4 | open Format 5 | 6 | (** Printed at the beginning of the output. *) 7 | val print_header: Symbols.t -> formatter -> unit 8 | 9 | (** Printed at the end of the output. *) 10 | val print_footer: formatter -> unit 11 | 12 | (** Print an escaped string. *) 13 | val print_string: formatter -> string -> unit 14 | 15 | (** [print_rule_name print_params fmt x] prints the left-hand side of a rule where 16 | [x] is the name of the defined non terminal and [print_params] the function 17 | to print the optional parameters. *) 18 | val print_rule_name: (formatter -> unit) -> formatter -> string -> unit 19 | 20 | (** Print a rule parmater. *) 21 | val print_param: formatter -> string -> unit 22 | 23 | (** Printed at the beginning of each rule. *) 24 | val rule_begin: formatter -> unit 25 | 26 | (** Printed at the end of each rule. *) 27 | val rule_end: formatter -> unit 28 | 29 | (** Printed at the beginning of each production. *) 30 | val production_begin: formatter -> unit 31 | 32 | (** Printed at the end of each production. *) 33 | val production_end: formatter -> unit 34 | 35 | (** [print_symbol symbols print_params fmt s] prints the symbol [s] and 36 | its parameters thanks to [print_params]. A different formatting is possible 37 | accordingly to [s] being a terminal, a non terminal or 38 | a functional non terminal. 
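For instance, the default plain-text helper delimits non terminals with [<] and [>].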
*) 39 | val print_symbol: Symbols.t -> (formatter -> unit) -> formatter -> string -> unit 40 | 41 | (** [print_sep_list e nonempty print_sep print_x fmt] prints the possibly non empty 42 | separated list [separated[_nonempty]_list(sep, x)] where [sep] and [x] are 43 | respectively printed by [print_sep] and [print_x]. If [e] is [true] then 44 | the result is parenthesized. *) 45 | val print_sep_list: bool -> bool -> (formatter -> unit) -> (formatter -> unit) -> formatter -> unit 46 | 47 | (** To print a possibly parenthesized optional. *) 48 | val opt: bool -> (formatter -> unit) -> formatter -> unit 49 | 50 | (** To print a possibly parenthesized non empty list. *) 51 | val plus: bool -> (formatter -> unit) -> formatter -> unit 52 | 53 | (** To print a possibly parenthesized list. *) 54 | val star: bool -> (formatter -> unit) -> formatter -> unit 55 | 56 | (** The rule definition symbol. *) 57 | val def: formatter -> unit 58 | 59 | (** The bar at the start of each alternative production. *) 60 | val prod_bar: formatter -> unit 61 | 62 | (** The bar for the anonymous rules. *) 63 | val bar: formatter -> unit 64 | 65 | (* (\** The optionally parenthesizing function. *\) 66 | * val par: bool -> (unit -> unit) -> unit *) 67 | 68 | (** The space. *) 69 | val space: formatter -> unit 70 | 71 | (** The line break. *) 72 | val break: formatter -> unit 73 | 74 | (** The empty word epsilon. *) 75 | val eps: formatter -> unit 76 | -------------------------------------------------------------------------------- /src/helpers/default.ml: -------------------------------------------------------------------------------- 1 | open Format 2 | 3 | include MiniHelper 4 | 5 | let print_header _ _ = () 6 | let print_footer fmt = fprintf fmt "@." 7 | 8 | let def fmt = fprintf fmt " ::= @[" 9 | let prod_bar fmt = pp_print_string fmt "| " 10 | let bar fmt = fprintf fmt "@ |@ " 11 | let space fmt = fprintf fmt "@ " 12 | let break fmt = fprintf fmt "@;" 13 | let eps fmt = pp_print_string fmt "epsilon" 14 | 15 | let print_rule_name = 16 | print_rule_name_with (print_string' "<") (print_string' ">") 17 | 18 | let rule_begin _ = () 19 | let rule_end fmt = 20 | fprintf fmt "@]@;" 21 | 22 | let print_symbol symbols = 23 | print_symbol_aux symbols (print_string' "<") (print_string' ">") 24 | 25 | let opt _ print = enclose print (print_string' "[") (print_string' "]") 26 | let plus e print fmt = fprintf fmt "%t%a" (par e print) print_string "+" 27 | let star e print fmt = fprintf fmt "%t%a" (par e print) print_string "*" 28 | 29 | let print_sep_list e nonempty print_sep print_x fmt = 30 | let print fmt = 31 | fprintf fmt "%t%t%t" 32 | print_x space 33 | (star true (fun fmt -> fprintf fmt "%t%t%t" print_sep space print_x)) 34 | in 35 | (if nonempty then par e else opt false) print fmt 36 | -------------------------------------------------------------------------------- /src/helpers/ebnf.ml: -------------------------------------------------------------------------------- 1 | open Format 2 | 3 | include MiniHelper 4 | 5 | let print_header _ _ = () 6 | let print_footer fmt = fprintf fmt "@." 
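(* Compared to the default helper, the EBNF helper prints the empty word as the empty string and renders options and lists with the postfix operators ?, + and *. *)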
7 | 8 | let def fmt = fprintf fmt " ::= @[" 9 | let prod_bar fmt = pp_print_string fmt "| " 10 | let bar fmt = fprintf fmt "@ |@ " 11 | let space fmt = fprintf fmt "@ " 12 | let break fmt = fprintf fmt "@;" 13 | let eps fmt = pp_print_string fmt "" 14 | 15 | let print_rule_name = 16 | print_rule_name_with (fun _ -> ()) (fun _ -> ()) 17 | 18 | let rule_begin _ = () 19 | let rule_end fmt = 20 | fprintf fmt "@]@;" 21 | 22 | let print_symbol symbols = 23 | print_symbol_aux symbols (fun _ -> ()) (fun _ -> ()) 24 | 25 | let opt e print fmt = fprintf fmt "%t%a" (par e print) print_string "?" 26 | let plus e print fmt = fprintf fmt "%t%a" (par e print) print_string "+" 27 | let star e print fmt = fprintf fmt "%t%a" (par e print) print_string "*" 28 | 29 | let print_sep_list e nonempty print_sep print_x fmt = 30 | let print fmt = 31 | fprintf fmt "%t%t%t" 32 | print_x space 33 | (star true (fun fmt -> fprintf fmt "%t%t%t" print_sep space print_x)) 34 | in 35 | (if nonempty then par e else opt true) print fmt 36 | -------------------------------------------------------------------------------- /src/helpers/html.ml: -------------------------------------------------------------------------------- 1 | open Format 2 | 3 | include MiniHtml 4 | 5 | let print_header _ fmt = 6 | fprintf fmt 7 | "@[@;\ 8 | @[@;\ 9 | @[@;\ 10 | Grammar@;\ 11 | @[\ 40 | @]@;@;@;\ 41 | @[@;@;\ 42 | @[@;@;" 43 | 44 | let nonterminal_before = "<" 45 | let nonterminal_after = ">" 46 | let option_before = "[" 47 | let option_after = "]" 48 | let list_after = "*" 49 | let ne_list_after = "+" 50 | let rule_def = "::=" 51 | 52 | let def fmt = fprintf fmt " %s @;@;@;
" rule_def 53 | let prod_bar fmt = 54 | fprintf fmt "@[
" 55 | 56 | let print_rule_name = 57 | print_rule_name_with 58 | (fun fmt -> fprintf fmt "%s" nonterminal_before) 59 | (fun fmt -> fprintf fmt "%s" nonterminal_after) 60 | 61 | let print_symbol symbols = 62 | print_symbol_aux symbols 63 | (fun fmt -> fprintf fmt "%s" nonterminal_before) 64 | (fun fmt -> fprintf fmt "%s" nonterminal_after) 65 | 66 | let opt _ print fmt = 67 | fprintf fmt "%a%a%t%a%a" 68 | print_string option_before 69 | print_string "" 70 | print 71 | print_string "" 72 | print_string option_after 73 | 74 | let plus e print fmt = 75 | fprintf fmt "%a%t%a%a%a%a" 76 | print_string "" 77 | (par e print) 78 | print_string "" 79 | print_string "" 80 | print_string ne_list_after 81 | print_string "" 82 | let star e print fmt = 83 | fprintf fmt "%a%t%a%a%a%a" 84 | print_string "" 85 | (par e print) 86 | print_string "" 87 | print_string "" 88 | print_string list_after 89 | print_string "" 90 | -------------------------------------------------------------------------------- /src/helpers/htmlCss.ml: -------------------------------------------------------------------------------- 1 | open Format 2 | 3 | include MiniHtml 4 | 5 | let print_header _ fmt = 6 | fprintf fmt 7 | "@[@;\ 8 | @[@;\ 9 | @[@;\ 10 | Grammar@;\ 11 | @[\ 60 | @]@;@;@;\ 61 | @[@;@;\ 62 | @[@;@;" 63 | 64 | let def fmt = fprintf fmt "@;@;@;@;" 51 | let rule_end fmt = 52 | fprintf fmt "@;@;" 53 | 54 | let production_begin _ = () 55 | let production_end fmt = 56 | fprintf fmt "@]@;" 57 | 58 | let print_sep_list e nonempty print_sep print_x fmt = 59 | par e (fun fmt -> 60 | fprintf fmt "%t%a%a%t%a" 61 | print_x 62 | print_string (if nonempty then "+" else "*") 63 | print_string "" 64 | print_sep 65 | print_string "") 66 | fmt 67 | -------------------------------------------------------------------------------- /src/main.ml: -------------------------------------------------------------------------------- 1 | (** The main driver of the application. 2 | 3 | Once parsed the grammars are concatenated into one. 4 | 5 | The resulting grammar is scanned ({!Scan}) to get back the symbols then 6 | normalized ({!Normalize}) and transformed ({!Transform}) to a simpler form. 7 | Finally, after an optional pass of pattern-recognition ({!Reduce}) it is 8 | printed ({!Printers}). 
*) 9 | 10 | open List 11 | open Position 12 | open Format 13 | open Options 14 | 15 | (** @return the lexer buffers, a printer chosen from the according passed 16 | options and a function to finally close the input and output channels.*) 17 | let get () = 18 | parse_opt (); 19 | try 20 | if !ifiles = [] then error (); 21 | let outf = if !ofile = "" then stdout else open_out !ofile in 22 | let formatter = formatter_of_out_channel outf in 23 | let formatter', close_package = match !pfile with 24 | | "" -> formatter, fun () -> () 25 | | pkg -> 26 | if !ofile = "" then error (); 27 | let f = open_out (pkg ^ ".sty") in 28 | formatter_of_out_channel f, fun () -> close_out f 29 | in 30 | formatter_package := formatter'; 31 | let p = match !mode with 32 | | Plain Default -> (module Printers.Default : GenericPrinter.PRINTER) 33 | | Plain EBNF -> (module Printers.Ebnf) 34 | | Latex Tabular -> (module Printers.LatexTabular) 35 | | Latex Simplebnf -> (module Printers.LatexSimplebnf) 36 | | Latex Syntax -> (module Printers.LatexSyntax) 37 | | Latex Backnaur -> (module Printers.LatexBacknaur) 38 | | Html CSS -> (module Printers.HtmlCss) 39 | | Html NoCSS -> (module Printers.Html) 40 | in 41 | let module P = (val p: GenericPrinter.PRINTER) in 42 | let print symbols = P.print_spec symbols formatter in 43 | let files = rev !ifiles in 44 | let infs = map open_in files in 45 | let lexbufs = map Lexing.from_channel infs in 46 | let close () = iter close_in infs; close_out outf; close_package () in 47 | combine files lexbufs, print, close 48 | with Sys_error s -> 49 | eprintf "System Error: %s@." s; 50 | exit 1 51 | 52 | (** @return the obtained grammars per input file / lexer buffer. *) 53 | let parse (_, lexbuf as fl) = 54 | try 55 | Lexer.init (); 56 | Parser.specification Lexer.lexer lexbuf 57 | with 58 | | Lexer.LexingError s -> 59 | err_loc_lexbuf fl (sprintf "Lexing Error: %s" s); 60 | exit 1 61 | | Parser.Error -> 62 | err_loc_lexbuf fl "Parsing Error"; 63 | exit 1 64 | 65 | let () = 66 | let lexbufs, print, close = get () in 67 | try 68 | let s = map parse lexbufs |> concat in 69 | let symbols = Scan.scan s in 70 | s 71 | |> Normalize.normalize 72 | |> Transform.transform symbols 73 | |> (if !mode = Plain EBNF then Specialize.specialize symbols else fun s -> s) 74 | |> Reduce.reduce !inline 75 | |> print symbols; 76 | close () 77 | with 78 | | Sys_error s -> 79 | eprintf "System Error: %s@." s; 80 | exit 1 81 | -------------------------------------------------------------------------------- /src/normalize.ml: -------------------------------------------------------------------------------- 1 | (** This module provides a way to "flatten" the groups of productions. *) 2 | 3 | open Ast 4 | open List 5 | 6 | (** Normalize an actual. 7 | Note that the anonymous rules are normalized in the same ways as 8 | the groups, see {!normalize_group}. *) 9 | let rec normalize_actual = function 10 | | Symbol (s, xs) -> 11 | Symbol (s, map normalize_actual xs) 12 | | Modifier (x, m) -> 13 | Modifier (normalize_actual x, m) 14 | | Anonymous gs -> 15 | let gs = fold_right normalize_group gs [] in 16 | Anonymous gs 17 | 18 | (** Normalize a production by normalizing its actuals. *) 19 | and normalize_production p = 20 | map normalize_actual p 21 | 22 | (** Normalize a group. 23 | Each production of the group is normalized then inserted in a new 24 | singleton group. As a result, [normalize_group] returns a list of groups 25 | with only one production each. 
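For example, a group with two productions [p1] and [p2] yields the singleton groups [[p1]] and [[p2]], prepended to the accumulator [grs].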
*) 26 | and normalize_group productions grs = 27 | let productions = map normalize_production productions in 28 | let ps = map (fun x -> [x]) productions in 29 | ps @ grs 30 | 31 | (** Normalize a rule by normalizing its groups. 32 | In general, the number of groups {b increases}. *) 33 | let normalize_rule r = 34 | let groups = fold_right normalize_group r.groups [] in 35 | { r with groups } 36 | 37 | (** Normalize the grammar by normalizing its rules. *) 38 | let normalize = map normalize_rule 39 | -------------------------------------------------------------------------------- /src/position.ml: -------------------------------------------------------------------------------- 1 | (** A generic module for handling locations of errors. *) 2 | 3 | open Lexing 4 | open Format 5 | 6 | (** The error is located between two positions in the buffer. *) 7 | type position = { 8 | start_p: Lexing.position; (** The start position. *) 9 | end_p: Lexing.position (** The end position. *) 10 | } 11 | 12 | (** A "dummy" error location. *) 13 | let dummy = { 14 | start_p = dummy_pos; 15 | end_p = dummy_pos 16 | } 17 | 18 | (** Build an error location from the start and end positions. *) 19 | let from start_p end_p = { start_p; end_p } 20 | 21 | (** Print a located error message on the standard error output. *) 22 | let err_loc file {start_p; end_p} = 23 | let l = start_p.pos_lnum in 24 | let start_c = start_p.pos_cnum - start_p.pos_bol in 25 | let end_c = end_p.pos_cnum - end_p.pos_bol - 1 in 26 | if start_c = end_c 27 | then eprintf "File \"%s\", line %d, character %d:@\n%s@." file l start_c 28 | else eprintf "File \"%s\", line %d, characters %d-%d:@\n%s@." file l start_c end_c 29 | 30 | (** Build an error location from the lexer buffer. *) 31 | let get_pos lexbuf = 32 | from (lexeme_start_p lexbuf) (lexeme_end_p lexbuf) 33 | 34 | (** Print an error message located with the lexer buffer. *) 35 | let err_loc_lexbuf (file, lexbuf) = 36 | err_loc file (get_pos lexbuf) 37 | -------------------------------------------------------------------------------- /src/printer.mli: -------------------------------------------------------------------------------- 1 | (** The generic signature for a printer *) 2 | 3 | open Format 4 | open Common 5 | open ExtendedAst 6 | 7 | (** [print_spec symbs fmt s] print the grammar [s] with symbols [symbs] on 8 | the specified formatter [fmt]. *) 9 | val print_spec: Symbols.t -> formatter -> spec -> unit 10 | 11 | -------------------------------------------------------------------------------- /src/printers.ml: -------------------------------------------------------------------------------- 1 | (** This module provides the actual printers, of signature {!Printer}. *) 2 | 3 | open GenericPrinter 4 | 5 | (** The default plain text printer. *) 6 | module Default = Make(Default) 7 | 8 | (** The EBNF plain text printer. *) 9 | module Ebnf = Make(Ebnf) 10 | 11 | (** The LaTeX default table-based printer. *) 12 | module LatexTabular = Make(LatexTabular) 13 | 14 | (** The LaTeX "simplebnf" package based printer. *) 15 | module LatexSimplebnf = Make(LatexSimplebnf) 16 | 17 | (** The LaTeX "syntax" package based printer. *) 18 | module LatexSyntax = Make(LatexSyntax) 19 | 20 | (** The LaTeX "backnaur" package based printer. *) 21 | module LatexBacknaur = Make(LatexBacknaur) 22 | 23 | (** The HTML printer with CSS content properties. *) 24 | module HtmlCss = Make(HtmlCss) 25 | 26 | (** The HTML printer without CSS content properties. 
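It corresponds to the [-nocss] command-line switch.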
*) 27 | module Html = Make(Html) 28 | -------------------------------------------------------------------------------- /src/scan.ml: -------------------------------------------------------------------------------- 1 | (** Build the set of symbols appearing in a grammar. *) 2 | 3 | open Ast 4 | open Common 5 | open List 6 | 7 | (** [add_defined symbols r] adds to the [symbols] set the name of [r] 8 | (left-hand side) as a non terminal (see {!Common.Symbols.def_non_term}) 9 | if the rule has no parameter or as a functional non terminal along with 10 | its parameters (see {!Common.Symbols.def_fun}) otherwise. *) 11 | let add_defined symbols {name; params; _} = 12 | match params with 13 | | [] -> Symbols.def_non_term name symbols 14 | | _ -> Symbols.def_fun name params symbols 15 | 16 | (** [add_terminal symbols r] recursively scans the right-hand side of [r] to add 17 | the symbols which are not already "defined" in [symbols] 18 | (see {!Common.Symbols.is_defined}) neither parameters as terminals 19 | (see {!Common.Symbols.def_term}). *) 20 | let add_terminal symbols {params; groups; _} = 21 | let rec add_terminal_actual symbols = function 22 | | Symbol (s, ps) -> 23 | let symbols = fold_left add_terminal_actual symbols ps in 24 | if Symbols.is_defined s symbols = None 25 | && String.mapi (fun i c -> if i = 0 then Char.uppercase_ascii c else c) s = s 26 | && ps = [] 27 | && not (List.mem s params) 28 | then Symbols.def_term s symbols else symbols 29 | | Modifier (a, _) -> 30 | add_terminal_actual symbols a 31 | | Anonymous gs -> 32 | fold_left add_terminal_group symbols gs 33 | and add_terminal_prod symbols = fold_left add_terminal_actual symbols 34 | and add_terminal_group symbols = fold_left add_terminal_prod symbols 35 | in 36 | fold_left add_terminal_group symbols groups 37 | 38 | (** [scan s] first gets the defined symbols of [s] then its terminals and 39 | returns the whole set. *) 40 | let scan s = 41 | let symbols = fold_left add_defined Symbols.empty s in 42 | fold_left add_terminal symbols s 43 | -------------------------------------------------------------------------------- /src/subst.ml: -------------------------------------------------------------------------------- 1 | (** Symbol substitution. *) 2 | 3 | open ExtendedAst 4 | 5 | (** A map with identifiers keys. *) 6 | module M = Map.Make(String) 7 | 8 | (** Performs a substitution over an actual. 9 | Only non functional symbols are substituted. *) 10 | let rec subst_actual s a = 11 | let subst_production = List.map (subst_actual s) in 12 | match a with 13 | | Symbol (f, []) -> 14 | begin try 15 | M.find f s 16 | with Not_found -> Symbol (f, []) 17 | end 18 | | Symbol (f, xs) -> 19 | Symbol (f, List.map (subst_actual s) xs) 20 | | Pattern p -> 21 | Pattern (map_pattern (subst_actual s) p) 22 | | Modifier (x, m) -> 23 | Modifier (subst_actual s x, m) 24 | | Anonymous ps -> 25 | Anonymous (List.map subst_production ps) 26 | 27 | (** [make_subst xs ys] builds a substitution that is a map linking each element 28 | of [xs] to the corresponding (by index) element of [ys]. *) 29 | let make_subst xs ys = 30 | List.fold_left2 (fun s x y -> M.add x y s) M.empty xs ys 31 | -------------------------------------------------------------------------------- /src/transform.ml: -------------------------------------------------------------------------------- 1 | (** Transform a normalized {!Ast} towards an {!ExtendedAst}. *) 2 | 3 | open ExtendedAst 4 | open Common 5 | open List 6 | 7 | (** Transform an actual. 
8 | Note that the anonymous rules are transformed with the help 9 | of {!transform_group}. *) 10 | let rec transform_actual symbols = function 11 | | Ast.Symbol (s, xs) -> 12 | transform_symbol symbols s xs 13 | | Ast.Modifier (x, m) -> 14 | transform_modifier symbols x m 15 | | Ast.Anonymous gs -> 16 | let gs = map (transform_group symbols) gs in 17 | match gs with 18 | | [[ a ]] -> a 19 | | _ -> Anonymous gs 20 | 21 | (** Transform all non defined symbols (see {!Common.Symbols.is_defined}) 22 | corresponding to Menhir standard library symbols ([list], [pair], [option], 23 | etc.) into patterns, and all other symbols into symbols. *) 24 | and transform_symbol symbols s xs = 25 | let xs = map (transform_actual symbols) xs in 26 | match Symbols.is_defined s symbols with 27 | | Some _ -> Symbol (s, xs) 28 | | None -> 29 | begin match s, xs with 30 | | ("endrule" | "midrule"), [x] -> 31 | x 32 | 33 | | ("option" | "ioption" | "boption" | "loption"), [x] -> 34 | Pattern (Option x) 35 | 36 | | "pair", [x; y] -> 37 | Pattern (Pair (x, y)) 38 | | "separated_pair", [x; sep; y] -> 39 | Pattern (SepPair (x, sep, y)) 40 | | "preceded", [o; x] -> 41 | Pattern (Preceded (o, x)) 42 | | "terminated", [x; c] -> 43 | Pattern (Terminated (x, c)) 44 | | "delimited", [o; x; c] -> 45 | Pattern (Delimited (o, x, c)) 46 | 47 | | "list", [x] -> 48 | Pattern (List x) 49 | | "nonempty_list", [x] -> 50 | Pattern (NEList x) 51 | | "separated_list", [sep; x] -> 52 | Pattern (SepList (sep, x)) 53 | | "separated_nonempty_list", [sep; x] -> 54 | Pattern (SepNEList (sep, x)) 55 | 56 | | ("rev" | "flatten"), [x] -> 57 | x 58 | | "append", [x; y] -> 59 | Pattern (Pair (x, y)) 60 | 61 | | _, _ -> 62 | Symbol (s, xs) 63 | end 64 | 65 | (** Transform a "modified" actual. *) 66 | and transform_modifier symbols x m = 67 | let x = transform_actual symbols x in 68 | let m = match m with 69 | | Ast.Opt -> Opt 70 | | Ast.Plus -> Plus 71 | | Ast.Star -> Star 72 | in 73 | Modifier (x, m) 74 | 75 | (** Transform a production by transforming its actuals. *) 76 | and transform_production symbols = map (transform_actual symbols) 77 | 78 | (** Transform a supposed singleton group by returning the transformation of 79 | its unique production. *) 80 | and transform_group symbols = function 81 | | [p] -> transform_production symbols p 82 | | _ -> assert false 83 | 84 | (** Transform a rule by transforming its groups into productions. *) 85 | let transform_rule symbols r = 86 | let prods = map (transform_group symbols) r.Ast.groups in 87 | { name = r.Ast.name; params = r.Ast.params; prods } 88 | 89 | (** Transform a grammar by transforming its rules. 
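At this point {!Normalize} guarantees that every group holds exactly one production, and occurrences of Menhir standard-library symbols such as [option] or [separated_list] are turned into explicit patterns.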
*) 90 | let transform symbols = map (transform_rule symbols) 91 | -------------------------------------------------------------------------------- /tests/alphaCaml-demos-interactive.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | 3 | open Syntax.Raw 4 | 5 | %} 6 | 7 | %token VAR 8 | %token CONST 9 | %token EQUAL PLUS EOF FAIL 10 | 11 | %left PLUS 12 | 13 | %type declarations 14 | %start declarations 15 | 16 | %% 17 | 18 | expr: 19 | | VAR 20 | { EVar $1 } 21 | | CONST 22 | { EConst $1 } 23 | | expr PLUS expr 24 | { EAdd ($1, $3) } 25 | | FAIL 26 | { EFail } 27 | 28 | declarations: 29 | | VAR EQUAL expr EOF 30 | { D ($1, $3, Suspension.create (fun () -> !ParserFix.declarations())) } 31 | 32 | /* We read just one declaration, of the form = , 33 | and expect to find no more input, because the token stream represents 34 | just one line of input and we expect one declaration per line. 35 | 36 | The rest of the declarations is represented by a suspension, which, 37 | when forced, will do whatever is necessary to read more input. The 38 | function that defines this suspension needs to call the parser, so 39 | there is a circularity. We break this circularity by going through 40 | a reference in module [ParserFix]. */ 41 | 42 | -------------------------------------------------------------------------------- /tests/alphaCaml-demos-mixins.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | 3 | open Strings 4 | open Mm.Raw 5 | 6 | %} 7 | 8 | %token LIDENT 9 | %token LCURLY RCURLY DOT MIXIN END LPAREN RPAREN CLOSE DELETE AND 10 | IN FAKE DEPENDS ON PLUS LET REC EQUAL VAL AS WILDCARD EOF 11 | LSQUARE RSQUARE 12 | 13 | %type toplevel 14 | %start toplevel 15 | 16 | %% 17 | 18 | atomic_expression: 19 | | LIDENT 20 | { EVar $1 } 21 | | LCURLY record_fields RCURLY 22 | { ERecord $2 } 23 | | atomic_expression DOT LIDENT 24 | { ERecordSelection ($1, $3) } 25 | | MIXIN components END 26 | { let _, input, output = $2 in 27 | EStructure (input, output) } 28 | | LPAREN expression RPAREN 29 | { $2 } 30 | 31 | unary_expression: 32 | | atomic_expression 33 | { $1 } 34 | | CLOSE unary_expression 35 | { EClose $2 } 36 | | DELETE fields IN unary_expression 37 | { EDeletion ($4, $2) } 38 | | FAKE LIDENT DEPENDS ON LIDENT IN unary_expression 39 | { EFakeDependency ($7, $2, $5) } 40 | 41 | summand_expression: 42 | | unary_expression 43 | { $1 } 44 | | summand_expression PLUS unary_expression 45 | { EComposition ($1, $3) } 46 | 47 | expression: 48 | | summand_expression 49 | { $1 } 50 | | LET REC bindings IN expression 51 | { ELetRec (List.rev $3, $5) } 52 | 53 | /* One should check against backward dependencies in let rec 54 | definitions, except when the target has predictable shape. 55 | This is not currently done. */ 56 | 57 | bindings: 58 | | binding 59 | { [ $1 ] } 60 | | bindings AND binding 61 | { $3 :: $1 } 62 | 63 | binding: 64 | | LIDENT EQUAL expression 65 | { ($1, $3) } 66 | 67 | toplevel: 68 | | expression EOF 69 | { $1 } 70 | 71 | record_fields: 72 | | /* epsilon */ 73 | { StringMap.empty } 74 | | record_fields VAL LIDENT EQUAL expression 75 | { StringMap.add $3 $5 $1 } 76 | 77 | /* One should check against duplicate field labels in structures. 78 | This is not currently done. 
*/ 79 | 80 | components: 81 | | /* epsilon */ 82 | { 0, StringMap.empty, (StringMap.empty, []) } 83 | | components VAL lident_pun dependencies EQUAL expression 84 | { let xname, iname = $3 in 85 | let i, input, (fields, anonymous) = $1 in 86 | let fields = StringMap.add xname ($4, iname, $6, Mm.KStructMember i) fields in 87 | i+1, input, (fields, anonymous) } 88 | | components VAL WILDCARD AS LIDENT dependencies EQUAL expression 89 | { let i, input, (fields, anonymous) = $1 in 90 | let anonymous = ($6, $5, $8, Mm.KStructMember i) :: anonymous in 91 | i+1, input, (fields, anonymous) } 92 | | components VAL lident_pun 93 | { let xname, iname = $3 in 94 | let i, input, output = $1 in 95 | let input = StringMap.add xname iname input in 96 | i+1, input, output } 97 | 98 | lident_pun: 99 | | LIDENT AS LIDENT /* external name, internal name */ 100 | { $1, $3 } 101 | | LIDENT /* pun: both names identical */ 102 | { $1, $1 } 103 | 104 | fields: 105 | | /* epsilon */ 106 | { StringSet.empty } 107 | | fields LIDENT 108 | { StringSet.add $2 $1 } 109 | 110 | dependencies: 111 | | /* epsilon */ 112 | { [] } 113 | | LSQUARE variables RSQUARE 114 | { $2 } 115 | 116 | variables: 117 | | /* epsilon */ 118 | { [] } 119 | | variables LIDENT 120 | { $2 :: $1 } 121 | -------------------------------------------------------------------------------- /tests/amalthea.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | open Io_types ;; 3 | open Io_messages ;; 4 | open Io_parser_state ;; 5 | let unpos = snd 6 | let pos = fst 7 | let ct1 = fun c (cs, bs) -> (c::cs, bs) 8 | let ct2 = fun b (cs, bs) -> (cs, b::bs) 9 | let parse_error = function msg -> 10 | raise (Io_module_error (Io_parser_error !last_pos)) 11 | 12 | %} 13 | 14 | %token Lint 15 | %token Lstring 16 | %token Lident 17 | %token Llpar Lrpar Lsemi 18 | %token Lcolon Lperiod 19 | %token Llambda 20 | %token Lputvar 21 | %token Lgetvar 22 | %token Lend 23 | %token Ldeclare 24 | %token Lprimitive 25 | %token Lvariable 26 | %token Lexport 27 | %token Limport 28 | 29 | %start parse_io_module 30 | %type parse_io_module 31 | 32 | %% 33 | 34 | parse_io_module: 35 | | imports export decll expr Lend { ($1,$2,$3,$4) } 36 | 37 | imports: 38 | | Limport Lident importlist Lperiod imports { last_pos := pos $2;(String.lowercase (unpos $2), $3)::$5 } 39 | | { [] } 40 | 41 | importlist: 42 | | Lcolon idlist { Some $2 } 43 | | { None } 44 | 45 | export: 46 | | Lexport idlist Lperiod { $2 } 47 | | { [] } 48 | 49 | decll: 50 | | Ldeclare Lident Lcolon eatom Lperiod decll { last_pos := pos $2;ct1 ($1,unpos $2,$4) $6 } 51 | | Lprimitive Lident Lcolon Lident Lperiod decll { last_pos := pos $2;ct1 ($1,unpos $2,Eprimitive (unpos $4)) $6 } 52 | | Lvariable Lident Lcolon eatom Lperiod decll { last_pos := pos $2;ct2 ($1,unpos $2,$4) $6 } 53 | | { ([], []) } 54 | 55 | expr: 56 | | Lident paramlist { Eappl (pos $1, unpos $1, $2, EFall) } 57 | | Lident { Eid (pos $1, unpos $1) } 58 | | Lident Lputvar patom stmttail { Eputvar (pos $1, unpos $1, $3, $4, EFall) } 59 | | Lident Lgetvar Lident stmttail { Egetvar (pos $1, unpos $1, unpos $3, $4, EFall) } 60 | | stmt { $1 } 61 | 62 | paramlist: 63 | | patom paramlisttail { $1::$2 } 64 | | Lsemi eatom { [$2] } 65 | | stmt { [$1] } 66 | 67 | paramlisttail: 68 | | patom paramlisttail { $1::$2 } 69 | | Lsemi eatom { [$2] } 70 | | stmt { [$1] } 71 | | { [] } 72 | 73 | stmt: 74 | | Llambda idlist stmttail { Elambda ($1,$2,$3,EFall) } 75 | 76 | stmttail: 77 | | Lsemi eatom { $2 } 78 | | atom { $1 } 79 | 80 | 
idlist: 81 | | Lident idlist { last_pos := pos $1;(unpos $1)::$2 } 82 | | { [] } 83 | 84 | eatom: 85 | | expr { $1 } 86 | | atom { $1 } 87 | 88 | patom: 89 | | Lident { last_pos := pos $1; Eid (pos $1,unpos $1) } 90 | | atom { $1 } 91 | 92 | atom: 93 | | Lint { last_pos := pos $1; Eint (pos $1,unpos $1) } 94 | | Lstring { last_pos := pos $1; Estring (pos $1,unpos $1) } 95 | | Llpar expr Lrpar { $2 } 96 | -------------------------------------------------------------------------------- /tests/anonymous-nested.mly: -------------------------------------------------------------------------------- 1 | /* Test of the new anonymous rule syntax, including 2 | anonymous rules nested in anonymous rules. */ 3 | 4 | %token A B C D EOF 5 | %start phrase 6 | 7 | %% 8 | 9 | %inline id(X): 10 | x = X { x } 11 | 12 | foo: 13 | A B { 1 } 14 | 15 | bar: 16 | C D { 2 } 17 | 18 | phrase: 19 | y = id(id(x = foo { x } | z = bar { z })) 20 | t = id(x = foo { x } | id(z = bar { z }) { 2 }) 21 | EOF 22 | { y + t } 23 | -------------------------------------------------------------------------------- /tests/anonymous-param-redundant.mly: -------------------------------------------------------------------------------- 1 | /* Test of the new anonymous rule syntax, inside 2 | a parameterized definition, whose parameters 3 | are NOT used by the anonymous rule. */ 4 | 5 | %{ type ('a, 'b) either = Left of 'a | Right of 'b %} 6 | %token A B C D EOF 7 | %start<(int, int) either> phrase 8 | %start other 9 | 10 | %% 11 | 12 | %inline mixed_list(X, Y, Z): 13 | Z 14 | list( 15 | x = X { Left x } 16 | | y = Y { Right y } 17 | ) { $1 } 18 | 19 | phrase: 20 | xs = mixed_list(A, B, C) 21 | ys = mixed_list(A, B, D) 22 | (* We should obtain only ONE anonymous symbol because 23 | Z is unused in the anonymous rule above. *) 24 | EOF 25 | { xs @ ys } 26 | 27 | other: 28 | seplist(A) EOF {} 29 | 30 | (* A list of X's, separated with C's or D's. We should 31 | obtain a definition of a symbol that expands to C or 32 | D and is NOT parameterized over X. *) 33 | seplist(X): 34 | X {} 35 | | X midrule(C {} | D {}) seplist(X) {} 36 | -------------------------------------------------------------------------------- /tests/anonymous-param.mly: -------------------------------------------------------------------------------- 1 | /* Test of the new anonymous rule syntax, inside 2 | a parameterized definition. */ 3 | 4 | %{ type ('a, 'b) either = Left of 'a | Right of 'b %} 5 | %token A B C D EOF 6 | %start<(int, int) either> phrase 7 | 8 | %% 9 | 10 | mixed_list(X, Y): 11 | list( 12 | x = X { Left x } 13 | | y = Y { Right y } 14 | ) { $1 } 15 | 16 | phrase: 17 | xs = mixed_list(A, B) 18 | ys = mixed_list(C, D) 19 | EOF 20 | { xs @ ys } 21 | -------------------------------------------------------------------------------- /tests/anonymous.mly: -------------------------------------------------------------------------------- 1 | /* Test of the new anonymous rule syntax. 
*/ 2 | 3 | %token A B C D EOF 4 | %start phrase 5 | 6 | %% 7 | 8 | phrase: 9 | xs = list(x = A | x = B | x = C { x }) 10 | ys = list(x = D y = D { x + y }) 11 | EOF 12 | { xs @ ys } 13 | -------------------------------------------------------------------------------- /tests/attributes-expansion.mly: -------------------------------------------------------------------------------- 1 | %token A B C 2 | %start main 3 | %attribute liste(A) [@liste.A true] 4 | %attribute liste [@happy true] 5 | %% 6 | 7 | main: 8 | liste(A) liste(B) liste(C) 9 | {} 10 | 11 | liste [@liste true] (X): 12 | x = X xs = liste(X) { x :: xs } 13 | | { [] } 14 | -------------------------------------------------------------------------------- /tests/attributes.mly: -------------------------------------------------------------------------------- 1 | %token A [@a 0] 2 | %token B [@b 0] 3 | %token C [@c 0] [@d 1] 4 | 5 | %attribute A B [@foo "foo"] 6 | %attribute C [@bar "bar"] [@baz "baz"] 7 | %attribute main [@main ()] 8 | 9 | %[@claim "this is a grammar attribute"] 10 | 11 | %start main 12 | 13 | %% 14 | 15 | main [@cost 0]: 16 | A B [@producer true] C {} 17 | -------------------------------------------------------------------------------- /tests/bare-bones.mly: -------------------------------------------------------------------------------- 1 | (* A grammar of arithmetic expressions without semantic actions. *) 2 | 3 | %token DIV EOL INT LPAREN MINUS PLUS RPAREN TIMES 4 | %start main 5 | 6 | %% 7 | 8 | let main := 9 | additive_expr; EOL 10 | 11 | let additive_expr := 12 | | multiplicative_expr 13 | | additive_expr; PLUS; multiplicative_expr 14 | | additive_expr; MINUS; multiplicative_expr 15 | 16 | let multiplicative_expr := 17 | | atomic_expr 18 | | multiplicative_expr; TIMES; atomic_expr 19 | | multiplicative_expr; DIV; atomic_expr 20 | 21 | let atomic_expr := 22 | | LPAREN; additive_expr; RPAREN 23 | | INT 24 | | MINUS; atomic_expr 25 | -------------------------------------------------------------------------------- /tests/bauer.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | open Prolog 3 | %} 4 | 5 | %token LPARENT RPARENT 6 | %token DOT COMMA INFERS EOF 7 | %token IDENTIFIER VARIABLE 8 | 9 | %start clauses 10 | %% 11 | clauses: clauses = separated_list( DOT, clause ) EOF { clauses } 12 | 13 | clause: 14 | tm = term INFERS ts = separated_list( COMMA, term ) { (tm, ts) } 15 | | fact = term { (fact, []) } 16 | 17 | term: 18 | name = IDENTIFIER p = option(params) { 19 | let ts = match p with None -> [] | Some l -> l in 20 | Struct (name, ts) 21 | } 22 | | v = VARIABLE { Var (0, v) } 23 | 24 | params: terms = delimited( LPARENT, separated_list( COMMA, term ) , 25 | RPARENT) { terms } 26 | -------------------------------------------------------------------------------- /tests/belloeil.mly: -------------------------------------------------------------------------------- 1 | (* Submitted by Thierry Belloeil -- thanks. *) 2 | 3 | (* This grammar shows a problem with the conflict explanation module. 4 | The %nonassoc directives force a shift/reduce conflict to be resolved 5 | in favor of reduction. This cuts a transition in the automaton, which 6 | in turn prevents another shift/reduce conflict from being explained. *) 7 | 8 | (* Beware: removing the %nonassoc or %prec directives causes the final 9 | numbering of states to change (but the raw numbers remain the same). 
*) 10 | 11 | %token SIZE LOG EOF ARROW BANG 12 | 13 | %nonassoc BANG 14 | %nonassoc below_NUM 15 | 16 | %start desc 17 | 18 | %type desc 19 | 20 | %% 21 | 22 | desc: 23 | exprl ARROW expr EOF {} 24 | 25 | expr: 26 | | expr BANG {} 27 | | expr SIZE {} 28 | | LOG exprl {} 29 | 30 | exprl: 31 | BANG exprl {} 32 | | SIZE exprl {} 33 | | {} %prec below_NUM 34 | 35 | -------------------------------------------------------------------------------- /tests/bibtex2html.1.99-bibtex_parser.mly: -------------------------------------------------------------------------------- 1 | (* Original file: bibtex2html.1.99/bibtex2html-1.99/bibtex_parser.mly *) 2 | /**************************************************************************/ 3 | /* bibtex2html - A BibTeX to HTML translator */ 4 | /* Copyright (C) 1997-2014 Jean-Christophe Filliâtre and Claude Marché */ 5 | /* */ 6 | /* This software is free software; you can redistribute it and/or */ 7 | /* modify it under the terms of the GNU General Public */ 8 | /* License version 2, as published by the Free Software Foundation. */ 9 | /* */ 10 | /* This software is distributed in the hope that it will be useful, */ 11 | /* but WITHOUT ANY WARRANTY; without even the implied warranty of */ 12 | /* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. */ 13 | /* */ 14 | /* See the GNU General Public License version 2 for more details */ 15 | /* (enclosed in the file GPL). */ 16 | /**************************************************************************/ 17 | 18 | /* 19 | * bibtex2html - A BibTeX to HTML translator 20 | * Copyright (C) 1997 Jean-Christophe FILLIATRE 21 | * 22 | * This software is free software; you can redistribute it and/or 23 | * modify it under the terms of the GNU General Public 24 | * License version 2, as published by the Free Software Foundation. 25 | * 26 | * This software is distributed in the hope that it will be useful, 27 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 28 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 29 | * 30 | * See the GNU General Public License version 2 for more details 31 | * (enclosed in the file GPL). 32 | */ 33 | 34 | /*i $Id: bibtex_parser.mly,v 1.15 2010-02-22 07:38:19 filliatr Exp $ i*/ 35 | 36 | /*s Parser for BibTeX files. 
*/ 37 | 38 | %{ 39 | 40 | open Bibtex 41 | 42 | %} 43 | 44 | %token Tident Tstring Tcomment 45 | %token Tentry 46 | %token Tabbrev Tpreamble Tlbrace Trbrace Tcomma Tequal EOF Tsharp 47 | 48 | %start command_list 49 | %type command_list 50 | %start command 51 | %type command 52 | 53 | %% 54 | 55 | command_list: 56 | commands EOF { $1 } 57 | ; 58 | 59 | commands: 60 | commands command 61 | { add_new_entry $2 $1 } 62 | | /* epsilon */ 63 | { empty_biblio } 64 | ; 65 | command: 66 | Tcomment 67 | { Comment $1 } 68 | | Tpreamble sharp_string_list Trbrace 69 | { Preamble $2 } 70 | | Tabbrev Tident Tequal sharp_string_list Trbrace 71 | { Abbrev (String.lowercase_ascii $2,$4) } 72 | | entry Tcomma comma_field_list Trbrace 73 | { let et,key = $1 in Entry (String.lowercase_ascii et, key, $3) } 74 | ; 75 | 76 | entry: 77 | | Tentry 78 | { let et,key = $1 in Bibtex.current_key := key; (et,key) } 79 | 80 | comma_field_list: 81 | field Tcomma comma_field_list 82 | { $1::$3 } 83 | | field 84 | { [$1] } 85 | | field Tcomma 86 | { [$1] } 87 | ; 88 | field: 89 | field_name Tequal sharp_string_list 90 | { ($1,$3) } 91 | | field_name Tequal 92 | { ($1,[String ""]) } 93 | ; 94 | field_name: 95 | Tident { String.lowercase_ascii $1 } 96 | | Tcomment { "comment" } 97 | ; 98 | sharp_string_list: 99 | atom Tsharp sharp_string_list 100 | { $1::$3 } 101 | | atom 102 | { [$1] } 103 | ; 104 | atom: 105 | Tident 106 | { Id (String.lowercase_ascii $1) } 107 | | Tstring 108 | { String $1 } 109 | ; 110 | 111 | %% 112 | -------------------------------------------------------------------------------- /tests/bibtex_parser.mly: -------------------------------------------------------------------------------- 1 | /* 2 | * bibtex2html - A BibTeX to HTML translator 3 | * Copyright (C) 1997 Jean-Christophe FILLIATRE 4 | * 5 | * This software is free software; you can redistribute it and/or 6 | * modify it under the terms of the GNU General Public 7 | * License version 2, as published by the Free Software Foundation. 8 | * 9 | * This software is distributed in the hope that it will be useful, 10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 12 | * 13 | * See the GNU General Public License version 2 for more details 14 | * (enclosed in the file GPL). 15 | */ 16 | 17 | /*i $Id: bibtex_parser.mly,v 1.10 2004/09/17 12:58:33 marche Exp $ i*/ 18 | 19 | /*s Parser for BibTeX files. 
*/ 20 | 21 | %{ 22 | 23 | open Bibtex 24 | 25 | %} 26 | 27 | %token Tident Tstring Tcomment 28 | %token Tentry 29 | %token Tabbrev Tpreamble Tlbrace Trbrace Tcomma Tequal EOF Tsharp 30 | 31 | %start command_list 32 | %type command_list 33 | %start command 34 | %type command 35 | 36 | %% 37 | 38 | command_list: 39 | commands EOF { $1 } 40 | ; 41 | 42 | commands: 43 | commands command 44 | { add_new_entry $2 $1 } 45 | | /* epsilon */ 46 | { empty_biblio } 47 | ; 48 | command: 49 | Tcomment 50 | { Comment $1 } 51 | | Tpreamble sharp_string_list Trbrace 52 | { Preamble $2 } 53 | | Tabbrev Tident Tequal sharp_string_list Trbrace 54 | { Abbrev (String.uppercase $2,$4) } 55 | | entry Tcomma comma_field_list Trbrace 56 | { let et,key = $1 in Entry (String.uppercase et, key, $3) } 57 | ; 58 | 59 | entry: 60 | | Tentry 61 | { let et,key = $1 in Bibtex.current_key := key; (et,key) } 62 | 63 | comma_field_list: 64 | field Tcomma comma_field_list 65 | { $1::$3 } 66 | | field 67 | { [$1] } 68 | | field Tcomma 69 | { [$1] } 70 | ; 71 | field: 72 | field_name Tequal sharp_string_list 73 | { ($1,$3) } 74 | | field_name Tequal 75 | { ($1,[String ""]) } 76 | ; 77 | field_name: 78 | Tident { String.uppercase $1 } 79 | | Tcomment { "COMMENT" } 80 | ; 81 | sharp_string_list: 82 | atom Tsharp sharp_string_list 83 | { $1::$3 } 84 | | atom 85 | { [$1] } 86 | ; 87 | atom: 88 | Tident 89 | { Id (String.uppercase $1) } 90 | | Tstring 91 | { String $1 } 92 | ; 93 | 94 | %% 95 | -------------------------------------------------------------------------------- /tests/bison-mysterious-conflict.mly: -------------------------------------------------------------------------------- 1 | /* This grammar is not LALR(1). It is taken from the Bison manual. */ 2 | 3 | %token ID COMMA COLON 4 | %start def 5 | 6 | %% 7 | 8 | def: 9 | param_spec return_spec COMMA 10 | { () } 11 | 12 | param_spec: 13 | typ 14 | { () } 15 | | separated_nonempty_list(COMMA, name) COLON typ 16 | { () } 17 | 18 | return_spec: 19 | typ 20 | { () } 21 | | name COLON typ 22 | { () } 23 | 24 | typ: 25 | ID 26 | { () } 27 | 28 | name: 29 | ID 30 | { () } 31 | 32 | -------------------------------------------------------------------------------- /tests/bodin.mly: -------------------------------------------------------------------------------- 1 | (* Submitted by Martin Bodin. 2 | https://gitlab.inria.fr/fpottier/menhir/issues/4 3 | This file was rejected by Menhir prior to 2017/12/06 4 | because expr_or_assign(el) invokes expr(el) 5 | which invokes expr_or_assign(cr). *) 6 | 7 | %token NEW_LINE 8 | %token LPAR RPAR 9 | %token UNIT 10 | 11 | %start main 12 | 13 | %% 14 | 15 | main: 16 | | e = expr_or_assign (empty) { e } 17 | 18 | expr_or_assign (el): 19 | | e = expr (el) { e } 20 | 21 | expr (el): 22 | | el; p = LPAR; e = expr_or_assign (cr); cr; RPAR { e } 23 | | e = UNIT { e } 24 | 25 | cr: 26 | | NEW_LINE cr { } 27 | | { } 28 | 29 | empty: 30 | | { } 31 | 32 | %% 33 | -------------------------------------------------------------------------------- /tests/bodin_inlined.mly: -------------------------------------------------------------------------------- 1 | (* Submitted by Martin Bodin. 2 | https://gitlab.inria.fr/fpottier/menhir/issues/4 3 | This file was accepted by Menhir prior to 2017/12/06 4 | but that was unintended. 5 | Its close companion bodin.mly was rejected. 
*) 6 | 7 | %token NEW_LINE 8 | %token LPAR RPAR 9 | %token UNIT 10 | 11 | %start main 12 | 13 | %% 14 | 15 | main: 16 | | e = expr_or_assign (empty) { e } 17 | 18 | expr_or_assign (el): 19 | | e = expr (el) { e } 20 | 21 | expr_or_assign_cr: 22 | | e = expr_or_assign (cr) { e } 23 | 24 | expr (el): 25 | | el; p = LPAR; e = expr_or_assign_cr; cr; RPAR { e } 26 | | e = UNIT { e } 27 | 28 | cr: 29 | | NEW_LINE cr { } 30 | | { } 31 | 32 | empty: 33 | | { } 34 | 35 | %% 36 | -------------------------------------------------------------------------------- /tests/boris-mini-no-eos.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | 3 | open Ast 4 | 5 | %} 6 | 7 | %token SEP 8 | %token DOT 9 | 10 | %token LIDENT 11 | %token EOF 12 | 13 | %start debut 14 | 15 | %% 16 | 17 | 18 | 19 | inst : 20 | | i = LIDENT e = expr 21 | { () } 22 | | SEP i1 = inst i2 = inst 23 | { () } 24 | | e = expr DOT 25 | { () } 26 | 27 | 28 | 29 | expr: 30 | | i = LIDENT 31 | { () } 32 | | e = expr DOT 33 | { () } 34 | | i = inst DOT e = expr 35 | { () } 36 | 37 | debut: 38 | | expr EOF 39 | { () } 40 | -------------------------------------------------------------------------------- /tests/boris-mini.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | 3 | open Ast 4 | 5 | %} 6 | 7 | %token SEP 8 | %token DOT 9 | 10 | %token LIDENT 11 | 12 | 13 | %start debut 14 | 15 | %% 16 | 17 | 18 | 19 | inst : 20 | | i = LIDENT e = expr 21 | { () } 22 | | SEP i1 = inst i2 = inst 23 | { () } 24 | | e = expr DOT 25 | { () } 26 | 27 | 28 | 29 | expr: 30 | | i = LIDENT 31 | { () } 32 | | e = expr DOT 33 | { () } 34 | | i = inst DOT e = expr 35 | { () } 36 | 37 | debut: 38 | | expr DOT 39 | { () } 40 | 41 | -------------------------------------------------------------------------------- /tests/calc-anonymous.mly: -------------------------------------------------------------------------------- 1 | %token INT 2 | %token PLUS MINUS TIMES DIV 3 | %token LPAREN RPAREN 4 | %token EOL 5 | 6 | %left PLUS MINUS /* lowest precedence */ 7 | %left TIMES DIV /* medium precedence */ 8 | %nonassoc UMINUS /* highest precedence */ 9 | 10 | %start main 11 | 12 | (* The calc demo, with an anonymous rule for binary operators. 
*) 13 | 14 | %% 15 | 16 | main: 17 | | e = expr EOL 18 | { e } 19 | 20 | expr: 21 | | i = INT 22 | { i } 23 | | LPAREN e = expr RPAREN 24 | { e } 25 | | e1 = expr 26 | op = anonymous(PLUS { (+) } | MINUS { (-) } | TIMES { ( * ) } | DIV { (/) }) 27 | e2 = expr 28 | { op e1 e2 } 29 | | MINUS e = expr %prec UMINUS 30 | { - e } 31 | 32 | -------------------------------------------------------------------------------- /tests/calc-locations.mly: -------------------------------------------------------------------------------- 1 | %token INT 2 | %token PLUS MINUS TIMES DIV 3 | %token LPAREN RPAREN 4 | %token EOL 5 | %left PLUS MINUS 6 | %left TIMES DIV 7 | %nonassoc UMINUS 8 | %start main 9 | %type main 10 | %% 11 | main: 12 | expr EOL { $startpos($1), $endpos($1) } 13 | ; 14 | expr: 15 | INT { $1 } 16 | | LPAREN expr RPAREN { $2 } 17 | | expr PLUS expr { $1 + $3 } 18 | | expr MINUS expr { $1 - $3 } 19 | | expr TIMES expr { $1 * $3 } 20 | | expr DIV expr { $1 / $3 } 21 | | MINUS expr %prec UMINUS { - $2 } 22 | ; 23 | -------------------------------------------------------------------------------- /tests/calc-never-useful.mly: -------------------------------------------------------------------------------- 1 | (* A copy of calc.mly, with an extra intentionally useless 2 | precedence declaration -- warning suppressed via a flag. *) 3 | 4 | %token INT 5 | %token PLUS MINUS TIMES DIV 6 | %token LPAREN RPAREN 7 | %token EOL 8 | %left EOL 9 | %left PLUS MINUS 10 | %left TIMES DIV 11 | %nonassoc UMINUS 12 | %start main 13 | %type main 14 | %% 15 | main: 16 | expr EOL { $1 } 17 | ; 18 | expr: 19 | INT { $1 } 20 | | LPAREN expr RPAREN { $2 } 21 | | expr PLUS expr { $1 + $3 } 22 | | expr MINUS expr { $1 - $3 } 23 | | expr TIMES expr { $1 * $3 } 24 | | expr DIV expr { $1 / $3 } 25 | | MINUS expr %prec UMINUS { - $2 } 26 | ; 27 | -------------------------------------------------------------------------------- /tests/calc-new-syntax.mly: -------------------------------------------------------------------------------- 1 | %token INT 2 | %token PLUS MINUS TIMES DIV 3 | %token LPAREN RPAREN 4 | %token EOL 5 | 6 | %start main 7 | 8 | (* In this demo, we do not use precedence declarations (%left, etc.). 9 | Instead, we manually stratify the grammar. It is quite easy and 10 | allows us to demonstrate the use of parameterized definitions. *) 11 | 12 | %% 13 | 14 | (* -------------------------------------------------------------------------- *) 15 | 16 | (* [fold_left(op, elem)] recognizes a nonempty, left-associative list of 17 | elements, separated with operators. The semantic value of the symbol [op] 18 | is expected to be a binary function, which is applied to the left-hand 19 | summary and to the right-hand element. *) 20 | 21 | let fold_left(op, elem) := 22 | | elem 23 | | sum = fold_left(op, elem); ~ = op; ~ = elem; { op sum elem } 24 | 25 | (* -------------------------------------------------------------------------- *) 26 | 27 | (* [app(f, x)] recognizes the sequence [f; x]. Its semantic value is the 28 | application of the semantic value of [f] to the semantic value of [x]. *) 29 | 30 | let app(f, x) == 31 | ~ = f; ~ = x; { f x } 32 | 33 | (* -------------------------------------------------------------------------- *) 34 | 35 | (* We wish to parse an expression followed with an end-of-line. *) 36 | 37 | (* The notation <> is a short-hand for a semantic action {...} that builds 38 | a tuple of the variables that have been introduced. 
Here, one variable 39 | [expr] has been introduced by [~ = expr], so <> stands for {expr}. *) 40 | 41 | let main := 42 | ~ = expr; EOL; <> 43 | 44 | (* An expression is an additive expression. *) 45 | 46 | let expr == 47 | additive_expr 48 | 49 | (* An additive expression is a left-associative list of multiplicative 50 | expressions, separated with additive operators. *) 51 | 52 | let additive_expr == 53 | fold_left(additive_op, multiplicative_expr) 54 | 55 | (* These are the additive operators and their meaning. *) 56 | 57 | let additive_op == 58 | | PLUS; { ( + ) } 59 | | MINUS; { ( - ) } 60 | 61 | (* A multiplicative expression is a left-associative list of atomic 62 | expressions, separated with multiplicative operators. *) 63 | 64 | let multiplicative_expr == 65 | fold_left(multiplicative_op, atomic_expr) 66 | 67 | (* These are the multiplicative operators and their meaning. *) 68 | 69 | let multiplicative_op == 70 | | TIMES; { ( * ) } 71 | | DIV; { ( / ) } 72 | 73 | (* An atomic expression is one of: 74 | an integer literal, 75 | an expression between parentheses, 76 | an application of a unary operator to an atomic expression. *) 77 | 78 | let atomic_expr := 79 | | INT 80 | | delimited(LPAREN, expr, RPAREN) 81 | | app(unary_op, atomic_expr) 82 | 83 | (* These are the unary operators and their meaning. *) 84 | 85 | let unary_op == 86 | | MINUS; { (~- ) } 87 | -------------------------------------------------------------------------------- /tests/calc.mly: -------------------------------------------------------------------------------- 1 | %token INT 2 | %token PLUS MINUS TIMES DIV 3 | %token LPAREN RPAREN 4 | %token EOL 5 | %left PLUS MINUS 6 | %left TIMES DIV 7 | %nonassoc UMINUS 8 | %start main 9 | %type main 10 | %% 11 | main: 12 | expr EOL { $1 } 13 | ; 14 | expr: 15 | INT { $1 } 16 | | LPAREN expr RPAREN { $2 } 17 | | expr PLUS expr { $1 + $3 } 18 | | expr MINUS expr { $1 - $3 } 19 | | expr TIMES expr { $1 * $3 } 20 | | expr DIV expr { $1 / $3 } 21 | | MINUS expr %prec UMINUS { - $2 } 22 | ; 23 | -------------------------------------------------------------------------------- /tests/camltemplate.1.0.2-ctParser.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/camltemplate.1.0.2-ctParser.mly -------------------------------------------------------------------------------- /tests/cil-cparser.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/cil-cparser.mly -------------------------------------------------------------------------------- /tests/cime-coq.mly: -------------------------------------------------------------------------------- 1 | /* This parser is done to match the interface with Coq that was previously done for the ELAN system */ 2 | 3 | %{ 4 | open Coq_syntax 5 | open Gen_terms 6 | 7 | let current_variables = new vars 8 | let opset = new operators 9 | %} 10 | 11 | %token COMMA COLON LEFT_PAR RIGHT_PAR RIGHTARROW EQUAL NEWLINE 12 | %token SEARCH THEORY SORT FUNCTION RULE END TERM QUIT ERROR 13 | %token IDENT 14 | 15 | %start search 16 | %type search 17 | 18 | %start theory_oe 19 | %type theory_oe 20 | 21 | %start term_oe 22 | %type term_oe 23 | 24 | %% 25 | 26 | search : 27 | | SEARCH IDENT comment 28 | { $2 } 29 | | ERROR comment 30 | { raise (Client_error $2) } 31 | ; 32 | 33 | theory_oe : 34 | /* On 
abandonne le SORT */ 35 | | THEORY IDENT comment sortp signature rule_set END comment 36 | { ($2, $5, $6) } 37 | | ERROR comment 38 | { raise (Client_error $2) } 39 | ; 40 | 41 | term_oe : 42 | | TERM comment term NEWLINE END 43 | { $3 } 44 | | ERROR comment 45 | { raise (Client_error $2) } 46 | | QUIT comment 47 | { raise Quit } 48 | ; 49 | 50 | comment : 51 | | NEWLINE 52 | { "" } 53 | | IDENT comment 54 | { $1^" "^$2 } 55 | | punctuation comment 56 | { $1^" "^$2 } 57 | 58 | punctuation : 59 | | RIGHT_PAR 60 | { ")" } 61 | | LEFT_PAR 62 | { "(" } 63 | | COLON 64 | { ":" } 65 | | COMMA 66 | { "," } 67 | 68 | term : 69 | | LEFT_PAR IDENT term_seq RIGHT_PAR /* all operators are prefix */ 70 | { make_term hash_consing_table $2 $3 } 71 | | IDENT 72 | { if opset#contains $1 then 73 | make_term hash_consing_table $1 [] 74 | else 75 | make_var_term hash_consing_table (current_variables#var_of_string $1) } 76 | ; 77 | 78 | term_seq : 79 | | term 80 | { [$1] } 81 | | term term_seq /* arguments are sequentially listed, separated by spaces */ 82 | { $1::$2 } 83 | ; 84 | 85 | sortp : 86 | | sort 87 | { [$1] } 88 | | sort sortp 89 | { $1::$2 } 90 | ; 91 | 92 | sort : 93 | SORT ident_seq NEWLINE 94 | { $2 } 95 | ; 96 | 97 | ident_seq : 98 | | IDENT 99 | { [$1] } 100 | | IDENT COMMA ident_seq 101 | { $1::$3 } 102 | ; 103 | 104 | signature : 105 | | symbol 106 | { [$1] } 107 | | symbol signature 108 | { $1::$2 } 109 | ; 110 | 111 | symbol : 112 | FUNCTION ident_seq COLON types NEWLINE 113 | { opset#union $2; 114 | ($2, $4) } 115 | ; 116 | 117 | types : 118 | | IDENT 119 | { 0 } 120 | | IDENT RIGHTARROW types 121 | { 1+$3 } 122 | 123 | rule_set : 124 | | rule 125 | { [$1] } 126 | | rule rule_set 127 | { $1::$2 } 128 | ; 129 | 130 | rule : 131 | RULE IDENT term EQUAL term NEWLINE 132 | { ($2, $3, $5) } 133 | ; 134 | -------------------------------------------------------------------------------- /tests/cime-genpoly.mly: -------------------------------------------------------------------------------- 1 | /*************************************************************************** 2 | 3 | parser for generic polynomials and polynomial interpretations 4 | 5 | $Id: genpoly_parser.mly,v 1.5 2002/05/27 12:07:35 contejea Exp $ 6 | 7 | ***************************************************************************/ 8 | 9 | %{ 10 | 11 | open Signatures;; 12 | open Generic_polynomials;; 13 | open Genpoly_syntax;; 14 | open Non_linear_solving;; 15 | open Finite_domains;; 16 | open Poly_interp;; 17 | 18 | exception Syntax_error of string 19 | 20 | let gen_var s = 21 | try 22 | let n = Listutils.index s !current_poly_vars 23 | in GenericPolynomials.var n 24 | with 25 | Not_found -> 26 | GenericPolynomials.cte 27 | (Fd_polynomials.var (fd_var_id_of_string s)) 28 | ;; 29 | 30 | let var s = 31 | try 32 | let n = Listutils.index s !current_poly_vars 33 | in IntPolynomials.var n 34 | with 35 | Not_found -> raise (Syntax_error ("undefined variable "^s)) 36 | ;; 37 | 38 | %} 39 | 40 | 41 | %token VAR 42 | %token INTERP 43 | %token PARGAUCHE PARDROITE SEMICOLON EQUAL COMMA EOF 44 | %token PLUS MINUS EXP MULT 45 | %token INT 46 | 47 | %start gen_poly_entry 48 | %type gen_poly_entry 49 | 50 | %start poly_interp_entry 51 | %type <(User_signatures.symbol_id Marked_dp_criteria.dupl, Poly_interp.IntPolynomials.poly) Signatures.SymbolMap.t > poly_interp_entry 52 | 53 | %left PLUS MINUS 54 | %left MULT 55 | %nonassoc UMINUS 56 | %right EXP 57 | 58 | %% 59 | 60 | gen_poly_entry: 61 | gen_poly EOF { $1 } 62 | ; 63 | 64 | 65 | gen_poly: 66 | 
VAR { gen_var $1 } 67 | | INT { GenericPolynomials.cte (Fd_polynomials.cte $1) } 68 | | PARGAUCHE gen_poly PARDROITE { $2 } 69 | | gen_poly PLUS gen_poly { GenericPolynomials.add $1 $3 } 70 | | gen_poly MINUS gen_poly { GenericPolynomials.sub $1 $3 } 71 | | MINUS gen_poly %prec UMINUS { GenericPolynomials.minus $2 } 72 | | gen_poly MULT gen_poly { GenericPolynomials.mult $1 $3 } 73 | | gen_poly EXP INT 74 | { try 75 | GenericPolynomials.power $1 (Num.int_of_num $3) 76 | with 77 | Failure("int_of_big_int") -> 78 | failwith "Exponent too large" 79 | } 80 | ; 81 | 82 | poly_interp_entry : 83 | interp EOF { $1 } 84 | ; 85 | 86 | interp : 87 | /* epsilon */ { SymbolMap.empty } 88 | | symbol_interp EQUAL poly SEMICOLON interp 89 | { SymbolMap.add $1 $3 $5 } 90 | ; 91 | 92 | symbol_interp: 93 | INTERP { current_poly_vars := []; $1 } 94 | | INTERP PARGAUCHE vars { current_poly_vars := $3; $1 } 95 | ; 96 | 97 | vars: 98 | VAR PARDROITE { [$1] } 99 | | VAR COMMA vars { $1::$3 } 100 | ; 101 | 102 | 103 | poly: 104 | VAR { var $1 } 105 | | INT { IntPolynomials.cte $1 } 106 | | PARGAUCHE poly PARDROITE { $2 } 107 | | poly PLUS poly { IntPolynomials.add $1 $3 } 108 | | poly MINUS poly { IntPolynomials.sub $1 $3 } 109 | | MINUS poly %prec UMINUS { IntPolynomials.minus $2 } 110 | | poly MULT poly { IntPolynomials.mult $1 $3 } 111 | | poly EXP INT 112 | { try 113 | IntPolynomials.power $1 (Num.int_of_num $3) 114 | with 115 | Failure("int_of_big_int") -> 116 | failwith "Exponent too large" 117 | } 118 | ; 119 | -------------------------------------------------------------------------------- /tests/cime-parameterized-signatures.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/cime-parameterized-signatures.mly -------------------------------------------------------------------------------- /tests/cime-poly-interp.mly: -------------------------------------------------------------------------------- 1 | /*************************************************************************** 2 | 3 | parser for polynomial interpretations 4 | 5 | $Id: poly_interp_parser.mly,v 1.3 2003/07/11 15:42:48 contejea Exp $ 6 | 7 | ***************************************************************************/ 8 | 9 | %{ 10 | 11 | open Signatures;; 12 | open Poly_interp;; 13 | 14 | exception Syntax_error of string 15 | 16 | let var s = 17 | try 18 | let n = Listutils.index s !current_poly_vars 19 | in IntPolynomials.var n 20 | with 21 | Not_found -> raise (Syntax_error ("undefined variable "^s)) 22 | ;; 23 | 24 | %} 25 | 26 | 27 | %token VAR 28 | %token INTERP 29 | %token LEFT_BRA RIGHT_BRA 30 | %token LEFT_PAR RIGHT_PAR SEMICOLON EQUAL COMMA EOF 31 | %token PLUS MINUS EXP MULT 32 | %token INT 33 | 34 | %start poly_interp_entry 35 | %type <(User_signatures.symbol_id, Poly_interp.IntPolynomials.poly) Signatures.SymbolMap.t > poly_interp_entry 36 | 37 | %start weight_entry 38 | %type <(User_signatures.symbol_id, int) Signatures.SymbolMap.t > weight_entry 39 | 40 | %left PLUS MINUS 41 | %left MULT 42 | %nonassoc UMINUS 43 | %right EXP 44 | 45 | %% 46 | 47 | poly_interp_entry : 48 | interp EOF { $1 } 49 | ; 50 | 51 | interp : 52 | /* epsilon */ { SymbolMap.empty } 53 | | symbol_interp EQUAL poly SEMICOLON interp 54 | { SymbolMap.add $1 $3 $5 } 55 | ; 56 | 57 | symbol_interp: 58 | INTERP { current_poly_vars := []; $1 } 59 | | INTERP LEFT_PAR vars { current_poly_vars := $3; $1 } 60 | ; 61 | 62 | vars: 63 | VAR 
RIGHT_PAR { [$1] } 64 | | VAR COMMA vars { $1::$3 } 65 | ; 66 | 67 | 68 | poly: 69 | VAR { var $1 } 70 | | INT { IntPolynomials.cte $1 } 71 | | LEFT_PAR poly RIGHT_PAR { $2 } 72 | | poly PLUS poly { IntPolynomials.add $1 $3 } 73 | | poly MINUS poly { IntPolynomials.sub $1 $3 } 74 | | MINUS poly %prec UMINUS { IntPolynomials.minus $2 } 75 | | poly MULT poly { IntPolynomials.mult $1 $3 } 76 | | poly EXP INT 77 | { try 78 | IntPolynomials.power $1 (Numbers.to_int $3) 79 | with 80 | Failure("int_of_big_int") -> 81 | failwith "Exponent too large" 82 | } 83 | ; 84 | 85 | 86 | weight_entry: 87 | weight EOF { $1 } 88 | ; 89 | 90 | weight: 91 | /* epsilon */ { SymbolMap.empty } 92 | | INTERP EQUAL INT SEMICOLON weight { SymbolMap.add $1 (Numbers.to_int $3) $5 } 93 | 94 | 95 | 96 | 97 | -------------------------------------------------------------------------------- /tests/cime-poly.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/cime-poly.mly -------------------------------------------------------------------------------- /tests/cime-signature.mly: -------------------------------------------------------------------------------- 1 | /*************************************************************************** 2 | 3 | parser for user signatures 4 | 5 | $Id: signature_parser.mly,v 1.2 2001/04/20 13:42:28 marche Exp $ 6 | 7 | ***************************************************************************/ 8 | 9 | %{ 10 | 11 | open Symbols 12 | open C_declare_operator 13 | 14 | %} 15 | 16 | 17 | %token IDENT INT 18 | %token COMMA COLON SEMICOLON 19 | %token KW_PREFIX KW_INFIX KW_POSTFIX 20 | %token KW_C KW_AC KW_CONSTANT KW_UNARY KW_BINARY 21 | %token EOF 22 | 23 | %start signature 24 | %type <(string list * int * Symbols.fix_type * C_declare_operator.symbol_type) list> signature 25 | 26 | 27 | %% 28 | 29 | signature: 30 | EOF { [] } 31 | | decl SEMICOLON signature { $1::$3 } 32 | ; 33 | 34 | 35 | decl: 36 | op_list COLON fix arity 37 | { let t,a = $4 38 | in 39 | if $3=INFIX & a<>2 40 | then Errors.semantical_error "Infix symbols must be binary" 41 | else ($1,a,$3,t) 42 | } 43 | /* 44 | | op_list_colon fix arity AS profile_list 45 | { let t,a = $3 in 46 | if $2=INFIX & a<>2 47 | then raise (Erreur_de_syntaxe "Infix symbols must be binary") 48 | else 49 | if (List.exists (fun x -> (List.length x-1)<>a) $5) 50 | then raise (Erreur_de_syntaxe "Profile must be compatible with arity") 51 | else 52 | begin 53 | (List.iter (definir_operateur t a $2 (List.map see_as_functional_sort $5)) $1); 54 | ($1,$2,$3) 55 | end 56 | } 57 | */ 58 | ; 59 | 60 | /* 61 | profile_list: 62 | profile {[$1]} 63 | | profile COMMA profile_list {$1::$3} 64 | ; 65 | 66 | profile: 67 | base_sorte {[$1]} 68 | | base_sorte ARROW profile {$1::$3} 69 | ; 70 | */ 71 | 72 | fix: 73 | KW_PREFIX { PREFIX } 74 | | KW_INFIX { INFIX } 75 | | KW_POSTFIX { POSTFIX } 76 | | /* epsilon */ { DEFAULT } 77 | ; 78 | arity: 79 | KW_C { (C,2) } 80 | | KW_AC { (AC,2) } 81 | | KW_CONSTANT { (FREE,0) } 82 | | KW_UNARY { (FREE,1) } 83 | | KW_BINARY { (FREE,2) } 84 | | INT { (FREE,int_of_string $1) } 85 | ; 86 | op_list: 87 | ident { [$1] } 88 | | ident COMMA op_list { $1::$3 } 89 | ; 90 | ident: 91 | IDENT { $1 } 92 | | INT { $1 } 93 | 94 | -------------------------------------------------------------------------------- /tests/cime-term.mly: -------------------------------------------------------------------------------- 1 | 
/*************************************************************************** 2 | 3 | parser for user signatures 4 | 5 | $Id: term_parser.mly,v 1.2 2001/04/20 13:42:28 marche Exp $ 6 | 7 | ***************************************************************************/ 8 | 9 | %{ 10 | 11 | open C_declare_operator 12 | open Symbols 13 | open Terms 14 | open Errors 15 | open Equations 16 | 17 | %} 18 | 19 | 20 | %token PREFIX_IDENT POSTFIX_IDENT INFIX_IDENT 21 | %token COMMA SEMICOLON OPENPAR CLOSEPAR ARROW 22 | %token EOF 23 | 24 | %left INFIX_IDENT TERMLIST 25 | %nonassoc CLOSEPAR 26 | 27 | %start term_eof 28 | %type term_eof 29 | 30 | %start rule_set_eof 31 | %type rule_set_eof 32 | 33 | 34 | %% 35 | 36 | term_eof: 37 | term EOF { $1 } 38 | ; 39 | 40 | term : 41 | PREFIX_IDENT 42 | { (* printf "forme a\n"; *) 43 | try 44 | VAR (var_id_of_string $1) 45 | with Not_found -> 46 | let f=(get_symbol_id $1) in 47 | if (arity f)=0 48 | then TERM(f,[]) 49 | else semantical_error ("Bad number of arguments for " ^ $1) 50 | } 51 | | OPENPAR term CLOSEPAR 52 | { $2 53 | } 54 | | PREFIX_IDENT OPENPAR term_list CLOSEPAR 55 | { (* printf "forme f(...)\n"; *) 56 | let f=(get_symbol_id $1) in 57 | if (arity f)=(List.length $3) 58 | then TERM(f,$3) 59 | else semantical_error ("Bad number of arguments for " ^ $1) 60 | } 61 | | OPENPAR term CLOSEPAR POSTFIX_IDENT 62 | { (* printf "forme f(...)\n"; *) 63 | let f=(get_symbol_id $4) in 64 | if (arity f)=1 65 | then TERM(f,[$2]) 66 | else semantical_error ("Bad number of arguments for " ^ $4) 67 | } 68 | | OPENPAR term_list CLOSEPAR POSTFIX_IDENT 69 | { (* printf "forme f(...)\n"; *) 70 | let f=(get_symbol_id $4) in 71 | if (arity f)=(List.length $2) 72 | then TERM(f,$2) 73 | else semantical_error ("Bad number of arguments for " ^ $4) 74 | } 75 | | term INFIX_IDENT term 76 | { let f = (get_symbol_id $2) in 77 | TERM(f,[$1;$3]) 78 | } 79 | ; 80 | 81 | term_list: 82 | term %prec TERMLIST { [$1] } 83 | | term COMMA term_list { $1 :: $3 } 84 | ; 85 | 86 | rule_set_eof: 87 | rule_set EOF { $1 } 88 | ; 89 | 90 | rule_set: 91 | rule { [$1] } 92 | | rule SEMICOLON rule_set { $1::$3 } 93 | ; 94 | rule: 95 | term ARROW term { make_basic_rule ((flatten_term $1), 96 | (flatten_term $3)) } 97 | 98 | -------------------------------------------------------------------------------- /tests/cime-terms-signature.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/cime-terms-signature.mly -------------------------------------------------------------------------------- /tests/cime-terms.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/cime-terms.mly -------------------------------------------------------------------------------- /tests/cime-theory.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/cime-theory.mly -------------------------------------------------------------------------------- /tests/cime-toplevel.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/cime-toplevel.mly 
-------------------------------------------------------------------------------- /tests/cime-word.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/cime-word.mly -------------------------------------------------------------------------------- /tests/cohttp.1.2.0-accept_parser.mly: -------------------------------------------------------------------------------- 1 | (* Original file: cohttp.1.2.0/cohttp-v1.2.0/cohttp/src/accept_parser.mly *) 2 | /* 3 | Copyright (C) 2012, David Sheets 4 | 5 | Permission to use, copy, modify, and/or distribute this software for 6 | any purpose with or without fee is hereby granted, provided that the 7 | above copyright notice and this permission notice appear in all 8 | copies. 9 | 10 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL 11 | WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED 12 | WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE 13 | AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL 14 | DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA 15 | OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER 16 | TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR 17 | PERFORMANCE OF THIS SOFTWARE. 18 | */ 19 | 20 | %{ 21 | open Accept_types 22 | 23 | type param = Q of int | Kv of (string * pv) 24 | 25 | let rec get_q = function 26 | | (Q q)::_ -> q 27 | | _::r -> get_q r 28 | | [] -> 1000 29 | 30 | let get_rest pl = List.fold_right 31 | (function Kv p -> fun l -> p::l | Q _ -> fun l -> l) pl [] 32 | %} 33 | 34 | %token STAR SLASH SEMI COMMA EQUAL EOI 35 | %token TOK QS 36 | %start media_ranges charsets encodings languages 37 | %type <(Accept_types.media_range * Accept_types.p list) Accept_types.qlist> media_ranges 38 | %type charsets 39 | %type encodings 40 | %type languages 41 | %% 42 | 43 | param : 44 | | SEMI TOK EQUAL QS { Kv ($2, S $4) } 45 | | SEMI TOK EQUAL TOK { 46 | if $2="q" then try Q (truncate (1000.*.(float_of_string $4))) 47 | with Failure _ -> raise Parsing.Parse_error 48 | else Kv ($2, T $4) 49 | } 50 | 51 | params : 52 | | param params { $1::$2 } 53 | | { [] } 54 | 55 | media_range : 56 | | STAR SLASH STAR params { 57 | (get_q $4, (AnyMedia, get_rest $4)) 58 | } 59 | | TOK SLASH STAR params { 60 | (get_q $4, (AnyMediaSubtype (String.lowercase_ascii $1), get_rest $4)) 61 | } 62 | | TOK SLASH TOK params { 63 | (get_q $4, (MediaType (String.lowercase_ascii $1, String.lowercase_ascii $3), get_rest $4)) 64 | } 65 | 66 | media_ranges : 67 | | media_range EOI { [$1] } 68 | | media_range COMMA media_ranges { $1::$3 } 69 | | EOI { [] } 70 | 71 | charset : 72 | | TOK params { (get_q $2, Charset (String.lowercase_ascii $1)) } 73 | | STAR params { (get_q $2, AnyCharset) } 74 | 75 | charsets : 76 | | charset EOI { [$1] } 77 | | charset COMMA charsets { $1::$3 } 78 | 79 | encoding : 80 | | TOK params { 81 | (get_q $2, match (String.lowercase_ascii $1) with 82 | | "gzip" -> Gzip 83 | | "compress" -> Compress 84 | | "deflate" -> Deflate 85 | | "identity" -> Identity 86 | | enc -> Encoding enc 87 | ) 88 | } 89 | | STAR params { (get_q $2, AnyEncoding) } 90 | 91 | encodings : 92 | | encoding EOI { [$1] } 93 | | encoding COMMA encodings { $1::$3 } 94 | | EOI { [] } 95 | 96 | language : 97 | | TOK params { 98 | (get_q $2, Language (Stringext.split ~on:'-' (String.lowercase_ascii $1))) 99 | } 100 | | STAR params { (get_q $2, 
AnyLanguage) } 101 | 102 | languages : 103 | | language EOI { [$1] } 104 | | language COMMA languages { $1::$3 } 105 | 106 | %% 107 | -------------------------------------------------------------------------------- /tests/condition_parser.mly: -------------------------------------------------------------------------------- 1 | /* 2 | * bibtex2html - A BibTeX to HTML translator 3 | * Copyright (C) 1997 Jean-Christophe FILLIATRE 4 | * 5 | * This software is free software; you can redistribute it and/or 6 | * modify it under the terms of the GNU General Public 7 | * License version 2, as published by the Free Software Foundation. 8 | * 9 | * This software is distributed in the hope that it will be useful, 10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 12 | * 13 | * See the GNU General Public License version 2 for more details 14 | * (enclosed in the file GPL). 15 | */ 16 | 17 | /*i $Id: condition_parser.mly,v 1.6 2003/10/03 15:37:30 marche Exp $ i*/ 18 | 19 | %{ 20 | 21 | open Condition 22 | 23 | %} 24 | 25 | %token IDENT STRING COMP 26 | %token INT 27 | %token COLON AND OR NOT LPAR RPAR DOLLAR_KEY DOLLAR_TYPE EXISTS EOF 28 | 29 | %start condition_start 30 | %type condition_start 31 | 32 | %left OR 33 | %left AND 34 | %left NOT 35 | 36 | %% 37 | 38 | condition_start: 39 | condition EOF { $1 } 40 | ; 41 | 42 | condition: 43 | condition OR condition { Or($1,$3) } 44 | | condition AND condition { And($1,$3) } 45 | | NOT condition { Not($2) } 46 | | LPAR condition RPAR { $2 } 47 | | atom { $1 } 48 | ; 49 | 50 | atom: 51 | | cte COLON STRING 52 | { let s = Latex_accents.normalize true $3 in 53 | (*i 54 | Printf.printf "regexp = %s\n" s; 55 | i*) 56 | Match($1, Str.regexp_case_fold s) } 57 | | cte COMP cte 58 | { Comp($1,$2,$3) } 59 | | EXISTS IDENT 60 | { Exists(String.uppercase $2) } 61 | ; 62 | 63 | cte: 64 | IDENT { Field(String.uppercase $1) } 65 | | INT { Cte($1) } 66 | | STRING { Cte($1) } 67 | | DOLLAR_KEY { Key } 68 | | DOLLAR_TYPE { Entrytype } 69 | ; 70 | 71 | 72 | -------------------------------------------------------------------------------- /tests/couac.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | 3 | open Syntax 4 | 5 | %} 6 | 7 | %token TOKEN TYPE LEFT RIGHT NONASSOC START PREC COLON BAR EOF 8 | %token LID UID 9 | %token HEADER OCAMLTYPE 10 | %token PERCENTPERCENT 11 | %token ACTION 12 | 13 | %start grammar 14 | %type grammar 15 | 16 | %{ 17 | 18 | let error i msg = 19 | Error.error2 (Parsing.rhs_start_pos i) (Parsing.rhs_end_pos i) msg 20 | 21 | %} 22 | 23 | %% 24 | 25 | grammar: 26 | declarations PERCENTPERCENT rules trailer 27 | { List.rev $1, $3, $4 } 28 | 29 | trailer: 30 | EOF 31 | { None } 32 | | PERCENTPERCENT /* followed by actual trailer */ 33 | { Some $1 } 34 | 35 | declarations: 36 | /* epsilon */ 37 | { [] } 38 | | declarations declaration 39 | { $2 @ $1 } 40 | 41 | declaration: 42 | | HEADER /* lexically delimited by %{ ... 
%} */ 43 | { [ DCode $1 ] } 44 | | TOKEN terminals 45 | { List.map (fun x -> DToken (None, x)) $2 } 46 | | TOKEN OCAMLTYPE /* lexically delimited by angle brackets */ terminals 47 | { List.map (fun x -> DToken (Some $2, x)) $3 } 48 | | START nonterminals 49 | { List.map (fun x -> DStart x) $2 } 50 | | TYPE OCAMLTYPE symbols 51 | { [] } /* TEMPORARY */ 52 | | LEFT symbols 53 | { [] } /* TEMPORARY */ 54 | | RIGHT symbols 55 | { [] } /* TEMPORARY */ 56 | | NONASSOC symbols 57 | { [] } /* TEMPORARY */ 58 | 59 | symbols: 60 | /* epsilon */ 61 | { [] } 62 | | symbols symbol 63 | { $2 :: $1 } 64 | 65 | /* One would like to require nonterminal symbols to begin with a lowercase 66 | letter, so as to lexically distinguish them for terminal symbols, which 67 | must begin with an uppercase letter. However, for compatibility with 68 | ocamlyacc, this is impossible. It can be required only for nonterminal 69 | symbols that are also start symbols. */ 70 | 71 | symbol: 72 | LID 73 | { $1 } 74 | | UID 75 | { $1 } 76 | 77 | terminals: 78 | /* epsilon */ 79 | { [] } 80 | | terminals UID 81 | { $2 :: $1 } 82 | | terminals LID 83 | { error 2 "Terminal symbols must begin with an uppercase letter." } 84 | 85 | nonterminals: 86 | /* epsilon */ 87 | { [] } 88 | | nonterminals LID 89 | { $2 :: $1 } 90 | | nonterminals UID 91 | { error 2 "Nonterminal start symbols must begin with a lowercase letter." } 92 | 93 | rules: 94 | /* epsilon */ 95 | { [] } 96 | | rules rule 97 | { $2 :: $1 } 98 | 99 | rule: 100 | symbol COLON optional_bar production productions 101 | { $1, $4 :: $5 } 102 | 103 | optional_bar: 104 | /* epsilon */ 105 | { () } 106 | | BAR 107 | { () } 108 | 109 | productions: 110 | /* epsilon */ 111 | { [] } 112 | | productions BAR production 113 | { $3 :: $1 } 114 | 115 | production: 116 | symbols precedence ACTION /* lexically delimited by braces */ 117 | /* TEMPORARY any symbol can be ERROR here */ 118 | { List.rev $1, $3, $2 } 119 | 120 | precedence: 121 | /* epsilon */ 122 | { None } 123 | | PREC symbol 124 | { Some $2 } 125 | -------------------------------------------------------------------------------- /tests/cutdown.mly: -------------------------------------------------------------------------------- 1 | /* Bug reported by Jan Midtgaard on 2012/09/25. */ 2 | /* The bug manifests itself when --explain is provided. */ 3 | /* Menhir says: 4 | Warning: one state has shift/reduce conflicts. 5 | Warning: one state has reduce/reduce conflicts. 6 | ** Internal failure (Pager's theorem). 7 | ** Tokens of interest: R_BRACKET 8 | ** Goal state: 14 9 | */ 10 | 11 | (* This problem, which remained ignored for many years, has been 12 | re-discovered as issue #21. *) 13 | 14 | (* This is an example where Menhir's implementation of Pager's 15 | algorithm produces two distinct conflicts, but the canonical 16 | automaton has only one conflict. So one of the two conflicts 17 | cannot be explained. 
*) 18 | 19 | %{ %} 20 | 21 | %token EOF 22 | %token NEW 23 | %token L_BRACKET R_BRACKET 24 | %token ASSIGN 25 | %token INTEGER_LITERAL 26 | 27 | %start goal /* the entry point */ 28 | %% 29 | 30 | goal : assignment EOF { } 31 | ; 32 | 33 | primary 34 | : INTEGER_LITERAL { } 35 | | array_access { } 36 | ; 37 | 38 | primary_not_name 39 | : array_access { } 40 | | NEW L_BRACKET expression R_BRACKET { } 41 | ; 42 | 43 | array_access 44 | : expression L_BRACKET primary_not_name R_BRACKET { } 45 | 46 | expression 47 | : primary { } 48 | | assignment { } 49 | ; 50 | 51 | assignment 52 | : left_hand_side ASSIGN expression { } 53 | ; 54 | 55 | left_hand_side 56 | : primary_not_name { } 57 | ; 58 | -------------------------------------------------------------------------------- /tests/dario.mly: -------------------------------------------------------------------------------- 1 | // Example inspired by Dario Teixeira's question. 2 | %token A B 3 | %start main 4 | %nonassoc block_is_finished 5 | %nonassoc A 6 | %% 7 | main: block* B {} 8 | block: items {} 9 | items: item {} %prec block_is_finished 10 | | item items {} 11 | item: A {} 12 | -------------------------------------------------------------------------------- /tests/dolmen.0.2-parseDimacs.mly: -------------------------------------------------------------------------------- 1 | (* Original file: dolmen.0.2/dolmen-0.2/src/languages/dimacs/parseDimacs.mly *) 2 | 3 | (* This file is free software, part of dolmen. See file "LICENSE" for more details *) 4 | 5 | /* Functor parameters */ 6 | 7 | %parameter 8 | %parameter 9 | %parameter 10 | 11 | /* Starting symbols */ 12 | 13 | %start file 14 | %start input 15 | 16 | %% 17 | 18 | input: 19 | | NEWLINE i=input 20 | { i } 21 | | p=start 22 | { Some p } 23 | | c=clause 24 | { Some c } 25 | | EOF 26 | { None } 27 | 28 | file: 29 | | NEWLINE* h=start l=cnf 30 | { h :: l } 31 | 32 | start: 33 | | P CNF nbvar=INT nbclause=INT NEWLINE 34 | { let loc = L.mk_pos $startpos $endpos in 35 | S.p_cnf ~loc nbvar nbclause } 36 | 37 | cnf: 38 | | EOF 39 | { [] } 40 | | NEWLINE l=cnf 41 | { l } 42 | | c=clause l=cnf 43 | { c :: l } 44 | 45 | clause: 46 | | c=nonempty_list(atom) ZERO NEWLINE 47 | { let loc = L.mk_pos $startpos $endpos in S.clause ~loc c } 48 | 49 | atom: 50 | | i=INT 51 | { let loc = L.mk_pos $startpos $endpos in T.atom ~loc i } 52 | -------------------------------------------------------------------------------- /tests/dolmen.0.2-parseDimacs01.mly: -------------------------------------------------------------------------------- 1 | 2 | (* This file is free software, part of dolmen. See file "LICENSE" for more details *) 3 | 4 | /* Tokens for dimacs parsing */ 5 | 6 | %token EOF 7 | %token ZERO 8 | %token P CNF 9 | %token NEWLINE 10 | %token INT 11 | 12 | %% 13 | 14 | -------------------------------------------------------------------------------- /tests/dolmen.0.2-parseTptp01.mly: -------------------------------------------------------------------------------- 1 | 2 | (* This file is free software, part of dolmen. 
See file "LICENSE" for more information *) 3 | 4 | /* Token declarations for Tptp parser */ 5 | 6 | %token EOF 7 | 8 | %token DOT 9 | %token COMMA 10 | %token COLON 11 | 12 | %token LEFT_PAREN 13 | %token RIGHT_PAREN 14 | %token LEFT_BRACKET 15 | %token RIGHT_BRACKET 16 | 17 | %token CNF 18 | %token FOF 19 | %token TFF 20 | %token THF 21 | %token TPI 22 | %token INCLUDE 23 | 24 | %token LAMBDA 25 | %token APPLY 26 | %token DEFINITE_DESCRIPTION 27 | %token INDEFINITE_DESCRIPTION 28 | %token FORALL_TY 29 | %token FORALL 30 | %token EXISTS_TY 31 | %token EXISTS 32 | 33 | %token PI 34 | %token SIGMA 35 | 36 | %token LESS 37 | %token ARROW 38 | 39 | %token STAR 40 | %token PLUS 41 | 42 | %token XOR 43 | %token EQUIV 44 | %token IMPLY 45 | %token LEFT_IMPLY 46 | 47 | %token NOT 48 | %token AND 49 | %token VLINE 50 | %token NOTAND 51 | %token NOTVLINE 52 | 53 | %token EQUAL 54 | %token NOT_EQUAL 55 | %token GENTZEN_ARROW 56 | 57 | %token ITE_F 58 | %token ITE_T 59 | %token LET_TF 60 | %token LET_FF 61 | %token LET_FT 62 | %token LET_TT 63 | 64 | %token DOLLAR_THF 65 | %token DOLLAR_TFF 66 | %token DOLLAR_FOF 67 | %token DOLLAR_CNF 68 | %token DOLLAR_FOT 69 | 70 | %token LOWER_WORD 71 | %token UPPER_WORD 72 | %token SINGLE_QUOTED 73 | %token DISTINCT_OBJECT 74 | %token DOLLAR_WORD 75 | %token DOLLAR_DOLLAR_WORD 76 | %token REAL 77 | %token RATIONAL 78 | %token INTEGER 79 | 80 | /* 81 | 82 | %left VLINE 83 | %left AND 84 | %nonassoc EQUIV 85 | %nonassoc XOR 86 | %nonassoc IMPLY 87 | %nonassoc LEFT_IMPLY 88 | %nonassoc NOTVLINE 89 | %nonassoc NOTAND 90 | 91 | */ 92 | 93 | %% 94 | 95 | -------------------------------------------------------------------------------- /tests/dune: -------------------------------------------------------------------------------- 1 | (test 2 | (name test) 3 | (libraries re unix) 4 | (deps (glob_files *.mly)) 5 | (action (run %{test} %{bin:obelisk} "%{deps}"))) 6 | -------------------------------------------------------------------------------- /tests/dune.1.4.0-opamBaseParser.mly: -------------------------------------------------------------------------------- 1 | (* Original file: dune.1.4.0/dune-1.4.0/vendor/opam-file-format/src/opamBaseParser.mly *) 2 | /**************************************************************************/ 3 | /* */ 4 | /* Copyright 2012-2015 OCamlPro */ 5 | /* Copyright 2012 INRIA */ 6 | /* */ 7 | /* All rights reserved. This file is distributed under the terms of the */ 8 | /* GNU Lesser General Public License version 2.1, with the special */ 9 | /* exception on linking described in the file LICENSE. 
*/ 10 | /* */ 11 | /**************************************************************************/ 12 | 13 | %{ 14 | 15 | open OpamParserTypes 16 | 17 | (** OPAM config file generic type parser *) 18 | 19 | let get_pos n = 20 | let pos = Parsing.rhs_start_pos n in 21 | Lexing.(pos.pos_fname, 22 | pos.pos_lnum, 23 | pos.pos_cnum - pos.pos_bol) 24 | 25 | %} 26 | 27 | %token STRING IDENT 28 | %token BOOL 29 | %token EOF 30 | %token LBRACKET RBRACKET 31 | %token LPAR RPAR 32 | %token LBRACE RBRACE 33 | %token COLON 34 | %token INT 35 | %token RELOP 36 | %token AND 37 | %token OR 38 | %token PFXOP 39 | %token ENVOP 40 | 41 | %left COLON 42 | %left ATOM 43 | %left AND 44 | %left OR 45 | %nonassoc ENVOP 46 | %nonassoc PFXOP 47 | %left LBRACE RBRACE 48 | %nonassoc RELOP 49 | %nonassoc URELOP 50 | 51 | %start main value 52 | %type OpamParserTypes.opamfile> main 53 | %type value 54 | 55 | %% 56 | 57 | main: 58 | | items EOF { fun file_name -> 59 | { file_contents = $1; file_name } } 60 | ; 61 | 62 | items: 63 | | item items { $1 :: $2 } 64 | | { [] } 65 | ; 66 | 67 | item: 68 | | IDENT COLON value { Variable (get_pos 1, $1, $3) } 69 | | IDENT LBRACE items RBRACE { 70 | Section (get_pos 1, 71 | {section_kind=$1; section_name=None; section_items= $3}) 72 | } 73 | | IDENT STRING LBRACE items RBRACE { 74 | Section (get_pos 1, 75 | {section_kind=$1; section_name=Some $2; section_items= $4}) 76 | } 77 | ; 78 | 79 | value: 80 | | atom %prec ATOM { $1 } 81 | | LPAR values RPAR { Group (get_pos 1,$2) } 82 | | LBRACKET values RBRACKET { List (get_pos 1,$2) } 83 | | value LBRACE values RBRACE { Option (get_pos 2,$1, $3) } 84 | | value AND value { Logop (get_pos 2,`And,$1,$3) } 85 | | value OR value { Logop (get_pos 2,`Or,$1,$3) } 86 | | atom RELOP atom { Relop (get_pos 2,$2,$1,$3) } 87 | | atom ENVOP atom { Env_binding (get_pos 1,$1,$2,$3) } 88 | | PFXOP value { Pfxop (get_pos 1,$1,$2) } 89 | | RELOP atom { Prefix_relop (get_pos 1,$1,$2) } 90 | ; 91 | 92 | values: 93 | | { [] } 94 | | value values { $1 :: $2 } 95 | ; 96 | 97 | atom: 98 | | IDENT { Ident (get_pos 1,$1) } 99 | | BOOL { Bool (get_pos 1,$1) } 100 | | INT { Int (get_pos 1,$1) } 101 | | STRING { String (get_pos 1,$1) } 102 | ; 103 | 104 | %% 105 | 106 | let main t l f = 107 | try 108 | let r = main t l f in 109 | Parsing.clear_parser (); 110 | r 111 | with 112 | | e -> 113 | Parsing.clear_parser (); 114 | raise e 115 | -------------------------------------------------------------------------------- /tests/duplicated_prec.mly: -------------------------------------------------------------------------------- 1 | (* Message from Valentin Gatien-Baron, Sat, 09 Jan 2010. *) 2 | 3 | (* Version of Menhir earlier than 2015/10/06 would warn that the 4 | declaration [%prec type_] is never useful. In fact, it is. 5 | This definition is duplicated by the inlining of ioption, 6 | and one of its copies is useful. 
*) 7 | 8 | %token Eoi Rparen Lparen Colon Ident Typevar 9 | 10 | %left type_ 11 | %left Lparen 12 | %start expr_eoi 13 | %% 14 | 15 | expr_eoi: expr Eoi {} 16 | 17 | type_expr: 18 | | Ident ioption(delimited(Lparen, type_expr, Rparen)) {} %prec type_ 19 | | Typevar {} 20 | 21 | expr: 22 | | Ident {} 23 | | expr Colon type_expr {} 24 | | expr Lparen expr Rparen {} 25 | 26 | -------------------------------------------------------------------------------- /tests/efuns.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | (***********************************************************************) 3 | (* *) 4 | (* xlib for Ocaml *) 5 | (* *) 6 | (* Fabrice Le Fessant, projet Para/SOR, INRIA Rocquencourt *) 7 | (* *) 8 | (* Copyright 1998 Institut National de Recherche en Informatique et *) 9 | (* Automatique. Distributed only by permission. *) 10 | (* *) 11 | (***********************************************************************) 12 | %} 13 | 14 | /* tokens */ 15 | 16 | %token EOF 17 | %token EOL 18 | %token SEP 19 | %token IDENT 20 | %token TILDE 21 | 22 | /* main entry points */ 23 | 24 | %start parse_passwd 25 | %type parse_passwd 26 | %start parse_filename 27 | %type parse_filename 28 | 29 | %% 30 | 31 | parse_passwd: 32 | passwd_line EOL parse_passwd { $1 :: $3 } 33 | | passwd_line EOF { [$1] } 34 | ; 35 | 36 | passwd_line: 37 | IDENT SEP passwd_line { $1 :: $3 } 38 | | SEP passwd_line { "" :: $2 } 39 | | IDENT { [$1] } 40 | | /* nothing */ { [""] } 41 | ; 42 | 43 | parse_filename: 44 | TILDE parse_filename { TILDE :: $2 } 45 | | SEP parse_filename { SEP :: $2 } 46 | | IDENT parse_filename { (IDENT $1) :: $2 } 47 | | EOF { [] } 48 | ; 49 | 50 | %% 51 | -------------------------------------------------------------------------------- /tests/electrum00.mly: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Time-stamp: <2015-01-26 CET 14:36:55 David Chemouil> 3 | * 4 | * Electrum Analyzer 5 | * Copyright (C) 2014-2015 Onera 6 | * Authors: 7 | * XXXX 8 | * 9 | * This file is part of the Electrum Analyzer. 10 | * 11 | * The Electrum Analyzer is free software: you can redistribute it and/or 12 | * modify it under the terms of the GNU General Public License as 13 | * published by the Free Software Foundation, either version 3 of the 14 | * License, or (at your option) any later version. 15 | * 16 | * The Electrum Analyzer is distributed in the hope that it will be 17 | * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 19 | * General Public License for more details. 20 | * 21 | * You should have received a copy of the GNU General Public License 22 | * along with the Electrum Analyzer. If not, see 23 | * . 
24 | ******************************************************************************/ 25 | 26 | 27 | %% 28 | 29 | %public %inline comma_sep(X) : 30 | xs = separated_list(COMMA, X) 31 | { xs } 32 | 33 | %public %inline comma_sep1(X) : 34 | xs = separated_nonempty_list(COMMA, X) 35 | { xs } 36 | 37 | %public %inline braces(X): 38 | x = delimited(LBRACE, X, RBRACE) 39 | { x } 40 | 41 | %public %inline brackets(X): 42 | x = delimited(LBRACKET, X, RBRACKET) 43 | { x } 44 | 45 | %public %inline parens(X): 46 | x = delimited(LPAREN, X, RPAREN) 47 | { x } 48 | 49 | %public %inline iboption(X): 50 | (* empty *) 51 | { false } 52 | | X 53 | { true } 54 | 55 | %public %inline first(X, Y): 56 | x = X Y { x } 57 | 58 | 59 | %public %inline second(X, Y): 60 | X y =Y { y } 61 | 62 | (* Given by François Pottier on 2015-01-21 63 | at http://gallium.inria.fr/blog/lr-lists/ *) 64 | %public right_flexible_list(delim, X): 65 | | (* nothing *) 66 | { [] } 67 | | x = X 68 | { [x] } 69 | | x = X delim xs = right_flexible_list(delim, X) 70 | { x :: xs } 71 | -------------------------------------------------------------------------------- /tests/electrum01.mly: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Time-stamp: <2015-02-24 CET 17:52:53 David Chemouil> 3 | * 4 | * Electrum Analyzer 5 | * Copyright (C) 2014-2015 Onera 6 | * Authors: 7 | * David Chemouil 8 | * 9 | * This file is part of the Electrum Analyzer. 10 | * 11 | * The Electrum Analyzer is free software: you can redistribute it and/or 12 | * modify it under the terms of the GNU General Public License as 13 | * published by the Free Software Foundation, either version 3 of the 14 | * License, or (at your option) any later version. 15 | * 16 | * The Electrum Analyzer is distributed in the hope that it will be 17 | * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 19 | * General Public License for more details. 20 | * 21 | * You should have received a copy of the GNU General Public License 22 | * along with the Electrum Analyzer. If not, see 23 | * . 24 | ******************************************************************************/ 25 | 26 | %{ 27 | open Ast.Ctrl 28 | %} 29 | 30 | %token RUN CHECK FOR EXPECT EXACTLY BUT 31 | %token NICKNAME 32 | 33 | %% 34 | 35 | 36 | %public cmd: 37 | c = named_cmd 38 | ioption(pair(EXPECT, NUMBER)) (* ignored *) 39 | { c } 40 | | b = block_cmd 41 | ioption(pair(EXPECT, NUMBER)) (* ignored *) 42 | { b } 43 | 44 | named_cmd: 45 | nick = first(NICKNAME, COLON)? (* nickname of the command *) 46 | cmd = run_or_check 47 | qname = qname (* name of the called pred or assert *) 48 | scope = scope? 49 | { make_named_cmd ~qname ~cmd ~scope ~nick } 50 | 51 | block_cmd: 52 | nick = first(NICKNAME, COLON)? (* nickname of the command *) 53 | cmd = run_or_check 54 | block = block 55 | scope = scope? 
56 | { make_block_cmd ~block ~cmd ~scope ~nick } 57 | 58 | 59 | %inline run_or_check: 60 | RUN 61 | { Run } 62 | | CHECK 63 | { Check } 64 | 65 | scope: 66 | | FOR 67 | num = NUMBER 68 | typescopes = loption(second(BUT, comma_sep1(typescope))) 69 | { make_scope_for_but ~num ~typescopes } 70 | | FOR 71 | typescopes = loption(comma_sep1(typescope)) 72 | { make_scope_for_types ~typescopes } 73 | 74 | typescope: 75 | exactly = iboption(EXACTLY) 76 | num = NUMBER 77 | sig_name = qname 78 | { make_typescope ~exactly ~num ~sig_name } 79 | -------------------------------------------------------------------------------- /tests/electrum03.mly: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Time-stamp: <2015-04-17 CEST 11:02:54 David Chemouil> 3 | * 4 | * Electrum Analyzer 5 | * Copyright (C) 2014-2015 Onera 6 | * Authors: 7 | * David Chemouil 8 | * 9 | * This file is part of the Electrum Analyzer. 10 | * 11 | * The Electrum Analyzer is free software: you can redistribute it and/or 12 | * modify it under the terms of the GNU General Public License as 13 | * published by the Free Software Foundation, either version 3 of the 14 | * License, or (at your option) any later version. 15 | * 16 | * The Electrum Analyzer is distributed in the hope that it will be 17 | * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 19 | * General Public License for more details. 20 | * 21 | * You should have received a copy of the GNU General Public License 22 | * along with the Electrum Analyzer. If not, see 23 | * . 24 | ******************************************************************************/ 25 | 26 | %{ 27 | open Ast.File 28 | %} 29 | 30 | %start file 31 | 32 | %token MODULE OPEN AS 33 | 34 | %% 35 | 36 | 37 | %public file: 38 | s = specification 39 | EOF 40 | { s } 41 | 42 | specification: 43 | m = module_decl? 44 | op = import* 45 | ps = paragraph_or_cmd* 46 | { (m, op, ps) } 47 | 48 | paragraph_or_cmd: 49 | p = paragraph 50 | { Par p } 51 | | c = cmd 52 | { Cmd c } 53 | 54 | module_decl: 55 | MODULE 56 | module_name = qname 57 | params = loption(brackets(comma_sep1(param))) 58 | { make_module_decl ~module_name ~params } 59 | 60 | param: 61 | ioption(EXACTLY) 62 | qn = qname 63 | { qn } 64 | 65 | import: 66 | OPEN 67 | module_name = qname 68 | params = loption(brackets(comma_sep1(qname))) 69 | pun = punning? 70 | { make_import ~module_name ~params ~pun } 71 | 72 | punning: 73 | AS 74 | name = IDENT 75 | { name } 76 | 77 | -------------------------------------------------------------------------------- /tests/electrum04.mly: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Time-stamp: <2015-03-05 CET 14:01:11 David Chemouil> 3 | * 4 | * Electrum Analyzer 5 | * Copyright (C) 2014-2015 Onera 6 | * Authors: 7 | * David Chemouil 8 | * 9 | * This file is part of the Electrum Analyzer. 10 | * 11 | * The Electrum Analyzer is free software: you can redistribute it and/or 12 | * modify it under the terms of the GNU General Public License as 13 | * published by the Free Software Foundation, either version 3 of the 14 | * License, or (at your option) any later version. 
15 | * 16 | * The Electrum Analyzer is distributed in the hope that it will be 17 | * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of 18 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 19 | * General Public License for more details. 20 | * 21 | * You should have received a copy of the GNU General Public License 22 | * along with the Electrum Analyzer. If not, see 23 | * . 24 | ******************************************************************************/ 25 | 26 | %{ 27 | open Ast.Par 28 | %} 29 | 30 | %token FACT ASSERT PRED PRIVATE FUN ENUM 31 | %token SIG EXTENDS ABSTRACT 32 | 33 | %% 34 | 35 | %public paragraph : 36 | f = factDecl 37 | { Fact f } 38 | | 39 | a = assertDecl 40 | { Assert a } 41 | | 42 | p = predDecl 43 | { Pred p } 44 | | 45 | f = funDecl 46 | { Fun f } 47 | | 48 | e = enumDecl 49 | { Enum e } 50 | | 51 | s = sigDecl 52 | { Sig s } 53 | 54 | factDecl : 55 | FACT 56 | name = IDENT? 57 | body = block 58 | { make_fact ~name ~body } 59 | 60 | assertDecl : 61 | ASSERT 62 | name = IDENT? 63 | body = block 64 | { make_assertion ~name ~body } 65 | 66 | predDecl : 67 | PRIVATE? 68 | PRED 69 | name = IDENT 70 | params = loption(brackets(comma_sep(decl))) 71 | body = block 72 | { make_pred ~name ~params ~body } 73 | 74 | funDecl : 75 | PRIVATE? 76 | FUN 77 | name = IDENT 78 | params = loption(brackets(comma_sep(decl))) 79 | COLON 80 | returns = expr 81 | body = expr 82 | { make_func ~name ~params ~body ~returns } 83 | 84 | enumDecl : 85 | ENUM 86 | name = IDENT 87 | cases = braces(comma_sep1(IDENT)) 88 | { make_enum ~name ~cases } 89 | 90 | sigDecl : 91 | is_variable = iboption(VAR) 92 | qual = sigQual 93 | SIG 94 | names = comma_sep1(IDENT) 95 | extends = sigExt? 96 | fields = braces(right_flexible_list(COMMA, decl)) 97 | fact = block? 98 | { make_signature ~is_variable ~is_abstract:(fst qual) ~mult:(snd qual) 99 | ~names ~extends ~fields ~fact } 100 | 101 | sigQual : 102 | | is_abstract = iboption(ABSTRACT) mult = ioption(some_lone_one) 103 | { (is_abstract, mult)} 104 | | mult = some_lone_one ABSTRACT 105 | { (true, Some mult)} 106 | 107 | %inline some_lone_one: 108 | LONE 109 | { `Lone } 110 | | ONE 111 | { `One } 112 | | SOME 113 | { `Some } 114 | 115 | sigExt : 116 | EXTENDS 117 | name = qname 118 | { Extends name } 119 | | IN 120 | names = separated_nonempty_list(PLUS, qname) 121 | { In names } 122 | 123 | 124 | -------------------------------------------------------------------------------- /tests/empty-action.mly: -------------------------------------------------------------------------------- 1 | %token A 2 | %start a 3 | %% 4 | 5 | a: b c 6 | {} 7 | 8 | %inline b: A {} 9 | 10 | c: A {} 11 | -------------------------------------------------------------------------------- /tests/end-of-stream-conflict.mly: -------------------------------------------------------------------------------- 1 | %token INT 2 | %token PLUS TIMES 3 | %start expr 4 | 5 | %left PLUS 6 | %left TIMES 7 | 8 | %% 9 | 10 | expr: 11 | i = INT 12 | { i } 13 | | e1 = expr PLUS e2 = expr 14 | { e1 + e2 } 15 | | e1 = expr TIMES e2 = expr 16 | { e1 * e2 } 17 | 18 | -------------------------------------------------------------------------------- /tests/expansion_ok.mly: -------------------------------------------------------------------------------- 1 | (* This grammar looks very much like ../bad/expansion_diverges.mly 2 | except [wrap] does not use its argument, so expansion actually 3 | terminates. This grammar can (and should) be accepted. 
*) 4 | 5 | %token A 6 | %start start 7 | %% 8 | 9 | start: 10 | seq(A) {} 11 | 12 | wrap(t): 13 | A {} 14 | 15 | seq(t): 16 | | wrap(seq(wrap(t))) {} 17 | -------------------------------------------------------------------------------- /tests/expansion_unused.mly: -------------------------------------------------------------------------------- 1 | (* Because [ignore] does not use its argument, expansion terminates. 2 | This grammar can (and should) be accepted. *) 3 | 4 | %token A B C 5 | %start start 6 | %% 7 | 8 | ignore(X): 9 | C {} 10 | 11 | foo(X): 12 | ignore(foo(bar(X))) {} 13 | 14 | bar(X): 15 | B {} 16 | 17 | start: 18 | foo(A) {} 19 | -------------------------------------------------------------------------------- /tests/fibonacci.mly: -------------------------------------------------------------------------------- 1 | %token A 2 | %start fib 3 | 4 | %% 5 | 6 | (* An intensive inlining test. 7 | The size of the grammar after inlining is exponential. 8 | For efficiency, inlining must be performed bottom-up, 9 | beginning by inlining [fib0] and [fib1] into [fib2], 10 | then inlining [fib2] at its use site, and so on. 11 | A top-down strategy, without memoization, would cause 12 | repeated work. *) 13 | 14 | let fib0 == A 15 | let fib1 == A 16 | let fib2 == fib0; fib1 17 | let fib3 == fib1; fib2 18 | let fib4 == fib2; fib3 19 | let fib5 == fib3; fib4 20 | let fib6 == fib4; fib5 21 | let fib7 == fib5; fib6 22 | let fib8 == fib6; fib7 23 | let fib9 == fib7; fib8 24 | let fib10 == fib8; fib9 25 | let fib11 == fib9; fib10 26 | let fib12 == fib10; fib11 27 | let fib13 == fib11; fib12 28 | let fib14 == fib12; fib13 29 | let fib15 == fib13; fib14 30 | let fib16 == fib14; fib15 31 | let fib17 == fib15; fib16 32 | let fib18 == fib16; fib17 (* 4 seconds *) 33 | let fib19 == fib17; fib18 (* 11 seconds *) 34 | let fib20 == fib18; fib19 (* 31 seconds *) 35 | 36 | let fib := fib9 37 | -------------------------------------------------------------------------------- /tests/flowcaml-docgen.mly: -------------------------------------------------------------------------------- 1 | /**************************************************************************/ 2 | /* */ 3 | /* Flow Caml */ 4 | /* */ 5 | /* Vincent Simonet, Projet Cristal, INRIA Rocquencourt */ 6 | /* */ 7 | /* Copyright 2002, 2003 Institut National de Recherche en Informatique */ 8 | /* et en Automatique. All rights reserved. This file is distributed */ 9 | /* under the terms of the Q Public License version 1.0. 
*/ 10 | /* */ 11 | /* Author contact: Vincent.Simonet@inria.fr */ 12 | /* Software page: http://cristal.inria.fr/~simonet/soft/flowcaml/ */ 13 | /* */ 14 | /**************************************************************************/ 15 | 16 | /* $Id: doc_parser.mly,v 1.4 2003/06/26 13:32:48 simonet Exp $ */ 17 | 18 | %{ 19 | open Doc_parsetree 20 | 21 | %} 22 | 23 | 24 | /***************************************************************************/ 25 | 26 | %token CLOSE 27 | %token DIRECTIVE 28 | %token EOF 29 | %token LBRACE 30 | %token OPEN 31 | %token RBRACE 32 | %token TERMINATE 33 | %token TEXT 34 | %token VERB 35 | %start file 36 | %type file 37 | 38 | %% 39 | 40 | file: 41 | EOF 42 | { [] } 43 | | OPEN ftext CLOSE file 44 | { 45 | { cmt_start = $1; 46 | cmt_end = $3; 47 | cmt_content = Ftext (List.rev $2) 48 | } :: $4 } 49 | | TERMINATE file 50 | { 51 | { cmt_start = fst $1; 52 | cmt_end = snd $1 ; 53 | cmt_content = Terminate 54 | } :: $2 } 55 | ; 56 | 57 | ftext: 58 | /*empty*/ 59 | { [] } 60 | | ftext DIRECTIVE 61 | { (Directive $2) :: $1 } 62 | | ftext TEXT 63 | { (String $2) :: $1 } 64 | | ftext LBRACE ftext RBRACE 65 | { (Block ($2, List.rev $3)) :: $1 } 66 | | ftext VERB 67 | { (Block ("src", [String $2])) :: $1 } 68 | ; 69 | -------------------------------------------------------------------------------- /tests/focc-pure-def.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | open Ast 3 | open Parsing 4 | open Lexing 5 | open Keywords 6 | %} 7 | 8 | %token END IMPORT FOR EQ LINK SEP EOF 9 | %token HEADER 10 | %token ID 11 | %token DEF 12 | %token COQ CAML CAMLI 13 | 14 | %start main 15 | %type main 16 | 17 | %% 18 | 19 | main: 20 | | EOF {()} 21 | | header_def main { $1 } 22 | | coll_def main { $1 } 23 | ; 24 | 25 | header_def: 26 | COQ HEADER { Hashtbl.add pure_def (toplevel,coq_h) ("",Some $2) } 27 | | CAML HEADER {Hashtbl.add pure_def (toplevel,caml_h) ($2,None)} 28 | | CAMLI HEADER {Hashtbl.add pure_def (toplevel,caml_hi) ($2,None)} 29 | ; 30 | coll_def: 31 | | IMPORT FOR ID limport IMPORT 32 | { List.iter (fun x -> let (a,b,c,d) = x in 33 | (if c <> "" then Hashtbl.add pure_def ($3,c) ("",d)); 34 | Hashtbl.add pure_def ($3 , a) (b,d)) $4 } 35 | ; 36 | limport: 37 | | END { [] } 38 | | ID idlist def SEP limport { let (a,b) = $3 in 39 | let name = if a = "" then $1 else a in 40 | (name,b,"",None) :: $5 } 41 | | ID idlist def LINK def SEP limport { let (a,b) = $3 in 42 | let name = if a = "" then $1 else a in 43 | let (c,d) = $5 in 44 | (name,b,c,Some d) :: $7 } 45 | ; 46 | 47 | def: 48 | DEF { ("" , $1) } 49 | | ID DEF { ($1,$2) } 50 | ; 51 | 52 | idlist: 53 | | EQ { [] } 54 | | ID idlist { $1 :: $2 } 55 | ; 56 | 57 | %% 58 | 59 | let parse_it rgl s = 60 | try main rgl s with 61 | | Parse_error -> 62 | prerr_string "error occured while parsing .fml file : "; 63 | prerr_string ((lexeme s) ^ " at position "); 64 | prerr_int (lexeme_end s ); 65 | prerr_newline(); 66 | 67 | | Exit -> raise Exit 68 | | s -> prerr_endline "Unknown error\n"; raise s 69 | ;; 70 | (* $Id: pure_def.mly,v 1.2 2003/09/19 12:25:21 prevosto Exp $ *) 71 | -------------------------------------------------------------------------------- /tests/foo.mly: -------------------------------------------------------------------------------- 1 | %start exp 2 | %token INT 3 | %token PLUS 4 | %token TIMES 5 | %left PLUS 6 | %left TIMES 7 | %type exp 8 | %% 9 | exp: 10 | | lhs = exp TIMES rhs = exp 11 | { 12 | let op = ( fun x y -> x * y ) in 13 | ( op lhs rhs ) 14 | } 15 | 
| lhs = exp PLUS rhs = exp 16 | { 17 | let op = ( fun x y -> x + y ) in 18 | ( op lhs rhs ) 19 | } 20 | | x = INT 21 | { 22 | ( x ) 23 | } 24 | 25 | -------------------------------------------------------------------------------- /tests/forbid_end.mly: -------------------------------------------------------------------------------- 1 | %% 2 | 3 | endif: END | ENDIF { } 4 | -------------------------------------------------------------------------------- /tests/fp.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | open Fp;; 3 | %} 4 | 5 | %token Lprim 6 | 7 | %token Leval 8 | %token Lpar Rpar 9 | %token Lang Rang 10 | %token Lsqu Rsqu 11 | %token Lcom Lscl Leol 12 | 13 | %token Lapplytoall Lins Lo Lcond Lcst Lbu Lwhile 14 | 15 | %token LT LF 16 | 17 | %token Laff LDef LUndef LShow LQuit LLoad LSave 18 | 19 | %token Ls 20 | %token Lr 21 | %token Lint 22 | %token Lident 23 | %token Lvar 24 | %token Lstr 25 | 26 | 27 | %right Leval 28 | %left Lprim 29 | %right Lcond 30 | %right Lo 31 | %left Lcst 32 | 33 | 34 | %start cmd 35 | %type cmd 36 | 37 | %start exp 38 | %type exp 39 | 40 | %start fct 41 | %type fct 42 | 43 | %% 44 | 45 | cmd : 46 | LDef Lident Laff fct Leol { Def($2,$4) } 47 | | LUndef Lident Leol { Undef $2 } 48 | | LShow Lident Leol { Show $2 } 49 | | exp Leol { Exp $1 } 50 | | LQuit Leol { Quit } 51 | | LLoad Lstr Leol { Load $2 } 52 | | LSave Lstr Leol { Save $2 } 53 | | Leol { None } 54 | ; 55 | 56 | exp : 57 | LT { T } 58 | | LF { F } 59 | | Lint { Int (Num.Int $1) } 60 | | Lvar { Var $1 } 61 | | Lpar exp Rpar { $2 } 62 | | Lang Rang { Seq [] } 63 | | Lang list Rang { Seq $2 } 64 | | fct Leval exp { App($1, $3) } 65 | ; 66 | 67 | fatom : 68 | Lprim { Prim $1 } 69 | | Ls { Sel $1 } 70 | | Lr { RSel $1 } 71 | 72 | | Lident { User $1 } 73 | 74 | | Lapplytoall fatom { ApplyToAll $2 } 75 | | Lins fatom { Insert $2 } 76 | | Lcst exp { Constant $2 } 77 | | Lsqu fctlist Rsqu { Construction $2 } 78 | | Lbu fatom exp { Bu($2,$3) } 79 | | Lwhile fatom fatom { While($2,$3) } 80 | | Lpar fct Rpar { $2 } 81 | ; 82 | 83 | fct : 84 | comp Lcond comp Lscl fct { Condition($1,$3,$5) } 85 | | comp { $1 } 86 | ; 87 | 88 | comp : 89 | fatom Lo comp { Composition($1,$3) } 90 | | fatom { $1 } 91 | 92 | list : 93 | exp Lcom list { $1 :: $3 } 94 | | exp { [ $1 ] } 95 | ; 96 | 97 | fctlist : 98 | fct Lcom fctlist { $1 :: $3 } 99 | | fct { [ $1 ] } 100 | ; 101 | 102 | %% 103 | 104 | -------------------------------------------------------------------------------- /tests/framac-print_api-grammar.mly: -------------------------------------------------------------------------------- 1 | /**************************************************************************/ 2 | /* */ 3 | /* This file is part of Frama-C. */ 4 | /* */ 5 | /* Copyright (C) 2007-2015 */ 6 | /* CEA (Commissariat à l'énergie atomique et aux énergies */ 7 | /* alternatives) */ 8 | /* */ 9 | /* you can redistribute it and/or modify it under the terms of the GNU */ 10 | /* Lesser General Public License as published by the Free Software */ 11 | /* Foundation, version 2.1. */ 12 | /* */ 13 | /* It is distributed in the hope that it will be useful, */ 14 | /* but WITHOUT ANY WARRANTY; without even the implied warranty of */ 15 | /* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */ 16 | /* GNU Lesser General Public License for more details. */ 17 | /* */ 18 | /* See the GNU Lesser General Public License version 2.1 */ 19 | /* for more details (enclosed in the file licenses/LGPLv2.1). 
*/ 20 | /* */ 21 | /**************************************************************************/ 22 | 23 | %{ 24 | %} 25 | 26 | %token WORD 27 | %token LPAR 28 | %token RPAR 29 | %token COMMA 30 | %token EOF 31 | %start main 32 | %type main 33 | %% 34 | main: 35 | type_string EOF { $1 } 36 | word: WORD { $1 } 37 | type_string: word { $1 } 38 | | type_string word { "'a "^$2 } 39 | | LPAR type_string COMMA type_string RPAR word { "('a,'b) "^$6 } 40 | | LPAR type_string COMMA type_string COMMA type_string RPAR word { "('a,'b,'c) "^$8 } 41 | | LPAR type_string COMMA type_string COMMA type_string COMMA type_string RPAR word { "('a,'b,'c,'d) "^$10 } 42 | -------------------------------------------------------------------------------- /tests/fslexpars.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | (* (c) Microsoft Corporation 2005-2006. *) 3 | 4 | open Fslexast 5 | 6 | %} 7 | 8 | %type spec 9 | %token STRING IDENT 10 | %token CODE 11 | %token CHAR 12 | %token RULE PARSE LET AND LPAREN RPAREN 13 | %token EOF BAR DOT PLUS STAR QMARK EQUALS UNDERSCORE LBRACK RBRACK HAT DASH 14 | %start spec 15 | %left BAR 16 | %left regexp_alt 17 | %left regexp_seq 18 | %nonassoc regexp_opt 19 | %nonassoc regexp_plus regexp_star 20 | %% 21 | 22 | spec: codeopt macros RULE rules codeopt { { topcode=$1;macros=$2;rules=$4;botcode=$5 } } 23 | codeopt: CODE { $1 } | { "", Parsing.symbol_start_pos () } 24 | macros: { [] } | macro macros { $1 :: $2 } 25 | macro: LET IDENT EQUALS regexp { ($2, $4) } 26 | rules: rule AND rules { $1 :: $3 } | rule { [$1] } 27 | rule: IDENT args EQUALS PARSE optbar clauses { ($1,$2,$6) } 28 | args: { [] } | IDENT args { $1 :: $2 } 29 | optbar: { } | BAR { } 30 | clauses: clause BAR clauses {$1 :: $3 } | clause { [$1] } 31 | clause: regexp CODE { $1, $2 } 32 | regexp: 33 | CHAR { Inp(LChar $1) } 34 | | EOF { Inp(LEof) } 35 | | UNDERSCORE { Alt(mapi (fun n -> Inp(LChar (Char.chr n))) 255) } 36 | | STRING { Seq(mapi (fun n -> Inp(LChar (String.get $1 n))) (String.length $1 - 1)) } 37 | | IDENT { Macro($1) } 38 | | regexp regexp %prec regexp_seq { Seq[$1;$2] } 39 | | regexp PLUS %prec regexp_plus { Seq[$1;Star $1] } 40 | | regexp STAR %prec regexp_star { Star $1 } 41 | | regexp QMARK %prec regexp_opt { Alt[Seq[];$1] } 42 | | regexp BAR regexp %prec regexp_alt { Alt[$1;$3] } 43 | | LPAREN regexp RPAREN { $2 } 44 | | LBRACK charset RBRACK { Alt (List.map (fun c -> Inp(LChar c)) $2) } 45 | | LBRACK HAT charset RBRACK { Alt(foldi(fun n l -> if List.mem (Char.chr n) $3 then l else Inp(LChar (Char.chr n))::l) 255 []) } 46 | 47 | charset: 48 | | CHAR { [$1] } 49 | | CHAR DASH CHAR { mapi(fun n -> Char.chr (n+Char.code $1)) (Char.code $3 - Char.code $1) } 50 | | charset charset { $1 @ $2 } 51 | 52 | 53 | -------------------------------------------------------------------------------- /tests/fsyaccpars.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | (* (c) Microsoft Corporation 2005-2006. 
*) 3 | 4 | open Fsyaccast 5 | 6 | %} 7 | 8 | %type spec 9 | %token IDENT 10 | %token HEADER CODE 11 | %token BAR PERCENT_PERCENT START LEFT RIGHT NONASSOC LESS GREATER COLON PREC SEMI EOF ERROR 12 | %token TYPE 13 | %token TOKEN 14 | %start spec 15 | %left BAR 16 | %% 17 | 18 | spec: headeropt decls PERCENT_PERCENT rules { List.fold_right (fun f x -> f x) $2 { header=$1;tokens=[];types=[];assoc=[];starts=[];rules=$4 } } 19 | headeropt: HEADER { $1 } | { "", Parsing.symbol_start_pos () } 20 | decls: { [] } | decl decls { $1 :: $2 } 21 | decl: 22 | TOKEN idents { (fun x -> {x with tokens = x.tokens @ (List.map (fun x -> (x,$1)) $2)}) } 23 | | TYPE idents { (fun x -> {x with types = x.types @ (List.map (fun x -> (x,$1)) $2)} ) } 24 | | START idents { (fun x -> {x with starts = x.starts @ $2} ) } 25 | | LEFT idents { (fun x -> {x with assoc = x.assoc @ [(List.map (fun x -> (x,LeftAssoc)) $2)]} ) } 26 | | RIGHT idents { (fun x -> {x with assoc = x.assoc @ [(List.map (fun x -> (x,RightAssoc)) $2)]} ) } 27 | | NONASSOC idents { (fun x -> {x with assoc = x.assoc @ [(List.map (fun x -> (x,NonAssoc)) $2)]} ) } 28 | 29 | idents: IDENT idents { $1 :: $2 } | { [] } 30 | rules: rule rules { $1 :: $2 } | rule { [$1] } 31 | rule: IDENT COLON optbar clauses optsemi { ($1,$4) } 32 | optbar: { } | BAR { } 33 | optsemi: { } | SEMI { } 34 | clauses: clause BAR clauses {$1 :: $3 } | clause { [$1] } 35 | clause: syms optprec CODE { Rule($1,$2,Some $3) } 36 | syms: IDENT syms { $1 :: $2 } | ERROR syms { "error" :: $2 } | { [] } 37 | optprec: { None } | PREC IDENT { Some $2 } 38 | 39 | 40 | -------------------------------------------------------------------------------- /tests/gdb.0.3-gdbmi_parser.mly: -------------------------------------------------------------------------------- 1 | (* Original file: gdb.0.3/ocaml-gdb-0.3/src/gdbmi_parser.mly *) 2 | /* 3 | GDB/MI parser 4 | */ 5 | 6 | %{ 7 | open! Gdbmi_types 8 | %} 9 | 10 | %token IDENT TOKEN STRING 11 | %token LCURLY RCURLY LBRACKET RBRACKET COMMA EOF 12 | %token PROMPT MINUS CARET ASTERISK PLUS EQUAL TILDE AT AMPERSAND 13 | 14 | %start output 15 | %start input_output 16 | 17 | %% 18 | 19 | input_output: 20 | | PROMPT EOF { Prompt } 21 | | input_prefix { Input $1 } 22 | | output { Output $1 } 23 | 24 | input_prefix: 25 | | token=TOKEN? MINUS { MI token } 26 | | IDENT { CLI } 27 | 28 | output: output_record EOF { $1 } 29 | 30 | output_record: 31 | | typ=stream_type s=STRING { Stream (typ, s) } 32 | | token=TOKEN? CARET state=IDENT r=list(preceded(COMMA,result)) 33 | { 34 | let r = match state with 35 | | "done" 36 | | "running" -> Done r 37 | | "exit" -> Exit 38 | | "connected" -> Connected 39 | | "error" -> 40 | let k n = try match List.assoc n r with String s -> Some s | _ -> None with Not_found -> None in 41 | let msg = match k "msg" with Some s -> s | None -> "" in 42 | OpError (msg, k "code") 43 | | s -> failwith ("bad result type : " ^ s) 44 | in 45 | Result (token, r) 46 | } 47 | | token=TOKEN? 
typ=async_type cls=IDENT r=list(preceded(COMMA,result)) { Async (token,typ,cls,r) } 48 | 49 | stream_type: 50 | | TILDE { Console } 51 | | AT { Target } 52 | | AMPERSAND { Log } 53 | 54 | async_type: 55 | | ASTERISK { Exec } 56 | | PLUS { Status } 57 | | EQUAL { Notify } 58 | 59 | result: n=IDENT EQUAL v=value { (n, v) } 60 | value: 61 | | s=STRING { String s } 62 | | LCURLY l=separated_list(COMMA,result) RCURLY { Tuple l } 63 | | LBRACKET l=separated_list(COMMA,result) RBRACKET { List l } 64 | | LBRACKET l=separated_nonempty_list(COMMA,value) RBRACKET { Values l } 65 | -------------------------------------------------------------------------------- /tests/gromit.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/gromit.mly -------------------------------------------------------------------------------- /tests/grune924.mly: -------------------------------------------------------------------------------- 1 | %token MINUS N LPAR RPAR 2 | %start s 3 | %type s 4 | 5 | %% 6 | 7 | s: 8 | e 9 | { () } 10 | 11 | e: 12 | e MINUS t 13 | { () } 14 | | t 15 | { () } 16 | 17 | t: 18 | N 19 | { () } 20 | | LPAR e RPAR 21 | { () } 22 | 23 | -------------------------------------------------------------------------------- /tests/ho.mly: -------------------------------------------------------------------------------- 1 | %token INT EOF SEMI LBRACE RBRACE COMMA 2 | %start main 3 | 4 | %% 5 | 6 | main: 7 | expr(separated_list, SEMI) 8 | weird(separated_list, SEMI) 9 | bizarre(separated_list, SEMI) 10 | EOF 11 | {} 12 | 13 | expr(seq, sep): 14 | INT 15 | | LBRACE seq(sep, expr(seq, sep)) RBRACE 16 | {} 17 | 18 | weird(seq, sep): 19 | INT 20 | | LBRACE seq(sep, weird(seq, COMMA)) RBRACE 21 | {} 22 | 23 | bizarre(seq, sep): 24 | INT 25 | | LBRACE seq(sep, bizarre(nonseparated_list, sep)) RBRACE 26 | {} 27 | 28 | nonseparated_list(sep, X): 29 | list(X) 30 | {} 31 | -------------------------------------------------------------------------------- /tests/infinite.mly: -------------------------------------------------------------------------------- 1 | %token A 2 | %start dummy 3 | 4 | %% 5 | 6 | dummy: 7 | A { () } 8 | | A infinite { () } 9 | 10 | infinite: 11 | A infinite 12 | { () } 13 | 14 | -------------------------------------------------------------------------------- /tests/inline-multi-level.mly: -------------------------------------------------------------------------------- 1 | %start a 2 | %token T 3 | %% 4 | 5 | a: x=b; T b { f_a x } 6 | %inline b: x=c; T { f_b x } 7 | %inline c: x=d; T { f_c x } 8 | d: T {} 9 | -------------------------------------------------------------------------------- /tests/inline-multi-sons.mly: -------------------------------------------------------------------------------- 1 | %token A B EOF 2 | %start s 3 | %% 4 | 5 | (* We test the freshness of producer's name during inlining. 6 | ioption and delimited both use 'x' in their definition. 7 | *) 8 | s: ioption(pair(ioption(A), delimited(A, B, A))) EOF { 0 } 9 | -------------------------------------------------------------------------------- /tests/inline-position.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | let f x y z t = () 3 | let g x y z t = () 4 | %} 5 | 6 | %token A B C 7 | %start prog 8 | %type prog 9 | %% 10 | 11 | prog: a1 a2 a3 a4 a5 a6 a7 a8 12 | {()} 13 | 14 | /* case: host is almost empty, inlined too. 
*/ 15 | a1: x=b 16 | { 17 | f $startpos $endpos $startpos(x) $endpos(x) 18 | } 19 | 20 | %inline b: 21 | { g $startpos $endpos $startpos $endpos } 22 | 23 | /* case: host is not empty, inlined rule is empty. */ 24 | a2: a=A x=b b=B 25 | { 26 | f $startpos $endpos $startpos(x) $endpos(x) 27 | } 28 | 29 | /* case: host is almost empty, inlined rule is not empty. */ 30 | %inline c: a=A b=B 31 | { 32 | g $startpos $endpos $startpos(a) $endpos(a) 33 | } 34 | 35 | a3: x=c 36 | { 37 | f $startpos $endpos $startpos(x) $endpos(x) 38 | } 39 | 40 | /* case: host is not empty, inlined rule is not empty. */ 41 | 42 | a4: a=A x=c b=B 43 | { 44 | f $startpos $endpos $startpos(x) $endpos(x) 45 | } 46 | 47 | /* case: host is not empty but unnamed, inlined rule is empty. */ 48 | a5: A x=b C 49 | { 50 | f $startpos $endpos $startpos(x) $endpos(x) 51 | } 52 | 53 | /* case: host is empty, inlined rule is not empty but unnamed. */ 54 | a6: x=d 55 | { 56 | f $startpos $endpos $startpos(x) $endpos(x) 57 | } 58 | 59 | %inline d: A B 60 | { 61 | g $startpos $endpos $startpos $endpos 62 | } 63 | 64 | /* case: host is not empty, inlined_rule is empty but we do not use startpos. */ 65 | %inline f: 66 | { 67 | g $endpos $endpos $endpos $endpos 68 | } 69 | a7: A x=f B 70 | { 71 | f $endpos $endpos $endpos(x) $endpos(x) 72 | } 73 | 74 | /* case: host is not empty, inlined_rule is empty but we do not use endpos. */ 75 | %inline g: 76 | { 77 | g $startpos $startpos $startpos $startpos 78 | } 79 | a8: A x=g B 80 | { 81 | f $startpos $startpos $startpos(x) $startpos(x) 82 | } 83 | -------------------------------------------------------------------------------- /tests/inline-rename.mly: -------------------------------------------------------------------------------- 1 | %token T 2 | %start a 3 | %% 4 | 5 | a: x = c; y = b { x + y } 6 | c: x = T { x } 7 | %inline b : x = c { x } 8 | -------------------------------------------------------------------------------- /tests/inline-shift.mly: -------------------------------------------------------------------------------- 1 | %start a 2 | %token T 3 | %% 4 | 5 | a: b c d c { $2 + $3 + $4 } 6 | 7 | %inline b: T T {} 8 | c: x=T { x + 1 } 9 | %inline d: { 0 } 10 | -------------------------------------------------------------------------------- /tests/inline-test.mly: -------------------------------------------------------------------------------- 1 | %token PLUS TIMES EOF 2 | %left PLUS 3 | %left TIMES 4 | %token INT 5 | %start prog 6 | %% 7 | 8 | prog: x=exp EOF { x } 9 | 10 | exp: x = INT { x } 11 | | lhs = exp; op = op; rhs = exp { op lhs rhs } 12 | 13 | %inline op: PLUS { fun x y -> x + y } 14 | | TIMES { fun x y -> x * y } 15 | -------------------------------------------------------------------------------- /tests/inline-with-dollar.mly: -------------------------------------------------------------------------------- 1 | %start s 2 | %token A 3 | %% 4 | 5 | s: x=a A y=b { 6 | x; y 7 | } 8 | 9 | %inline a: A { 10 | $1 11 | } 12 | 13 | %inline b: A { 14 | $1 15 | } 16 | -------------------------------------------------------------------------------- /tests/inlined-dollar.mly: -------------------------------------------------------------------------------- 1 | %token A B 2 | %start t 3 | %% 4 | 5 | t : B y=y { 6 | y 7 | } 8 | 9 | %inline y: A 10 | { 11 | $1 12 | } 13 | -------------------------------------------------------------------------------- /tests/inlining-capture.mly: -------------------------------------------------------------------------------- 1 | %token A B C D E 2 | 
%start main 3 | %% 4 | %inline callee: B C a = D { $startpos(a), $startpos } 5 | main: A a = callee e = E { a, $startpos(a), $startpos, $startpos(e) } 6 | -------------------------------------------------------------------------------- /tests/inliningWithSideEffects.mly: -------------------------------------------------------------------------------- 1 | (* This grammar tests whether %inline affects the order in which 2 | semantic actions are executed. *) 3 | 4 | (* If neither [a] nor [b] are marked %inline, then this program 5 | should print "Reducing A" followed with "Reducing B". *) 6 | 7 | (* If [a] is marked %inline and [b] is not, then this program 8 | should print "Reducing B" followed with "Reducing A". This is 9 | unavoidable -- it follows from the meaning of %inline. *) 10 | 11 | (* Similarly, if [b] is marked %inline and [a] is not, 12 | then there is no choice. *) 13 | 14 | (* If both [a] and [b] are marked %inline, however, 15 | then the behavior of this program is unspecified. 16 | As of 2018/09/18, the order is indeed reversed: 17 | this program prints "Reducing B" first. *) 18 | 19 | %start main 20 | %token EOF 21 | 22 | %% 23 | 24 | main: 25 | a b EOF {} 26 | 27 | %inline a: 28 | { Printf.printf "Reducing A\n%!" } 29 | 30 | %inline b: 31 | { Printf.printf "Reducing B\n%!" } 32 | 33 | %% 34 | 35 | let () = 36 | let dummy = Lexing.from_string "" in 37 | main (fun _lexbuf -> EOF) dummy 38 | 39 | (* 40 | menhir inliningWithSideEffects.mly 41 | /usr/bin/env ocaml inliningWithSideEffects.ml 42 | *) 43 | -------------------------------------------------------------------------------- /tests/issue21_longer.mly: -------------------------------------------------------------------------------- 1 | (* This file was provided by Andrej Bauer; see issue #21. *) 2 | 3 | (* This is an example where Menhir's implementation of Pager's 4 | algorithm produces a conflict, even though it should not: the 5 | canonical automaton has no conflicts. *) 6 | 7 | (* Infix operations a la OCaml *) 8 | %token INFIXOP0 EQUAL 9 | 10 | (* Names *) 11 | %token NAME 12 | 13 | (* Expressions and computations *) 14 | %token NUMERAL 15 | %token LET REC IN AND 16 | 17 | (* End of input token *) 18 | %token EOF 19 | 20 | (* Precedence and fixity of infix operators *) 21 | %start commandline 22 | 23 | %% 24 | 25 | (* Toplevel syntax *) 26 | 27 | commandline: 28 | | LET f=NAME EQUAL e=term EOF 29 | { () } 30 | 31 | | term EOF 32 | { () } 33 | 34 | (* Main syntax tree *) 35 | term: 36 | | e=infix_term_ 37 | { e } 38 | 39 | | LET f=NAME EQUAL c1=infix_term IN c2=term 40 | { () } 41 | 42 | | LET REC fs=separated_nonempty_list(AND, recursive_clause) IN c2=term 43 | { () } 44 | 45 | infix_term: 46 | | n=NUMERAL 47 | { () } 48 | 49 | infix_term_: 50 | | n=NUMERAL 51 | { () } 52 | 53 | | e2=infix_term INFIXOP0 e3=infix_term 54 | { () } 55 | 56 | recursive_clause: 57 | | f=NAME EQUAL c=term 58 | { () } 59 | -------------------------------------------------------------------------------- /tests/issue21_shorter.mly: -------------------------------------------------------------------------------- 1 | (* This file was provided by Andrej Bauer; see issue #21. *) 2 | 3 | (* This is an example where Menhir's implementation of Pager's 4 | algorithm produces a conflict, even though it should not: the 5 | canonical automaton has no conflicts. 
*) 6 | 7 | (* Infix operations a la OCaml *) 8 | %token PLUS 9 | 10 | (* Names *) 11 | %token NAME 12 | 13 | (* Expressions and computations *) 14 | %token LET REC IN 15 | 16 | (* End of input token *) 17 | %token EOF 18 | 19 | (* Precedence and fixity of infix operators *) 20 | %start commandline 21 | 22 | %% 23 | 24 | (* Toplevel syntax *) 25 | 26 | commandline: 27 | | LET term EOF 28 | { () } 29 | 30 | | term EOF 31 | { () } 32 | 33 | (* Main syntax tree *) 34 | term: 35 | | NAME 36 | { () } 37 | 38 | | name PLUS 39 | { () } 40 | 41 | | LET name IN term 42 | { () } 43 | 44 | | REC term IN term 45 | { () } 46 | 47 | name: 48 | | NAME 49 | { () } 50 | -------------------------------------------------------------------------------- /tests/java.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/java.mly -------------------------------------------------------------------------------- /tests/jml.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/jml.mly -------------------------------------------------------------------------------- /tests/jocaml-ambients.mly: -------------------------------------------------------------------------------- 1 | /***********************************************************************/ 2 | /* */ 3 | /* Ambients in JoCaml */ 4 | /* */ 5 | /* Alan Schmitt, projet Para, INRIA Rocquencourt */ 6 | /* */ 7 | /* Copyright 1999 Institut National de Recherche en Informatique et */ 8 | /* Automatique. Distributed only by permission. */ 9 | /* */ 10 | /***********************************************************************/ 11 | 12 | %{ 13 | open Syntax;; 14 | %} 15 | %token IDENT STRING 16 | %token PAR IN OUT OPEN LPAREN RPAREN LBRACKET RBRACKET NEW BANG DOT LPOINT RPOINT ZERO SEMISEMI DOLLAR 17 | 18 | /* lowest precedence */ 19 | %right PAR 20 | %nonassoc BANG 21 | %right DOT 22 | %nonassoc DOLLAR 23 | /* highest precedence */ 24 | 25 | /* the entry point */ 26 | %start main 27 | %type main 28 | %% 29 | 30 | main: 31 | proc SEMISEMI { $1 } 32 | ; 33 | 34 | proc: 35 | proc PAR proc { Par ($1, $3) } 36 | | LPAREN proc RPAREN { $2 } 37 | | capa DOT proc { Seq ($1, $3) } 38 | | DOLLAR ident LBRACKET proc RBRACKET { Amb ($2, $4, true) } 39 | | ident LBRACKET proc RBRACKET { Amb ($1, $3, false) } 40 | | NEW str DOT proc { New ($2, $4) } 41 | | BANG proc { Bang $2 } 42 | | LPAREN str RPAREN DOT proc { Comm_in ($2, $5) } 43 | | LPOINT capa RPOINT { Comm_out $2 } 44 | | ZERO { Zero } 45 | ; 46 | 47 | capa: 48 | IN ident { In $2 } 49 | | OUT ident { Out $2 } 50 | | OPEN ident { Open $2 } 51 | | ident { Name $1 } 52 | | STRING { Yell $1 } 53 | | capa DOT capa { Seq_capa ($1, $3) } 54 | 55 | ident: 56 | IDENT { Ident $1 } 57 | 58 | str: 59 | IDENT { $1 } 60 | ; 61 | -------------------------------------------------------------------------------- /tests/judicael.mly: -------------------------------------------------------------------------------- 1 | /* Cf. bug report no 4. */ 2 | 3 | %token INT 4 | %token IDENT 5 | %token PLUS 6 | %token PARENG PAREND 7 | %token FIN 8 | 9 | %left PLUS 10 | 11 | %start main 12 | %type main 13 | 14 | %% 15 | 16 | main: 17 | 18 | instruction {print_string "\ninstruction\n"; flush stdout} 19 | 20 | ; 21 | 22 | expr: 23 | /* Constantes de types simples. 
*/ 24 | INT 25 | { 26 | print_string "\nINT:"; print_int $1;flush stdout 27 | } 28 | |expr PLUS expr 29 | { 30 | print_string "\nPlus" 31 | } 32 | ; 33 | 34 | instruction: 35 | expr { print_string "\ninstruction\n" ; flush stdout } 36 | 37 | |IDENT PARENG PAREND { print_string "\nCall" ; flush stdout} 38 | 39 | ; 40 | 41 | -------------------------------------------------------------------------------- /tests/julia.mly: -------------------------------------------------------------------------------- 1 | %{ %} 2 | 3 | %token TIf TElse TOPar TCPar TReturn TIdent 4 | %token EOF 5 | 6 | %nonassoc TIf 7 | %nonassoc TElse 8 | 9 | %start main 10 | %type main 11 | 12 | %% 13 | 14 | main: 15 | statement EOF { } 16 | statement: 17 | TReturn 18 | { } 19 | | TIf TOPar expr TCPar statement %prec TIf 20 | { } 21 | | TIf TOPar expr TCPar statement TElse statement 22 | { } 23 | 24 | expr: 25 | TIdent { } 26 | 27 | -------------------------------------------------------------------------------- /tests/ketti.mly: -------------------------------------------------------------------------------- 1 | %token INT PLUS MINUS 2 | %start expr 3 | %% 4 | 5 | expr: 6 | | PLUS PLUS expr INT {} 7 | | INT {} 8 | | plus {} 9 | | indir MINUS indir {} 10 | 11 | indir: plus { } 12 | 13 | plus: expr PLUS expr { } 14 | 15 | -------------------------------------------------------------------------------- /tests/kimmit.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | let x = X 0 3 | %} 4 | %token X 5 | %start s 6 | %% 7 | s: X {} 8 | -------------------------------------------------------------------------------- /tests/kremlin.0.9.6.0-parser.mly: -------------------------------------------------------------------------------- 1 | (* Original file: kremlin.0.9.6.0/kremlin-0.9.6.0/parser/parser.mly *) 2 | %{ 3 | open Bundle 4 | %} 5 | 6 | %token INT 7 | %token UIDENT LIDENT 8 | %token PLUS MINUS STAR AT DOT EOF COMMA EQUALS PUBLIC LPAREN RPAREN 9 | 10 | %start <(Flags.flag * (int * int)) list> warn_error_list 11 | %start bundle 12 | %start drop 13 | %start lid 14 | 15 | (** Parsing of command-line error/warning/silent flags. 
*) 16 | 17 | %% 18 | 19 | warn_error_list: 20 | | ws = warn_error+ EOF 21 | { ws } 22 | 23 | warn_error: 24 | | f = flag r = range 25 | { f, r } 26 | 27 | flag: 28 | | AT 29 | { Flags.CError } 30 | | MINUS 31 | { Flags.CSilent } 32 | | PLUS 33 | { Flags.CWarning } 34 | 35 | range: 36 | | i = INT 37 | { i, i } 38 | | i = INT DOT DOT j = INT 39 | { i, j } 40 | 41 | 42 | (** Parsing of -bundle options *) 43 | 44 | pat: 45 | | STAR 46 | { Prefix [ ] } 47 | | u = UIDENT 48 | { Module [ u ] } 49 | | u = UIDENT DOT p = pat 50 | { match p with 51 | | Module m -> 52 | Module (u :: m) 53 | | Prefix m -> 54 | Prefix (u :: m) } 55 | 56 | %inline 57 | uident: 58 | | u = UIDENT 59 | { u } 60 | 61 | %inline 62 | lident: 63 | | l = LIDENT 64 | { l } 65 | 66 | mident: 67 | | l = separated_list(DOT, uident) 68 | { l } 69 | 70 | api: 71 | | m = mident 72 | { m, AsIs } 73 | | PUBLIC LPAREN m = mident RPAREN 74 | { m, Public } 75 | 76 | drop: 77 | | p = separated_list(COMMA, pat) EOF 78 | { p } 79 | 80 | bundle: 81 | | apis = separated_nonempty_list(PLUS, api) 82 | EQUALS 83 | l = separated_nonempty_list(COMMA, pat) EOF 84 | { apis, l } 85 | | l = separated_nonempty_list(COMMA, pat) EOF 86 | { [], l } 87 | 88 | lid: 89 | | l = lident 90 | { [], l } 91 | | m = UIDENT DOT l = lid 92 | { let m', l = l in m :: m', l } 93 | -------------------------------------------------------------------------------- /tests/labltk-ppyac.mly: -------------------------------------------------------------------------------- 1 | /***********************************************************************/ 2 | /* */ 3 | /* MLTk, Tcl/Tk interface of Objective Caml */ 4 | /* */ 5 | /* Francois Rouaix, Francois Pessaux, Jun Furuse and Pierre Weis */ 6 | /* projet Cristal, INRIA Rocquencourt */ 7 | /* Jacques Garrigue, Kyoto University RIMS */ 8 | /* */ 9 | /* Copyright 2002 Institut National de Recherche en Informatique et */ 10 | /* en Automatique and Kyoto University. All rights reserved. */ 11 | /* This file is distributed under the terms of the GNU Library */ 12 | /* General Public License, with the special exception on linking */ 13 | /* described in file ../LICENSE. 
*/ 14 | /* */ 15 | /***********************************************************************/ 16 | 17 | %{ 18 | open Code 19 | %} 20 | 21 | %token IFDEF 22 | %token IFNDEF 23 | %token ELSE 24 | %token ENDIF 25 | %token DEFINE 26 | %token UNDEF 27 | %token OTHER 28 | %token EOF 29 | 30 | /* entry */ 31 | 32 | %start code_list 33 | %type code_list 34 | 35 | %% 36 | 37 | code_list: 38 | /* empty */ { [] } 39 | | code code_list { $1 :: $2 } 40 | ; 41 | 42 | code: 43 | | DEFINE { Define $1 } 44 | | UNDEF { Undef $1 } 45 | | IFDEF code_list ELSE code_list ENDIF { Ifdef (true, $1, $2, Some ($4)) } 46 | | IFNDEF code_list ELSE code_list ENDIF { Ifdef (false, $1, $2, Some ($4)) } 47 | | IFDEF code_list ENDIF { Ifdef (true, $1, $2, None) } 48 | | IFNDEF code_list ENDIF { Ifdef (false, $1, $2, None) } 49 | | OTHER { Line $1 } 50 | ; 51 | 52 | %% 53 | -------------------------------------------------------------------------------- /tests/ldap.2.4.0-ldap_filterparser.mly: -------------------------------------------------------------------------------- 1 | /* a parser for rfc2254 ldap filters 2 | 3 | Copyright (C) 2004 Eric Stokes, and The California State University 4 | at Northridge 5 | 6 | This library is free software; you can redistribute it and/or 7 | modify it under the terms of the GNU Lesser General Public 8 | License as published by the Free Software Foundation; either 9 | version 2.1 of the License, or (at your option) any later version. 10 | 11 | This library is distributed in the hope that it will be useful, 12 | but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 14 | Lesser General Public License for more details. 15 | 16 | You should have received a copy of the GNU Lesser General Public 17 | License along with this library; if not, write to the Free Software 18 | Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA 19 | */ 20 | 21 | 22 | %{ 23 | open Ldap_types 24 | 25 | let star_escape_rex = Pcre.regexp ~study:true ("\\" ^ "\\2a") 26 | let lparen_escape_rex = Pcre.regexp ~study:true ("\\" ^ "\\28") 27 | let rparen_escape_rex = Pcre.regexp ~study:true ("\\" ^ "\\29") 28 | let backslash_escape_rex = Pcre.regexp ~study:true ("\\" ^ "\\5c") 29 | let null_escape_rex = Pcre.regexp ~study:true ("\\" ^ "\\00") 30 | let unescape s = 31 | (Pcre.qreplace ~rex:star_escape_rex ~templ:"*" 32 | (Pcre.qreplace ~rex:lparen_escape_rex ~templ:"(" 33 | (Pcre.qreplace ~rex:rparen_escape_rex ~templ:")" 34 | (Pcre.qreplace ~rex:null_escape_rex ~templ:"\000" 35 | (Pcre.qreplace ~rex:backslash_escape_rex ~templ:"\\" s))))) 36 | %} 37 | 38 | %token WHSP LPAREN RPAREN AND OR NOT EOF 39 | %token ATTREQUAL 40 | %token ATTREQUALSUB 41 | %token ATTRGTE 42 | %token ATTRLTE 43 | %token ATTRAPPROX 44 | %token ATTRPRESENT 45 | %token ATTREXTENDEDMATCH 46 | %token ATTREXTENDEDDN 47 | %start filter_and_eof 48 | %type filter_and_eof 49 | %% 50 | 51 | filterlist: 52 | filterlist filter {$2 :: $1} 53 | | filter {[$1]} 54 | ; 55 | 56 | filter: 57 | LPAREN AND filterlist RPAREN {`And $3} 58 | | LPAREN OR filterlist RPAREN {`Or $3} 59 | | LPAREN NOT filter RPAREN {`Not $3} 60 | | LPAREN filter RPAREN {$2} 61 | | ATTREQUALSUB {`Substrings {attrtype=(fst $1);substrings=(snd $1)}} 62 | | ATTREQUAL {`EqualityMatch {attributeDesc=(fst $1);assertionValue=(unescape (snd $1))}} 63 | | ATTRGTE {`GreaterOrEqual {attributeDesc=(fst $1);assertionValue=(unescape (snd $1))}} 64 | | ATTRLTE {`LessOrEqual {attributeDesc=(fst 
$1);assertionValue=(unescape (snd $1))}} 65 | | ATTRPRESENT {`Present $1} 66 | | ATTRAPPROX {`ApproxMatch {attributeDesc=(fst $1);assertionValue=(unescape (snd $1))}} 67 | | ATTREXTENDEDMATCH {let (a, oid, v) = $1 in 68 | `ExtensibleMatch 69 | {matchingRule=(Some (unescape oid)); 70 | ruletype=(Some (unescape a)); 71 | matchValue=(unescape v); 72 | dnAttributes=false}} 73 | | ATTREXTENDEDDN {let (a, oid, v) = $1 in 74 | `ExtensibleMatch 75 | {matchingRule=(match oid with 76 | Some s -> Some (unescape s) 77 | | None -> None); 78 | ruletype=(Some (unescape a)); 79 | matchValue=(unescape v); 80 | dnAttributes=true}} 81 | ; 82 | 83 | /* used to enforce EOF at the end of the filter */ 84 | filter_and_eof: 85 | filter EOF {$1} 86 | ; 87 | -------------------------------------------------------------------------------- /tests/link.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | open Structures 3 | %} 4 | 5 | %token VAR 6 | %token OP 7 | %token CP 8 | %token PAR 9 | %token TIMES 10 | %token SEQ 11 | %token NEXT 12 | %token IMP 13 | %token RIMP 14 | %token LIMP 15 | %token NEG 16 | %token END 17 | %token DIRECTIVE 18 | %token VDASH 19 | 20 | %left PAR SEQ 21 | %left TIMES NEXT 22 | %nonassoc NEG 23 | 24 | %start main 25 | %type <[`Directive of string | `Sequent of Structures.sequent]> main 26 | 27 | %% 28 | 29 | main : 30 | sequent END {`Sequent $1} 31 | | DIRECTIVE VAR {`Directive $2} 32 | 33 | formimp: 34 | VAR {`Var $1} 35 | | formimp PAR formimp {`Par($1,$3)} 36 | | formimp TIMES formimp {`Times($1,$3)} 37 | | formimp NEXT formimp {`Next($1,$3)} 38 | | formimp SEQ formimp {`Seq($1,$3)} 39 | | formimp IMP formimp {`Imp($1,$3)} 40 | | formimp RIMP formimp {`RImp($1,$3)} 41 | | formimp LIMP formimp {`LImp($1,$3)} 42 | | OP formimp CP {$2} 43 | | formimp NEG {`Not($1)} 44 | 45 | sequent: 46 | formimp VDASH formimp {Sequent($1, $3)} 47 | | VDASH formimp {Form($2)} 48 | | formimp {Form($1)} 49 | ; 50 | -------------------------------------------------------------------------------- /tests/links.0.8-xmlParser.mly: -------------------------------------------------------------------------------- 1 | (* Original file: links.0.8/links-0.8/core/xmlParser.mly *) 2 | %{ 3 | open Utility 4 | open Value 5 | 6 | let ensure_match (start, finish, _) (opening : string) (closing : string) = function 7 | | result when opening = closing -> result 8 | | _ -> raise (Sugartypes.ConcreteSyntaxError ("Closing tag '" ^ closing ^ "' does not match start tag '" ^ opening ^ "'.", 9 | (start, finish, None))) 10 | 11 | let pos () : Sugartypes.position = Parsing.symbol_start_pos (), Parsing.symbol_end_pos (), None 12 | 13 | %} 14 | 15 | %token IGNORE END 16 | %token EQ 17 | %token LQUOTE RQUOTE 18 | %token STRING CDATA 19 | %token VARIABLE 20 | %token LXML ENDTAG 21 | %token RXML SLASHRXML 22 | %token LCDATA RCDATA 23 | %token CHAR 24 | 25 | %start xml 26 | 27 | %type xml 28 | 29 | %% 30 | 31 | /* XML */ 32 | xml: 33 | | IGNORE xml { $2 } 34 | | xml_tree { $1 } 35 | 36 | xmlid: 37 | | VARIABLE { $1 } 38 | 39 | attrs: 40 | | attr_list { $1 } 41 | 42 | attr_list: 43 | | attr { [$1] } 44 | | attr_list attr { $2 :: $1 } 45 | 46 | attr: 47 | | xmlid EQ LQUOTE attr_val RQUOTE { Attr ($1, $4) } 48 | | xmlid EQ LQUOTE RQUOTE { Attr ($1, "") } 49 | 50 | attr_val: 51 | | STRING { $1 } 52 | 53 | xml_tree: 54 | | LXML SLASHRXML { Node ($1, []) } 55 | | LXML RXML ENDTAG { ensure_match (pos()) $1 $3 (Node ($1, [])) } 56 | | LXML RXML xml_contents_list ENDTAG { ensure_match (pos()) $1 $4 (Node 
($1, $3)) } 57 | | LXML attrs RXML ENDTAG { ensure_match (pos()) $1 $4 (Node ($1, $2)) } 58 | | LXML attrs SLASHRXML { Node ($1, $2) } 59 | | LXML attrs RXML xml_contents_list ENDTAG { ensure_match (pos()) $1 $5 (Node ($1, $2 @ $4)) } 60 | 61 | xml_contents_list: 62 | | IGNORE { [] } 63 | | IGNORE xml_contents_list { $2 } 64 | | xml_contents { [$1] } 65 | | xml_contents xml_contents_list { $1 :: $2 } 66 | 67 | xml_contents: 68 | | xml_tree { $1 } 69 | | cdata { Text $1 } 70 | 71 | cdata: 72 | | CDATA { $1 } 73 | | LCDATA chars RCDATA { implode $2 } 74 | 75 | chars: 76 | | { [] } 77 | | CHAR chars { $1 :: $2 } 78 | -------------------------------------------------------------------------------- /tests/loop.mly: -------------------------------------------------------------------------------- 1 | (* This example is taken from Grun & Jacobs, second edition, p. 388. *) 2 | 3 | (* The grammmar recognizes the language epsilon^n B C^n, that is, 4 | B C^n. It is not LR(1), because the number of reductions of 5 | A -> epsilon that must be performed before shifting B should 6 | (ideally) be equal to the number of C's that follow. *) 7 | 8 | %token B C EOF 9 | %start phrase 10 | 11 | (* Give priority to reducing A -> epsilon over shifting B. *) 12 | 13 | (* This means, in fact, that B can never be shifted, and that the 14 | parser loops forever on any input that begins with B. *) 15 | 16 | %nonassoc B 17 | %nonassoc reduce 18 | 19 | %% 20 | 21 | phrase: 22 | s EOF {} 23 | 24 | s: 25 | a s C {} 26 | | B {} 27 | 28 | a: 29 | {} %prec reduce 30 | 31 | -------------------------------------------------------------------------------- /tests/lr-but-not-lalr.mly: -------------------------------------------------------------------------------- 1 | %token A B C D F 2 | %type s 3 | %start s 4 | 5 | %% 6 | 7 | s: 8 | A e C {} 9 | | A f D {} 10 | | B e D {} 11 | | B f C {} 12 | 13 | e: 14 | F {} 15 | 16 | f: 17 | F {} 18 | 19 | -------------------------------------------------------------------------------- /tests/lustre-v6.1.737-lv6parser.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/lustre-v6.1.737-lv6parser.mly -------------------------------------------------------------------------------- /tests/lutin.2.56-lutParser.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/lutin.2.56-lutParser.mly -------------------------------------------------------------------------------- /tests/lutin.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/lutin.mly -------------------------------------------------------------------------------- /tests/maple.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/maple.mly -------------------------------------------------------------------------------- /tests/mcc-frontends.mly: -------------------------------------------------------------------------------- 1 | /* 2 | * Parser for front-end configuration file. 
3 | * ---------------------------------------------------------------- 4 | * 5 | * Copyright (C) 2002 Adam Granicz, Caltech 6 | * 7 | * This program is free software; you can redistribute it and/or 8 | * modify it under the terms of the GNU General Public License 9 | * as published by the Free Software Foundation; either version 2 10 | * of the License, or (at your option) any later version. 11 | * 12 | * This program is distributed in the hope that it will be useful, 13 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | * GNU General Public License for more details. 16 | * 17 | * You should have received a copy of the GNU General Public License 18 | * along with this program; if not, write to the Free Software 19 | * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. 20 | * 21 | * Author: Adam Granicz 22 | * Email: granicz@cs.caltech.edu 23 | * 24 | */ 25 | 26 | %{%} 27 | 28 | %token TokEof 29 | 30 | %token TokEq 31 | %token TokSemi 32 | %token TokComma 33 | %token TokLeftBrack 34 | %token TokRightBrack 35 | 36 | %token TokString 37 | %token TokId 38 | 39 | %start main 40 | %type <(string * (string * string list) list) list> main 41 | %% 42 | 43 | main: 44 | section_list TokEof { $1 } 45 | 46 | section_list: 47 | section_list_rev { List.rev $1 } 48 | 49 | section_list_rev: 50 | section { [$1] } 51 | | section_list_rev section { $2 :: $1 } 52 | 53 | section: 54 | TokLeftBrack identifier TokRightBrack assignment_list 55 | { $2, $4 } 56 | 57 | assignment_list: 58 | assignment_list_rev { List.rev $1 } 59 | 60 | assignment_list_rev: 61 | assignment { [$1] } 62 | | assignment_list_rev assignment 63 | { $2 :: $1 } 64 | 65 | assignment: 66 | identifier TokEq string_list 67 | { $1, $3 } 68 | 69 | string_list: 70 | string_list_rev { List.rev $1 } 71 | 72 | string_list_rev: 73 | TokString { [$1] } 74 | | string_list_rev TokComma TokString 75 | { $3 :: $1 } 76 | 77 | identifier: 78 | TokId { $1 } 79 | -------------------------------------------------------------------------------- /tests/mcc-mc.mly: -------------------------------------------------------------------------------- 1 | /* 2 | * Parser for mc files. 3 | * Currently, it produces FC parse trees. This will probably 4 | * change. 5 | * 6 | * ---------------------------------------------------------------- 7 | * This program is free software; you can redistribute it and/or 8 | * modify it under the terms of the GNU General Public License 9 | * as published by the Free Software Foundation; either version 2 10 | * of the License, or (at your option) any later version. 11 | * 12 | * This program is distributed in the hope that it will be useful, 13 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | * GNU General Public License for more details. 16 | * 17 | * You should have received a copy of the GNU General Public License 18 | * along with this program; if not, write to the Free Software 19 | * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. 
20 | * 21 | * Author: Adam Granicz 22 | * granicz@cs.caltech.edu 23 | */ 24 | 25 | %{ 26 | open Fc_parse_type 27 | open Fc_frontends 28 | 29 | let parse_pascal buffer = 30 | FrontEnd.set_pascal_parsing (); 31 | Fc_parse_state.init_types (); 32 | let elist = Pasqual_parser.bootstrap Pasqual_lexer.main (Lexing.from_string buffer) in 33 | [PascalExpr (Fc_parse_state.current_position (), elist)] 34 | 35 | let parse_pasqual buffer = 36 | FrontEnd.set_pasqual_parsing (); 37 | Fc_parse_state.init_types (); 38 | Pasqual_parser.bootstrap Pasqual_lexer.main (Lexing.from_string buffer) 39 | 40 | let parse_fc buffer = 41 | FrontEnd.set_parameter_copying CopyNone; 42 | Fc_parse_state.init_types (); 43 | Fc_parse.prog Fc_lex.main (Lexing.from_string buffer) 44 | 45 | %} 46 | 47 | %token TokLt 48 | %token TokGt 49 | %token TokEq 50 | %token TokDQuote 51 | 52 | %token TokPascal 53 | %token TokPasqual 54 | %token TokFC 55 | 56 | %token TokEof 57 | %token TokLanguage 58 | %token TokEndLanguage 59 | %token TokSource 60 | 61 | %token TokString 62 | %token TokSourceString 63 | %token TokId 64 | 65 | %start program 66 | %type program 67 | %% 68 | 69 | program: 70 | source_block_list TokEof { $1 } 71 | | source_block_list { $1 } 72 | | TokEof { [] } 73 | 74 | source_block_list: 75 | source_block_list source_block 76 | { $1 @ $2 } 77 | | source_block { $1 } 78 | 79 | source_block: 80 | pascal_block { $1 } 81 | | pasqual_block { $1 } 82 | | fc_block { $1 } 83 | | generic_block { $1 } 84 | 85 | pascal_block: 86 | TokLt TokLanguage TokEq TokPascal TokSource TokEq TokSourceString TokGt 87 | { parse_pascal (fst $7) } 88 | 89 | pasqual_block: 90 | TokLt TokLanguage TokEq TokPasqual TokSource TokEq TokSourceString TokGt 91 | { parse_pasqual (fst $7) } 92 | 93 | fc_block: 94 | TokLt TokLanguage TokEq TokFC TokSource TokEq TokSourceString TokGt 95 | { parse_fc (fst $7) } 96 | 97 | generic_block: 98 | TokLt TokLanguage TokEq TokString TokSource TokEq TokSourceString TokGt 99 | { match String.lowercase (fst $4) with 100 | "pascal" -> 101 | parse_pascal (fst $7) 102 | | "pasqual" -> 103 | parse_pasqual (fst $7) 104 | | "c" | "fc" -> 105 | parse_fc (fst $7) 106 | | _ -> 107 | print_string ("warning: block ignored (no front-end is found for " ^ (fst $4) ^ ")\n"); 108 | [] 109 | } 110 | -------------------------------------------------------------------------------- /tests/mlpost.0.8.2-pfb_parser.mly: -------------------------------------------------------------------------------- 1 | (* Original file: mlpost.0.8.2/mlpost-0.8.2/dvi/pfb_parser.mly *) 2 | /**************************************************************************/ 3 | /* */ 4 | /* Copyright (C) Johannes Kanig, Stephane Lescuyer */ 5 | /* Jean-Christophe Filliatre, Romain Bardou and Francois Bobot */ 6 | /* */ 7 | /* This software is free software; you can redistribute it and/or */ 8 | /* modify it under the terms of the GNU Library General Public */ 9 | /* License version 2.1, with the special exception on linking */ 10 | /* described in file LICENSE. */ 11 | /* */ 12 | /* This software is distributed in the hope that it will be useful, */ 13 | /* but WITHOUT ANY WARRANTY; without even the implied warranty of */ 14 | /* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
*/ 15 | /* */ 16 | /**************************************************************************/ 17 | 18 | /* File parser.mly */ 19 | %{ 20 | open Fonts_type 21 | 22 | let encoding_table = ref (Array.create 256 "") 23 | %} 24 | %token NAME_CHARSTRING, NAME_ENCODING 25 | %token ID_ENCODING 26 | %token DUMB 27 | %type <(string array) * (string list)> pfb_human_main 28 | %type enc_main 29 | %start pfb_human_main enc_main 30 | %% 31 | pfb_human_main : 32 | DUMB encoding DUMB charstrings DUMB{ 33 | let rencoding_table = !encoding_table in 34 | encoding_table := Array.create 256 ""; 35 | (rencoding_table,$4)} 36 | 37 | encoding : 38 | | {} 39 | | ID_ENCODING NAME_ENCODING encoding {(!encoding_table).($1)<-$2 } 40 | 41 | charstrings : 42 | | {[]} 43 | | NAME_CHARSTRING charstrings { $1::$2} 44 | 45 | enc_main : 46 | | DUMB enc_aux DUMB {$2} 47 | 48 | enc_aux : 49 | | {[]} 50 | | NAME_ENCODING enc_aux {$1::$2} 51 | -------------------------------------------------------------------------------- /tests/modulo.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/modulo.mly -------------------------------------------------------------------------------- /tests/multi-token-alias-0.mly: -------------------------------------------------------------------------------- 1 | (* A group of several files, where each file defines token aliases 2 | that are used in other files. *) 3 | 4 | %token LPAREN "(" 5 | %token RPAREN ")" 6 | %token INT 7 | 8 | %% 9 | 10 | %public atom: 11 | "(" term ")" 12 | { $2 } 13 | | INT 14 | { $1 } 15 | -------------------------------------------------------------------------------- /tests/multi-token-alias-1.mly: -------------------------------------------------------------------------------- 1 | (* A group of several files, where each file defines token aliases 2 | that are used in other files. *) 3 | 4 | %token MUL "*" 5 | %token DIV "/" 6 | 7 | %% 8 | 9 | %public factor: 10 | factor "*" atom 11 | { $1 * $3 } 12 | | factor "/" atom 13 | { $1 / $3 } 14 | | atom 15 | { $1 } 16 | -------------------------------------------------------------------------------- /tests/multi-token-alias-2.mly: -------------------------------------------------------------------------------- 1 | (* A group of several files, where each file defines token aliases 2 | that are used in other files. *) 3 | 4 | %token PLUS "+" 5 | %token MINUS "-" 6 | 7 | %% 8 | 9 | %public term: 10 | term "+" factor 11 | { $1 + $3 } 12 | | term "-" factor 13 | { $1 - $3 } 14 | | factor 15 | { $1 } 16 | -------------------------------------------------------------------------------- /tests/multi-token-alias-3.mly: -------------------------------------------------------------------------------- 1 | (* A group of several files, where each file defines token aliases 2 | that are used in other files. 
*) 3 | 4 | %token EOL 5 | %start phrase 6 | 7 | %% 8 | 9 | phrase: 10 | term EOL 11 | { print_endline $1 } 12 | -------------------------------------------------------------------------------- /tests/multiple-functor.mly: -------------------------------------------------------------------------------- 1 | 2 | %token T 3 | %{ let y = Y.y %} 4 | %parameter 5 | %parameter 6 | %token U 7 | %{ let x = X.x %} 8 | 9 | %start main 10 | 11 | %% 12 | 13 | main: 14 | T U { ($1, $2) } 15 | 16 | -------------------------------------------------------------------------------- /tests/name-clash-1.mly: -------------------------------------------------------------------------------- 1 | %start name_clash_1_a 2 | %token FOO 3 | 4 | %% 5 | 6 | name_clash_1_a: 7 | a { () } 8 | 9 | a: 10 | FOO { () } 11 | 12 | -------------------------------------------------------------------------------- /tests/name-clash-2.mly: -------------------------------------------------------------------------------- 1 | %start b 2 | %token BAR 3 | 4 | %% 5 | 6 | b: 7 | a { $1 } 8 | 9 | a: 10 | BAR { () } 11 | 12 | -------------------------------------------------------------------------------- /tests/no-end-of-stream-conflict.mly: -------------------------------------------------------------------------------- 1 | %token INT 2 | %token PLUS TIMES END 3 | %start main 4 | 5 | %left PLUS 6 | %left TIMES 7 | 8 | %% 9 | 10 | main: 11 | e = expr END 12 | { e } 13 | 14 | expr: 15 | i = INT 16 | { i } 17 | | e1 = expr PLUS e2 = expr 18 | { e1 + e2 } 19 | | e1 = expr TIMES e2 = expr 20 | { e1 * e2 } 21 | 22 | -------------------------------------------------------------------------------- /tests/no_future.mly: -------------------------------------------------------------------------------- 1 | /* This grammar describes a nonempty list of A's, followed with EOF. 2 | It is LR(2), not LR(1). There is a shift/reduce conflict, which we 3 | resolve in favor of shifting. As a result, the production listA -> 4 | can never be reduced (Menhir warns about this), and the resulting 5 | automaton rejects every sentence of the form A+ EOF: it reads all 6 | of the A's, then, upon seeing EOF, rejects. */ 7 | 8 | /* This automaton does *not* have the property that a syntax error is 9 | detected as soon as possible, or (stated differently) that, as 10 | long as the automaton continues reading, a viable future exists. 11 | Indeed, this automaton accepts the *empty* language, so, for the 12 | property to be satisfied, it should always fail immediately, without 13 | reading anything. Yet, it will read an arbitrarily long sequence of 14 | A's and fail only upon encountering EOF. */ 15 | 16 | /* The problem has nothing to do with merging of states. Indeed, even 17 | the canonical LR(1) automaton exhibits this problem. */ 18 | 19 | %token A EOF 20 | %start main 21 | 22 | %nonassoc empty_list 23 | %nonassoc A 24 | 25 | %% 26 | 27 | listA: 28 | %prec empty_list {} 29 | | A listA {} 30 | 31 | main: 32 | listA A EOF {} 33 | -------------------------------------------------------------------------------- /tests/ocaml-test-Lex.mly: -------------------------------------------------------------------------------- 1 | /***********************************************************************/ 2 | /* */ 3 | /* Objective Caml */ 4 | /* */ 5 | /* Xavier Leroy, projet Cristal, INRIA Rocquencourt */ 6 | /* */ 7 | /* Copyright 1996 Institut National de Recherche en Informatique et */ 8 | /* en Automatique. All rights reserved. 
This file is distributed */ 9 | /* under the terms of the Q Public License version 1.0. */ 10 | /* */ 11 | /***********************************************************************/ 12 | 13 | /* $Id: grammar.mly,v 1.4 1999/11/17 18:58:38 xleroy Exp $ */ 14 | 15 | /* The grammar for lexer definitions */ 16 | 17 | %{ 18 | open Syntax 19 | open Gram_aux 20 | %} 21 | 22 | %token Tident 23 | %token Tchar 24 | %token Tstring 25 | %token Taction 26 | %token Trule Tparse Tand Tequal Tend Tor Tunderscore Teof Tlbracket Trbracket 27 | %token Tstar Tmaybe Tplus Tlparen Trparen Tcaret Tdash 28 | 29 | %left Tor 30 | %left CONCAT 31 | %nonassoc Tmaybe 32 | %left Tstar 33 | %left Tplus 34 | 35 | %start lexer_definition 36 | %type lexer_definition 37 | 38 | %% 39 | 40 | lexer_definition: 41 | header Trule definition other_definitions Tend 42 | { Lexdef($1, $3::(List.rev $4)) } 43 | ; 44 | header: 45 | Taction 46 | { $1 } 47 | | 48 | { Location(0,0) } 49 | ; 50 | other_definitions: 51 | other_definitions Tand definition 52 | { $3::$1 } 53 | | 54 | { [] } 55 | ; 56 | definition: 57 | Tident Tequal entry 58 | { ($1,$3) } 59 | ; 60 | entry: 61 | Tparse case rest_of_entry 62 | { $2 :: List.rev $3 } 63 | ; 64 | rest_of_entry: 65 | rest_of_entry Tor case 66 | { $3::$1 } 67 | | 68 | { [] } 69 | ; 70 | case: 71 | regexp Taction 72 | { ($1,$2) } 73 | ; 74 | regexp: 75 | Tunderscore 76 | { Characters all_chars } 77 | | Teof 78 | { Characters ['\000'] } 79 | | Tchar 80 | { Characters [$1] } 81 | | Tstring 82 | { regexp_for_string $1 } 83 | | Tlbracket char_class Trbracket 84 | { Characters $2 } 85 | | regexp Tstar 86 | { Repetition $1 } 87 | | regexp Tmaybe 88 | { Alternative($1, Epsilon) } 89 | | regexp Tplus 90 | { Sequence($1, Repetition $1) } 91 | | regexp Tor regexp 92 | { Alternative($1,$3) } 93 | | regexp regexp %prec CONCAT 94 | { Sequence($1,$2) } 95 | | Tlparen regexp Trparen 96 | { $2 } 97 | ; 98 | char_class: 99 | Tcaret char_class1 100 | { subtract all_chars $2 } 101 | | char_class1 102 | { $1 } 103 | ; 104 | char_class1: 105 | Tchar Tdash Tchar 106 | { char_class $1 $3 } 107 | | Tchar 108 | { [$1] } 109 | | char_class char_class %prec CONCAT 110 | { $1 @ $2 } 111 | ; 112 | 113 | %% 114 | 115 | -------------------------------------------------------------------------------- /tests/ocamlweb-yacc.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/ocamlweb-yacc.mly -------------------------------------------------------------------------------- /tests/oclisp.mly: -------------------------------------------------------------------------------- 1 | %token NAME 2 | %token LBRACKET RBRACKET EOF QUOTE DOT 3 | %start main 4 | %type main 5 | 6 | %% 7 | 8 | main: 9 | sexp { $1 } 10 | ; 11 | 12 | sexp: 13 | list { $1 } 14 | | atom { $1 } 15 | | QUOTE sexp { Types.Cons (Types.Atom "quote", Types.Cons($2, Types.Atom "nil")) } 16 | ; 17 | 18 | list: 19 | LBRACKET RBRACKET { Types.Atom "nil" } 20 | | LBRACKET inside_list RBRACKET { $2 } 21 | ; 22 | 23 | inside_list: 24 | sexp DOT sexp { Types.Cons ($1,$3) } 25 | | sexp { Types.Cons ($1, Types.Atom "nil") } 26 | | sexp inside_list {Types.Cons($1,$2)} 27 | ; 28 | 29 | atom: NAME { Types.Atom $1 } 30 | ; 31 | 32 | 33 | 34 | -------------------------------------------------------------------------------- /tests/odds.1.0-parser.mly: -------------------------------------------------------------------------------- 1 | (* Original file: 
odds.1.0/odds-1.0/src/parser.mly *) 2 | %{ 3 | 4 | open Odds.Algebra 5 | 6 | %} 7 | 8 | %token EOF 9 | %token INTEGER 10 | %token PLUS DASH STAR SLASH D 11 | %token LPAREN RPAREN 12 | 13 | %left PLUS DASH 14 | %left STAR SLASH 15 | %nonassoc NEG 16 | %left D 17 | 18 | %start entry 19 | 20 | %% 21 | 22 | entry: 23 | | t=formula EOF { t } 24 | 25 | formula: 26 | | i=INTEGER { !i } 27 | | LPAREN t = formula RPAREN { t } 28 | | l=formula D r=formula { dice l r } 29 | | l=formula PLUS r=formula { l + r } 30 | | l=formula DASH r=formula { l - r } 31 | | l=formula STAR r=formula { l * r } 32 | | l=formula SLASH r=formula { l / r } 33 | | DASH t=formula %prec NEG { ~- t } 34 | 35 | %% 36 | -------------------------------------------------------------------------------- /tests/ojacare.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/ojacare.mly -------------------------------------------------------------------------------- /tests/on-error-reduce-inlined.mly: -------------------------------------------------------------------------------- 1 | %token A B 2 | %start main 3 | %on_error_reduce ioption(A) 4 | 5 | %% 6 | 7 | main: 8 | ioption(A) B {} 9 | -------------------------------------------------------------------------------- /tests/on-error-reduce-unreachable.mly: -------------------------------------------------------------------------------- 1 | %token A B 2 | %start main 3 | %on_error_reduce bar 4 | 5 | %% 6 | 7 | main: 8 | A B {} 9 | 10 | bar: 11 | A {} 12 | -------------------------------------------------------------------------------- /tests/option2.mly: -------------------------------------------------------------------------------- 1 | %token FOO 2 | %start main 3 | 4 | %% 5 | 6 | main: 7 | o = option { o } 8 | 9 | option: 10 | FOO { () } 11 | 12 | -------------------------------------------------------------------------------- /tests/options.mly: -------------------------------------------------------------------------------- 1 | %token A B 2 | %start main 3 | 4 | %% 5 | 6 | main: 7 | option1(A); 8 | option2(B); 9 | {} 10 | 11 | let option1(x) := 12 | | { None } 13 | | x = x; { Some x } 14 | 15 | let option2(x) := 16 | | { None } 17 | | ~ = x; < Some > 18 | -------------------------------------------------------------------------------- /tests/pager.mly: -------------------------------------------------------------------------------- 1 | /* Taken from Pager's paper. Sent by Laurence Tratt. 2 | The LALR automaton has 21 states. 3 | Pager writes that his construction leads to 23 states. 4 | Yet, Menhir's version of Pager's algorithm leads to 38 states. 5 | Not sure why. Deserves study. 
*/ 6 | 7 | %start x 8 | %token A B C D E T U 9 | 10 | %% 11 | 12 | x : A y D {} | A z C {} | A t {} | B y E {} | B z D {} | B t {} 13 | y : T w {} | U x {} 14 | z : T U {} 15 | t : U x A {} 16 | w : U v {} 17 | v : {} 18 | -------------------------------------------------------------------------------- /tests/pair-new-syntax.mly: -------------------------------------------------------------------------------- 1 | %token A B C D 2 | %token EOF 3 | %start main 4 | 5 | %% 6 | 7 | let pair(x, y) == 8 | ~ = x; ~ = y; { x, y } 9 | 10 | let fst(p) := 11 | (~, _) = p; <> 12 | 13 | let snd(p) := 14 | (_, ~) = p; <> 15 | 16 | let main := 17 | (a, b) = pair(A, B); 18 | c = fst(pair(A, B)); 19 | d = snd(pair(A, B)); 20 | _ = C; 21 | D; 22 | () = EOF; 23 | { a + b + c + d } 24 | -------------------------------------------------------------------------------- /tests/parameterized-nonterminal.mly: -------------------------------------------------------------------------------- 1 | %token A 2 | %start a 3 | %% 4 | 5 | a: b(A) {} 6 | 7 | b(X): c(X) d {} 8 | 9 | d: A {} 10 | c(X): X {} 11 | -------------------------------------------------------------------------------- /tests/permutation-growth.mly: -------------------------------------------------------------------------------- 1 | (* Another (artificial) example where the parameters are exchanged 2 | in the recursive call. *) 3 | 4 | %token A B 5 | %start main 6 | 7 | %% 8 | 9 | F(X,Y): 10 | B {} 11 | | A F(Y, X) {} 12 | 13 | id(X): 14 | X {} 15 | 16 | main: 17 | F(A, id(A)) {} 18 | -------------------------------------------------------------------------------- /tests/permutation.mly: -------------------------------------------------------------------------------- 1 | %token A B EOF 2 | %start main 3 | 4 | %% 5 | 6 | (* A list of alternating X's and Y's, 7 | possibly empty, beginning with an X. *) 8 | 9 | (* This grammar was rejected by Menhir prior to 2017/12/06 10 | because aseq(X, Y) calls itself recursively as aseq(Y, X). *) 11 | 12 | aseq(X, Y): 13 | /* epsilon */ {} 14 | | X aseq(Y, X) {} 15 | 16 | main: 17 | aseq(A, B) EOF {} 18 | -------------------------------------------------------------------------------- /tests/petit-1.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/petit-1.mly -------------------------------------------------------------------------------- /tests/petit-2.mly: -------------------------------------------------------------------------------- 1 | (* This file defines the set of tokens produced by the lexer and 2 | exploited by the parser. 
*) 3 | 4 | %token INTCONST 5 | %token BOOLCONST 6 | %token ID 7 | %token PLUS MINUS TIMES SLASH AND OR NOT LT LE GT GE EQ NE 8 | %token LPAREN RPAREN LBRACKET RBRACKET COMMA COLONEQ SEMICOLON COLON DOT 9 | %token PROGRAM BEGIN END IF THEN ELSE WHILE DO PROCEDURE FUNCTION VAR 10 | %token NEW READLN WRITE WRITELN 11 | %token INTEGER BOOLEAN ARRAY OF 12 | 13 | %% 14 | 15 | -------------------------------------------------------------------------------- /tests/photos.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/photos.mly -------------------------------------------------------------------------------- /tests/prec_inline.mly: -------------------------------------------------------------------------------- 1 | %token INT 2 | %token PLUS MINUS TIMES DIV 3 | %token LPAREN RPAREN 4 | %token EOL 5 | 6 | %left PLUS MINUS /* lowest precedence */ 7 | %left TIMES DIV /* medium precedence */ 8 | %nonassoc UMINUS /* highest precedence */ 9 | 10 | %start main 11 | 12 | %% 13 | 14 | main: 15 | | e = expr EOL 16 | { e } 17 | 18 | (* [raw_expr] is inlined into [expr], and some of its productions 19 | carry %prec annotations. This is OK under the new rules of 20 | 2015/11/18. *) 21 | expr: 22 | e = raw_expr 23 | { e } 24 | 25 | %inline raw_expr: 26 | | i = INT 27 | { i } 28 | | LPAREN e = expr RPAREN 29 | { e } 30 | | e1 = expr PLUS e2 = expr 31 | { e1 + e2 } 32 | | e1 = expr MINUS e2 = expr 33 | { e1 - e2 } 34 | | e1 = expr TIMES e2 = expr 35 | { e1 * e2 } 36 | | e1 = expr DIV e2 = expr 37 | { e1 / e2 } 38 | | MINUS e = expr %prec UMINUS 39 | { - e } 40 | 41 | -------------------------------------------------------------------------------- /tests/private-and-public-1.mly: -------------------------------------------------------------------------------- 1 | %% 2 | 3 | %public a: {} 4 | -------------------------------------------------------------------------------- /tests/private-and-public-2.mly: -------------------------------------------------------------------------------- 1 | %token C 2 | %% 3 | 4 | a: C {} 5 | %public c: a {} 6 | -------------------------------------------------------------------------------- /tests/private-and-public-3.mly: -------------------------------------------------------------------------------- 1 | %start b 2 | %% 3 | 4 | b: a c {} 5 | -------------------------------------------------------------------------------- /tests/public-1.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/public-1.mly -------------------------------------------------------------------------------- /tests/public-2.mly: -------------------------------------------------------------------------------- 1 | %token U 2 | %% 3 | 4 | %public a: U {} 5 | -------------------------------------------------------------------------------- /tests/public-inline-1.mly: -------------------------------------------------------------------------------- 1 | %start a 2 | %token I 3 | %% 4 | a: b {} 5 | %public %inline b: I {} 6 | -------------------------------------------------------------------------------- /tests/public-inline-2.mly: -------------------------------------------------------------------------------- 1 | %token J 2 | %% 3 | %public %inline b: J {} 4 | -------------------------------------------------------------------------------- 
/tests/pxp-ucs2_to_utf8.mly: -------------------------------------------------------------------------------- 1 | /******************************************************/ 2 | /* Claudio Sacerdoti Coen */ 3 | /* 14/05/2000 */ 4 | /******************************************************/ 5 | 6 | %token CHAR 7 | %token IDENT 8 | %token LET 9 | %token EQ 10 | %token END_OF_LET 11 | %token RBRACKET 12 | %token PIPE 13 | %token LBRACKET 14 | %token RANGE 15 | %token EOF 16 | %start main 17 | %type main 18 | 19 | %% 20 | 21 | main: 22 | EOF { [] } 23 | | declaration main { $1::$2 } 24 | ; 25 | 26 | declaration: 27 | LET IDENT EQ regexp END_OF_LET 28 | { { Types.id = $2 ; Types.rel = $4 } } 29 | ; 30 | 31 | regexp: 32 | regexptoken PIPE regexp { $1::$3 } 33 | | regexptoken { [$1] } 34 | ; 35 | 36 | regexptoken: 37 | CHAR { Types.Char $1 } 38 | | LBRACKET CHAR RANGE CHAR RBRACKET { Types.Interval ($2,$4) } 39 | | IDENT { Types.Identifier $1 } 40 | ; 41 | -------------------------------------------------------------------------------- /tests/self.mly: -------------------------------------------------------------------------------- 1 | %token A 2 | %type s 3 | %start dummy 4 | 5 | %% 6 | 7 | dummy: A {} 8 | 9 | s: b s {} 10 | 11 | b: {} 12 | 13 | -------------------------------------------------------------------------------- /tests/self_ground.mly: -------------------------------------------------------------------------------- 1 | (* Because [self] ignores its argument (it calls itself recursively 2 | with a ground actual argument), expansion terminates. This grammar 3 | can (and should) be accepted. *) 4 | 5 | %token A B C 6 | %start start 7 | %% 8 | 9 | self(X): 10 | A self(B) {} 11 | | C {} 12 | 13 | start: 14 | self(A) {} 15 | -------------------------------------------------------------------------------- /tests/simple-if-conflict-no-eos.mly: -------------------------------------------------------------------------------- 1 | %token TRUE FALSE 2 | %token IF THEN ELSE 3 | %token EOF 4 | %start phrase 5 | 6 | %% 7 | 8 | phrase: 9 | b = expression EOF 10 | { b } 11 | ; 12 | 13 | expression: 14 | | TRUE 15 | { true } 16 | | FALSE 17 | { false } 18 | | IF b = expression THEN e = expression 19 | { if b then e else false } 20 | | IF b = expression THEN e1 = expression ELSE e2 = expression 21 | { if b then e1 else e2 } 22 | -------------------------------------------------------------------------------- /tests/simple-if-conflict.mly: -------------------------------------------------------------------------------- 1 | %token TRUE FALSE 2 | %token IF THEN ELSE 3 | %start expression 4 | 5 | %% 6 | 7 | expression: 8 | | TRUE 9 | { true } 10 | | FALSE 11 | { false } 12 | | IF b = expression THEN e = expression 13 | { if b then e else false } 14 | | IF b = expression THEN e1 = expression ELSE e2 = expression 15 | { if b then e1 else e2 } 16 | 17 | -------------------------------------------------------------------------------- /tests/split-public-symbol-with-renaming.mly: -------------------------------------------------------------------------------- 1 | %token A B EOF 2 | %start main 3 | %% 4 | %inline eps: {} 5 | %public liste(X): eps {} 6 | %public liste(B): A B liste(B) {} 7 | main: liste(A) B EOF {} 8 | -------------------------------------------------------------------------------- /tests/split-public-symbol.mly: -------------------------------------------------------------------------------- 1 | %token A EOF 2 | %start main 3 | %% 4 | %inline eps: {} 5 | %public liste(X): eps {} 6 | %public liste(X): 
X liste(X) {} 7 | main: liste(A) EOF {} 8 | /* The definition of liste(X) is split in two. 9 | Note: liste(X) cannot be renamed list(X), 10 | as it would then collide with the standard 11 | library. */ 12 | -------------------------------------------------------------------------------- /tests/subiso.mly: -------------------------------------------------------------------------------- 1 | /* File tparser.mly */ 2 | %{ 3 | (***********************************************************************) 4 | (* *) 5 | (* SubIso *) 6 | (* *) 7 | (* Projet Cristal, INRIA Rocquencourt *) 8 | (* *) 9 | (* Copyright 2002 Institut National de Recherche en Informatique et *) 10 | (* en Automatique. All rights reserved. This file is distributed *) 11 | (* under the terms of the GNU Lesser General Public License. *) 12 | (* *) 13 | (* Roberto Di Cosmo *) 14 | (***********************************************************************) 15 | 16 | (* 17 | $Id: tparser.mly,v 1.1 2002/03/11 16:03:27 dicosmo Exp $ 18 | *) 19 | 20 | open Types 21 | %} 22 | %token IDENT 23 | %token TVAR 24 | %token PLUS TIMES ARROW 25 | %token TYPE EQ AND 26 | %token LPAR RPAR 27 | %token Eof 28 | 29 | %right PRECTYDEF 30 | %left AND /* lowest precedence */ 31 | %nonassoc EQ 32 | %right ARROW 33 | %right TIMES 34 | %nonassoc ASSIGNS 35 | 36 | %start main /* the entry point */ 37 | %type main 38 | %% 39 | main: 40 | texpr Eof { $1 } 41 | ; 42 | texpr: 43 | | IDENT { TBase $1} 44 | | LPAR texpr RPAR { $2 } 45 | | texpr ARROW texpr { TArrow($1,$3) } 46 | | texpr TIMES texpr { TProduct($1,$3) } 47 | | TYPE type_binding_list %prec PRECTYDEF { Trec(List.rev $2) } 48 | ; 49 | 50 | type_binding_list: 51 | type_binding {[$1]} 52 | | type_binding_list AND type_binding {$3 :: $1} 53 | ; 54 | type_binding: 55 | IDENT EQ texpr {($1,$3)} 56 | ; 57 | -------------------------------------------------------------------------------- /tests/subtle-conflict-pager.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/subtle-conflict-pager.mly -------------------------------------------------------------------------------- /tests/symbolstartofs.mly: -------------------------------------------------------------------------------- 1 | %token A B 2 | %start main 3 | %% 4 | main: A B { $symbolstartofs } 5 | -------------------------------------------------------------------------------- /tests/symbolstartpos.mly: -------------------------------------------------------------------------------- 1 | %token A B C 2 | %start main 3 | %% 4 | (* Case: token in front. *) 5 | main: A B hop sugar { $symbolstartpos, $symbolstartofs } 6 | (* Case: epsilon symbol in front, followed with non-nullable symbol in front. *) 7 | hop: nothing bar A B { $symbolstartpos } 8 | (* Case: nullable symbol in front. *) 9 | bar: foo? B { $symbolstartpos } 10 | foo: C nothing {} 11 | (* Case: epsilon rule. *) 12 | nothing: { $symbolstartpos } 13 | (* Sugar. 
*) 14 | sugar: c = C { $loc, $loc(c), $sloc } 15 | -------------------------------------------------------------------------------- /tests/tilde-used-warning.mly: -------------------------------------------------------------------------------- 1 | %token A B C D 2 | %token EOF 3 | %start main 4 | 5 | %% 6 | 7 | let paire(x, y) == 8 | ~ = x; ~ = y; { x, y } 9 | 10 | let fst(p) := 11 | (~, _) = p; <> 12 | 13 | let snd(p) := 14 | (_, ~) = p; <> 15 | 16 | let bizarre_fst(p) := 17 | (x, ~) = p; {x} 18 | (* This use of ~ should trigger a warning. *) 19 | 20 | let hop(p) := 21 | (~, ~) = p; {()} 22 | (* This use of ~ should trigger a warning with two positions. *) 23 | 24 | let a := 25 | A 26 | 27 | let b := 28 | B 29 | 30 | let odd := 31 | ~ = a; b 32 | (* This use of ~ does not trigger a warning, 33 | because it is a pun -- [~] is sugar for [a]. 34 | A warning will be emitted by OCaml because 35 | [a] is unused. *) 36 | 37 | let main := 38 | hop(paire(A, B)); 39 | (a, b) = paire(A, B); 40 | c = fst(paire(A, B)); 41 | _ = bizarre_fst(paire(A, B)); 42 | d = snd(paire(A, B)); 43 | odd; 44 | ~ = C; 45 | (* This use of ~ should trigger a warning. *) 46 | D; 47 | () = EOF; 48 | { a + b + c + d } 49 | -------------------------------------------------------------------------------- /tests/tony-rc_parse.mly: -------------------------------------------------------------------------------- 1 | /* 2 | $Id: rc_parse.mly,v 1.1 1999/08/09 17:10:00 lindig Exp $ 3 | */ 4 | 5 | %{ 6 | open Rc_ast 7 | %} 8 | 9 | /* tokens */ 10 | 11 | %token STR 12 | %token ID 13 | %token BOOL 14 | %token FLOAT 15 | %token INT 16 | 17 | %token COMMA EQUAL TRUE FALSE EOF 18 | 19 | 20 | %start rcfile 21 | %type rcfile 22 | 23 | %% 24 | 25 | rcfile : rclines EOF { $1 } 26 | 27 | rclines : /**/ { empty } 28 | | rclines rcline { let (id,rc) = $2 in 29 | add id rc $1 30 | } 31 | 32 | rcline : ID EQUAL value { ($1,$3) } 33 | | ID EQUAL values { ($1,RClist(List.rev $3)) } 34 | 35 | value : ID { RCstr($1) } 36 | | STR { RCstr($1) } 37 | | TRUE { RCbool(true) } 38 | | FALSE { RCbool(false) } 39 | | INT { RCint($1) } 40 | | FLOAT { RCfloat($1) } 41 | 42 | values : value COMMA value { [$3 ; $1] } 43 | | values COMMA value { $3 :: $1 } 44 | -------------------------------------------------------------------------------- /tests/tptp2cime.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | 3 | open Abs_syntax;; 4 | open Symbols;; 5 | open Terms;; 6 | open Clauses;; 7 | 8 | %} 9 | 10 | 11 | %token IDENT VRB FILE_NAME 12 | %token PLUS MOINS VIRGULE POINT POINT_VIRGULE QUOTE 13 | %token PARGAUCHE PARDROITE CROGAUCHE CRODROIT 14 | %token TOKEN_INCLUDE TOKEN_INPUT_CLAUSE EGAL 15 | %token FIN 16 | 17 | %start donnees 18 | %type donnees 19 | 20 | %start term_line 21 | %type term_line 22 | 23 | %left TERMLIST 24 | %nonassoc PARDROITE 25 | 26 | %left PLUS MINUS 27 | %left MULT 28 | %left EXP 29 | 30 | %% 31 | 32 | donnees: 33 | FIN { [] } 34 | | declaration donnees { $1::$2 } 35 | ; 36 | 37 | declaration: 38 | keyword_include PARGAUCHE f_name PARDROITE POINT 39 | { Abstract_include $3 } 40 | | keyword_input_clause PARGAUCHE ident VIRGULE ident VIRGULE clause PARDROITE POINT 41 | { Abstract_clause $7 } 42 | ; 43 | keyword_include: 44 | TOKEN_INCLUDE { etat_analyse.lexer_state <- Include_lexer } 45 | ; 46 | keyword_input_clause: 47 | TOKEN_INPUT_CLAUSE { etat_analyse.lexer_state <- Input_clause_lexer } 48 | ; 49 | f_name: 50 | FILE_NAME { $1 } 51 | ; 52 | clause: 53 | CROGAUCHE literal_list CRODROIT { 
transform_clause $2 } 54 | ; 55 | literal_list: 56 | literal { $1::[] } 57 | | literal VIRGULE literal_list { $1::$3 } 58 | ; 59 | literal: 60 | PLUS atom { P($2) } 61 | | MOINS atom { M($2) } 62 | ; 63 | atom: 64 | EGAL PARGAUCHE term VIRGULE term PARDROITE { ($3,$5) } 65 | ; 66 | ident: 67 | IDENT { $1 } 68 | ; 69 | term : 70 | VRB { add_operator_to_cime_pb VARIABLE 0 DEFAULT $1; 71 | Var (var_id_of_string $1) } 72 | | IDENT { add_operator_to_cime_pb FREE 0 DEFAULT $1; 73 | Term ((get_symbol_id $1),[]) } 74 | | PARGAUCHE term PARDROITE { $2 } 75 | | IDENT PARGAUCHE term_list PARDROITE 76 | { let ar_f = (List.length $3) in 77 | add_operator_to_cime_pb FREE ar_f DEFAULT $1; 78 | let f=(get_symbol_id $1) in 79 | Term(f,$3) 80 | } 81 | ; 82 | term_list: 83 | term %prec TERMLIST { [$1] } 84 | | term VIRGULE term_list { $1 :: $3 } 85 | ; 86 | term_line: 87 | term POINT_VIRGULE { $1 } 88 | ; 89 | 90 | 91 | -------------------------------------------------------------------------------- /tests/typage.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/typage.mly -------------------------------------------------------------------------------- /tests/typed-freshml.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/typed-freshml.mly -------------------------------------------------------------------------------- /tests/undetermined_sort.mly: -------------------------------------------------------------------------------- 1 | %token A B 2 | %start main 3 | 4 | %% 5 | 6 | main: 7 | A B {} 8 | 9 | foo(X): 10 | {} 11 | 12 | (* [foo] is unreachable, and the sort of its parameter [X] is undetermined. 
*) 13 | -------------------------------------------------------------------------------- /tests/unreachable-symbol.mly: -------------------------------------------------------------------------------- 1 | %token FOO 2 | %start main 3 | %type main 4 | 5 | %% 6 | 7 | main: 8 | FOO 9 | { () } 10 | 11 | bar: 12 | main 13 | { $1 } 14 | 15 | -------------------------------------------------------------------------------- /tests/unused-attribute.mly: -------------------------------------------------------------------------------- 1 | %token A B C 2 | %attribute list(B) [@foo "bar"] (* the symbol list(B) is never created during expansion, so this declaration is not OK *) 3 | %attribute list(C) [@foo "bar"] (* the symbol list(C) is created during expansion, so this declaration is OK *) 4 | %start main 5 | %% 6 | main: 7 | A B list(C) {} 8 | -------------------------------------------------------------------------------- /tests/unused-pseudo-token.mly: -------------------------------------------------------------------------------- 1 | %token FOO BAR 2 | 3 | %left zorro 4 | 5 | %start main 6 | 7 | %% 8 | 9 | main: 10 | FOO {} 11 | 12 | -------------------------------------------------------------------------------- /tests/unused-token1.mly: -------------------------------------------------------------------------------- 1 | %token A B 2 | %start a 3 | %% 4 | 5 | a: A {} 6 | -------------------------------------------------------------------------------- /tests/unused-value.mly: -------------------------------------------------------------------------------- 1 | %token FOO BAR 2 | %start main 3 | 4 | %% 5 | 6 | main: 7 | f = FOO b = BAR 8 | { () } 9 | 10 | -------------------------------------------------------------------------------- /tests/uppercase.mly: -------------------------------------------------------------------------------- 1 | /* A grammar where a nonterminal symbol begins with an uppercase 2 | letter. Non-standard, but allowed, for compatibility with 3 | ocamlyacc. In versions of Menhir up to 20150911, this caused 4 | a problem when --infer was used. 
*/ 5 | 6 | %token A B EOF 7 | %start entry 8 | 9 | %% 10 | 11 | entry: 12 | AB+ EOF {} 13 | 14 | AB: 15 | A B {} 16 | 17 | -------------------------------------------------------------------------------- /tests/useless-priorities.mly: -------------------------------------------------------------------------------- 1 | %token INT 2 | %token PLUS TIMES 3 | %left PLUS 4 | %left TIMES 5 | %token EOF 6 | %start main 7 | %% 8 | expression: i = INT { i } | e = expression; o = op; f = expression { o e f } 9 | op: PLUS { ( + ) } | TIMES { ( * ) } 10 | main: e = expression EOF { e } 11 | -------------------------------------------------------------------------------- /tests/useless-pseudo-token.mly: -------------------------------------------------------------------------------- 1 | %token FOO BAR 2 | 3 | %left zorro 4 | 5 | %start main 6 | 7 | %% 8 | 9 | main: 10 | FOO BAR {} %prec zorro 11 | 12 | -------------------------------------------------------------------------------- /tests/wallace.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/wallace.mly -------------------------------------------------------------------------------- /tests/webidl.1.4-parser00.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | let return_if_strict strict value = 3 | if strict then 4 | raise Parsing.Parse_error 5 | else 6 | value 7 | 8 | %} 9 | 10 | %parameter 13 | 14 | %% 15 | 16 | %public legacyArray : 17 | | nonAnyType LBRACKET RBRACKET null 18 | { 19 | return_if_strict Param.strict 20 | (to_non_any $4 (`FrozenArray ([], ($1 :> type_)))) 21 | } 22 | 23 | %public promiseOnly : 24 | | PROMISE { return_if_strict Param.strict (`Promise `Any) } 25 | 26 | %public promiseNull : 27 | | PROMISE LT returnType GT QUESTION { return_if_strict Param.strict $3 } 28 | 29 | %public attributeSerializetion : 30 | | ATTRIBUTE { return_if_strict Param.strict (`Identifiers [attribute]) } 31 | 32 | %public commnaEnd : 33 | | COMMA { return_if_strict Param.strict [] } 34 | 35 | %public emptyExtendedAttributeList : 36 | | LBRACKET RBRACKET { return_if_strict Param.strict [] } -------------------------------------------------------------------------------- /tests/webidl.1.4-parser01.mly: -------------------------------------------------------------------------------- 1 | %{ 2 | (* Web IDL parser 3 | * The below rules are based on Editor’s Draft, 1 June 2017 4 | * https://heycam.github.io/webidl/#idl-grammar 5 | *) 6 | open Ast 7 | open Keyword 8 | %} 9 | 10 | %start ext_main 11 | %type < Ast.extended_attribute > ext_main 12 | 13 | %% 14 | 15 | ext_main : 16 | | extendedAttribute EOF { $1 } 17 | 18 | %public extendedAttributeList : 19 | | LBRACKET extendedAttribute extendedAttributes RBRACKET { ($2 :: $3) } 20 | | { [] } 21 | 22 | extendedAttributes : 23 | | COMMA extendedAttribute extendedAttributes { $2 :: $3 } 24 | | { [] } 25 | 26 | extendedAttribute : 27 | | extendedAttributeNoArgs { $1 } 28 | | extendedAttributeArgList { $1 } 29 | | extendedAttributeIdent { $1 } 30 | | extendedAttributeIdentList { $1 } 31 | | extendedAttributeNamedArgList { $1 } 32 | 33 | identifierList : 34 | | IDENTIFIER identifiers { $1 :: $2 } 35 | 36 | extendedAttributeNoArgs : 37 | | IDENTIFIER { `NoArgs $1 } 38 | 39 | extendedAttributeArgList : 40 | | IDENTIFIER LPAR argumentList RPAR { `ArgumentList($1, $3) } 41 | 42 | extendedAttributeIdent : 43 | | IDENTIFIER EQUAL IDENTIFIER { 
`Ident($1, $3) } 44 | 45 | extendedAttributeIdentList : 46 | | IDENTIFIER EQUAL LPAR identifierList RPAR { `IdentList($1, $4) } 47 | 48 | extendedAttributeNamedArgList : 49 | | IDENTIFIER EQUAL IDENTIFIER LPAR argumentList RPAR 50 | { `NamedArgList($1, $3, $5) } -------------------------------------------------------------------------------- /tests/webidl.1.4-parser04.mly: -------------------------------------------------------------------------------- 1 | %token LBRACE RBRACE LBRACKET RBRACKET LPAR RPAR LT GT 2 | %token UNSIGNED BYTE OCTET SHORT LONG 3 | %token DOMSTRING USVSTRING BYTESTRING 4 | %token UNRESTRICTED FLOAT DOUBLE 5 | %token ANY VOID BOOLEAN OBJECT OR 6 | %token TRUE FALSE NULL INFINITY MINUSINFINITY NAN 7 | %token GETTER SETTER DELETER LEGACYCALLER 8 | %token STRINGIFIER 9 | %token QUESTION EQUAL COMMA COLON SEMICOLON ELLIPSIS MINUS DOT 10 | %token MAPLIKE SETLIKE ITERABLE 11 | %token PROMISE RECORD SEQUENCE INCLUDES MIXIN 12 | %token NAMESPACE CALLBACK PARTIAL INTERFACE DICTIONARY ENUM 13 | %token IMPLEMENTS INHERIT ATTRIBUTE TYPEDEF CONST 14 | %token READONLY REQUIRED STATIC OPTIONAL 15 | %token DOMEXCEPTION ERROR_ 16 | %token INT8ARRAY INT16ARRAY INT32ARRAY 17 | %token UINT8ARRAY UINT16ARRAY UINT32ARRAY UINT8CLAMPEDARRAY 18 | %token FLOAT32ARRAY FLOAT64ARRAY 19 | %token ARRAYBUFFER DATAVIEW FROZENARRAY 20 | 21 | %token INTVAL 22 | %token FLOATVAL 23 | %token IDENTIFIER STRING 24 | %token OTHER 25 | %token EOF 26 | 27 | %% -------------------------------------------------------------------------------- /tests/why-clparser.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/why-clparser.mly -------------------------------------------------------------------------------- /tests/why-cparser.mly: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lelio-Brun/Obelisk/3b8cb17133628f9689e5656ff9d7f99bdcc6e68f/tests/why-cparser.mly -------------------------------------------------------------------------------- /tests/yann.mly: -------------------------------------------------------------------------------- 1 | %token LPAREN RPAREN MATRIX TIMES INT PLUS TENSOR 2 | %start exp 3 | %% 4 | 5 | exp: 6 | term {} 7 | 8 | fa1: 9 | LPAREN exp RPAREN {} 10 | | INT {} 11 | 12 | fa4: 13 | MATRIX {} 14 | | LPAREN exp RPAREN {} 15 | 16 | product: 17 | product TIMES fa1 {} 18 | | fa1 {} 19 | 20 | tensorproduct: 21 | tensorproduct TENSOR fa4 {} 22 | | fa4 {} 23 | 24 | term: 25 | term PLUS product {} 26 | | term PLUS tensorproduct {} 27 | | product {} 28 | | term PLUS tensorproduct {} 29 | --------------------------------------------------------------------------------
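The OCaml helper fragments that follow (from src/helpers/: latexBacknaur.ml, latexSimplebnf.ml, latexSyntax.ml, latexTabular.ml, miniHelper.ml, miniHtml.ml) each supply the same small family of Format-based callbacks — def, prod_bar, bar, space, break, eps, rule_begin, rule_end, print_rule_name, print_header — which render one grammar rule piece by piece for a particular output format. As an illustration only, a hypothetical plain-text helper with the same callback names could look like the sketch below; the concrete renderings (" ::= ", "| ", "<empty>") are invented for this sketch and are not taken from the repository, whose shared printing code is not shown in this excerpt.

(* A minimal, hypothetical plain-text helper: same callback shape as the
   LaTeX/HTML helpers below, with made-up renderings chosen purely for
   illustration. *)
open Format

let def fmt = fprintf fmt " ::= "          (* separates the rule name from its first production *)
let prod_bar fmt = fprintf fmt "| "        (* introduces each subsequent production *)
let bar fmt = fprintf fmt "@ | "           (* alternative bar inside an anonymous group *)
let space fmt = fprintf fmt "@ "           (* breakable space between symbols *)
let break fmt = fprintf fmt "@;"           (* break between productions *)
let eps fmt = fprintf fmt "<empty>"        (* empty right-hand side *)
let rule_begin fmt = fprintf fmt "@[<v 2>" (* open a box for one rule... *)
let rule_end fmt = fprintf fmt "@]@;"      (* ...and close it before the next rule *)

Only these callbacks appear to differ between the default, EBNF, HTML and LaTeX backends; the surrounding traversal of the grammar is shared.
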
" 65 | let prod_bar fmt = fprintf fmt "@[
" 66 | 67 | let print_rule_name = 68 | print_rule_name_with 69 | (print_string' "") 70 | (print_string' "") 71 | 72 | let print_symbol symbols = 73 | print_symbol_aux symbols 74 | (print_string' "") 75 | (print_string' "") 76 | 77 | let opt _ print fmt = 78 | fprintf fmt "%a%t%a" 79 | print_string "" 80 | print 81 | print_string "" 82 | 83 | let plus e print fmt = 84 | fprintf fmt "%a%t%a" 85 | print_string "" 86 | (par e print) 87 | print_string "" 88 | let star e print fmt = 89 | fprintf fmt "%a%t%a" 90 | print_string "" 91 | (par e print) 92 | print_string "" 93 | -------------------------------------------------------------------------------- /src/helpers/latexBacknaur.ml: -------------------------------------------------------------------------------- 1 | open Format 2 | 3 | include MiniLatex 4 | 5 | let print_header symbols fmt = 6 | documentclass 7 | (fun fmt -> fprintf fmt 8 | "%a@;\ 9 | \\newenvironment{%s}{\\begin{bnf*}}{\\end{bnf*}}@;@;\ 10 | %a%a%a%a%a%a%a%a@;" 11 | usepackage ("[epsilon]", "backnaur") 12 | grammarname 13 | newcommand ("gramsp", 0, None, print_string' "\\ensuremath{\\bnfsp}") 14 | newcommand ("gramterm", 1, None, print_string' "\\bnfts{#1}") 15 | newcommand ("gramnonterm", 1, None, print_string' "\\ensuremath{\\bnfpn{#1}}") 16 | newcommand ("gramfunc", 1, None, print_string' "\\ensuremath{\\bnfpn{#1}}") 17 | newcommand ("grameps", 0, None, print_string' "\\ensuremath{\\bnfes}") 18 | newcommand ("gramprod", 3, (Some "\\textwidth"), 19 | fun fmt -> fprintf fmt 20 | "\\bnfprod{#2}{%%@;<0 2>\ 21 | \\begin{minipage}[t]{#1}@;<0 4>\ 22 | $#3$@;<0 2>\ 23 | \\end{minipage}}") 24 | newcommand ("grambar", 0, None, print_string' "\\hspace*{-2.5em}\\bnfor\\hspace*{1.2em}") 25 | newcommand ("grambaranon", 0, None, print_string' "\\ensuremath{\\bnfor}")); 26 | begin_document (fun _ -> ()) fmt symbols 27 | 28 | let def fmt = print_string fmt "}{" 29 | let prod_bar fmt = fprintf fmt "\\%s " (command "grambar") 30 | let bar fmt = fprintf fmt "@ \\%s@ " (command "grambaranon") 31 | let space fmt = fprintf fmt "\\%s@ " (command "gramsp") 32 | let break fmt = fprintf fmt "\\\\@;" 33 | let eps fmt = fprintf fmt "\\%s" (command "grameps") 34 | 35 | let print_rule_name = 36 | print_rule_name_raw 37 | let rule_begin fmt = 38 | fprintf fmt "@[\\%s{" (command "gramprod") 39 | let rule_end fmt = 40 | fprintf fmt "}\\\\@]" 41 | -------------------------------------------------------------------------------- /src/helpers/latexSimplebnf.ml: -------------------------------------------------------------------------------- 1 | (* open Common *) 2 | open Format 3 | 4 | include MiniLatex 5 | 6 | let print_header symbols fmt = 7 | documentclass (fun fmt -> 8 | fprintf fmt 9 | "%a@;\ 10 | @[\\newenvironment{%s}{@;\ 11 | @[\\begin{bnf}[@;\ 12 | colspec={%@{}l%@{}rcX%@{}l%@{}},@;\ 13 | column{2}={cmd=\\%s}\ 14 | @]@;]@]@;\ 15 | }{@;<0 2>\ 16 | \\end{bnf}@;}@;@;\ 17 | %a%a%a%a%a%a%a@;\ 18 | @[\\SetBNFConfig{@;\ 19 | relation-sym-map={{::=}={\\%s}},@;\ 20 | or-sym={\\%s}\ 21 | @]@;}@;@;" 22 | usepackage ("", "simplebnf") 23 | grammarname 24 | (command "gramnonterm") 25 | newcommand ("gramterm", 1, None, print_string' "#1") 26 | newcommand ("gramnonterm", 1, None, print_string' "\\IfBlankF{#1}{\\ensuremath{\\langle\\textnormal{#1}\\rangle}}") 27 | newcommand ("gramfunc", 1, None, fun fmt -> fprintf fmt "\\%s{#1}" (command "gramnonterm")) 28 | newcommand ("gramdef", 0, None, print_string' "\\ensuremath{\\Coloneqq}") 29 | newcommand ("grambar", 0, None, print_string' "\\ensuremath{|}") 30 | newcommand 
("grambaranon", 0, None, print_string' "\\ensuremath{|}") 31 | newcommand ("grameps", 0, None, print_string' "\\ensuremath{\\epsilon}") 32 | (command "gramdef") 33 | (command "grambar") 34 | ); 35 | begin_document (fun _ -> ()) fmt symbols 36 | 37 | let def fmt = fprintf fmt " ::= " 38 | let prod_bar fmt = fprintf fmt "| " 39 | let bar fmt = fprintf fmt "@ \\%s@ " (command "grambaranon{}") 40 | let space fmt = fprintf fmt "@ " 41 | let break fmt = fprintf fmt "@;" 42 | let eps fmt = fprintf fmt "\\%s" (command "grameps") 43 | 44 | let print_rule_name print_params fmt name = 45 | fprintf fmt "%a :" 46 | (print_rule_name_raw print_params) name 47 | let rule_begin fmt = 48 | fprintf fmt "@[" 49 | let rule_end fmt = 50 | fprintf fmt "@]@;;;" 51 | -------------------------------------------------------------------------------- /src/helpers/latexSyntax.ml: -------------------------------------------------------------------------------- 1 | open Common 2 | open Format 3 | 4 | include MiniLatex 5 | 6 | let print_header symbols fmt = 7 | let max = 8 | let params = 9 | let rec aux = function 10 | | [] -> "" 11 | | [x] -> x 12 | | x :: xs -> sprintf "%s, %s" x (aux xs) 13 | in function 14 | | [] -> "" 15 | | xs -> sprintf "(%s)" (aux xs) 16 | in 17 | let compare_length (s1, xs1) (s2, xs2) = 18 | compare 19 | (String.length (s2 ^ params xs2)) 20 | (String.length (s1 ^ params xs1)) 21 | in 22 | let f, xs = try 23 | List.(hd (sort compare_length (Symbols.defined symbols))) 24 | with _ -> " ", [] 25 | in 26 | let max = f ^ params xs in 27 | Re.Str.global_replace (Re.Str.regexp "_") "\\_" max 28 | in 29 | documentclass (fun fmt -> 30 | fprintf fmt 31 | "%a@;\ 32 | \\newenvironment{%s}{\\begin{grammar}}{\\end{grammar}}@;@;\ 33 | %a%a%a%a%a%a%a\ 34 | \\renewcommand\\grammarlabel[2]{\\%s{#1} #2}@;\ 35 | \\newlength{\\%s}@;\ 36 | \\settowidth{\\%s}{\\synt{%s} \\%s{} }@;@;" 37 | usepackage ("", "syntax") 38 | grammarname 39 | newcommand ("gramterm", 1, None, print_string' "\\lit{#1}") 40 | newcommand ("gramnonterm", 1, None, print_string' "\\synt{#1}") 41 | newcommand ("gramfunc", 1, None, fun fmt -> fprintf fmt "\\%s{#1}" (command "gramnonterm")) 42 | newcommand ("gramdef", 0, None, print_string' "::=") 43 | newcommand ("grambar", 0, None, print_string' "\\alt") 44 | newcommand ("grambaranon", 0, None, print_string' "\\ensuremath{|}") 45 | newcommand ("grameps", 0, None, print_string' "\\ensuremath{\\epsilon}") 46 | (command "gramnonterm") 47 | (command "grammaxindent") 48 | (command "grammaxindent") 49 | max 50 | (command "gramdef") 51 | ); 52 | begin_document (fun fmt -> fprintf fmt 53 | "\\setlength{\\grammarindent}{\\%s}" 54 | (command "grammaxindent")) 55 | fmt symbols 56 | 57 | let def fmt = fprintf fmt " \\%s{} " (command "gramdef") 58 | let prod_bar fmt = fprintf fmt "\\%s " (command "grambar") 59 | let bar fmt = fprintf fmt "@ \\%s@ " (command "grambaranon{}") 60 | let space fmt = fprintf fmt "@ " 61 | let break fmt = fprintf fmt "@;" 62 | let eps fmt = fprintf fmt "\\%s" (command "grameps") 63 | 64 | let print_rule_name print_params fmt name = 65 | fprintf fmt "%a%a%a" 66 | print_string "<" 67 | (print_rule_name_raw print_params) name 68 | print_string ">" 69 | let rule_begin fmt = 70 | fprintf fmt "@[" 71 | let rule_end fmt = 72 | fprintf fmt "@]@;" 73 | -------------------------------------------------------------------------------- /src/helpers/latexTabular.ml: -------------------------------------------------------------------------------- 1 | open Format 2 | 3 | include MiniLatex 4 | 5 | let 
print_header symbols fmt = 6 | documentclass (fun fmt -> 7 | fprintf fmt 8 | "%a%a%a@;\ 9 | %% https://tex.stackexchange.com/a/279787@;\ 10 | \\makeatletter@;\ 11 | \\let\\collectcell@notabu\\collectcell@;\ 12 | \\def\\collectcell@intabu#1#2\\tabu@cellleft{#2\\tabu@cellleft\\collectcell@notabu{#1}}@;\ 13 | \\def\\collectcell{\\tabu@ifenvir\\collectcell@intabu\\collectcell@notabu}@;\ 14 | \\makeatother@;\ 15 | @[\\newenvironment{%s}{@;\ 16 | \\begin{longtabu}{\ 17 | %@{}>{\\collectcell\\%s}r<{\\endcollectcell}%@{}c%@{}X%@{}}@]@;\ 18 | }{@;<0 2>\ 19 | \\end{longtabu}@;}@;@;\ 20 | %a%a%a%a%a%a%a%a@;" 21 | usepackage ("", "longtable") 22 | usepackage ("", "tabu") 23 | usepackage ("", "collcell") 24 | grammarname 25 | (command "gramnonterm") 26 | newcommand ("gramsp" ,0, None, print_string' "\\quad") 27 | newcommand ("gramdef", 0, None, fun fmt -> 28 | fprintf fmt "$\\%s::=\\%s$" (command "gramsp") (command "gramsp")) 29 | newcommand ("grambar", 0, None, fun fmt -> 30 | fprintf fmt "$\\%s|\\%s$" (command "gramsp") (command "gramsp")) 31 | newcommand ("grambaranon", 0, None, print_string' "$|$") 32 | newcommand ("grameps", 0, None, print_string' "\\ensuremath{\\epsilon}") 33 | newcommand ("gramnonterm", 1, None, print_string' "\\IfBlankF{#1}{\\ensuremath{\\langle\\textnormal{#1}\\rangle}}") 34 | newcommand ("gramfunc", 1, None, fun fmt -> fprintf fmt "\\%s{#1}" (command "gramnonterm")) 35 | newcommand ("gramterm", 1, None, print_string' "#1") 36 | ); 37 | begin_document (fun _ -> ()) fmt symbols 38 | 39 | let def fmt = fprintf fmt "& \\%s & " (command "gramdef") 40 | let prod_bar fmt = fprintf fmt "& \\%s &" (command "grambar") 41 | let bar fmt = fprintf fmt "@ \\%s@ " (command "grambaranon{}") 42 | let space fmt = fprintf fmt "@ " 43 | let break fmt = fprintf fmt "\\\\@;" 44 | let eps fmt = fprintf fmt "\\%s" (command "grameps") 45 | 46 | let print_rule_name = print_rule_name_raw 47 | let rule_begin fmt = 48 | fprintf fmt "@[" 49 | let rule_end fmt = 50 | fprintf fmt "@;\\\\@]@;" 51 | -------------------------------------------------------------------------------- /src/helpers/miniHelper.ml: -------------------------------------------------------------------------------- 1 | open Format 2 | 3 | let print_string fmt s = 4 | pp_print_string fmt s 5 | let print_string' s fmt = print_string fmt s 6 | 7 | let print_param = print_string 8 | 9 | let production_begin fmt = 10 | fprintf fmt "@[" 11 | let production_end fmt = 12 | fprintf fmt "@]" 13 | 14 | let enclose print op cl fmt = 15 | fprintf fmt "%t%t%t" 16 | op print cl 17 | 18 | let par e print fmt = 19 | (if e then enclose print (print_string' "(") (print_string' ")") else print) 20 | fmt 21 | 22 | let print_rule_name_with opening closing print_params fmt name = 23 | fprintf fmt "%t%a%t%t" opening print_string name print_params closing 24 | 25 | let print_symbol_aux symbols opening closing print_params fmt s = 26 | let is_def = match Common.Symbols.is_defined s symbols with 27 | | Some _ -> true 28 | | None -> false 29 | in 30 | let s = match Hashtbl.find_opt Lexer.tokens s with 31 | | Some a -> if !Options.no_aliases then a else "'" ^ s ^ "'" 32 | | None -> s 33 | in 34 | fprintf fmt "%t%a%t%t" 35 | (if is_def then opening else fun _ -> ()) 36 | print_string s 37 | print_params 38 | (if is_def then closing else fun _ -> ()) 39 | -------------------------------------------------------------------------------- /src/helpers/miniHtml.ml: -------------------------------------------------------------------------------- 1 | open Format 2 | 3 | 
include MiniHelper 4 | 5 | let print_header_with_style fmt style = 6 | fprintf fmt 7 | "@[@;\ 8 | @[@;\ 9 | @[@;\ 10 | Grammar@;\ 11 | @[\ 33 | @]@;@;@;\ 34 | @[@;@;\ 35 | @[@;@;" 36 | style 37 | 38 | let print_footer fmt = 39 | fprintf fmt 40 | "@]@;
@;\ 41 | @]@;@;\ 42 | @]@;@]@." 43 | 44 | let bar fmt = pp_print_string fmt " | " 45 | let space fmt = fprintf fmt "@ " 46 | let break fmt = fprintf fmt "@;" 47 | let eps fmt = pp_print_string fmt "ε" 48 | 49 | let rule_begin fmt = 50 | fprintf fmt "@[