├── .formatter.exs
├── .gitignore
├── .travis.yml
├── LICENSE
├── README.md
├── config
│   └── config.exs
├── coveralls.json
├── lib
│   └── leibniz.ex
├── mix.exs
├── mix.lock
├── src
│   ├── lexer.erl
│   ├── lexer.xrl
│   ├── parser.erl
│   └── parser.yrl
└── test
    ├── leibniz_test.exs
    └── test_helper.exs

/.formatter.exs:
--------------------------------------------------------------------------------
1 | # Used by "mix format"
2 | [
3 |   inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}"]
4 | ]
5 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # The directory Mix will write compiled artifacts to.
2 | /_build/
3 | 
4 | # If you run "mix test --cover", coverage assets end up here.
5 | /cover/
6 | 
7 | # The directory Mix downloads your dependencies sources to.
8 | /deps/
9 | 
10 | # Where 3rd-party dependencies like ExDoc output generated docs.
11 | /doc/
12 | 
13 | # Ignore .fetch files in case you like to edit your project deps locally.
14 | /.fetch
15 | 
16 | # If the VM crashes, it generates a dump, let's ignore it too.
17 | erl_crash.dump
18 | 
19 | # Also ignore archive artifacts (built via "mix archive.build").
20 | *.ez
21 | 
22 | # Ignore package tarball (built via "mix hex.build").
23 | leibniz-*.tar
24 | 
25 | 
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: elixir
2 | elixir:
3 |   - '1.6'
4 | otp_release:
5 |   - '20.2'
6 | 
7 | before_script:
8 |   - MIX_ENV=test mix compile
9 | 
10 | script:
11 |   - MIX_ENV=test mix do compile, coveralls.json
12 | 
13 | after_success:
14 |   - bash <(curl -s https://codecov.io/bash)
15 | 
16 | 
17 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2018 Saúl Cabrera
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Leibniz
2 | [![Build Status](https://travis-ci.org/saulecabrera/leibniz.svg?branch=master)](https://travis-ci.org/saulecabrera/leibniz)
3 | [![codecov](https://codecov.io/gh/saulecabrera/leibniz/branch/master/graph/badge.svg)](https://codecov.io/gh/saulecabrera/leibniz)
4 | 
5 | Leibniz is a pure Erlang/Elixir math expression parser and evaluator.
6 | 
7 | ## Installation
8 | 
9 | ```elixir
10 | defp deps() do
11 |   [{:leibniz, "~> 1.0.0"}]
12 | end
13 | ```
14 | 
15 | ## Usage
16 | 
17 | [Documentation](https://hexdocs.pm/leibniz/api-reference.html)
18 | 
19 | Leibniz provides two core features: evaluating valid math expressions, and evaluating them against a given context of variables.
20 | 
21 | #### Examples
22 | 
23 | Evaluating simple math expressions:
24 | 
25 | ```elixir
26 | Leibniz.eval("1 + 1")
27 | 
28 | {:ok, 2}
29 | 
30 | ```
31 | 
32 | Evaluating math expressions in a given context:
33 | 
34 | ```elixir
35 | Leibniz.eval("10 * foo", foo: 10)
36 | 
37 | {:ok, 100}
38 | ```
39 | 
40 | ```elixir
41 | Leibniz.eval("1 * baz")
42 | {:error, "value expected for the following dependencies: baz"}
43 | ```
44 | 
45 | ## TODO
46 | 
47 | - [ ] Improve errors
48 | - [ ] Provide a `parse/1` function
49 | - [ ] Provide a `dependencies/1` function
50 | - [ ] Add trigonometric and other math functions
51 | 
52 | ## LICENSE
53 | 
54 | MIT
55 | 
--------------------------------------------------------------------------------
/config/config.exs:
--------------------------------------------------------------------------------
1 | # This file is responsible for configuring your application
2 | # and its dependencies with the aid of the Mix.Config module.
3 | use Mix.Config
4 | 
5 | # This configuration is loaded before any dependency and is restricted
6 | # to this project. If another project depends on this project, this
7 | # file won't be loaded nor affect the parent project. For this reason,
8 | # if you want to provide default values for your application for
9 | # 3rd-party users, it should be done in your "mix.exs" file.
10 | 
11 | # You can configure your application as:
12 | #
13 | #     config :leibniz, key: :value
14 | #
15 | # and access this configuration in your application as:
16 | #
17 | #     Application.get_env(:leibniz, :key)
18 | #
19 | # You can also configure a 3rd-party app:
20 | #
21 | #     config :logger, level: :info
22 | #
23 | 
24 | # It is also possible to import configuration files, relative to this
25 | # directory. For example, you can emulate configuration per environment
26 | # by uncommenting the line below and defining dev.exs, test.exs and such.
27 | # Configuration from the imported file will override the ones defined
28 | # here (which is why it is important to import them last).
29 | #
30 | #     import_config "#{Mix.env}.exs"
--------------------------------------------------------------------------------
/coveralls.json:
--------------------------------------------------------------------------------
1 | {
2 |   "skip_files": [
3 |     "src"
4 |   ]
5 | }
6 | 
--------------------------------------------------------------------------------
/lib/leibniz.ex:
--------------------------------------------------------------------------------
1 | defmodule Leibniz do
2 | 
3 |   @moduledoc """
4 |   Leibniz is a math expression parser and evaluator.
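
  Expressions are tokenized and parsed by the leex/yecc modules under `src/`
  into nested operator tuples, which `eval/2` then walks. As a rough
  illustration of the AST shape (derived from `src/parser.yrl`):

      Leibniz.parse("2 * foo + 1")
      #=> {:ok, {:+, {:*, 2, :foo}, 1}}

  Identifiers such as `foo` come back as atoms and are filled in by passing a
  keyword list to `eval/2`, for example `Leibniz.eval("2 * foo + 1", foo: 3)`.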
5 |   """
6 | 
7 |   @doc ~S"""
8 |   Evaluates a valid math expression, interpolating any given values.
9 | 
10 |   ## Examples
11 | 
12 |       iex> Leibniz.eval("2 * 10 / 2")
13 |       {:ok, 10.0}
14 | 
15 |       iex> Leibniz.eval("2 * foo + bar - baz", foo: 5.3, bar: 10, baz: 3)
16 |       {:ok, 17.6}
17 | 
18 |       iex> Leibniz.eval("2 * x + y")
19 |       {:error, "value expected for the following dependencies: x,y"}
20 |   """
21 | 
22 |   @spec eval(String.t, Keyword.t(number)) :: {:ok, number} | {:error, term}
23 |   def eval(expr, vars \\ []) do
24 |     with {:ok, ast} <- parse(expr),
25 |          :ok <- verify_dependencies(dependencies(ast), Keyword.keys(vars)) do
26 |       {:ok, match(ast, vars)}
27 |     else
28 |       {:error, e} -> {:error, e}
29 |     end
30 |   end
31 | 
32 |   @doc """
33 |   Parses a math expression and returns `{:ok, ast}` or `{:error, reason}`
34 |   without evaluating it.
35 |   """
36 |   def parse(expr) do
37 |     with {:ok, tokens, _} <- expr |> to_charlist() |> :lexer.string(),
38 |          {:ok, ast} <- :parser.parse(tokens) do
39 |       {:ok, ast}
40 |     else
41 |       {:error, e, _} -> {:error, e}
42 |       {:error, e} -> {:error, e}
43 |     end
44 |   end
45 | 
46 |   defp match(token, _vars) when is_number(token), do: token
47 | 
48 |   defp match(token, vars) when is_atom(token) do
49 |     Keyword.get(vars, token)
50 |   end
51 | 
52 |   defp match({:+, lhs, rhs}, vars), do: match(lhs, vars) + match(rhs, vars)
53 |   defp match({:-, lhs, rhs}, vars), do: match(lhs, vars) - match(rhs, vars)
54 |   defp match({:*, lhs, rhs}, vars), do: match(lhs, vars) * match(rhs, vars)
55 |   defp match({:/, lhs, rhs}, vars), do: match(lhs, vars) / match(rhs, vars)
56 |   defp match({:^, lhs, rhs}, vars), do: :math.pow(match(lhs, vars), match(rhs, vars))
57 | 
58 |   # The parser wraps parenthesised sub-expressions in `{:"(", inner}` tuples.
59 |   defp match({:"(", expr}, vars), do: match(expr, vars)
60 | 
61 |   # Collects every identifier referenced in the AST, each listed once.
62 |   defp dependencies(ast), do: ast |> do_dependencies([]) |> Enum.uniq()
63 | 
64 |   defp do_dependencies(node, acc) when is_atom(node), do: [node] ++ acc
65 | 
66 |   defp do_dependencies({:"(", expr}, acc), do: do_dependencies(expr, acc)
67 | 
68 |   defp do_dependencies({_, lhs, rhs}, acc) do
69 |     do_dependencies(lhs, acc) ++ do_dependencies(rhs, acc)
70 |   end
71 | 
72 |   defp do_dependencies(_, acc), do: acc
73 | 
74 |   defp verify_dependencies(required, actual) do
75 |     case required -- actual do
76 |       [] -> :ok
77 |       missing -> {:error, "value expected for the following dependencies: #{Enum.join(missing, ",")}"}
78 |     end
79 |   end
80 | end
81 | 
--------------------------------------------------------------------------------
/mix.exs:
--------------------------------------------------------------------------------
1 | defmodule Leibniz.MixProject do
2 |   use Mix.Project
3 | 
4 |   @desc "Math expression parser and evaluator"
5 | 
6 |   def project do
7 |     [
8 |       app: :leibniz,
9 |       version: "1.0.0",
10 |       elixir: "~> 1.6",
11 |       start_permanent: Mix.env() == :prod,
12 |       deps: deps(),
13 |       test_coverage: [tool: ExCoveralls],
14 |       preferred_cli_env: ["coveralls": :test, "coveralls.json": :test],
15 |       description: @desc,
16 |       package: package()
17 |     ]
18 |   end
19 | 
20 |   def application do
21 |     [
22 |       extra_applications: [:logger]
23 |     ]
24 |   end
25 | 
26 |   defp package() do
27 |     [
28 |       name: "leibniz",
29 |       maintainers: ["Saúl Cabrera"],
30 |       licenses: ["MIT"],
31 |       links: %{"GitHub" => "https://github.com/saulecabrera/leibniz"}
32 |     ]
33 |   end
34 | 
35 |   defp deps do
36 |     [
37 |       {:ex_doc, "~> 0.16", only: :dev, runtime: false},
38 |       {:excoveralls, "~> 0.8", only: :test}
39 |     ]
40 |   end
41 | end
42 | 
--------------------------------------------------------------------------------
/mix.lock:
--------------------------------------------------------------------------------
1 | %{
2 |   "certifi": {:hex, :certifi, "2.3.1", "d0f424232390bf47d82da8478022301c561cf6445b5b5fb6a84d49a9e76d2639", [:rebar3], [{:parse_trans, "3.2.0", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm"},
3 |   "earmark": {:hex,
:earmark, "1.2.5", "4d21980d5d2862a2e13ec3c49ad9ad783ffc7ca5769cf6ff891a4553fbaae761", [:mix], [], "hexpm"}, 4 | "ex_doc": {:hex, :ex_doc, "0.18.3", "f4b0e4a2ec6f333dccf761838a4b253d75e11f714b85ae271c9ae361367897b7", [:mix], [{:earmark, "~> 1.1", [hex: :earmark, repo: "hexpm", optional: false]}], "hexpm"}, 5 | "excoveralls": {:hex, :excoveralls, "0.8.2", "b941a08a1842d7aa629e0bbc969186a4cefdd035bad9fe15d43aaaaaeb8fae36", [:mix], [{:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: false]}, {:hackney, ">= 0.12.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm"}, 6 | "exjsx": {:hex, :exjsx, "4.0.0", "60548841e0212df401e38e63c0078ec57b33e7ea49b032c796ccad8cde794b5c", [:mix], [{:jsx, "~> 2.8.0", [hex: :jsx, repo: "hexpm", optional: false]}], "hexpm"}, 7 | "hackney": {:hex, :hackney, "1.12.1", "8bf2d0e11e722e533903fe126e14d6e7e94d9b7983ced595b75f532e04b7fdc7", [:rebar3], [{:certifi, "2.3.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "5.1.1", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "1.0.2", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.1", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm"}, 8 | "idna": {:hex, :idna, "5.1.1", "cbc3b2fa1645113267cc59c760bafa64b2ea0334635ef06dbac8801e42f7279c", [:rebar3], [{:unicode_util_compat, "0.3.1", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm"}, 9 | "jsx": {:hex, :jsx, "2.8.3", "a05252d381885240744d955fbe3cf810504eb2567164824e19303ea59eef62cf", [:mix, :rebar3], [], "hexpm"}, 10 | "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm"}, 11 | "mimerl": {:hex, :mimerl, "1.0.2", "993f9b0e084083405ed8252b99460c4f0563e41729ab42d9074fd5e52439be88", [:rebar3], [], "hexpm"}, 12 | "parse_trans": {:hex, :parse_trans, "3.2.0", "2adfa4daf80c14dc36f522cf190eb5c4ee3e28008fc6394397c16f62a26258c2", [:rebar3], [], "hexpm"}, 13 | "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.1", "28a4d65b7f59893bc2c7de786dec1e1555bd742d336043fe644ae956c3497fbe", [:make, :rebar], [], "hexpm"}, 14 | "unicode_util_compat": {:hex, :unicode_util_compat, "0.3.1", "a1f612a7b512638634a603c8f401892afbf99b8ce93a45041f8aaca99cadb85e", [:rebar3], [], "hexpm"}, 15 | } 16 | -------------------------------------------------------------------------------- /src/lexer.erl: -------------------------------------------------------------------------------- 1 | -file("/Users/saulcabrera/.asdf/installs/erlang/20.2/lib/parsetools-2.1.6/include/leexinc.hrl", 0). 2 | %% The source of this file is part of leex distribution, as such it 3 | %% has the same Copyright as the other files in the leex 4 | %% distribution. The Copyright is defined in the accompanying file 5 | %% COPYRIGHT. However, the resultant scanner generated by leex is the 6 | %% property of the creator of the scanner and is not covered by that 7 | %% Copyright. 8 | 9 | -module(lexer). 10 | 11 | -export([string/1,string/2,token/2,token/3,tokens/2,tokens/3]). 12 | -export([format_error/1]). 13 | 14 | %% User code. This is placed here to allow extra attributes. 15 | -file("src/lexer.xrl", 25). 16 | 17 | -file("/Users/saulcabrera/.asdf/installs/erlang/20.2/lib/parsetools-2.1.6/include/leexinc.hrl", 14). 18 | 19 | format_error({illegal,S}) -> ["illegal characters ",io_lib:write_string(S)]; 20 | format_error({user,S}) -> S. 21 | 22 | string(String) -> string(String, 1). 
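%% Illustration only (using this project's token set from src/lexer.xrl):
%% lexer:string("2 * foo") returns
%% {ok, [{int,1,2}, {mul_operator,1,'*'}, {identifier,1,foo}], 1}.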
23 | 24 | string(String, Line) -> string(String, Line, String, []). 25 | 26 | %% string(InChars, Line, TokenChars, Tokens) -> 27 | %% {ok,Tokens,Line} | {error,ErrorInfo,Line}. 28 | %% Note the line number going into yystate, L0, is line of token 29 | %% start while line number returned is line of token end. We want line 30 | %% of token start. 31 | 32 | string([], L, [], Ts) -> % No partial tokens! 33 | {ok,yyrev(Ts),L}; 34 | string(Ics0, L0, Tcs, Ts) -> 35 | case yystate(yystate(), Ics0, L0, 0, reject, 0) of 36 | {A,Alen,Ics1,L1} -> % Accepting end state 37 | string_cont(Ics1, L1, yyaction(A, Alen, Tcs, L0), Ts); 38 | {A,Alen,Ics1,L1,_S1} -> % Accepting transistion state 39 | string_cont(Ics1, L1, yyaction(A, Alen, Tcs, L0), Ts); 40 | {reject,_Alen,Tlen,_Ics1,L1,_S1} -> % After a non-accepting state 41 | {error,{L0,?MODULE,{illegal,yypre(Tcs, Tlen+1)}},L1}; 42 | {A,Alen,Tlen,_Ics1,L1,_S1} -> 43 | Tcs1 = yysuf(Tcs, Alen), 44 | L2 = adjust_line(Tlen, Alen, Tcs1, L1), 45 | string_cont(Tcs1, L2, yyaction(A, Alen, Tcs, L0), Ts) 46 | end. 47 | 48 | %% string_cont(RestChars, Line, Token, Tokens) 49 | %% Test for and remove the end token wrapper. Push back characters 50 | %% are prepended to RestChars. 51 | 52 | -dialyzer({nowarn_function, string_cont/4}). 53 | 54 | string_cont(Rest, Line, {token,T}, Ts) -> 55 | string(Rest, Line, Rest, [T|Ts]); 56 | string_cont(Rest, Line, {token,T,Push}, Ts) -> 57 | NewRest = Push ++ Rest, 58 | string(NewRest, Line, NewRest, [T|Ts]); 59 | string_cont(Rest, Line, {end_token,T}, Ts) -> 60 | string(Rest, Line, Rest, [T|Ts]); 61 | string_cont(Rest, Line, {end_token,T,Push}, Ts) -> 62 | NewRest = Push ++ Rest, 63 | string(NewRest, Line, NewRest, [T|Ts]); 64 | string_cont(Rest, Line, skip_token, Ts) -> 65 | string(Rest, Line, Rest, Ts); 66 | string_cont(Rest, Line, {skip_token,Push}, Ts) -> 67 | NewRest = Push ++ Rest, 68 | string(NewRest, Line, NewRest, Ts); 69 | string_cont(_Rest, Line, {error,S}, _Ts) -> 70 | {error,{Line,?MODULE,{user,S}},Line}. 71 | 72 | %% token(Continuation, Chars) -> 73 | %% token(Continuation, Chars, Line) -> 74 | %% {more,Continuation} | {done,ReturnVal,RestChars}. 75 | %% Must be careful when re-entering to append the latest characters to the 76 | %% after characters in an accept. The continuation is: 77 | %% {token,State,CurrLine,TokenChars,TokenLen,TokenLine,AccAction,AccLen} 78 | 79 | token(Cont, Chars) -> token(Cont, Chars, 1). 80 | 81 | token([], Chars, Line) -> 82 | token(yystate(), Chars, Line, Chars, 0, Line, reject, 0); 83 | token({token,State,Line,Tcs,Tlen,Tline,Action,Alen}, Chars, _) -> 84 | token(State, Chars, Line, Tcs ++ Chars, Tlen, Tline, Action, Alen). 85 | 86 | %% token(State, InChars, Line, TokenChars, TokenLen, TokenLine, 87 | %% AcceptAction, AcceptLen) -> 88 | %% {more,Continuation} | {done,ReturnVal,RestChars}. 89 | %% The argument order is chosen to be more efficient. 90 | 91 | token(S0, Ics0, L0, Tcs, Tlen0, Tline, A0, Alen0) -> 92 | case yystate(S0, Ics0, L0, Tlen0, A0, Alen0) of 93 | %% Accepting end state, we have a token. 94 | {A1,Alen1,Ics1,L1} -> 95 | token_cont(Ics1, L1, yyaction(A1, Alen1, Tcs, Tline)); 96 | %% Accepting transition state, can take more chars. 97 | {A1,Alen1,[],L1,S1} -> % Need more chars to check 98 | {more,{token,S1,L1,Tcs,Alen1,Tline,A1,Alen1}}; 99 | {A1,Alen1,Ics1,L1,_S1} -> % Take what we got 100 | token_cont(Ics1, L1, yyaction(A1, Alen1, Tcs, Tline)); 101 | %% After a non-accepting state, maybe reach accept state later. 
102 | {A1,Alen1,Tlen1,[],L1,S1} -> % Need more chars to check 103 | {more,{token,S1,L1,Tcs,Tlen1,Tline,A1,Alen1}}; 104 | {reject,_Alen1,Tlen1,eof,L1,_S1} -> % No token match 105 | %% Check for partial token which is error. 106 | Ret = if Tlen1 > 0 -> {error,{Tline,?MODULE, 107 | %% Skip eof tail in Tcs. 108 | {illegal,yypre(Tcs, Tlen1)}},L1}; 109 | true -> {eof,L1} 110 | end, 111 | {done,Ret,eof}; 112 | {reject,_Alen1,Tlen1,Ics1,L1,_S1} -> % No token match 113 | Error = {Tline,?MODULE,{illegal,yypre(Tcs, Tlen1+1)}}, 114 | {done,{error,Error,L1},Ics1}; 115 | {A1,Alen1,Tlen1,_Ics1,L1,_S1} -> % Use last accept match 116 | Tcs1 = yysuf(Tcs, Alen1), 117 | L2 = adjust_line(Tlen1, Alen1, Tcs1, L1), 118 | token_cont(Tcs1, L2, yyaction(A1, Alen1, Tcs, Tline)) 119 | end. 120 | 121 | %% token_cont(RestChars, Line, Token) 122 | %% If we have a token or error then return done, else if we have a 123 | %% skip_token then continue. 124 | 125 | -dialyzer({nowarn_function, token_cont/3}). 126 | 127 | token_cont(Rest, Line, {token,T}) -> 128 | {done,{ok,T,Line},Rest}; 129 | token_cont(Rest, Line, {token,T,Push}) -> 130 | NewRest = Push ++ Rest, 131 | {done,{ok,T,Line},NewRest}; 132 | token_cont(Rest, Line, {end_token,T}) -> 133 | {done,{ok,T,Line},Rest}; 134 | token_cont(Rest, Line, {end_token,T,Push}) -> 135 | NewRest = Push ++ Rest, 136 | {done,{ok,T,Line},NewRest}; 137 | token_cont(Rest, Line, skip_token) -> 138 | token(yystate(), Rest, Line, Rest, 0, Line, reject, 0); 139 | token_cont(Rest, Line, {skip_token,Push}) -> 140 | NewRest = Push ++ Rest, 141 | token(yystate(), NewRest, Line, NewRest, 0, Line, reject, 0); 142 | token_cont(Rest, Line, {error,S}) -> 143 | {done,{error,{Line,?MODULE,{user,S}},Line},Rest}. 144 | 145 | %% tokens(Continuation, Chars, Line) -> 146 | %% {more,Continuation} | {done,ReturnVal,RestChars}. 147 | %% Must be careful when re-entering to append the latest characters to the 148 | %% after characters in an accept. The continuation is: 149 | %% {tokens,State,CurrLine,TokenChars,TokenLen,TokenLine,Tokens,AccAction,AccLen} 150 | %% {skip_tokens,State,CurrLine,TokenChars,TokenLen,TokenLine,Error,AccAction,AccLen} 151 | 152 | tokens(Cont, Chars) -> tokens(Cont, Chars, 1). 153 | 154 | tokens([], Chars, Line) -> 155 | tokens(yystate(), Chars, Line, Chars, 0, Line, [], reject, 0); 156 | tokens({tokens,State,Line,Tcs,Tlen,Tline,Ts,Action,Alen}, Chars, _) -> 157 | tokens(State, Chars, Line, Tcs ++ Chars, Tlen, Tline, Ts, Action, Alen); 158 | tokens({skip_tokens,State,Line,Tcs,Tlen,Tline,Error,Action,Alen}, Chars, _) -> 159 | skip_tokens(State, Chars, Line, Tcs ++ Chars, Tlen, Tline, Error, Action, Alen). 160 | 161 | %% tokens(State, InChars, Line, TokenChars, TokenLen, TokenLine, Tokens, 162 | %% AcceptAction, AcceptLen) -> 163 | %% {more,Continuation} | {done,ReturnVal,RestChars}. 164 | 165 | tokens(S0, Ics0, L0, Tcs, Tlen0, Tline, Ts, A0, Alen0) -> 166 | case yystate(S0, Ics0, L0, Tlen0, A0, Alen0) of 167 | %% Accepting end state, we have a token. 168 | {A1,Alen1,Ics1,L1} -> 169 | tokens_cont(Ics1, L1, yyaction(A1, Alen1, Tcs, Tline), Ts); 170 | %% Accepting transition state, can take more chars. 171 | {A1,Alen1,[],L1,S1} -> % Need more chars to check 172 | {more,{tokens,S1,L1,Tcs,Alen1,Tline,Ts,A1,Alen1}}; 173 | {A1,Alen1,Ics1,L1,_S1} -> % Take what we got 174 | tokens_cont(Ics1, L1, yyaction(A1, Alen1, Tcs, Tline), Ts); 175 | %% After a non-accepting state, maybe reach accept state later. 
176 | {A1,Alen1,Tlen1,[],L1,S1} -> % Need more chars to check 177 | {more,{tokens,S1,L1,Tcs,Tlen1,Tline,Ts,A1,Alen1}}; 178 | {reject,_Alen1,Tlen1,eof,L1,_S1} -> % No token match 179 | %% Check for partial token which is error, no need to skip here. 180 | Ret = if Tlen1 > 0 -> {error,{Tline,?MODULE, 181 | %% Skip eof tail in Tcs. 182 | {illegal,yypre(Tcs, Tlen1)}},L1}; 183 | Ts == [] -> {eof,L1}; 184 | true -> {ok,yyrev(Ts),L1} 185 | end, 186 | {done,Ret,eof}; 187 | {reject,_Alen1,Tlen1,_Ics1,L1,_S1} -> 188 | %% Skip rest of tokens. 189 | Error = {L1,?MODULE,{illegal,yypre(Tcs, Tlen1+1)}}, 190 | skip_tokens(yysuf(Tcs, Tlen1+1), L1, Error); 191 | {A1,Alen1,Tlen1,_Ics1,L1,_S1} -> 192 | Token = yyaction(A1, Alen1, Tcs, Tline), 193 | Tcs1 = yysuf(Tcs, Alen1), 194 | L2 = adjust_line(Tlen1, Alen1, Tcs1, L1), 195 | tokens_cont(Tcs1, L2, Token, Ts) 196 | end. 197 | 198 | %% tokens_cont(RestChars, Line, Token, Tokens) 199 | %% If we have an end_token or error then return done, else if we have 200 | %% a token then save it and continue, else if we have a skip_token 201 | %% just continue. 202 | 203 | -dialyzer({nowarn_function, tokens_cont/4}). 204 | 205 | tokens_cont(Rest, Line, {token,T}, Ts) -> 206 | tokens(yystate(), Rest, Line, Rest, 0, Line, [T|Ts], reject, 0); 207 | tokens_cont(Rest, Line, {token,T,Push}, Ts) -> 208 | NewRest = Push ++ Rest, 209 | tokens(yystate(), NewRest, Line, NewRest, 0, Line, [T|Ts], reject, 0); 210 | tokens_cont(Rest, Line, {end_token,T}, Ts) -> 211 | {done,{ok,yyrev(Ts, [T]),Line},Rest}; 212 | tokens_cont(Rest, Line, {end_token,T,Push}, Ts) -> 213 | NewRest = Push ++ Rest, 214 | {done,{ok,yyrev(Ts, [T]),Line},NewRest}; 215 | tokens_cont(Rest, Line, skip_token, Ts) -> 216 | tokens(yystate(), Rest, Line, Rest, 0, Line, Ts, reject, 0); 217 | tokens_cont(Rest, Line, {skip_token,Push}, Ts) -> 218 | NewRest = Push ++ Rest, 219 | tokens(yystate(), NewRest, Line, NewRest, 0, Line, Ts, reject, 0); 220 | tokens_cont(Rest, Line, {error,S}, _Ts) -> 221 | skip_tokens(Rest, Line, {Line,?MODULE,{user,S}}). 222 | 223 | %%skip_tokens(InChars, Line, Error) -> {done,{error,Error,Line},Ics}. 224 | %% Skip tokens until an end token, junk everything and return the error. 225 | 226 | skip_tokens(Ics, Line, Error) -> 227 | skip_tokens(yystate(), Ics, Line, Ics, 0, Line, Error, reject, 0). 228 | 229 | %% skip_tokens(State, InChars, Line, TokenChars, TokenLen, TokenLine, Tokens, 230 | %% AcceptAction, AcceptLen) -> 231 | %% {more,Continuation} | {done,ReturnVal,RestChars}. 232 | 233 | skip_tokens(S0, Ics0, L0, Tcs, Tlen0, Tline, Error, A0, Alen0) -> 234 | case yystate(S0, Ics0, L0, Tlen0, A0, Alen0) of 235 | {A1,Alen1,Ics1,L1} -> % Accepting end state 236 | skip_cont(Ics1, L1, yyaction(A1, Alen1, Tcs, Tline), Error); 237 | {A1,Alen1,[],L1,S1} -> % After an accepting state 238 | {more,{skip_tokens,S1,L1,Tcs,Alen1,Tline,Error,A1,Alen1}}; 239 | {A1,Alen1,Ics1,L1,_S1} -> 240 | skip_cont(Ics1, L1, yyaction(A1, Alen1, Tcs, Tline), Error); 241 | {A1,Alen1,Tlen1,[],L1,S1} -> % After a non-accepting state 242 | {more,{skip_tokens,S1,L1,Tcs,Tlen1,Tline,Error,A1,Alen1}}; 243 | {reject,_Alen1,_Tlen1,eof,L1,_S1} -> 244 | {done,{error,Error,L1},eof}; 245 | {reject,_Alen1,Tlen1,_Ics1,L1,_S1} -> 246 | skip_tokens(yysuf(Tcs, Tlen1+1), L1, Error); 247 | {A1,Alen1,Tlen1,_Ics1,L1,_S1} -> 248 | Token = yyaction(A1, Alen1, Tcs, Tline), 249 | Tcs1 = yysuf(Tcs, Alen1), 250 | L2 = adjust_line(Tlen1, Alen1, Tcs1, L1), 251 | skip_cont(Tcs1, L2, Token, Error) 252 | end. 
253 | 254 | %% skip_cont(RestChars, Line, Token, Error) 255 | %% Skip tokens until we have an end_token or error then return done 256 | %% with the original rror. 257 | 258 | -dialyzer({nowarn_function, skip_cont/4}). 259 | 260 | skip_cont(Rest, Line, {token,_T}, Error) -> 261 | skip_tokens(yystate(), Rest, Line, Rest, 0, Line, Error, reject, 0); 262 | skip_cont(Rest, Line, {token,_T,Push}, Error) -> 263 | NewRest = Push ++ Rest, 264 | skip_tokens(yystate(), NewRest, Line, NewRest, 0, Line, Error, reject, 0); 265 | skip_cont(Rest, Line, {end_token,_T}, Error) -> 266 | {done,{error,Error,Line},Rest}; 267 | skip_cont(Rest, Line, {end_token,_T,Push}, Error) -> 268 | NewRest = Push ++ Rest, 269 | {done,{error,Error,Line},NewRest}; 270 | skip_cont(Rest, Line, skip_token, Error) -> 271 | skip_tokens(yystate(), Rest, Line, Rest, 0, Line, Error, reject, 0); 272 | skip_cont(Rest, Line, {skip_token,Push}, Error) -> 273 | NewRest = Push ++ Rest, 274 | skip_tokens(yystate(), NewRest, Line, NewRest, 0, Line, Error, reject, 0); 275 | skip_cont(Rest, Line, {error,_S}, Error) -> 276 | skip_tokens(yystate(), Rest, Line, Rest, 0, Line, Error, reject, 0). 277 | 278 | -compile({nowarn_unused_function, [yyrev/1, yyrev/2, yypre/2, yysuf/2]}). 279 | 280 | yyrev(List) -> lists:reverse(List). 281 | yyrev(List, Tail) -> lists:reverse(List, Tail). 282 | yypre(List, N) -> lists:sublist(List, N). 283 | yysuf(List, N) -> lists:nthtail(N, List). 284 | 285 | %% adjust_line(TokenLength, AcceptLength, Chars, Line) -> NewLine 286 | %% Make sure that newlines in Chars are not counted twice. 287 | %% Line has been updated with respect to newlines in the prefix of 288 | %% Chars consisting of (TokenLength - AcceptLength) characters. 289 | 290 | -compile({nowarn_unused_function, adjust_line/4}). 291 | 292 | adjust_line(N, N, _Cs, L) -> L; 293 | adjust_line(T, A, [$\n|Cs], L) -> 294 | adjust_line(T-1, A, Cs, L-1); 295 | adjust_line(T, A, [_|Cs], L) -> 296 | adjust_line(T-1, A, Cs, L). 297 | 298 | %% yystate() -> InitialState. 299 | %% yystate(State, InChars, Line, CurrTokLen, AcceptAction, AcceptLen) -> 300 | %% {Action, AcceptLen, RestChars, Line} | 301 | %% {Action, AcceptLen, RestChars, Line, State} | 302 | %% {reject, AcceptLen, CurrTokLen, RestChars, Line, State} | 303 | %% {Action, AcceptLen, CurrTokLen, RestChars, Line, State}. 304 | %% Generated state transition functions. The non-accepting end state 305 | %% return signal either an unrecognised character or end of current 306 | %% input. 307 | 308 | -file("src/lexer.erl", 307). 309 | yystate() -> 7. 
310 | 311 | yystate(10, [C|Ics], Line, Tlen, _, _) when C >= 48, C =< 57 -> 312 | yystate(10, Ics, Line, Tlen+1, 5, Tlen); 313 | yystate(10, Ics, Line, Tlen, _, _) -> 314 | {5,Tlen,Ics,Line,10}; 315 | yystate(9, [124|Ics], Line, Tlen, _, _) -> 316 | yystate(9, Ics, Line, Tlen+1, 8, Tlen); 317 | yystate(9, [32|Ics], Line, Tlen, _, _) -> 318 | yystate(9, Ics, Line, Tlen+1, 8, Tlen); 319 | yystate(9, [9|Ics], Line, Tlen, _, _) -> 320 | yystate(9, Ics, Line, Tlen+1, 8, Tlen); 321 | yystate(9, Ics, Line, Tlen, _, _) -> 322 | {8,Tlen,Ics,Line,9}; 323 | yystate(8, [C|Ics], Line, Tlen, Action, Alen) when C >= 48, C =< 57 -> 324 | yystate(10, Ics, Line, Tlen+1, Action, Alen); 325 | yystate(8, Ics, Line, Tlen, Action, Alen) -> 326 | {Action,Alen,Tlen,Ics,Line,8}; 327 | yystate(7, [124|Ics], Line, Tlen, Action, Alen) -> 328 | yystate(9, Ics, Line, Tlen+1, Action, Alen); 329 | yystate(7, [95|Ics], Line, Tlen, Action, Alen) -> 330 | yystate(3, Ics, Line, Tlen+1, Action, Alen); 331 | yystate(7, [94|Ics], Line, Tlen, Action, Alen) -> 332 | yystate(0, Ics, Line, Tlen+1, Action, Alen); 333 | yystate(7, [47|Ics], Line, Tlen, Action, Alen) -> 334 | yystate(2, Ics, Line, Tlen+1, Action, Alen); 335 | yystate(7, [45|Ics], Line, Tlen, Action, Alen) -> 336 | yystate(6, Ics, Line, Tlen+1, Action, Alen); 337 | yystate(7, [43|Ics], Line, Tlen, Action, Alen) -> 338 | yystate(6, Ics, Line, Tlen+1, Action, Alen); 339 | yystate(7, [42|Ics], Line, Tlen, Action, Alen) -> 340 | yystate(2, Ics, Line, Tlen+1, Action, Alen); 341 | yystate(7, [41|Ics], Line, Tlen, Action, Alen) -> 342 | yystate(1, Ics, Line, Tlen+1, Action, Alen); 343 | yystate(7, [40|Ics], Line, Tlen, Action, Alen) -> 344 | yystate(5, Ics, Line, Tlen+1, Action, Alen); 345 | yystate(7, [32|Ics], Line, Tlen, Action, Alen) -> 346 | yystate(9, Ics, Line, Tlen+1, Action, Alen); 347 | yystate(7, [9|Ics], Line, Tlen, Action, Alen) -> 348 | yystate(9, Ics, Line, Tlen+1, Action, Alen); 349 | yystate(7, [C|Ics], Line, Tlen, Action, Alen) when C >= 48, C =< 57 -> 350 | yystate(4, Ics, Line, Tlen+1, Action, Alen); 351 | yystate(7, [C|Ics], Line, Tlen, Action, Alen) when C >= 97, C =< 122 -> 352 | yystate(3, Ics, Line, Tlen+1, Action, Alen); 353 | yystate(7, Ics, Line, Tlen, Action, Alen) -> 354 | {Action,Alen,Tlen,Ics,Line,7}; 355 | yystate(6, Ics, Line, Tlen, _, _) -> 356 | {1,Tlen,Ics,Line}; 357 | yystate(5, Ics, Line, Tlen, _, _) -> 358 | {6,Tlen,Ics,Line}; 359 | yystate(4, [46|Ics], Line, Tlen, _, _) -> 360 | yystate(8, Ics, Line, Tlen+1, 4, Tlen); 361 | yystate(4, [C|Ics], Line, Tlen, _, _) when C >= 48, C =< 57 -> 362 | yystate(4, Ics, Line, Tlen+1, 4, Tlen); 363 | yystate(4, Ics, Line, Tlen, _, _) -> 364 | {4,Tlen,Ics,Line,4}; 365 | yystate(3, [95|Ics], Line, Tlen, _, _) -> 366 | yystate(3, Ics, Line, Tlen+1, 0, Tlen); 367 | yystate(3, [C|Ics], Line, Tlen, _, _) when C >= 97, C =< 122 -> 368 | yystate(3, Ics, Line, Tlen+1, 0, Tlen); 369 | yystate(3, Ics, Line, Tlen, _, _) -> 370 | {0,Tlen,Ics,Line,3}; 371 | yystate(2, Ics, Line, Tlen, _, _) -> 372 | {2,Tlen,Ics,Line}; 373 | yystate(1, Ics, Line, Tlen, _, _) -> 374 | {7,Tlen,Ics,Line}; 375 | yystate(0, Ics, Line, Tlen, _, _) -> 376 | {3,Tlen,Ics,Line}; 377 | yystate(S, Ics, Line, Tlen, Action, Alen) -> 378 | {Action,Alen,Tlen,Ics,Line,S}. 379 | 380 | %% yyaction(Action, TokenLength, TokenChars, TokenLine) -> 381 | %% {token,Token} | {end_token, Token} | skip_token | {error,String}. 382 | %% Generated action function. 
383 | 384 | yyaction(0, TokenLen, YYtcs, TokenLine) -> 385 | TokenChars = yypre(YYtcs, TokenLen), 386 | yyaction_0(TokenChars, TokenLine); 387 | yyaction(1, TokenLen, YYtcs, TokenLine) -> 388 | TokenChars = yypre(YYtcs, TokenLen), 389 | yyaction_1(TokenChars, TokenLine); 390 | yyaction(2, TokenLen, YYtcs, TokenLine) -> 391 | TokenChars = yypre(YYtcs, TokenLen), 392 | yyaction_2(TokenChars, TokenLine); 393 | yyaction(3, TokenLen, YYtcs, TokenLine) -> 394 | TokenChars = yypre(YYtcs, TokenLen), 395 | yyaction_3(TokenChars, TokenLine); 396 | yyaction(4, TokenLen, YYtcs, TokenLine) -> 397 | TokenChars = yypre(YYtcs, TokenLen), 398 | yyaction_4(TokenChars, TokenLine); 399 | yyaction(5, TokenLen, YYtcs, TokenLine) -> 400 | TokenChars = yypre(YYtcs, TokenLen), 401 | yyaction_5(TokenChars, TokenLine); 402 | yyaction(6, TokenLen, YYtcs, TokenLine) -> 403 | TokenChars = yypre(YYtcs, TokenLen), 404 | yyaction_6(TokenChars, TokenLine); 405 | yyaction(7, TokenLen, YYtcs, TokenLine) -> 406 | TokenChars = yypre(YYtcs, TokenLen), 407 | yyaction_7(TokenChars, TokenLine); 408 | yyaction(8, _, _, _) -> 409 | yyaction_8(); 410 | yyaction(_, _, _, _) -> error. 411 | 412 | -compile({inline,yyaction_0/2}). 413 | -file("src/lexer.xrl", 12). 414 | yyaction_0(TokenChars, TokenLine) -> 415 | { token, { identifier, TokenLine, list_to_atom (TokenChars) } } . 416 | 417 | -compile({inline,yyaction_1/2}). 418 | -file("src/lexer.xrl", 13). 419 | yyaction_1(TokenChars, TokenLine) -> 420 | { token, { add_operator, TokenLine, list_to_atom (TokenChars) } } . 421 | 422 | -compile({inline,yyaction_2/2}). 423 | -file("src/lexer.xrl", 14). 424 | yyaction_2(TokenChars, TokenLine) -> 425 | { token, { mul_operator, TokenLine, list_to_atom (TokenChars) } } . 426 | 427 | -compile({inline,yyaction_3/2}). 428 | -file("src/lexer.xrl", 15). 429 | yyaction_3(TokenChars, TokenLine) -> 430 | { token, { pow_operator, TokenLine, list_to_atom (TokenChars) } } . 431 | 432 | -compile({inline,yyaction_4/2}). 433 | -file("src/lexer.xrl", 16). 434 | yyaction_4(TokenChars, TokenLine) -> 435 | { token, { int, TokenLine, list_to_integer (TokenChars) } } . 436 | 437 | -compile({inline,yyaction_5/2}). 438 | -file("src/lexer.xrl", 17). 439 | yyaction_5(TokenChars, TokenLine) -> 440 | { token, { float, TokenLine, list_to_float (TokenChars) } } . 441 | 442 | -compile({inline,yyaction_6/2}). 443 | -file("src/lexer.xrl", 18). 444 | yyaction_6(TokenChars, TokenLine) -> 445 | { token, { open_paren, TokenLine, list_to_atom (TokenChars) } } . 446 | 447 | -compile({inline,yyaction_7/2}). 448 | -file("src/lexer.xrl", 19). 449 | yyaction_7(TokenChars, TokenLine) -> 450 | { token, { close_paren, TokenLine, list_to_atom (TokenChars) } } . 451 | 452 | -compile({inline,yyaction_8/0}). 453 | -file("src/lexer.xrl", 20). 454 | yyaction_8() -> 455 | skip_token . 456 | 457 | -file("/Users/saulcabrera/.asdf/installs/erlang/20.2/lib/parsetools-2.1.6/include/leexinc.hrl", 313). 458 | -------------------------------------------------------------------------------- /src/lexer.xrl: -------------------------------------------------------------------------------- 1 | Definitions. 2 | 3 | CHAR = [a-z_] 4 | ADD_OPERATOR = (\+|-) 5 | MUL_OPERATOR = (\*|/) 6 | POW_OPERATOR = \^ 7 | DIGIT = [0-9] 8 | OPEN_PAREN = \( 9 | CLOSE_PAREN = \) 10 | WHITE = [\s|\t] 11 | 12 | Rules. 13 | 14 | {CHAR}+ : {token, {identifier, TokenLine, list_to_atom(TokenChars)}}. 15 | {ADD_OPERATOR} : {token, {add_operator, TokenLine, list_to_atom(TokenChars)}}. 
16 | {MUL_OPERATOR} : {token, {mul_operator, TokenLine, list_to_atom(TokenChars)}}. 17 | {POW_OPERATOR} : {token, {pow_operator, TokenLine, list_to_atom(TokenChars)}}. 18 | {DIGIT}+ : {token, {int, TokenLine, list_to_integer(TokenChars)}}. 19 | {DIGIT}+\.{DIGIT}+ : {token, {float, TokenLine, list_to_float(TokenChars)}}. 20 | {OPEN_PAREN} : {token, {open_paren, TokenLine, list_to_atom(TokenChars)}}. 21 | {CLOSE_PAREN} : {token, {close_paren, TokenLine, list_to_atom(TokenChars)}}. 22 | {WHITE}+ : skip_token. 23 | 24 | 25 | Erlang code. 26 | -------------------------------------------------------------------------------- /src/parser.erl: -------------------------------------------------------------------------------- 1 | -module(parser). 2 | -export([parse/1, parse_and_scan/1, format_error/1]). 3 | -file("src/parser.yrl", 24). 4 | 5 | unwrap({_, _, V}) -> V. 6 | 7 | -file("/Users/saulcabrera/.asdf/installs/erlang/20.2/lib/parsetools-2.1.6/include/yeccpre.hrl", 0). 8 | %% 9 | %% %CopyrightBegin% 10 | %% 11 | %% Copyright Ericsson AB 1996-2017. All Rights Reserved. 12 | %% 13 | %% Licensed under the Apache License, Version 2.0 (the "License"); 14 | %% you may not use this file except in compliance with the License. 15 | %% You may obtain a copy of the License at 16 | %% 17 | %% http://www.apache.org/licenses/LICENSE-2.0 18 | %% 19 | %% Unless required by applicable law or agreed to in writing, software 20 | %% distributed under the License is distributed on an "AS IS" BASIS, 21 | %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 22 | %% See the License for the specific language governing permissions and 23 | %% limitations under the License. 24 | %% 25 | %% %CopyrightEnd% 26 | %% 27 | 28 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 29 | % The parser generator will insert appropriate declarations before this line.% 30 | 31 | -type yecc_ret() :: {'error', _} | {'ok', _}. 32 | 33 | -spec parse(Tokens :: list()) -> yecc_ret(). 34 | parse(Tokens) -> 35 | yeccpars0(Tokens, {no_func, no_line}, 0, [], []). 36 | 37 | -spec parse_and_scan({function() | {atom(), atom()}, [_]} 38 | | {atom(), atom(), [_]}) -> yecc_ret(). 39 | parse_and_scan({F, A}) -> 40 | yeccpars0([], {{F, A}, no_line}, 0, [], []); 41 | parse_and_scan({M, F, A}) -> 42 | Arity = length(A), 43 | yeccpars0([], {{fun M:F/Arity, A}, no_line}, 0, [], []). 44 | 45 | -spec format_error(any()) -> [char() | list()]. 46 | format_error(Message) -> 47 | case io_lib:deep_char_list(Message) of 48 | true -> 49 | Message; 50 | _ -> 51 | io_lib:write(Message) 52 | end. 53 | 54 | %% To be used in grammar files to throw an error message to the parser 55 | %% toplevel. Doesn't have to be exported! 56 | -compile({nowarn_unused_function, return_error/2}). 57 | -spec return_error(integer(), any()) -> no_return(). 58 | return_error(Line, Message) -> 59 | throw({error, {Line, ?MODULE, Message}}). 60 | 61 | -define(CODE_VERSION, "1.4"). 62 | 63 | yeccpars0(Tokens, Tzr, State, States, Vstack) -> 64 | try yeccpars1(Tokens, Tzr, State, States, Vstack) 65 | catch 66 | error: Error -> 67 | Stacktrace = erlang:get_stacktrace(), 68 | try yecc_error_type(Error, Stacktrace) of 69 | Desc -> 70 | erlang:raise(error, {yecc_bug, ?CODE_VERSION, Desc}, 71 | Stacktrace) 72 | catch _:_ -> erlang:raise(error, Error, Stacktrace) 73 | end; 74 | %% Probably thrown from return_error/2: 75 | throw: {error, {_Line, ?MODULE, _M}} = Error -> 76 | Error 77 | end. 
78 | 79 | yecc_error_type(function_clause, [{?MODULE,F,ArityOrArgs,_} | _]) -> 80 | case atom_to_list(F) of 81 | "yeccgoto_" ++ SymbolL -> 82 | {ok,[{atom,_,Symbol}],_} = erl_scan:string(SymbolL), 83 | State = case ArityOrArgs of 84 | [S,_,_,_,_,_,_] -> S; 85 | _ -> state_is_unknown 86 | end, 87 | {Symbol, State, missing_in_goto_table} 88 | end. 89 | 90 | yeccpars1([Token | Tokens], Tzr, State, States, Vstack) -> 91 | yeccpars2(State, element(1, Token), States, Vstack, Token, Tokens, Tzr); 92 | yeccpars1([], {{F, A},_Line}, State, States, Vstack) -> 93 | case apply(F, A) of 94 | {ok, Tokens, Endline} -> 95 | yeccpars1(Tokens, {{F, A}, Endline}, State, States, Vstack); 96 | {eof, Endline} -> 97 | yeccpars1([], {no_func, Endline}, State, States, Vstack); 98 | {error, Descriptor, _Endline} -> 99 | {error, Descriptor} 100 | end; 101 | yeccpars1([], {no_func, no_line}, State, States, Vstack) -> 102 | Line = 999999, 103 | yeccpars2(State, '$end', States, Vstack, yecc_end(Line), [], 104 | {no_func, Line}); 105 | yeccpars1([], {no_func, Endline}, State, States, Vstack) -> 106 | yeccpars2(State, '$end', States, Vstack, yecc_end(Endline), [], 107 | {no_func, Endline}). 108 | 109 | %% yeccpars1/7 is called from generated code. 110 | %% 111 | %% When using the {includefile, Includefile} option, make sure that 112 | %% yeccpars1/7 can be found by parsing the file without following 113 | %% include directives. yecc will otherwise assume that an old 114 | %% yeccpre.hrl is included (one which defines yeccpars1/5). 115 | yeccpars1(State1, State, States, Vstack, Token0, [Token | Tokens], Tzr) -> 116 | yeccpars2(State, element(1, Token), [State1 | States], 117 | [Token0 | Vstack], Token, Tokens, Tzr); 118 | yeccpars1(State1, State, States, Vstack, Token0, [], {{_F,_A}, _Line}=Tzr) -> 119 | yeccpars1([], Tzr, State, [State1 | States], [Token0 | Vstack]); 120 | yeccpars1(State1, State, States, Vstack, Token0, [], {no_func, no_line}) -> 121 | Line = yecctoken_end_location(Token0), 122 | yeccpars2(State, '$end', [State1 | States], [Token0 | Vstack], 123 | yecc_end(Line), [], {no_func, Line}); 124 | yeccpars1(State1, State, States, Vstack, Token0, [], {no_func, Line}) -> 125 | yeccpars2(State, '$end', [State1 | States], [Token0 | Vstack], 126 | yecc_end(Line), [], {no_func, Line}). 127 | 128 | %% For internal use only. 129 | yecc_end({Line,_Column}) -> 130 | {'$end', Line}; 131 | yecc_end(Line) -> 132 | {'$end', Line}. 133 | 134 | yecctoken_end_location(Token) -> 135 | try erl_anno:end_location(element(2, Token)) of 136 | undefined -> yecctoken_location(Token); 137 | Loc -> Loc 138 | catch _:_ -> yecctoken_location(Token) 139 | end. 140 | 141 | -compile({nowarn_unused_function, yeccerror/1}). 142 | yeccerror(Token) -> 143 | Text = yecctoken_to_string(Token), 144 | Location = yecctoken_location(Token), 145 | {error, {Location, ?MODULE, ["syntax error before: ", Text]}}. 146 | 147 | -compile({nowarn_unused_function, yecctoken_to_string/1}). 148 | yecctoken_to_string(Token) -> 149 | try erl_scan:text(Token) of 150 | undefined -> yecctoken2string(Token); 151 | Txt -> Txt 152 | catch _:_ -> yecctoken2string(Token) 153 | end. 154 | 155 | yecctoken_location(Token) -> 156 | try erl_scan:location(Token) 157 | catch _:_ -> element(2, Token) 158 | end. 159 | 160 | -compile({nowarn_unused_function, yecctoken2string/1}). 
161 | yecctoken2string({atom, _, A}) -> io_lib:write_atom(A); 162 | yecctoken2string({integer,_,N}) -> io_lib:write(N); 163 | yecctoken2string({float,_,F}) -> io_lib:write(F); 164 | yecctoken2string({char,_,C}) -> io_lib:write_char(C); 165 | yecctoken2string({var,_,V}) -> io_lib:format("~s", [V]); 166 | yecctoken2string({string,_,S}) -> io_lib:write_string(S); 167 | yecctoken2string({reserved_symbol, _, A}) -> io_lib:write(A); 168 | yecctoken2string({_Cat, _, Val}) -> io_lib:format("~tp", [Val]); 169 | yecctoken2string({dot, _}) -> "'.'"; 170 | yecctoken2string({'$end', _}) -> []; 171 | yecctoken2string({Other, _}) when is_atom(Other) -> 172 | io_lib:write_atom(Other); 173 | yecctoken2string(Other) -> 174 | io_lib:format("~tp", [Other]). 175 | 176 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 177 | 178 | 179 | 180 | -file("src/parser.erl", 180). 181 | 182 | -dialyzer({nowarn_function, yeccpars2/7}). 183 | yeccpars2(0=S, Cat, Ss, Stack, T, Ts, Tzr) -> 184 | yeccpars2_0(S, Cat, Ss, Stack, T, Ts, Tzr); 185 | %% yeccpars2(1=S, Cat, Ss, Stack, T, Ts, Tzr) -> 186 | %% yeccpars2_1(S, Cat, Ss, Stack, T, Ts, Tzr); 187 | yeccpars2(2=S, Cat, Ss, Stack, T, Ts, Tzr) -> 188 | yeccpars2_2(S, Cat, Ss, Stack, T, Ts, Tzr); 189 | yeccpars2(3=S, Cat, Ss, Stack, T, Ts, Tzr) -> 190 | yeccpars2_3(S, Cat, Ss, Stack, T, Ts, Tzr); 191 | yeccpars2(4=S, Cat, Ss, Stack, T, Ts, Tzr) -> 192 | yeccpars2_4(S, Cat, Ss, Stack, T, Ts, Tzr); 193 | yeccpars2(5=S, Cat, Ss, Stack, T, Ts, Tzr) -> 194 | yeccpars2_0(S, Cat, Ss, Stack, T, Ts, Tzr); 195 | %% yeccpars2(6=S, Cat, Ss, Stack, T, Ts, Tzr) -> 196 | %% yeccpars2_6(S, Cat, Ss, Stack, T, Ts, Tzr); 197 | yeccpars2(7=S, Cat, Ss, Stack, T, Ts, Tzr) -> 198 | yeccpars2_0(S, Cat, Ss, Stack, T, Ts, Tzr); 199 | yeccpars2(8=S, Cat, Ss, Stack, T, Ts, Tzr) -> 200 | yeccpars2_8(S, Cat, Ss, Stack, T, Ts, Tzr); 201 | yeccpars2(9=S, Cat, Ss, Stack, T, Ts, Tzr) -> 202 | yeccpars2_0(S, Cat, Ss, Stack, T, Ts, Tzr); 203 | yeccpars2(10=S, Cat, Ss, Stack, T, Ts, Tzr) -> 204 | yeccpars2_0(S, Cat, Ss, Stack, T, Ts, Tzr); 205 | %% yeccpars2(11=S, Cat, Ss, Stack, T, Ts, Tzr) -> 206 | %% yeccpars2_11(S, Cat, Ss, Stack, T, Ts, Tzr); 207 | %% yeccpars2(12=S, Cat, Ss, Stack, T, Ts, Tzr) -> 208 | %% yeccpars2_12(S, Cat, Ss, Stack, T, Ts, Tzr); 209 | %% yeccpars2(13=S, Cat, Ss, Stack, T, Ts, Tzr) -> 210 | %% yeccpars2_13(S, Cat, Ss, Stack, T, Ts, Tzr); 211 | yeccpars2(Other, _, _, _, _, _, _) -> 212 | erlang:error({yecc_bug,"1.4",{missing_state_in_action_table, Other}}). 213 | 214 | -dialyzer({nowarn_function, yeccpars2_0/7}). 215 | yeccpars2_0(S, float, Ss, Stack, T, Ts, Tzr) -> 216 | yeccpars1(S, 2, Ss, Stack, T, Ts, Tzr); 217 | yeccpars2_0(S, identifier, Ss, Stack, T, Ts, Tzr) -> 218 | yeccpars1(S, 3, Ss, Stack, T, Ts, Tzr); 219 | yeccpars2_0(S, int, Ss, Stack, T, Ts, Tzr) -> 220 | yeccpars1(S, 4, Ss, Stack, T, Ts, Tzr); 221 | yeccpars2_0(S, open_paren, Ss, Stack, T, Ts, Tzr) -> 222 | yeccpars1(S, 5, Ss, Stack, T, Ts, Tzr); 223 | yeccpars2_0(_, _, _, _, T, _, _) -> 224 | yeccerror(T). 225 | 226 | -dialyzer({nowarn_function, yeccpars2_1/7}). 
227 | yeccpars2_1(_S, '$end', _Ss, Stack, _T, _Ts, _Tzr) -> 228 | {ok, hd(Stack)}; 229 | yeccpars2_1(S, add_operator, Ss, Stack, T, Ts, Tzr) -> 230 | yeccpars1(S, 7, Ss, Stack, T, Ts, Tzr); 231 | yeccpars2_1(S, mul_operator, Ss, Stack, T, Ts, Tzr) -> 232 | yeccpars1(S, 9, Ss, Stack, T, Ts, Tzr); 233 | yeccpars2_1(S, pow_operator, Ss, Stack, T, Ts, Tzr) -> 234 | yeccpars1(S, 10, Ss, Stack, T, Ts, Tzr); 235 | yeccpars2_1(_, _, _, _, T, _, _) -> 236 | yeccerror(T). 237 | 238 | yeccpars2_2(_S, Cat, Ss, Stack, T, Ts, Tzr) -> 239 | NewStack = yeccpars2_2_(Stack), 240 | yeccgoto_expr(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr). 241 | 242 | yeccpars2_3(_S, Cat, Ss, Stack, T, Ts, Tzr) -> 243 | NewStack = yeccpars2_3_(Stack), 244 | yeccgoto_expr(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr). 245 | 246 | yeccpars2_4(_S, Cat, Ss, Stack, T, Ts, Tzr) -> 247 | NewStack = yeccpars2_4_(Stack), 248 | yeccgoto_expr(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr). 249 | 250 | %% yeccpars2_5: see yeccpars2_0 251 | 252 | -dialyzer({nowarn_function, yeccpars2_6/7}). 253 | yeccpars2_6(S, add_operator, Ss, Stack, T, Ts, Tzr) -> 254 | yeccpars1(S, 7, Ss, Stack, T, Ts, Tzr); 255 | yeccpars2_6(S, close_paren, Ss, Stack, T, Ts, Tzr) -> 256 | yeccpars1(S, 8, Ss, Stack, T, Ts, Tzr); 257 | yeccpars2_6(S, mul_operator, Ss, Stack, T, Ts, Tzr) -> 258 | yeccpars1(S, 9, Ss, Stack, T, Ts, Tzr); 259 | yeccpars2_6(S, pow_operator, Ss, Stack, T, Ts, Tzr) -> 260 | yeccpars1(S, 10, Ss, Stack, T, Ts, Tzr); 261 | yeccpars2_6(_, _, _, _, T, _, _) -> 262 | yeccerror(T). 263 | 264 | %% yeccpars2_7: see yeccpars2_0 265 | 266 | yeccpars2_8(_S, Cat, Ss, Stack, T, Ts, Tzr) -> 267 | [_,_|Nss] = Ss, 268 | NewStack = yeccpars2_8_(Stack), 269 | yeccgoto_expr(hd(Nss), Cat, Nss, NewStack, T, Ts, Tzr). 270 | 271 | %% yeccpars2_9: see yeccpars2_0 272 | 273 | %% yeccpars2_10: see yeccpars2_0 274 | 275 | yeccpars2_11(_S, Cat, Ss, Stack, T, Ts, Tzr) -> 276 | [_,_|Nss] = Ss, 277 | NewStack = yeccpars2_11_(Stack), 278 | yeccgoto_expr(hd(Nss), Cat, Nss, NewStack, T, Ts, Tzr). 279 | 280 | yeccpars2_12(S, pow_operator, Ss, Stack, T, Ts, Tzr) -> 281 | yeccpars1(S, 10, Ss, Stack, T, Ts, Tzr); 282 | yeccpars2_12(_S, Cat, Ss, Stack, T, Ts, Tzr) -> 283 | [_,_|Nss] = Ss, 284 | NewStack = yeccpars2_12_(Stack), 285 | yeccgoto_expr(hd(Nss), Cat, Nss, NewStack, T, Ts, Tzr). 286 | 287 | yeccpars2_13(S, mul_operator, Ss, Stack, T, Ts, Tzr) -> 288 | yeccpars1(S, 9, Ss, Stack, T, Ts, Tzr); 289 | yeccpars2_13(S, pow_operator, Ss, Stack, T, Ts, Tzr) -> 290 | yeccpars1(S, 10, Ss, Stack, T, Ts, Tzr); 291 | yeccpars2_13(_S, Cat, Ss, Stack, T, Ts, Tzr) -> 292 | [_,_|Nss] = Ss, 293 | NewStack = yeccpars2_13_(Stack), 294 | yeccgoto_expr(hd(Nss), Cat, Nss, NewStack, T, Ts, Tzr). 295 | 296 | -dialyzer({nowarn_function, yeccgoto_expr/7}). 297 | yeccgoto_expr(0, Cat, Ss, Stack, T, Ts, Tzr) -> 298 | yeccpars2_1(1, Cat, Ss, Stack, T, Ts, Tzr); 299 | yeccgoto_expr(5, Cat, Ss, Stack, T, Ts, Tzr) -> 300 | yeccpars2_6(6, Cat, Ss, Stack, T, Ts, Tzr); 301 | yeccgoto_expr(7, Cat, Ss, Stack, T, Ts, Tzr) -> 302 | yeccpars2_13(13, Cat, Ss, Stack, T, Ts, Tzr); 303 | yeccgoto_expr(9, Cat, Ss, Stack, T, Ts, Tzr) -> 304 | yeccpars2_12(12, Cat, Ss, Stack, T, Ts, Tzr); 305 | yeccgoto_expr(10=_S, Cat, Ss, Stack, T, Ts, Tzr) -> 306 | yeccpars2_11(_S, Cat, Ss, Stack, T, Ts, Tzr). 307 | 308 | -compile({inline,yeccpars2_2_/1}). 309 | -file("src/parser.yrl", 17). 310 | yeccpars2_2_(__Stack0) -> 311 | [__1 | __Stack] = __Stack0, 312 | [begin 313 | unwrap ( __1 ) 314 | end | __Stack]. 
315 | 316 | -compile({inline,yeccpars2_3_/1}). 317 | -file("src/parser.yrl", 18). 318 | yeccpars2_3_(__Stack0) -> 319 | [__1 | __Stack] = __Stack0, 320 | [begin 321 | unwrap ( __1 ) 322 | end | __Stack]. 323 | 324 | -compile({inline,yeccpars2_4_/1}). 325 | -file("src/parser.yrl", 16). 326 | yeccpars2_4_(__Stack0) -> 327 | [__1 | __Stack] = __Stack0, 328 | [begin 329 | unwrap ( __1 ) 330 | end | __Stack]. 331 | 332 | -compile({inline,yeccpars2_8_/1}). 333 | -file("src/parser.yrl", 11). 334 | yeccpars2_8_(__Stack0) -> 335 | [__3,__2,__1 | __Stack] = __Stack0, 336 | [begin 337 | { unwrap ( __1 ) , __2 } 338 | end | __Stack]. 339 | 340 | -compile({inline,yeccpars2_11_/1}). 341 | -file("src/parser.yrl", 14). 342 | yeccpars2_11_(__Stack0) -> 343 | [__3,__2,__1 | __Stack] = __Stack0, 344 | [begin 345 | { unwrap ( __2 ) , __1 , __3 } 346 | end | __Stack]. 347 | 348 | -compile({inline,yeccpars2_12_/1}). 349 | -file("src/parser.yrl", 13). 350 | yeccpars2_12_(__Stack0) -> 351 | [__3,__2,__1 | __Stack] = __Stack0, 352 | [begin 353 | { unwrap ( __2 ) , __1 , __3 } 354 | end | __Stack]. 355 | 356 | -compile({inline,yeccpars2_13_/1}). 357 | -file("src/parser.yrl", 12). 358 | yeccpars2_13_(__Stack0) -> 359 | [__3,__2,__1 | __Stack] = __Stack0, 360 | [begin 361 | { unwrap ( __2 ) , __1 , __3 } 362 | end | __Stack]. 363 | 364 | 365 | -file("src/parser.yrl", 27). 366 | -------------------------------------------------------------------------------- /src/parser.yrl: -------------------------------------------------------------------------------- 1 | Nonterminals 2 | expr. 3 | 4 | Terminals 5 | identifier add_operator mul_operator pow_operator int float open_paren close_paren. 6 | 7 | Rootsymbol expr. 8 | 9 | Left 100 open_paren. 10 | Left 200 add_operator. 11 | Left 300 mul_operator. 12 | Left 400 pow_operator. 13 | 14 | 15 | expr -> open_paren expr close_paren : {unwrap('$1'), '$2'}. 16 | expr -> expr add_operator expr : {unwrap('$2'), '$1', '$3'}. 17 | expr -> expr mul_operator expr : {unwrap('$2'), '$1', '$3'}. 18 | expr -> expr pow_operator expr : {unwrap('$2'), '$1', '$3'}. 19 | 20 | expr -> int : unwrap('$1'). 21 | expr -> float : unwrap('$1'). 22 | expr -> identifier : unwrap('$1'). 23 | 24 | Erlang code. 25 | 26 | unwrap({_, _, V}) -> V. 
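
%% Illustration only (not part of the grammar itself): with the precedences
%% declared above, "1 + 2 * x" parses to {'+', 1, {'*', 2, x}} and
%% "(1 + 2) * x" parses to {'*', {'(', {'+', 1, 2}}, x}.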
27 | -------------------------------------------------------------------------------- /test/leibniz_test.exs: -------------------------------------------------------------------------------- 1 | defmodule LeibnizTest do 2 | use ExUnit.Case 3 | doctest Leibniz 4 | 5 | test "eval/2 evaluates a valid math expression and returns the result" do 6 | assert Leibniz.eval("2 + 2") == {:ok, 4} 7 | assert Leibniz.eval("2 * 2") == {:ok, 4} 8 | assert Leibniz.eval("2 / 2") == {:ok, 1} 9 | assert Leibniz.eval("2 - 2") == {:ok, 0} 10 | end 11 | 12 | test "eval/2 returns {:error, reason} when an invalid expression is given" do 13 | assert {:error, _} = Leibniz.eval("2 *") 14 | assert {:error, _} = Leibniz.eval("4 % 2") 15 | end 16 | 17 | test "eval/2 evaluates a valid math expression interpolating variables" do 18 | assert Leibniz.eval("2 * foo + bar", foo: 5, bar: 7) == {:ok, 17} 19 | 20 | variable = 100 21 | assert Leibniz.eval("2 * variable", variable: variable) == {:ok, 200} 22 | end 23 | 24 | test "eval/2 returns {:error, reason} when a math expression with missing dependencies is given" do 25 | reason = "value expected for the following dependencies: foo,bar" 26 | assert {:error, ^reason} = Leibniz.eval("2 * foo * bar * baz", baz: 3) 27 | end 28 | end 29 | -------------------------------------------------------------------------------- /test/test_helper.exs: -------------------------------------------------------------------------------- 1 | ExUnit.start() 2 | --------------------------------------------------------------------------------