├── README ├── tests ├── RunTests.hs └── Language │ └── SQL │ └── SimpleSQL │ ├── CreateIndex.hs │ ├── EmptyStatement.hs │ ├── Oracle.hs │ ├── MySQL.hs │ ├── QueryExprs.hs │ ├── CustomDialect.hs │ ├── QueryExprParens.hs │ ├── FullQueries.hs │ ├── Expectations.hs │ ├── TestTypes.hs │ ├── Odbc.hs │ ├── Tests.hs │ ├── TestRunners.hs │ ├── TableRefs.hs │ ├── SQL2011Bits.hs │ ├── QueryExprComponents.hs │ ├── GroupBy.hs │ ├── SQL2011AccessControl.hs │ ├── Postgres.hs │ ├── ErrorMessages.hs │ ├── LexerTests.hs │ ├── SQL2011DataManipulation.hs │ └── ScalarExprs.hs ├── .gitignore ├── shell.nix ├── stack.yaml.lock ├── website ├── tpch21.sql ├── template1.pandoc ├── render-test-cases.cabal ├── main.css ├── main1.css ├── template.pandoc ├── RenderTestCases.hs ├── supported_sql.md └── index.md ├── stack.yaml ├── LICENSE ├── TODO ├── examples └── SimpleSQLParserTool.hs ├── Makefile ├── simple-sql-parser.cabal ├── release_checklist ├── changelog └── Language └── SQL └── SimpleSQL └── Dialect.hs /README: -------------------------------------------------------------------------------- 1 | A parser for SQL in Haskell. 2 | 3 | Homepage: http://jakewheat.github.io/simple-sql-parser/latest 4 | 5 | Contact: jakewheat@tutanota.com 6 | -------------------------------------------------------------------------------- /tests/RunTests.hs: -------------------------------------------------------------------------------- 1 | 2 | 3 | import Test.Hspec (hspec) 4 | 5 | 6 | import Language.SQL.SimpleSQL.Tests 7 | 8 | main :: IO () 9 | main = hspec tests 10 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /dist/ 2 | /logchanges 3 | /cabal.sandbox.config 4 | /.cabal-sandbox/ 5 | /build/ 6 | /.stack-work/ 7 | /.ghc.environment.* 8 | dist-newstyle/ 9 | /cabal.project.local 10 | .emacs.* 11 | /expected-parse-errors/actual 12 | -------------------------------------------------------------------------------- /shell.nix: -------------------------------------------------------------------------------- 1 | with import { }; 2 | stdenv.mkDerivation rec { 3 | name = "env"; 4 | env = buildEnv { name = name; paths = buildInputs; }; 5 | buildInputs = [ 6 | ghc 7 | cabal-install 8 | glibcLocales 9 | gnumake 10 | ]; 11 | shellHook = "export LANG=en_GB.UTF-8"; 12 | } 13 | 14 | -------------------------------------------------------------------------------- /stack.yaml.lock: -------------------------------------------------------------------------------- 1 | # This file was autogenerated by Stack. 2 | # You should not edit this file by hand. 
3 | # For more information, please see the documentation at: 4 | # https://docs.haskellstack.org/en/stable/lock_files 5 | 6 | packages: 7 | - completed: 8 | hackage: megaparsec-9.6.1@sha256:8d8f8ee5aca5d5c16aa4219afd13687ceab8be640f40ba179359f2b42a628241,3323 9 | pantry-tree: 10 | sha256: ac654040a2402a733496678905ee17198bf628d75032dd025d595bd329739af8 11 | size: 1545 12 | original: 13 | hackage: megaparsec-9.6.1 14 | snapshots: 15 | - completed: 16 | sha256: c6274f9587d6bf40b6aaa7d1092684c37a2d8bb1e0df999ae0e2b755db15682d 17 | size: 720026 18 | url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/22/37.yaml 19 | original: lts-22.37 20 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/CreateIndex.hs: -------------------------------------------------------------------------------- 1 | 2 | {-# LANGUAGE OverloadedStrings #-} 3 | module Language.SQL.SimpleSQL.CreateIndex where 4 | 5 | import Language.SQL.SimpleSQL.Syntax 6 | import Language.SQL.SimpleSQL.TestTypes 7 | import Language.SQL.SimpleSQL.TestRunners 8 | import Data.Text (Text) 9 | 10 | createIndexTests :: TestItem 11 | createIndexTests = Group "create index tests" 12 | [s "create index a on tbl(c1)" 13 | $ CreateIndex False [nm "a"] [nm "tbl"] [nm "c1"] 14 | ,s "create index a.b on sc.tbl (c1, c2)" 15 | $ CreateIndex False [nm "a", nm "b"] [nm "sc", nm "tbl"] [nm "c1", nm "c2"] 16 | ,s "create unique index a on tbl(c1)" 17 | $ CreateIndex True [nm "a"] [nm "tbl"] [nm "c1"] 18 | ] 19 | where 20 | nm = Name Nothing 21 | s :: HasCallStack => Text -> Statement -> TestItem 22 | s src ast = testStatement ansi2011 src ast 23 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/EmptyStatement.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | module Language.SQL.SimpleSQL.EmptyStatement where 3 | 4 | import Language.SQL.SimpleSQL.Syntax 5 | import Language.SQL.SimpleSQL.TestTypes 6 | import Language.SQL.SimpleSQL.TestRunners 7 | import Data.Text (Text) 8 | 9 | emptyStatementTests :: TestItem 10 | emptyStatementTests = Group "empty statement" 11 | [ s ";" EmptyStatement 12 | , t ";" [EmptyStatement] 13 | , t ";;" [EmptyStatement, EmptyStatement] 14 | , t ";;;" [EmptyStatement, EmptyStatement, EmptyStatement] 15 | , s "/* comment */ ;" EmptyStatement 16 | , t "" [] 17 | , t "/* comment */" [] 18 | , t "/* comment */ ;" [EmptyStatement] 19 | , t "/* comment */ ; /* comment */ ;" 20 | [EmptyStatement, EmptyStatement] 21 | , t "/* comment */ ; /* comment */ ; /* comment */ ;" 22 | [EmptyStatement, EmptyStatement, EmptyStatement] 23 | ] 24 | where 25 | s :: HasCallStack => Text -> Statement -> TestItem 26 | s src a = testStatement ansi2011 src a 27 | t :: HasCallStack => Text -> [Statement] -> TestItem 28 | t src a = testStatements ansi2011 src a 29 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/Oracle.hs: -------------------------------------------------------------------------------- 1 | 2 | -- Tests for oracle dialect parsing 3 | 4 | {-# LANGUAGE OverloadedStrings #-} 5 | module Language.SQL.SimpleSQL.Oracle (oracleTests) where 6 | 7 | import Language.SQL.SimpleSQL.TestTypes 8 | import Language.SQL.SimpleSQL.Syntax 9 | import Language.SQL.SimpleSQL.TestRunners 10 | 11 | oracleTests :: TestItem 12 | oracleTests = Group "oracle dialect" 13 | 
[oracleLobUnits] 14 | 15 | 16 | oracleLobUnits :: TestItem 17 | oracleLobUnits = Group "oracleLobUnits" 18 | [testScalarExpr oracle "cast (a as varchar2(3 char))" 19 | $ Cast (Iden [Name Nothing "a"]) ( 20 | PrecLengthTypeName [Name Nothing "varchar2"] 3 Nothing (Just PrecCharacters)) 21 | ,testScalarExpr oracle "cast (a as varchar2(3 byte))" 22 | $ Cast (Iden [Name Nothing "a"]) ( 23 | PrecLengthTypeName [Name Nothing "varchar2"] 3 Nothing (Just PrecOctets)) 24 | ,testStatement oracle 25 | "create table t (a varchar2(55 BYTE));" 26 | $ CreateTable [Name Nothing "t"] 27 | [TableColumnDef $ ColumnDef (Name Nothing "a") 28 | (Just (PrecLengthTypeName [Name Nothing "varchar2"] 55 Nothing (Just PrecOctets))) 29 | []] 30 | False 31 | ] 32 | 33 | -------------------------------------------------------------------------------- /website/tpch21.sql: -------------------------------------------------------------------------------- 1 | select 2 | s_name, 3 | count(*) as numwait 4 | from 5 | supplier, 6 | lineitem l1, 7 | orders, 8 | nation 9 | where 10 | s_suppkey = l1.l_suppkey 11 | and o_orderkey = l1.l_orderkey 12 | and o_orderstatus = 'F' 13 | and l1.l_receiptdate > l1.l_commitdate 14 | and exists ( 15 | select 16 | * 17 | from 18 | lineitem l2 19 | where 20 | l2.l_orderkey = l1.l_orderkey 21 | and l2.l_suppkey <> l1.l_suppkey 22 | ) 23 | and not exists ( 24 | select 25 | * 26 | from 27 | lineitem l3 28 | where 29 | l3.l_orderkey = l1.l_orderkey 30 | and l3.l_suppkey <> l1.l_suppkey 31 | and l3.l_receiptdate > l3.l_commitdate 32 | ) 33 | and s_nationkey = n_nationkey 34 | and n_name = 'INDIA' 35 | group by 36 | s_name 37 | order by 38 | numwait desc, 39 | s_name 40 | fetch first 100 rows only; 41 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/MySQL.hs: -------------------------------------------------------------------------------- 1 | 2 | -- Tests for mysql dialect parsing 3 | 4 | {-# LANGUAGE OverloadedStrings #-} 5 | module Language.SQL.SimpleSQL.MySQL (mySQLTests) where 6 | 7 | import Language.SQL.SimpleSQL.TestTypes 8 | import Language.SQL.SimpleSQL.Syntax 9 | import Language.SQL.SimpleSQL.TestRunners 10 | 11 | mySQLTests :: TestItem 12 | mySQLTests = Group "mysql dialect" 13 | [backtickQuotes 14 | ,limit] 15 | 16 | {- 17 | backtick quotes 18 | 19 | limit syntax 20 | 21 | [LIMIT {[offset,] row_count | row_count OFFSET offset}] 22 | -} 23 | 24 | backtickQuotes :: TestItem 25 | backtickQuotes = Group "backtickQuotes" 26 | [testScalarExpr mysql "`test`" $ Iden [Name (Just ("`","`")) "test"] 27 | ,testParseScalarExprFails ansi2011 "`test`"] 28 | 29 | limit :: TestItem 30 | limit = Group "queries" 31 | [testQueryExpr mysql "select * from t limit 5" 32 | $ toQueryExpr $ sel {msFetchFirst = Just (NumLit "5")} 33 | ,testParseQueryExprFails mysql "select a from t fetch next 10 rows only;" 34 | ,testParseQueryExprFails ansi2011 "select * from t limit 5"] 35 | where 36 | sel = makeSelect 37 | {msSelectList = [(Star, Nothing)] 38 | ,msFrom = [TRSimple [Name Nothing "t"]] 39 | } 40 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/QueryExprs.hs: -------------------------------------------------------------------------------- 1 | 2 | {- 3 | These are the tests for the queryExprs parsing which parses multiple 4 | query expressions from one string. 
5 | -} 6 | 7 | {-# LANGUAGE OverloadedStrings #-} 8 | module Language.SQL.SimpleSQL.QueryExprs (queryExprsTests) where 9 | 10 | import Language.SQL.SimpleSQL.TestTypes 11 | import Language.SQL.SimpleSQL.Syntax 12 | import Language.SQL.SimpleSQL.TestRunners 13 | import Data.Text (Text) 14 | 15 | queryExprsTests :: TestItem 16 | queryExprsTests = Group "query exprs" 17 | [q "select 1" [ms] 18 | ,q "select 1;" [ms] 19 | ,q "select 1;select 1" [ms,ms] 20 | ,q " select 1;select 1; " [ms,ms] 21 | ,q "SELECT CURRENT_TIMESTAMP;" 22 | [SelectStatement $ toQueryExpr $ makeSelect 23 | {msSelectList = [(Iden [Name Nothing "CURRENT_TIMESTAMP"],Nothing)]}] 24 | ,q "SELECT \"CURRENT_TIMESTAMP\";" 25 | [SelectStatement $ toQueryExpr $ makeSelect 26 | {msSelectList = [(Iden [Name (Just ("\"","\"")) "CURRENT_TIMESTAMP"],Nothing)]}] 27 | ] 28 | where 29 | ms = SelectStatement $ toQueryExpr $ makeSelect {msSelectList = [(NumLit "1",Nothing)]} 30 | q :: HasCallStack => Text -> [Statement] -> TestItem 31 | q src ast = testStatements ansi2011 src ast 32 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/CustomDialect.hs: -------------------------------------------------------------------------------- 1 | 2 | {-# LANGUAGE OverloadedStrings #-} 3 | module Language.SQL.SimpleSQL.CustomDialect (customDialectTests) where 4 | 5 | import Language.SQL.SimpleSQL.TestTypes 6 | import Language.SQL.SimpleSQL.TestRunners 7 | import Data.Text (Text) 8 | 9 | customDialectTests :: TestItem 10 | customDialectTests = Group "custom dialect tests" $ 11 | [q ansi2011 "SELECT a b" 12 | ,q noDateKeyword "SELECT DATE('2000-01-01')" 13 | ,q noDateKeyword "SELECT DATE" 14 | ,q dateApp "SELECT DATE('2000-01-01')" 15 | ,q dateIden "SELECT DATE" 16 | ,f ansi2011 "SELECT DATE('2000-01-01')" 17 | ,f ansi2011 "SELECT DATE" 18 | ,f dateApp "SELECT DATE" 19 | ,f dateIden "SELECT DATE('2000-01-01')" 20 | -- show this never being allowed as an alias 21 | ,f ansi2011 "SELECT a date" 22 | ,f dateApp "SELECT a date" 23 | ,f dateIden "SELECT a date" 24 | ] 25 | where 26 | noDateKeyword = ansi2011 {diKeywords = filter (/="date") (diKeywords ansi2011)} 27 | dateIden = ansi2011 {diIdentifierKeywords = "date" : diIdentifierKeywords ansi2011} 28 | dateApp = ansi2011 {diAppKeywords = "date" : diAppKeywords ansi2011} 29 | q :: HasCallStack => Dialect -> Text -> TestItem 30 | q d src = testParseQueryExpr d src 31 | f :: HasCallStack => Dialect -> Text -> TestItem 32 | f d src = testParseQueryExprFails d src 33 | -------------------------------------------------------------------------------- /stack.yaml: -------------------------------------------------------------------------------- 1 | # For more information, see: http://docs.haskellstack.org/en/stable/yaml_configuration.html 2 | 3 | # Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2) 4 | resolver: lts-22.37 5 | 6 | # Local packages, usually specified by relative directory name 7 | packages: 8 | - '.' 
9 | 10 | # Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3) 11 | # todo: remove this once the version of megaparsec on Stack lts has the same api 12 | # as the latest one as far as simple sql parser uses it (in this case, lts-22.5 13 | # has megaparsec 9.5, which doesn't support the megaparsec parser state feature 14 | # in the same way 9.6 does) 15 | extra-deps: [megaparsec-9.6.1] 16 | 17 | # Override default flag values for local packages and extra-deps 18 | flags: {} 19 | 20 | # Extra package databases containing global packages 21 | extra-package-dbs: [] 22 | 23 | # Control whether we use the GHC we find on the path 24 | # system-ghc: true 25 | 26 | # Require a specific version of stack, using version ranges 27 | # require-stack-version: -any # Default 28 | # require-stack-version: >= 1.0.0 29 | 30 | # Override the architecture used by stack, especially useful on Windows 31 | # arch: i386 32 | # arch: x86_64 33 | 34 | # Extra directories used by stack for building 35 | # extra-include-dirs: [/path/to/dir] 36 | # extra-lib-dirs: [/path/to/dir] 37 | 38 | # Allow a newer minor version of GHC than the snapshot specifies 39 | # compiler-check: newer-minor 40 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2013 - 2024, Jake Wheat and the simple-sql-parser contributors. 2 | 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | * Redistributions in binary form must reproduce the above 12 | copyright notice, this list of conditions and the following 13 | disclaimer in the documentation and/or other materials provided 14 | with the distribution. 15 | 16 | * Neither the name of Jake Wheat nor the names of other 17 | contributors may be used to endorse or promote products derived 18 | from this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 21 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 22 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 23 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 24 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 26 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 27 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 28 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
31 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/QueryExprParens.hs: -------------------------------------------------------------------------------- 1 | 2 | {-# LANGUAGE OverloadedStrings #-} 3 | {-# LANGUAGE QuasiQuotes #-} 4 | module Language.SQL.SimpleSQL.QueryExprParens (queryExprParensTests) where 5 | 6 | import Language.SQL.SimpleSQL.TestTypes 7 | import Language.SQL.SimpleSQL.Syntax 8 | import Language.SQL.SimpleSQL.TestRunners 9 | import Data.Text (Text) 10 | import qualified Text.RawString.QQ as R 11 | 12 | queryExprParensTests :: TestItem 13 | queryExprParensTests = Group "query expr parens" 14 | [q "(select * from t)" $ QueryExprParens $ ms "t" 15 | ,q "select * from t except (select * from u except select * from v)" 16 | $ (ms "t") `sexcept` QueryExprParens (ms "u" `sexcept` ms "v") 17 | 18 | ,q "(select * from t except select * from u) except select * from v" 19 | $ QueryExprParens (ms "t" `sexcept` ms "u") `sexcept` ms "v" 20 | 21 | ,q [R.r| 22 | select * from t 23 | union 24 | with a as (select * from u) 25 | select * from a 26 | |] 27 | $ ms "t" `sunion` with [("a", ms "u")] (ms "a") 28 | 29 | ,q [R.r| 30 | select * from t 31 | union 32 | (with a as (select * from u) 33 | select * from a) 34 | |] 35 | $ ms "t" `sunion` QueryExprParens (with [("a", ms "u")] (ms "a")) 36 | ] 37 | where 38 | q :: HasCallStack => Text -> QueryExpr -> TestItem 39 | q src ast = testQueryExpr ansi2011 src ast 40 | ms t = toQueryExpr $ makeSelect 41 | {msSelectList = [(Star,Nothing)] 42 | ,msFrom = [TRSimple [Name Nothing t]]} 43 | sexcept = so Except 44 | sunion = so Union 45 | so op a b = QueryExprSetOp a op SQDefault Respectively b 46 | with es s = 47 | With False (flip map es $ \(n,sn) -> (Alias (Name Nothing n) Nothing ,sn)) s -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/FullQueries.hs: -------------------------------------------------------------------------------- 1 | 2 | -- Some tests for parsing full queries. 
3 | 4 | {-# LANGUAGE OverloadedStrings #-} 5 | module Language.SQL.SimpleSQL.FullQueries (fullQueriesTests) where 6 | 7 | import Language.SQL.SimpleSQL.TestTypes 8 | import Language.SQL.SimpleSQL.Syntax 9 | import Language.SQL.SimpleSQL.TestRunners 10 | import Data.Text (Text) 11 | 12 | fullQueriesTests :: TestItem 13 | fullQueriesTests = Group "queries" $ 14 | [q "select count(*) from t" 15 | $ toQueryExpr $ makeSelect 16 | {msSelectList = [(App [Name Nothing "count"] [Star], Nothing)] 17 | ,msFrom = [TRSimple [Name Nothing "t"]] 18 | } 19 | 20 | ,q "select a, sum(c+d) as s\n\ 21 | \ from t,u\n\ 22 | \ where a > 5\n\ 23 | \ group by a\n\ 24 | \ having count(1) > 5\n\ 25 | \ order by s" 26 | $ toQueryExpr $ makeSelect 27 | {msSelectList = [(Iden [Name Nothing "a"], Nothing) 28 | ,(App [Name Nothing "sum"] 29 | [BinOp (Iden [Name Nothing "c"]) 30 | [Name Nothing "+"] (Iden [Name Nothing "d"])] 31 | ,Just $ Name Nothing "s")] 32 | ,msFrom = [TRSimple [Name Nothing "t"], TRSimple [Name Nothing "u"]] 33 | ,msWhere = Just $ BinOp (Iden [Name Nothing "a"]) [Name Nothing ">"] (NumLit "5") 34 | ,msGroupBy = [SimpleGroup $ Iden [Name Nothing "a"]] 35 | ,msHaving = Just $ BinOp (App [Name Nothing "count"] [NumLit "1"]) 36 | [Name Nothing ">"] (NumLit "5") 37 | ,msOrderBy = [SortSpec (Iden [Name Nothing "s"]) DirDefault NullsOrderDefault] 38 | } 39 | 40 | ] 41 | where 42 | q :: HasCallStack => Text -> QueryExpr -> TestItem 43 | q src a = testQueryExpr ansi2011 src a 44 | -------------------------------------------------------------------------------- /website/template1.pandoc: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | $for(author-meta)$ 8 | 9 | $endfor$ 10 | $if(date-meta)$ 11 | 12 | $endif$ 13 | $if(keywords)$ 14 | 15 | $endif$ 16 | $if(description-meta)$ 17 | 18 | $endif$ 19 | $if(title-prefix)$$title-prefix$ – $endif$$pagetitle$ 20 | 23 | $for(css)$ 24 | 25 | $endfor$ 26 | $for(header-includes)$ 27 | $header-includes$ 28 | $endfor$ 29 | $if(math)$ 30 | $if(mathjax)$ 31 | 32 | $endif$ 33 | $math$ 34 | $endif$ 35 | 36 | 37 | $if(toc)$ 38 | 44 | $endif$ 45 | 46 | 47 | $for(include-before)$ 48 | $include-before$ 49 | $endfor$ 50 | $if(title)$ 51 |
<header id="title-block-header">
52 | <h1 class="title">$title$</h1>
53 | $if(subtitle)$
54 | <p class="subtitle">$subtitle$</p>
55 | $endif$
56 | $for(author)$
57 | <p class="author">$author$</p>
58 | $endfor$
59 | $if(date)$
60 | <p class="date">$date$</p>
61 | $endif$
62 | $if(abstract)$
63 | <div class="abstract">
64 | <div class="abstract-title">$abstract-title$</div>
65 | $abstract$
66 | </div>
67 | $endif$
68 | </header>
69 | $endif$ 70 | $body$ 71 | $for(include-after)$ 72 | $include-after$ 73 | $endfor$ 74 | 75 | 76 | 77 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/Expectations.hs: -------------------------------------------------------------------------------- 1 | 2 | module Language.SQL.SimpleSQL.Expectations 3 | (shouldParseA 4 | ,shouldParseL 5 | ,shouldParse1 6 | ,shouldFail 7 | ,shouldSucceed 8 | ,shouldFailWith 9 | ) where 10 | 11 | 12 | import Language.SQL.SimpleSQL.Parse 13 | import qualified Language.SQL.SimpleSQL.Lex as Lex 14 | 15 | import qualified Data.Text as T 16 | import Data.Text (Text) 17 | 18 | import Test.Hspec.Expectations 19 | (Expectation 20 | ,HasCallStack 21 | ,expectationFailure 22 | ) 23 | 24 | import Test.Hspec 25 | (shouldBe 26 | ) 27 | 28 | shouldParseA :: (HasCallStack,Eq a, Show a) => Either ParseError a -> a -> Expectation 29 | shouldParseA = shouldParse1 (T.unpack . prettyError) 30 | 31 | shouldParseL :: (HasCallStack,Eq a, Show a) => Either Lex.ParseError a -> a -> Expectation 32 | shouldParseL = shouldParse1 (T.unpack . Lex.prettyError) 33 | 34 | shouldParse1 :: (HasCallStack, Show a, Eq a) => 35 | (e -> String) 36 | -> Either e a 37 | -> a 38 | -> Expectation 39 | shouldParse1 prettyErr r v = case r of 40 | Left e -> 41 | expectationFailure $ 42 | "expected: " 43 | ++ show v 44 | ++ "\nbut parsing failed with error:\n" 45 | ++ prettyErr e 46 | Right x -> x `shouldBe` v 47 | 48 | shouldFail :: (HasCallStack, Show a) => Either e a -> Expectation 49 | shouldFail r = case r of 50 | Left _ -> (1 :: Int) `shouldBe` 1 51 | Right a -> expectationFailure $ "expected parse failure, but succeeded with " <> show a 52 | 53 | shouldFailWith :: (HasCallStack, Show a) => (e -> Text) -> Either e a -> Text -> Expectation 54 | shouldFailWith p r e = case r of 55 | Left e1 -> p e1 `shouldBe` e 56 | Right a -> expectationFailure $ "expected parse failure, but succeeded with " <> show a 57 | 58 | shouldSucceed :: (HasCallStack) => (e -> String) -> Either e a -> Expectation 59 | shouldSucceed pe r = case r of 60 | Left e -> expectationFailure $ "expected parse success, but got: " <> pe e 61 | Right _ -> (1 :: Int) `shouldBe` 1 62 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/TestTypes.hs: -------------------------------------------------------------------------------- 1 | 2 | {- 3 | This is the types used to define the tests as pure data. See the 4 | Tests.hs module for the 'interpreter'. 5 | -} 6 | 7 | module Language.SQL.SimpleSQL.TestTypes 8 | (TestItem(..) 9 | ,module Language.SQL.SimpleSQL.Dialect 10 | ) where 11 | 12 | import Language.SQL.SimpleSQL.Syntax 13 | import Language.SQL.SimpleSQL.Lex (Token) 14 | import Language.SQL.SimpleSQL.Dialect 15 | 16 | import Test.Hspec (SpecWith) 17 | 18 | 19 | import Data.Text (Text) 20 | 21 | {- 22 | TODO: maybe make the dialect args into [dialect], then each test 23 | checks all the dialects mentioned work, and all the dialects not 24 | mentioned give a parse error. Not sure if this will be too awkward due 25 | to lots of tricky exceptions/variationsx. 
26 | 27 | The test items are designed to allow code to grab all the examples 28 | in easily usable data types, but since hspec has this neat feature 29 | where it will give a source location for a test failure, each testitem 30 | apart from group already has the SpecWith attached to run that test, 31 | that way we can attach the source location to each test item 32 | -} 33 | 34 | data TestItem = Group Text [TestItem] 35 | | TestScalarExpr Dialect Text ScalarExpr (SpecWith ()) 36 | | TestQueryExpr Dialect Text QueryExpr (SpecWith ()) 37 | | TestStatement Dialect Text Statement (SpecWith ()) 38 | | TestStatements Dialect Text [Statement] (SpecWith ()) 39 | 40 | {- 41 | this just checks the sql parses without error, mostly just a 42 | intermediate when I'm too lazy to write out the parsed AST. These 43 | should all be TODO to convert to a testqueryexpr test. 44 | -} 45 | 46 | | ParseQueryExpr Dialect Text (SpecWith ()) 47 | 48 | -- check that the string given fails to parse 49 | 50 | | ParseQueryExprFails Dialect Text (SpecWith ()) 51 | | ParseScalarExprFails Dialect Text (SpecWith ()) 52 | | LexTest Dialect Text [Token] (SpecWith ()) 53 | | LexFails Dialect Text (SpecWith ()) 54 | | GeneralParseFailTest Text Text (SpecWith ()) 55 | | GoldenErrorTest Text [(Text,Text,Text)] (SpecWith ()) 56 | 57 | -------------------------------------------------------------------------------- /website/render-test-cases.cabal: -------------------------------------------------------------------------------- 1 | cabal-version: 2.2 2 | 3 | name: simple-sql-parser 4 | version: 0.8.0 5 | 6 | executable RenderTestCases 7 | main-is: RenderTestCases.hs 8 | hs-source-dirs: .,..,../tests 9 | Build-Depends: base >=4 && <5, 10 | text, 11 | megaparsec, 12 | prettyprinter, 13 | parser-combinators, 14 | mtl, 15 | containers, 16 | hspec, 17 | hspec-megaparsec, 18 | pretty-show, 19 | hspec-expectations, 20 | raw-strings-qq, 21 | hspec-golden, 22 | filepath, 23 | default-language: Haskell2010 24 | ghc-options: -Wall -O0 25 | 26 | other-modules: 27 | Language.SQL.SimpleSQL.CreateIndex 28 | Language.SQL.SimpleSQL.CustomDialect 29 | Language.SQL.SimpleSQL.Dialect 30 | Language.SQL.SimpleSQL.EmptyStatement 31 | Language.SQL.SimpleSQL.FullQueries 32 | Language.SQL.SimpleSQL.GroupBy 33 | Language.SQL.SimpleSQL.Lex 34 | Language.SQL.SimpleSQL.LexerTests 35 | Language.SQL.SimpleSQL.MySQL 36 | Language.SQL.SimpleSQL.Odbc 37 | Language.SQL.SimpleSQL.Oracle 38 | Language.SQL.SimpleSQL.Parse 39 | Language.SQL.SimpleSQL.Postgres 40 | Language.SQL.SimpleSQL.Pretty 41 | Language.SQL.SimpleSQL.QueryExprComponents 42 | Language.SQL.SimpleSQL.QueryExprs 43 | Language.SQL.SimpleSQL.QueryExprParens 44 | Language.SQL.SimpleSQL.SQL2011AccessControl 45 | Language.SQL.SimpleSQL.SQL2011Bits 46 | Language.SQL.SimpleSQL.SQL2011DataManipulation 47 | Language.SQL.SimpleSQL.SQL2011Queries 48 | Language.SQL.SimpleSQL.SQL2011Schema 49 | Language.SQL.SimpleSQL.ScalarExprs 50 | Language.SQL.SimpleSQL.Syntax 51 | Language.SQL.SimpleSQL.TableRefs 52 | Language.SQL.SimpleSQL.TestTypes 53 | Language.SQL.SimpleSQL.Tests 54 | Language.SQL.SimpleSQL.Tpch 55 | Language.SQL.SimpleSQL.Expectations 56 | Language.SQL.SimpleSQL.TestRunners 57 | Language.SQL.SimpleSQL.ErrorMessages 58 | -------------------------------------------------------------------------------- /website/main.css: -------------------------------------------------------------------------------- 1 | 2 | body { 3 | margin: 0 auto; 4 | 5 | hyphens: auto; 6 | overflow-wrap: break-word; 7 | text-rendering: 
optimizeLegibility; 8 | font-kerning: normal; 9 | color:#000000; 10 | 11 | margin-left: 5em; 12 | margin-right: 5em; 13 | margin-bottom: 5em; 14 | margin-top: 2em; 15 | 16 | left:auto; 17 | right:0; 18 | margin-right:30em; 19 | 20 | max-width: 120ex; 21 | 22 | } 23 | 24 | pre { 25 | padding: 1em; 26 | background:#fafafa; 27 | } 28 | 29 | code { 30 | padding: 0; 31 | margin: 0; 32 | font-size:15px; 33 | } 34 | 35 | #TOC { 36 | position:fixed; 37 | width:25em; 38 | left:0; 39 | top:0; 40 | z-index:1000; 41 | height:100%; 42 | overflow:auto; 43 | 44 | left:auto; 45 | right:0; 46 | 47 | border-top: 1px solid #858585; 48 | border-right-width: 0; 49 | border-left: 1px solid #858585; 50 | 51 | margin-top: 0 !important; 52 | background: #fafafa; 53 | 54 | border-top-width: 0 !important; 55 | border-bottom-width: 0 !important; 56 | 57 | padding-top: 0em; 58 | padding-right: 1em; 59 | padding-bottom: 1.25em; 60 | padding-left: 1em; 61 | margin: 0; 62 | 63 | font-size: .9em; 64 | font-family:"Noto Serif","Open Sans","DejaVu Sans",sans-serif; 65 | font-style: normal; 66 | color: #000000; 67 | font-weight: 400; 68 | 69 | } 70 | 71 | 72 | #TOC li { 73 | list-style: none; 74 | line-height:1.3334; 75 | margin-top:.3334em 76 | } 77 | 78 | #TOC ul { 79 | padding-left: 1.3em; 80 | 81 | } 82 | #TOC a:not(:hover) { 83 | text-decoration: none; 84 | } 85 | 86 | h1,h2,h3,h4,h5,h6 { 87 | font-family:"Open Sans","DejaVu Sans",sans-serif; 88 | font-weight:300; 89 | font-style:normal; 90 | color:#000000; 91 | text-rendering:optimizeLegibility; 92 | margin-top:1em; 93 | margin-bottom:.5em; 94 | line-height:1.0125em 95 | } 96 | 97 | h1 { 98 | border-top: 1px solid #858585; 99 | padding-top:1em; 100 | } 101 | 102 | .title { 103 | color:#000000; 104 | font-size: 3em; 105 | border-top: none; 106 | padding-top:0em; 107 | } 108 | 109 | a { 110 | font-weight:400; 111 | font-style:normal; 112 | color: #05117f; 113 | } 114 | 115 | hr{border:solid #858585;border-width:1px 0 0;clear:both;margin:1.25em 0 1.1875em;height:0} 116 | 117 | -------------------------------------------------------------------------------- /website/main1.css: -------------------------------------------------------------------------------- 1 | 2 | body { 3 | margin: 0 auto; 4 | 5 | hyphens: auto; 6 | overflow-wrap: break-word; 7 | text-rendering: optimizeLegibility; 8 | font-kerning: normal; 9 | color:#000000; 10 | 11 | margin-left: 5em; 12 | margin-right: 5em; 13 | margin-bottom: 5em; 14 | margin-top: 2em; 15 | 16 | /*left:auto; 17 | right:0; 18 | margin-right:30em;*/ 19 | 20 | /*max-width: 120ex;*/ 21 | 22 | } 23 | 24 | 25 | 26 | pre { 27 | padding: 1em; 28 | background:#fafafa; 29 | } 30 | 31 | code { 32 | padding: 0; 33 | margin: 0; 34 | font-size:15px; 35 | } 36 | 37 | #TOC { 38 | /*position:fixed; 39 | width:25em; 40 | left:0; 41 | top:0; 42 | z-index:1000; 43 | height:100%; 44 | overflow:auto; 45 | 46 | left:auto; 47 | right:0; 48 | 49 | border-top: 1px solid #858585; 50 | border-right-width: 0; 51 | border-left: 1px solid #858585; 52 | 53 | margin-top: 0 !important; 54 | background: #fafafa; 55 | 56 | border-top-width: 0 !important; 57 | border-bottom-width: 0 !important; 58 | 59 | padding-top: 0em; 60 | padding-right: 1em; 61 | padding-bottom: 1.25em; 62 | padding-left: 1em; 63 | margin: 0;*/ 64 | 65 | font-size: .9em; 66 | font-family:"Noto Serif","Open Sans","DejaVu Sans",sans-serif; 67 | font-style: normal; 68 | color: #000000; 69 | font-weight: 400; 70 | 71 | } 72 | 73 | 74 | #TOC li { 75 | list-style: none; 76 | line-height:1.3334; 77 | 
margin-top:.3334em 78 | } 79 | 80 | #TOC ul { 81 | padding-left: 1.3em; 82 | 83 | } 84 | #TOC a:not(:hover) { 85 | text-decoration: none; 86 | } 87 | 88 | h1,h2,h3,h4,h5,h6 { 89 | font-family:"Open Sans","DejaVu Sans",sans-serif; 90 | font-weight:300; 91 | font-style:normal; 92 | color:#000000; 93 | text-rendering:optimizeLegibility; 94 | margin-top:1em; 95 | margin-bottom:.5em; 96 | line-height:1.0125em 97 | } 98 | 99 | /*h1 { 100 | border-top: 1px solid #858585; 101 | padding-top:1em; 102 | }*/ 103 | 104 | .title { 105 | color:#000000; 106 | font-size: 3em; 107 | border-top: none; 108 | padding-top:0em; 109 | } 110 | 111 | a { 112 | font-weight:400; 113 | font-style:normal; 114 | color: #05117f; 115 | } 116 | 117 | hr{border:solid #858585;border-width:1px 0 0;clear:both;margin:1.25em 0 1.1875em;height:0} 118 | 119 | table { 120 | width: 100%; 121 | border-spacing: 0; 122 | border-collapse: collapse; 123 | } 124 | 125 | td { 126 | width: 50%; 127 | vertical-align: top; 128 | border:solid #c5c5c5; 129 | border-width:1px; 130 | } 131 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/Odbc.hs: -------------------------------------------------------------------------------- 1 | 2 | {-# LANGUAGE OverloadedStrings #-} 3 | module Language.SQL.SimpleSQL.Odbc (odbcTests) where 4 | 5 | import Language.SQL.SimpleSQL.TestTypes 6 | import Language.SQL.SimpleSQL.Syntax 7 | import Language.SQL.SimpleSQL.TestRunners 8 | import Data.Text (Text) 9 | 10 | odbcTests :: TestItem 11 | odbcTests = Group "odbc" [ 12 | Group "datetime" [ 13 | e "{d '2000-01-01'}" (OdbcLiteral OLDate "2000-01-01") 14 | ,e "{t '12:00:01.1'}" (OdbcLiteral OLTime "12:00:01.1") 15 | ,e "{ts '2000-01-01 12:00:01.1'}" 16 | (OdbcLiteral OLTimestamp "2000-01-01 12:00:01.1") 17 | ] 18 | ,Group "functions" [ 19 | e "{fn CHARACTER_LENGTH(string_exp)}" 20 | $ OdbcFunc (ap "CHARACTER_LENGTH" [iden "string_exp"]) 21 | ,e "{fn EXTRACT(day from t)}" 22 | $ OdbcFunc (SpecialOpK [Name Nothing "extract"] (Just $ Iden [Name Nothing "day"]) [("from", Iden [Name Nothing "t"])]) 23 | ,e "{fn now()}" 24 | $ OdbcFunc (ap "now" []) 25 | ,e "{fn CONVERT('2000-01-01', SQL_DATE)}" 26 | $ OdbcFunc (ap "CONVERT" 27 | [StringLit "'" "'" "2000-01-01" 28 | ,iden "SQL_DATE"]) 29 | ,e "{fn CONVERT({fn CURDATE()}, SQL_DATE)}" 30 | $ OdbcFunc (ap "CONVERT" 31 | [OdbcFunc (ap "CURDATE" []) 32 | ,iden "SQL_DATE"]) 33 | ] 34 | ,Group "outer join" [ 35 | q 36 | "select * from {oj t1 left outer join t2 on expr}" 37 | $ toQueryExpr $ makeSelect 38 | {msSelectList = [(Star,Nothing)] 39 | ,msFrom = [TROdbc $ TRJoin (TRSimple [Name Nothing "t1"]) False JLeft (TRSimple [Name Nothing "t2"]) 40 | (Just $ JoinOn $ Iden [Name Nothing "expr"])]}] 41 | ,Group "check parsing bugs" [ 42 | q 43 | "select {fn CONVERT(cint,SQL_BIGINT)} from t;" 44 | $ toQueryExpr $ makeSelect 45 | {msSelectList = [(OdbcFunc (ap "CONVERT" 46 | [iden "cint" 47 | ,iden "SQL_BIGINT"]), Nothing)] 48 | ,msFrom = [TRSimple [Name Nothing "t"]]}] 49 | ] 50 | where 51 | e :: HasCallStack => Text -> ScalarExpr -> TestItem 52 | e src ast = testScalarExpr ansi2011{diOdbc = True} src ast 53 | 54 | q :: HasCallStack => Text -> QueryExpr -> TestItem 55 | q src ast = testQueryExpr ansi2011{diOdbc = True} src ast 56 | 57 | --tsql = ParseProcSql defaultParseFlags {pfDialect=sqlServerDialect} 58 | ap n = App [Name Nothing n] 59 | iden n = Iden [Name Nothing n] 60 | 61 | -------------------------------------------------------------------------------- 
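The ODBC test cases above go through the same public API that the test runners in this repo use: parseScalarExpr with the diOdbc flag enabled on the ansi2011 dialect. The following standalone sketch is not a file from the repository; it only reuses names that appear verbatim elsewhere in this dump (parseScalarExpr and prettyError from Language.SQL.SimpleSQL.Parse, ansi2011 and diOdbc from Language.SQL.SimpleSQL.Dialect) to show one of those ODBC inputs being parsed directly, outside the hspec harness.

{-# LANGUAGE OverloadedStrings #-}
-- Minimal sketch, not part of the package: parse one of the ODBC escape
-- examples from Odbc.hs directly, outside the test framework.
import qualified Data.Text as T
import Language.SQL.SimpleSQL.Parse (parseScalarExpr, prettyError)
import Language.SQL.SimpleSQL.Dialect (ansi2011, diOdbc)

main :: IO ()
main =
    -- per the tests above, "{fn now()}" should parse to
    -- OdbcFunc (App [Name Nothing "now"] [])
    either (putStrLn . T.unpack . prettyError) print $
        parseScalarExpr ansi2011{diOdbc = True} "" Nothing "{fn now()}"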
/website/template.pandoc: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | $for(author-meta)$ 8 | 9 | $endfor$ 10 | $if(date-meta)$ 11 | 12 | $endif$ 13 | $if(keywords)$ 14 | 15 | $endif$ 16 | $if(description-meta)$ 17 | 18 | $endif$ 19 | $if(title-prefix)$$title-prefix$ – $endif$$pagetitle$ 20 | 23 | $for(css)$ 24 | 25 | $endfor$ 26 | $for(header-includes)$ 27 | $header-includes$ 28 | $endfor$ 29 | $if(math)$ 30 | $if(mathjax)$ 31 | 32 | $endif$ 33 | $math$ 34 | $endif$ 35 | 36 | 37 | $for(include-before)$ 38 | $include-before$ 39 | $endfor$ 40 | $if(title)$ 41 |
<header id="title-block-header">
42 | <h1 class="title">$title$</h1>
43 | $if(subtitle)$
44 | <p class="subtitle">$subtitle$</p>
45 | $endif$
46 | $for(author)$
47 | <p class="author">$author$</p>
48 | $endfor$
49 | $if(date)$
50 | <p class="date">$date$</p>
51 | $endif$
52 | $if(abstract)$
53 | <div class="abstract">
54 | <div class="abstract-title">$abstract-title$</div>
55 | $abstract$
56 | </div>
57 | $endif$
58 | </header>
59 | $endif$ 60 | $body$ 61 | $for(include-after)$ 62 | $include-after$ 63 | $endfor$ 64 | 65 | $if(toc)$ 66 | 90 | $endif$ 91 | 92 | 93 | 94 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/Tests.hs: -------------------------------------------------------------------------------- 1 | 2 | {- 3 | This is the main tests module which exposes the test data plus the 4 | Test.Framework tests. It also contains the code which converts the 5 | test data to the Test.Framework tests. 6 | -} 7 | 8 | {-# LANGUAGE OverloadedStrings #-} 9 | module Language.SQL.SimpleSQL.Tests 10 | (testData 11 | ,tests 12 | ,TestItem(..) 13 | ) where 14 | 15 | import Test.Hspec 16 | (SpecWith 17 | ,describe 18 | ,parallel 19 | ) 20 | 21 | import Language.SQL.SimpleSQL.TestTypes 22 | 23 | import Language.SQL.SimpleSQL.FullQueries 24 | import Language.SQL.SimpleSQL.GroupBy 25 | import Language.SQL.SimpleSQL.Postgres 26 | import Language.SQL.SimpleSQL.QueryExprComponents 27 | import Language.SQL.SimpleSQL.QueryExprs 28 | import Language.SQL.SimpleSQL.QueryExprParens 29 | import Language.SQL.SimpleSQL.TableRefs 30 | import Language.SQL.SimpleSQL.ScalarExprs 31 | import Language.SQL.SimpleSQL.Odbc 32 | import Language.SQL.SimpleSQL.Tpch 33 | import Language.SQL.SimpleSQL.LexerTests 34 | import Language.SQL.SimpleSQL.EmptyStatement 35 | import Language.SQL.SimpleSQL.CreateIndex 36 | 37 | import Language.SQL.SimpleSQL.SQL2011Queries 38 | import Language.SQL.SimpleSQL.SQL2011AccessControl 39 | import Language.SQL.SimpleSQL.SQL2011Bits 40 | import Language.SQL.SimpleSQL.SQL2011DataManipulation 41 | import Language.SQL.SimpleSQL.SQL2011Schema 42 | 43 | import Language.SQL.SimpleSQL.MySQL 44 | import Language.SQL.SimpleSQL.Oracle 45 | import Language.SQL.SimpleSQL.CustomDialect 46 | import Language.SQL.SimpleSQL.ErrorMessages 47 | 48 | import qualified Data.Text as T 49 | 50 | {- 51 | Order the tests to start from the simplest first. This is also the 52 | order on the generated documentation. 
53 | -} 54 | 55 | testData :: TestItem 56 | testData = 57 | Group "parserTest" 58 | [lexerTests 59 | ,scalarExprTests 60 | ,odbcTests 61 | ,queryExprComponentTests 62 | ,queryExprsTests 63 | ,queryExprParensTests 64 | ,tableRefTests 65 | ,groupByTests 66 | ,fullQueriesTests 67 | ,postgresTests 68 | ,tpchTests 69 | ,sql2011QueryTests 70 | ,sql2011DataManipulationTests 71 | ,sql2011SchemaTests 72 | ,sql2011AccessControlTests 73 | ,sql2011BitsTests 74 | ,mySQLTests 75 | ,oracleTests 76 | ,customDialectTests 77 | ,emptyStatementTests 78 | ,createIndexTests 79 | ,errorMessageTests 80 | ] 81 | 82 | tests :: SpecWith () 83 | tests = parallel $ itemToTest testData 84 | 85 | itemToTest :: TestItem -> SpecWith () 86 | itemToTest (Group nm ts) = 87 | describe (T.unpack nm) $ mapM_ itemToTest ts 88 | itemToTest (TestScalarExpr _ _ _ t) = t 89 | itemToTest (TestQueryExpr _ _ _ t) = t 90 | itemToTest (TestStatement _ _ _ t) = t 91 | itemToTest (TestStatements _ _ _ t) = t 92 | itemToTest (ParseQueryExpr _ _ t) = t 93 | itemToTest (ParseQueryExprFails _ _ t) = t 94 | itemToTest (ParseScalarExprFails _ _ t) = t 95 | itemToTest (LexTest _ _ _ t) = t 96 | itemToTest (LexFails _ _ t) = t 97 | itemToTest (GeneralParseFailTest _ _ t) = t 98 | itemToTest (GoldenErrorTest _ _ t) = t 99 | -------------------------------------------------------------------------------- /website/RenderTestCases.hs: -------------------------------------------------------------------------------- 1 | -- Converts the test data to markdown 2 | -- it uses raw html for the table parts 3 | 4 | {-# LANGUAGE OverloadedStrings #-} 5 | import Language.SQL.SimpleSQL.Tests 6 | import Text.Show.Pretty (ppShow) 7 | import qualified Language.SQL.SimpleSQL.Parse as P 8 | import qualified Language.SQL.SimpleSQL.Lex as L 9 | import qualified Data.Text as T 10 | 11 | import qualified Data.Text.Lazy as L 12 | import qualified Data.Text.Lazy.IO as L 13 | 14 | data TableItem = Heading Int L.Text 15 | | Row L.Text L.Text 16 | 17 | doc :: Int -> TestItem -> [TableItem] 18 | -- filter out some groups of tests 19 | doc _ (Group nm _) | "generated" `T.isInfixOf` nm = [] 20 | doc n (Group nm is) = 21 | Heading n (L.fromStrict nm) 22 | : concatMap (doc (n + 1)) is 23 | doc _ (TestScalarExpr _ str e _) = 24 | [Row (L.fromStrict str) (L.pack $ ppShow e)] 25 | doc _ (TestQueryExpr _ str e _) = 26 | [Row (L.fromStrict str) (L.pack $ ppShow e)] 27 | doc _ (TestStatement _ str e _) = 28 | [Row (L.fromStrict str) (L.pack $ ppShow e)] 29 | doc _ (TestStatements _ str e _) = 30 | [Row (L.fromStrict str) (L.pack $ ppShow e)] 31 | doc _ (ParseQueryExpr d str _) = 32 | [Row (L.fromStrict str) (showResult $ P.parseQueryExpr d "" Nothing str)] 33 | doc _ (ParseQueryExprFails d str _) = 34 | [Row (L.fromStrict str) (showResult $ P.parseQueryExpr d "" Nothing str)] 35 | doc _ (ParseScalarExprFails d str _) = 36 | [Row (L.fromStrict str) (showResult $ P.parseScalarExpr d "" Nothing str)] 37 | 38 | doc _ (LexTest d str _ _) = 39 | [Row (L.fromStrict str) (showResultL $ L.lexSQL d False "" Nothing str)] 40 | 41 | doc _ (LexFails d str _) = 42 | [Row (L.fromStrict str) (showResultL $ L.lexSQL d False "" Nothing str)] 43 | doc _ (GeneralParseFailTest {}) = [] 44 | -- todo: find some way to render error message examples in a readable way 45 | doc _ (GoldenErrorTest {}) = [] 46 | 47 | showResult :: Show a => Either P.ParseError a -> L.Text 48 | showResult = either (("Left\n" <>) . L.fromStrict . P.prettyError) (L.pack . 
ppShow) 49 | 50 | showResultL :: Show a => Either L.ParseError a -> L.Text 51 | showResultL = either (("Left\n" <>) . L.fromStrict . L.prettyError) (L.pack . ppShow) 52 | 53 | 54 | -- TODO: should put the dialect in the html output 55 | 56 | 57 | render :: [TableItem] -> L.Text 58 | render = go False 59 | where 60 | go _t (Heading level title : is) = 61 | "\n" 62 | <> 63 | -- slight hack 64 | (if (level > 1) 65 | then "\n" <> L.replicate (fromIntegral $ level - 1) "#" <> " " <> title <> "\n" 66 | else "") 67 | <> go False is 68 | go t (Row sql hask : is) = 69 | (if (not t) 70 | then "\n" 71 | else "") 72 | <> let sql' = "\n~~~~{.sql}\n" <> sql <> "\n~~~~\n" 73 | hask' = "\n~~~~{.haskell}\n" <> hask <> "\n~~~~\n" 74 | in "\n" 75 | <> go True is 76 | go _t [] = "
\n" <> sql' <> "\n" <> hask' <> "
\n" 77 | {-escapePipe t = T.pack $ escapePipe' $ T.unpack t 78 | escapePipe' [] = [] 79 | escapePipe' ('\\':'|':xs) = '\\' : '\\' : '\\' : '|' : escapePipe' xs 80 | escapePipe' ('|':xs) = '\\' : '|' : escapePipe' xs 81 | escapePipe' (x:xs) = x : escapePipe' xs-} 82 | 83 | main :: IO () 84 | main = L.putStrLn $ render $ doc 1 testData 85 | 86 | -------------------------------------------------------------------------------- /TODO: -------------------------------------------------------------------------------- 1 | Some random notes on what could be done with the package in the 2 | future. None of this is scheduled. 3 | 4 | Infrastructure 5 | -------------- 6 | 7 | write a CI script 8 | 9 | decide if to use a code formatter - pro: it will preserve git blame stuff better 10 | 11 | switch the website to use markdown 12 | 13 | try to improve the usability of the rendered test cases 14 | 15 | add automated tests for the examples on the website 16 | 17 | add a few more examples to the website: 18 | parse some sql and detect if it has a particular feature 19 | do a transformation on some sql 20 | idea: convert tpch to sql server syntax 21 | generate some sql 22 | format some sql 23 | check if some sql parses 24 | trivial documentation generation for ddl 25 | trivial lint checker 26 | demos: 27 | crunch sql: this takes sql and tries to make it as small as possible 28 | (combining nested selects where possible and inlining 29 | ctes) 30 | expand sql: 31 | breaks apart complex sql using nested queries and ctes, try to make 32 | queries easier to understand in stages 33 | 34 | write a beginners tutorial for how to add support for some new sql syntax 35 | show how to develop parsers interactively, then tidy them up for merging 36 | to the main branch 37 | 38 | review code coverage and see if there are any important gaps to fill in 39 | set up hlint to run easily 40 | 41 | Code 42 | ---- 43 | 44 | There could be more negative tests for lexing and dialect options. 45 | 46 | Check the fixity in the tableref parsing, see if there is anywhere else that needs tweaking. 47 | 48 | Do all sql dialects have compatible fixities? If not, want to add dialect control over the fixity. 49 | 50 | add parse error recovery 51 | 52 | add ability to type check: 53 | uuagc still seems like the nicest option? 54 | uuagc has an option to attach to an external ast now, so could 55 | put the type checker in a separate package 56 | 57 | figure out how to support parsing some sql, transforming it, pretty printing it 58 | while perserving as much of the original formatting as possible, and all the comments 59 | an intermediate step is to minimise the difference in non whitespace/comment tokens 60 | when you parse then pretty print any supported sql 61 | 62 | add an annotation field to the syntax to make it more useful 63 | add source positions to this annotation when parsing 64 | 65 | can you make it properly extensible? 
the goal is for users to work with asts that 66 | represent only the dialect they are working in 67 | 68 | review names in the syntax for correspondence with sql standard, avoid 69 | gratuitous differences 70 | 71 | reduce use of booleans in the syntax 72 | 73 | quasi quotation support 74 | 75 | use this lib to build a typesafe sql wrapper for haskell 76 | 77 | optimise the lexer: 78 | add some benchmarks 79 | do some experiments with left factoring 80 | try to use the match approach with megaparsec 81 | see if it's work using something other than megaparsec for the lexer 82 | or// maybe it's no longer worth having a separate lexer? 83 | 84 | rewrite bits of the parser, lots of it is a bit questionable 85 | - an expert with megaparsec would write something simpler 86 | I think it's not worth doing for the sake of it, but if a bit 87 | is too difficult to add new features to, or to improve 88 | the error messages, then it might be worth it 89 | 90 | work on error messages 91 | 92 | review the crazy over the top lexer testing 93 | maybe it's enough to document an easy way to skip these tests 94 | 95 | check more of the formatting of the pretty printing and add regression tests for this 96 | 97 | is there a way to get incremental parsing like attoparsec? 98 | -------------------------------------------------------------------------------- /examples/SimpleSQLParserTool.hs: -------------------------------------------------------------------------------- 1 | 2 | {- 3 | Simple command line tool to experiment with simple-sql-parser 4 | 5 | Commands: 6 | 7 | parse: parse sql from file, stdin or from command line 8 | lex: lex sql same 9 | indent: parse then pretty print sql 10 | 11 | TODO: this is supposed to be a simple example, but it's a total mess 12 | write some simple helpers so it's all in text? 
13 | 14 | -} 15 | 16 | {-# LANGUAGE TupleSections #-} 17 | import System.Environment (getArgs) 18 | import Control.Monad (forM_, when) 19 | import Data.Maybe (isJust) 20 | import System.Exit (exitFailure) 21 | import Data.List (intercalate) 22 | import Text.Show.Pretty (ppShow) 23 | --import Control.Applicative 24 | 25 | import qualified Data.Text as T 26 | 27 | import Language.SQL.SimpleSQL.Pretty 28 | (prettyStatements) 29 | import Language.SQL.SimpleSQL.Parse 30 | (parseStatements 31 | ,prettyError) 32 | import qualified Language.SQL.SimpleSQL.Lex as L 33 | import Language.SQL.SimpleSQL.Dialect (ansi2011) 34 | 35 | 36 | main :: IO () 37 | main = do 38 | args <- getArgs 39 | case args of 40 | [] -> do 41 | -- exit with 0 in this case 42 | showHelp Nothing -- $ Just "no command given" 43 | (c:as) -> do 44 | let cmd = lookup c commands 45 | maybe (showHelp (Just "command not recognised")) 46 | (\(_,cmd') -> cmd' as) 47 | cmd 48 | 49 | commands :: [(String, (String,[String] -> IO ()))] 50 | commands = 51 | [("help", helpCommand) 52 | ,("parse", parseCommand) 53 | ,("lex", lexCommand) 54 | ,("format", formatCommand)] 55 | 56 | showHelp :: Maybe String -> IO () 57 | showHelp msg = do 58 | maybe (return ()) (\e -> putStrLn $ "Error: " ++ e) msg 59 | putStrLn "Usage:\n SimpleSQLParserTool command args" 60 | forM_ commands $ \(c, (h,_)) -> do 61 | putStrLn $ c ++ "\t" ++ h 62 | when (isJust msg) $ exitFailure 63 | 64 | helpCommand :: (String,[String] -> IO ()) 65 | helpCommand = 66 | ("show help for this progam", \_ -> showHelp Nothing) 67 | 68 | getInput :: [String] -> IO (FilePath,String) 69 | getInput as = 70 | case as of 71 | ["-"] -> ("",) <$> getContents 72 | ("-c":as') -> return ("", unwords as') 73 | [filename] -> (filename,) <$> readFile filename 74 | _ -> showHelp (Just "arguments not recognised") >> error "" 75 | 76 | parseCommand :: (String,[String] -> IO ()) 77 | parseCommand = 78 | ("parse SQL from file/stdin/command line (use -c to parse from command line)" 79 | ,\args -> do 80 | (f,src) <- getInput args 81 | either (error . T.unpack . prettyError) 82 | (putStrLn . ppShow) 83 | $ parseStatements ansi2011 (T.pack f) Nothing (T.pack src) 84 | ) 85 | 86 | lexCommand :: (String,[String] -> IO ()) 87 | lexCommand = 88 | ("lex SQL from file/stdin/command line (use -c to parse from command line)" 89 | ,\args -> do 90 | (f,src) <- getInput args 91 | either (error . T.unpack . L.prettyError) 92 | (putStrLn . intercalate ",\n" . map show) 93 | $ L.lexSQL ansi2011 False (T.pack f) Nothing (T.pack src) 94 | ) 95 | 96 | 97 | formatCommand :: (String,[String] -> IO ()) 98 | formatCommand = 99 | ("parse then pretty print SQL from file/stdin/command line (use -c to parse from command line)" 100 | ,\args -> do 101 | (f,src) <- getInput args 102 | either (error . T.unpack . prettyError) 103 | (putStrLn . T.unpack . 
prettyStatements ansi2011) 104 | $ parseStatements ansi2011 (T.pack f) Nothing (T.pack src) 105 | 106 | ) 107 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/TestRunners.hs: -------------------------------------------------------------------------------- 1 | 2 | {-# LANGUAGE OverloadedStrings #-} 3 | module Language.SQL.SimpleSQL.TestRunners 4 | (testLex 5 | ,lexFails 6 | ,testScalarExpr 7 | ,testQueryExpr 8 | ,testStatement 9 | ,testStatements 10 | ,testParseQueryExpr 11 | ,testParseQueryExprFails 12 | ,testParseScalarExprFails 13 | ,HasCallStack 14 | ) where 15 | 16 | import Language.SQL.SimpleSQL.Syntax 17 | import Language.SQL.SimpleSQL.TestTypes 18 | import Language.SQL.SimpleSQL.Pretty 19 | import Language.SQL.SimpleSQL.Parse 20 | import qualified Language.SQL.SimpleSQL.Lex as Lex 21 | 22 | import Data.Text (Text) 23 | import qualified Data.Text as T 24 | 25 | import Language.SQL.SimpleSQL.Expectations 26 | (shouldParseL 27 | ,shouldFail 28 | ,shouldParseA 29 | ,shouldSucceed 30 | ) 31 | 32 | import Test.Hspec 33 | (it 34 | ,HasCallStack 35 | ) 36 | 37 | testLex :: HasCallStack => Dialect -> Text -> [Lex.Token] -> TestItem 38 | testLex d input a = 39 | LexTest d input a $ do 40 | it (T.unpack input) $ Lex.lexSQL d False "" Nothing input `shouldParseL` a 41 | it (T.unpack $ "pp: " <> input) $ Lex.lexSQL d False "" Nothing (Lex.prettyTokens d a) `shouldParseL` a 42 | 43 | lexFails :: HasCallStack => Dialect -> Text -> TestItem 44 | lexFails d input = 45 | LexFails d input $ 46 | it (T.unpack input) $ shouldFail $ Lex.lexSQL d False "" Nothing input 47 | 48 | testScalarExpr :: HasCallStack => Dialect -> Text -> ScalarExpr -> TestItem 49 | testScalarExpr d input a = 50 | TestScalarExpr d input a $ do 51 | it (T.unpack input) $ parseScalarExpr d "" Nothing input `shouldParseA` a 52 | it (T.unpack $ "pp: " <> input) $ parseScalarExpr d "" Nothing (prettyScalarExpr d a) `shouldParseA` a 53 | 54 | testQueryExpr :: HasCallStack => Dialect -> Text -> QueryExpr -> TestItem 55 | testQueryExpr d input a = 56 | TestQueryExpr d input a $ do 57 | it (T.unpack input) $ parseQueryExpr d "" Nothing input `shouldParseA` a 58 | it (T.unpack $ "pp: " <> input) $ parseQueryExpr d "" Nothing (prettyQueryExpr d a) `shouldParseA` a 59 | 60 | testParseQueryExpr :: HasCallStack => Dialect -> Text -> TestItem 61 | testParseQueryExpr d input = 62 | let a = parseQueryExpr d "" Nothing input 63 | in ParseQueryExpr d input $ do 64 | it (T.unpack input) $ shouldSucceed (T.unpack . 
prettyError) a 65 | case a of 66 | Left _ -> pure () 67 | Right a' -> 68 | it (T.unpack $ "pp: " <> input) $ 69 | parseQueryExpr d "" Nothing (prettyQueryExpr d a') `shouldParseA` a' 70 | 71 | testParseQueryExprFails :: HasCallStack => Dialect -> Text -> TestItem 72 | testParseQueryExprFails d input = 73 | ParseQueryExprFails d input $ 74 | it (T.unpack input) $ shouldFail $ parseQueryExpr d "" Nothing input 75 | 76 | testParseScalarExprFails :: HasCallStack => Dialect -> Text -> TestItem 77 | testParseScalarExprFails d input = 78 | ParseScalarExprFails d input $ 79 | it (T.unpack input) $ shouldFail $ parseScalarExpr d "" Nothing input 80 | 81 | testStatement :: HasCallStack => Dialect -> Text -> Statement -> TestItem 82 | testStatement d input a = 83 | TestStatement d input a $ do 84 | it (T.unpack input) $ parseStatement d "" Nothing input `shouldParseA` a 85 | it (T.unpack $ "pp: " <> input) $ parseStatement d "" Nothing (prettyStatement d a) `shouldParseA` a 86 | 87 | testStatements :: HasCallStack => Dialect -> Text -> [Statement] -> TestItem 88 | testStatements d input a = 89 | TestStatements d input a $ do 90 | it (T.unpack input) $ parseStatements d "" Nothing input `shouldParseA` a 91 | it (T.unpack $ "pp: " <> input) $ parseStatements d "" Nothing (prettyStatements d a) `shouldParseA` a 92 | 93 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | 2 | # quick makefile to document how to do the various tasks 3 | 4 | # there is no real reason to actually use the makefile except for a 5 | # very small amount of convenience, apart from the website build 6 | # and website consistency checks 7 | 8 | .PHONY : build 9 | build : 10 | cabal build 11 | 12 | .PHONY : test 13 | test : 14 | cabal run test:Tests -- -f failed-examples +RTS -N 15 | 16 | .PHONY : fast-test 17 | fast-test : 18 | cabal run test:Tests -- -f failed-examples --skip ansiLexerTests --skip postgresLexerTests +RTS -N 19 | 20 | .PHONY : test-coverage 21 | test-coverage : 22 | cabal test --enable-coverage 23 | 24 | .PHONY : clean 25 | clean : 26 | cabal clean 27 | cd website && cabal clean 28 | rm -Rf build/ 29 | 30 | .PHONY : parserexe 31 | parserexe : 32 | cabal build -fparserexe SimpleSQLParserTool 33 | 34 | .PHONY : all 35 | all : build test parserexe 36 | 37 | ############################################### 38 | 39 | # website 40 | 41 | # it's a bit crap, run cabal test or make test or something at least once 42 | # to get the website build to work 43 | 44 | .PHONY : website 45 | website : website-non-haddock build-haddock 46 | 47 | .PHONY : website-non-haddock 48 | website-non-haddock : build/main.css build/main1.css build/index.html \ 49 | build/supported_sql.html build/test_cases.html 50 | 51 | build/main.css : website/main.css 52 | mkdir -p build 53 | cp website/main.css build 54 | 55 | # todo: combine main and main1, change the one bit they can't share with sed 56 | # to create the additional main1 as part of the build 57 | build/main1.css : website/main1.css 58 | mkdir -p build 59 | cp website/main1.css build 60 | 61 | build/index.html : website/index.md website/template.pandoc 62 | mkdir -p build 63 | pandoc -s --template website/template.pandoc -V toc-title:"Table of contents" -c main.css -f markdown -t html --toc=true --metadata title="Simple SQL Parser" website/index.md > build/index.html 64 | 65 | build/supported_sql.html : website/supported_sql.md website/template.pandoc 66 | mkdir -p 
build 67 | pandoc -s --template website/template.pandoc -V toc-title:"Table of contents" -c main.css -f markdown -t html --toc=true --metadata title="Simple SQL Parser supported SQL" website/supported_sql.md > build/supported_sql.html 68 | 69 | build/test_cases.html : website/RenderTestCases.hs website/template1.pandoc 70 | mkdir -p build 71 | # no idea why not using --disable-optimisation on cabal build, but putting -O0 72 | # in the cabal file (and then cabal appears to say it's still using -O1 73 | # is faster 74 | echo Entering directory \`website/\' 75 | cd website/ && cabal build RenderTestCases && cabal run RenderTestCases | pandoc -s -N --template template1.pandoc -V toc-title:"Simple SQL Parser test case examples" -c main1.css -f markdown -t html --toc=true --metadata title="Simple SQL Parse test case examples" > ../build/test_cases.html 76 | echo Leaving directory \`website/\' 77 | 78 | # works here, but not in a recipe. amazing 79 | # GHC_VER="$(shell ghc --numeric-version)" 80 | 81 | .PHONY : build-haddock 82 | build-haddock : 83 | cabal haddock --haddock-option="--hyperlinked-source" 84 | # todo: handle the deps properly 85 | rm -Rf build/haddock 86 | mkdir build/haddock/ 87 | $(eval GHC_VER="$(shell ghc --numeric-version)") 88 | $(eval SSP_VER="$(shell cat simple-sql-parser.cabal |grep -P '^version:' | awk '{print $$2}')") 89 | cp -R dist-newstyle/build/x86_64-linux/ghc-${GHC_VER}/simple-sql-parser-${SSP_VER}/doc/html/simple-sql-parser/* build/haddock/ 90 | 91 | # check the website pages code snippets 92 | .PHONY : doctool 93 | doctool : 94 | cabal build -fparserexe SimpleSQLParserTool 95 | silverbane website/index.md 96 | 97 | .PHONY : really-all 98 | really-all : build test parserexe website doctool 99 | 100 | -------------------------------------------------------------------------------- /simple-sql-parser.cabal: -------------------------------------------------------------------------------- 1 | cabal-version: 2.2 2 | 3 | name: simple-sql-parser 4 | version: 0.8.0 5 | synopsis: A parser for SQL. 6 | 7 | description: A parser for SQL. Parses most SQL:2011 8 | queries, non-query DML, DDL, access control and 9 | transaction management syntax. Please see the 10 | homepage for more information 11 | . 12 | 13 | homepage: http://jakewheat.github.io/simple-sql-parser/latest 14 | license: BSD-3-Clause 15 | license-file: LICENSE 16 | author: Jake Wheat 17 | maintainer: jakewheat@tutanota.com 18 | copyright: Copyright 2013 - 2024, Jake Wheat and the simple-sql-parser contributors. 
19 | category: Database,Language 20 | build-type: Simple 21 | extra-doc-files: README,LICENSE,changelog 22 | bug-reports: https://github.com/JakeWheat/simple-sql-parser/issues 23 | 24 | source-repository head 25 | type: git 26 | location: https://github.com/JakeWheat/simple-sql-parser.git 27 | 28 | Flag parserexe 29 | Description: Build SimpleSQLParserTool exe 30 | Default: False 31 | 32 | common shared-properties 33 | default-language: Haskell2010 34 | build-depends: base >=4 && <5, 35 | megaparsec >=9.6 && <9.7, 36 | parser-combinators >= 1.3 && < 1.4, 37 | mtl >=2.1 && <2.4, 38 | prettyprinter >= 1.7 && < 1.8, 39 | text >= 2.0 && < 2.2, 40 | containers >= 0.6 && < 0.8 41 | ghc-options: -Wall 42 | 43 | library 44 | import: shared-properties 45 | exposed-modules: Language.SQL.SimpleSQL.Pretty, 46 | Language.SQL.SimpleSQL.Parse, 47 | Language.SQL.SimpleSQL.Lex, 48 | Language.SQL.SimpleSQL.Syntax, 49 | Language.SQL.SimpleSQL.Dialect 50 | 51 | Test-Suite Tests 52 | import: shared-properties 53 | type: exitcode-stdio-1.0 54 | main-is: RunTests.hs 55 | hs-source-dirs: tests 56 | Build-Depends: simple-sql-parser, 57 | hspec, 58 | hspec-megaparsec, 59 | hspec-expectations, 60 | raw-strings-qq, 61 | hspec-golden, 62 | filepath, 63 | pretty-show, 64 | 65 | Other-Modules: Language.SQL.SimpleSQL.ErrorMessages, 66 | Language.SQL.SimpleSQL.FullQueries, 67 | Language.SQL.SimpleSQL.GroupBy, 68 | Language.SQL.SimpleSQL.MySQL, 69 | Language.SQL.SimpleSQL.Postgres, 70 | Language.SQL.SimpleSQL.Odbc, 71 | Language.SQL.SimpleSQL.Oracle, 72 | Language.SQL.SimpleSQL.QueryExprComponents, 73 | Language.SQL.SimpleSQL.QueryExprs, 74 | Language.SQL.SimpleSQL.QueryExprParens, 75 | Language.SQL.SimpleSQL.SQL2011Queries, 76 | Language.SQL.SimpleSQL.SQL2011AccessControl, 77 | Language.SQL.SimpleSQL.SQL2011Bits, 78 | Language.SQL.SimpleSQL.SQL2011DataManipulation, 79 | Language.SQL.SimpleSQL.SQL2011Schema, 80 | Language.SQL.SimpleSQL.TableRefs, 81 | Language.SQL.SimpleSQL.TestTypes, 82 | Language.SQL.SimpleSQL.Tests, 83 | Language.SQL.SimpleSQL.Tpch, 84 | Language.SQL.SimpleSQL.ScalarExprs, 85 | Language.SQL.SimpleSQL.LexerTests, 86 | Language.SQL.SimpleSQL.CustomDialect, 87 | Language.SQL.SimpleSQL.EmptyStatement, 88 | Language.SQL.SimpleSQL.CreateIndex 89 | Language.SQL.SimpleSQL.Expectations 90 | Language.SQL.SimpleSQL.TestRunners 91 | 92 | ghc-options: -threaded 93 | 94 | -- this is a testing tool, do some dumb stuff to hide the dependencies in hackage 95 | Test-Suite SimpleSQLParserTool 96 | import: shared-properties 97 | type: exitcode-stdio-1.0 98 | main-is: SimpleSQLParserTool.hs 99 | hs-source-dirs: examples 100 | Build-Depends: simple-sql-parser, 101 | pretty-show >= 1.6 && < 1.10 102 | if flag(parserexe) 103 | buildable: True 104 | else 105 | buildable: False 106 | 107 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/TableRefs.hs: -------------------------------------------------------------------------------- 1 | 2 | {- 3 | These are the tests for parsing focusing on the from part of query 4 | expression 5 | -} 6 | 7 | {-# LANGUAGE OverloadedStrings #-} 8 | module Language.SQL.SimpleSQL.TableRefs (tableRefTests) where 9 | 10 | import Language.SQL.SimpleSQL.TestTypes 11 | import Language.SQL.SimpleSQL.Syntax 12 | import Language.SQL.SimpleSQL.TestRunners 13 | import Data.Text (Text) 14 | 15 | tableRefTests :: TestItem 16 | tableRefTests = Group "tableRefTests" 17 | [q "select a from t" 18 | $ ms [TRSimple [Name Nothing "t"]] 19 | 20 | ,q "select a 
from f(a)" 21 | $ ms [TRFunction [Name Nothing "f"] [Iden [Name Nothing "a"]]] 22 | 23 | ,q "select a from t,u" 24 | $ ms [TRSimple [Name Nothing "t"], TRSimple [Name Nothing "u"]] 25 | 26 | ,q "select a from s.t" 27 | $ ms [TRSimple [Name Nothing "s", Name Nothing "t"]] 28 | 29 | -- these lateral queries make no sense but the syntax is valid 30 | 31 | ,q "select a from lateral a" 32 | $ ms [TRLateral $ TRSimple [Name Nothing "a"]] 33 | 34 | ,q "select a from lateral a,b" 35 | $ ms [TRLateral $ TRSimple [Name Nothing "a"], TRSimple [Name Nothing "b"]] 36 | 37 | ,q "select a from a, lateral b" 38 | $ ms [TRSimple [Name Nothing "a"], TRLateral $ TRSimple [Name Nothing "b"]] 39 | 40 | ,q "select a from a natural join lateral b" 41 | $ ms [TRJoin (TRSimple [Name Nothing "a"]) True JInner 42 | (TRLateral $ TRSimple [Name Nothing "b"]) 43 | Nothing] 44 | 45 | ,q "select a from lateral a natural join lateral b" 46 | $ ms [TRJoin (TRLateral $ TRSimple [Name Nothing "a"]) True JInner 47 | (TRLateral $ TRSimple [Name Nothing "b"]) 48 | Nothing] 49 | 50 | 51 | ,q "select a from t inner join u on expr" 52 | $ ms [TRJoin (TRSimple [Name Nothing "t"]) False JInner (TRSimple [Name Nothing "u"]) 53 | (Just $ JoinOn $ Iden [Name Nothing "expr"])] 54 | 55 | ,q "select a from t join u on expr" 56 | $ ms [TRJoin (TRSimple [Name Nothing "t"]) False JInner (TRSimple [Name Nothing "u"]) 57 | (Just $ JoinOn $ Iden [Name Nothing "expr"])] 58 | 59 | ,q "select a from t left join u on expr" 60 | $ ms [TRJoin (TRSimple [Name Nothing "t"]) False JLeft (TRSimple [Name Nothing "u"]) 61 | (Just $ JoinOn $ Iden [Name Nothing "expr"])] 62 | 63 | ,q "select a from t right join u on expr" 64 | $ ms [TRJoin (TRSimple [Name Nothing "t"]) False JRight (TRSimple [Name Nothing "u"]) 65 | (Just $ JoinOn $ Iden [Name Nothing "expr"])] 66 | 67 | ,q "select a from t full join u on expr" 68 | $ ms [TRJoin (TRSimple [Name Nothing "t"]) False JFull (TRSimple [Name Nothing "u"]) 69 | (Just $ JoinOn $ Iden [Name Nothing "expr"])] 70 | 71 | ,q "select a from t cross join u" 72 | $ ms [TRJoin (TRSimple [Name Nothing "t"]) False 73 | JCross (TRSimple [Name Nothing "u"]) Nothing] 74 | 75 | ,q "select a from t natural inner join u" 76 | $ ms [TRJoin (TRSimple [Name Nothing "t"]) True JInner (TRSimple [Name Nothing "u"]) 77 | Nothing] 78 | 79 | ,q "select a from t inner join u using(a,b)" 80 | $ ms [TRJoin (TRSimple [Name Nothing "t"]) False JInner (TRSimple [Name Nothing "u"]) 81 | (Just $ JoinUsing [Name Nothing "a", Name Nothing "b"])] 82 | 83 | ,q "select a from (select a from t)" 84 | $ ms [TRQueryExpr $ ms [TRSimple [Name Nothing "t"]]] 85 | 86 | ,q "select a from t as u" 87 | $ ms [TRAlias (TRSimple [Name Nothing "t"]) (Alias (Name Nothing "u") Nothing)] 88 | 89 | ,q "select a from t u" 90 | $ ms [TRAlias (TRSimple [Name Nothing "t"]) (Alias (Name Nothing "u") Nothing)] 91 | 92 | ,q "select a from t u(b)" 93 | $ ms [TRAlias (TRSimple [Name Nothing "t"]) (Alias (Name Nothing "u") $ Just [Name Nothing "b"])] 94 | 95 | ,q "select a from (t cross join u) as u" 96 | $ ms [TRAlias (TRParens $ 97 | TRJoin (TRSimple [Name Nothing "t"]) False JCross (TRSimple [Name Nothing "u"]) Nothing) 98 | (Alias (Name Nothing "u") Nothing)] 99 | -- todo: not sure if the associativity is correct 100 | 101 | ,q "select a from t cross join u cross join v" 102 | $ ms [TRJoin 103 | (TRJoin (TRSimple [Name Nothing "t"]) False 104 | JCross (TRSimple [Name Nothing "u"]) Nothing) 105 | False JCross (TRSimple [Name Nothing "v"]) Nothing] 106 | ] 107 | where 108 | ms f 
= toQueryExpr $ makeSelect {msSelectList = [(Iden [Name Nothing "a"],Nothing)] 109 | ,msFrom = f} 110 | q :: HasCallStack => Text -> QueryExpr -> TestItem 111 | q src ast = testQueryExpr ansi2011 src ast 112 | -------------------------------------------------------------------------------- /release_checklist: -------------------------------------------------------------------------------- 1 | # Release checklist 2 | 3 | Check the version in the cabal file - update it if it hasn't already been updated. git grep for any other mentions of the version number that need updating. 4 | 5 | Update the changelog, use git diff or similar to try to reduce the chance of missing anything important. 6 | 7 | Run the tests (if any fail at the point of thinking about a release, then something has gone horribly wrong ...) 8 | 9 | ~~~~ 10 | cabal test 11 | ~~~~ 12 | 13 | Do the cabal checks: 14 | 15 | ~~~~ 16 | cabal update 17 | cabal outdated 18 | cabal check 19 | ~~~~ 20 | 21 | Check everything: 22 | 23 | ~~~~ 24 | make clean 25 | make really-all 26 | ~~~~ 27 | 28 | TODO: you need silverbane to check the examples in the index.md. 29 | 30 | Then: 31 | 32 | * check the webpages appear nicely 33 | 34 | * check all the tests are rendered on the example page -> need to find a robust way of doing this, because there are huge numbers and it's impossible to eyeball and tell if it's good unless you somehow spot a problem. 35 | 36 | Update stack.yaml to the latest lts - check this page: https://www.stackage.org/ . While updating, check the extra-deps field, if there are any there, see if they can be removed. 37 | 38 | Install latest stack and check it works - maybe the stack.yaml file needs a tweak, maybe the cabal file. 39 | 40 | ~~~~ 41 | ghcup list 42 | ghcup install stack [latest from the list on stackage.org] 43 | stack test 44 | ~~~~ 45 | 46 | Run the tests on the previous 2 ghcs latest point releases, and the latest ghc, each with the latest cabal-install they support (e.g. as of October 2024, these three ghc versions are 9.10.1, 9.8.2, 9.6.6). This is now trivial to do with ghcup, amazing progress in Haskell tools in recent years. 47 | 48 | Build the release tarball, run a test with an example using this tarball: 49 | 50 | ~~~~ 51 | cabal sdist 52 | mkdir temp-build 53 | # get the path to the tar.gz from the output of cabal sdist 54 | cp simple-sql-parser/main/dist-newstyle/sdist/simple-sql-parser-0.X.X.tar.gz temp-build 55 | cd temp-build 56 | cabal init -n 57 | cp ../examples/SimpleSQLParserTool.hs app/Main.hs 58 | ~~~~ 59 | 60 | Add these to the build-depends: for the Main in temp-build.cabal: 61 | 62 | ~~~~ 63 | simple-sql-parser == 0.X.X, 64 | pretty-show, 65 | text 66 | ~~~~ 67 | 68 | Add a cabal.project.local file containing: 69 | 70 | ~~~~ 71 | packages: 72 | ./ 73 | ./simple-sql-parser-0.X.X.tar.gz 74 | ~~~~ 75 | 76 | Run the test: 77 | 78 | ~~~~ 79 | cabal run temp-build -- parse -c "select 1" 80 | ~~~~ 81 | 82 | Example of output on success: 83 | 84 | ~~~~ 85 | $ cabal run temp-build -- parse -c "select 1" 86 | Build profile: -w ghc-9.8.1 -O1 87 | In order, the following will be built (use -v for more details): 88 | - simple-sql-parser-0.7.0 (lib) (requires build) 89 | - temp-build-0.1.0.0 (exe:temp-build) (first run) 90 | Starting simple-sql-parser-0.7.0 (lib) 91 | Building simple-sql-parser-0.7.0 (lib) 92 | Installing simple-sql-parser-0.7.0 (lib) 93 | Completed simple-sql-parser-0.7.0 (lib) 94 | Configuring executable 'temp-build' for temp-build-0.1.0.0.. 
95 | Preprocessing executable 'temp-build' for temp-build-0.1.0.0.. 96 | Building executable 'temp-build' for temp-build-0.1.0.0.. 97 | [1 of 1] Compiling Main ( app/Main.hs, /home/jake/wd/simple-sql-parser/main/temp-build/dist-newstyle/build/x86_64-linux/ghc-9.8.1/temp-build-0.1.0.0/x/temp-build/build/temp-build/temp-build-tmp/Main.o ) 98 | [2 of 2] Linking /home/jake/wd/simple-sql-parser/main/temp-build/dist-newstyle/build/x86_64-linux/ghc-9.8.1/temp-build-0.1.0.0/x/temp-build/build/temp-build/temp-build 99 | [ SelectStatement 100 | Select 101 | { qeSetQuantifier = SQDefault 102 | , qeSelectList = [ ( NumLit "1" , Nothing ) ] 103 | , qeFrom = [] 104 | , qeWhere = Nothing 105 | , qeGroupBy = [] 106 | , qeHaving = Nothing 107 | , qeOrderBy = [] 108 | , qeOffset = Nothing 109 | , qeFetchFirst = Nothing 110 | } 111 | ] 112 | ~~~~ 113 | 114 | TODO: hlint?, how to do a spell check, what about automatic code formatting? 115 | 116 | If there are any non-trivial changes to the website or api, upload a new website. 117 | 118 | Upload candidate to hackage, run a test with example using this package 119 | - don't remember how this works, but I think you'll do the same as testing the tarball locally, but don't copy the tarball or add a cabal.project file, after uploading the candidate I think you just need to do a 'cabal update', then the cabal build should find the candidate if you gave it the exact version. 120 | 121 | If all good, release the candidate - a button on the hackage website. 122 | 123 | Update the website 124 | 125 | add a tag for the commit corresponding to the version: 126 | 127 | ~~~~ 128 | git tag -a v0.7.0 -m "0.7.0" 129 | git push origin v0.7.0 130 | ~~~~ 131 | 132 | This will add the tag to the current commit. 133 | 134 | 135 | Todo: try to turn as much of this as possible into a script with a nice report, order this list properly, say what you need to check in more detail, say what else you need to redo if any steps need actions. 136 | -------------------------------------------------------------------------------- /website/supported_sql.md: -------------------------------------------------------------------------------- 1 | # Overview 2 | 3 | This page has more details on the supported SQL in simple-sql-parser. 4 | 5 | See the [simple-sql-parser test cases](test_cases.html) page for 6 | examples. 7 | 8 | The target dialect of SQL at this time is ISO/ANSI SQL:2011. The 9 | parser supports queries, DDL, non-query DML, access control and 10 | transaction management syntax. The parser and syntax do not follow 11 | the standard grammar closely - they permit a lot of things which the 12 | grammar in the standard forbids. The intended usage is that an 13 | additional pass over the ast can be made if you want to carefully 14 | prohibit everything that the standard doesn't allow. 15 | 16 | Apart from this permissiveness, some work has been put into trying to 17 | get good parser error messages. 18 | 19 | # Queries 20 | 21 | ## Select lists 22 | 23 | Supports scalar expressions, aliases with optional 'as'. 24 | 25 | ## Set quantifiers on select 26 | 27 | Supports 'select distinct' and explicit 'select all'.
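For example, a sketch mirroring the set quantifier tests in tests/Language/SQL/SimpleSQL/QueryExprComponents.hs (the remaining Select fields are left at their makeSelect defaults, and the exact AST rendering may differ between versions):

~~~~{.haskell}
-- parsing with the ansi2011 dialect, using the same call shape as the test runners
parseQueryExpr ansi2011 "" Nothing "select distinct a from t"
-- is expected to succeed with (roughly) the same query expression as
toQueryExpr $ makeSelect
    {msSetQuantifier = Distinct
    ,msSelectList = [(Iden [Name Nothing "a"], Nothing)]
    ,msFrom = [TRSimple [Name Nothing "t"]]}
~~~~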
28 | 29 | ## From clause 30 | 31 | * aliases 32 | * subqueries 33 | * functions 34 | * joins 35 | - natural 36 | - inner 37 | - left/right/full outer 38 | - cross 39 | - on expressions 40 | - using lists 41 | - lateral 42 | 43 | ## Group by clause 44 | 45 | Supports scalar expressions, group by (), cube, rollup, grouping 46 | parentheses and grouping sets with nested grouping expressions. 47 | 48 | ## Order by clause 49 | 50 | Supports scalar expressions, asc/desc and nulls first/last. 51 | 52 | ## Offset and fetch 53 | 54 | Supports 'offset n rows' and 'fetch first n rows only'. 55 | 56 | ## Set operators 57 | 58 | Union, except, intersect + all/distinct and corresponding. 59 | 60 | ## Table value constructor 61 | 62 | Example: 'values (1,2),(3,4)'. 63 | 64 | ## Explicit table 65 | 66 | Example: 'table t', which is shorthand for 'select * from t'. 67 | 68 | ## Scalar expressions 69 | 70 | The scalar expression type and parser are used in many contexts, 71 | including: 72 | 73 | * select lists; 74 | * where clause expressions; 75 | * group by clause expressions; 76 | * having clause expressions; 77 | * order by clause expressions; 78 | * offset and fetch clause expressions; 79 | * table value constructors. 80 | 81 | This doesn't exactly follow the ANSI Standards, which have separate 82 | grammars for most of these. 83 | 84 | The supported scalar expressions include: 85 | 86 | * basic string literals in single quotes 87 | * number literals: digits.digitse+-exp 88 | * explicitly typed literal, e.g. int '3' 89 | * binary operators 90 | - comparisons: = != <> <= >= < > 91 | - arithmetic: + - / * % ^ 92 | - logic: and, or 93 | - bitwise: & | (and ^ as above) 94 | - string: ||, like, not like 95 | - other: overlaps, is similar to, is not similar to, is distinct 96 | from, is not distinct from 97 | * prefix unary operators 98 | - +, - 99 | - not 100 | - ~ 101 | * postfix unary 102 | - is null, is not null 103 | - is true, is not true, is false, is not false, is unknown, is not unknown 104 | * other operators 105 | - extract (extract(day from dt)) 106 | - position (position string1 in string2) 107 | - substring (substring(x from 2 for 4)) 108 | - convert (convert(string using conversion)) 109 | - translate (translate(string using translation)) 110 | - overlay (overlay (string placing embedded_string from start for 111 | length)) 112 | - trim (trim(leading '_' from s)) 113 | - between (a between 1 and 5) 114 | - in list (a in (1,2,3,4)) 115 | - cast (cast(a as int)) 116 | * subqueries 117 | - in subquery 118 | - any/some/all 119 | - exists 120 | * case expressions 121 | * parentheses 122 | * quoted and unquoted identifiers 123 | * a.b qualified identifiers 124 | * \*, a.* 125 | * functions: f(a,b) 126 | * aggregates: agg(distinct a order by b) 127 | * window functions: sum(x) over (partition by y order by z) 128 | plus some explicit frame support (same as in postgres 9.3) 129 | * row constructors, e.g. where (a,b) = any (select a,b from t) 130 | * ?
used in parameterized queries 131 | 132 | # DDL 133 | 134 | * schemas 135 | * create, drop + drop restrict 136 | 137 | * tables 138 | * create table 139 | * constraints: named, null, unique, primary key, foreign key (matches, on update/delete) 140 | * identity (the weird ansi version), defaults 141 | * defaults 142 | * alter table 143 | * defaults, null, set data type, drop column, constraints 144 | * drop table + restrict 145 | 146 | * create, drop view 147 | * create, alter, drop domain 148 | * defaults, constraints 149 | * create, drop assertion 150 | * create, alter, drop sequence 151 | 152 | # Non-query DML 153 | 154 | * delete 155 | * delete from 156 | * as alias 157 | * where 158 | * truncate 159 | * with identity options 160 | * insert 161 | * values, general queries, defaults 162 | * update 163 | * including row updates 164 | 165 | # Access Control 166 | 167 | * grant privileges 168 | * all, grant option, table, domain, type, sequence, role, etc. 169 | * revoke 170 | * create role, drop role 171 | 172 | # Transaction management 173 | 174 | * begin, commit, rollback 175 | * savepoints 176 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/SQL2011Bits.hs: -------------------------------------------------------------------------------- 1 | 2 | {- 3 | Sections 17 and 19 in Foundation 4 | 5 | This module covers the tests for transaction management (begin, 6 | commit, savepoint, etc.), and session management (set). 7 | -} 8 | 9 | 10 | {-# LANGUAGE OverloadedStrings #-} 11 | module Language.SQL.SimpleSQL.SQL2011Bits (sql2011BitsTests) where 12 | 13 | import Language.SQL.SimpleSQL.TestTypes 14 | import Language.SQL.SimpleSQL.Syntax 15 | import Language.SQL.SimpleSQL.TestRunners 16 | import Data.Text (Text) 17 | 18 | sql2011BitsTests :: TestItem 19 | sql2011BitsTests = Group "sql 2011 bits tests" [ 20 | 21 | {- 22 | 17 Transaction management 23 | 24 | 17.1 25 | 26 | ::= 27 | START TRANSACTION [ ] 28 | 29 | BEGIN is not in the standard! 30 | -} 31 | 32 | s "start transaction" StartTransaction 33 | 34 | {- 35 | 17.2 36 | 37 | ::= 38 | SET [ LOCAL ] TRANSACTION 39 | 40 | 17.3 41 | 42 | ::= 43 | [ [ { }... ] ] 44 | 45 | ::= 46 | 47 | | 48 | | 49 | 50 | ::= 51 | READ ONLY 52 | | READ WRITE 53 | 54 | ::= 55 | ISOLATION LEVEL 56 | 57 | ::= 58 | READ UNCOMMITTED 59 | | READ COMMITTED 60 | | REPEATABLE READ 61 | | SERIALIZABLE 62 | 63 | ::= 64 | DIAGNOSTICS SIZE 65 | 66 | ::= 67 | 68 | 69 | 17.4 70 | 71 | ::= 72 | SET CONSTRAINTS { DEFERRED | IMMEDIATE } 73 | 74 | ::= 75 | ALL 76 | | [ { }... 
] 77 | 78 | 17.5 79 | 80 | ::= 81 | SAVEPOINT 82 | 83 | ::= 84 | 85 | -} 86 | 87 | ,s "savepoint difficult_bit" 88 | $ Savepoint $ Name Nothing "difficult_bit" 89 | 90 | 91 | {- 92 | 17.6 93 | 94 | ::= 95 | RELEASE SAVEPOINT 96 | -} 97 | 98 | ,s "release savepoint difficult_bit" 99 | $ ReleaseSavepoint $ Name Nothing "difficult_bit" 100 | 101 | 102 | {- 103 | 17.7 104 | 105 | ::= 106 | COMMIT [ WORK ] [ AND [ NO ] CHAIN ] 107 | -} 108 | 109 | ,s "commit" Commit 110 | 111 | ,s "commit work" Commit 112 | 113 | 114 | {- 115 | 17.8 116 | 117 | ::= 118 | ROLLBACK [ WORK ] [ AND [ NO ] CHAIN ] [ ] 119 | 120 | ::= 121 | TO SAVEPOINT 122 | -} 123 | 124 | ,s "rollback" $ Rollback Nothing 125 | 126 | ,s "rollback work" $ Rollback Nothing 127 | 128 | ,s "rollback to savepoint difficult_bit" 129 | $ Rollback $ Just $ Name Nothing "difficult_bit" 130 | 131 | 132 | {- 133 | 19 Session management 134 | 135 | 19.1 136 | 137 | ::= 138 | SET SESSION CHARACTERISTICS AS 139 | 140 | ::= 141 | [ { }... ] 142 | 143 | ::= 144 | 145 | 146 | ::= 147 | TRANSACTION [ { }... ] 148 | 149 | 19.2 150 | 151 | ::= 152 | SET SESSION AUTHORIZATION 153 | 154 | 19.3 155 | 156 | ::= 157 | SET ROLE 158 | 159 | ::= 160 | 161 | | NONE 162 | 163 | 19.4 164 | 165 | ::= 166 | SET TIME ZONE 167 | 168 | ::= 169 | 170 | | LOCAL 171 | 172 | 19.5 173 | 174 | ::= 175 | SET 176 | 177 | ::= 178 | CATALOG 179 | 180 | 19.6 181 | 182 | ::= 183 | SET 184 | 185 | ::= 186 | SCHEMA 187 | 188 | 19.7 189 | 190 | ::= 191 | SET 192 | 193 | ::= 194 | NAMES 195 | 196 | 19.8 197 | 198 | ::= 199 | SET 200 | 201 | ::= 202 | PATH 203 | 204 | 19.9 205 | 206 | ::= 207 | SET 208 | 209 | ::= 210 | DEFAULT TRANSFORM GROUP 211 | | TRANSFORM GROUP FOR TYPE 212 | 213 | 19.10 214 | 215 | ::= 216 | SET COLLATION [ FOR ] 217 | | SET NO COLLATION [ FOR ] 218 | 219 | ::= 220 | 221 | -} 222 | 223 | ] 224 | 225 | s :: HasCallStack => Text -> Statement -> TestItem 226 | s src ast = testStatement ansi2011 src ast 227 | -------------------------------------------------------------------------------- /website/index.md: -------------------------------------------------------------------------------- 1 | 2 | # Overview 3 | 4 | A parser for SQL in Haskell. Also includes a pretty printer which 5 | formats SQL. 6 | 7 | This is the documentation for version 0.8.0. Documentation for other 8 | versions is available here: 9 | . 10 | 11 | Status: usable for parsing a substantial amount of SQL. Adding support 12 | for new SQL is easy. Expect a little bit of churn on the AST types 13 | when support for new SQL features is added. 14 | 15 | This version is tested with GHC 9.10.1, 9.8.2, 9.6.6. 16 | 17 | # Examples 18 | 19 | Parse a SQL statement: 20 | 21 | ~~~~{.haskell sb-session='cabal repl --repl-options=-XOverloadedStrings' sb-prompt='ghci> ' sb-no-initial-text=} 22 | ghci> import Language.SQL.SimpleSQL.Parse 23 | ghci> import qualified Data.Text as T 24 | ghci> either (T.unpack . 
prettyError) show $ parseStatement ansi2011 "" Nothing "select a + b * c" 25 | "SelectStatement (Select {qeSetQuantifier = SQDefault, qeSelectList = [(BinOp (Iden [Name Nothing \"a\"]) [Name Nothing \"+\"] (BinOp (Iden [Name Nothing \"b\"]) [Name Nothing \"*\"] (Iden [Name Nothing \"c\"])),Nothing)], qeFrom = [], qeWhere = Nothing, qeGroupBy = [], qeHaving = Nothing, qeOrderBy = [], qeOffset = Nothing, qeFetchFirst = Nothing})" 26 | ~~~~ 27 | 28 | The result printed readably: 29 | 30 | ~~~~{.haskell sb-run='cabal run -fparserexe SimpleSQLParserTool -- parse -c "select a + b * c"' sb-cwd='..'} 31 | [ SelectStatement 32 | Select 33 | { qeSetQuantifier = SQDefault 34 | , qeSelectList = 35 | [ ( BinOp 36 | (Iden [ Name Nothing "a" ]) 37 | [ Name Nothing "+" ] 38 | (BinOp 39 | (Iden [ Name Nothing "b" ]) 40 | [ Name Nothing "*" ] 41 | (Iden [ Name Nothing "c" ])) 42 | , Nothing 43 | ) 44 | ] 45 | , qeFrom = [] 46 | , qeWhere = Nothing 47 | , qeGroupBy = [] 48 | , qeHaving = Nothing 49 | , qeOrderBy = [] 50 | , qeOffset = Nothing 51 | , qeFetchFirst = Nothing 52 | } 53 | ] 54 | ~~~~ 55 | 56 | Formatting SQL, TPC-H query 21: 57 | 58 | ~~~~{.sql sb-file='tpch21.sql'} 59 | select 60 | s_name, 61 | count(*) as numwait 62 | from 63 | supplier, 64 | lineitem l1, 65 | orders, 66 | nation 67 | where 68 | s_suppkey = l1.l_suppkey 69 | and o_orderkey = l1.l_orderkey 70 | and o_orderstatus = 'F' 71 | and l1.l_receiptdate > l1.l_commitdate 72 | and exists ( 73 | select 74 | * 75 | from 76 | lineitem l2 77 | where 78 | l2.l_orderkey = l1.l_orderkey 79 | and l2.l_suppkey <> l1.l_suppkey 80 | ) 81 | and not exists ( 82 | select 83 | * 84 | from 85 | lineitem l3 86 | where 87 | l3.l_orderkey = l1.l_orderkey 88 | and l3.l_suppkey <> l1.l_suppkey 89 | and l3.l_receiptdate > l3.l_commitdate 90 | ) 91 | and s_nationkey = n_nationkey 92 | and n_name = 'INDIA' 93 | group by 94 | s_name 95 | order by 96 | numwait desc, 97 | s_name 98 | fetch first 100 rows only; 99 | ~~~~ 100 | 101 | Output from the simple-sql-parser pretty printer: 102 | 103 | ~~~~{.haskell sb-run='cabal run -fparserexe SimpleSQLParserTool -- format website/tpch21.sql' sb-cwd='..'} 104 | select s_name, count(*) as numwait 105 | from supplier, 106 | lineitem as l1, 107 | orders, 108 | nation 109 | where s_suppkey = l1.l_suppkey 110 | and o_orderkey = l1.l_orderkey 111 | and o_orderstatus = 'F' 112 | and l1.l_receiptdate > l1.l_commitdate 113 | and exists (select * 114 | from lineitem as l2 115 | where l2.l_orderkey = l1.l_orderkey 116 | and l2.l_suppkey <> l1.l_suppkey) 117 | and not exists (select * 118 | from lineitem as l3 119 | where l3.l_orderkey = l1.l_orderkey 120 | and l3.l_suppkey <> l1.l_suppkey 121 | and l3.l_receiptdate > l3.l_commitdate) 122 | and s_nationkey = n_nationkey 123 | and n_name = 'INDIA' 124 | group by s_name 125 | order by numwait desc, s_name 126 | fetch first 100 rows only; 127 | 128 | ~~~~ 129 | 130 | # Supported SQL overview 131 | 132 | * query expressions 133 | * select lists 134 | * from clause 135 | * where clause 136 | * group by clause 137 | * having clause 138 | * order by clause 139 | * offset and fetch 140 | * set operators 141 | * common table expressions 142 | * wide range of scalar expressions 143 | * DDL (ansi dialect) 144 | * create, drop schema 145 | * create, alter, drop table 146 | * create, drop view 147 | * create, alter, drop domain 148 | * create, drop assertion 149 | * create, alter, drop sequence 150 | * non-query DML 151 | * delete 152 | * truncate 153 | * insert 154 | * update 155 | * Access 
control 156 | * grant, revoke - permissions and roles 157 | * create, drop role 158 | * Transaction management 159 | * begin, commit, rollback, savepoints 160 | 161 | See the [supported_sql.html](supported_sql.html) page for details on the supported SQL. 162 | 163 | Here is all the [test_cases.html](test_cases.html) rendered in a webpage so you can get 164 | an idea of what it supports, and what various instances of SQL parse to. 165 | 166 | # Installation 167 | 168 | This package is on hackage, use it in the usual way. You can install 169 | the SimpleSQLParserTool demo exe using: 170 | 171 | ~~~~ 172 | cabal install -fparserexe simple-sql-parser 173 | ~~~~ 174 | 175 | # Reporting bugs 176 | 177 | Please report bugs here: 178 | 179 | A good bug report (or feature request) should have an example of the 180 | SQL which is failing. You can expect bugs to get fixed. 181 | 182 | Feature requests are welcome, but be aware that there is no-one 183 | generally available to work on these, so you should either make a pull 184 | request, or find someone willing to implement the features and make a 185 | pull request. 186 | 187 | Bug reports of confusing or poor parse errors are also encouraged. 188 | 189 | There is a related tutorial on implementing a SQL parser here: 190 | (TODO: this is out of 191 | date, in the process of being updated) 192 | 193 | # Modifying the library 194 | 195 | Get the latest development version: 196 | 197 | ~~~~ 198 | git clone https://github.com/JakeWheat/simple-sql-parser.git 199 | cd simple-sql-parser 200 | cabal build 201 | ~~~~ 202 | 203 | You can run the tests using cabal: 204 | 205 | ~~~~ 206 | cabal test 207 | ~~~~ 208 | 209 | Or use the makefile target 210 | 211 | ~~~~ 212 | make test 213 | ~~~~ 214 | 215 | To skip some of the slow lexer tests, which you usually only need to 216 | run before each commit, use: 217 | 218 | ~~~~ 219 | make fast-test 220 | ~~~~ 221 | 222 | When you add support for new syntax: add some tests. If you modify or 223 | fix something, and it doesn't have tests, add some. If the syntax 224 | isn't in ANSI SQL, guard it behind a dialect flag. If you add 225 | support for something from a new dialect, add that dialect. 226 | 227 | Check all the tests still pass, then send a pull request on Github. 228 | 229 | # Links 230 | 231 | * Haddock: [haddock/index.html](haddock/index.html) 232 | * Supported SQL: [supported_sql.html](supported_sql.html) 233 | * Test cases: [test_cases.html](test_cases.html) 234 | * Homepage: 235 | * Hackage: 236 | * Source repository: 237 | * Bug tracker: 238 | * Changes: 239 | * Other versions: 240 | * Contact: jakewheat@tutanota.com 241 | 242 | The simple-sql-parser is a lot less simple than it used to be. If you 243 | just need to parse much simpler SQL than this, or want to start with a 244 | simpler parser and modify it slightly, you could also look at the 245 | basic query parser in the intro_to_parsing project, the code is here: 246 | 247 | (TODO: this is out of date, in the process of being updated). 248 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/QueryExprComponents.hs: -------------------------------------------------------------------------------- 1 | 2 | {- 3 | These are the tests for the query expression components apart from the 4 | table refs which are in a separate file. 5 | 6 | 7 | These are a few misc tests which don't fit anywhere else. 
8 | -} 9 | 10 | {-# LANGUAGE OverloadedStrings #-} 11 | module Language.SQL.SimpleSQL.QueryExprComponents (queryExprComponentTests) where 12 | 13 | import Language.SQL.SimpleSQL.TestTypes 14 | import Language.SQL.SimpleSQL.Syntax 15 | import Language.SQL.SimpleSQL.TestRunners 16 | import Data.Text (Text) 17 | 18 | queryExprComponentTests :: TestItem 19 | queryExprComponentTests = Group "queryExprComponentTests" 20 | [duplicates 21 | ,selectLists 22 | ,whereClause 23 | ,having 24 | ,orderBy 25 | ,offsetFetch 26 | ,combos 27 | ,withQueries 28 | ,values 29 | ,tables 30 | ] 31 | 32 | 33 | 34 | duplicates :: TestItem 35 | duplicates = Group "duplicates" 36 | [q "select a from t" $ ms SQDefault 37 | ,q "select all a from t" $ ms All 38 | ,q "select distinct a from t" $ ms Distinct 39 | ] 40 | where 41 | ms d = toQueryExpr $ makeSelect 42 | {msSetQuantifier = d 43 | ,msSelectList = [(Iden [Name Nothing "a"],Nothing)] 44 | ,msFrom = [TRSimple [Name Nothing "t"]]} 45 | 46 | selectLists :: TestItem 47 | selectLists = Group "selectLists" 48 | [q "select 1" 49 | $ toQueryExpr $ makeSelect {msSelectList = [(NumLit "1",Nothing)]} 50 | 51 | ,q "select a" 52 | $ toQueryExpr $ makeSelect {msSelectList = [(Iden [Name Nothing "a"],Nothing)]} 53 | 54 | ,q "select a,b" 55 | $ toQueryExpr $ makeSelect {msSelectList = [(Iden [Name Nothing "a"],Nothing) 56 | ,(Iden [Name Nothing "b"],Nothing)]} 57 | 58 | ,q "select 1+2,3+4" 59 | $ toQueryExpr $ makeSelect {msSelectList = 60 | [(BinOp (NumLit "1") [Name Nothing "+"] (NumLit "2"),Nothing) 61 | ,(BinOp (NumLit "3") [Name Nothing "+"] (NumLit "4"),Nothing)]} 62 | 63 | ,q "select a as a, /*comment*/ b as b" 64 | $ toQueryExpr $ makeSelect {msSelectList = [(Iden [Name Nothing "a"], Just $ Name Nothing "a") 65 | ,(Iden [Name Nothing "b"], Just $ Name Nothing "b")]} 66 | 67 | ,q "select a a, b b" 68 | $ toQueryExpr $ makeSelect {msSelectList = [(Iden [Name Nothing "a"], Just $ Name Nothing "a") 69 | ,(Iden [Name Nothing "b"], Just $ Name Nothing "b")]} 70 | 71 | ,q "select a + b * c" 72 | $ toQueryExpr $ makeSelect {msSelectList = 73 | [(BinOp (Iden [Name Nothing "a"]) [Name Nothing "+"] 74 | (BinOp (Iden [Name Nothing "b"]) [Name Nothing "*"] (Iden [Name Nothing "c"])) 75 | ,Nothing)]} 76 | ,q "select * from t" 77 | $ toQueryExpr $ makeSelect {msSelectList = [(Star,Nothing)] 78 | ,msFrom = [TRSimple [Name Nothing "t"]]} 79 | 80 | ,q "select t.* from t" 81 | $ toQueryExpr $ makeSelect {msSelectList = [(QStar [Name Nothing "t"],Nothing)] 82 | ,msFrom = [TRSimple [Name Nothing "t"]]} 83 | 84 | ,q "select t.*, a as b, u.* from t" 85 | $ toQueryExpr $ makeSelect 86 | {msSelectList = 87 | [(QStar [Name Nothing "t"],Nothing) 88 | ,(Iden [Name Nothing "a"], Just $ Name Nothing "b") 89 | ,(QStar [Name Nothing "u"],Nothing)] 90 | ,msFrom = [TRSimple [Name Nothing "t"]]} 91 | 92 | ] 93 | 94 | whereClause :: TestItem 95 | whereClause = Group "whereClause" 96 | [q "select a from t where a = 5" 97 | $ toQueryExpr $ makeSelect {msSelectList = [(Iden [Name Nothing "a"],Nothing)] 98 | ,msFrom = [TRSimple [Name Nothing "t"]] 99 | ,msWhere = Just $ BinOp (Iden [Name Nothing "a"]) [Name Nothing "="] (NumLit "5")} 100 | ] 101 | 102 | having :: TestItem 103 | having = Group "having" 104 | [q "select a,sum(b) from t group by a having sum(b) > 5" 105 | $ toQueryExpr $ makeSelect {msSelectList = [(Iden [Name Nothing "a"],Nothing) 106 | ,(App [Name Nothing "sum"] [Iden [Name Nothing "b"]],Nothing)] 107 | ,msFrom = [TRSimple [Name Nothing "t"]] 108 | ,msGroupBy = [SimpleGroup $ Iden [Name Nothing 
"a"]] 109 | ,msHaving = Just $ BinOp (App [Name Nothing "sum"] [Iden [Name Nothing "b"]]) 110 | [Name Nothing ">"] (NumLit "5") 111 | } 112 | ] 113 | 114 | orderBy :: TestItem 115 | orderBy = Group "orderBy" 116 | [q "select a from t order by a" 117 | $ ms [SortSpec (Iden [Name Nothing "a"]) DirDefault NullsOrderDefault] 118 | 119 | ,q "select a from t order by a, b" 120 | $ ms [SortSpec (Iden [Name Nothing "a"]) DirDefault NullsOrderDefault 121 | ,SortSpec (Iden [Name Nothing "b"]) DirDefault NullsOrderDefault] 122 | 123 | ,q "select a from t order by a asc" 124 | $ ms [SortSpec (Iden [Name Nothing "a"]) Asc NullsOrderDefault] 125 | 126 | ,q "select a from t order by a desc, b desc" 127 | $ ms [SortSpec (Iden [Name Nothing "a"]) Desc NullsOrderDefault 128 | ,SortSpec (Iden [Name Nothing "b"]) Desc NullsOrderDefault] 129 | 130 | ,q "select a from t order by a desc nulls first, b desc nulls last" 131 | $ ms [SortSpec (Iden [Name Nothing "a"]) Desc NullsFirst 132 | ,SortSpec (Iden [Name Nothing "b"]) Desc NullsLast] 133 | 134 | ] 135 | where 136 | ms o = toQueryExpr $ makeSelect {msSelectList = [(Iden [Name Nothing "a"],Nothing)] 137 | ,msFrom = [TRSimple [Name Nothing "t"]] 138 | ,msOrderBy = o} 139 | 140 | offsetFetch :: TestItem 141 | offsetFetch = Group "offsetFetch" 142 | [-- ansi standard 143 | q "select a from t offset 5 rows fetch next 10 rows only" 144 | $ ms (Just $ NumLit "5") (Just $ NumLit "10") 145 | ,q "select a from t offset 5 rows;" 146 | $ ms (Just $ NumLit "5") Nothing 147 | ,q "select a from t fetch next 10 row only;" 148 | $ ms Nothing (Just $ NumLit "10") 149 | ,q "select a from t offset 5 row fetch first 10 row only" 150 | $ ms (Just $ NumLit "5") (Just $ NumLit "10") 151 | -- postgres: disabled, will add back when postgres 152 | -- dialect is added 153 | --,q "select a from t limit 10 offset 5" 154 | -- $ ms (Just $ NumLit "5") (Just $ NumLit "10")) 155 | ] 156 | where 157 | ms o l = toQueryExpr $ makeSelect 158 | {msSelectList = [(Iden [Name Nothing "a"],Nothing)] 159 | ,msFrom = [TRSimple [Name Nothing "t"]] 160 | ,msOffset = o 161 | ,msFetchFirst = l} 162 | 163 | combos :: TestItem 164 | combos = Group "combos" 165 | [q "select a from t union select b from u" 166 | $ QueryExprSetOp mst Union SQDefault Respectively msu 167 | 168 | ,q "select a from t intersect select b from u" 169 | $ QueryExprSetOp mst Intersect SQDefault Respectively msu 170 | 171 | ,q "select a from t except all select b from u" 172 | $ QueryExprSetOp mst Except All Respectively msu 173 | 174 | ,q "select a from t union distinct corresponding \ 175 | \select b from u" 176 | $ QueryExprSetOp mst Union Distinct Corresponding msu 177 | 178 | ,q "select a from t union select a from t union select a from t" 179 | $ QueryExprSetOp (QueryExprSetOp mst Union SQDefault Respectively mst) 180 | Union SQDefault Respectively mst 181 | ] 182 | where 183 | mst = toQueryExpr $ makeSelect 184 | {msSelectList = [(Iden [Name Nothing "a"],Nothing)] 185 | ,msFrom = [TRSimple [Name Nothing "t"]]} 186 | msu = toQueryExpr $ makeSelect 187 | {msSelectList = [(Iden [Name Nothing "b"],Nothing)] 188 | ,msFrom = [TRSimple [Name Nothing "u"]]} 189 | 190 | 191 | withQueries :: TestItem 192 | withQueries = Group "with queries" 193 | [q "with u as (select a from t) select a from u" 194 | $ With False [(Alias (Name Nothing "u") Nothing, ms1)] ms2 195 | 196 | ,q "with u(b) as (select a from t) select a from u" 197 | $ With False [(Alias (Name Nothing "u") (Just [Name Nothing "b"]), ms1)] ms2 198 | 199 | ,q "with x as (select a 
from t),\n\ 200 | \ u as (select a from x)\n\ 201 | \select a from u" 202 | $ With False [(Alias (Name Nothing "x") Nothing, ms1), (Alias (Name Nothing "u") Nothing,ms3)] ms2 203 | 204 | ,q "with recursive u as (select a from t) select a from u" 205 | $ With True [(Alias (Name Nothing "u") Nothing, ms1)] ms2 206 | ] 207 | where 208 | ms c t = toQueryExpr $ makeSelect 209 | {msSelectList = [(Iden [Name Nothing c],Nothing)] 210 | ,msFrom = [TRSimple [Name Nothing t]]} 211 | ms1 = ms "a" "t" 212 | ms2 = ms "a" "u" 213 | ms3 = ms "a" "x" 214 | 215 | values :: TestItem 216 | values = Group "values" 217 | [q "values (1,2),(3,4)" 218 | $ Values [[NumLit "1", NumLit "2"] 219 | ,[NumLit "3", NumLit "4"]] 220 | ] 221 | 222 | tables :: TestItem 223 | tables = Group "tables" 224 | [q "table tbl" $ Table [Name Nothing "tbl"] 225 | ] 226 | 227 | q :: HasCallStack => Text -> QueryExpr -> TestItem 228 | q src ast = testQueryExpr ansi2011 src ast 229 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/GroupBy.hs: -------------------------------------------------------------------------------- 1 | 2 | -- Here are the tests for the group by component of query exprs 3 | 4 | {-# LANGUAGE OverloadedStrings #-} 5 | module Language.SQL.SimpleSQL.GroupBy (groupByTests) where 6 | 7 | import Language.SQL.SimpleSQL.TestTypes 8 | import Language.SQL.SimpleSQL.Syntax 9 | import Language.SQL.SimpleSQL.TestRunners 10 | import Data.Text (Text) 11 | 12 | 13 | groupByTests :: TestItem 14 | groupByTests = Group "groupByTests" 15 | [simpleGroupBy 16 | ,newGroupBy 17 | ,randomGroupBy 18 | ] 19 | 20 | q :: HasCallStack => Text -> QueryExpr -> TestItem 21 | q src a = testQueryExpr ansi2011 src a 22 | 23 | p :: HasCallStack => Text -> TestItem 24 | p src = testParseQueryExpr ansi2011 src 25 | 26 | 27 | 28 | simpleGroupBy :: TestItem 29 | simpleGroupBy = Group "simpleGroupBy" 30 | [q "select a,sum(b) from t group by a" 31 | $ toQueryExpr $ makeSelect {msSelectList = [(Iden [Name Nothing "a"],Nothing) 32 | ,(App [Name Nothing "sum"] [Iden [Name Nothing "b"]],Nothing)] 33 | ,msFrom = [TRSimple [Name Nothing "t"]] 34 | ,msGroupBy = [SimpleGroup $ Iden [Name Nothing "a"]] 35 | } 36 | 37 | ,q "select a,b,sum(c) from t group by a,b" 38 | $ toQueryExpr $ makeSelect {msSelectList = [(Iden [Name Nothing "a"],Nothing) 39 | ,(Iden [Name Nothing "b"],Nothing) 40 | ,(App [Name Nothing "sum"] [Iden [Name Nothing "c"]],Nothing)] 41 | ,msFrom = [TRSimple [Name Nothing "t"]] 42 | ,msGroupBy = [SimpleGroup $ Iden [Name Nothing "a"] 43 | ,SimpleGroup $ Iden [Name Nothing "b"]] 44 | } 45 | ] 46 | 47 | {- 48 | test the new group by (), grouping sets, cube and rollup syntax (not 49 | sure which sql version they were introduced, 1999 or 2003 I think). 
50 | -} 51 | 52 | newGroupBy :: TestItem 53 | newGroupBy = Group "newGroupBy" 54 | [q "select * from t group by ()" $ ms [GroupingParens []] 55 | ,q "select * from t group by grouping sets ((), (a))" 56 | $ ms [GroupingSets [GroupingParens [] 57 | ,GroupingParens [SimpleGroup $ Iden [Name Nothing "a"]]]] 58 | ,q "select * from t group by cube(a,b)" 59 | $ ms [Cube [SimpleGroup $ Iden [Name Nothing "a"], SimpleGroup $ Iden [Name Nothing "b"]]] 60 | ,q "select * from t group by rollup(a,b)" 61 | $ ms [Rollup [SimpleGroup $ Iden [Name Nothing "a"], SimpleGroup $ Iden [Name Nothing "b"]]] 62 | ] 63 | where 64 | ms g = toQueryExpr $ makeSelect {msSelectList = [(Star,Nothing)] 65 | ,msFrom = [TRSimple [Name Nothing "t"]] 66 | ,msGroupBy = g} 67 | 68 | randomGroupBy :: TestItem 69 | randomGroupBy = Group "randomGroupBy" 70 | [p "select * from t GROUP BY a" 71 | ,p "select * from t GROUP BY GROUPING SETS((a))" 72 | ,p "select * from t GROUP BY a,b,c" 73 | ,p "select * from t GROUP BY GROUPING SETS((a,b,c))" 74 | ,p "select * from t GROUP BY ROLLUP(a,b)" 75 | ,p "select * from t GROUP BY GROUPING SETS((a,b),\n\ 76 | \(a),\n\ 77 | \() )" 78 | ,p "select * from t GROUP BY ROLLUP(b,a)" 79 | ,p "select * from t GROUP BY GROUPING SETS((b,a),\n\ 80 | \(b),\n\ 81 | \() )" 82 | ,p "select * from t GROUP BY CUBE(a,b,c)" 83 | ,p "select * from t GROUP BY GROUPING SETS((a,b,c),\n\ 84 | \(a,b),\n\ 85 | \(a,c),\n\ 86 | \(b,c),\n\ 87 | \(a),\n\ 88 | \(b),\n\ 89 | \(c),\n\ 90 | \() )" 91 | ,p "select * from t GROUP BY ROLLUP(Province, County, City)" 92 | ,p "select * from t GROUP BY ROLLUP(Province, (County, City))" 93 | ,p "select * from t GROUP BY ROLLUP(Province, (County, City))" 94 | ,p "select * from t GROUP BY GROUPING SETS((Province, County, City),\n\ 95 | \(Province),\n\ 96 | \() )" 97 | ,p "select * from t GROUP BY GROUPING SETS((Province, County, City),\n\ 98 | \(Province, County),\n\ 99 | \(Province),\n\ 100 | \() )" 101 | ,p "select * from t GROUP BY a, ROLLUP(b,c)" 102 | ,p "select * from t GROUP BY GROUPING SETS((a,b,c),\n\ 103 | \(a,b),\n\ 104 | \(a) )" 105 | ,p "select * from t GROUP BY a, b, ROLLUP(c,d)" 106 | ,p "select * from t GROUP BY GROUPING SETS((a,b,c,d),\n\ 107 | \(a,b,c),\n\ 108 | \(a,b) )" 109 | ,p "select * from t GROUP BY ROLLUP(a), ROLLUP(b,c)" 110 | ,p "select * from t GROUP BY GROUPING SETS((a,b,c),\n\ 111 | \(a,b),\n\ 112 | \(a),\n\ 113 | \(b,c),\n\ 114 | \(b),\n\ 115 | \() )" 116 | ,p "select * from t GROUP BY ROLLUP(a), CUBE(b,c)" 117 | ,p "select * from t GROUP BY GROUPING SETS((a,b,c),\n\ 118 | \(a,b),\n\ 119 | \(a,c),\n\ 120 | \(a),\n\ 121 | \(b,c),\n\ 122 | \(b),\n\ 123 | \(c),\n\ 124 | \() )" 125 | ,p "select * from t GROUP BY CUBE(a,b), ROLLUP(c,d)" 126 | ,p "select * from t GROUP BY GROUPING SETS((a,b,c,d),\n\ 127 | \(a,b,c),\n\ 128 | \(a,b),\n\ 129 | \(a,c,d),\n\ 130 | \(a,c),\n\ 131 | \(a),\n\ 132 | \(b,c,d),\n\ 133 | \(b,c),\n\ 134 | \(b),\n\ 135 | \(c,d),\n\ 136 | \(c),\n\ 137 | \() )" 138 | ,p "select * from t GROUP BY a, ROLLUP(a,b)" 139 | ,p "select * from t GROUP BY GROUPING SETS((a,b),\n\ 140 | \(a) )" 141 | ,p "select * from t GROUP BY Region,\n\ 142 | \ROLLUP(Sales_Person, WEEK(Sales_Date)),\n\ 143 | \CUBE(YEAR(Sales_Date), MONTH (Sales_Date))" 144 | ,p "select * from t GROUP BY ROLLUP (Region, Sales_Person, WEEK(Sales_Date),\n\ 145 | \YEAR(Sales_Date), MONTH(Sales_Date) )" 146 | 147 | ,p "SELECT WEEK(SALES_DATE) AS WEEK,\n\ 148 | \DAYOFWEEK(SALES_DATE) AS DAY_WEEK,\n\ 149 | \SALES_PERSON, SUM(SALES) AS UNITS_SOLD\n\ 150 | \FROM SALES\n\ 151 | \WHERE 
WEEK(SALES_DATE) = 13\n\ 152 | \GROUP BY WEEK(SALES_DATE), DAYOFWEEK(SALES_DATE), SALES_PERSON\n\ 153 | \ORDER BY WEEK, DAY_WEEK, SALES_PERSON" 154 | 155 | ,p "SELECT WEEK(SALES_DATE) AS WEEK,\n\ 156 | \DAYOFWEEK(SALES_DATE) AS DAY_WEEK,\n\ 157 | \SALES_PERSON, SUM(SALES) AS UNITS_SOLD\n\ 158 | \FROM SALES\n\ 159 | \WHERE WEEK(SALES_DATE) = 13\n\ 160 | \GROUP BY GROUPING SETS ( (WEEK(SALES_DATE), SALES_PERSON),\n\ 161 | \(DAYOFWEEK(SALES_DATE), SALES_PERSON))\n\ 162 | \ORDER BY WEEK, DAY_WEEK, SALES_PERSON" 163 | 164 | ,p "SELECT WEEK(SALES_DATE) AS WEEK,\n\ 165 | \DAYOFWEEK(SALES_DATE) AS DAY_WEEK,\n\ 166 | \SALES_PERSON, SUM(SALES) AS UNITS_SOLD\n\ 167 | \FROM SALES\n\ 168 | \WHERE WEEK(SALES_DATE) = 13\n\ 169 | \GROUP BY ROLLUP ( WEEK(SALES_DATE), DAYOFWEEK(SALES_DATE), SALES_PERSON )\n\ 170 | \ORDER BY WEEK, DAY_WEEK, SALES_PERSON" 171 | 172 | ,p "SELECT WEEK(SALES_DATE) AS WEEK,\n\ 173 | \DAYOFWEEK(SALES_DATE) AS DAY_WEEK,\n\ 174 | \SALES_PERSON, SUM(SALES) AS UNITS_SOLD\n\ 175 | \FROM SALES\n\ 176 | \WHERE WEEK(SALES_DATE) = 13\n\ 177 | \GROUP BY CUBE ( WEEK(SALES_DATE), DAYOFWEEK(SALES_DATE), SALES_PERSON )\n\ 178 | \ORDER BY WEEK, DAY_WEEK, SALES_PERSON" 179 | 180 | ,p "SELECT SALES_PERSON,\n\ 181 | \MONTH(SALES_DATE) AS MONTH,\n\ 182 | \SUM(SALES) AS UNITS_SOLD\n\ 183 | \FROM SALES\n\ 184 | \GROUP BY GROUPING SETS ( (SALES_PERSON, MONTH(SALES_DATE)),\n\ 185 | \()\n\ 186 | \)\n\ 187 | \ORDER BY SALES_PERSON, MONTH" 188 | 189 | ,p "SELECT WEEK(SALES_DATE) AS WEEK,\n\ 190 | \DAYOFWEEK(SALES_DATE) AS DAY_WEEK,\n\ 191 | \SUM(SALES) AS UNITS_SOLD\n\ 192 | \FROM SALES\n\ 193 | \GROUP BY ROLLUP ( WEEK(SALES_DATE), DAYOFWEEK(SALES_DATE) )\n\ 194 | \ORDER BY WEEK, DAY_WEEK" 195 | 196 | ,p "SELECT MONTH(SALES_DATE) AS MONTH,\n\ 197 | \REGION,\n\ 198 | \SUM(SALES) AS UNITS_SOLD\n\ 199 | \FROM SALES\n\ 200 | \GROUP BY ROLLUP ( MONTH(SALES_DATE), REGION )\n\ 201 | \ORDER BY MONTH, REGION" 202 | 203 | ,p "SELECT WEEK(SALES_DATE) AS WEEK,\n\ 204 | \DAYOFWEEK(SALES_DATE) AS DAY_WEEK,\n\ 205 | \MONTH(SALES_DATE) AS MONTH,\n\ 206 | \REGION,\n\ 207 | \SUM(SALES) AS UNITS_SOLD\n\ 208 | \FROM SALES\n\ 209 | \GROUP BY GROUPING SETS ( ROLLUP( WEEK(SALES_DATE), DAYOFWEEK(SALES_DATE) ),\n\ 210 | \ROLLUP( MONTH(SALES_DATE), REGION ) )\n\ 211 | \ORDER BY WEEK, DAY_WEEK, MONTH, REGION" 212 | 213 | ,p "SELECT R1, R2,\n\ 214 | \WEEK(SALES_DATE) AS WEEK,\n\ 215 | \DAYOFWEEK(SALES_DATE) AS DAY_WEEK,\n\ 216 | \MONTH(SALES_DATE) AS MONTH,\n\ 217 | \REGION, SUM(SALES) AS UNITS_SOLD\n\ 218 | \FROM SALES,(VALUES('GROUP 1','GROUP 2')) AS X(R1,R2)\n\ 219 | \GROUP BY GROUPING SETS ((R1, ROLLUP(WEEK(SALES_DATE),\n\ 220 | \DAYOFWEEK(SALES_DATE))),\n\ 221 | \(R2,ROLLUP( MONTH(SALES_DATE), REGION ) ))\n\ 222 | \ORDER BY WEEK, DAY_WEEK, MONTH, REGION" 223 | 224 | {-,p "SELECT COALESCE(R1,R2) AS GROUP,\n\ 225 | \WEEK(SALES_DATE) AS WEEK,\n\ 226 | \DAYOFWEEK(SALES_DATE) AS DAY_WEEK,\n\ 227 | \MONTH(SALES_DATE) AS MONTH,\n\ 228 | \REGION, SUM(SALES) AS UNITS_SOLD\n\ 229 | \FROM SALES,(VALUES('GROUP 1','GROUP 2')) AS X(R1,R2)\n\ 230 | \GROUP BY GROUPING SETS ((R1, ROLLUP(WEEK(SALES_DATE),\n\ 231 | \DAYOFWEEK(SALES_DATE))),\n\ 232 | \(R2,ROLLUP( MONTH(SALES_DATE), REGION ) ))\n\ 233 | \ORDER BY GROUP, WEEK, DAY_WEEK, MONTH, REGION"-} 234 | -- as group - needs more subtle keyword blacklisting 235 | 236 | -- decimal as a function not allowed due to the reserved keyword 237 | -- handling: todo, review if this is ansi standard function or 238 | -- if there are places where reserved keywords can still be used 239 | ,p "SELECT 
MONTH(SALES_DATE) AS MONTH,\n\ 240 | \REGION,\n\ 241 | \SUM(SALES) AS UNITS_SOLD,\n\ 242 | \MAX(SALES) AS BEST_SALE,\n\ 243 | \CAST(ROUND(AVG(DECIMALx(SALES)),2) AS DECIMAL(5,2)) AS AVG_UNITS_SOLD\n\ 244 | \FROM SALES\n\ 245 | \GROUP BY CUBE(MONTH(SALES_DATE),REGION)\n\ 246 | \ORDER BY MONTH, REGION" 247 | 248 | ] 249 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/SQL2011AccessControl.hs: -------------------------------------------------------------------------------- 1 | 2 | {- 3 | Section 12 in Foundation 4 | 5 | grant, etc 6 | -} 7 | 8 | 9 | {-# LANGUAGE OverloadedStrings #-} 10 | module Language.SQL.SimpleSQL.SQL2011AccessControl (sql2011AccessControlTests) where 11 | 12 | import Language.SQL.SimpleSQL.TestTypes 13 | import Language.SQL.SimpleSQL.Syntax 14 | import Language.SQL.SimpleSQL.TestRunners 15 | import Data.Text (Text) 16 | 17 | sql2011AccessControlTests :: TestItem 18 | sql2011AccessControlTests = Group "sql 2011 access control tests" [ 19 | 20 | {- 21 | 12 Access control 22 | 23 | 12.1 24 | 25 | ::= 26 | 27 | | 28 | 29 | 12.2 30 | 31 | ::= 32 | GRANT TO [ { }... ] 33 | [ WITH HIERARCHY OPTION ] 34 | [ WITH GRANT OPTION ] 35 | [ GRANTED BY ] 36 | 37 | 12.3 38 | ::= 39 | ON 40 | 41 | ::= 42 | [ TABLE ] 43 | | DOMAIN 44 | | COLLATION 45 | | CHARACTER SET 46 | | TRANSLATION 47 | | TYPE 48 | | SEQUENCE 49 | | 50 | 51 | ::= 52 | ALL PRIVILEGES 53 | | [ { }... ] 54 | 55 | ::= 56 | SELECT 57 | | SELECT 58 | | SELECT 59 | | DELETE 60 | | INSERT [ ] 61 | | UPDATE [ ] 62 | | REFERENCES [ ] 63 | | USAGE 64 | | TRIGGER 65 | | UNDER 66 | | EXECUTE 67 | 68 | ::= 69 | [ { }... ] 70 | 71 | ::= 72 | 73 | 74 | ::= 75 | PUBLIC 76 | | 77 | 78 | ::= 79 | CURRENT_USER 80 | | CURRENT_ROLE 81 | -} 82 | 83 | s "grant all privileges on tbl1 to role1" 84 | $ GrantPrivilege [PrivAll] 85 | (PrivTable [Name Nothing "tbl1"]) 86 | [Name Nothing "role1"] WithoutGrantOption 87 | 88 | 89 | ,s "grant all privileges on tbl1 to role1,role2" 90 | $ GrantPrivilege [PrivAll] 91 | (PrivTable [Name Nothing "tbl1"]) 92 | [Name Nothing "role1",Name Nothing "role2"] WithoutGrantOption 93 | 94 | ,s "grant all privileges on tbl1 to role1 with grant option" 95 | $ GrantPrivilege [PrivAll] 96 | (PrivTable [Name Nothing "tbl1"]) 97 | [Name Nothing "role1"] WithGrantOption 98 | 99 | ,s "grant all privileges on table tbl1 to role1" 100 | $ GrantPrivilege [PrivAll] 101 | (PrivTable [Name Nothing "tbl1"]) 102 | [Name Nothing "role1"] WithoutGrantOption 103 | 104 | ,s "grant all privileges on domain mydom to role1" 105 | $ GrantPrivilege [PrivAll] 106 | (PrivDomain [Name Nothing "mydom"]) 107 | [Name Nothing "role1"] WithoutGrantOption 108 | 109 | ,s "grant all privileges on type t1 to role1" 110 | $ GrantPrivilege [PrivAll] 111 | (PrivType [Name Nothing "t1"]) 112 | [Name Nothing "role1"] WithoutGrantOption 113 | 114 | ,s "grant all privileges on sequence s1 to role1" 115 | $ GrantPrivilege [PrivAll] 116 | (PrivSequence [Name Nothing "s1"]) 117 | [Name Nothing "role1"] WithoutGrantOption 118 | 119 | ,s "grant select on table t1 to role1" 120 | $ GrantPrivilege [PrivSelect []] 121 | (PrivTable [Name Nothing "t1"]) 122 | [Name Nothing "role1"] WithoutGrantOption 123 | 124 | ,s "grant select(a,b) on table t1 to role1" 125 | $ GrantPrivilege [PrivSelect [Name Nothing "a", Name Nothing "b"]] 126 | (PrivTable [Name Nothing "t1"]) 127 | [Name Nothing "role1"] WithoutGrantOption 128 | 129 | ,s "grant delete on table t1 to role1" 130 | $ GrantPrivilege [PrivDelete] 131 | 
(PrivTable [Name Nothing "t1"]) 132 | [Name Nothing "role1"] WithoutGrantOption 133 | 134 | ,s "grant insert on table t1 to role1" 135 | $ GrantPrivilege [PrivInsert []] 136 | (PrivTable [Name Nothing "t1"]) 137 | [Name Nothing "role1"] WithoutGrantOption 138 | 139 | ,s "grant insert(a,b) on table t1 to role1" 140 | $ GrantPrivilege [PrivInsert [Name Nothing "a", Name Nothing "b"]] 141 | (PrivTable [Name Nothing "t1"]) 142 | [Name Nothing "role1"] WithoutGrantOption 143 | 144 | ,s "grant update on table t1 to role1" 145 | $ GrantPrivilege [PrivUpdate []] 146 | (PrivTable [Name Nothing "t1"]) 147 | [Name Nothing "role1"] WithoutGrantOption 148 | 149 | ,s "grant update(a,b) on table t1 to role1" 150 | $ GrantPrivilege [PrivUpdate [Name Nothing "a", Name Nothing "b"]] 151 | (PrivTable [Name Nothing "t1"]) 152 | [Name Nothing "role1"] WithoutGrantOption 153 | 154 | ,s "grant references on table t1 to role1" 155 | $ GrantPrivilege [PrivReferences []] 156 | (PrivTable [Name Nothing "t1"]) 157 | [Name Nothing "role1"] WithoutGrantOption 158 | 159 | ,s "grant references(a,b) on table t1 to role1" 160 | $ GrantPrivilege [PrivReferences [Name Nothing "a", Name Nothing "b"]] 161 | (PrivTable [Name Nothing "t1"]) 162 | [Name Nothing "role1"] WithoutGrantOption 163 | 164 | ,s "grant usage on table t1 to role1" 165 | $ GrantPrivilege [PrivUsage] 166 | (PrivTable [Name Nothing "t1"]) 167 | [Name Nothing "role1"] WithoutGrantOption 168 | 169 | ,s "grant trigger on table t1 to role1" 170 | $ GrantPrivilege [PrivTrigger] 171 | (PrivTable [Name Nothing "t1"]) 172 | [Name Nothing "role1"] WithoutGrantOption 173 | 174 | 175 | ,s "grant execute on specific function f to role1" 176 | $ GrantPrivilege [PrivExecute] 177 | (PrivFunction [Name Nothing "f"]) 178 | [Name Nothing "role1"] WithoutGrantOption 179 | 180 | ,s "grant select,delete on table t1 to role1" 181 | $ GrantPrivilege [PrivSelect [], PrivDelete] 182 | (PrivTable [Name Nothing "t1"]) 183 | [Name Nothing "role1"] WithoutGrantOption 184 | 185 | {- 186 | skipping for now: 187 | 188 | what is 'under' action? 189 | 190 | collation, character set, translation, member thing, methods 191 | 192 | for review 193 | 194 | some pretty big things missing in the standard: 195 | 196 | schema, database 197 | 198 | functions, etc., by argument types since they can be overloaded 199 | 200 | 201 | 202 | 12.4 203 | 204 | ::= 205 | CREATE ROLE [ WITH ADMIN ] 206 | -} 207 | 208 | ,s "create role rolee" 209 | $ CreateRole (Name Nothing "rolee") 210 | 211 | 212 | {- 213 | 12.5 214 | 215 | ::= 216 | GRANT [ { }... ] 217 | TO [ { }... ] 218 | [ WITH ADMIN OPTION ] 219 | [ GRANTED BY ] 220 | 221 | ::= 222 | 223 | -} 224 | 225 | ,s "grant role1 to public" 226 | $ GrantRole [Name Nothing "role1"] [Name Nothing "public"] WithoutAdminOption 227 | 228 | ,s "grant role1,role2 to role3,role4" 229 | $ GrantRole [Name Nothing "role1",Name Nothing "role2"] 230 | [Name Nothing "role3", Name Nothing "role4"] WithoutAdminOption 231 | 232 | ,s "grant role1 to role3 with admin option" 233 | $ GrantRole [Name Nothing "role1"] [Name Nothing "role3"] WithAdminOption 234 | 235 | 236 | {- 237 | 12.6 238 | 239 | ::= 240 | DROP ROLE 241 | -} 242 | 243 | ,s "drop role rolee" 244 | $ DropRole (Name Nothing "rolee") 245 | 246 | 247 | {- 248 | 12.7 249 | 250 | ::= 251 | 252 | | 253 | 254 | ::= 255 | REVOKE [ ] 256 | FROM [ { }... 
] 257 | [ GRANTED BY ] 258 | 259 | 260 | ::= 261 | GRANT OPTION FOR 262 | | HIERARCHY OPTION FOR 263 | -} 264 | 265 | 266 | ,s "revoke select on t1 from role1" 267 | $ RevokePrivilege NoGrantOptionFor [PrivSelect []] 268 | (PrivTable [Name Nothing "t1"]) 269 | [Name Nothing "role1"] DefaultDropBehaviour 270 | 271 | ,s 272 | "revoke grant option for select on t1 from role1,role2 cascade" 273 | $ RevokePrivilege GrantOptionFor [PrivSelect []] 274 | (PrivTable [Name Nothing "t1"]) 275 | [Name Nothing "role1",Name Nothing "role2"] Cascade 276 | 277 | 278 | {- 279 | ::= 280 | REVOKE [ ADMIN OPTION FOR ] [ { }... ] 281 | FROM [ { }... ] 282 | [ GRANTED BY ] 283 | 284 | 285 | ::= 286 | 287 | -} 288 | 289 | ,s "revoke role1 from role2" 290 | $ RevokeRole NoAdminOptionFor [Name Nothing "role1"] 291 | [Name Nothing "role2"] DefaultDropBehaviour 292 | 293 | ,s "revoke role1,role2 from role3,role4" 294 | $ RevokeRole NoAdminOptionFor [Name Nothing "role1",Name Nothing "role2"] 295 | [Name Nothing "role3",Name Nothing "role4"] DefaultDropBehaviour 296 | 297 | 298 | ,s "revoke admin option for role1 from role2 cascade" 299 | $ RevokeRole AdminOptionFor [Name Nothing "role1"] [Name Nothing "role2"] Cascade 300 | 301 | ] 302 | 303 | s :: HasCallStack => Text -> Statement -> TestItem 304 | s src ast = testStatement ansi2011 src ast 305 | -------------------------------------------------------------------------------- /changelog: -------------------------------------------------------------------------------- 1 | 0.8.0 lexer has new option to output an invalid token on some kinds of 2 | parse errors 3 | switch tests to hspec 4 | improve parse error messages 5 | * and x.* changed to only parse in some expression contexts - 6 | select items and function application argument lists 7 | support sqlite 'without rowid' 8 | make types in columndefs optional 9 | allow column constraints and defaults to be in arbitrary order 10 | partially support parentheses at query expression level (some nested parens don't parse yet) 11 | 0.7.1 fix error message source quoting 12 | 0.7.0 support autoincrement for sqlite 13 | support table constraints without separating comma for sqlite 14 | switch source from literate to regular haskell 15 | use megaparsec instead of parsec 16 | use prettyprinter lib instead of pretty 17 | parsing nested block comments regressed - post a bug if you need this 18 | fixed fixity parsing of union, except and intersect (matches postgres docs now) 19 | removed the Errors module - the pretty printer function for errors is in the Parse module 20 | parses from and pretty prints to strict Text 21 | use strict Text instead of String everywhere 22 | tested with latest three main ghc releases (9.8.1, 9.6.4, and 9.4.8) and stack lts 22.5 23 | the makeSelect helper is now a distinct type, code using it will need some trivial 24 | tweaks, this is change so that code using makeSelect doesn't emit warnings 25 | overhaul website 26 | 0.6.1 added odbc handling to sqlsqerver dialect 27 | added sqlserver dialect case for convert function 28 | 0.6.0 29 | tested with ghc 8.8.1 also 30 | change the dialect handling - now a dialect is a bunch of flags 31 | plus a keyword list, and custom dialects are now feasible 32 | (still incomplete) 33 | fix parsing for a lot of things which are keywords in the standard 34 | fix bug with cte pretty printing an extra 'as', which the parser 35 | also incorrectly accepted 36 | bug fix: allow keywords that are quoted to be parsed as identifiers 37 | 38 | 0.5.0 39 | update to work with ghc 
8.6.5, also tested with 8.4.4 and 8.2.1 40 | rename some of the modules Lexer -> Lex, Parser -> Parse 41 | add a separate lexer to simplify code and speed up parsing 42 | replace SqlIndent with new tool, SimpleSqlParserTool (amazing 43 | name) which can indent, and parse and lex. 44 | experiments in new approach to dealing with fixities with separate 45 | pass after parsing 46 | dml: add support for insert, update, delete and truncate 47 | ddl: add limited support for create schema, plus drop schema 48 | create, alter and drop table with defaults and constraints 49 | create, alter and drop for domain, view, sequence 50 | create and drop for assertion 51 | access control: simple create and drop for role 52 | simple grant and revoke 53 | limited support for transaction management: start transaction, 54 | rollback, commit, savepoint 55 | fix the precedence of operators which was following the weird 56 | postgresql 9.4 and earlier precedences instead of the standard 57 | refactor the syntax for names, identifiers and strings slightly 58 | refactor the dialect support, add some support for postgresql 59 | syntax 60 | change parsing of identifiers and strings to not unescape the 61 | identifier or string text during parsing 62 | add some explicit parse failures for probably ambiguous text 63 | */ without /* (outside quoted identifier, string) will fail 64 | .,e,E following a number without whitespace always fails 65 | three symbols together fails explicitly, instead of trying to 66 | lex and giving a less good error at parse time (applies to | 67 | and : in postgres dialect) 68 | fix parsing of functions whose name is a keyword (e.g. abs) 69 | add basic support for parsing odbc syntax ({d 'literals'} {fn 70 | app(something)} and {oj t1 left outer join ... }) 71 | rename ValueExpr -> ScalarExpr (I think scalar expression is 72 | slightly less incorrect) 73 | rename CombineQueryExpr to QueryExprSetOp and CombineOp to SetOperatorName 74 | use explicit data type for sign in interval literals 75 | add comments to statement syntax (aimed at codegen) 76 | add support for oracle type size units 'char' and 'byte', example: varchar2(55 byte) 77 | updated the makefile to use cabal v2 commands 78 | fix for parsing window functions with keyword names 79 | 0.4.4 80 | tested with ghc 8.2.1 and 8.4.3 81 | 0.4.3 82 | tested with ghc 8.0.2 and 8.2.1 83 | 0.4.1 (commit c156c5c34e91e1f7ef449d2c1ea14e282104fd90) 84 | tested with ghc 7.4.2, 7.6.3, 7.8.4,7.10.0.20150123 85 | simple demonstration of how dialects could be handled internally 86 | add ability to add comments to syntax tree to help with generating 87 | SQL code 88 | 0.4.0 (commit 7914898cc8f07bbaf8358d208469392346341964) 89 | now targets SQL:2011 90 | update to ghc 7.8.2 91 | remove dependency on haskell-src-exts 92 | derive Data and Typeable in all the syntax types 93 | improve the error messages a great deal 94 | sql features: 95 | parse schema qualified table names in from clause (thanks to Sönke 96 | Hahn) 97 | support multiline string literals 98 | support colon prefix host parameters and introducer 99 | support unique predicate 100 | support match predicate 101 | support array constructors and subscripting 102 | support character set literals 103 | support collate 104 | support escape for string literals as a postfix operator 105 | parse schema/whatever qualified ids in various places: identifiers 106 | (replaces equivalent functionality using '.'
operator), function, 107 | aggregate, window function names, explicit tables and functions in 108 | from clauses, typenames 109 | support almost all typename syntax for SQL:2011 (just missing refs) 110 | support most multiset operations (missing some predicates only, 111 | likely to be added before next release) 112 | support two double quotes in a quoted identifier to represent a 113 | quote character in the identifier 114 | support filter and within group for aggregates 115 | support next value for 116 | parse special nullary functions 117 | annoying changes: 118 | replace Int with Integer in the syntax 119 | remove support for parsing clauses after the from clause if there 120 | is no from clause 121 | change the syntax representation of quantified comparison 122 | predicates 123 | change the hardcoded collate keyword in substring and trim to use 124 | the new collate postfix operator, this also changes the collation 125 | name to be an identifier instead of a string 126 | represent missing setquantifier as a literal default instead of as 127 | the actual default value (all in select, distinct in set 128 | operators) 129 | same for sort directions in order by 130 | implement complete interval literals (fixed the handling of the 131 | interval qualifier) 132 | make most of the standard reserved words actually reserved (still 133 | some gaps) 134 | change the natural in join abstract syntax to match the concrete 135 | syntax instead of combining natural, on and using into one field 136 | remove support for postgresql limit syntax 137 | bug fixes: 138 | fix some trailing whitespace issues in the keyword style functions, 139 | e.g. extract(day from x), dealing with trailing whitespace on 140 | the parens was fixed 141 | improve some cases of parsing chained prefix or postfix operators 142 | (still some issues here) 143 | fix bug where the 'as' was incorrectly optional in a 'with 144 | expression list item' 145 | fix bug in set operations where 'all' was assumed as the default 146 | instead of 'distinct', e.g. 'select * from t union select * from 147 | u' was parsed to 'select * from t union all select * from u' 148 | instead of 'select * from t union distinct select * from u'. 149 | fix corresponding bug where 'distinct' was being pretty printed in 150 | this case and 'all' was not since the assumed default was the 151 | wrong way round 152 | fix some trailing junk lexing issues with symbols and number 153 | literals 154 | fix number literals to accept upper case E 155 | 0.3.1 (commit 5cba9a1cac19d66166aed2876d809aef892ff59f) 156 | update to work with ghc 7.8.1 157 | 0.3.0 (commit 9e75fa93650b4f1a08d94f4225a243bcc50445ae) 158 | fix the basic operator fixity parsing 159 | swap the order in select item abstract syntax so it is now 160 | (expression, alias) which matches the order in the concrete 161 | syntax 162 | rename ScalarExpr -> ValueExpr 163 | rename Duplicates to SetQuantifier 164 | rename qeDuplicates to qeSetQuantifier 165 | rename OrderField to SortSpec 166 | rename InThing to InPredValue 167 | add support for ? 
for parameterized queries 168 | add new abstract syntax for special operators whose concrete 169 | syntax is a kind of limited named parameters syntax 170 | add more parsing for these operators: position, convert, 171 | translate, overlay, trim, and improve the substring parsing 172 | add support for multi keyword type names 173 | previously: 174 | double precision 175 | character varying 176 | now: 177 | double precision, 178 | character varying, 179 | char varying, 180 | character large object, 181 | char large object, 182 | national character, 183 | national char, 184 | national character varying, 185 | national char varying, 186 | national character large object, 187 | nchar large object, 188 | nchar varying, 189 | bit varying 190 | rename tools/PrettyIt to tools/SQLIdent and add to cabal file as 191 | optional executable (disabled by default) 192 | rename the qeFetch field in Select to qeFetchFirst 193 | change the pretty printer to use 'fetch first' instead of 194 | 'fetch next' 195 | 196 | 0.2.0 (commit 9ea29c1a0ceb2c3f3157fb161d1ea819ea5d64d4) 197 | '' quotes in string literal 198 | parse simple interval literal e.g. "interval '1 week'" 199 | support . in identifiers as a dot operator 200 | support quoted identifiers 201 | partial support for explicit window frames 202 | support multiple test expressions in when branches in case expressions 203 | rename CastOp to TypedLit 204 | support typenames with precision and scale in casts 205 | support nulls first and nulls last in order by 206 | support grouping expressions: group by (), grouping sets, cube, 207 | rollup and parens 208 | support with recursive 209 | support values table literal 210 | support 'table t' syntax 211 | rename fields qe1 and qe1 in combinequeryexpr to qe0 and qe1 212 | add support for functions in from clause 213 | add support for lateral in from clause 214 | support column aliases in common table expressions 215 | refactor the tests and add lots more 216 | parse * in any scalar context instead of trying to restrict it 217 | support row ctor without 'row' e.g. (a,b) = (c,d) 218 | add % ^ | & ~ operators 219 | support ansi standard syntax for offset n rows and fetch first n 220 | rows only 221 | fix keyword parsing to be case insensitive 222 | 223 | 0.1.0.0 (commit 9bf4012fc40a74ad9a039fcb936e3b9dfc3f90f0) 224 | initial release 225 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/Postgres.hs: -------------------------------------------------------------------------------- 1 | 2 | {- 3 | Here are some tests taken from the SQL in the postgres manual. Almost 4 | all of the postgres specific syntax has been skipped, this can be 5 | revisited when the dialect support is added. 
6 | -} 7 | 8 | {-# LANGUAGE OverloadedStrings #-} 9 | module Language.SQL.SimpleSQL.Postgres (postgresTests) where 10 | 11 | import Language.SQL.SimpleSQL.TestTypes 12 | import Language.SQL.SimpleSQL.TestRunners 13 | import Data.Text (Text) 14 | 15 | postgresTests :: TestItem 16 | postgresTests = Group "postgresTests" 17 | 18 | {- 19 | lexical syntax section 20 | 21 | TODO: get all the commented out tests working 22 | -} 23 | 24 | [-- "SELECT 'foo'\n\ 25 | -- \'bar';" -- this should parse as select 'foobar' 26 | -- , 27 | t "SELECT name, (SELECT max(pop) FROM cities\n\ 28 | \ WHERE cities.state = states.name)\n\ 29 | \ FROM states;" 30 | ,t "SELECT ROW(1,2.5,'this is a test');" 31 | 32 | ,t "SELECT ROW(t.*, 42) FROM t;" 33 | ,t "SELECT ROW(t.f1, t.f2, 42) FROM t;" 34 | ,t "SELECT getf1(CAST(ROW(11,'this is a test',2.5) AS myrowtype));" 35 | 36 | ,t "SELECT ROW(1,2.5,'this is a test') = ROW(1, 3, 'not the same');" 37 | 38 | -- table is a reservered keyword? 39 | --,t "SELECT ROW(table.*) IS NULL FROM table;" 40 | ,t "SELECT ROW(tablex.*) IS NULL FROM tablex;" 41 | 42 | ,t "SELECT true OR somefunc();" 43 | 44 | ,t "SELECT somefunc() OR true;" 45 | 46 | -- queries section 47 | 48 | ,t "SELECT * FROM t1 CROSS JOIN t2;" 49 | ,t "SELECT * FROM t1 INNER JOIN t2 ON t1.num = t2.num;" 50 | ,t "SELECT * FROM t1 INNER JOIN t2 USING (num);" 51 | ,t "SELECT * FROM t1 NATURAL INNER JOIN t2;" 52 | ,t "SELECT * FROM t1 LEFT JOIN t2 ON t1.num = t2.num;" 53 | ,t "SELECT * FROM t1 LEFT JOIN t2 USING (num);" 54 | ,t "SELECT * FROM t1 RIGHT JOIN t2 ON t1.num = t2.num;" 55 | ,t "SELECT * FROM t1 FULL JOIN t2 ON t1.num = t2.num;" 56 | ,t "SELECT * FROM t1 LEFT JOIN t2 ON t1.num = t2.num AND t2.value = 'xxx';" 57 | ,t "SELECT * FROM t1 LEFT JOIN t2 ON t1.num = t2.num WHERE t2.value = 'xxx';" 58 | 59 | ,t "SELECT * FROM some_very_long_table_name s\n\ 60 | \JOIN another_fairly_long_name a ON s.id = a.num;" 61 | ,t "SELECT * FROM people AS mother JOIN people AS child\n\ 62 | \ ON mother.id = child.mother_id;" 63 | ,t "SELECT * FROM my_table AS a CROSS JOIN my_table AS b;" 64 | ,t "SELECT * FROM (my_table AS a CROSS JOIN my_table) AS b;" 65 | ,t "SELECT * FROM getfoo(1) AS t1;" 66 | ,t "SELECT * FROM foo\n\ 67 | \ WHERE foosubid IN (\n\ 68 | \ SELECT foosubid\n\ 69 | \ FROM getfoo(foo.fooid) z\n\ 70 | \ WHERE z.fooid = foo.fooid\n\ 71 | \ );" 72 | {-,t "SELECT *\n\ 73 | \ FROM dblink('dbname=mydb', 'SELECT proname, prosrc FROM pg_proc')\n\ 74 | \ AS t1(proname name, prosrc text)\n\ 75 | \ WHERE proname LIKE 'bytea%';"-} -- types in the alias?? 76 | 77 | ,t "SELECT * FROM foo, LATERAL (SELECT * FROM bar WHERE bar.id = foo.bar_id) ss;" 78 | ,t "SELECT * FROM foo, bar WHERE bar.id = foo.bar_id;" 79 | 80 | {-,t "SELECT p1.id, p2.id, v1, v2\n\ 81 | \FROM polygons p1, polygons p2,\n\ 82 | \ LATERAL vertices(p1.poly) v1,\n\ 83 | \ LATERAL vertices(p2.poly) v2\n\ 84 | \WHERE (v1 <-> v2) < 10 AND p1.id != p2.id;"-} -- <-> operator? 
85 | 86 | {-,t "SELECT p1.id, p2.id, v1, v2\n\ 87 | \FROM polygons p1 CROSS JOIN LATERAL vertices(p1.poly) v1,\n\ 88 | \ polygons p2 CROSS JOIN LATERAL vertices(p2.poly) v2\n\ 89 | \WHERE (v1 <-> v2) < 10 AND p1.id != p2.id;"-} 90 | 91 | ,t "SELECT m.name\n\ 92 | \FROM manufacturers m LEFT JOIN LATERAL get_product_names(m.id) pname ON true\n\ 93 | \WHERE pname IS NULL;" 94 | 95 | 96 | ,t "SELECT * FROM fdt WHERE c1 > 5" 97 | 98 | ,t "SELECT * FROM fdt WHERE c1 IN (1, 2, 3)" 99 | 100 | ,t "SELECT * FROM fdt WHERE c1 IN (SELECT c1 FROM t2)" 101 | 102 | ,t "SELECT * FROM fdt WHERE c1 IN (SELECT c3 FROM t2 WHERE c2 = fdt.c1 + 10)" 103 | 104 | ,t "SELECT * FROM fdt WHERE c1 BETWEEN \n\ 105 | \ (SELECT c3 FROM t2 WHERE c2 = fdt.c1 + 10) AND 100" 106 | 107 | ,t "SELECT * FROM fdt WHERE EXISTS (SELECT c1 FROM t2 WHERE c2 > fdt.c1)" 108 | 109 | ,t "SELECT * FROM test1;" 110 | 111 | ,t "SELECT x FROM test1 GROUP BY x;" 112 | ,t "SELECT x, sum(y) FROM test1 GROUP BY x;" 113 | -- s.date changed to s.datex because of reserved keyword 114 | -- handling, not sure if this is correct or not for ansi sql 115 | ,t "SELECT product_id, p.name, (sum(s.units) * p.price) AS sales\n\ 116 | \ FROM products p LEFT JOIN sales s USING (product_id)\n\ 117 | \ GROUP BY product_id, p.name, p.price;" 118 | 119 | ,t "SELECT x, sum(y) FROM test1 GROUP BY x HAVING sum(y) > 3;" 120 | ,t "SELECT x, sum(y) FROM test1 GROUP BY x HAVING x < 'c';" 121 | ,t "SELECT product_id, p.name, (sum(s.units) * (p.price - p.cost)) AS profit\n\ 122 | \ FROM products p LEFT JOIN sales s USING (product_id)\n\ 123 | \ WHERE s.datex > CURRENT_DATE - INTERVAL '4 weeks'\n\ 124 | \ GROUP BY product_id, p.name, p.price, p.cost\n\ 125 | \ HAVING sum(p.price * s.units) > 5000;" 126 | 127 | ,t "SELECT a, b, c FROM t" 128 | 129 | ,t "SELECT tbl1.a, tbl2.a, tbl1.b FROM t" 130 | 131 | ,t "SELECT tbl1.*, tbl2.a FROM t" 132 | 133 | ,t "SELECT a AS value, b + c AS sum FROM t" 134 | 135 | ,t "SELECT a \"value\", b + c AS sum FROM t" 136 | 137 | ,t "SELECT DISTINCT select_list t" 138 | 139 | ,t "VALUES (1, 'one'), (2, 'two'), (3, 'three');" 140 | 141 | ,t "SELECT 1 AS column1, 'one' AS column2\n\ 142 | \UNION ALL\n\ 143 | \SELECT 2, 'two'\n\ 144 | \UNION ALL\n\ 145 | \SELECT 3, 'three';" 146 | 147 | ,t "SELECT * FROM (VALUES (1, 'one'), (2, 'two'), (3, 'three')) AS t (num,letter);" 148 | 149 | ,t "WITH regional_sales AS (\n\ 150 | \ SELECT region, SUM(amount) AS total_sales\n\ 151 | \ FROM orders\n\ 152 | \ GROUP BY region\n\ 153 | \ ), top_regions AS (\n\ 154 | \ SELECT region\n\ 155 | \ FROM regional_sales\n\ 156 | \ WHERE total_sales > (SELECT SUM(total_sales)/10 FROM regional_sales)\n\ 157 | \ )\n\ 158 | \SELECT region,\n\ 159 | \ product,\n\ 160 | \ SUM(quantity) AS product_units,\n\ 161 | \ SUM(amount) AS product_sales\n\ 162 | \FROM orders\n\ 163 | \WHERE region IN (SELECT region FROM top_regions)\n\ 164 | \GROUP BY region, product;" 165 | 166 | ,t "WITH RECURSIVE t(n) AS (\n\ 167 | \ VALUES (1)\n\ 168 | \ UNION ALL\n\ 169 | \ SELECT n+1 FROM t WHERE n < 100\n\ 170 | \)\n\ 171 | \SELECT sum(n) FROM t" 172 | 173 | ,t "WITH RECURSIVE included_parts(sub_part, part, quantity) AS (\n\ 174 | \ SELECT sub_part, part, quantity FROM parts WHERE part = 'our_product'\n\ 175 | \ UNION ALL\n\ 176 | \ SELECT p.sub_part, p.part, p.quantity\n\ 177 | \ FROM included_parts pr, parts p\n\ 178 | \ WHERE p.part = pr.sub_part\n\ 179 | \ )\n\ 180 | \SELECT sub_part, SUM(quantity) as total_quantity\n\ 181 | \FROM included_parts\n\ 182 | \GROUP BY sub_part" 183 | 184 | ,t "WITH 
RECURSIVE search_graph(id, link, data, depth) AS (\n\ 185 | \ SELECT g.id, g.link, g.data, 1\n\ 186 | \ FROM graph g\n\ 187 | \ UNION ALL\n\ 188 | \ SELECT g.id, g.link, g.data, sg.depth + 1\n\ 189 | \ FROM graph g, search_graph sg\n\ 190 | \ WHERE g.id = sg.link\n\ 191 | \)\n\ 192 | \SELECT * FROM search_graph;" 193 | 194 | {-,t "WITH RECURSIVE search_graph(id, link, data, depth, path, cycle) AS (\n\ 195 | \ SELECT g.id, g.link, g.data, 1,\n\ 196 | \ ARRAY[g.id],\n\ 197 | \ false\n\ 198 | \ FROM graph g\n\ 199 | \ UNION ALL\n\ 200 | \ SELECT g.id, g.link, g.data, sg.depth + 1,\n\ 201 | \ path || g.id,\n\ 202 | \ g.id = ANY(path)\n\ 203 | \ FROM graph g, search_graph sg\n\ 204 | \ WHERE g.id = sg.link AND NOT cycle\n\ 205 | \)\n\ 206 | \SELECT * FROM search_graph;"-} -- ARRAY 207 | 208 | {-,t "WITH RECURSIVE search_graph(id, link, data, depth, path, cycle) AS (\n\ 209 | \ SELECT g.id, g.link, g.data, 1,\n\ 210 | \ ARRAY[ROW(g.f1, g.f2)],\n\ 211 | \ false\n\ 212 | \ FROM graph g\n\ 213 | \ UNION ALL\n\ 214 | \ SELECT g.id, g.link, g.data, sg.depth + 1,\n\ 215 | \ path || ROW(g.f1, g.f2),\n\ 216 | \ ROW(g.f1, g.f2) = ANY(path)\n\ 217 | \ FROM graph g, search_graph sg\n\ 218 | \ WHERE g.id = sg.link AND NOT cycle\n\ 219 | \)\n\ 220 | \SELECT * FROM search_graph;"-} -- ARRAY 221 | 222 | ,t "WITH RECURSIVE t(n) AS (\n\ 223 | \ SELECT 1\n\ 224 | \ UNION ALL\n\ 225 | \ SELECT n+1 FROM t\n\ 226 | \)\n\ 227 | \SELECT n FROM t --LIMIT 100;" -- limit is not standard 228 | 229 | -- select page reference 230 | 231 | ,t "SELECT f.title, f.did, d.name, f.date_prod, f.kind\n\ 232 | \ FROM distributors d, films f\n\ 233 | \ WHERE f.did = d.did" 234 | 235 | ,t "SELECT kind, sum(len) AS total\n\ 236 | \ FROM films\n\ 237 | \ GROUP BY kind\n\ 238 | \ HAVING sum(len) < interval '5 hours';" 239 | 240 | ,t "SELECT * FROM distributors ORDER BY name;" 241 | ,t "SELECT * FROM distributors ORDER BY 2;" 242 | 243 | ,t "SELECT distributors.name\n\ 244 | \ FROM distributors\n\ 245 | \ WHERE distributors.name LIKE 'W%'\n\ 246 | \UNION\n\ 247 | \SELECT actors.name\n\ 248 | \ FROM actors\n\ 249 | \ WHERE actors.name LIKE 'W%';" 250 | 251 | ,t "WITH t AS (\n\ 252 | \ SELECT random() as x FROM generate_series(1, 3)\n\ 253 | \ )\n\ 254 | \SELECT * FROM t\n\ 255 | \UNION ALL\n\ 256 | \SELECT * FROM t" 257 | 258 | ,t "WITH RECURSIVE employee_recursive(distance, employee_name, manager_name) AS (\n\ 259 | \ SELECT 1, employee_name, manager_name\n\ 260 | \ FROM employee\n\ 261 | \ WHERE manager_name = 'Mary'\n\ 262 | \ UNION ALL\n\ 263 | \ SELECT er.distance + 1, e.employee_name, e.manager_name\n\ 264 | \ FROM employee_recursive er, employee e\n\ 265 | \ WHERE er.employee_name = e.manager_name\n\ 266 | \ )\n\ 267 | \SELECT distance, employee_name FROM employee_recursive;" 268 | 269 | ,t "SELECT m.name AS mname, pname\n\ 270 | \FROM manufacturers m, LATERAL get_product_names(m.id) pname;" 271 | 272 | ,t "SELECT m.name AS mname, pname\n\ 273 | \FROM manufacturers m LEFT JOIN LATERAL get_product_names(m.id) pname ON true;" 274 | 275 | ,t "SELECT 2+2;" 276 | 277 | -- simple-sql-parser doesn't support where without from 278 | -- this can be added for the postgres dialect when it is written 279 | --,t "SELECT distributors.* WHERE distributors.name = 'Westward';" 280 | 281 | ] 282 | where 283 | t :: HasCallStack => Text -> TestItem 284 | t src = testParseQueryExpr postgres src 285 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/ErrorMessages.hs: 
-------------------------------------------------------------------------------- 1 | 2 | {- 3 | 4 | Quick tests for error messages, all the tests use the entire formatted 5 | output of parse failures to compare, it's slightly fragile. Most of 6 | the tests use a huge golden file which contains tons of parse error 7 | examples. 8 | 9 | -} 10 | 11 | 12 | {-# LANGUAGE OverloadedStrings #-} 13 | {-# LANGUAGE QuasiQuotes #-} 14 | module Language.SQL.SimpleSQL.ErrorMessages 15 | (errorMessageTests 16 | ) where 17 | 18 | import Language.SQL.SimpleSQL.TestTypes 19 | import Language.SQL.SimpleSQL.Parse 20 | import qualified Language.SQL.SimpleSQL.Lex as L 21 | import Language.SQL.SimpleSQL.TestRunners 22 | --import Language.SQL.SimpleSQL.Syntax 23 | import Language.SQL.SimpleSQL.Expectations 24 | import Test.Hspec (it) 25 | import Debug.Trace 26 | 27 | import Data.Text (Text) 28 | import qualified Data.Text as T 29 | import qualified Data.Text.IO as T 30 | import Test.Hspec.Golden 31 | (Golden(..) 32 | ) 33 | 34 | import qualified Text.RawString.QQ as R 35 | import System.FilePath (()) 36 | import Text.Show.Pretty (ppShow) 37 | 38 | errorMessageTests :: TestItem 39 | errorMessageTests = Group "error messages" 40 | [gp (parseQueryExpr ansi2011 "" Nothing) prettyError [R.r| 41 | 42 | select 43 | a 44 | from t 45 | where 46 | something 47 | order by 1,2,3 where 48 | 49 | |] 50 | [R.r|8:16: 51 | | 52 | 8 | order by 1,2,3 where 53 | | ^^^^^ 54 | unexpected where 55 | |] 56 | ,gp (L.lexSQL ansi2011 False "" Nothing) L.prettyError [R.r| 57 | 58 | select 59 | a 60 | from t 61 | where 62 | something 63 | order by 1,2,3 $@ 64 | 65 | |] 66 | [R.r|8:16: 67 | | 68 | 8 | order by 1,2,3 $@ 69 | | ^ 70 | unexpected '$' 71 | |] 72 | ,let fn = "expected-parse-errors" 73 | got = generateParseResults parseErrorData 74 | in GoldenErrorTest fn parseErrorData $ it "parse error regressions" $ myGolden (T.unpack fn) got 75 | ] 76 | where 77 | gp :: (Show a, HasCallStack) => (Text -> Either e a) -> (e -> Text) -> Text -> Text -> TestItem 78 | gp parse pret src err = 79 | GeneralParseFailTest src err $ 80 | it (T.unpack src) $ 81 | let f1 = parse src 82 | ex = shouldFailWith pret 83 | quickTrace = 84 | case f1 of 85 | Left f | pret f /= err -> 86 | trace (T.unpack ("check\n[" <> pret f <>"]\n["<> err <> "]\n")) 87 | _ -> id 88 | in quickTrace (f1 `ex` err) 89 | 90 | ------------------------------------------------------------------------------ 91 | 92 | -- golden parse error tests 93 | 94 | myGolden :: String -> Text -> Golden Text 95 | myGolden name actualOutput = 96 | Golden { 97 | output = actualOutput, 98 | encodePretty = show, 99 | writeToFile = T.writeFile, 100 | readFromFile = T.readFile, 101 | goldenFile = name "golden", 102 | actualFile = Just (name "actual"), 103 | failFirstTime = False 104 | } 105 | 106 | parseErrorData :: [(Text,Text,Text)] 107 | parseErrorData = 108 | concat 109 | [simpleExpressions1 110 | ,pgExprs 111 | ,sqlServerIden 112 | ,mysqliden 113 | ,paramvariations 114 | ,odbcexpr 115 | ,odbcqexpr 116 | ,queryExprExamples 117 | ,statementParseErrorExamples] 118 | 119 | generateParseResults :: [(Text,Text,Text)] -> Text 120 | generateParseResults dat = 121 | let testLine (parser,dialect,src) = 122 | let d = case dialect of 123 | "ansi2011" -> ansi2011 124 | "postgres" -> postgres 125 | "sqlserver" -> sqlserver 126 | "mysql" -> mysql 127 | "params" -> ansi2011{diAtIdentifier=True, diHashIdentifier= True} 128 | "odbc" -> ansi2011{diOdbc=True} 129 | _ -> error $ "unknown dialect: " <> T.unpack dialect 130 
| res = case parser of 131 | "queryExpr" -> 132 | either prettyError (T.pack . ppShow) 133 | $ parseQueryExpr d "" Nothing src 134 | "scalarExpr" -> 135 | either prettyError (T.pack . ppShow) 136 | $ parseScalarExpr d "" Nothing src 137 | "statement" -> 138 | either prettyError (T.pack . ppShow) 139 | $ parseStatement d "" Nothing src 140 | _ -> error $ "unknown parser: " <> T.unpack parser 141 | -- prepend a newline to multi line fields, so they show 142 | -- nice in a diff in meld or similar 143 | resadj = if '\n' `T.elem` res 144 | then T.cons '\n' res 145 | else res 146 | in T.unlines [parser, dialect, src, resadj] 147 | in T.unlines $ map testLine dat 148 | 149 | parseExampleStrings :: Text -> [Text] 150 | parseExampleStrings = filter (not . T.null) . map T.strip . T.splitOn ";" 151 | 152 | simpleExpressions1 :: [(Text,Text,Text)] 153 | simpleExpressions1 = 154 | concat $ flip map (parseExampleStrings simpleExprData) $ \e -> 155 | [("scalarExpr", "ansi2011", e) 156 | ,("queryExpr", "ansi2011", "select " <> e) 157 | ,("queryExpr", "ansi2011", "select " <> e <> ",") 158 | ,("queryExpr", "ansi2011", "select " <> e <> " from")] 159 | where 160 | simpleExprData = [R.r| 161 | 'test 162 | ; 163 | 'test''t 164 | ; 165 | 'test'' 166 | ; 167 | 3.23e- 168 | ; 169 | . 170 | ; 171 | 3.23e 172 | ; 173 | a.3 174 | ; 175 | 3.a 176 | ; 177 | 3.2a 178 | ; 179 | 4iden 180 | ; 181 | 4iden. 182 | ; 183 | iden.4iden 184 | ; 185 | 4iden.* 186 | ; 187 | from 188 | ; 189 | from.a 190 | ; 191 | a.from 192 | ; 193 | not 194 | ; 195 | 4 + 196 | ; 197 | 4 + from 198 | ; 199 | (5 200 | ; 201 | (5 + 202 | ; 203 | (5 + 6 204 | ; 205 | (5 + from) 206 | ; 207 | case 208 | ; 209 | case a 210 | ; 211 | case a when b c end 212 | ; 213 | case a when b then c 214 | ; 215 | case a else d end 216 | ; 217 | case a from c end 218 | ; 219 | case a when from then to end 220 | ; 221 | /* blah 222 | ; 223 | /* blah /* stuff */ 224 | ; 225 | /* * 226 | ; 227 | /* / 228 | ; 229 | $$something$ 230 | ; 231 | $$something 232 | ; 233 | $$something 234 | x 235 | ; 236 | $a$something$b$ 237 | ; 238 | $a$ 239 | ; 240 | ''' 241 | ; 242 | ''''' 243 | ; 244 | "a 245 | ; 246 | "a"" 247 | ; 248 | """ 249 | ; 250 | """"" 251 | ; 252 | "" 253 | ; 254 | */ 255 | ; 256 | :3 257 | ; 258 | @3 259 | ; 260 | #3 261 | ; 262 | ::: 263 | ; 264 | ||| 265 | ; 266 | ... 267 | ; 268 | " 269 | ; 270 | ] 271 | ; 272 | ) 273 | ; 274 | [test 275 | ; 276 | [] 277 | ; 278 | [[test]] 279 | ; 280 | `open 281 | ; 282 | ``` 283 | ; 284 | `` 285 | ; 286 | } 287 | ; 288 | mytype(4 '4'; 289 | ; 290 | app(3 291 | ; 292 | app( 293 | ; 294 | app(something 295 | ; 296 | app(something, 297 | ; 298 | count(* 299 | ; 300 | count(* filter (where something > 5) 301 | ; 302 | count(*) filter (where something > 5 303 | ; 304 | count(*) filter ( 305 | ; 306 | sum(a over (order by b) 307 | ; 308 | sum(a) over (order by b 309 | ; 310 | sum(a) over ( 311 | ; 312 | rank(a,c within group (order by b) 313 | ; 314 | rank(a,c) within group (order by b 315 | ; 316 | rank(a,c) within group ( 317 | ; 318 | array[ 319 | ; 320 | (a 321 | ; 322 | ( 323 | ; 324 | a >* 325 | ; 326 | a >* b 327 | ; 328 | ( ( a 329 | ; 330 | ( ( a ) 331 | ; 332 | ( ( a + ) 333 | |] 334 | 335 | pgExprs :: [(Text,Text,Text)] 336 | pgExprs = flip map (parseExampleStrings src) $ \e -> 337 | ("scalarExpr", "postgres", e) 338 | where src = [R.r| 339 | $$something$ 340 | ; 341 | $$something 342 | ; 343 | $$something 344 | x 345 | ; 346 | $a$something$b$ 347 | ; 348 | $a$ 349 | ; 350 | ::: 351 | ; 352 | ||| 353 | ; 354 | ... 
355 | ; 356 | 357 | |] 358 | 359 | sqlServerIden :: [(Text,Text,Text)] 360 | sqlServerIden = flip map (parseExampleStrings src) $ \e -> 361 | ("scalarExpr", "sqlserver", e) 362 | where src = [R.r| 363 | ] 364 | ; 365 | [test 366 | ; 367 | [] 368 | ; 369 | [[test]] 370 | 371 | |] 372 | 373 | mysqliden :: [(Text,Text,Text)] 374 | mysqliden = flip map (parseExampleStrings src) $ \e -> 375 | ("scalarExpr", "mysql", e) 376 | where src = [R.r| 377 | `open 378 | ; 379 | ``` 380 | ; 381 | `` 382 | 383 | |] 384 | 385 | paramvariations :: [(Text,Text,Text)] 386 | paramvariations = flip map (parseExampleStrings src) $ \e -> 387 | ("scalarExpr", "params", e) 388 | where src = [R.r| 389 | :3 390 | ; 391 | @3 392 | ; 393 | #3 394 | 395 | |] 396 | 397 | 398 | odbcexpr :: [(Text,Text,Text)] 399 | odbcexpr = flip map (parseExampleStrings src) $ \e -> 400 | ("scalarExpr", "odbc", e) 401 | where src = [R.r| 402 | {d '2000-01-01' 403 | ; 404 | {fn CHARACTER_LENGTH(string_exp) 405 | 406 | |] 407 | 408 | odbcqexpr :: [(Text,Text,Text)] 409 | odbcqexpr = flip map (parseExampleStrings src) $ \e -> 410 | ("queryExpr", "odbc", e) 411 | where src = [R.r| 412 | select * from {oj t1 left outer join t2 on expr 413 | 414 | |] 415 | 416 | 417 | 418 | queryExprExamples :: [(Text,Text,Text)] 419 | queryExprExamples = flip map (parseExampleStrings src) $ \e -> 420 | ("queryExpr", "ansi2011", e) 421 | where src = [R.r| 422 | select a select 423 | ; 424 | select a from t, 425 | ; 426 | select a from t select 427 | ; 428 | select a from t(a) 429 | ; 430 | select a from (t 431 | ; 432 | select a from (t having 433 | ; 434 | select a from t a b 435 | ; 436 | select a from t as 437 | ; 438 | select a from t as having 439 | ; 440 | select a from (1234) 441 | ; 442 | select a from (1234 443 | ; 444 | select a from a wrong join b 445 | ; 446 | select a from a natural wrong join b 447 | ; 448 | select a from a left wrong join b 449 | ; 450 | select a from a left wrong join b 451 | ; 452 | select a from a join b select 453 | ; 454 | select a from a join b on select 455 | ; 456 | select a from a join b on (1234 457 | ; 458 | select a from a join b using(a 459 | ; 460 | select a from a join b using(a, 461 | ; 462 | select a from a join b using(a,) 463 | ; 464 | select a from a join b using(1234 465 | ; 466 | select a from t order no a 467 | ; 468 | select a from t order by a where c 469 | ; 470 | select 'test 471 | ' 472 | ; 473 | select a as 474 | ; 475 | select a as from t 476 | ; 477 | select a as, 478 | ; 479 | select a, 480 | ; 481 | select a, from t 482 | ; 483 | select a as from 484 | ; 485 | select a as from from 486 | ; 487 | select a as from2 from 488 | ; 489 | select a fromt 490 | ; 491 | select a b fromt 492 | 493 | ; 494 | select a from t u v 495 | ; 496 | select a from t as 497 | ; 498 | select a from t, 499 | ; 500 | select a from group by b 501 | ; 502 | select a from t join group by a 503 | ; 504 | select a from t join 505 | ; 506 | select a from (@ 507 | ; 508 | select a from () 509 | ; 510 | select a from t left join u on 511 | ; 512 | select a from t left join u on group by a 513 | ; 514 | select a from t left join u using 515 | ; 516 | select a from t left join u using ( 517 | ; 518 | select a from t left join u using (a 519 | ; 520 | select a from t left join u using (a, 521 | ; 522 | select a from (select a from) 523 | ; 524 | select a from (select a 525 | 526 | ; 527 | select a from t where 528 | ; 529 | select a from t group by a having b where 530 | ; 531 | select a from t where (a 532 | ; 533 | select a from t 
where group by b 534 | 535 | ; 536 | select a from t group by 537 | ; 538 | select a from t group 539 | ; 540 | select a from t group by a as 541 | ; 542 | select a from t group by a, 543 | ; 544 | select a from t group by order by 545 | ; 546 | select a <<== b from t 547 | ; 548 | /* 549 | ; 550 | select * as a 551 | ; 552 | select t.* as a 553 | ; 554 | select 3 + * 555 | ; 556 | select case when * then 1 end 557 | ; 558 | select (*) 559 | ; 560 | select * from (select a 561 | from t 562 | ; 563 | select * from (select a(stuff) 564 | from t 565 | 566 | ; 567 | select * 568 | from (select a,b 569 | from t 570 | where a = 1 571 | and b > a 572 | 573 | ; 574 | select * 575 | from (select a,b 576 | from t 577 | where a = 1 578 | and b > a 579 | from t) 580 | 581 | |] 582 | 583 | 584 | statementParseErrorExamples :: [(Text,Text,Text)] 585 | statementParseErrorExamples = flip map (parseExampleStrings src) $ \e -> 586 | ("statement", "ansi2011", e) 587 | where src = [R.r| 588 | create 589 | ; 590 | drop 591 | ; 592 | delete this 593 | ; 594 | delete where 7 595 | ; 596 | delete from where t 597 | ; 598 | truncate nothing 599 | ; 600 | truncate nothing nothing 601 | ; 602 | truncate table from 603 | ; 604 | truncate table t u 605 | ; 606 | insert t select u 607 | ; 608 | insert into t insert 609 | ; 610 | insert into t (1,2) 611 | ; 612 | insert into t( 613 | ; 614 | insert into t(1 615 | ; 616 | insert into t(a 617 | ; 618 | insert into t(a, 619 | ; 620 | insert into t(a,b) 621 | ; 622 | insert into t(a,b) values 623 | ; 624 | insert into t(a,b) values ( 625 | ; 626 | insert into t(a,b) values (1 627 | ; 628 | insert into t(a,b) values (1, 629 | ; 630 | insert into t(a,b) values (1,2) and stuff 631 | ; 632 | update set 1 633 | ; 634 | update t u 635 | ; 636 | update t u v 637 | ; 638 | update t set a 639 | ; 640 | update t set a= 641 | ; 642 | update t set a=1, 643 | ; 644 | update t set a=1 where 645 | ; 646 | update t set a=1 where 1 also 647 | ; 648 | create table 649 | ; 650 | create table t ( 651 | a 652 | ) 653 | ; 654 | create table t ( 655 | a 656 | ; 657 | create table t ( 658 | a, 659 | ) 660 | ; 661 | create table t ( 662 | ) 663 | ; 664 | create table t ( 665 | ; 666 | create table t 667 | ; 668 | create table t. ( 669 | ; 670 | truncate table t. 671 | ; 672 | drop table t. where 673 | ; 674 | update t. set 675 | ; 676 | delete from t. where 677 | ; 678 | insert into t. values 679 | ; 680 | with a as (select * from t 681 | select 1 682 | ; 683 | with a as (select * from t 684 | ; 685 | with a as ( 686 | ; 687 | with a ( 688 | ; 689 | with as (select * from t) 690 | select 1 691 | ; 692 | with (select * from t) as a 693 | select 1 694 | 695 | 696 | |] 697 | 698 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/LexerTests.hs: -------------------------------------------------------------------------------- 1 | 2 | 3 | -- Test for the lexer 4 | 5 | 6 | {- 7 | TODO: 8 | figure out a way to do quickcheck testing: 9 | 1. generate valid tokens and check they parse 10 | 11 | 2. combine two generated tokens together for the combo testing 12 | 13 | this especially will work much better for the postgresql extensible 14 | operator tests which doing exhaustively takes ages and doesn't bring 15 | much benefit over testing a few using quickcheck. 
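A rough sketch of what step 1 could look like, left here as a comment only: it assumes
Test.QuickCheck as a new test dependency, reuses ansiLexerTable below as the source of
known-good tokens, and calls lexSQL the same way the error message tests do. None of
this code exists in the test suite yet, the names are hypothetical.

    import Test.QuickCheck (Gen, Property, elements, forAll)
    import qualified Language.SQL.SimpleSQL.Lex as L

    -- pick one (source text, expected tokens) pair from the known-good table
    genAnsiToken :: Gen (Text, [Token])
    genAnsiToken = elements ansiLexerTable

    -- a single generated token should always lex successfully on its own
    prop_singleTokenLexes :: Property
    prop_singleTokenLexes =
        forAll genAnsiToken $ \(src, _expected) ->
            either (const False) (const True)
                (L.lexSQL ansi2011 False "" Nothing src)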
16 | -} 17 | 18 | {-# LANGUAGE OverloadedStrings #-} 19 | module Language.SQL.SimpleSQL.LexerTests (lexerTests) where 20 | 21 | import Language.SQL.SimpleSQL.TestTypes 22 | import Language.SQL.SimpleSQL.Lex 23 | (Token(..) 24 | ,tokenListWillPrintAndLex 25 | ) 26 | import Language.SQL.SimpleSQL.TestRunners 27 | 28 | import qualified Data.Text as T 29 | import Data.Text (Text) 30 | 31 | --import Debug.Trace 32 | --import Data.Char (isAlpha) 33 | -- import Data.List 34 | 35 | lexerTests :: TestItem 36 | lexerTests = Group "lexerTests" $ 37 | [bootstrapTests 38 | ,ansiLexerTests 39 | ,postgresLexerTests 40 | ,sqlServerLexerTests 41 | ,oracleLexerTests 42 | ,mySqlLexerTests 43 | ,odbcLexerTests 44 | ] 45 | 46 | -- quick sanity tests to see something working 47 | bootstrapTests :: TestItem 48 | bootstrapTests = Group "bootstrap tests" $ 49 | [t "iden" [Identifier Nothing "iden"] 50 | 51 | ,t "\"a1normal \"\" iden\"" [Identifier (Just ("\"","\"")) "a1normal \"\" iden"] 52 | 53 | ,t "'string'" [SqlString "'" "'" "string"] 54 | 55 | ,t " " [Whitespace " "] 56 | ,t "\t " [Whitespace "\t "] 57 | ,t " \n " [Whitespace " \n "] 58 | 59 | ,t "--" [LineComment "--"] 60 | ,t "--\n" [LineComment "--\n"] 61 | ,t "--stuff" [LineComment "--stuff"] 62 | ,t "-- stuff" [LineComment "-- stuff"] 63 | ,t "-- stuff\n" [LineComment "-- stuff\n"] 64 | ,t "--\nstuff" [LineComment "--\n", Identifier Nothing "stuff"] 65 | ,t "-- com \nstuff" [LineComment "-- com \n", Identifier Nothing "stuff"] 66 | 67 | ,t "/*test1*/" [BlockComment "/*test1*/"] 68 | ,t "/**/" [BlockComment "/**/"] 69 | ,t "/***/" [BlockComment "/***/"] 70 | ,t "/* * */" [BlockComment "/* * */"] 71 | ,t "/*test*/" [BlockComment "/*test*/"] 72 | ,t "/*te/*st*/*/" [BlockComment "/*te/*st*/*/"] 73 | ,t "/*te*st*/" [BlockComment "/*te*st*/"] 74 | ,t "/*lines\nmore lines*/" [BlockComment "/*lines\nmore lines*/"] 75 | ,t "/*test1*/\n" [BlockComment "/*test1*/", Whitespace "\n"] 76 | ,t "/*test1*/stuff" [BlockComment "/*test1*/", Identifier Nothing "stuff"] 77 | 78 | ,t "1" [SqlNumber "1"] 79 | ,t "42" [SqlNumber "42"] 80 | 81 | ,tp "$1" [PositionalArg 1] 82 | ,tp "$200" [PositionalArg 200] 83 | 84 | ,t ":test" [PrefixedVariable ':' "test"] 85 | 86 | ] ++ map (\a -> t a [Symbol a]) ( 87 | ["!=", "<>", ">=", "<=", "||"] 88 | ++ map T.singleton ("(),-+*/<>=." :: [Char])) 89 | where 90 | t :: HasCallStack => Text -> [Token] -> TestItem 91 | t src ast = testLex ansi2011 src ast 92 | tp :: HasCallStack => Text -> [Token] -> TestItem 93 | tp src ast = testLex ansi2011{diPositionalArg=True} src ast 94 | 95 | 96 | ansiLexerTable :: [(Text,[Token])] 97 | ansiLexerTable = 98 | -- single char symbols 99 | map (\s -> (T.singleton s,[Symbol $ T.singleton s])) "+-^*/%~&|?<>[]=,;()" 100 | -- multi char symbols 101 | ++ map (\s -> (s,[Symbol s])) [">=","<=","!=","<>","||"] 102 | ++ (let idens = ["a", "_a", "test", "table", "Stuff", "STUFF"] 103 | -- simple identifiers 104 | in map (\i -> (i, [Identifier Nothing i])) idens 105 | <> map (\i -> ("\"" <> i <> "\"", [Identifier (Just ("\"","\"")) i])) idens 106 | -- todo: in order to make lex . 
pretty id, need to 107 | -- preserve the case of the u 108 | <> map (\i -> ("u&\"" <> i <> "\"", [Identifier (Just ("u&\"","\"")) i])) idens 109 | -- host param 110 | <> map (\i -> (T.cons ':' i, [PrefixedVariable ':' i])) idens 111 | ) 112 | -- quoted identifiers with embedded double quotes 113 | -- the lexer doesn't unescape the quotes 114 | ++ [("\"anormal \"\" iden\"", [Identifier (Just ("\"","\"")) "anormal \"\" iden"])] 115 | -- strings 116 | -- the lexer doesn't apply escapes at all 117 | ++ [("'string'", [SqlString "'" "'" "string"]) 118 | ,("'normal '' quote'", [SqlString "'" "'" "normal '' quote"]) 119 | ,("'normalendquote '''", [SqlString "'" "'" "normalendquote ''"]) 120 | ,("'\n'", [SqlString "'" "'" "\n"])] 121 | -- csstrings 122 | ++ map (\c -> (c <> "'test'", [SqlString (c <> "'") "'" "test"])) 123 | ["n", "N","b", "B","x", "X", "u&"] 124 | -- numbers 125 | ++ [("10", [SqlNumber "10"]) 126 | ,(".1", [SqlNumber ".1"]) 127 | ,("5e3", [SqlNumber "5e3"]) 128 | ,("5e+3", [SqlNumber "5e+3"]) 129 | ,("5e-3", [SqlNumber "5e-3"]) 130 | ,("10.2", [SqlNumber "10.2"]) 131 | ,("10.2e7", [SqlNumber "10.2e7"])] 132 | -- whitespace 133 | ++ concat [[(T.singleton a,[Whitespace $ T.singleton a]) 134 | ,(T.singleton a <> T.singleton b, [Whitespace (T.singleton a <> T.singleton b)])] 135 | | a <- " \n\t", b <- " \n\t"] 136 | -- line comment 137 | ++ map (\c -> (c, [LineComment c])) 138 | ["--", "-- ", "-- this is a comment", "-- line com\n"] 139 | -- block comment 140 | ++ map (\c -> (c, [BlockComment c])) 141 | ["/**/", "/* */","/* this is a comment */" 142 | ,"/* this *is/ a comment */" 143 | ] 144 | 145 | 146 | ansiLexerTests :: TestItem 147 | ansiLexerTests = Group "ansiLexerTests" $ 148 | [Group "ansi lexer token tests" $ [l s t | (s,t) <- ansiLexerTable] 149 | ,Group "ansi generated combination lexer tests" $ 150 | [ l (s <> s1) (t <> t1) 151 | | (s,t) <- ansiLexerTable 152 | , (s1,t1) <- ansiLexerTable 153 | , tokenListWillPrintAndLex ansi2011 $ t <> t1 154 | 155 | ] 156 | ,Group "ansiadhoclexertests" $ 157 | [l "" [] 158 | ,l "-- line com\nstuff" [LineComment "-- line com\n",Identifier Nothing "stuff"] 159 | ] ++ 160 | [-- want to make sure this gives a parse error 161 | f "*/" 162 | -- combinations of pipes: make sure they fail because they could be 163 | -- ambiguous and it is really unclear when they are or not, and 164 | -- what the result is even when they are not ambiguous 165 | ,f "|||" 166 | ,f "||||" 167 | ,f "|||||" 168 | -- another user experience thing: make sure extra trailing 169 | -- number chars are rejected rather than attempting to parse 170 | -- if the user means to write something that is rejected by this code, 171 | -- then they can use whitespace to make it clear and then it will parse 172 | ,f "12e3e4" 173 | ,f "12e3e4" 174 | ,f "12e3e4" 175 | ,f "12e3.4" 176 | ,f "12.4.5" 177 | ,f "12.4e5.6" 178 | ,f "12.4e5e7"] 179 | ] 180 | where 181 | l :: HasCallStack => Text -> [Token] -> TestItem 182 | l src ast = testLex ansi2011 src ast 183 | f :: HasCallStack => Text -> TestItem 184 | f src = lexFails ansi2011 src 185 | 186 | 187 | {- 188 | todo: lexing tests 189 | do quickcheck testing: 190 | can try to generate valid tokens then check they parse 191 | 192 | same as above: can also try to pair tokens, create an accurate 193 | function to say which ones can appear adjacent, and test 194 | 195 | I think this plus the explicit lists of tokens like above which do 196 | basic sanity + explicit edge casts will provide a high level of 197 | assurance. 
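One possible shape for the pairing test, again only a sketch under the same assumptions
as the note at the top of this file (Test.QuickCheck plus a hypothetical genAnsiToken
drawing from ansiLexerTable): use tokenListWillPrintAndLex as the "can these tokens
appear adjacent" function, and only require the concatenated source to lex when it
says yes.

    prop_adjacentTokensLex :: Property
    prop_adjacentTokensLex =
        forAll genAnsiToken $ \(s1, t1) ->
        forAll genAnsiToken $ \(s2, t2) ->
            not (tokenListWillPrintAndLex ansi2011 (t1 <> t2))
            || either (const False) (const True)
                   (L.lexSQL ansi2011 False "" Nothing (s1 <> s2))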
198 | -} 199 | 200 | 201 | 202 | postgresLexerTable :: [(Text,[Token])] 203 | postgresLexerTable = 204 | -- single char symbols 205 | map (\s -> (T.singleton s,[Symbol $ T.singleton s])) "+-^*/%~&|?<>[]=,;():" 206 | -- multi char symbols 207 | ++ map (\s -> (s,[Symbol s])) [">=","<=","!=","<>","||", "::","..",":="] 208 | -- generic symbols 209 | 210 | ++ (let idens = ["a", "_a", "test", "table", "Stuff", "STUFF"] 211 | -- simple identifiers 212 | in map (\i -> (i, [Identifier Nothing i])) idens 213 | ++ map (\i -> ("\"" <> i <> "\"", [Identifier (Just ("\"","\"")) i])) idens 214 | -- todo: in order to make lex . pretty id, need to 215 | -- preserve the case of the u 216 | ++ map (\i -> ("u&\"" <> i <> "\"", [Identifier (Just ("u&\"","\"")) i])) idens 217 | -- host param 218 | ++ map (\i -> (T.cons ':' i, [PrefixedVariable ':' i])) idens 219 | ) 220 | -- positional var 221 | ++ [("$1", [PositionalArg 1])] 222 | -- quoted identifiers with embedded double quotes 223 | ++ [("\"normal \"\" iden\"", [Identifier (Just ("\"","\"")) "normal \"\" iden"])] 224 | -- strings 225 | ++ [("'string'", [SqlString "'" "'" "string"]) 226 | ,("'normal '' quote'", [SqlString "'" "'" "normal '' quote"]) 227 | ,("'normalendquote '''", [SqlString "'" "'" "normalendquote ''"]) 228 | ,("'\n'", [SqlString "'" "'" "\n"]) 229 | ,("E'\n'", [SqlString "E'" "'" "\n"]) 230 | ,("e'this '' quote'", [SqlString "e'" "'" "this '' quote"]) 231 | ,("e'this \\' quote'", [SqlString "e'" "'" "this \\' quote"]) 232 | ,("'not this \\' quote", [SqlString "'" "'" "not this \\" 233 | ,Whitespace " " 234 | ,Identifier Nothing "quote"]) 235 | ,("$$ string 1 $$", [SqlString "$$" "$$" " string 1 "]) 236 | ,("$$ string $ 2 $$", [SqlString "$$" "$$" " string $ 2 "]) 237 | ,("$a$ $$string 3$$ $a$", [SqlString "$a$" "$a$" " $$string 3$$ "]) 238 | ] 239 | -- csstrings 240 | ++ map (\c -> (c <> "'test'", [SqlString (c <> "'") "'" "test"])) 241 | ["n", "N","b", "B","x", "X", "u&", "e", "E"] 242 | -- numbers 243 | ++ [("10", [SqlNumber "10"]) 244 | ,(".1", [SqlNumber ".1"]) 245 | ,("5e3", [SqlNumber "5e3"]) 246 | ,("5e+3", [SqlNumber "5e+3"]) 247 | ,("5e-3", [SqlNumber "5e-3"]) 248 | ,("10.2", [SqlNumber "10.2"]) 249 | ,("10.2e7", [SqlNumber "10.2e7"])] 250 | -- whitespace 251 | ++ concat [[(T.singleton a,[Whitespace $ T.singleton a]) 252 | ,(T.singleton a <> T.singleton b, [Whitespace $ T.singleton a <> T.singleton b])] 253 | | a <- " \n\t", b <- " \n\t"] 254 | -- line comment 255 | ++ map (\c -> (c, [LineComment c])) 256 | ["--", "-- ", "-- this is a comment", "-- line com\n"] 257 | -- block comment 258 | ++ map (\c -> (c, [BlockComment c])) 259 | ["/**/", "/* */","/* this is a comment */" 260 | ,"/* this *is/ a comment */" 261 | ] 262 | 263 | {- 264 | An operator name is a sequence of up to NAMEDATALEN-1 (63 by default) characters from the following list: 265 | 266 | + - * / < > = ~ ! @ # % ^ & | ` ? 267 | 268 | There are a few restrictions on operator names, however: 269 | -- and /* cannot appear anywhere in an operator name, since they will be taken as the start of a comment. 270 | 271 | A multiple-character operator name cannot end in + or -, unless the name also contains at least one of these characters: 272 | 273 | ~ ! @ # % ^ & | ` ? 
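A couple of concrete consequences of these rules, matching the generated edge case and
adhoc tests further down this file:

    "@-"   lexes as the single operator "@-"      (ends in '-', but contains '@')
    "<-"   lexes as "<" followed by "-"           (ends in '-' with none of the exception characters)
    "*/"   never lexes as part of an operator     (a stray "*/" must fail to lex)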
274 | 275 | todo: 'negative' tests 276 | symbol then -- 277 | symbol then /* 278 | operators without one of the exception chars 279 | followed by + or - without whitespace 280 | 281 | also: do the testing for the ansi compatibility special cases 282 | -} 283 | 284 | postgresShortOperatorTable :: [(Text,[Token])] 285 | postgresShortOperatorTable = 286 | [ (x, [Symbol x]) | x <- someValidPostgresOperators 2] 287 | 288 | 289 | postgresExtraOperatorTable :: [(Text,[Token])] 290 | postgresExtraOperatorTable = 291 | [ (x, [Symbol x]) | x <- someValidPostgresOperators 4] 292 | 293 | 294 | someValidPostgresOperators :: Int -> [Text] 295 | someValidPostgresOperators l = 296 | [ x 297 | | n <- [1..l] 298 | , x <- combos "+-*/<>=~!@#%^&|`?" n 299 | , not ("--" `T.isInfixOf` x || "/*" `T.isInfixOf` x || "*/" `T.isInfixOf` x) 300 | , not (T.last x `T.elem` "+-") 301 | || or (map (`T.elem` x) "~!@#%^&|`?") 302 | ] 303 | 304 | {- 305 | These are postgres operators, which if followed immediately by a + or 306 | -, will lex as separate operators rather than one operator including 307 | the + or -. 308 | -} 309 | 310 | somePostgresOpsWhichWontAddTrailingPlusMinus :: Int -> [Text] 311 | somePostgresOpsWhichWontAddTrailingPlusMinus l = 312 | [ x 313 | | n <- [1..l] 314 | , x <- combos "+-*/<>=" n 315 | , not ("--" `T.isInfixOf` x || "/*" `T.isInfixOf` x || "*/" `T.isInfixOf` x) 316 | , not (T.last x `T.elem` "+-") 317 | ] 318 | 319 | postgresLexerTests :: TestItem 320 | postgresLexerTests = Group "postgresLexerTests" $ 321 | [Group "postgres lexer token tests" $ 322 | [l s t | (s,t) <- postgresLexerTable] 323 | ,Group "postgres generated lexer token tests" $ 324 | [l s t | (s,t) <- postgresShortOperatorTable ++ postgresExtraOperatorTable] 325 | ,Group "postgres generated combination lexer tests" $ 326 | [ l (s <> s1) (t <> t1) 327 | | (s,t) <- postgresLexerTable ++ postgresShortOperatorTable 328 | , (s1,t1) <- postgresLexerTable ++ postgresShortOperatorTable 329 | , tokenListWillPrintAndLex postgres $ t ++ t1 330 | 331 | ] 332 | ,Group "generated postgres edgecase lexertests" $ 333 | [l s t 334 | | (s,t) <- edgeCaseCommentOps 335 | ++ edgeCasePlusMinusOps 336 | ++ edgeCasePlusMinusComments] 337 | 338 | ,Group "adhoc postgres lexertests" $ 339 | -- need more tests for */ to make sure it is caught if it is in the middle of a 340 | -- sequence of symbol letters 341 | [f "*/" 342 | ,f ":::" 343 | ,f "::::" 344 | ,f ":::::" 345 | ,f "@*/" 346 | ,f "-*/" 347 | ,f "12e3e4" 348 | ,f "12e3e4" 349 | ,f "12e3e4" 350 | ,f "12e3.4" 351 | ,f "12.4.5" 352 | ,f "12.4e5.6" 353 | ,f "12.4e5e7" 354 | -- special case allow this to lex to 1 .. 
2 355 | -- this is for 'for loops' in plpgsql 356 | ,l "1..2" [SqlNumber "1", Symbol "..", SqlNumber "2"] 357 | ] 358 | ] 359 | where 360 | edgeCaseCommentOps = 361 | [ (x <> "/* "-- "+", [Symbol x, Symbol "+"]) 372 | , (x <> "-", [Symbol x, Symbol "-"]) ] 373 | | x <- somePostgresOpsWhichWontAddTrailingPlusMinus 2 374 | ] 375 | edgeCasePlusMinusComments = 376 | [("---", [LineComment "---"]) 377 | ,("+--", [Symbol "+", LineComment "--"]) 378 | ,("-/**/", [Symbol "-", BlockComment "/**/"]) 379 | ,("+/**/", [Symbol "+", BlockComment "/**/"]) 380 | ] 381 | l :: HasCallStack => Text -> [Token] -> TestItem 382 | l src ast = testLex postgres src ast 383 | f :: HasCallStack => Text -> TestItem 384 | f src = lexFails postgres src 385 | 386 | sqlServerLexerTests :: TestItem 387 | sqlServerLexerTests = Group "sqlServerLexTests" $ 388 | [l s t | (s,t) <- 389 | [("@variable", [(PrefixedVariable '@' "variable")]) 390 | ,("#variable", [(PrefixedVariable '#' "variable")]) 391 | ,("[quoted identifier]", [(Identifier (Just ("[", "]")) "quoted identifier")]) 392 | ]] 393 | where 394 | l :: HasCallStack => Text -> [Token] -> TestItem 395 | l src ast = testLex sqlserver src ast 396 | 397 | oracleLexerTests :: TestItem 398 | oracleLexerTests = Group "oracleLexTests" $ 399 | [] -- nothing oracle specific atm 400 | 401 | mySqlLexerTests :: TestItem 402 | mySqlLexerTests = Group "mySqlLexerTests" $ 403 | [ l s t | (s,t) <- 404 | [("`quoted identifier`", [(Identifier (Just ("`", "`")) "quoted identifier")]) 405 | ] 406 | ] 407 | where 408 | l :: HasCallStack => Text -> [Token] -> TestItem 409 | l src ast = testLex mysql src ast 410 | 411 | odbcLexerTests :: TestItem 412 | odbcLexerTests = Group "odbcLexTests" $ 413 | [ lo s t | (s,t) <- 414 | [("{}", [Symbol "{", Symbol "}"]) 415 | ]] 416 | ++ [lno "{" 417 | ,lno "}"] 418 | where 419 | lo :: HasCallStack => Text -> [Token] -> TestItem 420 | lo src ast = testLex (sqlserver {diOdbc = True}) src ast 421 | lno :: HasCallStack => Text -> TestItem 422 | lno src = lexFails (sqlserver{diOdbc = False}) src 423 | 424 | 425 | combos :: [Char] -> Int -> [Text] 426 | combos _ 0 = [T.empty] 427 | combos l n = [ T.cons x tl | x <- l, tl <- combos l (n - 1) ] 428 | 429 | -------------------------------------------------------------------------------- /Language/SQL/SimpleSQL/Dialect.hs: -------------------------------------------------------------------------------- 1 | 2 | 3 | -- Data types to represent different dialect options 4 | 5 | {-# LANGUAGE DeriveDataTypeable #-} 6 | {-# LANGUAGE OverloadedStrings #-} 7 | module Language.SQL.SimpleSQL.Dialect 8 | (Dialect(..) 9 | ,ansi2011 10 | ,mysql 11 | ,postgres 12 | ,oracle 13 | ,sqlserver 14 | ) where 15 | 16 | import Data.Text (Text) 17 | import Data.Data (Data,Typeable) 18 | 19 | -- | Used to set the dialect used for parsing and pretty printing, 20 | -- very unfinished at the moment. 21 | -- 22 | -- The keyword handling works as follows: 23 | -- 24 | -- There is a list of reserved keywords. These will never parse as 25 | -- anything other than as a keyword, unless they are in one of the 26 | -- other lists. 27 | -- 28 | -- There is a list of \'identifier\' keywords. These are reserved 29 | -- keywords, with an exception that they will parse as an 30 | -- identifier in a scalar expression. They won't parse as 31 | -- identifiers in other places, e.g. column names or aliases. 32 | -- 33 | -- There is a list of \'app\' keywords. 
These are reserved keywords, 34 | -- with an exception that they will also parse in an \'app-like\' 35 | -- construct - a regular function call, or any of the aggregate and 36 | -- window variations. 37 | -- 38 | -- There is a list of special type names. This list serves two 39 | -- purposes - it is a list of the reserved keywords which are also 40 | -- type names, and it is a list of all the multi word type names. 41 | -- 42 | -- Every keyword should appear in the keywords lists, and then you can 43 | -- add them to the other lists if you want exceptions. Most things 44 | -- that refer to functions, types or variables that are keywords in 45 | -- the ansi standard, can be removed from the keywords lists 46 | -- completely with little effect. With most of the actual SQL 47 | -- keywords, removing them from the keyword list will result in 48 | -- lots of valid syntax no longer parsing (and probably bad parse 49 | -- error messages too). 50 | -- 51 | -- In the code, all special syntax which looks identical to regular 52 | -- identifiers or function calls (apart from the name), is treated 53 | -- like a regular identifier or function call. 54 | -- 55 | -- It's easy to break the parser by removing the wrong words from 56 | -- the keywords list or adding the wrong words to the other lists. 57 | 58 | data Dialect = Dialect 59 | { -- | reserved keywords 60 | diKeywords :: [Text] 61 | -- | keywords with identifier exception 62 | ,diIdentifierKeywords :: [Text] 63 | -- | keywords with app exception 64 | ,diAppKeywords :: [Text] 65 | -- | keywords with type exception plus all the type names which 66 | -- are multiple words 67 | ,diSpecialTypeNames :: [Text] 68 | -- | allow ansi fetch first syntax 69 | ,diFetchFirst :: Bool 70 | -- | allow limit keyword (mysql, postgres, 71 | -- ...) 
72 | ,diLimit :: Bool 73 | -- | allow parsing ODBC syntax 74 | ,diOdbc :: Bool 75 | -- | allow quoting identifiers with \`backquotes\` 76 | ,diBackquotedIden :: Bool 77 | -- | allow quoting identifiers with [square brackets] 78 | ,diSquareBracketQuotedIden :: Bool 79 | -- | allow identifiers with a leading at @example 80 | ,diAtIdentifier :: Bool 81 | -- | allow identifiers with a leading \# \#example 82 | ,diHashIdentifier :: Bool 83 | -- | allow positional identifiers like this: $1 84 | ,diPositionalArg :: Bool 85 | -- | allow postgres style dollar strings 86 | ,diDollarString :: Bool 87 | -- | allow strings with an e - e"example" 88 | ,diEString :: Bool 89 | -- | allow postgres style symbols 90 | ,diPostgresSymbols :: Bool 91 | -- | allow sql server style symbols 92 | ,diSqlServerSymbols :: Bool 93 | -- | allow sql server style for CONVERT function in format CONVERT(data_type(length), expression, style) 94 | ,diConvertFunction :: Bool 95 | -- | allow creating autoincrement columns 96 | ,diAutoincrement :: Bool 97 | -- | allow omitting the comma between constraint clauses 98 | ,diNonCommaSeparatedConstraints :: Bool 99 | -- | allow marking tables as "without rowid" 100 | ,diWithoutRowidTables :: Bool 101 | -- | allow omitting types for columns 102 | ,diOptionalColumnTypes :: Bool 103 | -- | allow mixing in DEFAULT clauses with other constraints 104 | ,diDefaultClausesAsConstraints :: Bool 105 | } 106 | deriving (Eq,Show,Read,Data,Typeable) 107 | 108 | -- | ansi sql 2011 dialect 109 | ansi2011 :: Dialect 110 | ansi2011 = Dialect {diKeywords = ansi2011ReservedKeywords 111 | ,diIdentifierKeywords = [] 112 | ,diAppKeywords = ["set"] 113 | ,diSpecialTypeNames = ansi2011TypeNames 114 | ,diFetchFirst = True 115 | ,diLimit = False 116 | ,diOdbc = False 117 | ,diBackquotedIden = False 118 | ,diSquareBracketQuotedIden = False 119 | ,diAtIdentifier = False 120 | ,diHashIdentifier = False 121 | ,diPositionalArg = False 122 | ,diDollarString = False 123 | ,diEString = False 124 | ,diPostgresSymbols = False 125 | ,diSqlServerSymbols = False 126 | ,diConvertFunction = False 127 | ,diAutoincrement = False 128 | ,diNonCommaSeparatedConstraints = False 129 | ,diWithoutRowidTables = False 130 | ,diOptionalColumnTypes = False 131 | ,diDefaultClausesAsConstraints = False 132 | } 133 | 134 | -- | mysql dialect 135 | mysql :: Dialect 136 | mysql = addLimit ansi2011 {diFetchFirst = False 137 | ,diBackquotedIden = True 138 | } 139 | 140 | -- | postgresql dialect 141 | postgres :: Dialect 142 | postgres = addLimit ansi2011 {diPositionalArg = True 143 | ,diDollarString = True 144 | ,diEString = True 145 | ,diPostgresSymbols = True} 146 | 147 | -- | oracle dialect 148 | oracle :: Dialect 149 | oracle = ansi2011 -- {} 150 | 151 | -- | microsoft sql server dialect 152 | sqlserver :: Dialect 153 | sqlserver = ansi2011 {diSquareBracketQuotedIden = True 154 | ,diAtIdentifier = True 155 | ,diHashIdentifier = True 156 | ,diOdbc = True 157 | ,diSqlServerSymbols = True 158 | ,diConvertFunction = True} 159 | 160 | addLimit :: Dialect -> Dialect 161 | addLimit d = d {diKeywords = "limit": diKeywords d 162 | ,diLimit = True} 163 | 164 | 165 | {- 166 | The keyword handling is quite strong - an alternative way to do it 167 | would be to have as few keywords as possible, and only require them 168 | to be quoted when this is needed to resolve a parsing ambiguity. 
169 | 170 | I don't think this is a good idea for genuine keywords (it probably is 171 | for all the 'fake' keywords in the standard - things which are 172 | essentially function names, or predefined variable names, or type 173 | names, eetc.). 174 | 175 | 1. working out exactly when each keyword would need to be quoted is 176 | quite error prone, and might change as the parser implementation is 177 | maintained - which would be terrible for users 178 | 179 | 2. it's not user friendly for the user to deal with a whole load of 180 | special cases - either something is a keyword, then you know you must 181 | always quote it, or it isn't, then you know you never need to quote 182 | it 183 | 184 | 3. I think not having exceptions makes for better error messages for 185 | the user, and a better sql code maintenance experience. 186 | 187 | This might not match actual existing SQL products that well, some of 188 | which I think have idiosyncratic rules about when a keyword must be 189 | quoted. If you want to match one of these dialects exactly with this 190 | parser, I think it will be a lot of work. 191 | -} 192 | 193 | ansi2011ReservedKeywords :: [Text] 194 | ansi2011ReservedKeywords = 195 | [--"abs" -- function 196 | "all" -- keyword only? 197 | ,"allocate" -- keyword 198 | ,"alter" -- keyword 199 | ,"and" -- keyword 200 | --,"any" -- keyword? and function 201 | ,"are" -- keyword 202 | ,"array" -- keyword, and used in some special places, like array[...], and array(subquery) 203 | --,"array_agg" -- function 204 | -- ,"array_max_cardinality" -- function 205 | ,"as" -- keyword 206 | ,"asensitive" -- keyword 207 | ,"asymmetric" -- keyword 208 | ,"at" -- keyword 209 | ,"atomic" -- keyword 210 | ,"authorization" -- keyword 211 | --,"avg" -- function 212 | ,"begin" -- keyword 213 | --,"begin_frame" -- identifier 214 | --,"begin_partition" -- identifier 215 | ,"between" -- keyword 216 | ,"bigint" -- type 217 | ,"binary" -- type 218 | ,"blob" -- type 219 | ,"boolean" -- type 220 | ,"both" -- keyword 221 | ,"by" -- keyword 222 | ,"call" -- keyword 223 | ,"called" -- keyword 224 | -- ,"cardinality" -- function + identifier? 225 | ,"cascaded" -- keyword 226 | ,"case" -- keyword 227 | ,"cast" -- special function 228 | -- ,"ceil" -- function 229 | -- ,"ceiling" -- function 230 | ,"char" -- type (+ keyword?) 231 | --,"char_length" -- function 232 | ,"character" -- type 233 | --,"character_length" -- function 234 | ,"check" -- keyword 235 | ,"clob" -- type 236 | ,"close" -- keyword 237 | -- ,"coalesce" -- function 238 | ,"collate" -- keyword 239 | --,"collect" -- function 240 | ,"column" -- keyword 241 | ,"commit" -- keyword 242 | ,"condition" -- keyword 243 | ,"connect" -- keyword 244 | ,"constraint" --keyword 245 | --,"contains" -- keyword? 246 | --,"convert" -- function? 247 | --,"corr" -- function 248 | ,"corresponding" --keyword 249 | --,"count" --function 250 | --,"covar_pop" -- function 251 | --,"covar_samp" --function 252 | ,"create" -- keyword 253 | ,"cross" -- keyword 254 | ,"cube" -- keyword 255 | --,"cume_dist" -- function 256 | ,"current" -- keyword 257 | -- ,"current_catalog" --identifier? 258 | --,"current_date" -- identifier 259 | --,"current_default_transform_group" -- identifier 260 | --,"current_path" -- identifier 261 | --,"current_role" -- identifier 262 | -- ,"current_row" -- identifier 263 | -- ,"current_schema" -- identifier 264 | -- ,"current_time" -- identifier 265 | --,"current_timestamp" -- identifier 266 | --,"current_transform_group_for_type" -- identifier, or keyword? 
267 | --,"current_user" -- identifier 268 | ,"cursor" -- keyword 269 | ,"cycle" --keyword 270 | ,"date" -- type 271 | --,"day" -- keyword? - the parser needs it to not be a keyword to parse extract at the moment 272 | ,"deallocate" -- keyword 273 | ,"dec" -- type 274 | ,"decimal" -- type 275 | ,"declare" -- keyword 276 | --,"default" -- identifier + keyword 277 | ,"delete" -- keyword 278 | --,"dense_rank" -- functino 279 | ,"deref" -- keyword 280 | ,"describe" -- keyword 281 | ,"deterministic" 282 | ,"disconnect" 283 | ,"distinct" 284 | ,"double" 285 | ,"drop" 286 | ,"dynamic" 287 | ,"each" 288 | --,"element" 289 | ,"else" 290 | ,"end" 291 | -- ,"end_frame" -- identifier 292 | -- ,"end_partition" -- identifier 293 | ,"end-exec" -- no idea what this is 294 | ,"equals" 295 | ,"escape" 296 | --,"every" 297 | ,"except" 298 | ,"exec" 299 | ,"execute" 300 | ,"exists" 301 | ,"exp" 302 | ,"external" 303 | ,"extract" 304 | --,"false" 305 | ,"fetch" 306 | ,"filter" 307 | -- ,"first_value" 308 | ,"float" 309 | --,"floor" 310 | ,"for" 311 | ,"foreign" 312 | -- ,"frame_row" -- identifier 313 | ,"free" 314 | ,"from" 315 | ,"full" 316 | ,"function" 317 | --,"fusion" 318 | ,"get" 319 | ,"global" 320 | ,"grant" 321 | ,"group" 322 | --,"grouping" 323 | ,"groups" 324 | ,"having" 325 | ,"hold" 326 | --,"hour" 327 | ,"identity" 328 | ,"in" 329 | ,"indicator" 330 | ,"inner" 331 | ,"inout" 332 | ,"insensitive" 333 | ,"insert" 334 | ,"int" 335 | ,"integer" 336 | ,"intersect" 337 | --,"intersection" 338 | ,"interval" 339 | ,"into" 340 | ,"is" 341 | ,"join" 342 | --,"lag" 343 | ,"language" 344 | ,"large" 345 | --,"last_value" 346 | ,"lateral" 347 | --,"lead" 348 | ,"leading" 349 | ,"left" 350 | ,"like" 351 | ,"like_regex" 352 | --,"ln" 353 | ,"local" 354 | ,"localtime" 355 | ,"localtimestamp" 356 | --,"lower" 357 | ,"match" 358 | --,"max" 359 | ,"member" 360 | ,"merge" 361 | ,"method" 362 | --,"min" 363 | --,"minute" 364 | --,"mod" 365 | ,"modifies" 366 | --,"module" 367 | --,"month" 368 | ,"multiset" 369 | ,"national" 370 | ,"natural" 371 | ,"nchar" 372 | ,"nclob" 373 | ,"new" 374 | ,"no" 375 | ,"none" 376 | ,"normalize" 377 | ,"not" 378 | --,"nth_value" 379 | ,"ntile" 380 | --,"null" 381 | --,"nullif" 382 | ,"numeric" 383 | ,"octet_length" 384 | ,"occurrences_regex" 385 | ,"of" 386 | ,"offset" 387 | ,"old" 388 | ,"on" 389 | ,"only" 390 | ,"open" 391 | ,"or" 392 | ,"order" 393 | ,"out" 394 | ,"outer" 395 | ,"over" 396 | ,"overlaps" 397 | ,"overlay" 398 | ,"parameter" 399 | ,"partition" 400 | ,"percent" 401 | --,"percent_rank" 402 | --,"percentile_cont" 403 | --,"percentile_disc" 404 | ,"period" 405 | ,"portion" 406 | ,"position" 407 | ,"position_regex" 408 | --,"power" 409 | ,"precedes" 410 | ,"precision" 411 | ,"prepare" 412 | ,"primary" 413 | ,"procedure" 414 | ,"range" 415 | --,"rank" 416 | ,"reads" 417 | ,"real" 418 | ,"recursive" 419 | ,"ref" 420 | ,"references" 421 | ,"referencing" 422 | --,"regr_avgx" 423 | --,"regr_avgy" 424 | --,"regr_count" 425 | --,"regr_intercept" 426 | --,"regr_r2" 427 | --,"regr_slope" 428 | --,"regr_sxx" 429 | --,"regr_sxy" 430 | --,"regr_syy" 431 | ,"release" 432 | ,"result" 433 | ,"return" 434 | ,"returns" 435 | ,"revoke" 436 | ,"right" 437 | ,"rollback" 438 | ,"rollup" 439 | --,"row" 440 | --,"row_number" 441 | ,"rows" 442 | ,"savepoint" 443 | ,"scope" 444 | ,"scroll" 445 | ,"search" 446 | --,"second" 447 | ,"select" 448 | ,"sensitive" 449 | --,"session_user" 450 | ,"set" 451 | ,"similar" 452 | ,"smallint" 453 | --,"some" 454 | ,"specific" 455 | ,"specifictype" 456 | ,"sql" 457 | 
,"sqlexception" 458 | ,"sqlstate" 459 | ,"sqlwarning" 460 | --,"sqrt" 461 | --,"start" 462 | ,"static" 463 | --,"stddev_pop" 464 | --,"stddev_samp" 465 | ,"submultiset" 466 | --,"substring" 467 | ,"substring_regex" 468 | ,"succeeds" 469 | --,"sum" 470 | ,"symmetric" 471 | ,"system" 472 | --,"system_time" 473 | --,"system_user" 474 | ,"table" 475 | ,"tablesample" 476 | ,"then" 477 | ,"time" 478 | ,"timestamp" 479 | ,"timezone_hour" 480 | ,"timezone_minute" 481 | ,"to" 482 | ,"trailing" 483 | ,"translate" 484 | ,"translate_regex" 485 | ,"translation" 486 | ,"treat" 487 | ,"trigger" 488 | ,"truncate" 489 | --,"trim" 490 | --,"trim_array" 491 | --,"true" 492 | ,"uescape" 493 | ,"union" 494 | ,"unique" 495 | --,"unknown" 496 | ,"unnest" 497 | ,"update" 498 | ,"upper" 499 | --,"user" 500 | ,"using" 501 | --,"value" 502 | ,"values" 503 | ,"value_of" 504 | --,"var_pop" 505 | --,"var_samp" 506 | ,"varbinary" 507 | ,"varchar" 508 | ,"varying" 509 | ,"versioning" 510 | ,"when" 511 | ,"whenever" 512 | ,"where" 513 | --,"width_bucket" 514 | ,"window" 515 | ,"with" 516 | ,"within" 517 | ,"without" 518 | --,"year" 519 | ] 520 | 521 | 522 | ansi2011TypeNames :: [Text] 523 | ansi2011TypeNames = 524 | ["double precision" 525 | ,"character varying" 526 | ,"char varying" 527 | ,"character large object" 528 | ,"char large object" 529 | ,"national character" 530 | ,"national char" 531 | ,"national character varying" 532 | ,"national char varying" 533 | ,"national character large object" 534 | ,"nchar large object" 535 | ,"nchar varying" 536 | ,"bit varying" 537 | ,"binary large object" 538 | ,"binary varying" 539 | -- reserved keyword typenames: 540 | ,"array" 541 | ,"bigint" 542 | ,"binary" 543 | ,"blob" 544 | ,"boolean" 545 | ,"char" 546 | ,"character" 547 | ,"clob" 548 | ,"date" 549 | ,"dec" 550 | ,"decimal" 551 | ,"double" 552 | ,"float" 553 | ,"int" 554 | ,"integer" 555 | ,"nchar" 556 | ,"nclob" 557 | ,"numeric" 558 | ,"real" 559 | ,"smallint" 560 | ,"time" 561 | ,"timestamp" 562 | ,"varchar" 563 | ,"varbinary" 564 | ] 565 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/SQL2011DataManipulation.hs: -------------------------------------------------------------------------------- 1 | 2 | -- Section 14 in Foundation 3 | 4 | 5 | {-# LANGUAGE OverloadedStrings #-} 6 | module Language.SQL.SimpleSQL.SQL2011DataManipulation (sql2011DataManipulationTests) where 7 | 8 | import Language.SQL.SimpleSQL.TestTypes 9 | import Language.SQL.SimpleSQL.Syntax 10 | import Language.SQL.SimpleSQL.TestRunners 11 | import Data.Text (Text) 12 | 13 | sql2011DataManipulationTests :: TestItem 14 | sql2011DataManipulationTests = Group "sql 2011 data manipulation tests" 15 | [ 16 | 17 | 18 | {- 19 | 14 Data manipulation 20 | 21 | 22 | 14.1 23 | 24 | ::= 25 | DECLARE 26 | FOR 27 | 28 | 14.2 29 | 30 | ::= 31 | [ ] [ ] CURSOR 32 | [ ] 33 | [ ] 34 | 35 | ::= 36 | SENSITIVE 37 | | INSENSITIVE 38 | | ASENSITIVE 39 | 40 | ::= 41 | SCROLL 42 | | NO SCROLL 43 | 44 | ::= 45 | WITH HOLD 46 | | WITHOUT HOLD 47 | 48 | ::= 49 | WITH RETURN 50 | | WITHOUT RETURN 51 | 52 | 14.3 53 | 54 | ::= 55 | [ ] 56 | 57 | ::= 58 | FOR { READ ONLY | UPDATE [ OF ] } 59 | 60 | 14.4 61 | 62 | ::= 63 | OPEN 64 | 65 | 14.5 66 | 67 | ::= 68 | FETCH [ [ ] FROM ] INTO 69 | 70 | ::= 71 | NEXT 72 | | PRIOR 73 | | FIRST 74 | | LAST 75 | | { ABSOLUTE | RELATIVE } 76 | 77 | ::= 78 | [ { }... ] 79 | 80 | 81 | 14.6 82 | 83 | ::= 84 | CLOSE 85 | 86 | 14.7 ::= 89 | SELECT [ ] 91 |
92 | 93 | 94 | 14.8 <delete statement: positioned> 95 | 96 | <delete statement: positioned> ::= 97 | DELETE FROM <target table> 98 | [ FOR PORTION OF <application time period name> 99 | FROM <point in time 1> TO <point in time 2> ] 100 | WHERE CURRENT OF <cursor name> 101 |
102 | <target table> ::= 103 | <table name> 104 | | ONLY <left paren> <table name> <right paren>
105 | 106 | 14.9 107 | 108 | ::= 109 | DELETE FROM 110 | [ FOR PORTION OF 111 | FROM TO ] 112 | [ [ AS ] ] 113 | [ WHERE ] 114 | -} 115 | 116 | s "delete from t" 117 | $ Delete [Name Nothing "t"] Nothing Nothing 118 | 119 | ,s "delete from t as u" 120 | $ Delete [Name Nothing "t"] (Just (Name Nothing "u")) Nothing 121 | 122 | ,s "delete from t where x = 5" 123 | $ Delete [Name Nothing "t"] Nothing 124 | (Just $ BinOp (Iden [Name Nothing "x"]) [Name Nothing "="] (NumLit "5")) 125 | 126 | 127 | ,s "delete from t as u where u.x = 5" 128 | $ Delete [Name Nothing "t"] (Just (Name Nothing "u")) 129 | (Just $ BinOp (Iden [Name Nothing "u", Name Nothing "x"]) [Name Nothing "="] (NumLit "5")) 130 | 131 | {- 132 | 14.10 133 | 134 | ::= 135 | TRUNCATE TABLE [ ] 136 | 137 | ::= 138 | CONTINUE IDENTITY 139 | | RESTART IDENTITY 140 | -} 141 | 142 | ,s "truncate table t" 143 | $ Truncate [Name Nothing "t"] DefaultIdentityRestart 144 | 145 | ,s "truncate table t continue identity" 146 | $ Truncate [Name Nothing "t"] ContinueIdentity 147 | 148 | ,s "truncate table t restart identity" 149 | $ Truncate [Name Nothing "t"] RestartIdentity 150 | 151 | 152 | {- 153 | 14.11 154 | 155 | ::= 156 | INSERT INTO 157 | 158 | ::= 159 |
160 | 161 | ::= 162 | 163 | | 164 | | 165 | 166 | ::= 167 | [ ] 168 | [ ] 169 | 170 | 171 | ::= 172 | [ ] 173 | [ ] 174 | 175 | 176 | ::= 177 | OVERRIDING USER VALUE 178 | | OVERRIDING SYSTEM VALUE 179 | 180 | ::= 181 | DEFAULT VALUES 182 | 183 | ::= 184 | 185 | -} 186 | 187 | ,s "insert into t select * from u" 188 | $ Insert [Name Nothing "t"] Nothing 189 | $ InsertQuery $ toQueryExpr $ makeSelect 190 | {msSelectList = [(Star, Nothing)] 191 | ,msFrom = [TRSimple [Name Nothing "u"]]} 192 | 193 | ,s "insert into t(a,b,c) select * from u" 194 | $ Insert [Name Nothing "t"] (Just [Name Nothing "a", Name Nothing "b", Name Nothing "c"]) 195 | $ InsertQuery $ toQueryExpr $ makeSelect 196 | {msSelectList = [(Star, Nothing)] 197 | ,msFrom = [TRSimple [Name Nothing "u"]]} 198 | 199 | ,s "insert into t default values" 200 | $ Insert [Name Nothing "t"] Nothing DefaultInsertValues 201 | 202 | ,s "insert into t values(1,2)" 203 | $ Insert [Name Nothing "t"] Nothing 204 | $ InsertQuery $ Values [[NumLit "1", NumLit "2"]] 205 | 206 | ,s "insert into t values (1,2),(3,4)" 207 | $ Insert [Name Nothing "t"] Nothing 208 | $ InsertQuery $ Values [[NumLit "1", NumLit "2"] 209 | ,[NumLit "3", NumLit "4"]] 210 | 211 | ,s 212 | "insert into t values (default,null,array[],multiset[])" 213 | $ Insert [Name Nothing "t"] Nothing 214 | $ InsertQuery $ Values [[Iden [Name Nothing "default"] 215 | ,Iden [Name Nothing "null"] 216 | ,Array (Iden [Name Nothing "array"]) [] 217 | ,MultisetCtor []]] 218 | 219 | 220 | {- 221 | 14.12 222 | 223 | ::= 224 | MERGE INTO [ [ AS ] ] 225 | USING
226 | ON 227 | 228 | merge into t 229 | using t 230 | on a = b 231 | merge operation specification 232 | 233 | merge into t as u 234 | using (table factor | joined expression) 235 | 236 | MERGE INTO tablename USING table_reference ON (condition) 237 | WHEN MATCHED THEN 238 | UPDATE SET column1 = value1 [, column2 = value2 ...] 239 | WHEN NOT MATCHED THEN 240 | INSERT (column1 [, column2 ...]) VALUES (value1 [, value2 ... 241 | 242 | merge into t23 243 | using t42 244 | on t42.id = t23.id 245 | when matched then 246 | update 247 | set t23.col1 = t42.col1 248 | when not matched then 249 | insert (id, col1) 250 | values (t42.id, t42.col1) 251 | 252 | 253 | 254 | MERGE INTO TableA u 255 | 256 | USING (SELECT b.Key1, b.ColB1, c.ColC1 257 | 258 | FROM TableB b 259 | 260 | INNER JOIN TableC c ON c.KeyC1 = b.KeyB1 261 | 262 | ) s 263 | 264 | ON (u.KeyA1 = s.KeyA1) 265 | 266 | WHEN MATCHED THEN 267 | 268 | UPDATE SET u.ColA1 = s.ColB1, u.ColA2 = s.ColC1 269 | 270 | 271 | MERGE INTO Department 272 | USING NewDept AS ND 273 | ON nd.Department_Number = Department. 274 | Department_Number 275 | WHEN MATCHED THEN UPDATE 276 | SET budget_amount = nd.Budget_Amount 277 | WHEN NOT MATCHED THEN INSERT 278 | VALUES 279 | (nd.Department_Number, nd.Department_ 280 | Name, nd.Budget_Amount, 281 | nd.Manager_Employee_Number); 282 | 283 | 284 | MERGE INTO Orders2 285 | USING Orders3 286 | ON ORDERS3.Order_Number = Orders2. 287 | Order_Number 288 | WHEN NOT MATCHED THEN INSERT 289 | Orders3.order_number, Orders3. 290 | invoice_number, 291 | Orders3.customer_number, Orders3. 292 | initial_order_date, 293 | Orders3.invoice_date, Orders3. 294 | invoice_amount); 295 | 296 | MERGE INTO Orders2 297 | USING Orders3 298 | ON ORDERS3.Order_Number = Orders2. 299 | Order_Number AND 1=0 300 | WHEN NOT MATCHED THEN INSERT 301 | (Orders3.order_number, Orders3.invoice_number, 302 | Orders3.customer_number, Orders3. 303 | initial_order_date, 304 | Orders3.invoice_date, Orders3. 305 | invoice_amount); 306 | 307 | MERGE INTO Department 308 | USING NewDept AS ND 309 | ON nd.Department_Number = Department. 310 | Department_Number 311 | WHEN MATCHED THEN UPDATE 312 | SET budget_amount = nd.Budget_Amount 313 | LOGGING ALL ERRORS WITH NO LIMIT; 314 | 315 | 316 | MERGE INTO Department 317 | USING 318 | (SELECT Department_Number, 319 | department_name, 320 | Budget_Amount, 321 | Manager_Employee_Number 322 | FROM NewDept 323 | WHERE Department_Number IN 324 | (SELECT Department_Number 325 | FROM Employee)) AS m 326 | ON m.Department_Number = Department. 
327 | Department_Number 328 | WHEN MATCHED THEN UPDATE 329 | SET budget_amount = m.Budget_Amount 330 | WHEN NOT MATCHED THEN INSERT 331 | (m.Department_Number, m.Department_ 332 | Name, m.Budget_Amount, 333 | m.Manager_Employee_Number) 334 | LOGGING ALL ERRORS WITH NO LIMIT; 335 | 336 | 337 | MERGE INTO Customers AS c 338 | USING Moved AS m 339 | ON m.SSN = c.SSN 340 | WHEN MATCHED 341 | THEN UPDATE 342 | SET Street = m.Street, 343 | HouseNo = m.HouseNo, 344 | City = m.City; 345 | 346 | MERGE INTO CentralOfficeAccounts AS C -- Target 347 | USING BranchOfficeAccounts AS B -- Source 348 | ON C.account_nbr = B.account_nbr 349 | WHEN MATCHED THEN -- On match update 350 | UPDATE SET C.company_name = B.company_name, 351 | C.primary_contact = B.primary_contact, 352 | C.contact_phone = B.contact_phone 353 | WHEN NOT MATCHED THEN -- Add missing 354 | INSERT (account_nbr, company_name, primary_contact, contact_phone) 355 | VALUES (B.account_nbr, B.company_name, B.primary_contact, B.contact_phone); 356 | 357 | SELECT account_nbr, company_name, primary_contact, contact_phone 358 | FROM CentralOfficeAccounts; 359 | 360 | 361 | 362 | MERGE INTO CentralOfficeAccounts AS C -- Target 363 | USING BranchOfficeAccounts AS B -- Source 364 | ON C.account_nbr = B.account_nbr 365 | WHEN MATCHED -- On match update 366 | AND (C.company_name <> B.company_name -- Additional search conditions 367 | OR C.primary_contact <> B.primary_contact 368 | OR C.contact_phone <> B.contact_phone) THEN 369 | UPDATE SET C.company_name = B.company_name, 370 | C.primary_contact = B.primary_contact, 371 | C.contact_phone = B.contact_phone 372 | WHEN NOT MATCHED THEN -- Add missing 373 | INSERT (account_nbr, company_name, primary_contact, contact_phone) 374 | VALUES (B.account_nbr, B.company_name, B.primary_contact, B.contact_phone); 375 | 376 | 377 | 378 | MERGE INTO CentralOfficeAccounts AS C -- Target 379 | USING BranchOfficeAccounts AS B -- Source 380 | ON C.account_nbr = B.account_nbr 381 | WHEN MATCHED -- On match update 382 | AND (C.company_name <> B.company_name -- Additional search conditions 383 | OR C.primary_contact <> B.primary_contact 384 | OR C.contact_phone <> B.contact_phone) THEN 385 | UPDATE SET C.company_name = B.company_name, 386 | C.primary_contact = B.primary_contact, 387 | C.contact_phone = B.contact_phone 388 | WHEN NOT MATCHED THEN -- Add missing 389 | INSERT (account_nbr, company_name, primary_contact, contact_phone) 390 | VALUES (B.account_nbr, B.company_name, B.primary_contact, B.contact_phone) 391 | WHEN SOURCE NOT MATCHED THEN -- Delete missing from source 392 | DELETE; 393 | 394 | SELECT account_nbr, company_name, primary_contact, contact_phone 395 | FROM CentralOfficeAccounts; 396 | 397 | 398 | 399 | 400 | ::= 401 | 402 | 403 | ::= 404 | ... 405 | 406 | ::= 407 | 408 | | 409 | 410 | ::= 411 | WHEN MATCHED [ AND ] 412 | THEN 413 | 414 | ::= 415 | 416 | | 417 | 418 | ::= 419 | WHEN NOT MATCHED [ AND ] 420 | THEN 421 | 422 | ::= 423 | UPDATE SET 424 | 425 | ::= 426 | DELETE 427 | 428 | ::= 429 | INSERT [ ] 430 | [ ] 431 | VALUES 432 | 433 | ::= 434 | 435 | [ { }... 
] 436 | 437 | 438 | ::= 439 | 440 | | 441 | 442 | 14.13 443 | 444 | ::= 445 | UPDATE [ [ AS ] ] 446 | SET 447 | WHERE CURRENT OF 448 | 449 | 14.14 450 | 451 | ::= 452 | UPDATE 453 | [ FOR PORTION OF 454 | FROM TO ] 455 | [ [ AS ] ] 456 | SET 457 | [ WHERE ] 458 | -} 459 | 460 | 461 | ,s "update t set a=b" 462 | $ Update [Name Nothing "t"] Nothing 463 | [Set [Name Nothing "a"] (Iden [Name Nothing "b"])] Nothing 464 | 465 | ,s "update t set a=b, c=5" 466 | $ Update [Name Nothing "t"] Nothing 467 | [Set [Name Nothing "a"] (Iden [Name Nothing "b"]) 468 | ,Set [Name Nothing "c"] (NumLit "5")] Nothing 469 | 470 | 471 | ,s "update t set a=b where a>5" 472 | $ Update [Name Nothing "t"] Nothing 473 | [Set [Name Nothing "a"] (Iden [Name Nothing "b"])] 474 | $ Just $ BinOp (Iden [Name Nothing "a"]) [Name Nothing ">"] (NumLit "5") 475 | 476 | 477 | ,s "update t as u set a=b where u.a>5" 478 | $ Update [Name Nothing "t"] (Just $ Name Nothing "u") 479 | [Set [Name Nothing "a"] (Iden [Name Nothing "b"])] 480 | $ Just $ BinOp (Iden [Name Nothing "u",Name Nothing "a"]) 481 | [Name Nothing ">"] (NumLit "5") 482 | 483 | ,s "update t set (a,b)=(3,5)" 484 | $ Update [Name Nothing "t"] Nothing 485 | [SetMultiple [[Name Nothing "a"],[Name Nothing "b"]] 486 | [NumLit "3", NumLit "5"]] Nothing 487 | 488 | 489 | 490 | {- 491 | 14.15 492 | 493 | ::= 494 | [ { }... ] 495 | 496 | ::= 497 | 498 | | 499 | 500 | ::= 501 | 502 | | 503 | 504 | ::= 505 | 506 | 507 | ::= 508 | [ { }... ] 509 | 510 | ::= 511 | 512 | 513 | ::= 514 | 515 | | 516 | 517 | 518 | ::= 519 | 520 | 521 | ::= 522 | 523 | 524 | ::= 525 | 526 | | 527 | 528 | ::= 529 | 530 | | 531 | 532 | 14.16 533 | 534 | ::= 535 | DECLARE LOCAL TEMPORARY TABLE
536 | [ ON COMMIT
ROWS ] 537 | 538 | declare local temporary table t (a int) [on commit {preserve | delete} rows] 539 | 540 | 14.17 541 | 542 | ::= 543 | FREE LOCATOR [ { }... ] 544 | 545 | ::= 546 | 547 | | 548 | | 549 | 550 | 14.18 551 | 552 | ::= 553 | HOLD LOCATOR [ { }... ] 554 | -} 555 | 556 | 557 | ] 558 | 559 | s :: HasCallStack => Text -> Statement -> TestItem 560 | s src ast = testStatement ansi2011 src ast 561 | -------------------------------------------------------------------------------- /tests/Language/SQL/SimpleSQL/ScalarExprs.hs: -------------------------------------------------------------------------------- 1 | 2 | -- Tests for parsing scalar expressions 3 | 4 | {-# LANGUAGE OverloadedStrings #-} 5 | module Language.SQL.SimpleSQL.ScalarExprs (scalarExprTests) where 6 | 7 | import Language.SQL.SimpleSQL.TestTypes 8 | import Language.SQL.SimpleSQL.Syntax 9 | import Language.SQL.SimpleSQL.TestRunners 10 | 11 | import Data.Text (Text) 12 | 13 | scalarExprTests :: TestItem 14 | scalarExprTests = Group "scalarExprTests" 15 | [literals 16 | ,identifiers 17 | ,star 18 | ,parameter 19 | ,dots 20 | ,app 21 | ,caseexp 22 | ,convertfun 23 | ,operators 24 | ,parens 25 | ,subqueries 26 | ,aggregates 27 | ,windowFunctions 28 | ,functionsWithReservedNames 29 | ] 30 | 31 | t :: HasCallStack => Text -> ScalarExpr -> TestItem 32 | t src ast = testScalarExpr ansi2011 src ast 33 | 34 | td :: HasCallStack => Dialect -> Text -> ScalarExpr -> TestItem 35 | td d src ast = testScalarExpr d src ast 36 | 37 | 38 | 39 | literals :: TestItem 40 | literals = Group "literals" 41 | [t "3" $ NumLit "3" 42 | ,t "3." $ NumLit "3." 43 | ,t "3.3" $ NumLit "3.3" 44 | ,t ".3" $ NumLit ".3" 45 | ,t "3.e3" $ NumLit "3.e3" 46 | ,t "3.3e3" $ NumLit "3.3e3" 47 | ,t ".3e3" $ NumLit ".3e3" 48 | ,t "3e3" $ NumLit "3e3" 49 | ,t "3e+3" $ NumLit "3e+3" 50 | ,t "3e-3" $ NumLit "3e-3" 51 | ,t "'string'" $ StringLit "'" "'" "string" 52 | ,t "'string with a '' quote'" $ StringLit "'" "'" "string with a '' quote" 53 | ,t "'1'" $ StringLit "'" "'" "1" 54 | ,t "interval '3' day" 55 | $ IntervalLit Nothing "3" (Itf "day" Nothing) Nothing 56 | ,t "interval '3' day (3)" 57 | $ IntervalLit Nothing "3" (Itf "day" $ Just (3,Nothing)) Nothing 58 | ,t "interval '3 weeks'" $ TypedLit (TypeName [Name Nothing "interval"]) "3 weeks" 59 | ] 60 | 61 | identifiers :: TestItem 62 | identifiers = Group "identifiers" 63 | [t "iden1" $ Iden [Name Nothing "iden1"] 64 | --,("t.a", Iden2 "t" "a") 65 | ,t "\"quoted identifier\"" $ Iden [Name (Just ("\"","\"")) "quoted identifier"] 66 | ,t "\"from\"" $ Iden [Name (Just ("\"","\"")) "from"] 67 | ] 68 | 69 | star :: TestItem 70 | star = Group "star" 71 | [t "count(*)" $ App [Name Nothing "count"] [Star] 72 | ,t "ROW(t.*,42)" $ App [Name Nothing "ROW"] [QStar [Name Nothing "t"], NumLit "42"] 73 | ] 74 | 75 | parameter :: TestItem 76 | parameter = Group "parameter" 77 | [td ansi2011 "?" 
Parameter 78 | ,td postgres "$13" $ PositionalArg 13] 79 | 80 | dots :: TestItem 81 | dots = Group "dot" 82 | [t "t.a" $ Iden [Name Nothing "t",Name Nothing "a"] 83 | ,t "a.b.c" $ Iden [Name Nothing "a",Name Nothing "b",Name Nothing "c"] 84 | ,t "ROW(t.*,42)" $ App [Name Nothing "ROW"] [QStar [Name Nothing "t"], NumLit "42"] 85 | ] 86 | 87 | app :: TestItem 88 | app = Group "app" 89 | [t "f()" $ App [Name Nothing "f"] [] 90 | ,t "f(a)" $ App [Name Nothing "f"] [Iden [Name Nothing "a"]] 91 | ,t "f(a,b)" $ App [Name Nothing "f"] [Iden [Name Nothing "a"], Iden [Name Nothing "b"]] 92 | ] 93 | 94 | caseexp :: TestItem 95 | caseexp = Group "caseexp" 96 | [t "case a when 1 then 2 end" 97 | $ Case (Just $ Iden [Name Nothing "a"]) [([NumLit "1"] 98 | ,NumLit "2")] Nothing 99 | 100 | ,t "case a when 1 then 2 when 3 then 4 end" 101 | $ Case (Just $ Iden [Name Nothing "a"]) [([NumLit "1"], NumLit "2") 102 | ,([NumLit "3"], NumLit "4")] Nothing 103 | 104 | ,t "case a when 1 then 2 when 3 then 4 else 5 end" 105 | $ Case (Just $ Iden [Name Nothing "a"]) [([NumLit "1"], NumLit "2") 106 | ,([NumLit "3"], NumLit "4")] 107 | (Just $ NumLit "5") 108 | 109 | ,t "case when a=1 then 2 when a=3 then 4 else 5 end" 110 | $ Case Nothing [([BinOp (Iden [Name Nothing "a"]) [Name Nothing "="] (NumLit "1")], NumLit "2") 111 | ,([BinOp (Iden [Name Nothing "a"]) [Name Nothing "="] (NumLit "3")], NumLit "4")] 112 | (Just $ NumLit "5") 113 | 114 | ,t "case a when 1,2 then 10 when 3,4 then 20 end" 115 | $ Case (Just $ Iden [Name Nothing "a"]) [([NumLit "1",NumLit "2"] 116 | ,NumLit "10") 117 | ,([NumLit "3",NumLit "4"] 118 | ,NumLit "20")] 119 | Nothing 120 | ] 121 | 122 | convertfun :: TestItem 123 | convertfun = Group "convert" 124 | [td sqlserver "CONVERT(varchar, 25.65)" 125 | $ Convert (TypeName [Name Nothing "varchar"]) (NumLit "25.65") Nothing 126 | ,td sqlserver "CONVERT(datetime, '2017-08-25')" 127 | $ Convert (TypeName [Name Nothing "datetime"]) (StringLit "'" "'" "2017-08-25") Nothing 128 | ,td sqlserver "CONVERT(varchar, '2017-08-25', 101)" 129 | $ Convert (TypeName [Name Nothing "varchar"]) (StringLit "'" "'" "2017-08-25") (Just 101) 130 | ] 131 | 132 | operators :: TestItem 133 | operators = Group "operators" 134 | [binaryOperators 135 | ,unaryOperators 136 | ,casts 137 | ,miscOps] 138 | 139 | binaryOperators :: TestItem 140 | binaryOperators = Group "binaryOperators" 141 | [t "a + b" $ BinOp (Iden [Name Nothing "a"]) [Name Nothing "+"] (Iden [Name Nothing "b"]) 142 | -- sanity check fixities 143 | -- todo: add more fixity checking 144 | 145 | ,t "a + b * c" 146 | $ BinOp (Iden [Name Nothing "a"]) [Name Nothing "+"] 147 | (BinOp (Iden [Name Nothing "b"]) [Name Nothing "*"] (Iden [Name Nothing "c"])) 148 | 149 | ,t "a * b + c" 150 | $ BinOp (BinOp (Iden [Name Nothing "a"]) [Name Nothing "*"] (Iden [Name Nothing "b"])) 151 | [Name Nothing "+"] (Iden [Name Nothing "c"]) 152 | ] 153 | 154 | unaryOperators :: TestItem 155 | unaryOperators = Group "unaryOperators" 156 | [t "not a" $ PrefixOp [Name Nothing "not"] $ Iden [Name Nothing "a"] 157 | ,t "not not a" $ PrefixOp [Name Nothing "not"] $ PrefixOp [Name Nothing "not"] $ Iden [Name Nothing "a"] 158 | ,t "+a" $ PrefixOp [Name Nothing "+"] $ Iden [Name Nothing "a"] 159 | ,t "-a" $ PrefixOp [Name Nothing "-"] $ Iden [Name Nothing "a"] 160 | ] 161 | 162 | 163 | casts :: TestItem 164 | casts = Group "operators" 165 | [t "cast('1' as int)" 166 | $ Cast (StringLit "'" "'" "1") $ TypeName [Name Nothing "int"] 167 | 168 | ,t "int '3'" 169 | $ TypedLit (TypeName [Name 
Nothing "int"]) "3" 170 | 171 | ,t "cast('1' as double precision)" 172 | $ Cast (StringLit "'" "'" "1") $ TypeName [Name Nothing "double precision"] 173 | 174 | ,t "cast('1' as float(8))" 175 | $ Cast (StringLit "'" "'" "1") $ PrecTypeName [Name Nothing "float"] 8 176 | 177 | ,t "cast('1' as decimal(15,2))" 178 | $ Cast (StringLit "'" "'" "1") $ PrecScaleTypeName [Name Nothing "decimal"] 15 2 179 | 180 | ,t "double precision '3'" 181 | $ TypedLit (TypeName [Name Nothing "double precision"]) "3" 182 | ] 183 | 184 | subqueries :: TestItem 185 | subqueries = Group "unaryOperators" 186 | [t "exists (select a from t)" $ SubQueryExpr SqExists ms 187 | ,t "(select a from t)" $ SubQueryExpr SqSq ms 188 | 189 | ,t "a in (select a from t)" 190 | $ In True (Iden [Name Nothing "a"]) (InQueryExpr ms) 191 | 192 | ,t "a not in (select a from t)" 193 | $ In False (Iden [Name Nothing "a"]) (InQueryExpr ms) 194 | 195 | ,t "a > all (select a from t)" 196 | $ QuantifiedComparison (Iden [Name Nothing "a"]) [Name Nothing ">"] CPAll ms 197 | 198 | ,t "a = some (select a from t)" 199 | $ QuantifiedComparison (Iden [Name Nothing "a"]) [Name Nothing "="] CPSome ms 200 | 201 | ,t "a <= any (select a from t)" 202 | $ QuantifiedComparison (Iden [Name Nothing "a"]) [Name Nothing "<="] CPAny ms 203 | ] 204 | where 205 | ms = toQueryExpr $ makeSelect 206 | {msSelectList = [(Iden [Name Nothing "a"],Nothing)] 207 | ,msFrom = [TRSimple [Name Nothing "t"]] 208 | } 209 | 210 | miscOps :: TestItem 211 | miscOps = Group "unaryOperators" 212 | [t "a in (1,2,3)" 213 | $ In True (Iden [Name Nothing "a"]) $ InList $ map NumLit ["1","2","3"] 214 | 215 | ,t "a is null" $ PostfixOp [Name Nothing "is null"] (Iden [Name Nothing "a"]) 216 | ,t "a is not null" $ PostfixOp [Name Nothing "is not null"] (Iden [Name Nothing "a"]) 217 | ,t "a is true" $ PostfixOp [Name Nothing "is true"] (Iden [Name Nothing "a"]) 218 | ,t "a is not true" $ PostfixOp [Name Nothing "is not true"] (Iden [Name Nothing "a"]) 219 | ,t "a is false" $ PostfixOp [Name Nothing "is false"] (Iden [Name Nothing "a"]) 220 | ,t "a is not false" $ PostfixOp [Name Nothing "is not false"] (Iden [Name Nothing "a"]) 221 | ,t "a is unknown" $ PostfixOp [Name Nothing "is unknown"] (Iden [Name Nothing "a"]) 222 | ,t "a is not unknown" $ PostfixOp [Name Nothing "is not unknown"] (Iden [Name Nothing "a"]) 223 | ,t "a is distinct from b" $ BinOp (Iden [Name Nothing "a"]) [Name Nothing "is distinct from"] (Iden [Name Nothing "b"]) 224 | 225 | ,t "a is not distinct from b" 226 | $ BinOp (Iden [Name Nothing "a"]) [Name Nothing "is not distinct from"] (Iden [Name Nothing "b"]) 227 | 228 | ,t "a like b" $ BinOp (Iden [Name Nothing "a"]) [Name Nothing "like"] (Iden [Name Nothing "b"]) 229 | ,t "a not like b" $ BinOp (Iden [Name Nothing "a"]) [Name Nothing "not like"] (Iden [Name Nothing "b"]) 230 | ,t "a is similar to b"$ BinOp (Iden [Name Nothing "a"]) [Name Nothing "is similar to"] (Iden [Name Nothing "b"]) 231 | 232 | ,t "a is not similar to b" 233 | $ BinOp (Iden [Name Nothing "a"]) [Name Nothing "is not similar to"] (Iden [Name Nothing "b"]) 234 | 235 | ,t "a overlaps b" $ BinOp (Iden [Name Nothing "a"]) [Name Nothing "overlaps"] (Iden [Name Nothing "b"]) 236 | 237 | -- special operators 238 | 239 | ,t "a between b and c" $ SpecialOp [Name Nothing "between"] [Iden [Name Nothing "a"] 240 | ,Iden [Name Nothing "b"] 241 | ,Iden [Name Nothing "c"]] 242 | 243 | ,t "a not between b and c" $ SpecialOp [Name Nothing "not between"] [Iden [Name Nothing "a"] 244 | ,Iden [Name Nothing "b"] 245 | 
,Iden [Name Nothing "c"]] 246 | ,t "(1,2)" 247 | $ SpecialOp [Name Nothing "rowctor"] [NumLit "1", NumLit "2"] 248 | 249 | 250 | -- keyword special operators 251 | 252 | ,t "extract(day from t)" 253 | $ SpecialOpK [Name Nothing "extract"] (Just $ Iden [Name Nothing "day"]) [("from", Iden [Name Nothing "t"])] 254 | 255 | ,t "substring(x from 1 for 2)" 256 | $ SpecialOpK [Name Nothing "substring"] (Just $ Iden [Name Nothing "x"]) [("from", NumLit "1") 257 | ,("for", NumLit "2")] 258 | 259 | ,t "substring(x from 1)" 260 | $ SpecialOpK [Name Nothing "substring"] (Just $ Iden [Name Nothing "x"]) [("from", NumLit "1")] 261 | 262 | ,t "substring(x for 2)" 263 | $ SpecialOpK [Name Nothing "substring"] (Just $ Iden [Name Nothing "x"]) [("for", NumLit "2")] 264 | 265 | ,t "substring(x from 1 for 2 collate C)" 266 | $ SpecialOpK [Name Nothing "substring"] (Just $ Iden [Name Nothing "x"]) 267 | [("from", NumLit "1") 268 | ,("for", Collate (NumLit "2") [Name Nothing "C"])] 269 | 270 | -- this doesn't work because of a overlap in the 'in' parser 271 | 272 | ,t "POSITION( string1 IN string2 )" 273 | $ SpecialOpK [Name Nothing "position"] (Just $ Iden [Name Nothing "string1"]) [("in", Iden [Name Nothing "string2"])] 274 | 275 | ,t "CONVERT(char_value USING conversion_char_name)" 276 | $ SpecialOpK [Name Nothing "convert"] (Just $ Iden [Name Nothing "char_value"]) 277 | [("using", Iden [Name Nothing "conversion_char_name"])] 278 | 279 | ,t "TRANSLATE(char_value USING translation_name)" 280 | $ SpecialOpK [Name Nothing "translate"] (Just $ Iden [Name Nothing "char_value"]) 281 | [("using", Iden [Name Nothing "translation_name"])] 282 | 283 | {- 284 | OVERLAY(string PLACING embedded_string FROM start 285 | [FOR length]) 286 | -} 287 | 288 | ,t "OVERLAY(string PLACING embedded_string FROM start)" 289 | $ SpecialOpK [Name Nothing "overlay"] (Just $ Iden [Name Nothing "string"]) 290 | [("placing", Iden [Name Nothing "embedded_string"]) 291 | ,("from", Iden [Name Nothing "start"])] 292 | 293 | ,t "OVERLAY(string PLACING embedded_string FROM start FOR length)" 294 | $ SpecialOpK [Name Nothing "overlay"] (Just $ Iden [Name Nothing "string"]) 295 | [("placing", Iden [Name Nothing "embedded_string"]) 296 | ,("from", Iden [Name Nothing "start"]) 297 | ,("for", Iden [Name Nothing "length"])] 298 | 299 | {- 300 | TRIM( [ [{LEADING | TRAILING | BOTH}] [removal_char] FROM ] 301 | target_string 302 | [COLLATE collation_name] ) 303 | -} 304 | 305 | 306 | 307 | ,t "trim(from target_string)" 308 | $ SpecialOpK [Name Nothing "trim"] Nothing 309 | [("both", StringLit "'" "'" " ") 310 | ,("from", Iden [Name Nothing "target_string"])] 311 | 312 | ,t "trim(leading from target_string)" 313 | $ SpecialOpK [Name Nothing "trim"] Nothing 314 | [("leading", StringLit "'" "'" " ") 315 | ,("from", Iden [Name Nothing "target_string"])] 316 | 317 | ,t "trim(trailing from target_string)" 318 | $ SpecialOpK [Name Nothing "trim"] Nothing 319 | [("trailing", StringLit "'" "'" " ") 320 | ,("from", Iden [Name Nothing "target_string"])] 321 | 322 | ,t "trim(both from target_string)" 323 | $ SpecialOpK [Name Nothing "trim"] Nothing 324 | [("both", StringLit "'" "'" " ") 325 | ,("from", Iden [Name Nothing "target_string"])] 326 | 327 | 328 | ,t "trim(leading 'x' from target_string)" 329 | $ SpecialOpK [Name Nothing "trim"] Nothing 330 | [("leading", StringLit "'" "'" "x") 331 | ,("from", Iden [Name Nothing "target_string"])] 332 | 333 | ,t "trim(trailing 'y' from target_string)" 334 | $ SpecialOpK [Name Nothing "trim"] Nothing 335 | [("trailing", 
StringLit "'" "'" "y") 336 | ,("from", Iden [Name Nothing "target_string"])] 337 | 338 | ,t "trim(both 'z' from target_string collate C)" 339 | $ SpecialOpK [Name Nothing "trim"] Nothing 340 | [("both", StringLit "'" "'" "z") 341 | ,("from", Collate (Iden [Name Nothing "target_string"]) [Name Nothing "C"])] 342 | 343 | ,t "trim(leading from target_string)" 344 | $ SpecialOpK [Name Nothing "trim"] Nothing 345 | [("leading", StringLit "'" "'" " ") 346 | ,("from", Iden [Name Nothing "target_string"])] 347 | 348 | ] 349 | 350 | aggregates :: TestItem 351 | aggregates = Group "aggregates" 352 | [t "count(*)" $ App [Name Nothing "count"] [Star] 353 | 354 | ,t "sum(a order by a)" 355 | $ AggregateApp [Name Nothing "sum"] SQDefault [Iden [Name Nothing "a"]] 356 | [SortSpec (Iden [Name Nothing "a"]) DirDefault NullsOrderDefault] Nothing 357 | 358 | ,t "sum(all a)" 359 | $ AggregateApp [Name Nothing "sum"] All [Iden [Name Nothing "a"]] [] Nothing 360 | 361 | ,t "count(distinct a)" 362 | $ AggregateApp [Name Nothing "count"] Distinct [Iden [Name Nothing "a"]] [] Nothing 363 | ] 364 | 365 | windowFunctions :: TestItem 366 | windowFunctions = Group "windowFunctions" 367 | [t "max(a) over ()" $ WindowApp [Name Nothing "max"] [Iden [Name Nothing "a"]] [] [] Nothing 368 | ,t "count(*) over ()" $ WindowApp [Name Nothing "count"] [Star] [] [] Nothing 369 | 370 | ,t "max(a) over (partition by b)" 371 | $ WindowApp [Name Nothing "max"] [Iden [Name Nothing "a"]] [Iden [Name Nothing "b"]] [] Nothing 372 | 373 | ,t "max(a) over (partition by b,c)" 374 | $ WindowApp [Name Nothing "max"] [Iden [Name Nothing "a"]] [Iden [Name Nothing "b"],Iden [Name Nothing "c"]] [] Nothing 375 | 376 | ,t "sum(a) over (order by b)" 377 | $ WindowApp [Name Nothing "sum"] [Iden [Name Nothing "a"]] [] 378 | [SortSpec (Iden [Name Nothing "b"]) DirDefault NullsOrderDefault] Nothing 379 | 380 | ,t "sum(a) over (order by b desc,c)" 381 | $ WindowApp [Name Nothing "sum"] [Iden [Name Nothing "a"]] [] 382 | [SortSpec (Iden [Name Nothing "b"]) Desc NullsOrderDefault 383 | ,SortSpec (Iden [Name Nothing "c"]) DirDefault NullsOrderDefault] Nothing 384 | 385 | ,t "sum(a) over (partition by b order by c)" 386 | $ WindowApp [Name Nothing "sum"] [Iden [Name Nothing "a"]] [Iden [Name Nothing "b"]] 387 | [SortSpec (Iden [Name Nothing "c"]) DirDefault NullsOrderDefault] Nothing 388 | 389 | ,t "sum(a) over (partition by b order by c range unbounded preceding)" 390 | $ WindowApp [Name Nothing "sum"] [Iden [Name Nothing "a"]] [Iden [Name Nothing "b"]] 391 | [SortSpec (Iden [Name Nothing "c"]) DirDefault NullsOrderDefault] 392 | $ Just $ FrameFrom FrameRange UnboundedPreceding 393 | 394 | ,t "sum(a) over (partition by b order by c range 5 preceding)" 395 | $ WindowApp [Name Nothing "sum"] [Iden [Name Nothing "a"]] [Iden [Name Nothing "b"]] 396 | [SortSpec (Iden [Name Nothing "c"]) DirDefault NullsOrderDefault] 397 | $ Just $ FrameFrom FrameRange $ Preceding (NumLit "5") 398 | 399 | ,t "sum(a) over (partition by b order by c range current row)" 400 | $ WindowApp [Name Nothing "sum"] [Iden [Name Nothing "a"]] [Iden [Name Nothing "b"]] 401 | [SortSpec (Iden [Name Nothing "c"]) DirDefault NullsOrderDefault] 402 | $ Just $ FrameFrom FrameRange Current 403 | 404 | ,t "sum(a) over (partition by b order by c rows 5 following)" 405 | $ WindowApp [Name Nothing "sum"] [Iden [Name Nothing "a"]] [Iden [Name Nothing "b"]] 406 | [SortSpec (Iden [Name Nothing "c"]) DirDefault NullsOrderDefault] 407 | $ Just $ FrameFrom FrameRows $ Following (NumLit "5") 408 | 409 | ,t 
"sum(a) over (partition by b order by c range unbounded following)" 410 | $ WindowApp [Name Nothing "sum"] [Iden [Name Nothing "a"]] [Iden [Name Nothing "b"]] 411 | [SortSpec (Iden [Name Nothing "c"]) DirDefault NullsOrderDefault] 412 | $ Just $ FrameFrom FrameRange UnboundedFollowing 413 | 414 | ,t "sum(a) over (partition by b order by c \n\ 415 | \range between 5 preceding and 5 following)" 416 | $ WindowApp [Name Nothing "sum"] [Iden [Name Nothing "a"]] [Iden [Name Nothing "b"]] 417 | [SortSpec (Iden [Name Nothing "c"]) DirDefault NullsOrderDefault] 418 | $ Just $ FrameBetween FrameRange 419 | (Preceding (NumLit "5")) 420 | (Following (NumLit "5")) 421 | 422 | ] 423 | 424 | parens :: TestItem 425 | parens = Group "parens" 426 | [t "(a)" $ Parens (Iden [Name Nothing "a"]) 427 | ,t "(a + b)" $ Parens (BinOp (Iden [Name Nothing "a"]) [Name Nothing "+"] (Iden [Name Nothing "b"])) 428 | ] 429 | 430 | functionsWithReservedNames :: TestItem 431 | functionsWithReservedNames = Group "functionsWithReservedNames" $ map f 432 | ["abs" 433 | ,"char_length" 434 | ] 435 | where 436 | f fn = t (fn <> "(a)") $ App [Name Nothing fn] [Iden [Name Nothing "a"]] 437 | --------------------------------------------------------------------------------