├── .gitignore
├── .travis.yml
├── Dockerfile
├── LICENSE
├── MANIFEST.in
├── Makefile
├── README.md
├── antlr_plsql
│   ├── __init__.py
│   ├── antlr_py
│   │   └── __init__.py
│   ├── ast.py
│   ├── plsql.g4
│   └── speaker.yml
├── pytest.ini
├── requirements.txt
├── setup.py
└── tests
    ├── __init__.py
    ├── examples
    │   ├── aggregate01.sql
    │   ├── analytic_query02.sql
    │   ├── analytic_query03.sql
    │   ├── analytic_query04.sql
    │   ├── analytic_query05.sql
    │   ├── analytic_query06.sql
    │   ├── analytic_query07.sql
    │   ├── at_at_binary_expr.sql
    │   ├── bindvar01.sql
    │   ├── bindvar02.sql
    │   ├── bindvar03.sql
    │   ├── bindvar04.sql
    │   ├── bindvar05.sql
    │   ├── case_when01.sql
    │   ├── case_when02.sql
    │   ├── case_when03.sql
    │   ├── case_when04.sql
    │   ├── case_when05.sql
    │   ├── cast_multiset01.sql
    │   ├── cast_multiset02.sql
    │   ├── cast_multiset03.sql
    │   ├── cast_multiset04.sql
    │   ├── cast_multiset05.sql
    │   ├── cast_multiset06.sql
    │   ├── cast_multiset07.sql
    │   ├── cast_multiset08.sql
    │   ├── columns01.sql
    │   ├── comment01.sql
    │   ├── concat_01.sql
    │   ├── concat_02.sql
    │   ├── concat_03.sql
    │   ├── condition01.sql
    │   ├── condition02.sql
    │   ├── condition03.sql
    │   ├── condition04.sql
    │   ├── condition05.sql
    │   ├── condition06.sql
    │   ├── condition07.sql
    │   ├── condition08.sql
    │   ├── condition09.sql
    │   ├── condition10.sql
    │   ├── condition11.sql
    │   ├── condition12.sql
    │   ├── condition14.sql
    │   ├── condition15.sql
    │   ├── condition16.sql
    │   ├── connect_by01.sql
    │   ├── connect_by02.sql
    │   ├── connect_by03.sql
    │   ├── connect_by04.sql
    │   ├── connect_by05.sql
    │   ├── datetime01.sql
    │   ├── datetime02.sql
    │   ├── datetime03.sql
    │   ├── datetime04.sql
    │   ├── datetime05.sql
    │   ├── dblink01.sql
    │   ├── explain01.sql
    │   ├── explain_example_1.sql
    │   ├── explain_example_2.sql
    │   ├── explain_example_3.sql
    │   ├── explain_example_4.sql
    │   ├── explain_example_5.sql
    │   ├── extract_example.sql
    │   ├── flashback01.sql
    │   ├── for_update01.sql
    │   ├── for_update02.sql
    │   ├── for_update03.sql
    │   ├── for_update04.sql
    │   ├── for_update05.sql
    │   ├── for_update06.sql
    │   ├── for_update07.sql
    │   ├── for_update08.sql
    │   ├── function01.sql
    │   ├── function02.sql
    │   ├── function03.sql
    │   ├── function04.sql
    │   ├── function05.sql
    │   ├── groupby01.sql
    │   ├── groupby02.sql
    │   ├── groupby03.sql
    │   ├── groupby04.sql
    │   ├── groupby05.sql
    │   ├── groupby06.sql
    │   ├── groupby07.sql
    │   ├── interval01.sql
    │   ├── interval02.sql
    │   ├── interval03.sql
    │   ├── interval04.sql
    │   ├── join01.sql
    │   ├── join02.sql
    │   ├── join03.sql
    │   ├── join04.sql
    │   ├── join05.sql
    │   ├── join06.sql
    │   ├── join07.sql
    │   ├── join08.sql
    │   ├── join09.sql
    │   ├── join10.sql
    │   ├── join11.sql
    │   ├── join12.sql
    │   ├── join13.sql
    │   ├── join14.sql
    │   ├── join15.sql
    │   ├── join16.sql
    │   ├── join17.sql
    │   ├── join18.sql
    │   ├── join19.sql
    │   ├── join20.sql
    │   ├── join21.sql
    │   ├── keywordasidentifier01.sql
    │   ├── keywordasidentifier02.sql
    │   ├── keywordasidentifier03.sql
    │   ├── keywordasidentifier04.sql
    │   ├── keywordasidentifier05.sql
    │   ├── lexer01.sql
    │   ├── lexer02.sql
    │   ├── lexer03.sql
    │   ├── lexer04.sql
    │   ├── lexer05.sql
    │   ├── like01.sql
    │   ├── merge01.sql
    │   ├── merge02.sql
    │   ├── merge03.sql
    │   ├── merge04.sql
    │   ├── model_clause01.sql
    │   ├── model_clause02.sql
    │   ├── model_clause03.sql
    │   ├── model_clause04.sql
    │   ├── model_clause05.sql
    │   ├── multiple_values.sql
    │   ├── numbers01.sql
    │   ├── object_access01.sql
    │   ├── order_by01.sql
    │   ├── order_by02.sql
    │   ├── order_by03.sql
    │   ├── order_by04.sql
    │   ├── order_by05.sql
    │   ├── order_by06.sql
    │   ├── pivot01.sql
    │   ├── pivot02.sql
    │   ├── pivot03.sql
    │   ├── pivot04.sql
    │   ├── pivot05.sql
    │   ├── pivot06.sql
    │   ├── pivot07.sql
    │   ├── pivot08.sql
    │   ├── pivot09.sql
    │   ├── pivot10.sql
    │   ├── pivot11.sql
    │   ├── pivot12.sql
    │   ├── position_function.sql
    │   ├── query_factoring01.sql
    │   ├── query_factoring02.sql
    │   ├── query_factoring03.sql
    │   ├── query_factoring04.sql
    │   ├── query_factoring05.sql
    │   ├── query_factoring06.sql
    │   ├── query_factoring07.sql
    │   ├── query_factoring08.sql
    │   ├── query_factoring09.sql
    │   ├── query_factoring10.sql
    │   ├── query_factoring11.sql
    │   ├── sample01.sql
    │   ├── simple02.sql
    │   ├── simple03.sql
    │   ├── simple04.sql
    │   ├── simple05.sql
    │   ├── simple06.sql
    │   ├── simple07.sql
    │   ├── simple08.sql
    │   ├── simple09.sql
    │   ├── simple10.sql
    │   ├── simple11.sql
    │   ├── simple12.sql
    │   ├── simple13.sql
    │   ├── union01.sql
    │   ├── union02.sql
    │   ├── union03.sql
    │   ├── union04.sql
    │   ├── union05.sql
    │   ├── union06.sql
    │   ├── union07.sql
    │   ├── union08.sql
    │   ├── union09.sql
    │   ├── union10.sql
    │   └── xmltable02.sql
    ├── test_ast.py
    ├── test_examples.py
    ├── test_speaker.py
    ├── v0.2.yml
    ├── v0.3.yml
    └── v0.5.yml
/.gitignore:
--------------------------------------------------------------------------------
1 | # Ignore hidden vagrant folder
2 | .vagrant
3 | # Ignore java files
4 | *.java
5 | *.class
6 | # Vim swapfiles
7 | .*.swp
8 |
9 | # Python ----------------------------------------------------------------------
10 | # Byte-compiled / optimized / DLL files
11 | __pycache__/
12 | *.py[cod]
13 | *$py.class
14 |
15 | # C extensions
16 | *.so
17 |
18 | # Distribution / packaging
19 | .Python
20 | env/
21 | build/
22 | develop-eggs/
23 | dist/
24 | downloads/
25 | eggs/
26 | .eggs/
27 | lib/
28 | lib64/
29 | parts/
30 | sdist/
31 | var/
32 | *.egg-info/
33 | .installed.cfg
34 | *.egg
35 |
36 | # PyInstaller
37 | # Usually these files are written by a python script from a template
38 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
39 | *.manifest
40 | *.spec
41 |
42 | # Installer logs
43 | pip-log.txt
44 | pip-delete-this-directory.txt
45 |
46 | # Unit test / coverage reports
47 | htmlcov/
48 | .tox/
49 | .coverage
50 | .coverage.*
51 | .cache
52 | nosetests.xml
53 | coverage.xml
54 | *,cover
55 | .hypothesis/
56 | .pytest_cache/
57 |
58 | # Translations
59 | *.mo
60 | *.pot
61 |
62 | # Django stuff:
63 | *.log
64 | local_settings.py
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Ipython Notebook
73 | .ipynb_checkpoints
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # Mac stuff -------------------------------------------------------------------
79 | .DS_Store
80 | tests/.DS_Store
81 |
82 | # Custom stuff ------------
83 | tests/dump_*.yml
84 | antlr_plsql/js/*
85 | !antlr_plsql/js/index.js
86 | antlr_plsql/antlr_py/*
87 | !antlr_plsql/antlr_py/__init__.py
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | sudo: false
2 | language: python
3 | python:
4 | - '3.5'
5 | services:
6 | - docker
7 | before_install: docker build -t antlr_plsql .
8 | script: docker run -v $(pwd):/usr/src/app antlr_plsql make build test
9 | deploy:
10 | provider: pypi
11 | user: datacamp
12 | password:
13 | secure: sFMACOnPcBSb5dFCQX3tVz/f8C6ZJVzjEFLyTaD6YbIyIEu2VWDzLyXG9HEgZAjVKagbaGfcN6PmNAIVKa9UZBZ1xWRhWQ1YBNmM1/V+dlw3XzpPlD0tMboU1kX+GVhiYov6k3G3Ts/2zdGsiSjFN3xqQVJwLXVET4M6QM81e1X8p7laJ868HbW702QEUg8p1XtKUiQOhQFTdbqnbiMM/BAMmrxX2sLghthp4d4VHxuo3C/klycUc/cfmAMEYxutku9F2rfmPH2k95HJFnSjYv9UqJpxMbxcdyUd14ibh3V1Ofn/KkKYghiIf7f8q+wjpoZxbty2VYj/XFbkDpqpebf4hazb5mTobSYNAV5hHwgYpwdsFUvhsSQU2Sae6+txkpaZhhn5q/TRRw/r/Gno4hmgUC9AInFTNGw9entQT0iiUs5yMQj4Yz+Kfiyux/Y+/YWxfMkVc+9j6ODHf0Mb0843mbklQ+ABY3NSOt1SMVAJ/ohWtURHZpyw7GArPPiXCC7d5DIAMgqVxhv4W/u3yRFTAtrxoPd7ngwRmIDyQcsobALJlxx/JFT2I9ZGs4zjdzP76qjR3G0aQ2WmDThyvgqzx/pAy5s+ITOOSv1OeaeiAjOjPp94h0trbk0H9/ir/Mi0Ll0DF1mAPR8HJAPn/6UqMQjWwShdbnB3xNROamU=
14 | on:
15 | tags: true
16 | distributions: sdist bdist_wheel
17 | repo: datacamp/antlr-plsql
18 | skip_cleanup: true
19 | skip_upload_docs: true
20 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.5
2 |
3 | RUN apt-get update && apt-get install -y \
4 | build-essential \
5 | default-jdk \
6 | maven
7 |
8 | RUN cd /usr/local/lib && curl -O https://www.antlr.org/download/antlr-4.7.1-complete.jar
9 | ENV CLASSPATH=".:/usr/local/lib/antlr-4.7.1-complete.jar:$CLASSPATH"
10 | RUN echo "java -Xmx500M -cp \"/usr/local/lib/antlr-4.7.1-complete.jar:$CLASSPATH\" org.antlr.v4.Tool \$@" >> /usr/local/bin/antlr4 && chmod u+x /usr/local/bin/antlr4
11 | RUN echo "alias grun='java org.antlr.v4.runtime.misc.TestRig'" >> ~/.bashrc
12 |
13 | COPY . /usr/src/app
14 | WORKDIR /usr/src/app
15 |
16 | RUN pip install -r requirements.txt
17 | RUN pip install -e .
18 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU AFFERO GENERAL PUBLIC LICENSE
2 | Version 3, 19 November 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU Affero General Public License is a free, copyleft license for
11 | software and other kinds of works, specifically designed to ensure
12 | cooperation with the community in the case of network server software.
13 |
14 | The licenses for most software and other practical works are designed
15 | to take away your freedom to share and change the works. By contrast,
16 | our General Public Licenses are intended to guarantee your freedom to
17 | share and change all versions of a program--to make sure it remains free
18 | software for all its users.
19 |
20 | When we speak of free software, we are referring to freedom, not
21 | price. Our General Public Licenses are designed to make sure that you
22 | have the freedom to distribute copies of free software (and charge for
23 | them if you wish), that you receive source code or can get it if you
24 | want it, that you can change the software or use pieces of it in new
25 | free programs, and that you know you can do these things.
26 |
27 | Developers that use our General Public Licenses protect your rights
28 | with two steps: (1) assert copyright on the software, and (2) offer
29 | you this License which gives you legal permission to copy, distribute
30 | and/or modify the software.
31 |
32 | A secondary benefit of defending all users' freedom is that
33 | improvements made in alternate versions of the program, if they
34 | receive widespread use, become available for other developers to
35 | incorporate. Many developers of free software are heartened and
36 | encouraged by the resulting cooperation. However, in the case of
37 | software used on network servers, this result may fail to come about.
38 | The GNU General Public License permits making a modified version and
39 | letting the public access it on a server without ever releasing its
40 | source code to the public.
41 |
42 | The GNU Affero General Public License is designed specifically to
43 | ensure that, in such cases, the modified source code becomes available
44 | to the community. It requires the operator of a network server to
45 | provide the source code of the modified version running there to the
46 | users of that server. Therefore, public use of a modified version, on
47 | a publicly accessible server, gives the public access to the source
48 | code of the modified version.
49 |
50 | An older license, called the Affero General Public License and
51 | published by Affero, was designed to accomplish similar goals. This is
52 | a different license, not a version of the Affero GPL, but Affero has
53 | released a new version of the Affero GPL which permits relicensing under
54 | this license.
55 |
56 | The precise terms and conditions for copying, distribution and
57 | modification follow.
58 |
59 | TERMS AND CONDITIONS
60 |
61 | 0. Definitions.
62 |
63 | "This License" refers to version 3 of the GNU Affero General Public License.
64 |
65 | "Copyright" also means copyright-like laws that apply to other kinds of
66 | works, such as semiconductor masks.
67 |
68 | "The Program" refers to any copyrightable work licensed under this
69 | License. Each licensee is addressed as "you". "Licensees" and
70 | "recipients" may be individuals or organizations.
71 |
72 | To "modify" a work means to copy from or adapt all or part of the work
73 | in a fashion requiring copyright permission, other than the making of an
74 | exact copy. The resulting work is called a "modified version" of the
75 | earlier work or a work "based on" the earlier work.
76 |
77 | A "covered work" means either the unmodified Program or a work based
78 | on the Program.
79 |
80 | To "propagate" a work means to do anything with it that, without
81 | permission, would make you directly or secondarily liable for
82 | infringement under applicable copyright law, except executing it on a
83 | computer or modifying a private copy. Propagation includes copying,
84 | distribution (with or without modification), making available to the
85 | public, and in some countries other activities as well.
86 |
87 | To "convey" a work means any kind of propagation that enables other
88 | parties to make or receive copies. Mere interaction with a user through
89 | a computer network, with no transfer of a copy, is not conveying.
90 |
91 | An interactive user interface displays "Appropriate Legal Notices"
92 | to the extent that it includes a convenient and prominently visible
93 | feature that (1) displays an appropriate copyright notice, and (2)
94 | tells the user that there is no warranty for the work (except to the
95 | extent that warranties are provided), that licensees may convey the
96 | work under this License, and how to view a copy of this License. If
97 | the interface presents a list of user commands or options, such as a
98 | menu, a prominent item in the list meets this criterion.
99 |
100 | 1. Source Code.
101 |
102 | The "source code" for a work means the preferred form of the work
103 | for making modifications to it. "Object code" means any non-source
104 | form of a work.
105 |
106 | A "Standard Interface" means an interface that either is an official
107 | standard defined by a recognized standards body, or, in the case of
108 | interfaces specified for a particular programming language, one that
109 | is widely used among developers working in that language.
110 |
111 | The "System Libraries" of an executable work include anything, other
112 | than the work as a whole, that (a) is included in the normal form of
113 | packaging a Major Component, but which is not part of that Major
114 | Component, and (b) serves only to enable use of the work with that
115 | Major Component, or to implement a Standard Interface for which an
116 | implementation is available to the public in source code form. A
117 | "Major Component", in this context, means a major essential component
118 | (kernel, window system, and so on) of the specific operating system
119 | (if any) on which the executable work runs, or a compiler used to
120 | produce the work, or an object code interpreter used to run it.
121 |
122 | The "Corresponding Source" for a work in object code form means all
123 | the source code needed to generate, install, and (for an executable
124 | work) run the object code and to modify the work, including scripts to
125 | control those activities. However, it does not include the work's
126 | System Libraries, or general-purpose tools or generally available free
127 | programs which are used unmodified in performing those activities but
128 | which are not part of the work. For example, Corresponding Source
129 | includes interface definition files associated with source files for
130 | the work, and the source code for shared libraries and dynamically
131 | linked subprograms that the work is specifically designed to require,
132 | such as by intimate data communication or control flow between those
133 | subprograms and other parts of the work.
134 |
135 | The Corresponding Source need not include anything that users
136 | can regenerate automatically from other parts of the Corresponding
137 | Source.
138 |
139 | The Corresponding Source for a work in source code form is that
140 | same work.
141 |
142 | 2. Basic Permissions.
143 |
144 | All rights granted under this License are granted for the term of
145 | copyright on the Program, and are irrevocable provided the stated
146 | conditions are met. This License explicitly affirms your unlimited
147 | permission to run the unmodified Program. The output from running a
148 | covered work is covered by this License only if the output, given its
149 | content, constitutes a covered work. This License acknowledges your
150 | rights of fair use or other equivalent, as provided by copyright law.
151 |
152 | You may make, run and propagate covered works that you do not
153 | convey, without conditions so long as your license otherwise remains
154 | in force. You may convey covered works to others for the sole purpose
155 | of having them make modifications exclusively for you, or provide you
156 | with facilities for running those works, provided that you comply with
157 | the terms of this License in conveying all material for which you do
158 | not control copyright. Those thus making or running the covered works
159 | for you must do so exclusively on your behalf, under your direction
160 | and control, on terms that prohibit them from making any copies of
161 | your copyrighted material outside their relationship with you.
162 |
163 | Conveying under any other circumstances is permitted solely under
164 | the conditions stated below. Sublicensing is not allowed; section 10
165 | makes it unnecessary.
166 |
167 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
168 |
169 | No covered work shall be deemed part of an effective technological
170 | measure under any applicable law fulfilling obligations under article
171 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
172 | similar laws prohibiting or restricting circumvention of such
173 | measures.
174 |
175 | When you convey a covered work, you waive any legal power to forbid
176 | circumvention of technological measures to the extent such circumvention
177 | is effected by exercising rights under this License with respect to
178 | the covered work, and you disclaim any intention to limit operation or
179 | modification of the work as a means of enforcing, against the work's
180 | users, your or third parties' legal rights to forbid circumvention of
181 | technological measures.
182 |
183 | 4. Conveying Verbatim Copies.
184 |
185 | You may convey verbatim copies of the Program's source code as you
186 | receive it, in any medium, provided that you conspicuously and
187 | appropriately publish on each copy an appropriate copyright notice;
188 | keep intact all notices stating that this License and any
189 | non-permissive terms added in accord with section 7 apply to the code;
190 | keep intact all notices of the absence of any warranty; and give all
191 | recipients a copy of this License along with the Program.
192 |
193 | You may charge any price or no price for each copy that you convey,
194 | and you may offer support or warranty protection for a fee.
195 |
196 | 5. Conveying Modified Source Versions.
197 |
198 | You may convey a work based on the Program, or the modifications to
199 | produce it from the Program, in the form of source code under the
200 | terms of section 4, provided that you also meet all of these conditions:
201 |
202 | a) The work must carry prominent notices stating that you modified
203 | it, and giving a relevant date.
204 |
205 | b) The work must carry prominent notices stating that it is
206 | released under this License and any conditions added under section
207 | 7. This requirement modifies the requirement in section 4 to
208 | "keep intact all notices".
209 |
210 | c) You must license the entire work, as a whole, under this
211 | License to anyone who comes into possession of a copy. This
212 | License will therefore apply, along with any applicable section 7
213 | additional terms, to the whole of the work, and all its parts,
214 | regardless of how they are packaged. This License gives no
215 | permission to license the work in any other way, but it does not
216 | invalidate such permission if you have separately received it.
217 |
218 | d) If the work has interactive user interfaces, each must display
219 | Appropriate Legal Notices; however, if the Program has interactive
220 | interfaces that do not display Appropriate Legal Notices, your
221 | work need not make them do so.
222 |
223 | A compilation of a covered work with other separate and independent
224 | works, which are not by their nature extensions of the covered work,
225 | and which are not combined with it such as to form a larger program,
226 | in or on a volume of a storage or distribution medium, is called an
227 | "aggregate" if the compilation and its resulting copyright are not
228 | used to limit the access or legal rights of the compilation's users
229 | beyond what the individual works permit. Inclusion of a covered work
230 | in an aggregate does not cause this License to apply to the other
231 | parts of the aggregate.
232 |
233 | 6. Conveying Non-Source Forms.
234 |
235 | You may convey a covered work in object code form under the terms
236 | of sections 4 and 5, provided that you also convey the
237 | machine-readable Corresponding Source under the terms of this License,
238 | in one of these ways:
239 |
240 | a) Convey the object code in, or embodied in, a physical product
241 | (including a physical distribution medium), accompanied by the
242 | Corresponding Source fixed on a durable physical medium
243 | customarily used for software interchange.
244 |
245 | b) Convey the object code in, or embodied in, a physical product
246 | (including a physical distribution medium), accompanied by a
247 | written offer, valid for at least three years and valid for as
248 | long as you offer spare parts or customer support for that product
249 | model, to give anyone who possesses the object code either (1) a
250 | copy of the Corresponding Source for all the software in the
251 | product that is covered by this License, on a durable physical
252 | medium customarily used for software interchange, for a price no
253 | more than your reasonable cost of physically performing this
254 | conveying of source, or (2) access to copy the
255 | Corresponding Source from a network server at no charge.
256 |
257 | c) Convey individual copies of the object code with a copy of the
258 | written offer to provide the Corresponding Source. This
259 | alternative is allowed only occasionally and noncommercially, and
260 | only if you received the object code with such an offer, in accord
261 | with subsection 6b.
262 |
263 | d) Convey the object code by offering access from a designated
264 | place (gratis or for a charge), and offer equivalent access to the
265 | Corresponding Source in the same way through the same place at no
266 | further charge. You need not require recipients to copy the
267 | Corresponding Source along with the object code. If the place to
268 | copy the object code is a network server, the Corresponding Source
269 | may be on a different server (operated by you or a third party)
270 | that supports equivalent copying facilities, provided you maintain
271 | clear directions next to the object code saying where to find the
272 | Corresponding Source. Regardless of what server hosts the
273 | Corresponding Source, you remain obligated to ensure that it is
274 | available for as long as needed to satisfy these requirements.
275 |
276 | e) Convey the object code using peer-to-peer transmission, provided
277 | you inform other peers where the object code and Corresponding
278 | Source of the work are being offered to the general public at no
279 | charge under subsection 6d.
280 |
281 | A separable portion of the object code, whose source code is excluded
282 | from the Corresponding Source as a System Library, need not be
283 | included in conveying the object code work.
284 |
285 | A "User Product" is either (1) a "consumer product", which means any
286 | tangible personal property which is normally used for personal, family,
287 | or household purposes, or (2) anything designed or sold for incorporation
288 | into a dwelling. In determining whether a product is a consumer product,
289 | doubtful cases shall be resolved in favor of coverage. For a particular
290 | product received by a particular user, "normally used" refers to a
291 | typical or common use of that class of product, regardless of the status
292 | of the particular user or of the way in which the particular user
293 | actually uses, or expects or is expected to use, the product. A product
294 | is a consumer product regardless of whether the product has substantial
295 | commercial, industrial or non-consumer uses, unless such uses represent
296 | the only significant mode of use of the product.
297 |
298 | "Installation Information" for a User Product means any methods,
299 | procedures, authorization keys, or other information required to install
300 | and execute modified versions of a covered work in that User Product from
301 | a modified version of its Corresponding Source. The information must
302 | suffice to ensure that the continued functioning of the modified object
303 | code is in no case prevented or interfered with solely because
304 | modification has been made.
305 |
306 | If you convey an object code work under this section in, or with, or
307 | specifically for use in, a User Product, and the conveying occurs as
308 | part of a transaction in which the right of possession and use of the
309 | User Product is transferred to the recipient in perpetuity or for a
310 | fixed term (regardless of how the transaction is characterized), the
311 | Corresponding Source conveyed under this section must be accompanied
312 | by the Installation Information. But this requirement does not apply
313 | if neither you nor any third party retains the ability to install
314 | modified object code on the User Product (for example, the work has
315 | been installed in ROM).
316 |
317 | The requirement to provide Installation Information does not include a
318 | requirement to continue to provide support service, warranty, or updates
319 | for a work that has been modified or installed by the recipient, or for
320 | the User Product in which it has been modified or installed. Access to a
321 | network may be denied when the modification itself materially and
322 | adversely affects the operation of the network or violates the rules and
323 | protocols for communication across the network.
324 |
325 | Corresponding Source conveyed, and Installation Information provided,
326 | in accord with this section must be in a format that is publicly
327 | documented (and with an implementation available to the public in
328 | source code form), and must require no special password or key for
329 | unpacking, reading or copying.
330 |
331 | 7. Additional Terms.
332 |
333 | "Additional permissions" are terms that supplement the terms of this
334 | License by making exceptions from one or more of its conditions.
335 | Additional permissions that are applicable to the entire Program shall
336 | be treated as though they were included in this License, to the extent
337 | that they are valid under applicable law. If additional permissions
338 | apply only to part of the Program, that part may be used separately
339 | under those permissions, but the entire Program remains governed by
340 | this License without regard to the additional permissions.
341 |
342 | When you convey a copy of a covered work, you may at your option
343 | remove any additional permissions from that copy, or from any part of
344 | it. (Additional permissions may be written to require their own
345 | removal in certain cases when you modify the work.) You may place
346 | additional permissions on material, added by you to a covered work,
347 | for which you have or can give appropriate copyright permission.
348 |
349 | Notwithstanding any other provision of this License, for material you
350 | add to a covered work, you may (if authorized by the copyright holders of
351 | that material) supplement the terms of this License with terms:
352 |
353 | a) Disclaiming warranty or limiting liability differently from the
354 | terms of sections 15 and 16 of this License; or
355 |
356 | b) Requiring preservation of specified reasonable legal notices or
357 | author attributions in that material or in the Appropriate Legal
358 | Notices displayed by works containing it; or
359 |
360 | c) Prohibiting misrepresentation of the origin of that material, or
361 | requiring that modified versions of such material be marked in
362 | reasonable ways as different from the original version; or
363 |
364 | d) Limiting the use for publicity purposes of names of licensors or
365 | authors of the material; or
366 |
367 | e) Declining to grant rights under trademark law for use of some
368 | trade names, trademarks, or service marks; or
369 |
370 | f) Requiring indemnification of licensors and authors of that
371 | material by anyone who conveys the material (or modified versions of
372 | it) with contractual assumptions of liability to the recipient, for
373 | any liability that these contractual assumptions directly impose on
374 | those licensors and authors.
375 |
376 | All other non-permissive additional terms are considered "further
377 | restrictions" within the meaning of section 10. If the Program as you
378 | received it, or any part of it, contains a notice stating that it is
379 | governed by this License along with a term that is a further
380 | restriction, you may remove that term. If a license document contains
381 | a further restriction but permits relicensing or conveying under this
382 | License, you may add to a covered work material governed by the terms
383 | of that license document, provided that the further restriction does
384 | not survive such relicensing or conveying.
385 |
386 | If you add terms to a covered work in accord with this section, you
387 | must place, in the relevant source files, a statement of the
388 | additional terms that apply to those files, or a notice indicating
389 | where to find the applicable terms.
390 |
391 | Additional terms, permissive or non-permissive, may be stated in the
392 | form of a separately written license, or stated as exceptions;
393 | the above requirements apply either way.
394 |
395 | 8. Termination.
396 |
397 | You may not propagate or modify a covered work except as expressly
398 | provided under this License. Any attempt otherwise to propagate or
399 | modify it is void, and will automatically terminate your rights under
400 | this License (including any patent licenses granted under the third
401 | paragraph of section 11).
402 |
403 | However, if you cease all violation of this License, then your
404 | license from a particular copyright holder is reinstated (a)
405 | provisionally, unless and until the copyright holder explicitly and
406 | finally terminates your license, and (b) permanently, if the copyright
407 | holder fails to notify you of the violation by some reasonable means
408 | prior to 60 days after the cessation.
409 |
410 | Moreover, your license from a particular copyright holder is
411 | reinstated permanently if the copyright holder notifies you of the
412 | violation by some reasonable means, this is the first time you have
413 | received notice of violation of this License (for any work) from that
414 | copyright holder, and you cure the violation prior to 30 days after
415 | your receipt of the notice.
416 |
417 | Termination of your rights under this section does not terminate the
418 | licenses of parties who have received copies or rights from you under
419 | this License. If your rights have been terminated and not permanently
420 | reinstated, you do not qualify to receive new licenses for the same
421 | material under section 10.
422 |
423 | 9. Acceptance Not Required for Having Copies.
424 |
425 | You are not required to accept this License in order to receive or
426 | run a copy of the Program. Ancillary propagation of a covered work
427 | occurring solely as a consequence of using peer-to-peer transmission
428 | to receive a copy likewise does not require acceptance. However,
429 | nothing other than this License grants you permission to propagate or
430 | modify any covered work. These actions infringe copyright if you do
431 | not accept this License. Therefore, by modifying or propagating a
432 | covered work, you indicate your acceptance of this License to do so.
433 |
434 | 10. Automatic Licensing of Downstream Recipients.
435 |
436 | Each time you convey a covered work, the recipient automatically
437 | receives a license from the original licensors, to run, modify and
438 | propagate that work, subject to this License. You are not responsible
439 | for enforcing compliance by third parties with this License.
440 |
441 | An "entity transaction" is a transaction transferring control of an
442 | organization, or substantially all assets of one, or subdividing an
443 | organization, or merging organizations. If propagation of a covered
444 | work results from an entity transaction, each party to that
445 | transaction who receives a copy of the work also receives whatever
446 | licenses to the work the party's predecessor in interest had or could
447 | give under the previous paragraph, plus a right to possession of the
448 | Corresponding Source of the work from the predecessor in interest, if
449 | the predecessor has it or can get it with reasonable efforts.
450 |
451 | You may not impose any further restrictions on the exercise of the
452 | rights granted or affirmed under this License. For example, you may
453 | not impose a license fee, royalty, or other charge for exercise of
454 | rights granted under this License, and you may not initiate litigation
455 | (including a cross-claim or counterclaim in a lawsuit) alleging that
456 | any patent claim is infringed by making, using, selling, offering for
457 | sale, or importing the Program or any portion of it.
458 |
459 | 11. Patents.
460 |
461 | A "contributor" is a copyright holder who authorizes use under this
462 | License of the Program or a work on which the Program is based. The
463 | work thus licensed is called the contributor's "contributor version".
464 |
465 | A contributor's "essential patent claims" are all patent claims
466 | owned or controlled by the contributor, whether already acquired or
467 | hereafter acquired, that would be infringed by some manner, permitted
468 | by this License, of making, using, or selling its contributor version,
469 | but do not include claims that would be infringed only as a
470 | consequence of further modification of the contributor version. For
471 | purposes of this definition, "control" includes the right to grant
472 | patent sublicenses in a manner consistent with the requirements of
473 | this License.
474 |
475 | Each contributor grants you a non-exclusive, worldwide, royalty-free
476 | patent license under the contributor's essential patent claims, to
477 | make, use, sell, offer for sale, import and otherwise run, modify and
478 | propagate the contents of its contributor version.
479 |
480 | In the following three paragraphs, a "patent license" is any express
481 | agreement or commitment, however denominated, not to enforce a patent
482 | (such as an express permission to practice a patent or covenant not to
483 | sue for patent infringement). To "grant" such a patent license to a
484 | party means to make such an agreement or commitment not to enforce a
485 | patent against the party.
486 |
487 | If you convey a covered work, knowingly relying on a patent license,
488 | and the Corresponding Source of the work is not available for anyone
489 | to copy, free of charge and under the terms of this License, through a
490 | publicly available network server or other readily accessible means,
491 | then you must either (1) cause the Corresponding Source to be so
492 | available, or (2) arrange to deprive yourself of the benefit of the
493 | patent license for this particular work, or (3) arrange, in a manner
494 | consistent with the requirements of this License, to extend the patent
495 | license to downstream recipients. "Knowingly relying" means you have
496 | actual knowledge that, but for the patent license, your conveying the
497 | covered work in a country, or your recipient's use of the covered work
498 | in a country, would infringe one or more identifiable patents in that
499 | country that you have reason to believe are valid.
500 |
501 | If, pursuant to or in connection with a single transaction or
502 | arrangement, you convey, or propagate by procuring conveyance of, a
503 | covered work, and grant a patent license to some of the parties
504 | receiving the covered work authorizing them to use, propagate, modify
505 | or convey a specific copy of the covered work, then the patent license
506 | you grant is automatically extended to all recipients of the covered
507 | work and works based on it.
508 |
509 | A patent license is "discriminatory" if it does not include within
510 | the scope of its coverage, prohibits the exercise of, or is
511 | conditioned on the non-exercise of one or more of the rights that are
512 | specifically granted under this License. You may not convey a covered
513 | work if you are a party to an arrangement with a third party that is
514 | in the business of distributing software, under which you make payment
515 | to the third party based on the extent of your activity of conveying
516 | the work, and under which the third party grants, to any of the
517 | parties who would receive the covered work from you, a discriminatory
518 | patent license (a) in connection with copies of the covered work
519 | conveyed by you (or copies made from those copies), or (b) primarily
520 | for and in connection with specific products or compilations that
521 | contain the covered work, unless you entered into that arrangement,
522 | or that patent license was granted, prior to 28 March 2007.
523 |
524 | Nothing in this License shall be construed as excluding or limiting
525 | any implied license or other defenses to infringement that may
526 | otherwise be available to you under applicable patent law.
527 |
528 | 12. No Surrender of Others' Freedom.
529 |
530 | If conditions are imposed on you (whether by court order, agreement or
531 | otherwise) that contradict the conditions of this License, they do not
532 | excuse you from the conditions of this License. If you cannot convey a
533 | covered work so as to satisfy simultaneously your obligations under this
534 | License and any other pertinent obligations, then as a consequence you may
535 | not convey it at all. For example, if you agree to terms that obligate you
536 | to collect a royalty for further conveying from those to whom you convey
537 | the Program, the only way you could satisfy both those terms and this
538 | License would be to refrain entirely from conveying the Program.
539 |
540 | 13. Remote Network Interaction; Use with the GNU General Public License.
541 |
542 | Notwithstanding any other provision of this License, if you modify the
543 | Program, your modified version must prominently offer all users
544 | interacting with it remotely through a computer network (if your version
545 | supports such interaction) an opportunity to receive the Corresponding
546 | Source of your version by providing access to the Corresponding Source
547 | from a network server at no charge, through some standard or customary
548 | means of facilitating copying of software. This Corresponding Source
549 | shall include the Corresponding Source for any work covered by version 3
550 | of the GNU General Public License that is incorporated pursuant to the
551 | following paragraph.
552 |
553 | Notwithstanding any other provision of this License, you have
554 | permission to link or combine any covered work with a work licensed
555 | under version 3 of the GNU General Public License into a single
556 | combined work, and to convey the resulting work. The terms of this
557 | License will continue to apply to the part which is the covered work,
558 | but the work with which it is combined will remain governed by version
559 | 3 of the GNU General Public License.
560 |
561 | 14. Revised Versions of this License.
562 |
563 | The Free Software Foundation may publish revised and/or new versions of
564 | the GNU Affero General Public License from time to time. Such new versions
565 | will be similar in spirit to the present version, but may differ in detail to
566 | address new problems or concerns.
567 |
568 | Each version is given a distinguishing version number. If the
569 | Program specifies that a certain numbered version of the GNU Affero General
570 | Public License "or any later version" applies to it, you have the
571 | option of following the terms and conditions either of that numbered
572 | version or of any later version published by the Free Software
573 | Foundation. If the Program does not specify a version number of the
574 | GNU Affero General Public License, you may choose any version ever published
575 | by the Free Software Foundation.
576 |
577 | If the Program specifies that a proxy can decide which future
578 | versions of the GNU Affero General Public License can be used, that proxy's
579 | public statement of acceptance of a version permanently authorizes you
580 | to choose that version for the Program.
581 |
582 | Later license versions may give you additional or different
583 | permissions. However, no additional obligations are imposed on any
584 | author or copyright holder as a result of your choosing to follow a
585 | later version.
586 |
587 | 15. Disclaimer of Warranty.
588 |
589 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
590 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
591 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
592 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
593 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
594 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
595 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
596 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
597 |
598 | 16. Limitation of Liability.
599 |
600 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
601 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
602 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
603 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
604 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
605 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
606 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
607 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
608 | SUCH DAMAGES.
609 |
610 | 17. Interpretation of Sections 15 and 16.
611 |
612 | If the disclaimer of warranty and limitation of liability provided
613 | above cannot be given local legal effect according to their terms,
614 | reviewing courts shall apply local law that most closely approximates
615 | an absolute waiver of all civil liability in connection with the
616 | Program, unless a warranty or assumption of liability accompanies a
617 | copy of the Program in return for a fee.
618 |
619 | END OF TERMS AND CONDITIONS
620 |
621 | How to Apply These Terms to Your New Programs
622 |
623 | If you develop a new program, and you want it to be of the greatest
624 | possible use to the public, the best way to achieve this is to make it
625 | free software which everyone can redistribute and change under these terms.
626 |
627 | To do so, attach the following notices to the program. It is safest
628 | to attach them to the start of each source file to most effectively
629 | state the exclusion of warranty; and each file should have at least
630 | the "copyright" line and a pointer to where the full notice is found.
631 |
632 |
633 | Copyright (C) <year>  <name of author>
634 |
635 | This program is free software: you can redistribute it and/or modify
636 | it under the terms of the GNU Affero General Public License as published
637 | by the Free Software Foundation, either version 3 of the License, or
638 | (at your option) any later version.
639 |
640 | This program is distributed in the hope that it will be useful,
641 | but WITHOUT ANY WARRANTY; without even the implied warranty of
642 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
643 | GNU Affero General Public License for more details.
644 |
645 | You should have received a copy of the GNU Affero General Public License
646 | along with this program. If not, see <https://www.gnu.org/licenses/>.
647 |
648 | Also add information on how to contact you by electronic and paper mail.
649 |
650 | If your software can interact with users remotely through a computer
651 | network, you should also make sure that it provides a way for users to
652 | get its source. For example, if your program is a web application, its
653 | interface could display a "Source" link that leads users to an archive
654 | of the code. There are many ways you could offer source, and different
655 | solutions will be better for different programs; see section 13 for the
656 | specific requirements.
657 |
658 | You should also get your employer (if you work as a programmer) or school,
659 | if any, to sign a "copyright disclaimer" for the program, if necessary.
660 | For more information on this, and how to apply and follow the GNU AGPL, see
661 | <https://www.gnu.org/licenses/>.
662 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
2 | include requirements.txt
3 | include antlr_plsql/antlr_py/*
4 | include antlr_plsql/speaker.yml
5 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | JS_DIR=antlr_plsql/js
2 | PY_DIR=antlr_plsql/antlr_py
3 |
4 | .PHONY: clean
5 |
6 | all: clean test
7 |
8 | buildpy:
9 | 	antlr4 -Dlanguage=Python3 -o $(PY_DIR) -visitor antlr_plsql/plsql.g4 \
10 | 		&& mv $(PY_DIR)/antlr_plsql/* $(PY_DIR) && rmdir $(PY_DIR)/antlr_plsql
11 | 
12 | buildjs:
13 | 	antlr4 -Dlanguage=JavaScript -o $(JS_DIR) antlr_plsql/plsql.g4 \
14 | 		&& mv $(JS_DIR)/antlr_plsql/* $(JS_DIR) && rmdir $(JS_DIR)/antlr_plsql
15 | 
16 | build: buildpy
17 | 
18 | clean:
19 | 	find . \( -name \*.pyc -o -name \*.pyo -o -name __pycache__ \) -prune -exec rm -rf {} +
20 | 	rm -rf antlr_plsql.egg-info
21 | 
22 | test: clean
23 | 	pytest
24 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # antlr-plsql
2 |
3 | [](https://travis-ci.org/datacamp/antlr-plsql)
4 | [](https://badge.fury.io/py/antlr-plsql)
5 |
6 | ## Development
7 |
8 | ANTLR requires Java, so we suggest you use Docker when building grammars. The `Makefile` contains directives to clean, build, test and deploy the ANTLR grammar. It does not run Docker itself, so run `make` inside Docker.
9 |
10 | ### Build the grammar
11 |
12 | ```bash
13 | # Build the docker container
14 | docker build -t antlr_plsql .
15 |
16 | # Run the container to build the python grammar
17 | # Write parser files to local file system through volume mounting
18 | docker run -it -v ${PWD}:/usr/src/app antlr_plsql make build
19 | ```
20 |
21 | ### Set up the Python module
22 |
23 | Now that the Python parsing files are available, you can install them with `pip`:
24 |
25 | ```bash
26 | pip install -r requirements.txt
27 | pip install -e .
28 | ```
29 |
30 | And parse SQL code in Python:
31 |
32 | ```python
33 | from antlr_plsql import ast
34 | ast.parse("SELECT a from b")
35 | ```
36 |
37 | ### Using the AST viewer
38 |
39 | If you're actively developing the ANTLR grammar or the tree shaping, it's a good idea to set up the [AST viewer](https://github.com/datacamp/ast-viewer) locally so you can immediately see the impact of your changes in a visual way.
40 |
41 | - Clone the ast-viewer repo and build the Docker image according to the instructions.
42 | - Spin up a docker container that volume mounts the Python package, symlink-installs the package and runs the server on port 3000:
43 |
44 | ```bash
45 | docker run -it \
46 | -u root \
47 | -v ~/workspace/antlr-plsql:/app/app/antlr-plsql \
48 | -p 3000:3000 \
49 | ast-viewer \
50 | /bin/bash -c "echo 'Install development requirements in development:' \
51 | && pip install --no-deps -e app/antlr-plsql \
52 | && python3 run.py"
53 | ```
54 |
55 | When simultaneously developing other packages, volume mount and install those too:
56 |
57 | ```bash
58 | docker run -it \
59 | -u root \
60 | -v ~/workspace/antlr-ast:/app/app/antlr-ast \
61 | -v ~/workspace/antlr-plsql:/app/app/antlr-plsql \
62 | -v ~/workspace/antlr-tsql:/app/app/antlr-tsql \
63 | -p 3000:3000 \
64 | ast-viewer \
65 | /bin/bash -c "echo 'Install development requirements in development:' \
66 | && pip install --no-deps -e app/antlr-ast \
67 | && pip install --no-deps -e app/antlr-plsql \
68 | && pip install --no-deps -e app/antlr-tsql \
69 | && python3 run.py"
70 | ```
71 |
72 | - If you update the tree shaping logic in this repo, the app will auto-update.
73 | - If you change the grammar, you will have to first rebuild the grammar (with the `antlr_plsql` docker image) and restart the `ast-viewer` container.
74 |
75 | ### Run tests
76 |
77 | ```bash
78 | # Similar to building the grammar, but running tests
79 | # and not saving the generated files
80 | docker build -t antlr_plsql .
81 | docker run -t antlr_plsql make build test
82 | ```
83 |
84 | Or run the tests locally: first build the grammar, then run:
85 | 
86 | ```bash
87 | pytest
88 | ```
89 |
90 | ## Travis deployment
91 |
92 | - Builds the Docker image.
93 | - Runs the Docker image to build the grammar and run the unit tests.
94 | - Deploys the resulting Python files to PyPI when a new release is made, so they can be installed easily.
95 |
--------------------------------------------------------------------------------
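The `ast.parse` call shown in the README returns the simplified tree built by `antlr_plsql/ast.py` (dumped further below). A minimal sketch of inspecting that tree, assuming the `Script` and `SelectStmt` field names defined in `ast.py`; the example query and the exact node representation (which comes from `antlr-ast`) are assumptions:

```python
# Sketch only: field names follow the _fields_spec definitions in ast.py;
# the expected node types are assumptions, not verified output.
from antlr_plsql import ast

tree = ast.parse("SELECT id, name FROM artists WHERE id > 100")
stmt = tree.body[0]           # Script.body holds the parsed statements
print(type(stmt).__name__)    # expected: SelectStmt
print(stmt.target_list)       # selected columns
print(stmt.from_clause)       # FROM clause
print(stmt.where_clause)      # WHERE condition
```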
/antlr_plsql/__init__.py:
--------------------------------------------------------------------------------
1 | __version__ = "0.9.1"
2 | from . import antlr_py as grammar
3 | from . import ast
4 |
--------------------------------------------------------------------------------
/antlr_plsql/antlr_py/__init__.py:
--------------------------------------------------------------------------------
1 | from .plsqlLexer import plsqlLexer as Lexer
2 | from .plsqlListener import plsqlListener as Listener
3 | from .plsqlParser import plsqlParser as Parser
4 | from .plsqlVisitor import plsqlVisitor as Visitor
5 |
--------------------------------------------------------------------------------
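The generated classes exported here can also be driven directly with the ANTLR Python runtime, bypassing the tree shaping in `ast.py`. A minimal sketch, assuming uppercase input (`ast.parse` uppercases its input via `CaseTransformInputStream.UPPER`) and the `sql_script` start rule that `ast.py` uses:

```python
# Sketch only: drives the generated lexer/parser with the plain antlr4 runtime;
# antlr_plsql.ast.parse does this for you and additionally shapes the tree.
from antlr4 import InputStream, CommonTokenStream

from antlr_plsql import grammar

lexer = grammar.Lexer(InputStream("SELECT A FROM B"))  # uppercase SQL assumed
parser = grammar.Parser(CommonTokenStream(lexer))
tree = parser.sql_script()                             # start rule used by ast.parse
print(tree.toStringTree(recog=parser))                 # raw parse tree as an s-expression
```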
/antlr_plsql/ast.py:
--------------------------------------------------------------------------------
1 | import yaml
2 | import pkg_resources
3 | import inspect
4 |
5 | from antlr_ast.ast import (
6 | parse as parse_ast,
7 | process_tree,
8 | BaseAstVisitor,
9 | AliasNode,
10 | BaseNodeTransformer,
11 | get_alias_nodes,
12 | Speaker,
13 | # references for export: # TODO: put package exports in __init__?
14 | Terminal,
15 | BaseNode as AstNode,
16 | AntlrException as ParseError,
17 | dump_node, # TODO only used in tests
18 | )
19 | from antlr_ast.inputstream import CaseTransformInputStream
20 |
21 | from antlr_plsql import grammar
22 |
23 | # AST -------------------------------------------------------------------------
24 | # TODO: Finish Unary+Binary Expr
25 | # sql_script
26 |
27 |
28 | def parse(sql_text, start="sql_script", **kwargs):
29 | antlr_tree = parse_ast(
30 | grammar, sql_text, start, transform=CaseTransformInputStream.UPPER, **kwargs
31 | )
32 | simple_tree = process_tree(
33 | antlr_tree, base_visitor_cls=AstVisitor, transformer_cls=Transformer
34 | )
35 |
36 | return simple_tree
37 |
38 |
39 | # AliasNodes
40 |
41 |
42 | class Script(AliasNode):
43 | _fields_spec = ["body"]
44 | _rules = [("Sql_script", "_from_sql_script")]
45 | _priority = 0
46 |
47 | @classmethod
48 | def _from_sql_script(cls, node):
49 | alias = cls.from_spec(node)
50 | alias.body = cls.combine(
51 | node.unit_statement, node.sql_plus_command
52 | ) # todo: how does unit_statement get dml prop: transformer sets it (content of list fields is replaced)
53 | return alias
54 |
55 |
56 | class SelectStmt(AliasNode):
57 | _fields_spec = [
58 | "pref",
59 | "target_list",
60 | "into_clause",
61 | "from_clause",
62 | "where_clause",
63 | "hierarchical_query_clause",
64 | "group_by_clause",
65 | "having_clause",
66 | "model_clause",
67 | "for_update_clause",
68 | "order_by_clause",
69 | "limit_clause",
70 | "with_clause",
71 | ]
72 | _rules = [
73 | ("Query_block", "_from_query_block"),
74 | ("Select_statement", "_from_select"),
75 | ]
76 |
77 | _priority = 1
78 |
79 | @classmethod
80 | def _from_query_block(cls, node):
81 | # allowing arbitrary order for some clauses makes their results a single item list
82 | # future todo: represent unpacking with *
83 | # could also do testing to make sure clauses weren't specified multiple times
84 | query = cls.from_spec(node)
85 | unlist_clauses = cls._fields[cls._fields.index("group_by_clause") :]
86 | for k in unlist_clauses:
87 | attr = getattr(query, k, [])
88 | if isinstance(attr, list):
89 | setattr(query, k, attr[0] if len(attr) else None)
90 |
91 | return query
92 |
93 | @classmethod
94 | def _from_select(cls, node, helper):
95 | select = node.subquery
96 |
97 | while helper.isinstance(select, "SubqueryParen"):
98 | # unpack brackets recursively
99 | select = select.subquery
100 |
101 | # strict: use safe access because only one rule alias has this property
102 | if select.query_block:
103 | select = select.query_block
104 |
105 | with_clause = node.subquery_factoring_clause
106 | if with_clause:
107 | select.with_clause = with_clause.factoring_element
108 |
109 | if not helper.isinstance(select, "SubqueryCompound"):
110 | select = cls.from_spec(select)
111 |
112 | return select
113 |
114 |
115 | class Union(AliasNode):
116 | _fields_spec = ["left", "op", "right", "order_by_clause", "with_clause"]
117 | _rules = [("SubqueryCompound", "_from_subquery_compound")]
118 |
119 | @classmethod
120 | def _from_subquery_compound(cls, node):
121 | # hoists up ORDER BY clauses from the right SELECT statement
122 | # since the final ORDER BY applies to the entire statement (not just subquery)
123 | union = cls.from_spec(node)
124 |
125 | order_by = getattr(union.right.query_block, "order_by_clause", None)
126 | union.order_by_clause = order_by
127 | # remove from right SELECT
128 | if order_by:
129 | union.right.query_block.order_by_clause = None
130 |
131 | return union
132 |
133 |
134 | class Identifier(AliasNode):
135 | _fields_spec = ["fields"]
136 | _rules = ["Dot_id"]
137 |
138 |
139 | # TODO: similar nodes for keyword( combination)s?
140 | class Star(AliasNode):
141 | _fields_spec = []
142 | _rules = ["Star"]
143 |
144 |
145 | class TableAliasExpr(AliasNode):
146 | # TODO order_by_clause
147 | _fields_spec = ["alias=query_name", "alias_columns=paren_column_list", "subquery"]
148 | _rules = ["Factoring_element"]
149 |
150 |
151 | class AliasExpr(AliasNode):
152 | _fields_spec = [
153 | # Alias_expr
154 | "alias",
155 | "expr",
156 | # TableRefAux
157 | "expr=table_ref_aux.dml_table_expression_clause",
158 | "alias=table_alias",
159 | ]
160 | _rules = [("Alias_expr", "_unpack_alias"), ("TableRefAux", "_unpack_alias")]
161 |
162 | @classmethod
163 | def _unpack_alias(cls, node):
164 | alias = cls.from_spec(node)
165 | if alias.alias is not None:
166 | return alias
167 | else:
168 | return alias.expr # TODO: occurrence?
169 |
170 |
171 | class BinaryExpr(AliasNode):
172 | _fields_spec = ["left", "op", "right"]
173 | _rules = [
174 | "BinaryExpr",
175 | "IsExpr",
176 | "RelExpr",
177 | "MemberExpr",
178 | "AndExpr",
179 | "OrExpr",
180 | ("ModExpr", "_from_mod"),
181 | ("BetweenExpr", "_from_mod"),
182 | ("LikeExpr", "_from_mod"),
183 | ("InExpr", "_from_in_expr"),
184 | ]
185 |
186 | @classmethod
187 | def _from_mod(cls, node):
188 | bin_expr = cls.from_spec(node)
189 | if node.NOT:
190 | return UnaryExpr(node, {"op": node.NOT, "expr": bin_expr})
191 |
192 | return bin_expr
193 |
194 | @classmethod
195 | def _from_in_expr(cls, node):
196 | # NOT IN produces unary expression
197 | bin_or_unary = cls._from_mod(node)
198 | right = node.subquery or node.expression_list
199 | if isinstance(bin_or_unary, UnaryExpr):
200 | bin_or_unary.expr.right = right
201 | else:
202 | bin_or_unary.right = right
203 | return bin_or_unary
204 |
205 |
206 | class UnaryExpr(AliasNode):
207 | _fields_spec = ["op", "expr", "expr=unary_expression"]
208 | _rules = ["UnaryExpr", "CursorExpr", "NotExpr"]
209 |
210 |
211 | class OrderByExpr(AliasNode):
212 | _fields_spec = ["expr=order_by_elements"]
213 | _rules = ["Order_by_clause"]
214 |
215 |
216 | class GroupBy(AliasNode):
217 | _fields_spec = ["expr=group_by_elements"]
218 | _rules = ["Group_by_clause"]
219 |
220 |
221 | class SortBy(AliasNode):
222 | _fields_spec = ["expr=expression", "direction", "nulls"]
223 | _rules = ["Order_by_elements"]
224 |
225 |
226 | class JoinExpr(AliasNode):
227 | _fields_spec = [
228 | "left=table_ref",
229 | "join_type=join_clause.join_type",
230 | "right=join_clause.table_ref",
231 | "cond=join_clause.join_on_part.condition",
232 | # fields below are Oracle specific
233 | "using=join_clause.join_using_part",
234 | "query_partition_clause=join_clause.query_partition_clause",
235 | ]
236 | _rules = ["JoinExpr"]
237 |
238 |
239 | from collections.abc import Sequence
240 |
241 |
242 | class Call(AliasNode):
243 | _fields_spec = [
244 | "name",
245 | "name=dot_id",
246 | "pref",
247 | "args",
248 | "args=concatenation",
249 | "args=function_argument.argument",
250 | "args=function_argument_analytic",
251 | "component=regular_id",
252 | "within_clause",
253 | "over_clause",
254 | ]
255 | _rules = [
256 | ("Aggregate_windowed_function", "_from_aggregate_call"),
257 | "ExtractCall",
258 | "FuncCall",
259 | ("WithinOrOverCall", "_from_within"),
260 | ("String_function", "_from_str_func"),
261 | ]
262 |
263 | @classmethod
264 | def _from_aggregate_call(cls, node):
265 | alias = cls.from_spec(node)
266 | name = node.children[0]
267 | alias.name = (name if isinstance(name, str) else name.get_text()).upper()
268 |
269 | if alias.args is None:
270 | alias.args = []
271 | elif not isinstance(alias.args, Sequence):
272 | alias.args = [alias.args]
273 | return alias
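# Worked example for the method above (editorial sketch, not repository code): for an
# aggregate such as `avg(salary)` the leading function-name token is upper-cased to
# "AVG", and `args` is normalised to a list (None becomes [], a single argument
# becomes a one-element list).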
274 |
275 | @classmethod
276 | def _from_within(cls, node):
277 | alias = cls.from_spec(node)
278 |
279 | within_or_over_part = node.within_or_over_part
280 | if within_or_over_part is not None:
281 | # only handles one such clause of each kind (a later one overwrites an earlier one)
282 | # TODO: settle a convention for fields that can occur multiple times
283 | # >1 (>0): always, mostly, sometimes, exceptionally?
284 | for el in cls.extend_node_list([], within_or_over_part):
285 | within_clause = el.order_by_clause
286 | if within_clause is not None:
287 | alias.within_clause = within_clause
288 | over_clause = el.over_clause
289 | if over_clause is not None:
290 | alias.over_clause = over_clause
291 |
292 | return alias
293 |
294 | @classmethod
295 | def _from_str_func(cls, node):
296 | alias = cls.from_spec(node)
297 | # TODO: should the field be a list whenever it is a list in one (other) alternative?
298 | alias.args = cls.combine(
299 | node.expression,
300 | node.atom,
301 | node.expressions,
302 | node.quoted_string,
303 | node.table_element,
304 | node.standard_function,
305 | )
306 | return alias
307 |
308 |
309 | class Cast(AliasNode):
310 | _fields_spec = [
311 | "type=type_spec",
312 | "statement=subquery",
313 | "statement=concatenation",
314 | "statement=expression",
315 | "statement=atom",
316 | ]
317 | _rules = ["CastCall"]
318 |
319 |
320 | class OverClause(AliasNode):
321 | _fields_spec = [
322 | "partition=query_partition_clause",
323 | "order_by_clause",
324 | "windowing_clause",
325 | ]
326 | _rules = ["Over_clause"]
327 |
328 |
329 | class Case(AliasNode):
330 | _fields_spec = [
331 | "switches=simple_case_when_part",
332 | "switches=searched_case_when_part",
333 | "else_expr=case_else_part",
334 | ]
335 | _rules = ["Simple_case_statement", "Searched_case_statement"] # case_statement
336 | # 'label' in grammar not correct?
337 |
338 |
339 | class CaseWhen(AliasNode):
340 | _fields_spec = ["when=whenExpr", "then=thenExpr"]
341 | _rules = ["Simple_case_when_part", "Searched_case_when_part"]
342 |
343 |
344 | class PartitionBy(AliasNode):
345 | _fields_spec = ["expression"]
346 | _rules = ["Query_partition_clause"]
347 |
348 |
349 | class RenameColumn(AliasNode):
350 | _fields_spec = ["old_name=old_column_name", "new_name=new_column_name"]
351 | _rules = ["Rename_column_clause"]
352 |
353 |
354 | class Column(AliasNode):
355 | _fields_spec = [
356 | "name=column_name",
357 | "data_type=datatype",
358 | "data_type=type_name",
359 | "constraints=inline_constraint",
360 | ]
361 | _rules = ["Column_definition"]
362 |
363 |
364 | class AddColumns(AliasNode):
365 | _fields_spec = ["columns=column_definition"]
366 | _rules = ["Add_column_clause"]
367 |
368 |
369 | class DropColumn(AliasNode):
370 | _fields_spec = ["names"]
371 | _rules = ["Drop_column_clause"]
372 |
373 |
374 | class AlterColumn(AliasNode):
375 | _fields_spec = ["name=column_name", "op", "data_type=datatype", "expression"]
376 | _rules = ["Alter_column_clause"]
377 |
378 |
379 | class Reference(AliasNode):
380 | _fields_spec = ["table=tableview_name", "columns=paren_column_list"]
381 | _rules = ["References_clause"]
382 |
383 |
384 | class CreateTable(AliasNode):
385 | _fields_spec = [
386 | "name=tableview_name",
387 | "temporary=TEMPORARY",
388 | "query=select_statement",
389 | # todo: + syntax (also multiple fields, e.g. constraints)
390 | "columns=relational_table.relational_properties.column_definition",
391 | ]
392 | _rules = ["Create_table"]
393 |
394 |
395 | class AlterTable(AliasNode):
396 | _fields_spec = [
397 | "name=tableview_name",
398 | "changes=column_clauses",
399 | "changes=constraint_clauses",
400 | ]
401 | _rules = ["Alter_table"]
402 |
403 |
404 | class AddConstraints(AliasNode):
405 | _fields_spec = ["constraints=out_of_line_constraint"]
406 |
407 |
408 | class DropConstraints(AliasNode):
409 | # TODO: check exercises
410 | _fields_spec = ["constraints=drop_constraint_clause"]
411 |
412 |
413 | class DropConstraint(AliasNode):
414 | _fields_spec = ["name=drop_primary_key_or_unique_or_generic_clause"]
415 | _rules = ["Drop_constraint_clause"]
416 |
417 |
418 | class DropTable(AliasNode):
419 | _fields_spec = ["name=tableview_name", "existence_check"]
420 | _rules = [("Drop_table", "_from_table")]
421 |
422 | @classmethod
423 | def _from_table(cls, node):
424 | alias = cls.from_spec(node)
425 |
426 | # TODO: format? make a combined rule and set it via _fields_spec?
427 | if node.IF and node.EXISTS:
428 | alias.existence_check = "if_exists"
429 | return alias
430 |
431 |
432 | class Constraint(AliasNode):
433 | _fields_spec = [
434 | "name=constraint_name",
435 | "type",
436 | "columns=paren_column_list",
437 | "columns=foreign_key_clause.paren_column_list",
438 | "reference=foreign_key_clause.references_clause",
439 | ]
440 | _rules = [("Out_of_line_constraint", "_from_constraint")]
441 |
442 | @classmethod
443 | def _from_constraint(cls, node):
444 | alias = cls.from_spec(node)
445 |
446 | foreign_key_clause = node.foreign_key_clause
447 | if node.UNIQUE:
448 | alias.type = "unique"
449 | elif node.PRIMARY and node.KEY:
450 | # TODO: format? make a combined primary_key rule and set it via _fields_spec?
451 | alias.type = "primary_key"
452 | elif (
453 | foreign_key_clause and foreign_key_clause.FOREIGN and foreign_key_clause.KEY
454 | ):
455 | alias.type = "foreign_key"
456 | elif node.CHECK:
457 | alias.type = "check"
458 | return alias
459 |
460 |
461 | class InsertStmt(AliasNode):
462 | # TODO: use path field spec in more places
463 | _fields_spec = [
464 | "table=single_table_insert.insert_into_clause.general_table_ref",
465 | "columns=single_table_insert.insert_into_clause.paren_column_list",
466 | "values=single_table_insert.values_clause.expression_list",
467 | "query=single_table_insert.select_statement",
468 | ]
469 | _rules = ["Insert_statement"]
470 |
471 |
472 | class UpdateStmt(AliasNode):
473 | _fields_spec = [
474 | "table=general_table_ref",
475 | "where_clause",
476 | "from_clause",
477 | "updates=update_set_clause.column_based_update_set_clause",
478 | ]
479 | _rules = ["Update_statement"]
480 |
481 |
482 | class Update(AliasNode):
483 | # TODO: reuse BinaryExpr? it does not fit updates that set multiple columns at once
484 | _fields_spec = ["column=column_name", "expression"]
485 | _rules = ["Column_based_update_set_clause"]
486 |
487 |
488 | class DeleteStmt(AliasNode):
489 | _fields_spec = ["table=general_table_ref", "where_clause"]
490 | _rules = ["Delete_statement"]
491 |
492 |
493 | # class FunctionArgument
494 |
495 |
496 | # PARSE TREE VISITOR ----------------------------------------------------------
497 |
498 |
499 | class Transformer(BaseNodeTransformer):
500 | @staticmethod
501 | def visit_Relational_operator(node):
502 | return Terminal.from_text(node.get_text(), node._ctx) # node.children[0]?
503 |
504 | @staticmethod
505 | def visit_SubqueryParen(node):
506 | # todo: auto-simplify?
507 | return node.subquery
508 |
509 | @staticmethod
510 | def visit_StarTable(node):
511 | identifier = node.dot_id
512 | identifier.fields += [node.star] # todo
513 | return identifier
514 |
515 | @staticmethod
516 | def visit_Function_argument_analytic(node):
517 | # future todo: declarative? needed?
518 | if not (node.respect_or_ignore_nulls or node.keep_clause):
519 | return node.argument
520 | else:
521 | return node
522 |
523 | # def visitIs_part(self, ctx):
524 | # return ctx
525 |
526 | @staticmethod
527 | def visit_Constraint_clauses(node):
528 | # future todo: declarative?
529 | # - create grammar rule for each branch
530 | # - predicate in _rules
531 | if node.ADD:
532 | return AddConstraints.from_spec(node)
533 | if node.drop_constraint_clause:
534 | return DropConstraints.from_spec(node)
535 |
536 | @staticmethod
537 | def visit_Where_clause(node):
538 | return node.current_of_clause or node.expression
539 |
540 | @staticmethod
541 | def visit_Limit_clause(node):
542 | return node.expression
543 |
544 | @staticmethod
545 | def visit_Column_alias(node):
546 | return node.r_id or node.alias_quoted_string
547 |
548 | @staticmethod
549 | def visit_Having_clause(node):
550 | return node.condition
551 |
552 | @staticmethod
553 | def visit_From_clause(node):
554 | return node.table_ref
555 |
556 | @staticmethod
557 | def visit_Case_else_part(node):
558 | return node.seq_of_statements or node.expression
559 |
560 | @staticmethod
561 | def visit_Table_alias(node):
562 | return node.r_id or node.alias_quoted_string
563 |
564 | @staticmethod
565 | def visit_Expression_list(node):
566 | return node.expression
567 |
568 | @staticmethod
569 | def visit_Into_clause(node):
570 | return node.variable_name
571 |
572 | @staticmethod
573 | def visit_Drop_primary_key_or_unique_or_generic_clause(node):
574 | return node.constraint_name
575 |
576 |
577 | # TODO: port from remove_terminal:
578 | # - table_ref_list
579 |
580 |
581 | # Add visit methods to Transformer for all nodes (in _rules) that convert to AliasNode instances
582 |
583 | alias_nodes = get_alias_nodes(globals().values())
584 | Transformer.bind_alias_nodes(alias_nodes)
585 |
586 |
587 | # Create Speaker
588 |
589 | speaker_cfg = yaml.safe_load(
590 | pkg_resources.resource_stream("antlr_plsql", "speaker.yml")
591 | )
592 | speaker = Speaker(**speaker_cfg)
593 |
594 |
595 | class AstVisitor(BaseAstVisitor):
596 | def visitTerminal(self, ctx):
597 | """Converts case insensitive keywords and identifiers to lowercase"""
598 | text = str(super().visitTerminal(ctx))
599 | quotes = ["'", '"']
600 | if not (text[0] in quotes and text[-1] in quotes):
601 | text = text.lower()
602 | return Terminal.from_text(text, ctx)
603 |
604 |
605 | if __name__ == "__main__":
606 | query = """
607 | SELECT id FROM artists WHERE id > 100
608 | """
609 | parse(query)
610 |
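# Minimal usage sketch for the module above (editorial, not part of the repository
# file): it simply drives the `parse` entry point exercised in the `__main__` block;
# field names on the returned tree follow the `_fields_spec` definitions earlier in
# this file.
from antlr_plsql import ast as plsql_ast

tree = plsql_ast.parse("SELECT id FROM artists WHERE id > 100")
print(repr(tree))  # top-level AliasNode for the parsed script
# clauses are exposed as fields, e.g. getattr(<select node>, "where_clause", None)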
--------------------------------------------------------------------------------
/antlr_plsql/speaker.yml:
--------------------------------------------------------------------------------
1 | nodes:
2 | AliasExpr: 'alias expression'
3 | BinaryExpr: 'binary expression `{node.op}`'
4 | Call: 'function call `{node.name}`'
5 | # Identifier:
6 | JoinExpr: 'join expression'
7 | OrderByExpr: 'order by expression'
8 | SelectStmt: '`SELECT` statement'
9 | SortBy: 'sorting expression'
10 | Star: 'star (`*`)'
11 | UnaryExpr:
12 | name: 'unary expression `{node.op}`'
13 | fields:
14 | expr: 'code'
15 | Union: '`{node.op}`'
16 | fields:
17 | target_list: 'target list'
18 | expr: 'expression'
19 | op: 'operator'
20 | order_by_clause: '`ORDER BY` clause'
21 | into_clause: '`INTO` clause'
22 | from_clause: '`FROM` clause'
23 | where_clause: '`WHERE` clause'
24 | group_by_clause: '`GROUP BY` clause'
25 | having_clause: '`HAVING` clause'
26 | model_clause: '`MODEL` clause'
27 | for_update_clause: '`FOR UPDATE` clause'
28 | limit_clause: '`LIMIT` clause'
29 | over_clause: '`OVER` clause'
30 | left: 'left operand'
31 | right: 'right operand'
32 |
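# Editorial sketch of the templating convention above (hypothetical stand-in class,
# not the real Speaker API): the `{node.<field>}` placeholders read as standard
# str.format templates that are filled with the AST node.
template = "binary expression `{node.op}`"  # entry for BinaryExpr above


class _FakeBinaryExpr:  # hypothetical stand-in for a BinaryExpr AliasNode
    op = ">"


print(template.format(node=_FakeBinaryExpr()))  # -> binary expression `>`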
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | addopts = -m "not examples"
3 |
4 | # TODO: awaiting PR
5 | # https://github.com/RKrahl/pytest-dependency/pull/25
6 | filterwarnings =
7 | ignore::DeprecationWarning
8 |
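# Illustrative sketch only (the real test modules are not reproduced here):
# `addopts = -m "not examples"` deselects tests carrying the `examples` marker by
# default, so they must be requested explicitly with `pytest -m examples`.
import pytest


@pytest.mark.examples  # marker name referenced by addopts above
def test_parses_example_file():
    assert True  # placeholder body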
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | antlr4-python3-runtime~=4.7.2
2 | antlr-ast~=0.7.0
3 | pyyaml~=5.1
4 | pytest~=3.8.1
5 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import re
4 | import ast
5 | from os import path
6 | from setuptools import setup
7 |
8 | PACKAGE_NAME = "antlr_plsql"
9 | REQUIREMENT_NAMES = ["antlr-ast", "antlr4-python3-runtime", "pyyaml"]
10 |
11 | HERE = path.abspath(path.dirname(__file__))
12 | VERSION_FILE = path.join(HERE, PACKAGE_NAME, "__init__.py")
13 | REQUIREMENTS_FILE = path.join(HERE, "requirements.txt")
14 | README_FILE = path.join(HERE, "README.md")
15 |
16 | with open(VERSION_FILE, encoding="utf-8") as fp:
17 | _version_re = re.compile(r"__version__\s+=\s+(.*)")
18 | VERSION = str(ast.literal_eval(_version_re.search(fp.read()).group(1)))
19 | with open(REQUIREMENTS_FILE, encoding="utf-8") as fp:
20 | req_txt = fp.read()
21 | _requirements_re_template = r"^({}(?:\s*[~<>=]+\s*\S*)?)\s*(?:#.*)?$"
22 | REQUIREMENTS = [
23 | re.search(_requirements_re_template.format(requirement), req_txt, re.M).group(0)
24 | for requirement in REQUIREMENT_NAMES
25 | ]
26 | with open(README_FILE, encoding="utf-8") as fp:
27 | README = fp.read()
28 |
29 | setup(
30 | name=PACKAGE_NAME.replace("_", "-"),
31 | version=VERSION,
32 | packages=[PACKAGE_NAME],
33 | install_requires=REQUIREMENTS,
34 | description="A procedural SQL parser, written in ANTLR4",
35 | long_description=README,
36 | long_description_content_type="text/markdown",
37 | author="Michael Chow",
38 | author_email="michael@datacamp.com",
39 | maintainer="Jeroen Hermans",
40 | maintainer_email="content-engineering@datacamp.com",
41 | url="https://github.com/datacamp/antlr-plsql",
42 | include_package_data=True,
43 | classifiers=[
44 | "Programming Language :: Python :: 3",
45 | "License :: OSI Approved :: GNU Affero General Public License v3",
46 | "Operating System :: OS Independent",
47 | ],
48 | )
49 |
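# Editorial check of the requirement-pinning regex used above, run against the
# contents of requirements.txt (illustrative only).
import re

_requirements_re_template = r"^({}(?:\s*[~<>=]+\s*\S*)?)\s*(?:#.*)?$"
req_txt = "antlr4-python3-runtime~=4.7.2\nantlr-ast~=0.7.0\npyyaml~=5.1\npytest~=3.8.1\n"

match = re.search(_requirements_re_template.format("pyyaml"), req_txt, re.M)
print(match.group(0))  # -> pyyaml~=5.1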
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/datacamp/antlr-plsql/d3915a1a3f1f7434b9e8e863367dbfbc1e062acc/tests/__init__.py
--------------------------------------------------------------------------------
/tests/examples/aggregate01.sql:
--------------------------------------------------------------------------------
1 | with
2 | codes2codelocales as
3 | (
4 | select t6.cdl_name as cod_name, t7.cdl_name as cod_cod_name, t14.cod_oid
5 | from servicedesk.itsm_codes t14
6 | left outer join servicedesk.itsm_codes_locale t6 on (t6.cdl_cod_oid=t14.cod_oid)
7 | left outer join servicedesk.itsm_codes_locale t7 on (t7.cdl_cod_oid=t14.cod_cod_oid)
8 | )
9 | , incident as
10 | (
11 | select t1.*,
12 | c2cl1.cod_name as "closure code", c2cl1.cod_cod_name as "closure code parent",
13 | c2cl2.cod_name as "reason caused code", c2cl2.cod_cod_name as "reason caused code parent",
14 | t11.cdl_name "severity", t13.cdl_name "business impact", t16.cdl_name "priority",
15 | t2.rct_name "status", t12.rct_name "category", t99.rct_name "folder"
16 | from servicedesk.itsm_incidents t1
17 | join servicedesk.itsm_codes_locale t11 on (t1.inc_sev_oid=t11.cdl_cod_oid)
18 | join servicedesk.itsm_codes_locale t13 on (t1.inc_imp_oid=t13.cdl_cod_oid)
19 | join servicedesk.itsm_codes_locale t16 on (t1.inc_pri_oid=t16.cdl_cod_oid)
20 | join servicedeskrepo.rep_codes_text t2 on (t1.inc_sta_oid=t2.rct_rcd_oid)
21 | join servicedeskrepo.rep_codes_text t12 on (t1.inc_cat_oid=t12.rct_rcd_oid)
22 | join servicedeskrepo.rep_codes_text t99 on (t1.inc_poo_oid=t99.rct_rcd_oid)
23 | left outer join codes2codelocales c2cl1 on (t1.inc_clo_oid=c2cl1.cod_oid)
24 | left outer join codes2codelocales c2cl2 on (t1.inc_cla_oid=c2cl2.cod_oid)
25 | where t1."reg_created" between sysdate-1 and sysdate
26 | )
27 | , workgrouphistory as
28 | (
29 | select i.inc_id
30 | , max(t101.hin_subject) keep (dense_rank first order by (t101.reg_created)) as "first"
31 | , max(t101.hin_subject) keep (dense_rank last order by (t101.reg_created)) as "last"
32 | from
33 | servicedesk.itsm_historylines_incident t101
34 | join incident i on (t101.hin_inc_oid = i.inc_oid)
35 | -- from servicedesk.itsm_incidents i (t101.hin_inc_oid = i.inc_oid)
36 | where t101.hin_subject like 'to workgroup from%'
37 | -- and i."reg_created" between sysdate-1 and sysdate
38 | group by i.inc_id
39 | )
40 | select
41 | incident.inc_id "id"
42 | ,incident."status"
43 | ,incident.inc_description "description"
44 | ,t4.wog_searchcode "workgroup"
45 | ,t5.per_searchcode "person"
46 | ,incident.inc_solution "solution"
47 | ,incident."closure code"
48 | ,incident."closure code parent"
49 | ,incident."reason caused code"
50 | ,incident."reason caused code parent"
51 | ,t10.cit_searchcode "ci"
52 | ,incident."severity"
53 | ,incident."category"
54 | ,incident."business impact"
55 | ,incident."priority"
56 | ,to_char(incident."reg_created", 'dd-mm-yy hh24:mi:ss') "registered"
57 | ,to_char(incident."inc_deadline", 'dd-mm-yy hh24:mi:ss') "deadline"
58 | ,to_char(incident."inc_actualfinish", 'dd-mm-yy hh24:mi:ss') "finish"
59 | ,t3.icf_incshorttext3 "message group"
60 | ,t3.icf_incshorttext4 "application"
61 | ,t3.icf_incshorttext2 "msg id"
62 | ,incident."folder"
63 | ,workgrouphistory."first" "first wg"
64 | ,workgrouphistory."last" "last wg"
65 | ,t102.hin_subject "frirst pri"
66 | from incident
67 | join servicedesk.itsm_inc_custom_fields t3 on (incident.inc_oid=t3.icf_inc_oid)
68 | join servicedesk.itsm_workgroups t4 on (incident.inc_assign_workgroup=t4.wog_oid)
69 | join workgrouphistory on (incident.inc_id = workgrouphistory.inc_id)
70 | left outer join servicedesk.itsm_persons t5 on (incident.inc_assign_person_to=t5.per_oid)
71 | left outer join servicedesk.itsm_configuration_items t10 on (incident.inc_cit_oid=t10.cit_oid)
72 | left outer join servicedesk.itsm_historylines_incident t102 on (incident.inc_oid = t102.hin_inc_oid and t102.hin_subject like 'priority set to%')
73 |
--------------------------------------------------------------------------------
/tests/examples/analytic_query02.sql:
--------------------------------------------------------------------------------
1 | select time_id, product
2 | , last_value(quantity ignore nulls) over (partition by product order by time_id) quantity
3 | , last_value(quantity respect nulls) over (partition by product order by time_id) quantity
4 | from ( select times.time_id, product, quantity
5 | from inventory partition by (product)
6 | right outer join times on (times.time_id = inventory.time_id)
7 | where times.time_id between to_date('01/04/01', 'dd/mm/yy')
8 | and to_date('06/04/01', 'dd/mm/yy'))
9 | order by 2,1
10 |
11 |
12 |
--------------------------------------------------------------------------------
/tests/examples/analytic_query03.sql:
--------------------------------------------------------------------------------
1 | select times.time_id, product, quantity from inventory
2 | partition by (product)
3 | right outer join times on (times.time_id = inventory.time_id)
4 | where times.time_id between to_date('01/04/01', 'dd/mm/yy')
5 | and to_date('06/04/01', 'dd/mm/yy')
6 | order by 2,1
7 |
8 |
9 |
--------------------------------------------------------------------------------
/tests/examples/analytic_query04.sql:
--------------------------------------------------------------------------------
1 | select deptno
2 | , ename
3 | , hiredate
4 | , listagg(ename, ',') within group (order by hiredate) over (partition by deptno) as employees
5 | from emp
--------------------------------------------------------------------------------
/tests/examples/analytic_query05.sql:
--------------------------------------------------------------------------------
1 | select metric_id ,bsln_guid ,timegroup ,obs_value as obs_value
2 | , cume_dist () over (partition by metric_id, bsln_guid, timegroup order by obs_value ) as cume_dist
3 | , count(1) over (partition by metric_id, bsln_guid, timegroup ) as n
4 | , row_number () over (partition by metric_id, bsln_guid, timegroup order by obs_value) as rrank
5 | , percentile_disc(:b7 ) within group (order by obs_value asc) over (partition by metric_id, bsln_guid, timegroup) as mid_tail_value
6 | , max(obs_value) over (partition by metric_id, bsln_guid, timegroup ) as max_val
7 | , min(obs_value) over (partition by metric_id, bsln_guid, timegroup ) as min_val
8 | , avg(obs_value) over (partition by metric_id, bsln_guid, timegroup ) as avg_val
9 | , stddev(obs_value) over (partition by metric_id, bsln_guid, timegroup ) as sdev_val
10 | , percentile_cont(0.25) within group (order by obs_value asc) over (partition by metric_id, bsln_guid, timegroup) as pctile_25
11 | , percentile_cont(0.5) within group (order by obs_value asc) over (partition by metric_id, bsln_guid, timegroup) as pctile_50
12 | , percentile_cont(0.75) within group (order by obs_value asc) over (partition by metric_id, bsln_guid, timegroup) as pctile_75
13 | , percentile_cont(0.90) within group (order by obs_value asc) over (partition by metric_id, bsln_guid, timegroup) as pctile_90
14 | , percentile_cont(0.95) within group (order by obs_value asc) over (partition by metric_id, bsln_guid, timegroup) as pctile_95
15 | , percentile_cont(0.99) within group (order by obs_value asc) over (partition by metric_id, bsln_guid, timegroup) as pctile_99
16 | from timegrouped_rawdata d
17 |
18 |
--------------------------------------------------------------------------------
/tests/examples/analytic_query06.sql:
--------------------------------------------------------------------------------
1 | select trim(both ' ' from ' a ') from dual where trim(:a) is not null
2 |
--------------------------------------------------------------------------------
/tests/examples/analytic_query07.sql:
--------------------------------------------------------------------------------
1 | with
2 | clus_tab as (
3 | select id,
4 | a.attribute_name aname,
5 | a.conditional_operator op,
6 | nvl(a.attribute_str_value,
7 | round(decode(a.attribute_name, n.col,
8 | a.attribute_num_value * n.scale + n.shift,
9 | a.attribute_num_value),4)) val,
10 | a.attribute_support support,
11 | a.attribute_confidence confidence
12 | from table(dbms_data_mining.get_model_details_km('km_sh_clus_sample')) t,
13 | table(t.rule.antecedent) a,
14 | km_sh_sample_norm n
15 | where a.attribute_name = n.col (+) and a.attribute_confidence > 0.55
16 | ),
17 | clust as (
18 | select id,
19 | cast(collect(cattr(aname, op, to_char(val), support, confidence)) as cattrs) cl_attrs
20 | from clus_tab
21 | group by id
22 | ),
23 | custclus as (
24 | select t.cust_id, s.cluster_id, s.probability
25 | from (select
26 | cust_id
27 | , cluster_set(km_sh_clus_sample, null, 0.2 using *) pset
28 | from km_sh_sample_apply_prepared
29 | where cust_id = 101362) t,
30 | table(t.pset) s
31 | )
32 | select a.probability prob, a.cluster_id cl_id,
33 | b.attr, b.op, b.val, b.supp, b.conf
34 | from custclus a,
35 | (select t.id, c.*
36 | from clust t,
37 | table(t.cl_attrs) c) b
38 | where a.cluster_id = b.id
39 | order by prob desc, cl_id asc, conf desc, attr asc, val asc
40 |
--------------------------------------------------------------------------------
/tests/examples/at_at_binary_expr.sql:
--------------------------------------------------------------------------------
1 | SELECT title, description
2 | FROM film
3 | WHERE to_tsvector(title) @@ to_tsquery('elf');
--------------------------------------------------------------------------------
/tests/examples/bindvar01.sql:
--------------------------------------------------------------------------------
1 | insert into p
2 | (
3 | a1,
4 | b2,
5 | c3,
6 | d4,
7 | e5,
8 | f6,
9 | g7,
10 | h8
11 | )
12 | values
13 | ( :b1, :b2, :b3, :b4, :5, :6, :7, :8)
14 |
15 |
--------------------------------------------------------------------------------
/tests/examples/bindvar02.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from a
3 | where a=:3
4 | and b= : 4
5 | and c= :5and :a = :b
6 |
7 |
--------------------------------------------------------------------------------
/tests/examples/bindvar03.sql:
--------------------------------------------------------------------------------
1 | select count(*), max(scn)
2 | from
3 | (
4 | select sp.bo#, sp.pmoptype, sp.scn, sp.flags
5 | from sumpartlog$ sp, sumdep$ sd
6 | where sd.sumobj# = :1 and sd.p_obj# = sp.bo#
7 | group by sp.bo#, sp.pmoptype, sp.scn, sp.flags
8 | minus
9 | select sp.bo#, sp.pmoptype, sp.scn, sp.flags
10 | from sumpartlog$ sp
11 | where sp.bo# not in
12 | (
13 | select sk.detailobj# from sumkey$ sk where sk.sumobj# = :1 and sk.detailcolfunction in (2,3)
14 | )
15 | and bitand(sp.flags, 2) != 2 and sp.pmoptype in (2,3,5,7)
16 | group by sp.bo#, sp.pmoptype, sp.scn, sp.flags
17 | )
18 | where scn > : 2
19 |
--------------------------------------------------------------------------------
/tests/examples/bindvar04.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from
3 | (
4 | select *
5 | from "rme" "rm"
6 | where "rm".a-interval:"sys_b_07" day(:"sys_b_08") to second(:"sys_b_09")
7 | )
8 |
--------------------------------------------------------------------------------
/tests/examples/bindvar05.sql:
--------------------------------------------------------------------------------
1 | select object_name, object_id,
2 | decode(status, 'INVALID', 'TRUE', 'FALSE') invalid,
3 | 'TRUE' runnable,
4 | plsql_debug
5 | from sys.dba_objects o, dba_plsql_object_settings s
6 | where o.owner = :schema
7 | and s.owner = :schema
8 | and s.name = o.object_name
9 | and s.type = 'PACKAGE'
10 | and object_type = 'PACKAGE'
11 | and subobject_name is null
12 | and object_id not in ( select purge_object from recyclebin )
13 | and upper(object_name) in upper(:name)
14 |
--------------------------------------------------------------------------------
/tests/examples/case_when01.sql:
--------------------------------------------------------------------------------
1 | select
2 | ROOT,LEV,OBJ,LinK,PaTH,cycle,
3 | case
4 | when (LEV - LEaD(LEV) over (order by orD)) < 0 then 0
5 | else 1
6 | end is_LEaF
7 | from T
8 |
9 |
10 |
--------------------------------------------------------------------------------
/tests/examples/case_when02.sql:
--------------------------------------------------------------------------------
1 | select
2 | STaLENESS, OSIZE, OBJ#, TYPE#,
3 | case
4 | when STaLENESS > .5 then 128
5 | when STaLENESS > .1 then 256
6 | else 0
7 | end + aFLaGS aFLaGS,
8 | STaTUS,
9 | SID,
10 | SERIaL#,
11 | PaRT#,
12 | BO#
13 | ,
14 | case
15 | when is_FULL_EVENTS_HisTorY = 1 then SRC.Bor_LasT_STaTUS_TIME
16 | else
17 | case GREaTEST (NVL (WP.Bor_LasT_STaT_TIME, date '1900-01-01'), NVL (SRC.Bor_LasT_STaTUS_TIME, date '1900-01-01'))
18 | when date '1900-01-01' then null
19 | when WP.Bor_LasT_STaT_TIME then WP.Bor_LasT_STaT_TIME
20 | when SRC.Bor_LasT_STaTUS_TIME then SRC.Bor_LasT_STaTUS_TIME
21 | else null
22 | end
23 | end
24 | ,
25 | case GREaTEST (NVL (WP.Bor_LasT_STaT_TIME, date '1900-01-01'), NVL (SRC.Bor_LasT_STaTUS_TIME, date '1900-01-01'))
26 | when date '1900-01-01' then null
27 | when WP.Bor_LasT_STaT_TIME then WP.Bor_LasT_STaT_TIME
28 | when SRC.Bor_LasT_STaTUS_TIME then SRC.Bor_LasT_STaTUS_TIME
29 | else null
30 | end
31 | from X
32 |
33 |
--------------------------------------------------------------------------------
/tests/examples/case_when03.sql:
--------------------------------------------------------------------------------
1 | select
2 | case (STaTUS)
3 | when 'N' then 1
4 | when 'B' then 2
5 | when 'a' then 3
6 | end as STaTE
7 | from VaLUE
8 | where KID=:B2 and RID=:B1
9 |
10 |
--------------------------------------------------------------------------------
/tests/examples/case_when04.sql:
--------------------------------------------------------------------------------
1 | select
2 | case when row_number() over (partition by bo# order by staleness, osize, obj#) = 1 then 32 else 0 end + 64 aflags
3 | from f
4 |
--------------------------------------------------------------------------------
/tests/examples/case_when05.sql:
--------------------------------------------------------------------------------
1 | select staleness
2 | , osize, obj#
3 | , type#
4 | , case when row_number() over (partition by bo# order by staleness, osize, obj#) = 1 then 64 else 0 end
5 | +
6 | case when row_number() over (partition by (select tcp0.bo# from tabcompart$ tcp0 where tcp0.obj#=st0.bo#) order by staleness, osize, obj#) = 1 then 32
7 | else 0 end aflags
8 | , 0 status
9 | , :b3 sid
10 | , :b2 serial#
11 | , part#, bo#
12 | from st0
--------------------------------------------------------------------------------
/tests/examples/cast_multiset01.sql:
--------------------------------------------------------------------------------
1 | select t1.department_id, t2.*
2 | from hr_info t1, table(cast(multiset(
3 | select t3.last_name, t3.department_id, t3.salary
4 | from people t3
5 | where t3.department_id = t1.department_id)
6 | as people_tab_typ)) t2
7 |
8 |
--------------------------------------------------------------------------------
/tests/examples/cast_multiset02.sql:
--------------------------------------------------------------------------------
1 | select title
2 | from
3 | table(select courses from department where name = 'history')
4 | where name like '%etruscan%'
5 |
6 |
7 |
--------------------------------------------------------------------------------
/tests/examples/cast_multiset03.sql:
--------------------------------------------------------------------------------
1 | select
2 | 1
3 | , cursor(select 1 from dual) c1
4 | , cursor(select 2, 3 from dual) as c2
5 | from
6 | table(select 1 from dual)
7 |
8 |
--------------------------------------------------------------------------------
/tests/examples/cast_multiset04.sql:
--------------------------------------------------------------------------------
1 | select e1.last_name from employees e1
2 | where f( cursor(select e2.hire_date from employees e2 where e1.employee_id = e2.manager_id), e1.hire_date) = 1
3 | order by last_name
--------------------------------------------------------------------------------
/tests/examples/cast_multiset05.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from
3 | table
4 | (
5 | function_name()
6 | )
7 |
--------------------------------------------------------------------------------
/tests/examples/cast_multiset06.sql:
--------------------------------------------------------------------------------
1 | select
2 | cast(collect(cattr(aname, op, to_char(val), support, confidence)) as cattrs) cl_attrs
3 | from a
4 |
5 |
--------------------------------------------------------------------------------
/tests/examples/cast_multiset07.sql:
--------------------------------------------------------------------------------
1 | select
2 | "a3"."r_id" "r_id"
3 | from
4 | "pe" "a3",
5 | "me" "a2"
6 | where
7 | "a3"."m_id"="a2"."m_id"
8 | and "a2"."mi_t" =
9 | any
10 | (((
11 | select "a4"."sys$"."id"
12 | from the
13 | (
14 | select "qa"."u_pkg"."getchartable"
15 | (
16 | "qa"."u_pkg"."glist"
17 | (
18 | cursor
19 | (
20 | select "qa"."u_pkg"."glist"
21 | (
22 | cursor
23 | (
24 | select "a6"."mi_t" "mi_t"
25 | from "me" "a6"
26 | connect by "a6"."mi_uid"=prior "a6"."mi_id"
27 | start with "a6"."mi_t"=:b1
28 | )
29 | )
30 | "lst"
31 | from "sys"."dual" "a5"
32 | )
33 | )
34 | )
35 | from dual
36 | )
37 | "a4"
38 | )))
39 |
40 |
--------------------------------------------------------------------------------
/tests/examples/cast_multiset08.sql:
--------------------------------------------------------------------------------
1 | select * from table (cast (f_int_date_varchar2() as table_int_date_varchar2))
2 |
--------------------------------------------------------------------------------
/tests/examples/columns01.sql:
--------------------------------------------------------------------------------
1 | select a, b,
2 | a d,
3 | ddd as ddd,
4 | ddd as "dfdf",
5 | x as
6 | from dual
7 |
8 |
--------------------------------------------------------------------------------
/tests/examples/comment01.sql:
--------------------------------------------------------------------------------
1 | -- com1
2 | select * /*
3 | com2 */
4 | from dual -- com3
5 |
6 |
7 |
--------------------------------------------------------------------------------
/tests/examples/concat_01.sql:
--------------------------------------------------------------------------------
1 | SELECT CONCAT('a', 'b')
2 |
--------------------------------------------------------------------------------
/tests/examples/concat_02.sql:
--------------------------------------------------------------------------------
1 | SELECT CONCAT('a', 'b', 'c', 'd', 'e', 'f')
2 |
--------------------------------------------------------------------------------
/tests/examples/concat_03.sql:
--------------------------------------------------------------------------------
1 | SELECT ltrim(concat(house_num, ' ', street)) AS address
2 | FROM evanston311;
3 |
--------------------------------------------------------------------------------
/tests/examples/condition01.sql:
--------------------------------------------------------------------------------
1 | select department_id, last_name, salary
2 | from employees x
3 | where salary > (select avg(salary)
4 | from employees
5 | where x.department_id = department_id)
6 | order by department_id
7 |
8 |
--------------------------------------------------------------------------------
/tests/examples/condition02.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from employees x
3 | where salary > (select avg(salary) from x)
4 | and 1 = 1
5 | and hiredate = sysdate
6 | and to_yminterval('01-00') < sysdate
7 | and to_yminterval('01-00') + x < sysdate
8 |
9 |
--------------------------------------------------------------------------------
/tests/examples/condition03.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from employees x
3 | where salary > (select avg(salary) from x)
4 | and 1 = 1
5 | and hiredate = sysdate
6 | and to_yminterval('01-00') < sysdate
7 | and to_yminterval('01-00') + x < sysdate
8 | or a=b
9 | and d=e
10 |
11 |
--------------------------------------------------------------------------------
/tests/examples/condition04.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from t
3 | where
4 | (
5 | (
6 | (
7 | (
8 | (
9 | (
10 | (
11 | (
12 | (
13 | (
14 | (
15 | (
16 | (
17 | (
18 | (
19 | ( type = '2' ) or ( type = '3' )
20 | ) and ( t.cde < 20 )
21 | ) and ( t.se = 'xxx' )
22 | ) and ( t.id = '000000000002' )
23 | ) and ( ( t.attr_1 is null ) or ( t.attr_1 = '*' ) )
24 | ) and ( ( t.attr_2 is null ) or ( t.attr_2 = '*' ) )
25 | ) and ( ( t.attr_3 is null ) or ( t.attr_3 = '*' ) )
26 | ) and ( ( t.attr_4 is null ) or ( t.attr_4 = '*' ) )
27 | ) and ( ( t.attr_5 is null ) or ( t.attr_5 = '*' ) )
28 | ) and ( ( t.itype is null ) or ( t.itype = '*' ) )
29 | ) and ( ( t.inbr is null ) or ( t.inbr = '*' ) )
30 | ) and ( ( t.stat = '01' ) or ( t.stat = '*' ) )
31 | ) and ( ( t.orgn is null ) or ( t.orgn = '*' ) )
32 | ) and ( t.mbr = '0000000000001' )
33 | ) and ( t.nbr is null )
34 | )
35 |
36 |
37 |
--------------------------------------------------------------------------------
/tests/examples/condition05.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from t
3 | where
4 | ( t.type = '2' ) or ( t.type = '3' )
5 | and t.cde < 20
6 | and t.se = 'xxx'
7 | and t.id = '000000000002'
8 | and ( ( t.sku_attr_1 is null ) or ( t.sku_attr_1 = '*' ) )
9 | and ( ( t.sku_attr_2 is null ) or ( t.sku_attr_2 = '*' ) )
10 | and ( ( t.sku_attr_3 is null ) or ( t.sku_attr_3 = '*' ) )
11 | and ( ( t.sku_attr_4 is null ) or ( t.sku_attr_4 = '*' ) )
12 | and ( ( t.sku_attr_5 is null ) or ( t.sku_attr_5 = '*' ) )
13 | and ( ( t.itype is null ) or ( t.itype = '*' ) )
14 | and ( ( t.bnbr is null ) or ( t.bnbr = '*' ) )
15 | and ( ( t.stat = '01' ) or ( t.stat = '*' ) )
16 | and ( ( t.orgn is null ) or ( t.orgn = '*' ) )
17 | and ( t.mbr = '0000000000001' )
18 | and ( t.nbr is null )
19 |
20 |
21 |
--------------------------------------------------------------------------------
/tests/examples/condition06.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from t1, t2
3 | where (trunc(t1.timestamp) between to_date('110226','yymmdd') and to_date('110326','yymmdd'))
4 | and t1.code(+) = 'cn'
5 | and t1.id(+)=t2.id
6 | and t1.cid=t2.cid
7 | and t1.mid = 1245714070376993504
8 | and t1.tmst >= to_date('110226','yymmdd')
9 | -- note: this is possible too "column_spec outer_join_sign conditional_operator
10 | and shipper.alt_party_code(+) is null
11 | and t2.code(+) = 'sh'
12 | and t1.sid(+)=t2.sid
13 | and ( ( t1.scode like 'mmm' and t2.scode like 'xax' ) )
14 |
15 |
--------------------------------------------------------------------------------
/tests/examples/condition07.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from append
3 | where
4 | -- note space between '>' and '='
5 | (length(w.numer) > = 8)
6 |
--------------------------------------------------------------------------------
/tests/examples/condition08.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from "p"
3 | where
4 | -- note there are no parens around 231092
5 | ( ( "p"."id" in 231092 ) )
6 |
--------------------------------------------------------------------------------
/tests/examples/condition09.sql:
--------------------------------------------------------------------------------
1 | select
2 | sum(nvl(pl.qty,0))
3 | from
4 | oline ol,
5 | pline pl,
6 | blocation bl
7 | where
8 | ol.id = pl.id
9 | and pl.no = pl.no
10 | and bl.id = pl.id
11 | and
12 | (
13 | (select count(*) from la.sa where pl.id like sa.bid) > 0
14 | or
15 | (select count(*) from la.sa where bl.id like sa.id) > 0
16 | )
17 |
--------------------------------------------------------------------------------
/tests/examples/condition10.sql:
--------------------------------------------------------------------------------
1 | select department_id, last_name, salary
2 | from employees x
3 | where
4 | 1 = 1
5 | and
6 | (
7 | (
8 | HI
9 | )
10 | >
11 | (
12 | .1 * T.ROWCNT
13 | )
14 | )
15 |
16 |
--------------------------------------------------------------------------------
/tests/examples/condition11.sql:
--------------------------------------------------------------------------------
1 | select distinct X
2 | from X,Y,Z
3 | where
4 | X.id = Z.id (+)
5 | and nvl(X.cid, '^') = nvl(Y.clientid (+), '^')
6 | and 0 = Lib.SKU(X.sid, nvl(Z.cid, '^'))
7 |
8 |
--------------------------------------------------------------------------------
/tests/examples/condition12.sql:
--------------------------------------------------------------------------------
1 | select * from v.e
2 | where
3 | cid <> rid
4 | and rid not in
5 | (
6 | (select distinct rid from v.s )
7 | union
8 | (select distinct rid from v.p )
9 | )
10 | and "timestamp" <= 1298505600000
11 |
12 |
--------------------------------------------------------------------------------
/tests/examples/condition14.sql:
--------------------------------------------------------------------------------
1 | select * from dual where trim(sxhnode_key) is not null
2 |
3 |
--------------------------------------------------------------------------------
/tests/examples/condition15.sql:
--------------------------------------------------------------------------------
1 | select
2 | "a3"."r_id" "r_id"
3 | from
4 | "pe" "a3",
5 | "me" "a2"
6 | where
7 | "a3"."m_id"="a2"."m_id"
8 | and "a2"."mi_t" =
9 | any
10 | (((
11 | select "a4"."sys$"."id"
12 | from t "a4"
13 | )))
14 |
--------------------------------------------------------------------------------
/tests/examples/condition16.sql:
--------------------------------------------------------------------------------
1 | select * from persons p
2 | where value(p) is of type(only employee_t)
3 |
--------------------------------------------------------------------------------
/tests/examples/connect_by01.sql:
--------------------------------------------------------------------------------
1 | with o as
2 | (
3 | select 'a' obj, 'b' link from dual union all
4 | select 'a', 'c' from dual union all
5 | select 'c', 'd' from dual union all
6 | select 'd', 'c' from dual union all
7 | select 'd', 'e' from dual union all
8 | select 'e', 'e' from dual
9 | )
10 | select
11 | connect_by_root obj root,
12 | level,
13 | obj,link,
14 | sys_connect_by_path(obj||'->'||link,','),
15 | connect_by_iscycle,
16 | connect_by_isleaf
17 | from o
18 | connect by nocycle obj=prior link
19 | start with obj='a'
20 |
21 |
--------------------------------------------------------------------------------
/tests/examples/connect_by02.sql:
--------------------------------------------------------------------------------
1 | select lpad(' ',2*(level-1)) || last_name org_chart,
2 | employee_id, manager_id, job_id
3 | from employees
4 | start with job_id = 'ad_pres'
5 | connect by prior employee_id = manager_id and level <= 2
6 |
7 |
--------------------------------------------------------------------------------
/tests/examples/connect_by03.sql:
--------------------------------------------------------------------------------
1 | select lpad(' ',2*(level-1)) || last_name org_chart,
2 | employee_id, manager_id, job_id
3 | from employees
4 | where job_id != 'fi_mgr'
5 | start with job_id = 'ad_vp'
6 | connect by prior employee_id = manager_id
7 |
8 |
--------------------------------------------------------------------------------
/tests/examples/connect_by04.sql:
--------------------------------------------------------------------------------
1 | select lpad(' ',2*(level-1)) || last_name org_chart,
2 | employee_id, manager_id, job_id
3 | from employees
4 | start with job_id = 'ad_vp'
5 | connect by prior employee_id = manager_id
6 |
7 |
8 |
--------------------------------------------------------------------------------
/tests/examples/connect_by05.sql:
--------------------------------------------------------------------------------
1 | with liste as (
2 | select substr(:liste, instr(','||:liste||',', ',', 1, rn),
3 | instr(','||:liste||',', ',', 1, rn+1) -
4 | instr(','||:liste||',', ',', 1, rn)-1) valeur
5 | from (
6 | select rownum rn from dual
7 | connect by level<=length(:liste) - length(replace(:liste,',',''))+1))
8 | select trim(valeur)
9 | from liste
10 |
--------------------------------------------------------------------------------
/tests/examples/datetime01.sql:
--------------------------------------------------------------------------------
1 | select from_tz(cast(to_date('1999-12-01 11:00:00','yyyy-mm-dd hh:mi:ss') as timestamp), 'america/new_york') at time zone 'america/los_angeles' "west coast time" from dual
2 |
--------------------------------------------------------------------------------
/tests/examples/datetime02.sql:
--------------------------------------------------------------------------------
1 | select
2 | dbin.db_name,
3 | dbin.instance_name,
4 | dbin.version,
5 | case when s1.startup_time = s2.startup_time then 0 else 1 end as bounce,
6 | cast(s1.end_interval_time as date) as begin_time,
7 | cast(s2.end_interval_time as date) as end_time,
8 | round((cast( (case when s2.end_interval_time > s1.end_interval_time then s2.end_interval_time else s1.end_interval_time end) as date)
9 | - cast(s1.end_interval_time as date)) * 86400) as int_secs,
10 | case when (s1.status <> 0 or s2.status <> 0) then 1 else 0 end as err_detect,
11 | round( greatest( (extract(day from s2.flush_elapsed) * 86400)
12 | + (extract(hour from s2.flush_elapsed) * 3600)
13 | + (extract(minute from s2.flush_elapsed) * 60)
14 | + extract(second from s2.flush_elapsed),
15 | (extract(day from s1.flush_elapsed) * 86400)
16 | + (extract(hour from s1.flush_elapsed) * 3600)
17 | + (extract(minute from s1.flush_elapsed) * 60)
18 | + extract(second from s1.flush_elapsed),0 )
19 | ) as max_flush_secs
20 | from wrm$_snapshot s1 , wrm$_database_instance dbin , wrm$_snapshot s2
21 | where s1.dbid = :dbid
22 | and s2.dbid = :dbid
23 | and s1.instance_number = :inst_num
24 | and s2.instance_number = :inst_num
25 | and s1.snap_id = :bid
26 | and s2.snap_id = :eid
27 | and dbin.dbid = s1.dbid
28 | and dbin.instance_number = s1.instance_number
29 | and dbin.startup_time = s1.startup_time
30 |
31 |
--------------------------------------------------------------------------------
/tests/examples/datetime03.sql:
--------------------------------------------------------------------------------
1 | select
2 | timestamp '2009-10-29 01:30:00' at time zone 'us/pacific'from dual
3 |
4 |
--------------------------------------------------------------------------------
/tests/examples/datetime04.sql:
--------------------------------------------------------------------------------
1 | select
2 | timestamp '2009-10-29 01:30:00'
3 | from dual
4 |
5 |
--------------------------------------------------------------------------------
/tests/examples/datetime05.sql:
--------------------------------------------------------------------------------
1 | select date '1900-01-01' from dual
2 |
--------------------------------------------------------------------------------
/tests/examples/dblink01.sql:
--------------------------------------------------------------------------------
1 | select last_name, department_name
2 | from employees@remote, departments
3 | where employees.department_id = departments.department_id
4 |
5 |
6 |
--------------------------------------------------------------------------------
/tests/examples/explain01.sql:
--------------------------------------------------------------------------------
1 | explain plan
2 | set statement_id = 'raise in tokyo'
3 | into plan_table
4 | for update employees
5 | set salary = salary * 1.10
6 | where department_id =
7 | (select department_id from departments
8 | where location_id = 1700)
9 |
--------------------------------------------------------------------------------
/tests/examples/explain_example_1.sql:
--------------------------------------------------------------------------------
1 | EXPLAIN SELECT * FROM test;
--------------------------------------------------------------------------------
/tests/examples/explain_example_2.sql:
--------------------------------------------------------------------------------
1 | EXPLAIN
2 | SELECT
3 | f.film_id,
4 | title,
5 | name category_name
6 | FROM
7 | film f
8 | INNER JOIN film_category fc
9 | ON fc.film_id = f.film_id
10 | INNER JOIN category c
11 | ON c.category_id = fc.category_id
12 | ORDER BY
13 | title;
--------------------------------------------------------------------------------
/tests/examples/explain_example_3.sql:
--------------------------------------------------------------------------------
1 | EXPLAIN ANALYZE
2 | SELECT
3 | f.film_id,
4 | title,
5 | name category_name
6 | FROM
7 | film f
8 | INNER JOIN film_category fc
9 | ON fc.film_id = f.film_id
10 | INNER JOIN category c
11 | ON c.category_id = fc.category_id
12 | ORDER BY
13 | title;
--------------------------------------------------------------------------------
/tests/examples/explain_example_4.sql:
--------------------------------------------------------------------------------
1 | EXPLAIN VERBOSE
2 | SELECT
3 | f.film_id,
4 | title,
5 | name category_name
6 | FROM
7 | film f
8 | INNER JOIN film_category fc
9 | ON fc.film_id = f.film_id
10 | INNER JOIN category c
11 | ON c.category_id = fc.category_id
12 | ORDER BY
13 | title;
--------------------------------------------------------------------------------
/tests/examples/explain_example_5.sql:
--------------------------------------------------------------------------------
1 | EXPLAIN ANALYZE VERBOSE
2 | SELECT
3 | f.film_id,
4 | title,
5 | name category_name
6 | FROM
7 | film f
8 | INNER JOIN film_category fc
9 | ON fc.film_id = f.film_id
10 | INNER JOIN category c
11 | ON c.category_id = fc.category_id
12 | ORDER BY
13 | title;
--------------------------------------------------------------------------------
/tests/examples/extract_example.sql:
--------------------------------------------------------------------------------
1 | SELECT EXTRACT('month' FROM payment_date) AS month
2 | FROM payment
--------------------------------------------------------------------------------
/tests/examples/flashback01.sql:
--------------------------------------------------------------------------------
1 | select value(p$) from "XDB"."XDB$SCHEMA" as of snapshot(:2) p$ where SYS_NC_OID$ = :1
2 |
--------------------------------------------------------------------------------
/tests/examples/for_update01.sql:
--------------------------------------------------------------------------------
1 | select employee_id from (select * from employees)
2 | for update of employee_id
3 |
4 |
5 |
--------------------------------------------------------------------------------
/tests/examples/for_update02.sql:
--------------------------------------------------------------------------------
1 | select employee_id from (select employee_id+1 as employee_id from employees)
2 | for update
3 |
4 |
5 |
--------------------------------------------------------------------------------
/tests/examples/for_update03.sql:
--------------------------------------------------------------------------------
1 | select employee_id from (select employee_id+1 as employee_id from employees)
2 | for update of employee_id
3 |
4 |
5 |
--------------------------------------------------------------------------------
/tests/examples/for_update04.sql:
--------------------------------------------------------------------------------
1 | select employee_id from (select employee_id+1 as employee_id from employees)
2 | for update of employee_id nowait
3 |
4 |
--------------------------------------------------------------------------------
/tests/examples/for_update05.sql:
--------------------------------------------------------------------------------
1 | select employee_id from (select employee_id+1 as employee_id from employees)
2 | for update of employee_id wait 10
3 |
4 |
--------------------------------------------------------------------------------
/tests/examples/for_update06.sql:
--------------------------------------------------------------------------------
1 | select employee_id from (select employee_id+1 as employee_id from employees)
2 | for update of employee_id skip locked
3 |
4 |
--------------------------------------------------------------------------------
/tests/examples/for_update07.sql:
--------------------------------------------------------------------------------
1 | select employee_id from (select employee_id+1 as employee_id from employees)
2 | for update of a, b.c, d skip locked
3 |
4 |
--------------------------------------------------------------------------------
/tests/examples/for_update08.sql:
--------------------------------------------------------------------------------
1 | select su.ttype ,su.cid ,su.s_id ,sessiontimezone
2 | from sku su
3 | where (nvl(su.up,'n')='n' and su.ttype=:b0)
4 | for update of su.up
5 | order by su.d
6 |
--------------------------------------------------------------------------------
/tests/examples/function01.sql:
--------------------------------------------------------------------------------
1 | select decode(decode(decode( (select count(1) from dual), a, 1, 0), 0, 1), 1, 0) from dual
2 | -- select decode(decode(decode(decode(x, 0, 1) , 0, 1), 1, 0 ), 0, 1) from dual
3 |
--------------------------------------------------------------------------------
/tests/examples/function02.sql:
--------------------------------------------------------------------------------
1 | select set(x) from dual
2 |
--------------------------------------------------------------------------------
/tests/examples/function03.sql:
--------------------------------------------------------------------------------
1 | select trim(both from con.ke)
2 | from dual
3 |
4 |
5 |
--------------------------------------------------------------------------------
/tests/examples/function04.sql:
--------------------------------------------------------------------------------
1 | select lnnvl( 2 > 1) from dual
2 |
--------------------------------------------------------------------------------
/tests/examples/function05.sql:
--------------------------------------------------------------------------------
1 | select count(*)
2 | from employees
3 | where lnnvl(commission_pct >= .2)
4 |
--------------------------------------------------------------------------------
/tests/examples/groupby01.sql:
--------------------------------------------------------------------------------
1 | select :b3 as l_snap_id , :b2 as p_dbid , :b1 as p_instance_number , nvl(pid, -9) pid , nvl(serial#, -9) serial# , decode(pid, null, null, max(spid)) spid ,
2 | decode(pid, null, null, max(program)) program , decode(pid, null, null, max(background)) background , sum(pga_used_mem) pga_used_mem ,
3 | sum(pga_alloc_mem) pga_alloc_mem , sum(pga_freeable_mem) pga_freeable_mem , max(pga_alloc_mem) max_pga_alloc_mem , max(pga_max_mem) max_pga_max_mem ,
4 | decode(pid, null, avg(pga_alloc_mem), null) avg_pga_alloc_mem , decode(pid, null, stddev(pga_alloc_mem), null) stddev_pga_alloc_mem ,
5 | decode(pid, null, count(pid), null) num_processes
6 | from v$process
7 | where program != 'pseudo'
8 | group by grouping sets ( (pid, serial#), () )
9 |
10 |
--------------------------------------------------------------------------------
/tests/examples/groupby02.sql:
--------------------------------------------------------------------------------
1 | select
2 | decode(pid, null, null, max(program)) program , decode(pid, null, null, max(background)) background , sum(pga_used_mem) pga_used_mem ,
3 | sum(pga_alloc_mem) pga_alloc_mem , sum(pga_freeable_mem) pga_freeable_mem , max(pga_alloc_mem) max_pga_alloc_mem , max(pga_max_mem) max_pga_max_mem ,
4 | decode(pid, null, avg(pga_alloc_mem), null) avg_pga_alloc_mem , decode(pid, null, stddev(pga_alloc_mem), null) stddev_pga_alloc_mem ,
5 | decode(pid, null, count(pid), null) num_processes
6 | from v$process
7 | where program != 'pseudo'
8 | group by grouping sets ( (), ((pid+1), serial#) )
9 |
10 |
11 |
--------------------------------------------------------------------------------
/tests/examples/groupby03.sql:
--------------------------------------------------------------------------------
1 | select * from x
2 | group by grouping sets
3 | ( a, 1 )
4 |
5 |
--------------------------------------------------------------------------------
/tests/examples/groupby04.sql:
--------------------------------------------------------------------------------
1 | select * from x
2 | group by grouping sets
3 | ( (a),1 )
4 |
--------------------------------------------------------------------------------
/tests/examples/groupby05.sql:
--------------------------------------------------------------------------------
1 | select * from x
2 | group by grouping sets
3 | ( ((a),b), ((a),b) )
4 |
5 |
--------------------------------------------------------------------------------
/tests/examples/groupby06.sql:
--------------------------------------------------------------------------------
1 | select
2 | prod_category, prod_subcategory, country_id, cust_city, count(*)
3 | from products, sales, customers
4 | where sales.prod_id = products.prod_id
5 | and sales.cust_id=customers.cust_id
6 | and sales.time_id = '01-oct-00'
7 | and customers.cust_year_of_birth between 1960 and 1970
8 | group by grouping sets
9 | (
10 | (prod_category, prod_subcategory, country_id, cust_city),
11 | (prod_category, prod_subcategory, country_id),
12 | (prod_category, prod_subcategory),
13 | country_id
14 | )
15 | order by prod_category, prod_subcategory, country_id, cust_city
16 |
--------------------------------------------------------------------------------
/tests/examples/groupby07.sql:
--------------------------------------------------------------------------------
1 | select decode((tt || tc), '56', count(distinct cn), '57', sum(nu)) as q
2 | from t
3 | where tt='500'
4 | and tc in ('6','7')
5 | and to_char(c,'mm') = '03'
6 | having sum(nu) > 0
7 | group by tn, ui, (tt || tc)
8 | order by 1
9 |
--------------------------------------------------------------------------------
/tests/examples/interval01.sql:
--------------------------------------------------------------------------------
1 | select (systimestamp - order_date) day(9) to second from orders
2 | where order_id = 2458
3 |
--------------------------------------------------------------------------------
/tests/examples/interval02.sql:
--------------------------------------------------------------------------------
1 | select interval '42' day from dual
2 |
--------------------------------------------------------------------------------
/tests/examples/interval03.sql:
--------------------------------------------------------------------------------
1 | select
2 | interval '4 5:12:10.222' day to second(3)
3 | ,interval '4 5:12' day to minute
4 | ,interval '400 5' day(3) to hour
5 | ,interval '400' day(3)
6 | ,interval '11:12:10.2222222' hour to second(7)
7 | ,interval '11:20' hour to minute
8 | ,interval '10' hour
9 | ,interval '10:22' minute to second
10 | ,interval '10' minute
11 | ,interval '4' day
12 | ,interval '25' hour
13 | ,interval '40' minute
14 | ,interval '120' hour(3)
15 | ,interval '30.12345' second(2,4)
16 | ,interval :a day
17 | from dual
18 |
--------------------------------------------------------------------------------
/tests/examples/interval04.sql:
--------------------------------------------------------------------------------
1 | select interval'20' day - interval'240' hour from dual
2 |
--------------------------------------------------------------------------------
/tests/examples/join01.sql:
--------------------------------------------------------------------------------
1 | select d.department_id as d_dept_id, e.department_id as e_dept_id, e.last_name
2 | from departments d full outer join employees e
3 | on d.department_id = e.department_id
4 | order by d.department_id, e.last_name
5 |
6 |
--------------------------------------------------------------------------------
/tests/examples/join02.sql:
--------------------------------------------------------------------------------
1 | select department_id as d_e_dept_id, e.last_name
2 | from departments d full outer join employees e
3 | using (department_id)
4 | order by department_id, e.last_name
5 |
6 |
7 |
--------------------------------------------------------------------------------
/tests/examples/join03.sql:
--------------------------------------------------------------------------------
1 | select d.department_id, e.last_name
2 | from m.departments d right outer join n.employees e
3 | on d.department_id = e.department_id
4 | order by d.department_id, e.last_name
5 |
6 |
7 |
--------------------------------------------------------------------------------
/tests/examples/join04.sql:
--------------------------------------------------------------------------------
1 | select d.department_id, e.last_name
2 | from departments d, employees e
3 | where d.department_id = e.department_id(+)
4 | order by d.department_id, e.last_name
5 |
6 |
7 |
--------------------------------------------------------------------------------
/tests/examples/join05.sql:
--------------------------------------------------------------------------------
1 | select times.time_id, product, quantity from inventory
2 | partition by (product)
3 | right outer join times on (times.time_id = inventory.time_id)
4 | where times.time_id between to_date('01/04/01', 'dd/mm/yy')
5 | and to_date('06/04/01', 'dd/mm/yy')
6 | order by 2,1
7 |
8 |
9 |
--------------------------------------------------------------------------------
/tests/examples/join06.sql:
--------------------------------------------------------------------------------
1 | select * from dual d1
2 | join dual d2 on (d1.dummy = d2.dummy)
3 | join dual d3 on(d1.dummy = d3.dummy)
4 | join dual on(d1.dummy = dual.dummy)
5 |
6 |
--------------------------------------------------------------------------------
/tests/examples/join07.sql:
--------------------------------------------------------------------------------
1 | select * from t1
2 | join t2 tt2 using(c)
3 | join t3 tt3 using(d)
4 | join t3 using(d)
5 |
6 |
--------------------------------------------------------------------------------
/tests/examples/join08.sql:
--------------------------------------------------------------------------------
1 | select * from dual t1
2 | join (select * from dual) tt2 using(dummy)
3 | join (select * from dual) using(dummy)
4 | join (select * from dual) d on(d.dummy=tt3.dummy)
5 | inner join (select * from dual) tt2 using(dummy)
6 | inner join (select * from dual) using(dummy)
7 | inner join (select * from dual) d on(d.dummy=t1.dummy)
8 |
9 |
--------------------------------------------------------------------------------
/tests/examples/join09.sql:
--------------------------------------------------------------------------------
1 | select * from dual t1
2 | left outer join (select * from dual) tt2 using(dummy)
3 | left outer join (select * from dual) using(dummy)
4 | left outer join (select * from dual) d on(d.dummy=tt3.dummy)
5 | inner join (select * from dual) tt2 using(dummy)
6 | inner join (select * from dual) using(dummy)
7 | inner join (select * from dual) d on(d.dummy=t1.dummy)
8 |
9 |
--------------------------------------------------------------------------------
/tests/examples/join10.sql:
--------------------------------------------------------------------------------
1 | select * from dual t1,
2 | (
3 | dual left outer join (select * from dual) tt2 using(dummy)
4 | )
5 |
6 |
--------------------------------------------------------------------------------
/tests/examples/join11.sql:
--------------------------------------------------------------------------------
1 | select * from t1, ( t2 left outer join t3 using(dummy) )
2 |
--------------------------------------------------------------------------------
/tests/examples/join12.sql:
--------------------------------------------------------------------------------
1 | select * from dual,( dual left outer join tt2 using(dummy) )
2 |
3 |
--------------------------------------------------------------------------------
/tests/examples/join13.sql:
--------------------------------------------------------------------------------
1 | select * from t1,
2 | ((((
3 | t2 left outer join t3 using(dummy)
4 | ))))
5 |
--------------------------------------------------------------------------------
/tests/examples/join14.sql:
--------------------------------------------------------------------------------
1 | select * from dual t1,
2 | (
3 | (
4 | (
5 | dual t2 join dual t3 using(dummy) )
6 | left outer join dual t4 using(dummy) )
7 | left outer join dual t5 using(dummy) )
8 |
9 |
10 |
--------------------------------------------------------------------------------
/tests/examples/join15.sql:
--------------------------------------------------------------------------------
1 | select * from dual t1, ( dual t2 join dual t3 using(dummy)) left outer join dual t4 using (dummy)
2 |
3 |
4 |
--------------------------------------------------------------------------------
/tests/examples/join16.sql:
--------------------------------------------------------------------------------
1 | select * from dual t1,
2 | dual t2 join dual t3 using(dummy)
3 | left outer join dual t4 using(dummy)
4 | left outer join dual t5 using(dummy)
5 |
6 |
--------------------------------------------------------------------------------
/tests/examples/join17.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from hdr a
3 | inner join sh s
4 | inner join ca c
5 | on c.id = s.id
6 | on a.va = s.va
7 |
8 |
--------------------------------------------------------------------------------
/tests/examples/join18.sql:
--------------------------------------------------------------------------------
1 | select department_id as d_e_dept_id, e.last_name
2 | from departments
3 | full outer join employees on (a=b)
4 | left outer join employees on (a=b)
5 | right outer join employees on (a=b)
6 | join employees on (a=b)
7 | inner join employees on (a=b)
8 | cross join employees
9 | natural join employees
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/tests/examples/join19.sql:
--------------------------------------------------------------------------------
1 | select d1.*, d2.* from dual d1 cross join dual d2
2 |
--------------------------------------------------------------------------------
/tests/examples/join20.sql:
--------------------------------------------------------------------------------
1 | select d1.*, d2.* from dual cross join dual
2 |
--------------------------------------------------------------------------------
/tests/examples/join21.sql:
--------------------------------------------------------------------------------
1 | select * from sys.dual natural join sys.dual
2 |
3 |
--------------------------------------------------------------------------------
/tests/examples/keywordasidentifier01.sql:
--------------------------------------------------------------------------------
1 | select timestamp, avg, cume_dist from nulls
2 |
--------------------------------------------------------------------------------
/tests/examples/keywordasidentifier02.sql:
--------------------------------------------------------------------------------
1 | select m.model from model
2 |
--------------------------------------------------------------------------------
/tests/examples/keywordasidentifier03.sql:
--------------------------------------------------------------------------------
1 | select ind.index_owner,ind.index_name,ind.uniqueness
2 | , ind.status,ind.index_type,ind.temporary, ind.partitioned,ind.funcidx_status
3 | , ind.join_index,ind.columns,ie.column_expression
4 | , ind.index_name sdev_link_name,'INDEX' sdev_link_type, ind.index_owner sdev_link_owner
5 | from
6 | (
7 | select index_owner,table_owner,index_name,uniqueness, status,index_type,temporary, partitioned,funcidx_status, join_index
8 | , max(decode(position,1 ,column_name))|| max(decode(position,2 ,', '||column_name))|| max(decode(position,3 ,', '||column_name))|| max(decode(position,4 ,', '||column_name))|| max(decode(position,5 ,', '||column_name))|| max(decode(position,6 ,', '||column_name))|| max(decode(position,7 ,', '||column_name))|| max(decode(position,8 ,', '||column_name))|| max(decode(position,9 ,', '||column_name))|| max(decode(position,10,', '||column_name)) columns
9 |
10 | from
11 | (
12 | select di.owner index_owner,dc.table_owner,dc.index_name,di.uniqueness, di.status, di.index_type, di.temporary, di.partitioned,di.funcidx_status, di.join_index
13 | , dc.column_name,dc.column_position position
14 | from all_ind_columns dc,all_indexes di
15 | where dc.table_owner = :OBJECT_OWNER
16 | and dc.table_name = :OBJECT_NAME
17 | and dc.index_name = di.index_name
18 | and dc.index_owner = di.owner
19 | )
20 | group by index_owner,table_owner,index_name,uniqueness, status, index_type, temporary, partitioned,funcidx_status, join_index
21 | ) ind,
22 | ALL_IND_EXPRESSIONS ie
23 | where ind.index_name = ie.index_name(+)
24 | and ind.index_owner = ie.index_owner(+)
25 |
--------------------------------------------------------------------------------
/tests/examples/keywordasidentifier04.sql:
--------------------------------------------------------------------------------
1 | select bs.keep keep, bs.keep_until keep_until
2 | from v$backup_set bs
3 | union all
4 | select null keep, null keep_until
5 | from v$backup_piece bp
6 |
7 |
--------------------------------------------------------------------------------
/tests/examples/keywordasidentifier05.sql:
--------------------------------------------------------------------------------
1 | select exception from exception
--------------------------------------------------------------------------------
/tests/examples/lexer01.sql:
--------------------------------------------------------------------------------
1 | select * from dual where 1 < > 2 and 1 ! = 2 and 1 ^ /*aaa */ = 2
2 |
3 |
--------------------------------------------------------------------------------
/tests/examples/lexer02.sql:
--------------------------------------------------------------------------------
1 | select 'A' | | 'B' from dual
2 |
3 |
--------------------------------------------------------------------------------
/tests/examples/lexer03.sql:
--------------------------------------------------------------------------------
1 | select :1, :X, :1 + 1, 1 + :2 from A where A=:3 and b= : 4 and c= :5and :A = :b
2 |
3 |
--------------------------------------------------------------------------------
/tests/examples/lexer04.sql:
--------------------------------------------------------------------------------
1 | select tbl$or$idx$part$num("sys"."wrh$_seg_stat",0,4,0,"rowid") as c1 from t1
2 |
--------------------------------------------------------------------------------
/tests/examples/lexer05.sql:
--------------------------------------------------------------------------------
1 | select tbl$or$idx$part$num("sys"."wrh:_seg_stat",0,4,0,"rowid") as c1 from t1
2 |
--------------------------------------------------------------------------------
/tests/examples/like01.sql:
--------------------------------------------------------------------------------
1 | select last_name
2 | from employees
3 | where last_name
4 | like '%a\_b%' escape '\'
5 | order by last_name
6 |
--------------------------------------------------------------------------------
/tests/examples/merge01.sql:
--------------------------------------------------------------------------------
1 | merge into bonuses d
2 | using (select employee_id.* from employees) s
3 | on (employee_id = a)
4 | when matched then update set d.bonus = bonus
5 | delete where (salary > 8000)
6 | when not matched then insert (d.employee_id, d.bonus)
7 | values (s.employee_id, s.salary)
8 | where (s.salary <= 8000)
9 |
10 |
--------------------------------------------------------------------------------
/tests/examples/merge02.sql:
--------------------------------------------------------------------------------
1 | merge into bonuses d
2 | using (select employee_id.* from employees) s
3 | on (employee_id = a)
4 | when not matched then insert (d.employee_id, d.bonus)
5 | values (s.employee_id, s.salary)
6 | where (s.salary <= 8000)
7 | when matched then update set d.bonus = bonus
8 | delete where (salary > 8000)
9 |
10 |
--------------------------------------------------------------------------------
/tests/examples/merge03.sql:
--------------------------------------------------------------------------------
1 | merge /*+ dynamic_sampling(mm 4) dynamic_sampling_est_cdn(mm)
2 | dynamic_sampling(m 4) dynamic_sampling_est_cdn(m) */
3 |
4 | into sys.mon_mods_all$ mm
5 | using
6 | (
7 | select decode(grouping_id(tp.bo#,tsp.pobj#,m.obj#),3,tp.bo#,1,tsp.pobj#,m.obj#) obj#, sum(m.inserts) inserts, sum(m.updates) updates, sum(m.deletes) deletes,
8 | decode(sum(bitand(m.flags,1)),0,0,1) +decode(sum(bitand(m.flags,2)),0,0,2) +decode(sum(bitand(m.flags,4)),0,0,4) flags, sum(m.drop_segments) drop_segments
9 | from sys.mon_mods$ m, sys.tabcompart$ tp, sys.tabsubpart$ tsp
10 | where m.obj# = tsp.obj# and tp.obj# = tsp.pobj#
11 | group by rollup(tp.bo#,tsp.pobj#,m.obj#) having grouping_id(tp.bo#,tsp.pobj#,m.obj#) < 7
12 | ) v on
13 | (mm.obj# = v.obj#)
14 | when matched then
15 | update set mm.inserts = mm.inserts + v.inserts, mm.updates = mm.updates + v.updates, mm.deletes = mm.deletes + v.deletes,
16 | mm.flags = mm.flags + v.flags - bitand(mm.flags,v.flags) , mm.drop_segments = mm.drop_segments + v.drop_segments
17 | when not matched then insert values (v.obj#, v.inserts, v.updates, v.deletes, sysdate, v.flags, v.drop_segments)
18 |
--------------------------------------------------------------------------------
/tests/examples/merge04.sql:
--------------------------------------------------------------------------------
1 | merge /*+ dynamic_sampling(mm 4) dynamic_sampling_est_cdn(mm)
2 | dynamic_sampling(m 4) dynamic_sampling_est_cdn(m) */
3 |
4 | into sys.mon_mods_all$ mm
5 | using
6 | (
7 | select decode(grouping_id(tp.bo#,tsp.pobj#,m.obj#),3,tp.bo#,1,tsp.pobj#,m.obj#) obj#, sum(m.inserts) inserts, sum(m.updates) updates, sum(m.deletes) deletes,
8 | decode(sum(bitand(m.flags,1)),0,0,1) +decode(sum(bitand(m.flags,2)),0,0,2) +decode(sum(bitand(m.flags,4)),0,0,4) flags, sum(m.drop_segments) drop_segments
9 | from sys.mon_mods$ m, sys.tabcompart$ tp, sys.tabsubpart$ tsp
10 | where m.obj# = tsp.obj# and tp.obj# = tsp.pobj#
11 | group by rollup(tp.bo#,tsp.pobj#,m.obj#) having grouping_id(tp.bo#,tsp.pobj#,m.obj#) < 7
12 | order by 1, 2, 3
13 | ) v on
14 | (mm.obj# = v.obj#)
15 | when matched then
16 | update set mm.inserts = mm.inserts + v.inserts, mm.updates = mm.updates + v.updates, mm.deletes = mm.deletes + v.deletes,
17 | mm.flags = mm.flags + v.flags - bitand(mm.flags,v.flags) , mm.drop_segments = mm.drop_segments + v.drop_segments
18 | when not matched then insert values (v.obj#, v.inserts, v.updates, v.deletes, sysdate, v.flags, v.drop_segments)
19 |
--------------------------------------------------------------------------------
/tests/examples/model_clause01.sql:
--------------------------------------------------------------------------------
1 | select country,prod,year,s
2 | from sales_view_ref
3 | model
4 | partition by (country)
5 | dimension by (prod, year)
6 | measures (sale s)
7 | ignore nav
8 | -- cell_reference_options
9 | unique dimension
10 | -- here starts model_rules_clause
11 | rules upsert sequential order
12 | (
13 | s[prod='mouse pad', year=2001] = s['mouse pad', 1999] + s['mouse pad', 2000],
14 | s['standard mouse', 2002] = s['standard mouse', 2001]
15 | )
16 | order by country, prod, year
17 |
18 |
19 |
--------------------------------------------------------------------------------
/tests/examples/model_clause02.sql:
--------------------------------------------------------------------------------
1 | select country, year, sale, csum
2 | from
3 | (select country, year, sum(sale) sale
4 | from sales_view_ref
5 | group by country, year
6 | )
7 | model dimension by (country, year)
8 | measures (sale, 0 csum)
9 | rules (csum[any, any]=
10 | sum(sale) over (partition by country
11 | order by year
12 | rows unbounded preceding)
13 | )
14 | order by country, year
15 |
16 |
17 |
--------------------------------------------------------------------------------
/tests/examples/model_clause03.sql:
--------------------------------------------------------------------------------
1 | select country,prod,year,s
2 | from sales_view_ref
3 | model
4 | partition by (country)
5 | dimension by (prod, year)
6 | measures (sale s)
7 | ignore nav
8 | unique dimension
9 | rules upsert sequential order
10 | (
11 | s[prod='mouse pad'] = 1,
12 | s['standard mouse'] = 2
13 | )
14 | order by country, prod, year
15 |
16 |
--------------------------------------------------------------------------------
/tests/examples/model_clause04.sql:
--------------------------------------------------------------------------------
1 | select country, year, sale, csum
2 | from
3 | (select country, year, salex sale
4 | from sales_view_ref
5 | group by country, year
6 | )
7 | model dimension by (country, year)
8 | measures (sale, 0 csum)
9 | rules
10 | (
11 | s['standard mouse'] = 2
12 | )
13 | order by country, year
14 |
15 |
--------------------------------------------------------------------------------
/tests/examples/model_clause05.sql:
--------------------------------------------------------------------------------
1 | select country, year, sale, csum
2 | from
3 | (select country, year, salex sale
4 | from sales_view_ref
5 | group by country, year
6 | ) m
7 | model dimension by (country, year)
8 | measures (sale, 0 csum)
9 | rules
10 | (
11 | s['standard mouse'] = 2
12 | )
13 | order by country, year
14 |
15 |
--------------------------------------------------------------------------------
/tests/examples/multiple_values.sql:
--------------------------------------------------------------------------------
1 | INSERT INTO oscars (title, award)
2 | VALUES
3 | ('TRANSLATION SUMMER', 'Best Film'),
4 | ('DORADO NOTTING', 'Best Film');
--------------------------------------------------------------------------------
/tests/examples/numbers01.sql:
--------------------------------------------------------------------------------
1 | select 25
2 | , +6.34
3 | , 0.5
4 | , 25e-03
5 | , -1 -- Here are some valid floating-point number literals:
6 | , 25f
7 | , +6.34F
8 | , 0.5d
9 | , -1D
10 | , (sysdate -1d) -- here we substract "one" in decimal format
11 | , sysdate -1m -- here we substract "one" and "m" is column's alias
12 | , sysdate -1dm
13 | from dual
14 |
15 |
--------------------------------------------------------------------------------
/tests/examples/object_access01.sql:
--------------------------------------------------------------------------------
1 | select
2 | extractvalue(value(t), '/select_list_item/pos') + 1 pos,
3 | extractvalue(value(t), '/select_list_item/value') res,
4 | extractvalue(value(t), '/select_list_item/nonnulls') nonnulls,
5 | extractvalue(value(t), '/select_list_item/ndv') ndv,
6 | extractvalue(value(t), '/select_list_item/split') split,
7 | extractvalue(value(t), '/select_list_item/rsize') rsize,
8 | extractvalue(value(t), '/select_list_item/rowcnt') rowcnt,
9 | extract(value(t), '/select_list_item/hash_val').getclobval() hashval
10 | from
11 | table
12 | (
13 | xmlsequence
14 | (
15 | extract(:b1 , '/process_result/select_list_item')
16 | )
17 | ) t
18 |
19 |
--------------------------------------------------------------------------------
/tests/examples/order_by01.sql:
--------------------------------------------------------------------------------
1 | select * from dual order by 1
2 |
--------------------------------------------------------------------------------
/tests/examples/order_by02.sql:
--------------------------------------------------------------------------------
1 | select * from dual order by 1 asc
2 |
--------------------------------------------------------------------------------
/tests/examples/order_by03.sql:
--------------------------------------------------------------------------------
1 | select * from dual order by m.year, m.title, f(a)
2 |
--------------------------------------------------------------------------------
/tests/examples/order_by04.sql:
--------------------------------------------------------------------------------
1 | select * from dual order by a nulls first, b nulls last
2 |
--------------------------------------------------------------------------------
/tests/examples/order_by05.sql:
--------------------------------------------------------------------------------
1 | select * from dual order siblings by a nulls first, b nulls last, c nulls last, d nulls last, e nulls last
--------------------------------------------------------------------------------
/tests/examples/order_by06.sql:
--------------------------------------------------------------------------------
1 | with a as (select * from dual order by 1) select * from a
2 |
--------------------------------------------------------------------------------
/tests/examples/pivot01.sql:
--------------------------------------------------------------------------------
1 | select * from pivot_table
2 | unpivot (yearly_total for order_mode in (store as 'direct',
3 | internet as 'online'))
4 | order by year, order_mode
5 |
6 |
7 |
--------------------------------------------------------------------------------
/tests/examples/pivot02.sql:
--------------------------------------------------------------------------------
1 | select * from (
2 | select times_purchased as "puchase frequency", state_code
3 | from customers t
4 | )
5 | pivot xml
6 | (
7 | count(state_code)
8 | for state_code in (select state_code from preferred_states)
9 | )
10 | order by 1
11 |
12 |
--------------------------------------------------------------------------------
/tests/examples/pivot03.sql:
--------------------------------------------------------------------------------
1 | select * from (
2 | select times_purchased as "purchase frequency", state_code
3 | from customers t
4 | )
5 | pivot xml
6 | (
7 | count(state_code)
8 | for state_code in (any)
9 | )
10 | order by 1
11 |
12 |
--------------------------------------------------------------------------------
/tests/examples/pivot04.sql:
--------------------------------------------------------------------------------
1 | select value
2 | from
3 | (
4 | (
5 | select
6 | 'a' v1,
7 | 'e' v2,
8 | 'i' v3,
9 | 'o' v4,
10 | 'u' v5
11 | from dual
12 | )
13 | unpivot
14 | (
15 | value
16 | for value_type in
17 | (v1,v2,v3,v4,v5)
18 | )
19 | )
20 |
21 |
--------------------------------------------------------------------------------
/tests/examples/pivot05.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from (select customer_id, product_code, quantity
3 | from pivot_test)
4 | pivot xml (sum(quantity) as sum_quantity for (product_code) in (select distinct product_code
5 | from pivot_test))
6 |
--------------------------------------------------------------------------------
/tests/examples/pivot06.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from (select product_code, quantity
3 | from pivot_test)
4 | pivot xml (sum(quantity) as sum_quantity for (product_code) in (select distinct product_code
5 | from pivot_test
6 | where id < 10))
--------------------------------------------------------------------------------
/tests/examples/pivot07.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from (select customer_id, product_code, quantity
3 | from pivot_test)
4 | pivot (sum(quantity) as sum_quantity for (product_code) in ('a' as a, 'b' as b, 'c' as c))
5 | order by customer_id
6 |
--------------------------------------------------------------------------------
/tests/examples/pivot08.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from (select product_code, quantity
3 | from pivot_test)
4 | pivot (sum(quantity) as sum_quantity for (product_code) in ('a' as a, 'b' as b, 'c' as c))
--------------------------------------------------------------------------------
/tests/examples/pivot09.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from (s join d using(c))
3 | pivot
4 | (
5 | max(c_c_p) as max_ccp
6 | , max(d_c_p) max_dcp
7 | , max(d_x_p) dxp
8 | , count(1) cnt
9 | for (i, p) in
10 | (
11 | (1,1) as one_one,
12 | (1,2) as one_two,
13 | (1,3) as one_three,
14 | (2,1) as two_one,
15 | (2,2) as two_two,
16 | (2,3) as two_three
17 | )
18 | )
19 | where d_t = 'p'
20 |
--------------------------------------------------------------------------------
/tests/examples/pivot10.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from s join d using(c)
3 | pivot
4 | (
5 | max(c_c_p) as max_ccp
6 | , max(d_c_p) max_dcp
7 | , max(d_x_p) dxp
8 | , count(1) cnt
9 | for (i, p) in
10 | (
11 | (1,1) as one_one,
12 | (1,2) as one_two,
13 | (1,3) as one_three,
14 | (2,1) as two_one,
15 | (2,2) as two_two,
16 | (2,3) as two_three
17 | )
18 | )
19 | where d_t = 'p'
20 |
--------------------------------------------------------------------------------
/tests/examples/pivot11.sql:
--------------------------------------------------------------------------------
1 | select *
2 | from s
3 | pivot
4 | (
5 | max(c_c_p) as max_ccp
6 | , max(d_c_p) max_dcp
7 | , max(d_x_p) dxp
8 | , count(1) cnt
9 | for (i, p) in
10 | (
11 | (1,1) as one_one,
12 | (1,2) as one_two,
13 | (1,3) as one_three,
14 | (2,1) as two_one,
15 | (2,2) as two_two,
16 | (2,3) as two_three
17 | )
18 | )
19 | join d using(c)
20 | where d_t = 'p'
21 |
--------------------------------------------------------------------------------
/tests/examples/pivot12.sql:
--------------------------------------------------------------------------------
1 | select value from
2 | (
3 | (
4 | select
5 | 'a' v1,
6 | 'e' v2,
7 | 'i' v3,
8 | 'o' v4,
9 | 'u' v5
10 | from dual
11 | )
12 | unpivot include nulls
13 | (
14 | value
15 | for value_type in
16 | (v1, v2,v3,v4,v5) -- Also can give ANY here.
17 | )
18 | )
--------------------------------------------------------------------------------
/tests/examples/position_function.sql:
--------------------------------------------------------------------------------
1 | SELECT
2 | SUBSTRING(address, POSITION(' ' IN address)+1, LENGTH(address))
3 | FROM
4 | address;
--------------------------------------------------------------------------------
/tests/examples/query_factoring01.sql:
--------------------------------------------------------------------------------
1 | with
2 | reports_to_101 (eid, emp_last, mgr_id, reportlevel) as
3 | (
4 | select employee_id, last_name, manager_id, 0 reportlevel
5 | from employees
6 | where employee_id = 101
7 | union all
8 | select e.employee_id, e.last_name, e.manager_id, reportlevel+1
9 | from reports_to_101 r, employees e
10 | where r.eid = e.manager_id
11 | )
12 | select eid, emp_last, mgr_id, reportlevel
13 | from reports_to_101 r, auto a
14 | where r.c1 = a.c2
15 | order by reportlevel, eid
16 |
17 |
--------------------------------------------------------------------------------
/tests/examples/query_factoring02.sql:
--------------------------------------------------------------------------------
1 | with
2 | reports_to_101 (eid, emp_last, mgr_id, reportlevel, mgr_list)
3 | as
4 | (
5 | select employee_id, last_name, manager_id, 0 reportlevel
6 | , cast(manager_id as varchar2(2000))
7 | from employees
8 | where employee_id = 101
9 | union all
10 | select e.employee_id, e.last_name, e.manager_id, reportlevel+1
11 | , cast(mgr_list || ',' || manager_id as varchar2(2000))
12 | from reports_to_101 r, employees e
13 | where r.eid = e.manager_id
14 | )
15 | select eid, emp_last, mgr_id, reportlevel, mgr_list
16 | from reports_to_101
17 | order by reportlevel, eid
18 |
19 |
--------------------------------------------------------------------------------
/tests/examples/query_factoring03.sql:
--------------------------------------------------------------------------------
1 | with
2 | reports_to_101 (eid, emp_last, mgr_id, reportlevel) as
3 | (
4 | select employee_id, last_name, manager_id, 0 reportlevel
5 | from employees
6 | where employee_id = 101
7 | union all
8 | select e.employee_id, e.last_name, e.manager_id, reportlevel+1
9 | from reports_to_101 r, employees e
10 | where r.eid = e.manager_id
11 | )
12 | select eid, emp_last, mgr_id, reportlevel
13 | from reports_to_101
14 | where reportlevel <= 1
15 | order by reportlevel, eid
16 |
17 |
--------------------------------------------------------------------------------
/tests/examples/query_factoring04.sql:
--------------------------------------------------------------------------------
1 | with
2 | org_chart (eid, emp_last, mgr_id, reportlevel, salary, job_id) as
3 | (
4 | select employee_id, last_name, manager_id, 0 reportlevel, salary, job_id
5 | from employees
6 | where manager_id is null
7 | union all
8 | select e.employee_id, e.last_name, e.manager_id,
9 | r.reportlevel+1 reportlevel, e.salary, e.job_id
10 | from org_chart r, employees e
11 | where r.eid = e.manager_id
12 | )
13 | search depth first by emp_last set order1
14 | select lpad(' ',2*reportlevel)||emp_last emp_name, eid, mgr_id, salary, job_id
15 | from org_chart
16 | order by order1
17 |
18 |
19 |
--------------------------------------------------------------------------------
/tests/examples/query_factoring05.sql:
--------------------------------------------------------------------------------
1 | with
2 | x1 as (select max(y1) from klm1),
3 | x2 as (select max(y2) from klm2),
4 | x3 as (select max(y3) from klm3),
5 | x4 as (select max(y4) from klm4)
6 | select
7 | distinct
8 | -1,
9 | +1,
10 | a + b * (a * d) as aaa,
11 | t1.region_name,
12 | t2.division_name,
13 | t1.region_name as a,
14 | t2.division_name as aaaa,
15 | a.*,
16 | sum(t3.amount),
17 | sum(count(1)) + count(*)
18 | , sum(1) + (select count(1) from ddd) a
19 | from dual, fff
20 | where a is null
21 | or b is not null
22 | and ( a like 'd')
23 | and 1 = 0
24 | and a.b is a set
25 | union
26 | select a from dual
27 |
28 |
--------------------------------------------------------------------------------
/tests/examples/query_factoring06.sql:
--------------------------------------------------------------------------------
1 | with
2 | dept_costs as (
3 | select department_name, sum(salary) dept_total
4 | from employees e, departments d
5 | where e.department_id = d.department_id
6 | group by department_name),
7 | avg_cost as (
8 | select sum(dept_total)/count(*) avg
9 | from dept_costs)
10 | select * from dept_costs
11 | where dept_total >
12 | (select avvg from avg_cost)
13 | order by department_name
14 |
15 |
16 |
--------------------------------------------------------------------------------
/tests/examples/query_factoring07.sql:
--------------------------------------------------------------------------------
1 | with timegrouped_rawdata
2 | as
3 | (
4 | select /*+ leading(sn di md sh ot) cardinality(ot 1000) */
5 | sh.metric_id as metric_id ,
6 | ot.bsln_guid as bsln_guid ,
7 | ot.timegroup as timegroup ,
8 | sh.value as obs_value
9 | from
10 | dba_hist_snapshot sn ,
11 | dba_hist_database_instance di ,
12 | sys.wrh$_sysmetric_history sh ,
13 | bsln_metric_defaults md ,
14 | table(:b1 ) ot
15 | where sn.dbid = :b6
16 | and sn.snap_id between :b5 and :b4
17 | and di.dbid = sn.dbid
18 | and di.instance_number = sn.instance_number
19 | and di.startup_time = sn.startup_time
20 | and di.instance_name = :b3
21 | and sh.snap_id = sn.snap_id
22 | and sh.dbid = sn.dbid
23 | and sh.instance_number = sn.instance_number
24 | and sh.group_id = 2
25 | and sh.metric_id = md.metric_id
26 | and md.status = :b2
27 | and ot.obs_time = trunc(sh.end_time, 'hh24')
28 | )
29 | (
30 | select
31 | bsln_statistics_t
32 | (
33 | bsln_guid ,metric_id ,:b11 ,:b10 ,timegroup ,sample_count ,average ,minimum ,maximum ,sdev ,
34 | pctile_25 ,pctile_50 ,pctile_75 ,pctile_90 ,pctile_95 ,pctile_99 ,est_sample_count ,est_slope ,est_intercept ,
35 | case when est_slope = 0 then 0 else greatest(0,nvl(100-(25*power((1-est_mu1/est_slope), 2)*(est_sample_count-1) ),0)) end ,
36 | ln( 1000) * est_slope + est_intercept ,
37 | ln(10000) * est_slope + est_intercept
38 | )
39 | from
40 | (
41 | select metric_id ,bsln_guid ,timegroup ,est_mu as est_slope ,est_mu * ln(alpha) + x_m as est_intercept ,to_number(null) as est_fit_quality ,
42 | case when count_below_x_j > 0 then (sum_below_x_j + (n-m+1)*(x_j-x_m))/count_below_x_j - x_j else to_number(null) end as est_mu1 ,
43 | est_sample_count ,n as sample_count ,average ,minimum ,maximum ,sdev ,pctile_25 ,pctile_50 ,pctile_75 ,pctile_90 ,pctile_95 ,pctile_99
44 | from
45 | (
46 | select metric_id ,bsln_guid ,timegroup ,max(n) as n ,count(rrank) as est_sample_count ,
47 | case when count(rrank) > 3 then ( sum(obs_value) + ( max(n) - max(rrank) ) * max(obs_value) - (max(n) - min(rrank) + 1) * min(obs_value) ) / (count(rrank)-1)
48 | else to_number(null) end as est_mu ,
49 | (max(n) - min(rrank) + 1) / (max(n) + 1) as alpha ,
50 | min(obs_value) as x_m ,max(obs_value) as x_l ,max(rrank) as l ,min(rrank) as m ,max(mid_tail_value) as x_j ,
51 | sum(case when obs_value < mid_tail_value then obs_value else 0 end ) as sum_below_x_j ,
52 | sum(case when cume_dist < :b7 then 1 else 0 end ) as count_below_x_j ,
53 | max(max_val) as maximum ,max(min_val) as minimum ,max(avg_val) as average ,max(sdev_val) as sdev ,max(pctile_25) as pctile_25 ,max(pctile_50) as pctile_50 ,
54 | max(pctile_75) as pctile_75 ,max(pctile_90) as pctile_90 ,max(pctile_95) as pctile_95 ,max(pctile_99) as pctile_99
55 | from
56 | (
57 | select metric_id ,bsln_guid ,timegroup ,obs_value as obs_value,
58 | cume_dist () over (partition by metric_id, bsln_guid, timegroup order by obs_value ) as cume_dist ,
59 | count(1) over (partition by metric_id, bsln_guid, timegroup ) as n ,
60 | row_number () over (partition by metric_id, bsln_guid, timegroup order by obs_value) as rrank ,
61 | percentile_disc(:b7 ) within group (order by obs_value asc) over (partition by metric_id, bsln_guid, timegroup) as mid_tail_value ,
62 | max(obs_value) over (partition by metric_id, bsln_guid, timegroup ) as max_val ,
63 | min(obs_value) over (partition by metric_id, bsln_guid, timegroup ) as min_val ,
64 | avg(obs_value) over (partition by metric_id, bsln_guid, timegroup ) as avg_val ,
65 | stddev(obs_value) over (partition by metric_id, bsln_guid, timegroup ) as sdev_val ,
66 | percentile_cont(0.25) within group (order by obs_value asc) over (partition by metric_id, bsln_guid, timegroup) as pctile_25 ,
67 | percentile_cont(0.5) within group (order by obs_value asc) over (partition by metric_id, bsln_guid, timegroup) as pctile_50 ,
68 | percentile_cont(0.75) within group (order by obs_value asc) over (partition by metric_id, bsln_guid, timegroup) as pctile_75 ,
69 | percentile_cont(0.90) within group (order by obs_value asc) over (partition by metric_id, bsln_guid, timegroup) as pctile_90 ,
70 | percentile_cont(0.95) within group (order by obs_value asc) over (partition by metric_id, bsln_guid, timegroup) as pctile_95 ,
71 | percentile_cont(0.99) within group (order by obs_value asc) over (partition by metric_id, bsln_guid, timegroup) as pctile_99
72 | from timegrouped_rawdata d
73 | ) x
74 | where x.cume_dist >= :b9 and x.cume_dist <= :b8
75 | group by metric_id ,bsln_guid ,timegroup
76 | )
77 | )
78 | )
79 |
--------------------------------------------------------------------------------
/tests/examples/query_factoring08.sql:
--------------------------------------------------------------------------------
1 | with -- qf
2 | x1 as ( select * from t1 ),
3 | x2 as ( select * from t2 join t3 on (t2.a2 = t3.a3))
4 | select
5 | *
6 | from
7 | x1
8 | join x2 on (x1.a1 = x2.a2)
9 | join t4 on (x1.a1 = t4.a4)
10 |
11 |
--------------------------------------------------------------------------------
/tests/examples/query_factoring09.sql:
--------------------------------------------------------------------------------
1 | with rn as (
2 | select rownum rn
3 | from dual
4 | connect by level <= (select max(cases) from t1))
5 | select pname
6 | from t1, rn
7 | where rn <= cases
8 | order by pname
9 |
--------------------------------------------------------------------------------
/tests/examples/query_factoring10.sql:
--------------------------------------------------------------------------------
1 | with o(obj,link) as
2 | (
3 | select 'a', 'b' from dual union all
4 | select 'a', 'c' from dual union all
5 | select 'c', 'd' from dual union all
6 | select 'd', 'c' from dual union all
7 | select 'd', 'e' from dual union all
8 | select 'e', 'e' from dual
9 | ),
10 | t(root,lev,obj,link,path) as (
11 | select obj,1,obj,link,cast(obj||'->'||link
12 | as varchar2(4000))
13 | from o
14 | where obj='a' -- start with
15 | union all
16 | select
17 | t.root,t.lev+1,o.obj,o.link,
18 | t.path||', '||o.obj||
19 | '->'
20 | ||o.link
21 | from t, o
22 | where t.link=o.obj
23 | )
24 | search depth first by obj set ord
25 | cycle obj set cycle to 1 default 0
26 | select root,lev,obj,link,path,cycle,
27 | case
28 | when (lev - lead(lev) over (order by ord)) < 0
29 | then 0
30 | else 1
31 | end is_leaf
32 | from t
33 |
34 |
--------------------------------------------------------------------------------
/tests/examples/query_factoring11.sql:
--------------------------------------------------------------------------------
1 | with col_generator as (
2 | select t1.batch_id, decode(t1.action, 'sent', t1.actdate) sent,
3 | decode(t2.action,'recv', t2.actdate) received
4 | from test t1, test t2
5 | where t2.batch_id(+) = t1.batch_id)
6 | select batch_id, max(sent) sent, max(received) received
7 | from col_generator
8 | group by batch_id
9 | order by 1
10 |
--------------------------------------------------------------------------------
/tests/examples/sample01.sql:
--------------------------------------------------------------------------------
1 | select * from
2 | (
3 | select 1 as c1 from "sys"."obj$" sample block (14.285714 , 1) seed (1) "o"
4 | ) samplesub
5 |
--------------------------------------------------------------------------------
/tests/examples/simple02.sql:
--------------------------------------------------------------------------------
1 | select employee_id from (select employee_id+1 as employee_id from employees)
2 | for update
3 |
4 |
--------------------------------------------------------------------------------
/tests/examples/simple03.sql:
--------------------------------------------------------------------------------
1 | select employee_id from (select employee_id+1 as employee_id from employees)
2 | for update of employee_id
3 |
4 |
--------------------------------------------------------------------------------
/tests/examples/simple04.sql:
--------------------------------------------------------------------------------
1 | select * from
2 | (
3 | (
4 | select * from dual
5 | )
6 | unpivot
7 | (
8 | value for value_type in (dummy)
9 | )
10 | )
11 |
12 |
--------------------------------------------------------------------------------
/tests/examples/simple05.sql:
--------------------------------------------------------------------------------
1 | select * from
2 | (
3 | select * from a
4 | unpivot
5 | (
6 | value for value_type in (dummy)
7 | )
8 | )
9 |
10 |
--------------------------------------------------------------------------------
/tests/examples/simple06.sql:
--------------------------------------------------------------------------------
1 | select * from (( select * from dual)) a
2 |
3 |
--------------------------------------------------------------------------------
/tests/examples/simple07.sql:
--------------------------------------------------------------------------------
1 | select * from dual for update
2 | of dual
3 |
4 |
--------------------------------------------------------------------------------
/tests/examples/simple08.sql:
--------------------------------------------------------------------------------
1 | select a, b, c, d, e, 1, 2, f(a,b,c,1+1) from dual
2 |
3 |
--------------------------------------------------------------------------------
/tests/examples/simple09.sql:
--------------------------------------------------------------------------------
1 | select a||last_name,
2 | employee_id
3 | from employees
4 | start with job_id = 'ad_vp'
5 | connect by prior employee_id = manager_id
6 |
7 |
8 |
--------------------------------------------------------------------------------
/tests/examples/simple10.sql:
--------------------------------------------------------------------------------
1 | select a as over from over
2 |
--------------------------------------------------------------------------------
/tests/examples/simple11.sql:
--------------------------------------------------------------------------------
1 | select a.* from dual
2 |
--------------------------------------------------------------------------------
/tests/examples/simple12.sql:
--------------------------------------------------------------------------------
1 | select
2 | +1,
3 | t2.division_name as aaaa,
4 | a.*,
5 | sum(t3.amount)
6 | from dual
7 |
8 |
--------------------------------------------------------------------------------
/tests/examples/simple13.sql:
--------------------------------------------------------------------------------
1 | select * from (dual), (dual d), (dual) d
2 |
--------------------------------------------------------------------------------
/tests/examples/union01.sql:
--------------------------------------------------------------------------------
1 | select 'a' obj, 'b' link from dual union all
2 | select 'a', 'c' from dual union all
3 | select 'c', 'd' from dual union all
4 | select 'd', 'c' from dual union all
5 | select 'd', 'e' from dual union all
6 | select 'e', 'e' from dual
7 |
8 |
--------------------------------------------------------------------------------
/tests/examples/union02.sql:
--------------------------------------------------------------------------------
1 | select distinct job_id from hr.jobs
2 | union all
3 | select distinct job_id from hr.job_history
4 |
5 |
--------------------------------------------------------------------------------
/tests/examples/union03.sql:
--------------------------------------------------------------------------------
1 | select distinct job_id from hr.jobs
2 | union all
3 | (
4 | select distinct job_id from hr.job_history
5 | )
6 |
7 |
--------------------------------------------------------------------------------
/tests/examples/union04.sql:
--------------------------------------------------------------------------------
1 | (
2 | select distinct job_id from hr.jobs
3 | )
4 | union all
5 | (
6 | select distinct job_id from hr.job_history
7 | union all
8 | ((((
9 | select distinct job_id from hr.job_history
10 | union all
11 | (
12 | select distinct job_id from hr.job_history
13 | )
14 | )))
15 | union all
16 | select distinct job_id from hr.job_history
17 | )
18 | )
19 | union all
20 | (
21 | select distinct job_id from hr.job_history
22 | union all
23 | (
24 | select distinct job_id from hr.job_history
25 | union all
26 | (
27 | select distinct job_id from hr.job_history
28 | )
29 | )
30 | )
31 | union all
32 | (
33 | select distinct job_id from hr.job_history
34 | union all
35 | select distinct job_id from hr.job_history
36 | )
37 | union all
38 | select distinct job_id from hr.job_history
39 | union all
40 | select distinct job_id from hr.job_history
41 | union all
42 | select distinct job_id from hr.job_history
43 | union all
44 | select distinct job_id from hr.job_history
45 |
46 |
--------------------------------------------------------------------------------
/tests/examples/union05.sql:
--------------------------------------------------------------------------------
1 | select * from dual
2 | union all
3 | (
4 | select * from dual
5 | )
6 | union all
7 | (
8 | select * from dual
9 | )
10 | union all
11 | (
12 | select * from dual
13 | )
14 | union all
15 | (
16 | select * from dual
17 | )
18 | union all
19 | (
20 | select * from dual
21 | )
22 | union all
23 | (
24 | select * from dual
25 | )
26 | union all
27 | (
28 | select * from dual
29 | )
30 | order by 1 asc, 2 asc
31 |
--------------------------------------------------------------------------------
/tests/examples/union06.sql:
--------------------------------------------------------------------------------
1 | ( select "x"."r_no",
2 | "x"."i_id",
3 | "x"."ind",
4 | "x"."item",
5 | '0' "o"
6 | from "x"
7 | where ("x"."r_no" = :a)
8 | minus
9 | select "y"."r_no",
10 | "y"."i_id",
11 | "y"."ind",
12 | "y"."item",
13 | '0' "o"
14 | from "y"
15 | where ("y"."r_no" = :a)
16 | )
17 | union
18 | ( select "y"."r_no",
19 | "y"."i_id",
20 | "y"."ind",
21 | "y"."item",
22 | '1' "o"
23 | from "y"
24 | where ("y"."r_no" = :a)
25 | minus
26 | select "x"."r_no",
27 | "x"."i_id",
28 | "x"."ind",
29 | "x"."item",
30 | '1' "o"
31 | from "x"
32 | where ("x"."r_no" = :a)
33 | )
34 | order by 4,3,1
35 |
36 |
--------------------------------------------------------------------------------
/tests/examples/union07.sql:
--------------------------------------------------------------------------------
1 | --
2 | -- note: this query was copied from the druid project
3 | -- http://code.alibabatech.com/wiki/display/druid/home
4 | --
5 | select * from (
6 | select row_.*, rownum rownum_
7 | from (
8 | select *
9 | from
10 | (
11 | select results.*,row_number() over ( partition by results.object_id order by results.gmt_modified desc) rn
12 | from
13 | (
14 | (
15 | select sus.id id, sus.gmt_create gmt_create,
16 | sus.gmt_modified gmt_modified, sus.company_id company_id,
17 | sus.object_id object_id, sus.object_type object_type,
18 | sus.confirm_type confirm_type, sus.operator operator,
19 | sus.filter_type filter_type, sus.member_id member_id,
20 | sus.member_fuc_q member_fuc_q, sus.risk_type risk_type , 'y' is_draft
21 | from f_u_c_ sus , a_b_c_draft p ,
22 | member m
23 | where 1=1 and p.company_id = m.company_id
24 | and m.login_id=?
25 | and p.sale_type in( ? )
26 | and p.id=sus.object_id
27 | )
28 | union
29 | (
30 | select sus.id id, sus.gmt_create gmt_create,
31 | sus.gmt_modified gmt_modified, sus.company_id company_id,
32 | sus.object_id object_id, sus.object_type object_type,
33 | sus.confirm_type confirm_type, sus.operator operator,
34 | sus.filter_type filter_type, sus.member_id member_id,
35 | sus.member_fuc_q member_fuc_q, sus.risk_type risk_type , 'n' is_draft
36 | from f_u_c_ sus , a_b_c p ,member m
37 | where 1=1
38 | and p.company_id = m.company_id
39 | and m.login_id=?
40 | and p.sale_type in( ? )
41 | and p.id=sus.object_id
42 | )
43 | ) results
44 | ) where rn = 1 order by gmt_modified desc
45 | )row_ where rownum <= ?
46 | )
47 | where rownum_ >= ?
48 |
--------------------------------------------------------------------------------
/tests/examples/union08.sql:
--------------------------------------------------------------------------------
1 | select * from dual where exists (
2 | select * from dual
3 | union all
4 | select * from dual
5 | )
6 |
--------------------------------------------------------------------------------
/tests/examples/union09.sql:
--------------------------------------------------------------------------------
1 | select * from (
2 | select row_.*
3 | from (
4 | select *
5 | from
6 | (
7 | select results.*, 1 rn
8 | from
9 | (
10 | (
11 | select dummy
12 | from dual
13 | where 1=1
14 | )
15 | union
16 | (
17 | select dummy
18 | from dual
19 | where 1=1
20 | )
21 | ) results
22 | )
23 | where rn = 1 order by dummy desc
24 | )row_
25 | where rownum <= 1
26 | )
27 | where rownum >= 1
28 |
--------------------------------------------------------------------------------
/tests/examples/union10.sql:
--------------------------------------------------------------------------------
1 | select
2 | (
3 | (
4 | select 'y' from dual
5 | where exists ( select 1 from dual where 1 = 0 )
6 | )
7 | union
8 | (
9 | select 'n' from dual
10 | where not exists ( select 1 from dual where 1 = 0 )
11 | )
12 | )
13 | as yes_no
14 | from dual
15 |
--------------------------------------------------------------------------------
/tests/examples/xmltable02.sql:
--------------------------------------------------------------------------------
1 | select xmlelement("other_attrs", xmlelement("parsing_user_id", parsing_user_id)).getClobVal() other
2 | from f
3 |
--------------------------------------------------------------------------------
/tests/test_ast.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import os
3 | from antlr_ast.ast import AntlrException
4 | from antlr_plsql import ast
5 |
6 |
7 | def test_ast_parse_strict():
8 | with pytest.raises(AntlrException):
9 | ast.parse("SELECT x FROM ____", strict=True) # ____ is ungrammatical
10 | # Test export of exception class
11 | with pytest.raises(ast.ParseError):
12 | ast.parse("SELECT x FROM ____!", strict=True) # ____! is ungrammatical
13 |
14 |
15 | def test_unparsed_to_text():
16 | sql_txt = "SELECT CURSOR (SELECT a FROM b) FROM c"
17 | tree = ast.parse(sql_txt)
18 | cursor = tree.body[0].target_list[0]
19 |
20 | assert isinstance(cursor, ast.UnaryExpr)
21 | assert cursor.get_text(sql_txt) == "CURSOR (SELECT a FROM b)"
22 | assert cursor.expr.get_text(sql_txt) == "SELECT a FROM b"
23 |
24 |
25 | def test_ast_dump():
26 | sql_txt = "SELECT a, b FROM x WHERE a < 10"
27 | tree = ast.parse(sql_txt)
28 | ast.dump_node(tree)
29 | # TODO test .to_json()
30 |
31 |
32 | @pytest.mark.parametrize(
33 | "sql_text, start",
34 | [
35 | ("SELECT a, b FROM x WHERE a < 10;", "sql_script"),
36 | ("SELECT * FROM x", "sql_script"),
37 | ("SELECT CURSOR (SELECT a FROM b) FROM c", "sql_script"),
38 | ("SELECT a FROM x", "subquery"),
39 | ("WHERE x < 10", "where_clause"),
40 | ],
41 | )
42 | def test_ast_dumps_noerr(sql_text, start):
43 | tree = ast.parse(sql_text, start)
44 | import json
45 |
46 | d = json.dumps(ast.dump_node(tree))
47 | # TODO test .to_json()
48 |
49 |
50 | def test_ast_dumps_unary():
51 | tree = ast.parse("-1", "unary_expression")
52 | assert ast.dump_node(tree) == {
53 | "type": "UnaryExpr",
54 | "data": {
55 | "op": {"type": "Terminal", "data": {"value": "-"}},
56 | "expr": {"type": "Terminal", "data": {"value": "1"}},
57 | },
58 | }
59 | # TODO test .to_json()
60 |
61 |
62 | def test_select_fields_shaped():
63 | select = ast.parse(
64 | """
65 | SELECT a,b
66 | FROM x,y
67 | GROUP BY a, b
68 | ORDER BY a, b
69 |
70 | """,
71 | "subquery",
72 | )
73 | for field in select._fields:
74 | # TODO: update Unshaped test
75 | pass
76 |
77 |
78 | @pytest.mark.parametrize(
79 | "sql_text",
80 | [
81 | "SELECT a FROM co AS c INNER JOIN ec AS e ON c.code = e.code",
82 | "SELECT a FROM co AS c INNER JOIN ec ON c.code = ec.code",
83 | "SELECT a FROM co INNER JOIN ec AS e ON co.code = e.code",
84 | "SELECT a FROM co INNER JOIN ec ON co.code = ec.code",
85 | ],
86 | )
87 | def test_inner_join(sql_text):
88 | tree = ast.parse(sql_text)
89 | assert tree.body[0].from_clause[0].join_type == "inner"
90 |
91 |
92 | @pytest.mark.parametrize(
93 | "sql_text",
94 | [
95 | "SELECT a AS c FROM d RIGHT JOIN e ON f.g = h.j RIGHT JOIN i ON j.k = l.m",
96 | "SELECT a AS c FROM d RIGHT JOIN e ON f.g = h.j RIGHT JOIN i ON j.k = l.m ORDER BY n",
97 | "SELECT a.b AS c FROM d RIGHT JOIN e ON f.g = h.j RIGHT JOIN i ON j.k = l.m",
98 | "SELECT a.b AS c FROM d RIGHT JOIN e ON f.g = h.j RIGHT JOIN i ON j.k = l.m ORDER BY n",
99 | ],
100 | )
101 | def test_double_inner_join(sql_text):
102 | tree = ast.parse(sql_text)
103 | frm = tree.body[0].from_clause[0]
104 | assert frm.join_type == "right"
105 | assert frm.right.fields == ["i"]
106 | assert frm.left.join_type == "right"
107 | assert frm.left.left.fields == ["d"]
108 | assert frm.left.right.fields == ["e"]
109 |
110 |
111 | @pytest.mark.parametrize(
112 | "sql_text",
113 | [
114 | "SELECT a AS c FROM d as ad RIGHT JOIN e as ae ON f.g = h.j RIGHT JOIN i as ai ON j.k = l.m",
115 | "SELECT a AS c FROM d as ad RIGHT JOIN e as ae ON f.g = h.j RIGHT JOIN i as ai ON j.k = l.m ORDER BY n",
116 | "SELECT a.b AS c FROM d as ad RIGHT JOIN e as ae ON f.g = h.j RIGHT JOIN i as ai ON j.k = l.m",
117 | "SELECT a.b AS c FROM d as ad RIGHT JOIN e as ae ON f.g = h.j RIGHT JOIN i as ai ON j.k = l.m ORDER BY n",
118 | ],
119 | )
120 | def test_double_inner_join_with_aliases(sql_text):
121 | tree = ast.parse(sql_text)
122 | frm = tree.body[0].from_clause[0]
123 | assert frm.join_type == "right"
124 | assert frm.right.expr.fields == ["i"]
125 | assert frm.left.join_type == "right"
126 | assert frm.left.left.expr.fields == ["d"]
127 | assert frm.left.right.expr.fields == ["e"]
128 |
129 |
130 | def test_ast_select_paren():
131 | node = ast.parse("(SELECT a FROM b)", "subquery")
132 | assert isinstance(node, ast.SelectStmt)
133 |
134 |
135 | def ast_examples_parse(fname):  # parse the examples in tests/<fname> and write their reprs to tests/dump_<fname>
136 | import yaml
137 |
138 | dirname = os.path.dirname(__file__)
139 | data = yaml.safe_load(open(dirname + "/" + fname))
140 | res = {}
141 | for start, cmds in data["code"].items():
142 | res[start] = []
143 | for cmd in cmds:
144 | res[start].append([cmd, repr(ast.parse(cmd, start, strict=True))])
145 | print(res)
146 | filename = "dump_" + fname
147 | with open(dirname + "/" + filename, "w") as out_f:
148 | yaml.dump(res, out_f)
149 | return filename
150 |
151 |
152 | @pytest.mark.parametrize("fname", ["v0.2.yml", "v0.3.yml", "v0.5.yml"])
153 | def test_ast_examples_parse(fname):
154 |     ast_examples_parse(fname)
155 |
156 |
157 | @pytest.mark.parametrize(
158 | "stu",
159 | [
160 | "SELECT \"Preserve\" FROM B WHERE B.NAME = 'Casing'",
161 | "SELECT \"Preserve\" FROM b WHERE b.NAME = 'Casing'",
162 | "SELECT \"Preserve\" FROM b WHERE b.name = 'Casing'",
163 | "select \"Preserve\" FROM B WHERE B.NAME = 'Casing'",
164 | "select \"Preserve\" from B where B.NAME = 'Casing'",
165 | "select \"Preserve\" from b WHERE b.name = 'Casing'",
166 | ],
167 | )
168 | def test_case_insensitivity(stu):
169 | lowercase = "select \"Preserve\" from b where b.name = 'Casing'"
170 | assert repr(ast.parse(lowercase, strict=True)) == repr(ast.parse(stu, strict=True))
171 |
172 |
173 | @pytest.mark.parametrize(
174 | "stu",
175 | [
176 | "SELECT \"Preserve\" FROM B WHERE B.NAME = 'casing'",
177 | "SELECT \"Preserve\" FROM b WHERE b.NAME = 'CASING'",
178 | "SELECT \"preserve\" FROM b WHERE b.name = 'Casing'",
179 | "select \"PRESERVE\" FROM B WHERE B.NAME = 'Casing'",
180 | ],
181 | )
182 | def test_case_sensitivity(stu):
183 | lowercase = "select \"Preserve\" from b where b.name = 'Casing'"
184 | assert repr(ast.parse(lowercase, strict=True)) != repr(ast.parse(stu, strict=True))
185 |
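
The double right-join tests above read the FROM clause as a nested tree: the outermost join node (a JoinExpr) holds the join written last, and each `.left` step walks back to the join before it. A condensed sketch of that traversal, assuming antlr_plsql is installed; the printed values mirror the assertions above:

    from antlr_plsql import ast

    sql = "SELECT a.b AS c FROM d RIGHT JOIN e ON f.g = h.j RIGHT JOIN i ON j.k = l.m"
    frm = ast.parse(sql).body[0].from_clause[0]   # outermost join node
    print(frm.join_type)          # "right"
    print(frm.right.fields)       # ["i"]  (right operand of the second join)
    print(frm.left.left.fields)   # ["d"]  (left-most table, from the first join)
    print(frm.left.right.fields)  # ["e"]
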
--------------------------------------------------------------------------------
/tests/test_examples.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import os
3 | from antlr_plsql import ast
4 | from tests.test_ast import ast_examples_parse
5 |
6 | crnt_dir = os.path.dirname(__file__)
7 | examples = os.path.join(crnt_dir, "examples")
8 | # examples_sql_script = os.path.join(crnt_dir, "examples-sql-script")
9 |
10 |
11 | def load_examples(dir_path):
12 |     return [
13 |         [fname, open(os.path.join(dir_path, fname)).read()]
14 |         for fname in os.listdir(dir_path)
15 |     ]
16 |
17 |
18 | # @pytest.mark.examples
19 | @pytest.mark.parametrize("name, query", load_examples(examples))
20 | def test_examples(name, query):
21 |     ast.parse(query)
22 |
23 |
24 | # @pytest.mark.parametrize("name, query", load_examples(examples_sql_script))
25 | # def test_examples_sql_script(name, query):
26 | # ast.parse(query)
27 |
28 |
29 | def load_dump(fname):
30 |     import yaml
31 |
32 |     dirname = os.path.dirname(__file__)
33 |     dump_data = yaml.safe_load(open(os.path.join(dirname, fname)))
34 |
35 |     all_cmds = []
36 |     for start, cmds in dump_data.items():
37 |         for cmd, res in cmds:
38 |             all_cmds.append((start, cmd, res))
39 |     return all_cmds
40 |
41 |
42 | @pytest.mark.parametrize(
43 | "start,cmd,res",
44 | [
45 | *load_dump(ast_examples_parse("v0.2.yml")),
46 | *load_dump(ast_examples_parse("v0.3.yml")),
47 | *load_dump(ast_examples_parse("v0.5.yml")),
48 | ],
49 | )
50 | def test_dump(start, cmd, res):
51 | assert repr(ast.parse(cmd, start, strict=True)) == res
52 |
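
test_dump regenerates the dump files at collection time (via ast_examples_parse) and then checks that re-parsing every stored snippet still produces the same repr. Per fixture file, the round trip amounts to roughly the following sketch, assuming the package and the tests package are importable:

    from antlr_plsql import ast
    from tests.test_ast import ast_examples_parse
    from tests.test_examples import load_dump

    dump_fname = ast_examples_parse("v0.2.yml")   # writes tests/dump_v0.2.yml
    for start, cmd, res in load_dump(dump_fname):
        assert repr(ast.parse(cmd, start, strict=True)) == res
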
--------------------------------------------------------------------------------
/tests/test_speaker.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import os
3 | from antlr_plsql import ast
4 | from antlr_ast.ast import Speaker
5 | import yaml
6 |
7 |
8 | @pytest.fixture
9 | def speaker():
10 |     return Speaker(
11 |         nodes={"SelectStmt": "SELECT statement", "Call": "function call `{node.name}`"},
12 |         fields={"target_list": "target list"},
13 |     )
14 |
15 |
16 | def test_select_statement(speaker):
17 |     select = ast.parse("SELECT x FROM y", start="subquery")
18 |     assert speaker.describe(select, "{node_name}") == "SELECT statement"
19 |
20 |
21 | def test_select_target_list(speaker):
22 |     select = ast.parse("SELECT x FROM y", start="subquery")
23 |     assert speaker.describe(
24 |         select, field="target_list", fmt="The {field_name} of {node_name}"
25 |     )
26 |
27 |
28 | def test_call_name(speaker):
29 |     call = ast.parse("COUNT(*)", start="standard_function")
30 |     assert speaker.describe(call, fmt="{node_name}") == "function call `COUNT`"
31 |
32 |
33 | @pytest.mark.parametrize(
34 | "start, code",
35 | [
36 | ("selected_element", "id as id2"),
37 | ("binary_expression", "1 + 2"),
38 | ("standard_function", "COUNT(*)"),
39 | ("selected_element", "id"),
40 | # TODO: from_clause terminal used to be dropped, but after rewrite that keeps the data type, which is a list
41 | # remove visit_From_clause?
42 | # ('from_clause', 'FROM a JOIN b'),
43 | ("order_by_clause", "ORDER BY id"),
44 | ("subquery", "SELECT x FROM y"),
45 | ("order_by_elements", "id ASC"),
46 | ("selected_element", "*"),
47 | ("unary_expression", "-1"),
48 | ("subquery", "SELECT x FROM y UNION SELECT m FROM n"),
49 | ],
50 | )
51 | def test_print_speaker(code, start):
52 |     speaker = Speaker(**yaml.safe_load(open("antlr_plsql/speaker.yml")))
53 |
54 |     tree = ast.parse(code, start=start)
55 |
56 |     print("\n\n{} -----------------------\n\n".format(tree.__class__.__name__))
57 |     # node printout
58 |     print(speaker.describe(tree))
59 |     # fields
60 |     for field_name in tree._fields:
61 |         print(
62 |             speaker.describe(
63 |                 tree, field=field_name, fmt="The {field_name} of the {node_name}"
64 |             )
65 |         )
66 |
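
The Speaker maps node class names and field names to human-readable phrases, and describe fills them into a format string; the fixture and assertions above pin down that behavior. A minimal standalone sketch, assuming antlr_plsql and antlr_ast are installed:

    from antlr_ast.ast import Speaker
    from antlr_plsql import ast

    speaker = Speaker(
        nodes={"SelectStmt": "SELECT statement"},
        fields={"target_list": "target list"},
    )
    select = ast.parse("SELECT x FROM y", start="subquery")
    print(speaker.describe(select, "{node_name}"))
    # -> "SELECT statement"
    print(speaker.describe(select, field="target_list", fmt="The {field_name} of {node_name}"))
    # expected to read along the lines of "The target list of SELECT statement"
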
--------------------------------------------------------------------------------
/tests/v0.2.yml:
--------------------------------------------------------------------------------
1 | parser_name: plsql
2 | code:
3 |   sql_script:
4 |     - "SELECT x FROM y"
5 |     - "SELECT x FROM y;"
6 |     - "SELECT a, b FROM y;"
7 |     - "SELECT * FROM y;"
8 |     - "SELECT DISTINCT x FROM y;"
9 |     - "SELECT COUNT(*) FROM y"
10 |     - "SELECT x FROM y LIMIT 1"
11 |     - "SELECT x FROM y ORDER BY a, b ASC NULLS FIRST"
12 |   selected_element:
13 |     - "x AS y"
14 |     - "x / 60 AS y"
15 |     - "COUNT(x) * 100 / COUNT(*) AS perc_dead"
16 |   standard_function:
17 |     - "SUM(x)"
18 |     - "MIN(x)"
19 |     # TODO: PARTITION BY shaping not implemented yet
20 |     - "MAX(x) OVER (PARTITION BY a)"
21 |     - "COUNT(DISTINCT x)"
22 |   where_clause:
23 |     # Note: current_of_clause returns unshaped
24 |     - "WHERE x > 1"
25 |     - "WHERE a = 1 OR b = 2"
26 |     - "WHERE (a = 1 OR b = 2) AND (c = 3 OR d = 4)"
27 |     # valid in postgres! (don't know what it does)
28 |     - "WHERE (1, 2, 3)"
29 |   expression:
30 |     - "a BETWEEN 1994 AND 2000"
31 |     - "a IN (1, 2, 3 + 4, x)"
32 |     - "a IN (SELECT x FROM y)"
33 |     - "x IS NULL"
34 |     - "x IS NOT NULL"
35 |     - "x LIKE '%how'"
36 |     - "x LIKE '_r%'"
37 |     - "x NOT LIKE '_r%'"
38 |     - "AVG(x)"
39 |     - "x / 60"
40 |     - "MIN(x) % 2"
41 |     - "MAX(x) - MIN(x)"
42 |   order_by_clause:
43 |     - "ORDER BY imdb_score DESC"
44 |     - "ORDER BY birthdate, name"
45 |     # TODO: Siblings not put on AST, does it matter?
46 |     - "ORDER SIBLINGS BY a, b, ASC NULLS FIRST"
47 |   group_by_clause:
48 |     - "GROUP BY x"
49 |     # Note: having moved into subquery
50 |     #- "GROUP BY x HAVING x > 10"
51 |   having_clause:
52 |     - "HAVING x > 10"
53 |
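
Each key under code names the grammar start rule that its snippets are parsed against, which is how ast_examples_parse feeds them to ast.parse(cmd, start, strict=True). For example, two of the entries above can be parsed directly (a sketch, assuming antlr_plsql is installed):

    from antlr_plsql import ast

    print(repr(ast.parse("WHERE x > 1", "where_clause", strict=True)))
    print(repr(ast.parse("ORDER BY birthdate, name", "order_by_clause", strict=True)))
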
--------------------------------------------------------------------------------
/tests/v0.3.yml:
--------------------------------------------------------------------------------
1 | parser_name: plsql
2 | code:
3 |   table_ref:
4 |     - "a JOIN b ON a.id = b.id"
5 |     - "a LEFT JOIN b ON id = id"
6 |     - "a LEFT OUTER JOIN b USING (id)"
7 |     - "y JOIN z ON id = id JOIN c ON XXX = XXX"
8 |     - "y JOIN z USING (id1, id2)"
9 |     - "y JOIN (a JOIN b ON id = id) ON XXX = XXX"
10 |     # Note: partition included on JoinExpr but currently unshaped
11 |     - "y PARTITION BY (yid) JOIN x PARTITION BY (xid) USING acolumn"
12 |   subquery:
13 |     - "SELECT x FROM y UNION SELECT m FROM n"
14 |     - "SELECT x FROM y EXCEPT SELECT m FROM n"
15 |     - "SELECT x FROM y EXCEPT ALL SELECT m FROM n"
16 |     - "SELECT x FROM y UNION SELECT m FROM n ORDER BY id"
17 |     - "(SELECT x FROM y ORDER BY id) UNION (SELECT m FROM n ORDER BY id2)"
18 |     - "(SELECT a FROM b ORDER BY id1) UNION (SELECT x FROM y ORDER BY id2) ORDER BY id3"
19 |     - "SELECT x FROM y UNION (SELECT a FROM b UNION SELECT m FROM n) ORDER BY id"
20 |     # TODO grammar can't parse
21 |     #- "SELECT 1 UNION SELECT 2"
22 |   expression:
23 |     # bugfixes
24 |     - "a IN (SELECT x FROM y)"
25 |     - "a IN (1, 1 + 1)"
26 |
27 |
--------------------------------------------------------------------------------
/tests/v0.5.yml:
--------------------------------------------------------------------------------
1 | parser_name: plsql
2 | code:
3 |   subquery:
4 |     # bugfix
5 |     - "SELECT date FROM a"
6 |     - "SELECT size FROM a" # size keyword as column id
7 |     - "SELECT a FROM bee AS b" # table alias using "AS"
8 |     - "SELECT a INTO b, c FROM d" # shaping of INTO clause (previously unshaped)
9 |
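
The v0.5 entries are regression checks: date and size must still be accepted as column names, a table alias may be introduced with AS, and the INTO target list is now shaped rather than left unshaped. One way to inspect the shaped result, mirroring how test_ast.py uses dump_node (a sketch, assuming antlr_plsql is installed):

    from antlr_plsql import ast

    tree = ast.parse("SELECT a INTO b, c FROM d", "subquery", strict=True)
    print(ast.dump_node(tree))   # nested {"type": ..., "data": ...} dictionaries
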
--------------------------------------------------------------------------------