├── .gitignore
├── .thumbs.yml
├── LICENSE
├── Makefile
├── README.md
├── dialyzer.ignore-warnings
├── docs
├── ddl_compiler.md
├── img
│ └── SQL_Foundation_Doc.png
├── lexer_keywords.md
├── lexer_parser.md
├── riak_test_and_test_strategy.md
└── the_query_pipeline.md
├── include
└── riak_ql_ddl.hrl
├── priv
├── keyword_generator.rb
└── riak_ql_keywords.csv
├── rebar
├── rebar.config
├── src
├── riak_ql.app.src
├── riak_ql_cmd.erl
├── riak_ql_component.erl
├── riak_ql_ddl.erl
├── riak_ql_ddl_compiler.erl
├── riak_ql_ddl_util.erl
├── riak_ql_describe.erl
├── riak_ql_inverse_distrib_fns.erl
├── riak_ql_lexer.xrl
├── riak_ql_parser.yrl
├── riak_ql_quanta.erl
├── riak_ql_show_create_table.erl
├── riak_ql_to_string.erl
└── riak_ql_window_agg_fns.erl
├── test
├── compiler_basic_1.erl
├── lexer_tests.erl
├── parser_arithmetic_tests.erl
├── parser_canonicalise_where_tests.erl
├── parser_create_table_tests.erl
├── parser_delete_tests.erl
├── parser_describe_tests.erl
├── parser_explain_tests.erl
├── parser_function_tests.erl
├── parser_insert_tests.erl
├── parser_select_aggregate_tests.erl
├── parser_select_tests.erl
├── parser_select_where_tests.erl
├── parser_show_create_table_tests.erl
├── parser_show_tables_tests.erl
├── parser_test_utils.hrl
├── parser_tests.erl
└── query_ddl.erl
└── tools.mk
/.gitignore:
--------------------------------------------------------------------------------
1 | .eunit
2 | deps
3 | *.o
4 | *.beam
5 | *.plt
6 | erl_crash.dump
7 | ebin
8 | rel/example_project
9 | .concrete/DEV_MODE
10 | .rebar
11 | *~
12 | src/riak_ql_lexer.erl
13 | src/riak_ql_parser.erl
14 | _build
15 | rebar.lock
16 | .local_dialyzer_plt
17 | riak_ql
18 | dialyzer_unhandled_warnings
19 | dialyzer_warnings
20 |
--------------------------------------------------------------------------------
/.thumbs.yml:
--------------------------------------------------------------------------------
1 | docker: true
2 | docker_image: "davidxd/yoko1603dash1"
3 | minimum_reviewers: 2
4 | build_steps:
5 | - make clean deps compile
6 | - make test
7 | - make xref
8 | - make dialyzer
9 | merge: true
10 | org_mode: true
11 | timeout: 1790
12 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
203 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | .PHONY: deps test
2 |
3 | DIALYZER_FLAGS =
4 |
5 | all: deps compile
6 |
7 | compile: deps
8 | ./rebar compile
9 |
10 | deps:
11 | ./rebar get-deps
12 |
13 | clean:
14 | ./rebar clean
15 | rm -rf test.*-temp-data
16 |
17 | distclean: clean
18 | ./rebar delete-deps
19 |
20 | test:
21 | @# delete the lexer and parser beams because the rebar2 will not
22 | @# recompile them on changes
23 | @rm -f src/riak_ql_parser.erl src/riak_ql_lexer.erl \
24 | ebin/riak_ql_parser.beam ebin/riak_ql_parser.* \
25 | .eunit/riak_ql_parser.* .eunit/riak_ql_lexer.*
26 | @# call the compile target as well, also needed for the lexer/parser files
27 | ./rebar compile eunit skip_deps=true
28 |
29 | DIALYZER_APPS = kernel stdlib sasl erts ssl tools os_mon runtime_tools crypto inets \
30 | xmerl webtool snmp public_key mnesia eunit syntax_tools compiler
31 |
32 | shell:
33 | @# the shell command is broken in the riak_ql version of rebar and doesn't
34 | @# do distribution anyway.
35 | erl -pa ebin -pa deps/*/ebin -sname riak_ql -setcookie riak_ql
36 |
37 | include tools.mk
38 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Riak QL
2 |
3 | ## Introduction
4 |
5 | riak_ql provides a number of different functions to Riak
6 |
7 | * a lexer/parser for the SQL sub-set we support
8 | * a function for calculating time quanta for Time Series
9 | * a compiler for generating helper modules to validate and manipulate records that correspond to a defined table schema in the DDL
10 |
11 | Link to the official and still private [docs](https://github.com/basho/private_basho_docs/tree/timeseries/1.0.0/source/languages/en/riakts).
12 |
13 | This README is an overview of the repo. Individual sub-systems have their own documentation which will be linked to as appropriate.
14 |
15 | ## Table Of Contents
16 |
17 | This document contains the following sections:
18 |
19 | * Repository Contents
20 | * Runtime Tools
21 | * SQL Lexer/Parser
22 | * Time Quantiser Fn
23 | * DDL Compiler
24 | * Runtime Query Fns
25 | * Testing Strategy
26 |
27 | ## Repository Contents
28 |
29 | This application contains the following files:
30 |
31 | * `riak_ql_cmd.erl`
32 | * `riak_ql_ddl_compiler.erl`
33 | * `riak_ql_ddl.erl`
34 | * `riak_ql_lexer.xrl`
35 | * `riak_ql_parser.yrl`
36 | * `riak_ql_quanta.erl`
37 | * `riak_ql_to_string.erl`
38 | * `riak_ql_window_agg_fns.erl`
39 |
40 | ----
41 |
42 | ## Runtime Tools
43 |
44 | There is an escript that lets you run the lexer/parser from the command line - it is designed to help developers/operators check their syntax.
45 |
46 | * `riak_ql_cmd.erl`
47 |
48 | Please read the inline documentation for this module.
49 |
50 | ## SQL Lexer/Parser
51 |
52 | The SQL Lexer/Parser takes a string representation of a SQL query and then compiles it. The modules that perform this are:
53 |
54 | * `riak_ql_lexer.xrl`
55 | * `riak_ql_parser.yrl`
56 |
57 | Running `./rebar compile` transforms this pair of leex and yecc files into the executable Erlang counterparts:
58 |
59 | * `riak_ql_lexer.erl`
60 | * `riak_ql_parser.erl`
61 |
62 | For more details of the lexer and parser see the [Lexer And Parser](./docs/lexer_parser.md)
63 |
64 | To understand how the lexer/parser fits into the query pipeline see [Query Pipeline](./docs/the_query_pipeline.md)
65 |
66 | There is a Ruby script and a set of SQL keywords which can be used to generate some of the lexer components of `riak_ql_lexer.xrl`:
67 |
68 | * `priv/keyword_generator.rb`
69 | * `priv/riak_ql_keywords.csv`
70 |
71 | For more details see the [Lexer Keywords](./docs/lexer_keywords.md)
72 |
73 | This code generates one of two output records:
74 |
75 | * `ddl_v1{}` - which captures the `CREATE TABLE...` statement
76 | * `riak_select_v1{}` - which captures a `SELECT * FROM...` statement
77 |
78 | **NOTE:** the output of the lexer is a proplist of record field names and values - the actual record is constructed *over the fence* from `riak_ql` in `riak_kv` for the purposes of simplifying inter-repo dependency management.
79 |
80 | ## Time Quantiser
81 |
82 | Time quantisation is done by the module:
83 |
84 | * `riak_ql_quanta.erl`
85 |
86 | Please read the inline documentation for this module.
87 |
88 | ## DDL Compiler
89 |
90 | The DDL compiler is implemented by:
91 |
92 | * `riak_ql_ddl_compiler.erl`
93 | * `riak_ql_ddl.erl`
94 |
95 | When a `CREATE TABLE...` statement is fed into the lexer/parser it generates a `#ddl_v1{}` - a data description language. This captures the structure of the table. The DDL compiler then generates a helper module for that table which allows for the programmatic manipulation of data in that particular format via a common and fixed API.
96 |
97 | The module `riak_ql_ddl_compiler.erl` performs the compilation and the module `riak_ql_ddl.erl` provides a set of wrappers around the compiled module to add utility functions to the API.
98 |
99 | For more details see the [DDL Compiler](./docs/ddl_compiler.md)
100 |
101 | ## Runtime Query Fns
102 |
103 | The runtime query system performs operations on data in the query pipeline by calling a set of library functions. These are defined in:
104 |
105 | * `riak_ql_window_agg_fns.erl`
106 |
107 | ## Testing Strategy
108 |
109 | Details of the testing strategy are written up in [`riak_test` And Test Strategy](./docs/riak_test_and_test_strategy.md)
110 |
--------------------------------------------------------------------------------
/dialyzer.ignore-warnings:
--------------------------------------------------------------------------------
1 | riak_ql_parser.yrl
2 | riak_ql_parser.erl
3 |
--------------------------------------------------------------------------------
/docs/ddl_compiler.md:
--------------------------------------------------------------------------------
1 | # Introduction To The DDL Compiler
2 |
3 | ```
4 | Table_def = "CREATE TABLE MyTable (myfamily varchar not null, myseries varchar not null, time timestamp not null, weather varchar not null, temperature double, PRIMARY KEY ((myfamily, myseries, quantum(time, 10, 'm')), myfamily, myseries, time))".
5 |
6 | {ok, DDL} = riak_ql_parser:parse(riak_ql_lexer:get_tokens(Table_def)).
7 | {ModName, AST} = riak_ql_ddl_compiler:compile(DDL).
8 |
9 | riak_ql_ddl_compiler:write_source_to_files("/tmp", DDL, AST).
10 | ```
11 |
12 | ### Generated DDL Modules
13 |
14 | The structure and interfaces of the generated modules is shown as per this `.erl` file which has been reverse generated from the AST that riak_kv_ddl_compiler emits. The comments contain details of the fields and keys used in the creation of the DDL.
15 |
16 | ```erlang
17 | %%% Generated Module, DO NOT EDIT
18 | %%%
19 | %%% Validates the DDL
20 | %%%
21 | %%% Table : timeseries_filter_test
22 | %%% Fields : [{riak_field_v1,<<"geohash">>,1,varchar,false},
23 | %%% {riak_field_v1,<<"user">>,2,varchar,false},
24 | %%% {riak_field_v1,<<"time">>,3,timestamp,false},
25 | %%% {riak_field_v1,<<"weather">>,4,varchar,false},
26 | %%% {riak_field_v1,<<"temperature">>,5,varchar,true}]
27 | %%% Partition_Key : {key_v1,[{hash_fn_v1,riak_ql_quanta,quantum,
28 | %%% [{param_v1,[<<"time">>]},15,s],
29 | %%% undefined}]}
30 | %%% Local_Key : {key_v1,[{param_v1,[<<"time">>]},{param_v1,[<<"user">>]}]}
31 | %%%
32 | %%%
33 | -module('riak_ql_table_timeseries_filter_test$1').
34 |
35 | -export([validate_obj/1, add_column_info/1,
36 | get_field_type/1, is_field_valid/1, extract/2,
37 | get_ddl/0]).
38 |
39 | validate_obj({Var1_geohash, Var2_user, Var3_time,
40 | Var4_weather, Var5_temperature})
41 | when Var5_temperature =:= [] orelse
42 | is_binary(Var5_temperature),
43 | is_binary(Var4_weather),
44 | is_integer(Var3_time) andalso Var3_time > 0,
45 | is_binary(Var2_user), is_binary(Var1_geohash) ->
46 | true;
47 | validate_obj(_) -> false.
48 |
49 | add_column_info({Var1_geohash, Var2_user, Var3_time,
50 | Var4_weather, Var5_temperature}) ->
51 | [{<<"geohash">>, Var1_geohash}, {<<"user">>, Var2_user},
52 | {<<"time">>, Var3_time}, {<<"weather">>, Var4_weather},
53 | {<<"temperature">>, Var5_temperature}].
54 |
55 | extract(Obj, [<<"geohash">>]) when is_tuple(Obj) ->
56 | element(1, Obj);
57 | extract(Obj, [<<"user">>]) when is_tuple(Obj) ->
58 | element(2, Obj);
59 | extract(Obj, [<<"time">>]) when is_tuple(Obj) ->
60 | element(3, Obj);
61 | extract(Obj, [<<"weather">>]) when is_tuple(Obj) ->
62 | element(4, Obj);
63 | extract(Obj, [<<"temperature">>]) when is_tuple(Obj) ->
64 | element(5, Obj).
65 |
66 | get_field_type([<<"geohash">>]) -> varchar;
67 | get_field_type([<<"user">>]) -> varchar;
68 | get_field_type([<<"time">>]) -> timestamp;
69 | get_field_type([<<"weather">>]) -> varchar;
70 | get_field_type([<<"temperature">>]) -> varchar.
71 |
72 | is_field_valid([<<"geohash">>]) -> true;
73 | is_field_valid([<<"user">>]) -> true;
74 | is_field_valid([<<"time">>]) -> true;
75 | is_field_valid([<<"weather">>]) -> true;
76 | is_field_valid([<<"temperature">>]) -> true;
77 | is_field_valid([<<"*">>]) -> true;
78 | is_field_valid(_) -> false.
79 |
80 | get_ddl() ->
81 | {ddl_v1, <<"timeseries_filter_test">>,
82 | [{riak_field_v1, <<"geohash">>, 1, varchar, false},
83 | {riak_field_v1, <<"user">>, 2, varchar, false},
84 | {riak_field_v1, <<"time">>, 3, timestamp, false},
85 | {riak_field_v1, <<"weather">>, 4, varchar, false},
86 | {riak_field_v1, <<"temperature">>, 5, varchar, true}],
87 | {key_v1,
88 | [{hash_fn_v1, riak_ql_quanta, quantum,
89 | [{param_v1, [<<"time">>]}, 15, s], undefined}]},
90 | {key_v1,
91 | [{param_v1, [<<"time">>]}, {param_v1, [<<"user">>]}]}}.
92 |
93 | ```
94 |
95 |
--------------------------------------------------------------------------------
/docs/img/SQL_Foundation_Doc.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/basho/riak_ql/359f2979d75d374b4ba7d0f9ac5a39149a9c2af4/docs/img/SQL_Foundation_Doc.png
--------------------------------------------------------------------------------
/docs/lexer_keywords.md:
--------------------------------------------------------------------------------
1 | # Introduction
2 |
3 | This document outlines how to add new keywords to the lexer:
4 |
5 | 1. Add them to the `priv/riak_ql_keywords.csv` file, one per line. Keeping this file in alphabetic order simplifies future changes.
6 | 2. Run `ruby priv/keyword_generator.rb` with Ruby 1.9.3 or newer.
7 | 3. Replace the keyword definitions near the top of `src/riak_ql_lexer.xrl` with the regex definitions (first chunk of output) from the message generator.
8 | 4. Replace the keyword rules in the `src/riak_ql_lexer.xrl` file with the second chunk of output from the message generator.
9 | 5. Save and commit the csv and lexer changes.
10 |
11 | If this is done correctly, the commit diff should simply be a few lines added to the csv and a few lines added to the lexer.
12 |
--------------------------------------------------------------------------------
/docs/lexer_parser.md:
--------------------------------------------------------------------------------
1 | # The Lexer-Parser
2 |
3 | ## 1 Overview
4 |
5 | The query system uses declarative SQL-like language to instruct riak to behave in certain ways.
6 |
7 | Wherever possible it uses pure SQL as defined in the SQL spec - if there is variation it has a prejudice to being compatible with PostgreSQL.
8 |
9 | The specification used can be found in our Google Docs folders:
10 | https://drive.google.com/drive/u/0/folders/0B-2H_9GJ40JpUGZld19GTHB3b2M
11 |
12 | To this end non-terminals in the parser MUST follow the structure of the SQL spec.
13 |
14 | The SQL statements are written as strings and are compiled first to proplists which are then converted to erlang records as shown:
15 |
16 | ```
17 | String Tokens Erlang
18 | Records
19 |
20 | Client +--------> Lexer +--------> Parser +----------> proplist -> #ddl_v1{}
21 | | CREATE
22 | | TABLE
23 | |
24 | |
25 | +----------> proplist -> #riak_ql_select_v1{}
26 | | SELECT
27 | | FROM
28 | |
29 | |
30 |                                          +----------> proplist -> #riak_sql_describe_v1{}
31 | DESCRIBE
32 | ```
33 |
34 | At the moment 3 types of statement are supported:
35 | - `CREATE TABLE` statements (these are not standard SQL)
36 | - SQL `SELECT FROM` queries (these conform to a valid sub-set of SQL)
37 | - SQL `DESCRIBE` queries (these conform to a valid sub-set of SQL)
38 |
39 | The lexer/parser functions are generated using the tools `leex` and `yecc` (the erlang equivalents of `lex` and `yacc`) from the language definition files `riak_ql_lexer.xrl` and `riak_ql_parser.yrl`. `rebar` detects these files at run time - and leex/yeccs them into `riak_ql_lexer.erl` and `riak_ql_parser.erl` which it then compiles as normal.
40 |
41 | These three data structures that are emitted are used in different ways.
42 |
43 | **NOTE:** to make inter-repository dependencies easier to manage the lexer/parser in `riak_ql` emits proplists of key/value pairs which are poured into the relevant record on ingest to `riak_kv`.
44 |
45 | ## 2 Emitted data structures
46 |
47 | ### 2.a The `CREATE TABLE` Record
48 |
49 | The create table function outputs a `ddl_v1{}` record which encapsulates the description of the table and is used by `riak_ql_ddl_compiler.erl` to generate a helper function which can be used against data that should conform to that definition. See the documentation for the [ddl compiler](./ddl_compiler.md)
50 |
51 | One of the peculiarities of the data structures emitted by the lexer-parser is that 'field names' are emitted as 'paths' - that is the field `<<"myfield">>` is represented as `[<<"myfield">>]`. The reason for this is to enable future implementation of sets and maps - that is nested data structures. In a world without maps the name of a field is a degenerate path of length 1. If the top level construct was a map called `<<"mymap">>` which contained named fields `<<"field1">>`, `<<"field2>>"` etc, etc, these would be accessed by the paths that looked like `[<<"mymap">>, <<"field1">>]`.
52 |
53 | ### 2.b The `SELECT FROM` Record
54 |
55 | The record `#riak_ql_select_v1{}` is the backbone of the query system. It goes through a set of transformations described in the documentation [the query pipeline](./the_query_pipeline.md). For ease of exposition the fields that correspond to the clauses of a SQL SELECT statement are in upper-case, eg 'SELECT', 'FROM', 'WHERE', 'ORDER BY' and 'LIMIT'.
56 |
57 | Fields are emitted by the lexer/parser in a manner that is consonant with the path description in the section on the `CREATE TABLE` record.
58 |
59 | The `#riak_ql_select_v1{}` is validated, checked and rewritten as described in the document the [Query Pipeline](./docs/the_query_pipeline.md)
60 |
61 | ### 2.c The `DESCRIBE` Record
62 |
63 | The record `#riak_sql_describe{}` at the moment simply contains the name of the table to be described. Its processing is trivial: on being passed to the query system, the appropriate DDL is read and converted into a user-friendly form and returned to the user.
64 |
65 | ## 3 Design Principles
66 |
67 | ### 3.a Conformity
68 |
69 | There are two prejudices that inform development of the SQL toolchain in `riak_ql`:
70 | * fidelity to the SQL spec
71 | * in case of uncertainty be compatible with PostgreSQL
72 |
73 | ### 3.b Lisp-like
74 |
75 | Various intermediate data structures emitted by the lexer/parser (particularly for `SELECT` queries) are YASL, or Lisp-like. As a general design rule we are moving towards a cleaner Lisp representation.
76 |
77 | ## 4 TODO/Outstanding Questions
78 |
79 | * how deep do we want to go with the lexer/parser?
80 | * do we want to describe the various components?
--------------------------------------------------------------------------------
/docs/riak_test_and_test_strategy.md:
--------------------------------------------------------------------------------
1 | # Testing The Query System
2 |
3 | This document can be displayed as a presentation with:
4 | http://remarkjs.com/remarkise
5 |
6 | ---
7 |
8 | ## Overview
9 |
10 | The testing strategy for the query system follows a tiered-approach.
11 |
12 | The purpose of this is to enable developers to be 'slapdash' in their changes secure in the knowledge that:
13 | * there is great regression coverage
14 | * the test suite will triage bugs for them and inform them in what order to fix bugs
15 |
16 | In addition `riak-shell` has a quick-regression capability.
17 |
18 | ---
19 |
20 | ## Testing _Layers_
21 |
22 | There are six layers of tests, which should be fixed in descending order:
23 |
24 | 1 plain `eunit` tests
25 | * simple
26 | * QuickCheck
27 |
28 | 2 `ts_simple_*` tests in `riak_test`
29 |
30 | 3 `ts_cluster_*` tests in `riak_test`
31 |
32 | 4 `ts_degraded_*` tests in `riak_test`
33 |
34 | 5 `ts_*_eqc` QuickCheck tests running under `riak_test` (these are still under development)
35 | * 'good' input tests based on generating valid inputs
36 | * 'bad' input tests based on generating invalid inputs
37 |
38 | 6 `ts_upgrade_downgrade_*_eqc` upgrade/downgrade QuickCheck tests running under `riak_test` (these are still under development)
39 |
40 | ---
41 |
42 | ## Test Types
43 |
44 | Unit tests run against individual modules
45 |
46 | The `ts_simple_*` tests run against a cluster of 1 node.
47 |
48 | The `ts_cluster_*` tests run against a full cluster.
49 |
50 | The `ts_degraded_*` tests run against a cluster with at least 1 member taken down.
51 |
52 | The theory is that bugs that a test suite checks for cast shadows down the suite - so if there is a unit test failing in 1 - then it should appear as failures in some or all of 2, 3, 4, 5 and 6.
53 |
54 | The main Quick Check tests are the backstop.
55 |
56 | ---
57 |
58 | ## Self Triaging Test Suite
59 |
60 | The 'slapdash' approach is to make a set of changes and then:
61 |
62 | * get a bloodbath of failing tests
63 | * fix the type 1 test fails
64 | * retest
65 | * fix the type 2 test fails
66 | * retest
67 | * rinse, repeat up the ladder.
68 |
69 | To help with this approach TS also uses the standard Erlang Common Test (`ct`) framework. This allows for more granular testing and better reporting.
70 |
71 | ---
72 |
73 | ## Common Test and `riak_test`
74 |
75 | There are a number of misconceptions about the relationship of `riak_test` and `ct`.
76 |
77 | `riak_test` is a large and powerful set of libraries for manipulating riak clusters - including intercept facilities for inserting code shims into running riak nodes and instrumenting sub-systems for testing purposes. `riak_test` also includes an underpowered and fairly primitive test runner.
78 |
79 | `ct` is an industrial-strength battle hardened test runner for handling tens and hundreds of thousands of tests, distributed over dozens of test machines. `ct` knows less than nothing about riak and cares even less.
80 |
81 | In Time Series we have the `riak_test` runner invoke suites of `ct` tests and let `ct` handle the orchestration. Individual tests in the `ct` suite use the various powerful `riak_test` library functions to manipulate the riak cluster under test as appropriate.
82 |
83 | ---
84 |
85 | ## Relationship Of `ct` To `riak_test`
86 |
87 | This is a Venn Diagram of the overlap:
88 |
89 | ```
90 | +------------------------------------+ +-------------------------------------+
91 | | | | |
92 | | +-------------+ | | |
93 | | Common Test | | | | Riak Test |
94 | | | Giddy Up | | | intercept and cluster manipulation |
95 | | | | | | libraries |
96 | | +-------------+ | | |
97 | | | | |
98 | | +-------------+ | | |
99 | | | | | | |
100 | | | riak_test | | | |
101 | | | test runner | | | |
102 | | | | | | |
103 | | +-------------+ | | |
104 | | | | |
105 | +------------------------------------+ +-------------------------------------+
106 | ```
107 |
108 | ---
109 |
110 | ## `riak-shell` And `riak_test`
111 |
112 | `riak_shell` can capture user logs which can be fed back into `riak_shell` in either **replay** or **regression** mode.
113 |
114 | Details of how to capture these logs is given in the `riak_shell` README:
115 | https://github.com/basho/riak_shell/blob/riak_ts-develop/README.md
116 |
117 | These regression logs can be productionised in `riak_test`. For an example see:
118 | https://github.com/basho/riak_test/blob/riak_ts-develop/tests/ts_cluster_riak_shell_regression_log.erl
119 |
120 | ---
121 |
122 | # Fin
--------------------------------------------------------------------------------
/include/riak_ql_ddl.hrl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% riak_ql_ddl.hrl: defines records used in the data description language
4 | %%
5 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
6 | %%
7 | %% This file is provided to you under the Apache License,
8 | %% Version 2.0 (the "License"); you may not use this file
9 | %% except in compliance with the License. You may obtain
10 | %% a copy of the License at
11 | %%
12 | %% http://www.apache.org/licenses/LICENSE-2.0
13 | %%
14 | %% Unless required by applicable law or agreed to in writing,
15 | %% software distributed under the License is distributed on an
16 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
17 | %% KIND, either express or implied. See the License for the
18 | %% specific language governing permissions and limitations
19 | %% under the License.
20 | %%
21 | %% -------------------------------------------------------------------
22 |
-ifndef(RIAK_QL_DDL_HRL).
-define(RIAK_QL_DDL_HRL, included).

%% NOTE: Every time there is a change to the DDL helper
%% or anything related to DDL changes, this number must be
%% incremented. It is independent of the DDL record version (below).
-define(RIAK_QL_DDL_COMPILER_VERSION, 2).

%% One column of a table: its name, 1-based position within the table
%% definition, its type, and whether it is nullable (optional = true).
-record(riak_field_v1, {
          name = <<>> :: binary(),
          position :: undefined | pos_integer(),
          type :: undefined | riak_ql_ddl:external_field_type(),
          optional = false :: boolean()
         }).


%% A key-field reference. v2 adds the sort ordering (ASC/DESC); code
%% should refer to the current version via the ?SQL_PARAM* macros
%% rather than naming the record directly.
-define(SQL_PARAM, #param_v2).
-define(SQL_PARAM_RECORD_NAME, param_v2).
-define(SQL_PARAM_RECORD_VERSION, v2).
-record(param_v1, {
          name = [<<>>] :: [binary()]
         }).
-record(?SQL_PARAM_RECORD_NAME, {
          name = [<<>>] :: [binary()],
          ordering = undefined :: undefined | ascending | descending
         }).

%% A function applied to a key field in the partition key (e.g. the
%% quantum function): the module/function to call and its arguments.
-record(hash_fn_v1, {
          mod :: atom(),
          fn :: atom(),
          args = [] :: [?SQL_PARAM{} | any()],
          type :: riak_ql_ddl:external_field_type()
         }).

%% A (partition or local) key: an ordered list of key fields and/or
%% hash functions over them.
-define(DDL_KEY, #key_v1).
-define(DDL_KEY_RECORD_NAME, key_v1).
-record(key_v1, {
          ast = [] :: [#hash_fn_v1{} | ?SQL_PARAM{}]
         }).

%% The table definition (DDL). Use the ?DDL* macros for the current
%% version; the v1 record is retained for upgrade/compatibility.
-define(DDL, #ddl_v2).
-define(DDL_RECORD_NAME, ddl_v2).
-define(DDL_RECORD_VERSION, v2).
-type ddl_version() :: v1 | v2.
-record(ddl_v1, {
          table :: binary(),
          fields = [] :: [#riak_field_v1{}],
          partition_key :: #key_v1{} | none,
          local_key :: #key_v1{}
         }).
-record(?DDL_RECORD_NAME, {
          table :: binary(),
          fields = [] :: [#riak_field_v1{}],
          partition_key :: #key_v1{} | none,
          local_key :: #key_v1{},
          %% lowest DDL record version a consumer must understand in
          %% order to use this table definition
          minimum_capability = v1 :: ddl_version()
         }).

%% SQL NULL is represented as the empty list throughout riak_ql
-define(SQL_NULL, []).

-endif.
84 |
--------------------------------------------------------------------------------
/priv/keyword_generator.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | # -------------------------------------------------------------------
3 | #
4 | # Generate the Erlang keyword definition from the CSV file
5 | #
6 | # Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
7 | #
8 | # This file is provided to you under the Apache License,
9 | # Version 2.0 (the "License"); you may not use this file
10 | # except in compliance with the License. You may obtain
11 | # a copy of the License at
12 | #
13 | # http://www.apache.org/licenses/LICENSE-2.0
14 | #
15 | # Unless required by applicable law or agreed to in writing,
16 | # software distributed under the License is distributed on an
17 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | # KIND, either express or implied. See the License for the
19 | # specific language governing permissions and limitations
20 | # under the License.
21 | #
22 | # -------------------------------------------------------------------
23 |
require 'csv'
require 'pp'

# Keywords that collide with Erlang reserved words; their token names
# get a trailing underscore in the generated rules (e.g. `and_`).
ERLANG_RESERVED = %w{and of or not}

# CSV file may be given as the first argument; defaults to the file
# shipped next to this script.
csv_filename =
  ARGV[0] ||
  File.join(File.dirname(__FILE__), 'riak_ql_keywords.csv')

keywords = Hash.new

CSV.foreach(csv_filename) do |row|
  keyword = row.first
  # A blank row (e.g. a trailing empty line in the CSV) yields nil and
  # would crash on nil.gsub below, so skip it.
  next if keyword.nil?
  kw_name = keyword.gsub(/\s+/,'_').upcase
  keywords[kw_name] = keyword
end

# definitions — a case-insensitive leex regexp per keyword, e.g.
# SELECT = (S|s)(E|e)(L|l)(E|e)(C|c)(T|t)
keywords.each do |kw_name, keyword|
  kw_regexp =
    keyword.chars.map do |c|
      next '\s' if c =~ /\s/
      next c unless c =~ /[a-zA-Z]/
      "(#{c.upcase}|#{c.downcase})"
    end.join

  puts "#{kw_name} = #{kw_regexp}"
end

puts

# rules — one leex token rule per keyword, e.g.
# {SELECT} : {token, {select, TokenChars}}.
keywords.each do |kw_name, keyword|
  kw_token = kw_name.downcase
  if ERLANG_RESERVED.include? kw_token
    kw_token += '_'
  end
  puts "{#{kw_name}} : {token, {#{kw_token}, list_to_binary(TokenChars)}}."
end
65 |
--------------------------------------------------------------------------------
/priv/riak_ql_keywords.csv:
--------------------------------------------------------------------------------
1 | and
2 | asc
3 | boolean
4 | by
5 | create
6 | desc
7 | describe
8 | double
9 | false
10 | from
11 | insert
12 | into
13 | key
14 | limit
15 | not
16 | null
17 | offset
18 | or
19 | order
20 | primary
21 | quantum
22 | select
23 | show
24 | sint64
25 | table
26 | timestamp
27 | true
28 | values
29 | varchar
30 | where
31 |
--------------------------------------------------------------------------------
/rebar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/basho/riak_ql/359f2979d75d374b4ba7d0f9ac5a39149a9c2af4/rebar
--------------------------------------------------------------------------------
/rebar.config:
--------------------------------------------------------------------------------
%% -*- erlang -*-
%% Coverage analysis is off by default; enable locally when needed.
{cover_enabled, false}.
{erl_opts, [
            debug_info,
            warnings_as_errors,
            %% NOTE(review): presumably this include path is only used
            %% when built inside a full Riak checkout — confirm
            {i, "deps/riak_kv/include"}
           ]}.

{xref_warnings, false}.

{xref_checks, [undefined_function_calls, undefined_functions,
               locals_not_used,
               deprecated_function_calls, deprecated_functions]}.

%% run eunit through the unite compact reporter
{eunit_opts, [no_tty, {report, {unite_compact, []}}]}.

%% == escriptize ==
%% emulator args for the riak_ql lint escript (entry: riak_ql_cmd)
{escript_emu_args, "%%! -escript main riak_ql_cmd -smp disable +A 0\n"}.
{escript_incl_apps, [sext]}.

{deps, [
        {unite, "", {git, "https://github.com/basho/unite.git", {tag, "v0.0.1p2"}}},
        {sext, ".*", {git, "https://github.com/basho/sext.git", {tag, "1.1p6"}}},
        {mochiweb, ".*", {git, "https://github.com/basho/mochiweb.git", {tag, "v2.9.0p2"}}},
        {merl, ".*", {git, "https://github.com/basho/merl.git", {tag, "0.1.0-basho"}}}
       ]}.
27 |
--------------------------------------------------------------------------------
/src/riak_ql.app.src:
--------------------------------------------------------------------------------
%% -*- tab-width: 4;erlang-indent-level: 4;indent-tabs-mode: nil -*-
%% ex: ts=4 sw=4 et
%% Application resource file; {vsn, git} is substituted with the
%% current git version by rebar at build time.
{application, riak_ql,
 [
  {description, "Query language libraries for Riak"},
  {vsn, git},
  {applications, [
                  kernel,
                  stdlib,
                  sasl
                 ]},
  {registered, []},
  {env, [
        ]}
 ]}.
16 |
--------------------------------------------------------------------------------
/src/riak_ql_cmd.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% An escript command for linting riak_ql queries. Create the
4 | %% riak_ql command by running:
5 | %%
6 | %% ./rebar escriptize
7 | %%
8 | %% There should now be a runnable riak_ql file in the project
9 | %% directory. Usage:
10 | %%
11 | %% ./riak_ql "SELECT * FROM my_table"
12 | %% ./riak_ql "CREATE TABLE my_table(my_field int, PRIMARY KEY(my_field))"
13 | %%
14 | %% This lints the last given argument, a syntax error is printed
15 | %% if one exists, otherwise there is no output. To print out the
16 | %% generated ddl add -ddl before the last command:
17 | %%
18 | %% ./riak_ql -ddl "SELECT * FROM my_table"
19 | %%
20 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
21 | %%
22 | %% This file is provided to you under the Apache License,
23 | %% Version 2.0 (the "License"); you may not use this file
24 | %% except in compliance with the License. You may obtain
25 | %% a copy of the License at
26 | %%
27 | %% http://www.apache.org/licenses/LICENSE-2.0
28 | %%
29 | %% Unless required by applicable law or agreed to in writing,
30 | %% software distributed under the License is distributed on an
31 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
32 | %% KIND, either express or implied. See the License for the
33 | %% specific language governing permissions and limitations
34 | %% under the License.
35 | %%
36 | %% -------------------------------------------------------------------
37 | -module(riak_ql_cmd).
38 |
39 | -export([main/1]).
40 |
41 | -include("riak_ql_ddl.hrl").
42 |
43 | %%
44 | main([_|_] = Args) ->
45 | Query = lists:last(Args),
46 | Lexed = riak_ql_lexer:get_tokens(Query),
47 | case riak_ql_parser:ql_parse(Lexed) of
48 | {ddl, DDL, WithProperties} ->
49 | maybe_print_ddl(Args, {DDL, WithProperties});
50 | {error, {Token,_,_}} ->
51 | io:format("Error: syntax error before ~s~n", [Token]),
52 | % return an error code for the proc if an error has occurred
53 | erlang:halt(1);
54 | {_, SQL} ->
55 | maybe_print_ddl(Args, SQL)
56 | end;
57 | main([]) ->
58 | io:format(
59 | "Invalid usage, try: ./riak_ql \"SELECT * FROM my_table\"~n").
60 |
61 | %%
62 | maybe_print_ddl(Args, DDL) ->
63 | case lists:member("-ddl", Args) of
64 | true -> io:format("~p~n", [DDL]);
65 | false -> ok
66 | end.
67 |
--------------------------------------------------------------------------------
/src/riak_ql_component.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% riak_ql_component: Common Components Code (CCC)
4 | %%
5 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
6 | %%
7 | %% This file is provided to you under the Apache License,
8 | %% Version 2.0 (the "License"); you may not use this file
9 | %% except in compliance with the License. You may obtain
10 | %% a copy of the License at
11 | %%
12 | %% http://www.apache.org/licenses/LICENSE-2.0
13 | %%
14 | %% Unless required by applicable law or agreed to in writing,
15 | %% software distributed under the License is distributed on an
16 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
17 | %% KIND, either express or implied. See the License for the
18 | %% specific language governing permissions and limitations
19 | %% under the License.
20 | %%
21 | %% -------------------------------------------------------------------
-module(riak_ql_component).

%% API
%% A positive integer identifying the version of a riak_ql component;
%% exported for use by code elsewhere that tracks component versions.
-type component_version() :: pos_integer().
-export_type([component_version/0]).
28 |
--------------------------------------------------------------------------------
/src/riak_ql_ddl_util.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% riak_ql_ddl_util: utility functions for use in the riak ddl helper
4 | %% modules generated by the riak_ql_ddl_compiler
5 | %%
6 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 | -module(riak_ql_ddl_util).
24 |
25 | -export([
26 | is_valid_delete_where_clause/1,
27 | make_delete_key/2
28 | ]).
29 |
30 | -include("riak_ql_ddl.hrl").
31 |
32 | -define(EMPTY_ERRORS, []).
33 | -define(EMPTY_KVs, []).
34 |
%% Build the key for a DELETE from the key AST and the (already
%% validated) WHERE clause. Succeeds only when the WHERE clause binds
%% exactly the key's fields — no more, no fewer — otherwise returns
%% {error, ["invalid key"]}.
make_delete_key(#key_v1{ast = AST}, Where) ->
    KVs = make_KV(Where, ?EMPTY_KVs),
    Key = [proplists:get_value(FieldName, KVs) || ?SQL_PARAM{name = [FieldName]} <- AST],
    Bound = [V || V <- Key, V =/= undefined],
    %% all fields in the WHERE clause must be in the AST, therefore all
    %% three lengths must be the same
    ASTLen = length(AST),
    case {length(KVs), length(Bound)} of
        {ASTLen, ASTLen} -> {ok, Key};
        _                -> {error, ["invalid key"]}
    end.
51 |
%% true when the WHERE clause of a DELETE contains only equality
%% conditions joined by AND; otherwise {error, Messages}.
is_valid_delete_where_clause(WhereClause) ->
    case is_valid2(WhereClause, ?EMPTY_ERRORS) of
        []               -> true;
        ValidationErrors -> {error, ValidationErrors}
    end.
57 |
%% Recursively validate a WHERE-clause AST: only `=' comparisons and
%% `and_' conjunctions are allowed. Returns a flat list of error
%% messages ([] when valid).
is_valid2([], Acc) ->
    %% we don't care about normalising the errors, but we do about
    %% the results
    lists:flatten(Acc);
is_valid2([{and_, LHS, RHS} | Rest], Acc) ->
    LhsErrors = is_valid2([LHS], []),
    RhsErrors = is_valid2([RHS], []),
    is_valid2(Rest, LhsErrors ++ RhsErrors ++ Acc);
is_valid2([{'=', _Field, _Value} | Rest], Acc) ->
    is_valid2(Rest, Acc);
is_valid2([Unexpected | Rest], Acc) ->
    Msg = io_lib:format("Invalid clause ~p~n", [Unexpected]),
    is_valid2(Rest, [Msg | Acc]).
71 |
%% Walk a (valid) WHERE-clause AST and collect {Field, Value} pairs
%% from the `=' conditions, flattened into a single proplist.
make_KV([], Acc) ->
    lists:flatten(Acc);
make_KV([{and_, LHS, RHS} | Rest], Acc) ->
    make_KV(Rest, make_KV([LHS], []) ++ make_KV([RHS], []) ++ Acc);
make_KV([{'=', Field, {_Type, Value}} | Rest], Acc) ->
    make_KV(Rest, [{Field, Value} | Acc]).
80 |
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").

%% equality conditions joined only by AND validate to true
is_valid_where_clause_test() ->
    W = [{and_,
          {'=',<<"time">>,{integer,11}},
          {and_,
           {'=',<<"region">>,{binary,<<"aaa">>}},
           {'=',<<"state">>,{binary,<<"bbb">>}}}}
        ],
    Expected = true,
    ?assertEqual(Expected, is_valid_delete_where_clause(W)).

%% an OR condition is rejected; only the message prefix is checked
is_valid_where_clause_failing_1_test() ->
    W = [{and_,
          {'=',<<"time">>,{integer,11}},
          {or_,
           {'=',<<"region">>,{binary,<<"aaa">>}},
           {'=',<<"state">>,{binary,<<"bbb">>}}}}
        ],
    Expected = "Invalid clause ",
    {error, Errors} = is_valid_delete_where_clause(W),
    {Got, _} = lists:split(15, Errors),
    ?assertEqual(Expected, Got).

%% non-equality operators ('>' and arbitrary atoms) are rejected too
is_valid_where_clause_failing_2_test() ->
    W = [{and_,
          {'=',<<"time">>,{integer,11}},
          {and_,
           {'>',<<"region">>,{binary,<<"aaa">>}},
           {flobber,<<"state">>,{binary,<<"bbb">>}}}}
        ],
    Expected = "Invalid clause ",
    {error, Errors} = is_valid_delete_where_clause(W),
    {Got, _} = lists:split(15, Errors),
    ?assertEqual(Expected, Got).

%% the key values come out in key-AST order, not WHERE-clause order
make_key_test() ->
    W = [{and_,
          {'=',<<"time">>,{integer,11}},
          {and_,
           {'=',<<"region">>,{binary,<<"aaa">>}},
           {'=',<<"state">>,{binary,<<"bbb">>}}}}
        ],
    Key = #key_v1{ast = [
                         ?SQL_PARAM{name = [<<"region">>]},
                         ?SQL_PARAM{name = [<<"state">>]},
                         ?SQL_PARAM{name = [<<"time">>]}
                        ]},
    {ok, Got} = make_delete_key(Key, W),
    Expected = [<<"aaa">>, <<"bbb">>, 11],
    ?assertEqual( Expected, Got).

-endif.
135 |
--------------------------------------------------------------------------------
/src/riak_ql_describe.erl:
--------------------------------------------------------------------------------
1 | %%-------------------------------------------------------------------
2 | %%
3 | %% riak_ql_describe
4 | %%
5 | %% Copyright (C) 2016 Basho Technologies, Inc. All rights reserved
6 | %%
7 | %% This file is provided to you under the Apache License,
8 | %% Version 2.0 (the "License"); you may not use this file
9 | %% except in compliance with the License. You may obtain
10 | %% a copy of the License at
11 | %%
12 | %% http://www.apache.org/licenses/LICENSE-2.0
13 | %%
14 | %% Unless required by applicable law or agreed to in writing,
15 | %% software distributed under the License is distributed on an
16 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
17 | %% KIND, either express or implied. See the License for the
18 | %% specific language governing permissions and limitations
19 | %% under the License.
20 | %%
21 | %%-------------------------------------------------------------------
22 |
23 | -module(riak_ql_describe).
24 |
25 | -export([describe/1]).
26 |
27 | -include("riak_ql_ddl.hrl").
28 |
29 | %%
30 | -spec describe(?DDL{}) -> {ok, {ColNames::[binary()],
31 | ColTypes::[riak_ql_ddl:external_field_type()],
32 | Rows::[[any()]]}}.
33 | describe(?DDL{fields = FieldSpecs,
34 | partition_key = #key_v1{ast = PKSpec},
35 | local_key = #key_v1{ast = LKSpec}}) ->
36 | ColumnNames = [<<"Column">>, <<"Type">>, <<"Nullable">>, <<"Partition Key">>, <<"Local Key">>, <<"Interval">>, <<"Unit">>, <<"Sort Order">>],
37 | ColumnTypes = [ varchar, varchar, boolean, sint64, sint64, sint64, varchar, varchar],
38 | Quantum = find_quantum_field(PKSpec),
39 | Rows =
40 | [[Name,
41 | atom_to_binary(Type, latin1),
42 | Nullable,
43 | column_pk_position(Name, PKSpec),
44 | column_lk_position(Name, LKSpec),
45 | column_quantum_interval(Name, Quantum),
46 | column_quantum_unit(Name, Quantum),
47 | column_lk_order(Name, LKSpec)
48 | ]
49 | || #riak_field_v1{name = Name,
50 | type = Type,
51 | optional = Nullable} <- FieldSpecs],
52 | {ok, {ColumnNames, ColumnTypes, Rows}}.
53 |
%% Return the sort order of the local key for this column, or null if it is not
%% a local key or has an undefined sort order.
column_lk_order(Name, LK) when is_binary(Name) ->
    %% LK is a list of ?SQL_PARAM (param_v2) records, so search on the
    %% param record's own `name' field index. The previous code used
    %% #riak_field_v1.name here, which only worked because `name'
    %% happens to occupy position 2 in both records.
    case lists:keyfind([Name], ?SQL_PARAM.name, LK) of
        ?SQL_PARAM{ordering = descending} ->
            <<"DESC">>;
        ?SQL_PARAM{ordering = ascending} ->
            <<"ASC">>;
        _ ->
            ?SQL_NULL
    end.
65 |
%% the following two functions are identical, for the way fields and
%% keys are represented as of 2015-12-18; duplication here is a hint
%% of things to come.

%% 1-based position of the column in the partition key, or null ([]).
-spec column_pk_position(binary(), [?SQL_PARAM{}]) -> integer() | [].
column_pk_position(Col, KSpec) ->
    find_column_index(Col, KSpec, 1).

%% 1-based position of the column in the local key, or null ([]).
-spec column_lk_position(binary(), [?SQL_PARAM{}]) -> integer() | [].
column_lk_position(Col, KSpec) ->
    find_column_index(Col, KSpec, 1).
76 |
%% Extract the quantum column information, if it exists in the table definition
%% and put in two additional columns

%% The quantum interval (e.g. 15) when Col is the quantum column of the
%% given hash fn; null ([]) otherwise.
-spec column_quantum_interval(Col :: binary(), PKSpec::#hash_fn_v1{}|[]) ->
                              integer()|[].
column_quantum_interval(Col, #hash_fn_v1{args = [?SQL_PARAM{name = [Col]}, Interval, _]}) ->
    Interval;
column_quantum_interval(_, _) ->
    ?SQL_NULL.
85 |
%% The quantum unit (e.g. <<"m">>) when Col is the quantum column of
%% the given hash fn; null ([]) otherwise.
-spec column_quantum_unit(Col::binary(), PKSpec::#hash_fn_v1{}|[]) ->
                          binary()|[].
column_quantum_unit(Col, #hash_fn_v1{args = [?SQL_PARAM{name = [Col]}, _, Unit]}) ->
    atom_to_binary(Unit, latin1);
column_quantum_unit(_, _) ->
    ?SQL_NULL.
92 |
%% Find the field associated with the quantum, if there is one: the
%% first #hash_fn_v1{} entry in the key AST, or null ([]) if none.
-spec find_quantum_field([?SQL_PARAM{}|#hash_fn_v1{}]) -> [] | #hash_fn_v1{}.
find_quantum_field([#hash_fn_v1{} = HashFn | _Rest]) ->
    HashFn;
find_quantum_field([_Other | Rest]) ->
    find_quantum_field(Rest);
find_quantum_field([]) ->
    ?SQL_NULL.
101 |
%% 1-based index at which Col appears in a key AST, either directly as
%% a key param or as the first argument of a hash fn (quantum); null
%% ([]) when the column is not in the key.
find_column_index(_Col, [], _Index) ->
    ?SQL_NULL;
find_column_index(Col, [?SQL_PARAM{name = [Col]} | _], Index) ->
    Index;
find_column_index(Col, [#hash_fn_v1{args = [?SQL_PARAM{name = [Col]} | _]} | _], Index) ->
    Index;
find_column_index(Col, [_Other | Rest], Index) ->
    find_column_index(Col, Rest, Index + 1).
110 |
111 | %%-------------------------------------------------------------------
112 | %% Unit tests
113 | %%-------------------------------------------------------------------
114 |
115 | -ifdef(TEST).
116 | -include_lib("eunit/include/eunit.hrl").
117 |
118 | assert_column_values(ColName, Expected, {Cols, _, Rows}) when is_binary(ColName),
119 | is_list(Expected) ->
120 | Index = (catch lists:foldl(
121 | fun(E, Acc) when E == ColName ->
122 | throw(Acc);
123 | (_, Acc) ->
124 | Acc + 1
125 | end, 1, Cols)),
126 | % ?debugFmt("INDEX ~p COLS ~p~nROWS ~p", [Index, Cols, Rows]),
127 | Actual = [lists:nth(Index,R) || R <- Rows],
128 | ?assertEqual(
129 | Expected,
130 | Actual
131 | ).
132 |
133 | names_types_and_rows_are_same_length_test() ->
134 | {ddl, DDL, []} =
135 | riak_ql_parser:ql_parse(
136 | riak_ql_lexer:get_tokens(
137 | "CREATE TABLE tab ("
138 | "a varchar NOT NULL,"
139 | "b varchar NOT NULL,"
140 | "c timestamp NOT NULL,"
141 | "PRIMARY KEY ((a, b, quantum(c, 15, m)), a, b, c))")),
142 | {ok, {Names, Types, [Row|_]}} = describe(DDL),
143 | ?assertEqual(length(Names), length(Types)),
144 | ?assertEqual(length(Names), length(Row)).
145 |
146 | describe_table_column_names_test() ->
147 | {ddl, DDL, []} =
148 | riak_ql_parser:ql_parse(
149 | riak_ql_lexer:get_tokens(
150 | "CREATE TABLE tab ("
151 | "a VARCHAR NOT NULL,"
152 | "b VARCHAR NOT NULL,"
153 | "c TIMESTAMP NOT NULL,"
154 | "PRIMARY KEY ((a, b, quantum(c, 15, m)), a, b, c))")),
155 | {ok, Result} = describe(DDL),
156 | assert_column_values(
157 | <<"Column">>,
158 | [<<"a">>, <<"b">>, <<"c">>],
159 | Result
160 | ).
161 |
162 | describe_nullable_test() ->
163 | {ddl, DDL, []} =
164 | riak_ql_parser:ql_parse(
165 | riak_ql_lexer:get_tokens(
166 | "CREATE TABLE tab ("
167 | " f VARCHAR NOT NULL,"
168 | " s VARCHAR NOT NULL,"
169 | " t TIMESTAMP NOT NULL,"
170 | " w SINT64,"
171 | " p DOUBLE,"
172 | " PRIMARY KEY ((f, s, t), f, s, t))")),
173 | {ok, Result} = describe(DDL),
174 | assert_column_values(<<"Nullable">>, [false,false,false,true,true], Result).
175 |
176 | describe_table_quantum_test() ->
177 | {ddl, DDL, []} =
178 | riak_ql_parser:ql_parse(
179 | riak_ql_lexer:get_tokens(
180 | "CREATE TABLE tab ("
181 | "a VARCHAR NOT NULL,"
182 | "b VARCHAR NOT NULL,"
183 | "c TIMESTAMP NOT NULL,"
184 | "PRIMARY KEY ((a, b, quantum(c, 15, m)), a, b, c DESC))")),
185 | {ok, Result} = describe(DDL),
186 | assert_column_values(<<"Interval">>, [[], [], 15], Result),
187 | assert_column_values(<<"Unit">>, [[], [], <<"m">>], Result).
188 |
189 | describe_table_column_types_test() ->
190 | {ddl, DDL, []} =
191 | riak_ql_parser:ql_parse(
192 | riak_ql_lexer:get_tokens(
193 | "CREATE TABLE tab ("
194 | "a VARCHAR NOT NULL,"
195 | "b SINT64 NOT NULL,"
196 | "c TIMESTAMP NOT NULL,"
197 | "d DOUBLE NOT NULL,"
198 | "e BOOLEAN NOT NULL,"
199 | "PRIMARY KEY ((a), a))")),
200 | {ok, Result} = describe(DDL),
201 | assert_column_values(
202 | <<"Type">>,
203 | [<<"varchar">>, <<"sint64">>, <<"timestamp">>, <<"double">>, <<"boolean">>],
204 | Result
205 | ).
206 |
207 | describe_table_columns_no_quantum_test() ->
208 | {ddl, DDL, []} =
209 | riak_ql_parser:ql_parse(
210 | riak_ql_lexer:get_tokens(
211 | "CREATE TABLE fafa ("
212 | " f VARCHAR NOT NULL,"
213 | " s VARCHAR NOT NULL,"
214 | " t TIMESTAMP NOT NULL,"
215 | " w SINT64 NOT NULL,"
216 | " p DOUBLE,"
217 | " PRIMARY KEY ((f, s, t), f, s, t))")),
218 | {ok, Result} = describe(DDL),
219 | NullRow = [[],[],[],[],[]],
220 | assert_column_values(<<"Interval">>, NullRow, Result),
221 | assert_column_values(<<"Unit">>, NullRow, Result).
222 |
223 | describe_table_descending_keys_test() ->
224 | {ddl, DDL, []} =
225 | riak_ql_parser:ql_parse(
226 | riak_ql_lexer:get_tokens(
227 | "CREATE TABLE tab ("
228 | "a VARCHAR NOT NULL,"
229 | "b VARCHAR NOT NULL,"
230 | "c TIMESTAMP NOT NULL,"
231 | "PRIMARY KEY ((a, b, quantum(c, 15, m)), a, b, c DESC))")),
232 | {ok, Result} = describe(DDL),
233 | assert_column_values(
234 | <<"Sort Order">>,
235 | [[], [], <<"DESC">>],
236 | Result
237 | ).
238 |
239 | describe_table_ascending_keys_test() ->
240 | {ddl, DDL, []} =
241 | riak_ql_parser:ql_parse(
242 | riak_ql_lexer:get_tokens(
243 | "CREATE TABLE tab ("
244 | "a VARCHAR NOT NULL,"
245 | "b VARCHAR NOT NULL,"
246 | "c TIMESTAMP NOT NULL,"
247 | "PRIMARY KEY ((a, b, quantum(c, 15, m)), a, b ASC, c ASC))")),
248 | {ok, Result} = describe(DDL),
249 | assert_column_values(
250 | <<"Sort Order">>,
251 | [[], <<"ASC">>, <<"ASC">>],
252 | Result
253 | ).
254 |
255 | -endif.
256 |
--------------------------------------------------------------------------------
/src/riak_ql_inverse_distrib_fns.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% riak_ql_inverse_distrib_fns: implementation of inverse distribution functions
4 | %% for the query runner
5 | %%
6 | %% Copyright (c) 2017 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 | -module(riak_ql_inverse_distrib_fns).
24 |
25 |
26 | -export(['PERCENTILE_DISC'/3,
27 | 'PERCENTILE_CONT'/3,
28 | 'MEDIAN'/3,
29 | 'MODE'/3]).
30 | -export([fn_arity/1,
31 | fn_type_signature/2,
32 | fn_param_check/2,
33 | supported_functions/0]).
34 |
35 | -type invdist_function() :: 'PERCENTILE_CONT'
36 | | 'PERCENTILE_DISC'
37 | | 'MEDIAN'
38 | | 'MODE'.
39 | -export_type([invdist_function/0]).
40 |
41 | -include("riak_ql_ddl.hrl").
42 |
%% The inverse-distribution (percentile-family) functions this module
%% implements.
supported_functions() ->
    ['PERCENTILE_DISC', 'PERCENTILE_CONT', 'MEDIAN', 'MODE'].
45 |
%% Result type of an inverse-distribution function applied to arguments
%% of the given types; an error tuple when the combination is not
%% supported. All four functions accept sint64, double and timestamp
%% columns; only PERCENTILE_CONT changes the result type (interpolation
%% always yields a double).
-spec fn_type_signature(invdist_function(), [riak_ql_ddl:external_field_type()]) ->
                        riak_ql_ddl:external_field_type() |
                        {error, term()}.
fn_type_signature('PERCENTILE_DISC', [ColType, double])
  when ColType =:= sint64; ColType =:= double; ColType =:= timestamp ->
    ColType;
fn_type_signature('PERCENTILE_CONT', [ColType, double])
  when ColType =:= sint64; ColType =:= double; ColType =:= timestamp ->
    double;
fn_type_signature('MEDIAN', [ColType])
  when ColType =:= sint64; ColType =:= double; ColType =:= timestamp ->
    ColType;
fn_type_signature('MODE', [ColType])
  when ColType =:= sint64; ColType =:= double; ColType =:= timestamp ->
    ColType;
fn_type_signature(Fn, ArgTypes) ->
    {error, {argument_type_mismatch, Fn, ArgTypes}}.
67 |
%% Number of arguments each inverse-distribution function takes (the
%% column plus, for the percentile functions, the percentile fraction).
-spec fn_arity(invdist_function()) -> non_neg_integer().
fn_arity(FnName) ->
    case FnName of
        'PERCENTILE_CONT' -> 2;
        'PERCENTILE_DISC' -> 2;
        'MEDIAN'          -> 1;
        'MODE'            -> 1;
        _                 -> {error, invalid_function}
    end.
74 |
%% Validate the non-column parameters of an inverse-distribution
%% function. The percentile fraction must lie in [0.0, 1.0]; otherwise
%% {error, 2} identifies the offending (second) SQL argument. MEDIAN
%% and MODE take no extra parameters.
-spec fn_param_check(invdist_function(), [riak_ql_ddl:external_field_type()]) ->
                     ok | {error, WhichParamInvalid::pos_integer()}.
fn_param_check('PERCENTILE_CONT', [Pc]) when Pc >= 0.0, Pc =< 1.0 ->
    ok;
fn_param_check('PERCENTILE_DISC', [Pc]) when Pc >= 0.0, Pc =< 1.0 ->
    ok;
fn_param_check('PERCENTILE_CONT', [_Pc]) ->
    {error, 2};
fn_param_check('PERCENTILE_DISC', [_Pc]) ->
    {error, 2};
fn_param_check('MEDIAN', []) ->
    ok;
fn_param_check('MODE', []) ->
    ok.
88 |
89 |
90 | %% functions defined
91 | %%
92 | %% Note that ValuesAtF expects row position to be 0-based.
93 |
%% Discrete percentile: return the value at the (0-based) row whose
%% index is trunc(Pc * (RowsTotal - 1)) — always an actual data value,
%% no interpolation.
'PERCENTILE_DISC'([Pc], RowsTotal, ValuesAtF) ->
    TargetRow = trunc(Pc * (RowsTotal - 1)),
    [[Value]] = ValuesAtF([{TargetRow, 1}]),
    Value.
98 |
%% Continuous percentile: when Pc * (RowsTotal - 1) falls between two
%% rows, linearly interpolate between their values; otherwise return
%% the exact row's value.
'PERCENTILE_CONT'([Pc], RowsTotal, ValuesAtF) ->
    RowNum = Pc * (RowsTotal - 1),
    LoRow = trunc(RowNum),
    HiRow = ceil(RowNum),
    if
        LoRow == HiRow ->
            [[Exact]] = ValuesAtF([{LoRow, 1}]),
            Exact;
        true ->
            [[LoVal], [HiVal]] = ValuesAtF([{LoRow, 1}, {HiRow, 1}]),
            (HiRow - RowNum) * LoVal + (RowNum - LoRow) * HiVal
    end.
110 |
%% MEDIAN is the 0.5-percentile using the discrete variant, so it
%% always returns an actual value from the data set.
'MEDIAN'([], RowsTotal, ValuesAtF) ->
    'PERCENTILE_DISC'([0.5], RowsTotal, ValuesAtF).
113 |
%% MODE: the most frequent value. Row 0 is used as the starting value;
%% the single-pass scan in largest_bin assumes rows arrive in sorted
%% order (NOTE(review): presumably guaranteed by the query buffer —
%% confirm).
'MODE'([], RowsTotal, ValuesAtF) ->
    [[Min]] = ValuesAtF([{0, 1}]),
    largest_bin(Min, ValuesAtF, RowsTotal).
117 |
%% This will be inefficient for ldb backends (that is, when a qbuf is
%% dumped to leveldb): in this function, we call ValuesAtF to retrieve
%% one row at a time. This means, each time it is called,
%% `riak_kv_qry_buffers_ldb:fetch_rows` needs to seek from start and
%% trundle all the way to the Nth position, and all over again to
%% fetch N+1th row. The obvious todo item is to either teach
%% fetch_rows to cache iterators or, alternatively, fetch rows in
%% chunks ourselves.
%%
%% The scan state is {LargestValue, LargestCount, CurrentValue,
%% CurrentCount}; start at row 1 with the minimum value as both the
%% largest and the current bin.
largest_bin(Min, ValuesAtF, RowsTotal) ->
    largest_bin_({Min, 1, Min, 1}, ValuesAtF, 1, RowsTotal).
128 |
%% Scan the sorted rows one at a time, tracking the largest bin seen so
%% far and the bin currently being collected as
%% {LargestV, LargestC, CurrentV, CurrentC}. Values never decrease, so
%% any value different from CurrentV closes the current bin.
largest_bin_({LargestV, LargestC, CurrentV, CurrentC}, _ValuesAtF, Pos, RowsTotal)
  when Pos >= RowsTotal ->
    %% End of rows: the bin still being collected was never compared
    %% against the running largest — settle that here. (Previously the
    %% final bin was dropped, yielding a wrong MODE whenever the most
    %% frequent value sorted last, e.g. rows [1,2,2] returned 1.)
    case CurrentC > LargestC of
        true  -> CurrentV;
        false -> LargestV
    end;
largest_bin_({LargestV, LargestC, CurrentV, CurrentC}, ValuesAtF, Pos, RowsTotal) ->
    case ValuesAtF([{Pos, 1}]) of
        [[V]] when V == CurrentV ->
            largest_bin_({LargestV, LargestC, %% collecting current bin
                          CurrentV, CurrentC + 1}, ValuesAtF, Pos + 1, RowsTotal);
        [[V]] when V > CurrentV,
                   CurrentC > LargestC ->
            largest_bin_({CurrentV, CurrentC, %% now these be largest
                          V, 1}, ValuesAtF, Pos + 1, RowsTotal);
        [[V]] when V > CurrentV,
                   CurrentC =< LargestC ->
            largest_bin_({LargestV, LargestC, %% keep largest, reset current
                          V, 1}, ValuesAtF, Pos + 1, RowsTotal)
    end.
145 |
%% Smallest integer not less than X. The previous version added 1
%% whenever X had any fractional part, which is wrong for negative X
%% (ceil(-1.5) returned 0 instead of -1): trunc/1 already rounds
%% negatives toward zero, i.e. upward.
ceil(X) ->
    T = trunc(X),
    if
        X > T -> T + 1;
        true  -> T
    end.
152 |
--------------------------------------------------------------------------------
/src/riak_ql_lexer.xrl:
--------------------------------------------------------------------------------
1 | %%% -*- mode: erlang -*-
2 | %%% @doc Lexer for the riak Time Series Query Language.
3 | %%% @author gguthrie@basho.com
4 | %%% @copyright (C) 2016 Basho
5 |
Definitions.

% SQL keywords, matched case-insensitively: each letter is spelled as an
% (Upper|lower) alternation because leex has no case-folding flag.
ALTER = (A|a)(L|l)(T|t)(E|e)(R|r)
AND = (A|a)(N|n)(D|d)
ASC = (A|a)(S|s)(C|c)
BLOB = (B|b)(L|l)(O|o)(B|b)
BOOLEAN = (B|b)(O|o)(O|o)(L|l)(E|e)(A|a)(N|n)
BY = (B|b)(Y|y)
CREATE = (C|c)(R|r)(E|e)(A|a)(T|t)(E|e)
DELETE = (D|d)(E|e)(L|l)(E|e)(T|t)(E|e)
DESC = (D|d)(E|e)(S|s)(C|c)
DESCRIBE = (D|d)(E|e)(S|s)(C|c)(R|r)(I|i)(B|b)(E|e)
DOUBLE = (D|d)(O|o)(U|u)(B|b)(L|l)(E|e)
EXPLAIN = (E|e)(X|x)(P|p)(L|l)(A|a)(I|i)(N|n)
FALSE = (F|f)(A|a)(L|l)(S|s)(E|e)
FIRST = (F|f)(I|i)(R|r)(S|s)(T|t)
FROM = (F|f)(R|r)(O|o)(M|m)
GROUP = (G|g)(R|r)(O|o)(U|u)(P|p)
KEY = (K|k)(E|e)(Y|y)
INSERT = (I|i)(N|n)(S|s)(E|e)(R|r)(T|t)
INTO = (I|i)(N|n)(T|t)(O|o)
LAST = (L|l)(A|a)(S|s)(T|t)
LIMIT = (L|l)(I|i)(M|m)(I|i)(T|t)
NOT = (N|n)(O|o)(T|t)
IN = (I|i)(N|n)
IS = (I|i)(S|s)
NULL = (N|n)(U|u)(L|l)(L|l)
NULLS = (N|n)(U|u)(L|l)(L|l)(S|s)
OFFSET = (O|o)(F|f)(F|f)(S|s)(E|e)(T|t)
OR = (O|o)(R|r)
ORDER = (O|o)(R|r)(D|d)(E|e)(R|r)
PRIMARY = (P|p)(R|r)(I|i)(M|m)(A|a)(R|r)(Y|y)
QUANTUM = (Q|q)(U|u)(A|a)(N|n)(T|t)(U|u)(M|m)
SELECT = (S|s)(E|e)(L|l)(E|e)(C|c)(T|t)
SHOW = (S|s)(H|h)(O|o)(W|w)
SINT64 = (S|s)(I|i)(N|n)(T|t)64
TABLE = (T|t)(A|a)(B|b)(L|l)(E|e)
TABLES = (T|t)(A|a)(B|b)(L|l)(E|e)(S|s)
TIMESTAMP = (T|t)(I|i)(M|m)(E|e)(S|s)(T|t)(A|a)(M|m)(P|p)
TRUE = (T|t)(R|r)(U|u)(E|e)
VALUES = (V|v)(A|a)(L|l)(U|u)(E|e)(S|s)
VARCHAR = (V|v)(A|a)(R|r)(C|c)(H|h)(A|a)(R|r)
WHERE = (W|w)(H|h)(E|e)(R|r)(E|e)
WITH = (W|w)(I|i)(T|t)(H|h)

% single-quoted literal; '' inside escapes one quote (deduped later)
CHARACTER_LITERAL = '(''|[^'\n])*'
% NOTE(review): [0-9a-zA-Z] also admits g-z/G-Z, which are not hex
% digits — mochihex:to_bin/1 will fail on them; confirm intended.
HEX = 0x([0-9a-zA-Z]*)

REGEX = (/[^/][a-zA-Z0-9\*\.]+/i?)

IDENTIFIER = ([a-zA-Z][a-zA-Z0-9_\-]*)
% double-quoted identifier; "" inside escapes one quote
QUOTED_IDENTIFIER = \"(\"\"|[^\"\n])*\"
% /* ... */ block comments or -- line comments
COMMENT_MULTILINE = (/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*+/)|(--.*)
WHITESPACE = ([\000-\s]*)

% characters not in the ascii range
UNICODE = ([^\x00-\x7F])

% numbers; note \-* allows a run of leading minus signs
INTNUM = (\-*[0-9]+)
FLOATDEC = (\-*([0-9]+)?\.[0-9]+)
FLOATSCI = (\-*([0-9]+)?(\.)?[0-9]+(E|e)(\+|\-)?[0-9]+)

% comparison and match operators
APPROXMATCH = (=\~)
EQ = (=)
GT = (>)
LT = (<)
GTE = (>=)
LTE = (<=)
NE = (<>)
NOMATCH = (!=)
NOTAPPROX = (!\~)
OPEN = \(
CLOSE =\)

% arithmetic operators
PLUS = (\+)
MINUS = (\-)
ASTERISK = (\*)
SOLIDUS = (/)

COMMA = (,)
SEMICOLON = (\;)
Rules.

% Keyword tokens. leex always prefers the longest match, so e.g.
% DESCRIBE wins over DESC; rule order only breaks equal-length ties.
{ALTER} : {token, {alter, list_to_binary(TokenChars)}}.
{AND} : {token, {and_, list_to_binary(TokenChars)}}.
{ASC} : {token, {asc, list_to_binary(TokenChars)}}.
{BLOB} : {token, {blob, list_to_binary(TokenChars)}}.
{BOOLEAN} : {token, {boolean, list_to_binary(TokenChars)}}.
{BY} : {token, {by, list_to_binary(TokenChars)}}.
{CREATE} : {token, {create, list_to_binary(TokenChars)}}.
{DELETE} : {token, {delete, list_to_binary(TokenChars)}}.
{DESC} : {token, {desc, list_to_binary(TokenChars)}}.
{DESCRIBE} : {token, {describe, list_to_binary(TokenChars)}}.
{DOUBLE} : {token, {double, list_to_binary(TokenChars)}}.
{EXPLAIN} : {token, {explain, list_to_binary(TokenChars)}}.
{FALSE} : {token, {false, list_to_binary(TokenChars)}}.
{FIRST} : {token, {first, list_to_binary(TokenChars)}}.
{FROM} : {token, {from, list_to_binary(TokenChars)}}.
{INSERT} : {token, {insert, list_to_binary(TokenChars)}}.
{INTO} : {token, {into, list_to_binary(TokenChars)}}.
{GROUP} : {token, {group, list_to_binary(TokenChars)}}.
% NOTE(review): GROUP_TIME has no macro in the Definitions section —
% confirm this rule is valid/reachable.
{GROUP_TIME} : {token, {group_time, list_to_binary(TokenChars)}}.
{KEY} : {token, {key, list_to_binary(TokenChars)}}.
{LAST} : {token, {last, list_to_binary(TokenChars)}}.
{LIMIT} : {token, {limit, list_to_binary(TokenChars)}}.
{NOT} : {token, {not_, list_to_binary(TokenChars)}}.
{NULL} : {token, {null, list_to_binary(TokenChars)}}.
{NULLS} : {token, {nulls, list_to_binary(TokenChars)}}.
{OFFSET} : {token, {offset, list_to_binary(TokenChars)}}.
{OR} : {token, {or_, list_to_binary(TokenChars)}}.
{ORDER} : {token, {order, list_to_binary(TokenChars)}}.
{PRIMARY} : {token, {primary, list_to_binary(TokenChars)}}.
{QUANTUM} : {token, {quantum, list_to_binary(TokenChars)}}.
{SELECT} : {token, {select, list_to_binary(TokenChars)}}.
{SHOW} : {token, {show, list_to_binary(TokenChars)}}.
{SINT64} : {token, {sint64, list_to_binary(TokenChars)}}.
{TABLE} : {token, {table, list_to_binary(TokenChars)}}.
{TABLES} : {token, {tables, list_to_binary(TokenChars)}}.
{TIMESTAMP} : {token, {timestamp, list_to_binary(TokenChars)}}.
{TRUE} : {token, {true, list_to_binary(TokenChars)}}.
{VALUES} : {token, {values, list_to_binary(TokenChars)}}.
{VARCHAR} : {token, {varchar, list_to_binary(TokenChars)}}.
{WHERE} : {token, {where, list_to_binary(TokenChars)}}.
{WITH} : {token, {with, list_to_binary(TokenChars)}}.

% hex literals become character_literal binaries via clean_up_hex/1
{HEX} : {token, {character_literal, clean_up_hex(TokenChars)}}.
{INTNUM} : {token, {integer, list_to_integer(TokenChars)}}.

% float chars do not get converted to floats, if they are part of a word
% then converting it and converting it back will alter the chars
{FLOATDEC} : {token, {float, TokenChars}}.
{FLOATSCI} : {token, {float_sci, TokenChars}}.

{IS} : {token, {is_, list_to_binary(TokenChars)}}.
{IN} : {token, {in, list_to_binary(TokenChars)}}.
{EQ} : {token, {equals_operator, list_to_binary(TokenChars)}}.
{APPROXMATCH} : {token, {approx, list_to_binary(TokenChars)}}.
{GT} : {token, {greater_than_operator, list_to_binary(TokenChars)}}.
{LT} : {token, {less_than_operator, list_to_binary(TokenChars)}}.
{GTE} : {token, {gte, list_to_binary(TokenChars)}}.
{LTE} : {token, {lte, list_to_binary(TokenChars)}}.
{NE} : {token, {ne, list_to_binary(TokenChars)}}.
{NOMATCH} : {token, {nomatch, list_to_binary(TokenChars)}}.
{NOTAPPROX} : {token, {notapprox, list_to_binary(TokenChars)}}.

{OPEN} : {token, {left_paren, list_to_binary(TokenChars)}}.
{CLOSE} : {token, {right_paren, list_to_binary(TokenChars)}}.

{PLUS} : {token, {plus_sign, list_to_binary(TokenChars)}}.
{MINUS} : {token, {minus_sign, list_to_binary(TokenChars)}}.
{ASTERISK} : {token, {asterisk, list_to_binary(TokenChars)}}.
{SOLIDUS} : {token, {solidus, list_to_binary(TokenChars)}}.

% quoted literals are unquoted/deduped before emission
{CHARACTER_LITERAL} :
 {token, {character_literal, clean_up_literal(TokenChars)}}.

{REGEX} : {token, {regex, list_to_binary(TokenChars)}}.

{COMMA} : {token, {comma, list_to_binary(TokenChars)}}.
{SEMICOLON} : {token, {semicolon, list_to_binary(TokenChars)}}.

% comments and whitespace are discarded
{COMMENT_MULTILINE} : skip_token.
{WHITESPACE} : skip_token.

% a newline terminates the token stream with an explicit end token
\n : {end_token, {'$end'}}.

{IDENTIFIER} : {token, {identifier, clean_up_identifier(TokenChars)}}.
{QUOTED_IDENTIFIER} : {token, {identifier, clean_up_identifier(TokenChars)}}.
% non-ASCII outside quotes is rejected outright
{UNICODE} : error(unicode_in_identifier).

. : error(iolist_to_binary(io_lib:format("Unexpected token '~s'.", [TokenChars]))).
178 |
179 | Erlang code.
180 |
181 | -compile([export_all]).
182 |
%% Entry point: run the generated scanner over the query string, then
%% post-process the raw token stream (float/identifier conversion).
get_tokens(Query) ->
    post_process(lex(Query)).
186 |
post_process(X) ->
    post_p(X, []).

%% Second pass over the raw token list: identifier payloads are
%% converted from strings to binaries, and float tokens (kept as raw
%% strings by the rules above) are converted to actual floats.
post_p([], Acc) ->
    lists:reverse(Acc);
post_p([{identifier, TokenChars} | T], Acc) when is_list(TokenChars)->
    post_p(T, [{identifier, list_to_binary(TokenChars)} | Acc]);
post_p([{float, TokenChars} | T], Acc) ->
    post_p(T, [{float, fpdec_to_float(TokenChars)} | Acc]);
post_p([{float_sci, TokenChars} | T], Acc) ->
    %% scientific notation collapses to a plain float token
    post_p(T, [{float, fpsci_to_float(TokenChars)} | Acc]);
post_p([H | T], Acc) ->
    post_p(T, [H | Acc]).
201 |
%% Run the leex-generated string/1 scanner; a scan error surfaces as a
%% badmatch here rather than being returned.
lex(String) ->
    {ok, Toks, _} = string(String),
    Toks.
205 |
%% Identifiers get the same quote-stripping/deduplication as literals.
clean_up_identifier(Literal) ->
    clean_up_literal(Literal).
208 |
%% Convert a 0x... token to a binary via mochihex. An odd number of hex
%% digits is rejected: each byte needs exactly two characters.
clean_up_hex([$0,$x|Hex]) ->
    case length(Hex) rem 2 of
        0 ->
            mochihex:to_bin(Hex);
        _ ->
            error({odd_hex_chars,<<"Hex strings must have an even number of characters.">>})
    end.
216 |
%% Strip the outer quotes from a quoted literal/identifier, reject
%% non-ASCII inside double quotes, collapse doubled internal quotes,
%% and return the result as a binary.
clean_up_literal(Literal) ->
    Literal1 = case hd(Literal) of
                   $' -> accurate_strip(Literal, $');
                   $" ->
                       %% comprehension run purely for its side effect:
                       %% errors out on any char > 127
                       [error(unicode_in_quotes) || U <- Literal, U > 127],
                       accurate_strip(Literal, $");
                   _ -> Literal
               end,
    DeDupedInternalQuotes = dedup_quotes(Literal1),
    list_to_binary(DeDupedInternalQuotes).
227 |
%% Collapse doubled quote characters ('' -> ', "" -> ") in a single
%% O(n) pass, accumulating the output in reverse.
dedup_quotes(S) ->
    dedup_quotes(S, []).
dedup_quotes([], Acc) ->
    lists:reverse(Acc);
dedup_quotes([Q, Q | Rest], Acc) when Q =:= $'; Q =:= $" ->
    %% two identical quote chars collapse to one
    dedup_quotes(Rest, [Q | Acc]);
dedup_quotes([C | Rest], Acc) ->
    dedup_quotes(Rest, [C | Acc]).
239 |
%% Strip exactly one enclosing pair of quote characters C from S. A
%% one-character string (just the quote itself) is left untouched, so
%% literals that merely end in the quote character survive.
accurate_strip(S, C) ->
    Len = length(S),
    case {hd(S), lists:last(S)} of
        {C, C} when Len > 1 ->
            string:substr(S, 2, Len - 2);
        _ ->
            S
    end.
249 |
%% Convert a scientific-notation token (e.g. "1.5e-2") to a float.
%% Mantissa and exponent are normalised first because list_to_float/1
%% insists on a fractional part and a well-formed exponent.
fpsci_to_float(Chars) ->
    [Mantissa, Exponent] = re:split(Chars, "E|e", [{return, list}]),
    M2 = normalise_mant(Mantissa),
    E2 = normalise_exp(Exponent),
    sci_to_f2(M2, E2).
255 |
%% Glue a normalised mantissa and exponent back together for
%% list_to_float/1; a zero exponent is dropped entirely.
sci_to_f2(Mantissa, Exponent) ->
    case Exponent of
        "+0" -> list_to_float(Mantissa);
        "-0" -> list_to_float(Mantissa);
        _    -> list_to_float(lists:append([Mantissa, "e", Exponent]))
    end.
259 |
%% Ensure the mantissa has a fractional part ("3" -> "3.0"), since
%% list_to_float/1 rejects bare integers.
normalise_mant(Mantissa) ->
    case re:split(Mantissa, "\\.", [{return, list}]) of
        [_NoDot]        -> Mantissa ++ ".0";
        [_Whole, _Frac] -> Mantissa
    end.
265 |
%% Ensure the exponent carries an explicit sign ("5" -> "+5").
normalise_exp([Sign | _] = Exp) when Sign =:= $+; Sign =:= $- ->
    Exp;
normalise_exp(Digits) ->
    [$+ | Digits].
269 |
%% Convert a decimal float token (as produced by the FLOATDEC rule) to
%% a float. list_to_float/1 requires digits on both sides of the dot,
%% so ".5"-style tokens get a zero spliced in.
fpdec_to_float([$-, $. | RemainTokenChars]) ->
    %% "-.5" -> "-0.5"; previously this fell into the [$- | _] clause
    %% below and crashed in list_to_float/1, even though the FLOATDEC
    %% regex (\-*([0-9]+)?\.[0-9]+) admits such tokens
    list_to_float([$-, $0, $. | RemainTokenChars]);
fpdec_to_float([$- | _RemainTokenChars] = TokenChars) ->
    list_to_float(TokenChars);
fpdec_to_float([$. | _RemainTokenChars] = TokenChars) ->
    list_to_float([$0 | TokenChars]);
fpdec_to_float(TokenChars) ->
    list_to_float(TokenChars).
276 |
--------------------------------------------------------------------------------
/src/riak_ql_quanta.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% riak_ql_quanta.erl - a library for quantising time for Time Series
4 | %%
5 | %% @doc This module serves to generate time quanta on multi - (day, hour, minute,
6 | %% second) boundaries. The quantum are based on an origin time of Jan 1, 1970
7 | %% 00:00:00 (Unix Epoch).
8 | %% The function quantum/3 takes a time in milliseconds to bucketize,
9 | %% a size of the quantum, and the units of said quantum.
10 | %% For instance, the following call would create buckets for timestamps on 15
11 | %% minute boundaries: quantum(Time, 15, m). The quantum time is returned in
12 | %% milliseconds since the Unix epoch.
%% The function quanta/4 takes 2 times in milliseconds, the size of the quantum
%% and the units of said quantum, and returns a list of quantum boundaries that span the time
15 | %%
16 | %% Copyright (c) 2015-2016 Basho Technologies, Inc. All Rights Reserved.
17 | %%
18 | %% This file is provided to you under the Apache License,
19 | %% Version 2.0 (the "License"); you may not use this file
20 | %% except in compliance with the License. You may obtain
21 | %% a copy of the License at
22 | %%
23 | %% http://www.apache.org/licenses/LICENSE-2.0
24 | %%
25 | %% Unless required by applicable law or agreed to in writing,
26 | %% software distributed under the License is distributed on an
27 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
28 | %% KIND, either express or implied. See the License for the
29 | %% specific language governing permissions and limitations
30 | %% under the License.
31 | %%
32 | %% -------------------------------------------------------------------
33 | -module(riak_ql_quanta).
34 |
35 | -export([
36 | quantum/3,
37 | quanta/4,
38 | timestamp_to_ms/1,
39 | ms_to_timestamp/1,
40 | unit_to_millis/2
41 | ]).
42 |
43 | -type time_ms() :: non_neg_integer().
%% A timestamp in milliseconds representing the number of milliseconds from Unix epoch
45 |
46 | -type time_unit() :: d | h | m | s | ms.
47 | %% The units of quantization available to quantum/3
48 |
49 | -type err() :: {error, term()}.
50 |
51 | %% @doc The Number of Days from Jan 1, 0 to Jan 1, 1970
52 | %% We need this to compute years and months properly including leap years and variable length
53 | %% months.
54 | -define(DAYS_FROM_0_TO_1970, 719528).
55 |
56 | -ifdef(TEST).
57 | -ifdef(EQC).
58 | -include_lib("eqc/include/eqc.hrl").
59 | -define(QC_OUT(P),
60 | eqc:on_output(fun(Str, Args) ->
61 | io:format(user, Str, Args) end, P)).
62 | -compile(export_all).
63 | -endif.
64 | -endif.
65 | %% @clear
66 | %% @end
67 |
%% @doc given an upper and lower bound for time, returns a tuple consisting of
%% * the number of slices
%% * a list of all the quantum boundaries
%%   - the length of the list is the number of slices - 1
-spec quanta(time_ms(), time_ms(), non_neg_integer(), time_unit()) -> {integer(), [integer()]} | {error, any()}.
quanta(StartTime, EndTime, QuantaSize, Unit) when StartTime > EndTime ->
    %% cheap trick to handle descending timestamps: swap the arguments
    quanta(EndTime, StartTime, QuantaSize, Unit);
quanta(StartTime, EndTime, QuantaSize, Unit) ->
    case quantum(StartTime, QuantaSize, Unit) of
        {error, _} = Error ->
            %% invalid unit — propagate unchanged
            Error;
        Start ->
            %% width of a single quantum in milliseconds
            SliceMs = unit_to_ms(Unit) * QuantaSize,
            NSlices = accommodate(EndTime - Start, SliceMs),
            {NSlices, gen_quanta(NSlices, Start, SliceMs, [])}
    end.
87 |
%% Compute ceil(Length / Unit) for non-negative integers.
accommodate(Length, Unit) ->
    %% The original fall-through guard read `el/=se` — the atom
    %% comparison `el /= se`, which is always true and so worked only
    %% by accident; `true` is the idiomatic if-else.
    Length div Unit + if Length rem Unit > 0 -> 1; true -> 0 end.
91 |
%% Build the list of quantum boundaries strictly above Start:
%% Start + Slice, ..., Start + (N-1)*Slice — i.e. N-1 boundaries for N
%% slices. N =< 1 yields the accumulator unchanged; previously N = 0
%% (possible when the range collapses onto a single boundary) crashed
%% with function_clause.
gen_quanta(N, _Start, _Slice, Acc) when is_integer(N), N =< 1 ->
    Acc;
gen_quanta(N, Start, Slice, Acc) when is_integer(N) andalso N > 1 ->
    NewA = Start + (N - 1) * Slice,
    gen_quanta(N - 1, Start, Slice, [NewA | Acc]).
97 |
%% @doc Given the time in milliseconds since the unix epoch and a time range and unit eg (15, m),
%% generate the starting timestamp of the range (quantum) in milliseconds since the epoch where the
%% time belongs. Note that Time - Quanta is less than or equal to QuantaSize * Unit (in milliseconds).
-spec quantum(time_ms(), non_neg_integer(), time_unit()) -> time_ms() | err().
quantum(Time, QuantaSize, Unit) when is_integer(Time),
                                     is_integer(QuantaSize),
                                     (Unit == d orelse Unit == h orelse
                                      Unit == m orelse Unit == s orelse
                                      Unit == ms) ->
    %% align Time down to the nearest multiple of the quantum width
    Ms = unit_to_ms(Unit),
    Diff = Time rem (QuantaSize*Ms),
    Time - Diff;
quantum(_, _, Unit) ->
    %% Reached for an unknown unit OR non-integer Time/QuantaSize. The
    %% original guard (`... andalso Unit == d; Unit == h; ...`) only
    %% applied the integer checks to the d branch — `;` binds weaker
    %% than `andalso` — so non-integer arguments with other units fell
    %% into the first clause and crashed on `rem`.
    {error, {invalid_unit, Unit}}.
114 |
%% Convert an integer and a time unit (atom or binary form) to millis,
%% assumed from the unix epoch.
-spec unit_to_millis(Value::integer(), Unit::binary() | time_unit()) -> integer() | error.
unit_to_millis(Value, Unit) ->
    case Unit of
        ms       -> Value;
        <<"ms">> -> Value;
        s        -> Value * 1000;
        <<"s">>  -> Value * 1000;
        m        -> Value * 1000 * 60;
        <<"m">>  -> Value * 1000 * 60;
        h        -> Value * 1000 * 60 * 60;
        <<"h">>  -> Value * 1000 * 60 * 60;
        d        -> Value * 1000 * 60 * 60 * 24;
        <<"d">>  -> Value * 1000 * 60 * 60 * 24;
        _        -> error
    end.
124 |
%% @doc Return the time in milliseconds since 00:00 GMT Jan 1, 1970 (Unix Epoch)
-spec timestamp_to_ms(erlang:timestamp()) -> time_ms().
timestamp_to_ms({MegaSecs, Secs, MicroSecs}) ->
    TotalSeconds = MegaSecs * 1000000 + Secs,
    TotalSeconds * 1000 + MicroSecs div 1000.
129 |
%% @doc Return an erlang:timestamp() given the time in milliseconds since the Unix Epoch
-spec ms_to_timestamp(time_ms()) -> erlang:timestamp().
ms_to_timestamp(TimeMs) ->
    %% megaseconds stay 0; the seconds field simply carries the total
    %% (the calendar functions used in the tests below accept that)
    {0, TimeMs div 1000, (TimeMs rem 1000) * 1000}.
136 |
-spec unit_to_ms(time_unit()) -> time_ms().
%% Milliseconds in one time unit, each built from the next smaller one.
unit_to_ms(ms) -> 1;
unit_to_ms(s)  -> 1000;
unit_to_ms(m)  -> unit_to_ms(s) * 60;
unit_to_ms(h)  -> unit_to_ms(m) * 60;
unit_to_ms(d)  -> unit_to_ms(h) * 24.
148 |
149 | -ifdef(TEST).
150 | -include_lib("eunit/include/eunit.hrl").
151 |
%% Quantise "now" with a Quanta-minute quantum and check the minute
%% part of the resulting boundary is one of the allowed values.
assert_minutes(Quanta, OkTimes) ->
    Time = timestamp_to_ms(os:timestamp()),
    QuantaMs = quantum(Time, Quanta, m),
    {_, {_, M, _}} = calendar:now_to_universal_time(ms_to_timestamp(QuantaMs)),
    ?assert(lists:member(M, OkTimes)).

quantum_minutes_test() ->
    assert_minutes(15, [0, 15, 30, 45]),
    %% 75 min = 1h15m, so boundaries still land on quarter-hours
    assert_minutes(75, [0, 15, 30, 45]),
    assert_minutes(5, [0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55]),
    assert_minutes(6, [0, 6, 12, 18, 24, 30, 36, 42, 48, 54]).
163 |
%% Same idea for hour-sized quanta: the hour of the boundary must fall
%% in the allowed set.
assert_hours(Quanta, OkTimes) ->
    Time = timestamp_to_ms(os:timestamp()),
    QuantaMs = quantum(Time, Quanta, h),
    {_, {H, _, _}} = calendar:now_to_universal_time(ms_to_timestamp(QuantaMs)),
    ?assert(lists:member(H, OkTimes)).

quantum_hours_test() ->
    assert_hours(12, [0, 12]),
    assert_hours(24, [0]).
173 |
%% For day-sized quanta, check the boundary is no more than Days days
%% before "now" and never in the future.
assert_days(Days) ->
    Now = os:timestamp(),
    Time = timestamp_to_ms(Now),
    QuantaMs = quantum(Time, Days, d),
    {NowDate, _} = calendar:now_to_universal_time(Now),
    {QuantaDate, _} = calendar:now_to_universal_time(ms_to_timestamp(QuantaMs)),
    NowDays = calendar:date_to_gregorian_days(NowDate),
    QuantaDays = calendar:date_to_gregorian_days(QuantaDate),
    ?assert((NowDays - QuantaDays) < Days),
    ?assert((NowDays - QuantaDays) >= 0).

quantum_days_test() ->
    assert_days(1),
    assert_days(10),
    assert_days(15),
    assert_days(28),
    assert_days(29),
    assert_days(30),
    assert_days(31).
193 |
194 | %%
195 | %% test Quanta
196 | %%
197 |
-define(MIN, 60*1000). % minute in milliseconds
%% A range inside one quantum: 1 slice, no interior boundaries.
single_quanta_test() ->
    Start = 1 * ?MIN,
    End = 5 * ?MIN,
    {N, Quanta} = quanta(Start, End, 15, m),
    Length = length(Quanta),
    ?assertEqual(1, N),
    ?assertEqual(N - 1, Length),
    ?assertEqual([], Quanta).

%% A range that crosses the 15-minute boundary once: 2 slices, one
%% interior boundary at 15 min.
two_quanta_test() ->
    Start = 1 * ?MIN,
    End = 16 * ?MIN,
    {N, Quanta} = quanta(Start, End, 15, m),
    Length = length(Quanta),
    ?assertEqual(2, N),
    ?assertEqual(N -1, Length),
    ?assertEqual([15 * ?MIN], Quanta).

%% A short range straddling a boundary still yields 2 slices.
split_quanta_test() ->
    Start = 14 * ?MIN,
    End = 16 * ?MIN,
    {N, Quanta} = quanta(Start, End, 15, m),
    Length = length(Quanta),
    ?assertEqual(2, N),
    ?assertEqual(N - 1, Length),
    ?assertEqual([15 * ?MIN], Quanta).
225 |
226 | -ifdef(EQC).
%% eunit wrapper so the EQC property runs as part of the suite.
prop_quantum_bounded_test() ->
    ?assertEqual(
       true,
       eqc:quickcheck(
         eqc:numtests(1000, prop_quantum_bounded()))
      ).

%% Ensure that Quantas are always bounded, meaning that any time is no more
%% than one quantum ahead of the quantum start.
prop_quantum_bounded() ->
    ?FORALL(
       {Date, Time, {Quanta, Unit}},
       {date_gen(), time_gen(), quantum_gen()},
       begin
           DateTime = {Date, Time},
           %% shift the Gregorian-seconds origin (year 0) to the Unix epoch
           SecondsFrom0To1970 = ?DAYS_FROM_0_TO_1970 * (unit_to_ms(d) div 1000),
           DateMs = (calendar:datetime_to_gregorian_seconds(DateTime) - SecondsFrom0To1970)*1000,
           QuantaMs = quantum(DateMs, Quanta, Unit),
           QuantaSize = quantum_in_ms(Quanta, Unit),
           (DateMs - QuantaMs) =< QuantaSize
       end).

%% Helper: quantise a calendar datetime and return it as an
%% erlang:timestamp()-shaped tuple.
quantum_now_from_datetime(DateTime, Quanta, Unit) ->
    SecondsFrom0To1970 = ?DAYS_FROM_0_TO_1970 * (unit_to_ms(d) div 1000),
    DateMs = (calendar:datetime_to_gregorian_seconds(DateTime) - SecondsFrom0To1970)*1000,
    QuantaMs = quantum(DateMs, Quanta, Unit),
    ms_to_timestamp(QuantaMs).

quantum_in_ms(Quanta, Unit) ->
    Quanta*unit_to_ms(Unit).

%% EQC Generators
date_gen() ->
    ?SUCHTHAT(Date, {choose(1970, 2015), choose(1, 12), choose(1, 31)}, calendar:valid_date(Date)).

time_gen() ->
    {choose(0, 23), choose(0, 59), choose(0, 59)}.

%% We expect quanta to be bigger than their cardinality
%% A quantum of 100 minutes is perfectly reasonable
quantum_gen() ->
    oneof([
           {choose(1, 1000), d},
           {choose(1, 1000), h},
           {choose(1, 1000), m},
           {choose(1, 1000), s}
          ]).
274 |
275 | -endif.
276 | -endif.
277 |
--------------------------------------------------------------------------------
/src/riak_ql_show_create_table.erl:
--------------------------------------------------------------------------------
1 | %%-------------------------------------------------------------------
2 | %%
3 | %% SHOW CREATE TABLE SQL command
4 | %%
5 | %% These are retrieved from riak_core_bucket:get_bucket/1
6 | %%
7 | %% Copyright (C) 2016 Basho Technologies, Inc. All rights reserved
8 | %%
9 | %% This file is provided to you under the Apache License,
10 | %% Version 2.0 (the "License"); you may not use this file
11 | %% except in compliance with the License. You may obtain
12 | %% a copy of the License at
13 | %%
14 | %% http://www.apache.org/licenses/LICENSE-2.0
15 | %%
16 | %% Unless required by applicable law or agreed to in writing,
17 | %% software distributed under the License is distributed on an
18 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
19 | %% KIND, either express or implied. See the License for the
20 | %% specific language governing permissions and limitations
21 | %% under the License.
22 | %%
23 | %%-------------------------------------------------------------------
24 |
25 | -module(riak_ql_show_create_table).
26 |
27 | -export([show_create_table/2]).
28 |
29 | -include("riak_ql_ddl.hrl").
30 |
%%
-spec show_create_table(?DDL{}, [tuple()]) -> {ok, {ColNames::[binary()],
                                                    ColTypes::[riak_ql_ddl:external_field_type()],
                                                    Rows::[[any()]]}}.

%% Render the DDL plus its user-visible bucket properties as a
%% one-column ("SQL"), one-row result whose single cell holds the
%% multiline CREATE TABLE text.
show_create_table(DDL, Props) ->
    SQL = riak_ql_to_string:ddl_rec_to_sql_multiline(
            DDL, filter_bucket_properties(Props)),
    {ok, {[<<"SQL">>], [varchar], [[SQL]]}}.
44 |
%% Skip any internal bucket properties so only user-settable options
%% reach the generated WITH (...) clause; survivors are returned sorted
%% by property name.
filter_bucket_properties(Props) ->
    Internal = [basic_quorum, big_vclock, chash_keyfun, claimant, ddl,
                ddl_compiler_version, linkfun, name, old_vclock,
                precommit, small_vclock, write_once, young_vclock],
    IsVisible = fun({Key, _}) -> not lists:member(Key, Internal);
                   (_)        -> true
                end,
    lists:sort(fun({A, _}, {B, _}) -> A =< B end,
               lists:filter(IsVisible, Props)).
62 |
63 |
64 | %%-------------------------------------------------------------------
65 | %% Unit tests
66 | %%-------------------------------------------------------------------
67 |
68 | -ifdef(TEST).
69 | -include_lib("eunit/include/eunit.hrl").
70 |
%% Round-trip: parse a CREATE TABLE with a WITH clause, regenerate the
%% SQL, and compare modulo case and whitespace.
matching_sql_test() ->
    SQL = "CREATE TABLE tab ("
    "a VARCHAR NOT NULL, "
    "b VARCHAR NOT NULL, "
    "c TIMESTAMP NOT NULL, "
    "PRIMARY KEY ((a, b, quantum(c, 15, 'm')), a, b, c)) "
    "WITH (a = 1, b = 'bee', c = false, d = 3.1415)",
    Props = [{<<"a">>,1},{<<"b">>,<<"bee">>},{<<"c">>,false},{<<"d">>,3.1415}],
    {ddl, DDL, Props} =
        riak_ql_parser:ql_parse(
          riak_ql_lexer:get_tokens(SQL)),
    {ok, Result} = show_create_table(DDL, Props),
    {Cols, _, [[Row]]} = Result,
    ?assertEqual(
       [<<"SQL">>],
       Cols),
    ?assertEqual(
       lowercase(SQL),
       lowercase(Row)
      ).
91 |
%% Same round-trip without bucket properties: no WITH clause expected.
matching_sql_no_props_test() ->
    SQL = "CREATE TABLE tab ("
    "a VARCHAR NOT NULL, "
    "b VARCHAR NOT NULL, "
    "c TIMESTAMP NOT NULL, "
    "PRIMARY KEY ((a, b, quantum(c, 15, 'm')), a, b, c))",
    {ddl, DDL, []} =
        riak_ql_parser:ql_parse(
          riak_ql_lexer:get_tokens(SQL)),
    {ok, Result} = show_create_table(DDL, []),
    {Cols, _, [[Row]]} = Result,
    ?assertEqual(
       [<<"SQL">>],
       Cols),
    ?assertEqual(
       lowercase(SQL),
       lowercase(Row)
      ).
110 |
%% Remove the extra whitespace and lowercase everything for a safe
%% comparison of generated vs. hand-written SQL.
lowercase(S) when is_binary(S) ->
    lowercase(binary_to_list(S));
lowercase(S) when is_list(S) ->
    Collapsed = re:replace(S, "\\s+", " ", [global, {return, list}]),
    string:to_lower(string:join(string:tokens(Collapsed, " "), " ")).
117 |
118 | -endif.
119 |
--------------------------------------------------------------------------------
/src/riak_ql_to_string.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% riak_ql_to_string: convert the output of the compiler
4 | %% back to the text representation
5 | %%
6 | %% Only works on a subset of outputs at the
7 | %% moment
8 | %%
9 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
10 | %%
11 | %% This file is provided to you under the Apache License,
12 | %% Version 2.0 (the "License"); you may not use this file
13 | %% except in compliance with the License. You may obtain
14 | %% a copy of the License at
15 | %%
16 | %% http://www.apache.org/licenses/LICENSE-2.0
17 | %%
18 | %% Unless required by applicable law or agreed to in writing,
19 | %% software distributed under the License is distributed on an
20 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
21 | %% KIND, either express or implied. See the License for the
22 | %% specific language governing permissions and limitations
23 | %% under the License.
24 | %%
25 | %% -------------------------------------------------------------------
26 | -module(riak_ql_to_string).
27 |
28 | -export([
29 | col_names_from_select/1,
30 | ddl_rec_to_sql/1,
31 | ddl_rec_to_sql/2,
32 | ddl_rec_to_sql_with_props/2,
33 | ddl_rec_to_sql_multiline/2
34 | ]).
35 |
36 | -include("riak_ql_ddl.hrl").
37 |
38 | %% --------------------------
39 | %% local functions
40 |
%% Convert the selection in select clause to a list of strings, one
%% element for each column. White space in the original query is not reproduced.
-spec col_names_from_select(list(term())) -> [string()].
col_names_from_select(Select) ->
    lists:map(fun select_col_to_string/1, Select).
46 |
47 | %% --------------------------
48 | %% local functions
49 |
50 |
%% Convert one column to a flat string.
%% NOTE: clause order matters — the bare-binary and single-element-list
%% clauses must come before the tuple clauses.
-spec select_col_to_string(any()) ->
        string().
%% these two happen only in sql
select_col_to_string(Bare) when is_binary(Bare) -> %% bare column name in where expression
    binary_to_list(Bare);
select_col_to_string([Expr]) -> %% a single where expression
    select_col_to_string(Expr);
%% these are common to ddl and sql:
select_col_to_string({identifier, [Name]}) ->
    binary_to_list(Name);
select_col_to_string({identifier, Name}) ->
    binary_to_list(Name);
select_col_to_string({integer, Value}) when is_integer(Value) ->
    integer_to_list(Value);
select_col_to_string({float, Value}) when is_float(Value) ->
    mochinum:digits(Value);
select_col_to_string({binary, Value}) when is_binary(Value) ->
    binary_to_list(<<"'", Value/binary, "'">>);
select_col_to_string({boolean, true}) ->
    "true";
select_col_to_string({boolean, false}) ->
    "false";
%% function application renders as NAME(arg1, arg2, ...)
select_col_to_string({{FnClass, FunName}, Args})
  when is_atom(FunName) andalso
       (FnClass == window_agg_fn orelse
        FnClass == inverse_distrib_fn orelse
        FnClass == sql_select_fn) ->
    lists:flatten([
                   atom_to_list(FunName),
                   $(,
                   string:join([select_col_to_string(A) || A <- Args], ", "),
                   $)
                  ]);
select_col_to_string({expr, Expression}) ->
    select_col_to_string(Expression);
select_col_to_string({negate, Expression}) ->
    "-" ++ select_col_to_string(Expression);
%% binary operators render fully parenthesised
select_col_to_string({Op, Arg1, Arg2}) when is_atom(Op) ->
    flat_format(
      "(~s~s~s)",
      [select_col_to_string(Arg1), op_to_string(Op), select_col_to_string(Arg2)]).
93 |
%% Operator atom to its SQL spelling; the parser's and_/or_ atoms map
%% to the keywords, everything else prints literally.
op_to_string(Op) ->
    case Op of
        and_ -> "AND";
        or_  -> "OR";
        _    -> atom_to_list(Op)
    end.
98 |
%% io_lib:format/2 returns a deep iolist; flatten it to a plain string.
flat_format(Format, Args) ->
    Deep = io_lib:format(Format, Args),
    lists:flatten(Deep).
101 |
-spec ddl_rec_to_sql(?DDL{}) -> string().
%% Render a DDL record as a single-line CREATE TABLE statement.
ddl_rec_to_sql(DDL) ->
    ddl_rec_to_sql(DDL, " ").

-spec ddl_rec_to_sql(?DDL{}, string()) -> string().
%% Render a DDL record as CREATE TABLE text, using Join (" " or "\n")
%% between the column definitions and key clauses.
ddl_rec_to_sql(?DDL{table = Tb,
                    fields = Fs,
                    partition_key = PK,
                    local_key = LK},
               Join) ->
    "CREATE TABLE " ++ binary_to_list(Tb) ++ " (" ++ make_fields(Fs, Join) ++ "PRIMARY KEY ((" ++ pk_to_sql(PK) ++ ")," ++ Join ++ lk_to_sql(LK) ++ "))".
113 |
-spec ddl_rec_to_sql_with_props(?DDL{}, [tuple()]) -> string().
%% Single-line CREATE TABLE followed by a WITH (...) clause (if any).
ddl_rec_to_sql_with_props(DDL, Props) ->
    ddl_rec_to_sql(DDL) ++ make_props(Props).

-spec ddl_rec_to_sql_multiline(?DDL{}, [tuple()]) -> string().
%% Same, but with newlines between clauses (used by SHOW CREATE TABLE).
ddl_rec_to_sql_multiline(DDL, Props) ->
    Join = "\n",
    ddl_rec_to_sql(DDL, Join) ++ make_props(Props, Join).
122 |
%% Render the column definitions, each as "name TYPE [NOT NULL]," plus
%% the join separator; the trailing comma is followed by PRIMARY KEY.
make_fields(Fs, Join) ->
    make_f2(Fs, Join, []).

make_f2([], _Join, Acc) ->
    lists:flatten(lists:reverse(Acc));
make_f2([#riak_field_v1{name = Nm,
                        type = Ty,
                        optional = IsOpt} | T], Join, Acc) ->
    Args = [
            binary_to_list(Nm),
            string:to_upper(atom_to_list(Ty))
           ] ++ case IsOpt of
                    %% optional column -> nullable, no qualifier
                    true -> [];
                    false -> ["NOT NULL"]
                end,
    NewAcc = string:join(Args, " ") ++ "," ++ Join,
    make_f2(T, Join, [NewAcc | Acc]).
140 |
%% Render the partition key as a comma-separated field list.
pk_to_sql(#key_v1{ast = AST}) ->
    string:join(lists:map(fun extract_pk_name/1, AST), ", ").

%% A quantum element renders as QUANTUM(field, size, 'unit'); plain
%% parameters render as the bare field name.
extract_pk_name(#hash_fn_v1{mod = riak_ql_quanta,
                            fn = quantum,
                            args = Args,
                            type = timestamp}) ->
    [?SQL_PARAM{name = [Nm]}, No, Unit] = Args,
    _Q = "QUANTUM(" ++ string:join([binary_to_list(Nm), integer_to_list(No), "'" ++ atom_to_list(Unit) ++ "'"], ", ") ++ ")";
extract_pk_name(Key) ->
    binary_to_list(extract(Key?SQL_PARAM.name)).

%% unwrap a single-element name list
extract([X]) -> X.
154 |
%% Render the local key as a comma-separated column list, appending
%% " DESC" for descending-ordered columns.
lk_to_sql(LK) ->
    string:join(lists:map(fun param_to_string/1, LK#key_v1.ast), ", ").

param_to_string(?SQL_PARAM{name = [Name], ordering = descending}) ->
    binary_to_list(Name) ++ " DESC";
param_to_string(?SQL_PARAM{name = [Name]}) ->
    binary_to_list(Name).
165 |
%% Default join character is a space.
make_props(Props) ->
    make_props(Props, " ").

%% Render [{Key, Value}] as Join ++ "WITH (k = v, ...)"; an empty
%% property list renders as the empty string (no WITH clause at all).
make_props([], _Join) ->
    "";
make_props(Props, Join) ->
    Rendered = [flat_format("~s = ~s",
                            [prop_to_string(Key), prop_to_quoted_string(Val)])
                || {Key, Val} <- Props],
    Join ++ "WITH (" ++ string:join(Rendered, "," ++ Join) ++ ")".
174 |
%% Render a property value as a bare (unquoted) string.
%% The is_boolean clause must precede is_atom: true/false are atoms.
prop_to_string(V) when is_integer(V) -> integer_to_list(V);
prop_to_string(V) when is_boolean(V) -> atom_to_list(V);
prop_to_string(V) when is_float(V)   -> mochinum:digits(V);
prop_to_string(V) when is_binary(V)  -> binary_to_list(V);
prop_to_string(V) when is_atom(V)    -> atom_to_list(V);
prop_to_string(V)                    -> flat_format("~p", [V]).
187 |
%% String-like values (binaries and lists) are wrapped in single
%% quotes; everything else falls back to the bare rendering.
prop_to_quoted_string(V) when is_binary(V); is_list(V) ->
    flat_format("'~s'", [V]);
prop_to_quoted_string(V) ->
    prop_to_string(V).
194 |
195 |
196 | -ifdef(TEST).
197 | -include_lib("eunit/include/eunit.hrl").
198 | -compile(export_all).
199 |
%% Shared helper: parse SQL and assert the rendered select-column names.
test_col_names(SQL, ColNames) ->
    {ok, Parsed} = riak_ql_parser:parse(riak_ql_lexer:get_tokens(SQL)),
    ?assertEqual(
       ColNames,
       col_names_from_select(proplists:get_value(fields, Parsed))
      ).

select_col_to_string_all_test() ->
    test_col_names("select * from bendy", ["*"]).

select_col_to_string_colname_1_test() ->
    test_col_names("select mindy from bendy", ["mindy"]).

select_col_to_string_colname_2_test() ->
    test_col_names("select mindy, raymond from bendy", ["mindy", "raymond"]).

select_col_to_string_integer_literal_test() ->
    test_col_names("select 1 from bendy", ["1"]).

select_col_to_string_boolean_true_literal_test() ->
    test_col_names("select true from bendy", ["true"]).

select_col_to_string_boolean_false_literal_test() ->
    test_col_names("select false from bendy", ["false"]).

select_col_to_string_double_literal_test() ->
    test_col_names("select 7.2 from bendy", ["7.2"]).

select_col_to_string_varchar_literal_test() ->
    test_col_names("select 'derp' from bendy", ["'derp'"]).

select_col_to_string_one_plus_one_test() ->
    test_col_names("select 1+1 from bendy", ["(1+1)"]).

select_col_to_string_four_div_two_test() ->
    test_col_names("select 4/2 from bendy", ["(4/2)"]).

select_col_to_string_four_times_ten_test() ->
    test_col_names("select 4*10 from bendy", ["(4*10)"]).

select_col_to_string_avg_funcall_test() ->
    test_col_names("select avg(mona) from bendy", ["AVG(mona)"]).

%% mixed aggregate and arithmetic are not in 1.1
%% select_col_to_string_avg_funcall_with_nested_maths_test() ->
%%     test_col_names("select avg(10+5) from bendy", ["AVG((10+5))"]).

select_col_to_string_negated_test() ->
    test_col_names("select - 1.0, - 1, -asdf, - asdf from dual",
                   ["-1.0",
                    "-1",
                    "-asdf",
                    "-asdf"]).

select_col_to_string_negated_parens_test() ->
    test_col_names("select -(1), -(asdf), -(3 + -4) from dual",
                   ["-1",
                    "-asdf",
                    "-(3+-4)"]).
325 |
%% Round-trip helper: lex+parse SQL into a DDL record and assert that
%% re-rendering the record reproduces the original SQL exactly.
assert_sql_roundtrip(SQL) ->
    Lexed = riak_ql_lexer:get_tokens(SQL),
    {ddl, DDL = ?DDL{}, _} = riak_ql_parser:ql_parse(Lexed),
    ?assertEqual(SQL, ddl_rec_to_sql(DDL)).

%% As assert_sql_roundtrip/1, but for the multiline renderer with
%% table properties.
assert_sql_roundtrip_multiline(SQL, Props) ->
    Lexed = riak_ql_lexer:get_tokens(SQL),
    {ddl, DDL = ?DDL{}, _} = riak_ql_parser:ql_parse(Lexed),
    ?assertEqual(SQL, ddl_rec_to_sql_multiline(DDL, Props)).

ddl_rec_to_string_test() ->
    assert_sql_roundtrip("CREATE TABLE Mesa "
                         "(Uno TIMESTAMP NOT NULL, "
                         "Dos TIMESTAMP NOT NULL, "
                         "Tres TIMESTAMP NOT NULL, "
                         "PRIMARY KEY ((Uno, Dos, "
                         "QUANTUM(Tres, 1, 'd')), "
                         "Uno, Dos, Tres))").

ddl_rec_with_shorter_pk_to_string_test() ->
    assert_sql_roundtrip("CREATE TABLE Mesa "
                         "(Uno TIMESTAMP NOT NULL, "
                         "Dos TIMESTAMP NOT NULL, "
                         "PRIMARY KEY ((Uno, "
                         "QUANTUM(Dos, 1, 'd')), "
                         "Uno, Dos))").

ddl_rec_with_no_quantum_to_string_test() ->
    assert_sql_roundtrip("CREATE TABLE Mesa "
                         "(Uno TIMESTAMP NOT NULL, "
                         "Dos TIMESTAMP NOT NULL, "
                         "Tres TIMESTAMP NOT NULL, "
                         "PRIMARY KEY ((Uno, Dos, Tres), "
                         "Uno, Dos, Tres))").

ddl_rec_with_longer_pk_no_quantum_to_string_test() ->
    assert_sql_roundtrip("CREATE TABLE Mesa "
                         "(Uno TIMESTAMP NOT NULL, "
                         "Dos TIMESTAMP NOT NULL, "
                         "Tres TIMESTAMP NOT NULL, "
                         "Quatro TIMESTAMP NOT NULL, "
                         "PRIMARY KEY ((Uno, Dos, Tres, Quatro), "
                         "Uno, Dos, Tres, Quatro))").

ddl_rec_to_multiline_string_test() ->
    assert_sql_roundtrip_multiline("CREATE TABLE Mesa "
                                   "(Uno TIMESTAMP NOT NULL,\n"
                                   "Dos TIMESTAMP NOT NULL,\n"
                                   "Tres TIMESTAMP NOT NULL,\n"
                                   "PRIMARY KEY ((Uno, Dos, "
                                   "QUANTUM(Tres, 1, 'd')),\n"
                                   "Uno, Dos, Tres))",
                                   []).

ddl_rec_with_props_to_string_test() ->
    assert_sql_roundtrip_multiline("CREATE TABLE Mesa "
                                   "(Uno TIMESTAMP NOT NULL,\n"
                                   "Dos TIMESTAMP NOT NULL,\n"
                                   "Tres TIMESTAMP NOT NULL,\n"
                                   "PRIMARY KEY ((Uno, Dos, "
                                   "QUANTUM(Tres, 1, 'd')),\n"
                                   "Uno, Dos, Tres))\n"
                                   "WITH (a = 1,\n"
                                   "b = true,\n"
                                   "c = 'hola')",
                                   [{a, 1}, {b, true}, {c, "hola"}]).

ddl_rec_to_string_desc_keys_test() ->
    assert_sql_roundtrip("CREATE TABLE mytab ("
                         "a TIMESTAMP NOT NULL, "
                         "b TIMESTAMP NOT NULL, "
                         "c TIMESTAMP NOT NULL, "
                         "PRIMARY KEY ((a, b, QUANTUM(c, 1, 'd')), a, b, c DESC))").
430 |
431 | -endif.
432 |
--------------------------------------------------------------------------------
/src/riak_ql_window_agg_fns.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% riak_ql_window_agg_fns: implementation of Windows Aggregation Fns
4 | %% for the query runner
5 | %%
6 | %% Copyright (c) 2016, 2017 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 | -module(riak_ql_window_agg_fns).
24 |
25 |
26 | -export(['COUNT'/2, 'SUM'/2, 'AVG'/2, 'MEAN'/2, 'MIN'/2, 'MAX'/2, 'STDDEV'/2, 'STDDEV_POP'/2, 'STDDEV_SAMP'/2]).
27 | -export([add/2, divide/2, multiply/2, subtract/2]).
28 | -export([finalise/2]).
29 | -export([start_state/1]).
30 | -export([fn_arity/1]).
31 | -export([fn_type_signature/2]).
32 | -export([fn_param_check/2]).
33 | -export([supported_functions/0]).
34 |
35 | -type aggregate_function() :: 'COUNT' | 'SUM' | 'AVG' |'MEAN' | 'MIN' | 'MAX' | 'STDDEV' | 'STDDEV_POP' | 'STDDEV_SAMP'.
36 |
37 | -include("riak_ql_ddl.hrl").
38 |
-spec supported_functions() -> [aggregate_function()].
%% The complete list of aggregate window functions this module implements.
supported_functions() ->
    ['COUNT', 'SUM', 'AVG', 'MEAN', 'MIN', 'MAX',
     'STDDEV', 'STDDEV_POP', 'STDDEV_SAMP'].
42 |
-spec fn_type_signature(aggregate_function(), Args::[riak_ql_ddl:external_field_type()]) ->
        riak_ql_ddl:external_field_type() |
        {error, {argument_type_mismatch, aggregate_function(),
                 [riak_ql_ddl:external_field_type()]}}.
%% Return the result type of an aggregate function for a given list of
%% argument types. Unsupported function/argument combinations return an
%% {error, ...} tuple rather than raising; the spec now documents that
%% (the catch-all clause's error return was previously missing from it).
fn_type_signature('AVG', [double]) -> double;
fn_type_signature('AVG', [sint64]) -> double;
fn_type_signature('AVG', [timestamp]) -> double;
fn_type_signature('COUNT', [_]) -> sint64;
fn_type_signature('MAX', [double]) -> double;
fn_type_signature('MAX', [sint64]) -> sint64;
fn_type_signature('MAX', [timestamp]) -> timestamp;
fn_type_signature('MEAN', Args) -> fn_type_signature('AVG', Args);
fn_type_signature('MIN', [double]) -> double;
fn_type_signature('MIN', [sint64]) -> sint64;
fn_type_signature('MIN', [timestamp]) -> timestamp;
fn_type_signature('STDDEV', Args) -> fn_type_signature('STDDEV_SAMP', Args);
fn_type_signature('STDDEV_POP', [double]) -> double;
fn_type_signature('STDDEV_POP', [sint64]) -> double;
fn_type_signature('STDDEV_POP', [timestamp]) -> double;
fn_type_signature('STDDEV_SAMP', [double]) -> double;
fn_type_signature('STDDEV_SAMP', [sint64]) -> double;
fn_type_signature('STDDEV_SAMP', [timestamp]) -> double;
fn_type_signature('SUM', [double]) -> double;
fn_type_signature('SUM', [sint64]) -> sint64;
fn_type_signature('SUM', [timestamp]) -> timestamp;
fn_type_signature(Fn, Args) ->
    {error, {argument_type_mismatch, Fn, Args}}.
68 |
-spec fn_param_check(aggregate_function(), [riak_ql_ddl:external_field_type()]) ->
        ok. %% | {error, WhichParamInvalid::pos_integer()}.
%% None of the supported aggregate functions take extra parameters, so
%% the only valid parameter list is the empty one; if extra parameters
%% existed, this check could return an error tuple instead.
fn_param_check(_Fn, []) ->
    ok.
75 |
%% All supported aggregate functions take exactly one column argument.
fn_arity(_Fn) -> 1.
78 |
%% Get the initial accumulator state for the aggregation.
-spec start_state(aggregate_function()) ->
        any().
start_state(Fn) ->
    case Fn of
        'AVG'         -> ?SQL_NULL;
        'MEAN'        -> start_state('AVG');
        'COUNT'       -> 0;
        'MAX'         -> ?SQL_NULL;
        'MIN'         -> ?SQL_NULL;
        'STDDEV'      -> start_state_stddev();
        'STDDEV_POP'  -> start_state_stddev();
        'STDDEV_SAMP' -> start_state_stddev();
        'SUM'         -> ?SQL_NULL;
        _             -> stateless
    end.

%% Running stddev accumulator: {Count, RunningMean, SumSqDiff};
%% updated incrementally by 'STDDEV_POP'/2.
start_state_stddev() ->
    {0, 0.0, 0.0}.
96 |
%% Calculate the final result from the accumulated state.
-spec finalise(aggregate_function(), any()) -> any().
finalise(_, ?SQL_NULL) ->
    ?SQL_NULL;
finalise('MEAN', State) ->
    finalise('AVG', State);
finalise('AVG', {Count, Total}) ->
    Total / Count;
finalise(Fn, {Count, _, _})
  when (Fn == 'STDDEV' orelse Fn == 'STDDEV_POP' orelse Fn == 'STDDEV_SAMP'),
       Count < 2 ->
    %% a standard deviation needs two or more values, otherwise it is NULL
    ?SQL_NULL;
finalise('STDDEV', State) ->
    finalise('STDDEV_SAMP', State);
finalise('STDDEV_POP', {Count, _, SumSqDiff}) ->
    math:sqrt(SumSqDiff / Count);
finalise('STDDEV_SAMP', {Count, _, SumSqDiff}) ->
    math:sqrt(SumSqDiff / (Count - 1));
finalise(_Fn, Acc) ->
    Acc.
116 |
117 | %% Group functions (avg, mean etc). These can only appear as top-level
118 | %% expressions in SELECT part, and there can be only one in a query.
119 | %% Can take an Expr that includes the column identifier and some static
120 | %% values.
121 | %%
122 | %% Incrementally operates on chunks, needs to carry state.
123 |
%% Count non-NULL cells; NULL cells do not contribute.
'COUNT'(?SQL_NULL, N) ->
    N;
'COUNT'(_Value, N) ->
    N + 1.
128 |
%% Running sum: NULL cells are skipped and the first non-NULL value
%% seeds a NULL accumulator.
'SUM'(Value, Acc) when is_number(Value), is_number(Acc) ->
    Value + Acc;
'SUM'(?SQL_NULL, Acc) ->
    Acc;
'SUM'(Value, ?SQL_NULL) ->
    Value.
135 |
%% MEAN is an alias for AVG.
'MEAN'(Value, State) ->
    'AVG'(Value, State).

%% Accumulate {Count, Sum}; finalise/2 divides to produce the average.
%% NULL cells are skipped, and the state stays NULL until the first
%% numeric value arrives.
'AVG'(Value, {Count, Sum}) when is_number(Value) ->
    {Count + 1, Sum + Value};
'AVG'(Value, ?SQL_NULL) when is_number(Value) ->
    {1, Value};
'AVG'(?SQL_NULL, {_, _} = State) ->
    State;
'AVG'(?SQL_NULL, ?SQL_NULL) ->
    ?SQL_NULL.
147 |
%% Running minimum. NULL cells are skipped; the first non-NULL value
%% seeds a NULL state. The explicit NULL-argument clause makes the
%% skip independent of Erlang term ordering — previously a NULL
%% argument was skipped only because it compares greater than numbers
%% (NOTE(review): for non-numeric states such as binaries, the old
%% `Arg < State` clause could have selected NULL as the minimum).
'MIN'(Arg, ?SQL_NULL) -> Arg;
'MIN'(?SQL_NULL, State) -> State;
'MIN'(Arg, State) when Arg < State -> Arg;
'MIN'(_, State) -> State.
151 |
%% Running maximum; NULL cells are skipped and a NULL state is seeded
%% by the first numeric value.
'MAX'(Arg, ?SQL_NULL) when is_number(Arg) -> Arg;
'MAX'(?SQL_NULL, State) when is_number(State) -> State;
'MAX'(?SQL_NULL, ?SQL_NULL) -> ?SQL_NULL;
'MAX'(Arg, State) when Arg > State -> Arg;
'MAX'(_, State) -> State.
157 |
%% STDDEV and STDDEV_SAMP share STDDEV_POP's accumulator; only the
%% finalise/2 step differs between population and sample variants.
'STDDEV'(Arg, State) ->
    'STDDEV_POP'(Arg, State).

%% One incremental step of the rapid (online) calculation method; Mean
%% and SumSq are the A and Q of
%% https://en.wikipedia.org/wiki/Standard_deviation#Rapid_calculation_methods
'STDDEV_POP'(Value, {Count0, Mean0, SumSq0}) when is_number(Value) ->
    Count = Count0 + 1,
    Mean = Mean0 + (Value - Mean0) / Count,
    SumSq = SumSq0 + (Value - Mean0) * (Value - Mean),
    {Count, Mean, SumSq};
'STDDEV_POP'(_, State) ->
    %% non-numeric (e.g. NULL) cells leave the state untouched
    State.

'STDDEV_SAMP'(Arg, State) ->
    'STDDEV_POP'(Arg, State).
172 |
%% SQL three-valued arithmetic: any NULL operand yields NULL.
add(?SQL_NULL, _) -> ?SQL_NULL;
add(_, ?SQL_NULL) -> ?SQL_NULL;
add(Lhs, Rhs)     -> Lhs + Rhs.
177 |
%% SQL division: NULL propagates; division by zero raises
%% divide_by_zero; two integers use integer division, any float makes
%% it float division.
divide(?SQL_NULL, _) -> ?SQL_NULL;
divide(_, ?SQL_NULL) -> ?SQL_NULL;
divide(_, Divisor) when Divisor == 0 ->
    %% `== 0` catches integer 0 and both float zeros; the previous
    %% literal clauses (0 and 0.0) could miss -0.0 on OTP releases
    %% where -0.0 no longer matches the 0.0 pattern.
    error(divide_by_zero);
divide(A, B) when is_integer(A) andalso is_integer(B) ->
    A div B;
divide(A, B) ->
    A / B.
186 |
%% NULL-propagating multiplication.
multiply(?SQL_NULL, _) -> ?SQL_NULL;
multiply(_, ?SQL_NULL) -> ?SQL_NULL;
multiply(Lhs, Rhs)     -> Lhs * Rhs.

%% NULL-propagating subtraction.
subtract(?SQL_NULL, _) -> ?SQL_NULL;
subtract(_, ?SQL_NULL) -> ?SQL_NULL;
subtract(Lhs, Rhs)     -> Lhs - Rhs.
196 |
197 | -ifdef(TEST).
198 | -include_lib("eunit/include/eunit.hrl").
199 |
stddev_pop_test() ->
    Data = [1.0, 2.0, 3.0, 4.0, 2.0,
            3.0, 4.0, 4.0, 4.0, 3.0,
            2.0, 3.0, 2.0, 1.0, 1.0],
    %% numpy.std(Data) computes it to:
    Expected = 1.0832051206181281,
    %% There is a possibility of Erlang computing it differently, on
    %% fairy 16-bit architectures or some such. If this happens, we
    %% need to run python on that arch to figure out what Expected
    %% value can be then. Or, introduce an epsilon and check that the
    %% delta is small enough.
    Final = lists:foldl(fun 'STDDEV_POP'/2, start_state('STDDEV_POP'), Data),
    ?assertEqual(Expected, finalise('STDDEV_POP', Final)).

stddev_samp_test() ->
    Data = [1.0, 2.0, 3.0, 4.0, 2.0,
            3.0, 4.0, 4.0, 4.0, 3.0,
            2.0, 3.0, 2.0, 1.0, 1.0],
    Final = lists:foldl(fun 'STDDEV_SAMP'/2, start_state('STDDEV_SAMP'), Data),
    %% expected value calculated using Postgres STDDEV_SAMP
    ?assertEqual(1.1212238211627762, finalise('STDDEV_SAMP', Final)).

stddev_pop_no_value_test() ->
    ?assertEqual([],
                 finalise('STDDEV_POP', start_state('STDDEV_POP'))).

stddev_pop_one_value_test() ->
    ?assertEqual(?SQL_NULL,
                 finalise('STDDEV_POP',
                          'STDDEV_POP'(3, start_state('STDDEV_POP')))).

stddev_pop_two_value_test() ->
    State = lists:foldl(fun 'STDDEV_POP'/2, start_state('STDDEV_POP'),
                        [1.0, 2.0]),
    ?assertEqual(0.5, finalise('STDDEV_POP', State)).
247 |
248 |
%% Fold InputList through 'AVG'/2 and finalise to the mean.
testing_fold_avg(InitialState, InputList) ->
    finalise('AVG', lists:foldl(fun 'AVG'/2, InitialState, InputList)).

avg_integer_test() ->
    ?assertEqual(10 / 4, testing_fold_avg(start_state('AVG'), [1, 2, 3, 4])).
avg_double_test() ->
    ?assertEqual(10 / 4, testing_fold_avg(start_state('AVG'), [1.0, 2.0, 3.0, 4.0])).
avg_null_right_test() ->
    ?assertEqual(1.0, finalise('AVG', 'AVG'(1, ?SQL_NULL))).
avg_null_null_test() ->
    ?assertEqual(?SQL_NULL, 'AVG'(?SQL_NULL, ?SQL_NULL)).
avg_finalise_null_test() ->
    ?assertEqual(?SQL_NULL, finalise('AVG', start_state('AVG'))).
268 |
%% 'SUM' accumulation, including NULL seeding and propagation.
sum_null_state_arg_integer_test() ->
    Got = 'SUM'(1, ?SQL_NULL),
    ?assertEqual(1, Got).
sum_integer_state_arg_integer_test() ->
    Got = 'SUM'(1, 3),
    ?assertEqual(4, Got).
sum_double_state_arg_double_test() ->
    Got = 'SUM'(1.2, 3.3),
    ?assertEqual(4.5, Got).
sum_null_state_arg_double_test() ->
    Got = 'SUM'(1.1, ?SQL_NULL),
    ?assertEqual(1.1, Got).
sum_null_null_test() ->
    Got = 'SUM'(?SQL_NULL, ?SQL_NULL),
    ?assertEqual(?SQL_NULL, Got).
sum_finalise_null_test() ->
    Got = finalise('SUM', start_state('SUM')),
    ?assertEqual(?SQL_NULL, Got).

%% 'MIN' accumulation.
min_integer_test() ->
    ?assertEqual(erlang:min(1, 3), 'MIN'(1, 3)).
min_double_test() ->
    ?assertEqual(erlang:min(1.0, 3.0), 'MIN'(1.0, 3.0)).
min_null_left_test() ->
    ?assertEqual(3, 'MIN'(?SQL_NULL, 3)).
min_null_right_test() ->
    ?assertEqual(1, 'MIN'(1, ?SQL_NULL)).
min_null_null_test() ->
    ?assertEqual(?SQL_NULL, 'MIN'(?SQL_NULL, ?SQL_NULL)).
min_finalise_null_test() ->
    ?assertEqual(?SQL_NULL, finalise('MIN', start_state('MIN'))).

%% 'MAX' accumulation.
max_test() ->
    ?assertEqual('MAX'(1, 3), erlang:max(1, 3)).
max_double_test() ->
    ?assertEqual(erlang:max(1.0, 3.0), 'MAX'(1.0, 3.0)).
max_null_left_test() ->
    ?assertEqual(3, 'MAX'(?SQL_NULL, 3)).
max_null_right_test() ->
    ?assertEqual(1, 'MAX'(1, ?SQL_NULL)).
max_null_null_test() ->
    ?assertEqual(?SQL_NULL, 'MAX'(?SQL_NULL, ?SQL_NULL)).
max_finalise_null_test() ->
    ?assertEqual(?SQL_NULL, finalise('MAX', start_state('MAX'))).

%% Fold InputList through the named aggregate and finalise the result.
testing_fold_agg(FnName, InitialState, InputList) ->
    Folded = lists:foldl(fun(E, Acc) -> ?MODULE:FnName(E, Acc) end,
                         InitialState, InputList),
    finalise(FnName, Folded).

%% 'COUNT' skips NULL cells but counts every other value, including
%% whole rows represented as lists.
count_no_values_test() ->
    ?assertEqual(0, finalise('COUNT', start_state('COUNT'))).
count_all_null_values_test() ->
    ?assertEqual(0, testing_fold_agg('COUNT', start_state('COUNT'),
                                     [?SQL_NULL, ?SQL_NULL])).
count_some_null_values_test() ->
    ?assertEqual(2, testing_fold_agg('COUNT', start_state('COUNT'),
                                     [?SQL_NULL, <<"bob">>, ?SQL_NULL, <<"boris">>])).
count_values_test() ->
    ?assertEqual(4, testing_fold_agg('COUNT', start_state('COUNT'),
                                     [1, 2, 3, 4])).
count_rows_test() ->
    ?assertEqual(4, testing_fold_agg('COUNT', start_state('COUNT'),
                                     [[1,2,3,4], [1,2,3,4], [1,2,3,4], [1,2,3,4]])).
count_rows_with_nulls_test() ->
    ?assertEqual(3, testing_fold_agg('COUNT', start_state('COUNT'),
                                     [[1,2,3,4], ?SQL_NULL, [1,2,3,4], [1,2,3,4]])).
341 | -endif.
342 |
--------------------------------------------------------------------------------
/test/compiler_basic_1.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% a basic test suite for the compiler
4 | %%
5 | %%
6 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 | -module(compiler_basic_1).
24 |
25 | -include_lib("eunit/include/eunit.hrl").
26 |
27 | %% this is a basic test of timeseries that writes a single element to the back end
28 | %% and checks it is correct
29 |
30 | -define(VALID, true).
31 | -define(INVALID, false).
32 |
33 | -compile(export_all).
34 |
35 | %%
36 | %% this test calls into the PRIVATE interface
37 | %% compile_and_load_from_tmp/1
38 | %%
39 | -define(passing_test(Name, Query, Val, ExpectedPK, ExpectedLK),
40 | Name() ->
41 | Lexed = riak_ql_lexer:get_tokens(Query),
42 | {ddl, DDL, _Props} = riak_ql_parser:ql_parse(Lexed),
43 | case riak_ql_ddl_compiler:compile_and_load_from_tmp(DDL) of
44 | {module, Module} ->
45 | Result = Module:validate_obj(Val),
46 | GotPK = riak_ql_ddl:get_partition_key(DDL, Val),
47 | GotLK = riak_ql_ddl:get_local_key(DDL, Val),
48 | Expected = {?VALID, ExpectedPK, ExpectedLK},
49 | Got = {Result, GotPK, GotLK},
50 | ?assertEqual(Expected, Got);
51 | _Other ->
52 | ?debugFmt("~n~p compilation failed:~n~p", [Name, _Other]),
53 | ?assert(false)
54 | end).
55 |
56 | -define(passing_short_test(Name, Query, Val),
57 | Name() ->
58 | Lexed = riak_ql_lexer:get_tokens(Query),
59 | {ddl, DDL, _Props} = riak_ql_parser:ql_parse(Lexed),
60 | case riak_ql_ddl_compiler:compile_and_load_from_tmp(DDL) of
61 | {module, Module} ->
62 | Result = Module:validate_obj(Val),
63 | ?assertEqual(?VALID, Result);
64 | _Other ->
65 | ?debugFmt("~n~p compilation failed:~n~p", [Name, _Other]),
66 | ?assert(false)
67 | end).
68 |
69 |
70 | %%
71 | %% this test calls into the PRIVATE interface
72 | %% compile_and_load_from_tmp/1
73 | %%
74 | -define(failing_test(Name, Query, Val),
75 | Name() ->
76 | Lexed = riak_ql_lexer:get_tokens(Query),
77 | {ddl, DDL, _Props} = riak_ql_parser:ql_parse(Lexed),
78 | case riak_ql_ddl_compiler:compile_and_load_from_tmp(DDL) of
79 | {module, Module} ->
80 | Result = Module:validate_obj(Val),
81 | ?assertEqual(?INVALID, Result);
82 | _Other ->
83 | ?assertEqual(?INVALID, false) % didn't compile
84 | end).
85 |
86 |
87 | %%
88 | %% this test calls in the PUBLIC interface
89 | %% compile_and_load_from_tmp/1
90 | %%
91 | -define(not_valid_test(Name, Query),
92 | Name() ->
93 | Lexed = riak_ql_lexer:get_tokens(Query),
94 | {ddl, DDL, _Props} = riak_ql_parser:ql_parse(Lexed),
95 | case riak_ql_ddl_compiler:compile_and_load_from_tmp(DDL) of
96 | {error, _} ->
97 | ?assertEqual(?VALID, true);
98 | Other ->
99 | ?assertEqual(?VALID, Other) % didn't compile
100 | end).
101 |
102 |
103 | %%
104 | %% this test tests that the DDL returned by the helper fun is
105 | %% the same as the DDL that the helper fun was compiled from
106 | %%
107 | -define(ddl_roundtrip_assert(Name, Query),
108 | Name() ->
109 | Lexed = riak_ql_lexer:get_tokens(Query),
110 | {ddl, DDL, _Props} = riak_ql_parser:ql_parse(Lexed),
111 | %% ?debugFmt("in ~p~n- DDL is:~n -~p~n", [Name, DDL]),
112 | {module, Module} = riak_ql_ddl_compiler:compile_and_load_from_tmp(DDL),
113 | Got = Module:get_ddl(),
114 | %% ?debugFmt("in ~p~n- Got is:~n -~p~n", [Name, Got]),
115 | ?assertEqual(DDL, Got)).
116 |
117 | %%
118 | %% round trip passing tests
119 | %%
120 |
121 | -define(GOOD_DDL,
122 | "create table temperatures ("
123 | " user_id varchar not null,"
124 | " user_di varchar not null,"
125 | " time timestamp not null,"
126 | " primary key ((user_id, user_di, quantum(time, 1, 'm')), user_id, user_di, time))").
127 | -define(GOOD_DDL_INT,
128 | "create table temperatures ("
129 | " user_id varchar not null,"
130 | " counter sint64 not null, "
131 | " time timestamp not null,"
132 | " primary key ((user_id, counter, quantum(time, 1, 'm')), user_id, counter, time))").
133 | -define(GOOD_DDL_DOUBLE,
134 | "create table temperatures ("
135 | " user_id varchar not null,"
136 | " bouble double not null, "
137 | " time timestamp not null,"
138 | " primary key ((user_id, bouble, quantum(time, 1, 'm')), user_id, bouble, time))").
139 |
140 | ?passing_test(round_trip_test,
141 | ?GOOD_DDL,
142 | {<<"beeees">>, <<"boooos">>, 12345},
143 | [{varchar, <<"beeees">>}, {varchar, <<"boooos">>}, {timestamp, 0}],
144 | [{varchar, <<"beeees">>}, {varchar, <<"boooos">>}, {timestamp, 12345}]).
145 |
146 | ?passing_short_test(sint64_type_test,
147 | ?GOOD_DDL_INT,
148 | { <<"boooos">>, 12345, 222222222}).
149 |
150 | ?passing_short_test(double_type_test,
151 | ?GOOD_DDL_DOUBLE,
152 | { <<"boooos">>, 12345.6, 222222222}).
153 |
154 | ?failing_test(round_trip_fail_test,
155 | ?GOOD_DDL,
156 | {<<"banjette">>, <<"beeees">>}).
157 |
158 | %%
159 | %% roundtrip DDL tests
160 | %%
161 | ?ddl_roundtrip_assert(round_trip_ddl_test,
162 | ?GOOD_DDL).
163 |
164 | ?ddl_roundtrip_assert(sint64_type_ddl_test,
165 | ?GOOD_DDL_INT).
166 |
167 | ?ddl_roundtrip_assert(double_ddl_test,
168 | ?GOOD_DDL_DOUBLE).
169 |
%% Lex, parse and compile a CREATE TABLE statement into a loaded DDL
%% helper module; returns {module, Mod} or crashes on any failure.
sql_to_module(SQL) ->
    Tokens = riak_ql_lexer:get_tokens(SQL),
    {ddl, DDL, _Props} = riak_ql_parser:ql_parse(Tokens),
    {module, _Module} = riak_ql_ddl_compiler:compile_and_load_from_tmp(DDL).
175 |
%% A plain ascending-key table only needs the v1 DDL capability.
get_min_required_ddl_cap_v1_test() ->
    Table = "CREATE TABLE get_min_required_ddl_cap_v1_test ("
            " a varchar not null,"
            " b varchar not null,"
            " c timestamp not null,"
            " primary key ((a, b, quantum(c, 1, 'm')), a, b, c))",
    {module, Mod} = sql_to_module(Table),
    ?assertEqual(v1, Mod:get_min_required_ddl_cap()).

%% A DESC local-key column bumps the required capability to v2.
get_min_required_ddl_cap_desc_key_test() ->
    Table = "CREATE TABLE get_min_required_ddl_cap_desc_key_test ("
            " a varchar NOT NULL,"
            " b varchar NOT NULL,"
            " c timestamp NOT NULL,"
            " primary key ((a, b, quantum(c, 1, 'm')), a, b, c DESC))",
    {module, Mod} = sql_to_module(Table),
    ?assertEqual(v2, Mod:get_min_required_ddl_cap()).
201 |
--------------------------------------------------------------------------------
/test/lexer_tests.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% Tests for the Lexer
4 | %%
5 | %%
6 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 | -module(lexer_tests).
24 |
25 | -compile([export_all]).
26 |
27 | -include_lib("eunit/include/eunit.hrl").
28 | -include("riak_ql_ddl.hrl").
29 |
30 | %% Tests
keywords_1_test_() ->
    ?_assertEqual([{select, <<"select">>}],
                  riak_ql_lexer:get_tokens("select")).

keywords_2_test_() ->
    %% keyword tokens preserve the original casing in their payload
    ?_assertEqual([{select, <<"seLEct">>}],
                  riak_ql_lexer:get_tokens("seLEct")).

keywords_3_test_() ->
    Expected = [{from, <<"from">>},
                {limit, <<"LiMit">>},
                {where, <<"Where">>},
                {and_, <<"anD">>},
                {or_, <<"oR">>}],
    ?_assertEqual(Expected,
                  riak_ql_lexer:get_tokens("from LiMit Where anD oR")).

keywords_3a_test_() ->
    Expected = [{from, <<"from">>},
                {limit, <<"limit">>},
                {where, <<"where">>},
                {and_, <<"and">>},
                {or_, <<"or">>}],
    ?_assertEqual(Expected,
                  riak_ql_lexer:get_tokens("from limit where and or")).

keywords_3b_test_() ->
    Expected = [{from, <<"FROM">>},
                {limit, <<"LIMIT">>},
                {where, <<"WHERE">>},
                {and_, <<"AND">>},
                {or_, <<"OR">>}],
    ?_assertEqual(Expected,
                  riak_ql_lexer:get_tokens("FROM LIMIT WHERE AND OR")).
73 |
%% Type keywords lex case-insensitively, preserving the input casing.
keyword_general_test_() ->
    Got = riak_ql_lexer:get_tokens("boolean BOOLEAN booLEan "),
    ?_assertEqual([{boolean, <<"boolean">>},
                   {boolean, <<"BOOLEAN">>},
                   {boolean, <<"booLEan">>}],
                  Got).

keyword_int_test_() ->
    Got = riak_ql_lexer:get_tokens("sint64 SINT64 siNT64 Sint64"),
    ?_assertEqual([{sint64, <<"sint64">>},
                   {sint64, <<"SINT64">>},
                   {sint64, <<"siNT64">>},
                   {sint64, <<"Sint64">>}],
                  Got).

keyword_double_test_() ->
    Got = riak_ql_lexer:get_tokens("double Double dOuble DOUBLE"),
    ?_assertEqual([{double, <<"double">>},
                   {double, <<"Double">>},
                   {double, <<"dOuble">>},
                   {double, <<"DOUBLE">>}],
                  Got).

keywords_create_test_() ->
    Expected = [{create, <<"create">>},
                {table, <<"table">>},
                {not_, <<"not">>},
                {null, <<"null">>},
                {primary, <<"primary">>},
                {key, <<"key">>}],
    ?_assertEqual(Expected,
                  riak_ql_lexer:get_tokens("create table not null primary key")).
113 |
%% Words that merely *contain* a keyword ("horror", "andy", ...) must lex
%% as plain identifiers, not as the embedded keyword.
words_containing_keywords_test_() ->
    Input = "error or horror and handy andy or "
            "zdoublecintxcreateyb or jimmy3boy or jimmy4",
    ?_assertEqual([{identifier, <<"error">>},
                   {or_, <<"or">>},
                   {identifier, <<"horror">>},
                   {and_, <<"and">>},
                   {identifier, <<"handy">>},
                   {identifier, <<"andy">>},
                   {or_, <<"or">>},
                   {identifier, <<"zdoublecintxcreateyb">>},
                   {or_, <<"or">>},
                   {identifier, <<"jimmy3boy">>},
                   {or_, <<"or">>},
                   {identifier, <<"jimmy4">>}],
                  riak_ql_lexer:get_tokens(Input)).

%% Digits split tokens: quoted 'sha512' is one literal, but bare "sha 512"
%% is an identifier followed by an integer.
words_containing_digits_test_() ->
    ?_assertEqual([{character_literal, <<"sha512">>},
                   {identifier, <<"sha">>},
                   {integer, 512}],
                  riak_ql_lexer:get_tokens("'sha512' sha 512")).
141 |
%% Numeric literals: integers stay integers, anything with a '.' or an
%% exponent becomes a float.  Note the lexer normalises exponent forms
%% (44e4 -> 4.4e5, 44e-4 -> 0.0044, 44e0 -> 44.0).
nums_test_() ->
    Got = riak_ql_lexer:get_tokens(
            "1 -2 2.0 -2.0 3.3e+3 -3.3e-3 44e4 44e-4 44e+4 44e-0 44e+0 44e0"),
    ?_assertEqual([{integer, 1},
                   {integer, -2},
                   {float, 2.0},
                   {float, -2.0},
                   {float, 3.3e3},
                   {float, -0.0033},
                   {float, 4.4e5},
                   {float, 0.0044},
                   {float, 4.4e5},
                   {float, 44.0},
                   {float, 44.0},
                   {float, 44.0}],
                  Got).
159 |
%% Number-like text inside single quotes is NOT tokenised as a number:
%% the whole quoted span stays one character literal.
floats_in_character_literals_test_() ->
    Got = riak_ql_lexer:get_tokens("'hello44.4'"),
    ?_assertEqual([{character_literal, <<"hello44.4">>}], Got).

negative_floats_in_character_literals_test_() ->
    Got = riak_ql_lexer:get_tokens("'hello-44.4'"),
    ?_assertEqual([{character_literal, <<"hello-44.4">>}], Got).

sci_floats_in_character_literals_test_() ->
    Got = riak_ql_lexer:get_tokens("'hello4.40000000000000035527e+00'"),
    ?_assertEqual([{character_literal, <<"hello4.40000000000000035527e+00">>}], Got).

negative_sci_floats_in_character_literals_test_() ->
    Got = riak_ql_lexer:get_tokens("'hello-4.40000000000000035527e+00'"),
    ?_assertEqual([{character_literal, <<"hello-4.40000000000000035527e+00">>}], Got).
182 |
%% Comparison operators, IS [NOT] NULL keyword sequences and parentheses.
ops_test_() ->
    ?_assertEqual([{greater_than_operator, <<">">>},
                   {less_than_operator, <<"<">>},
                   {ne, <<"<>">>},
                   {nomatch, <<"!=">>},
                   {notapprox, <<"!~">>},
                   {equals_operator, <<"=">>},
                   {approx, <<"=~">>}],
                  riak_ql_lexer:get_tokens("> < <> != !~ = =~")).

is_null_test_() ->
    ?_assertEqual([{identifier, <<"field_name">>},
                   {is_, <<"IS">>},
                   {null, <<"NULL">>}],
                  riak_ql_lexer:get_tokens("field_name IS NULL")).

is_not_null_test_() ->
    ?_assertEqual([{identifier, <<"field_name">>},
                   {is_, <<"IS">>},
                   {not_, <<"NOT">>},
                   {null, <<"NULL">>}],
                  riak_ql_lexer:get_tokens("field_name IS NOT NULL")).

%% Leading whitespace is skipped; parens lex to left/right paren tokens.
brackets_test_() ->
    ?_assertEqual([{left_paren, <<"(">>},
                   {right_paren, <<")">>}],
                  riak_ql_lexer:get_tokens(" ( )")).
222 |
%% Unicode is accepted inside single-quoted literals ...
unicode_character_literal_test_() ->
    ?_assertEqual([{character_literal, <<"πίτσα пицца ピザ 比萨">>}],
                  riak_ql_lexer:get_tokens("'πίτσα пицца ピザ 比萨'")).

%% ... but rejected in bare identifiers ...
unicode_identifier_test_() ->
    ?_assertException(error, unicode_in_identifier,
                      riak_ql_lexer:get_tokens("πίτσα пицца ピザ 比萨")).

%% ... and in double-quoted identifiers.
unicode_quoted_test_() ->
    ?_assertException(error, unicode_in_quotes,
                      riak_ql_lexer:get_tokens("\"helピザlo\"")).
243 |
%% Embedded zero digits survive in all three quoting styles.
inner_zero_test_() ->
    ?_assertEqual([{identifier, <<"foo01">>},
                   {identifier, <<"foo01">>},
                   {character_literal, <<"foo01">>}],
                  riak_ql_lexer:get_tokens("foo01 \"foo01\" 'foo01'")).

%% Identifiers that start with a type/DDL keyword are still identifiers.
keywords_in_words_test_() ->
    ?_assertEqual([{identifier, <<"mydouble">>},
                   {identifier, <<"mysint64">>},
                   {identifier, <<"myboolean">>},
                   {identifier, <<"mycreate">>},
                   {identifier, <<"myany">>}],
                  riak_ql_lexer:get_tokens("mydouble mysint64 myboolean mycreate myany")).

%% A quoted word is a plain character literal (no date parsing).
not_a_date_test_() ->
    ?_assertEqual([{character_literal, <<"ergle">>}],
                  riak_ql_lexer:get_tokens("'ergle'")).
270 |
%% Doubled single quotes ('') inside a literal are an escaped quote;
%% '''' therefore lexes to a literal containing just "'".
multiple_single_quotes_test_() ->
    ?_assertEqual([{character_literal, <<"user_1">>},
                   {character_literal, <<"San Fierro">>},
                   {character_literal, <<"klingon'name">>},
                   {character_literal, <<"'a ">>},
                   {character_literal, <<"'">>}],
                  riak_ql_lexer:get_tokens("'user_1' 'San Fierro' 'klingon''name' '''a ' ''''")).

%% Double-quoted identifiers may contain spaces and symbols verbatim.
double_quote_1_test_() ->
    ?_assertEqual([{identifier, <<" yardle hoop !@#$%^&*() _ -">>}],
                  riak_ql_lexer:get_tokens("\" yardle hoop !@#$%^&*() _ -\"")).

double_quote_2_test_() ->
    ?_assertEqual([{identifier, <<"first quote">>},
                   {identifier, <<"second quote">>}],
                  riak_ql_lexer:get_tokens("\"first quote\" \"second quote\"")).
296 |
%% Regexes are delimited by '/'; trailing flags (e.g. /i) are kept in the
%% token.  The leftover io:format debug prints in regex_2 have been removed
%% -- they only produced console noise during test runs.
regex_1_test_() ->
    Got = riak_ql_lexer:get_tokens("/*./"),
    Expected = [{regex, <<"/*./">>}],
    ?_assertEqual(Expected, Got).

regex_2_test_() ->
    Got = riak_ql_lexer:get_tokens("/^*./i"),
    Expected = [{regex, <<"/^*./i">>}],
    ?_assertEqual(Expected, Got).

regex_3_test_() ->
    Got = riak_ql_lexer:get_tokens("/*./ or /erkle/"),
    Expected = [{regex, <<"/*./">>},
                {or_, <<"or">>},
                {regex, <<"/erkle/">>}],
    ?_assertEqual(Expected, Got).
321 |
%% Mixed identifiers, a comma and a keyword.  Debug io:format prints
%% removed -- they added console noise and tested nothing.
chars_test_() ->
    Got = riak_ql_lexer:get_tokens("r_t , ee where"),
    Expected = [{identifier, <<"r_t">>},
                {comma, <<",">>},
                {identifier, <<"ee">>},
                {where, <<"where">>}],
    ?_assertEqual(Expected, Got).
333 |
%% Arithmetic operator tokens.
%% NOTE(review): "arithmatic" is a misspelling of "arithmetic"; the name is
%% left unchanged so the reported eunit test name stays stable.
arithmatic_test_() ->
    ?_assertEqual([{plus_sign, <<"+">>},
                   {minus_sign, <<"-">>},
                   {asterisk, <<"*">>},
                   {solidus, <<"/">>}],
                  riak_ql_lexer:get_tokens(" + - * / ")).

semicolon_test_() ->
    ?_assertEqual([{semicolon, <<";">>}],
                  riak_ql_lexer:get_tokens(";")).
348 |
%% A small end-to-end lex of a SELECT statement; the quoted timestamp-ish
%% text stays one character literal.  Debug io:format prints removed.
general_test_() ->
    Got = riak_ql_lexer:get_tokens("select v from r_t where time > '23 April 63 1:2:3'"),
    Expected = [{select, <<"select">>},
                {identifier, <<"v">>},
                {from, <<"from">>},
                {identifier, <<"r_t">>},
                {where, <<"where">>},
                {identifier, <<"time">>},
                {greater_than_operator, <<">">>},
                {character_literal, <<"23 April 63 1:2:3">>}],
    ?_assertEqual(Expected, Got).
364 |
%% Lexing a full CREATE TABLE statement.  Note the input deliberately uses
%% "not_null" (one identifier, not the NOT NULL keywords) and its PRIMARY
%% KEY parens are unbalanced -- the lexer does not care about balance.
timeseries_test_() ->
    Sql = "CREATE TABLE Geo ("
          "geohash varchar not_null, "
          "user varchar not_null, "
          "time timestamp not_null, "
          "weather varchar not_null, "
          "temperature double not_null, "
          "PRIMARY KEY ((geohash, quantum(time, 15, m), time, user)",
    Expected = [{create, <<"CREATE">>},
                {table, <<"TABLE">>},
                {identifier, <<"Geo">>},
                {left_paren, <<"(">>},
                {identifier, <<"geohash">>},
                {varchar, <<"varchar">>},
                {identifier, <<"not_null">>},
                {comma, <<",">>},
                {identifier, <<"user">>},
                {varchar, <<"varchar">>},
                {identifier, <<"not_null">>},
                {comma, <<",">>},
                {identifier, <<"time">>},
                {timestamp, <<"timestamp">>},
                {identifier, <<"not_null">>},
                {comma, <<",">>},
                {identifier, <<"weather">>},
                {varchar, <<"varchar">>},
                {identifier, <<"not_null">>},
                {comma, <<",">>},
                {identifier, <<"temperature">>},
                {double, <<"double">>},
                {identifier, <<"not_null">>},
                {comma, <<",">>},
                {primary, <<"PRIMARY">>},
                {key, <<"KEY">>},
                {left_paren, <<"(">>},
                {left_paren, <<"(">>},
                {identifier, <<"geohash">>},
                {comma, <<",">>},
                {quantum, <<"quantum">>},
                {left_paren, <<"(">>},
                {identifier, <<"time">>},
                {comma, <<",">>},
                {integer, 15},
                {comma, <<",">>},
                {identifier, <<"m">>},
                {right_paren, <<")">>},
                {comma, <<",">>},
                {identifier, <<"time">>},
                {comma, <<",">>},
                {identifier, <<"user">>},
                {right_paren, <<")">>}],
    ?_assertEqual(Expected, riak_ql_lexer:get_tokens(Sql)).
419 |
%% Bare words that are not keywords lex as identifiers.
unquoted_identifiers_test_() ->
    ?_assertEqual([{identifier, <<"cats">>},
                   {equals_operator, <<"=">>},
                   {identifier, <<"be">>},
                   {identifier, <<"a">>},
                   {identifier, <<"st">>}],
                  riak_ql_lexer:get_tokens("cats = be a st")).
431 |
%% Symbols that are not valid in any token raise an error naming the
%% offending character.
symbols_in_identifier_1_test_() ->
    Sql = "CREATE TABLE ^ ("
          "time TIMESTAMP NOT NULL, "
          "family VARCHAR NOT NULL, "
          "series VARCHAR NOT NULL, "
          "PRIMARY KEY "
          " ((family, series, quantum(time, 15, 's')), family, series, time))",
    ?_assertError(<<"Unexpected token '^'.">>, riak_ql_lexer:get_tokens(Sql)).

symbols_in_identifier_2_test_() ->
    ?_assertError(<<"Unexpected token '&'.">>,
                  riak_ql_lexer:get_tokens("klsdafj kljfd (*((*& 89& 8KHH kJHkj hKJH K K")).

symbols_in_identifier_3_test_() ->
    Sql = "CREATE TABLE mytable ("
          "time TIMESTAMP NOT NULL, "
          "family $ NOT NULL, "
          "series VARCHAR NOT NULL, "
          "PRIMARY KEY "
          " ((family, series, quantum(time, 15, 's')), family, series, time))",
    ?_assertError(<<"Unexpected token '$'.">>, riak_ql_lexer:get_tokens(Sql)).

symbols_in_identifier_4_test_() ->
    ?_assertError(<<"Unexpected token ']'.">>,
                  riak_ql_lexer:get_tokens("select ] from a")).
467 |
--------------------------------------------------------------------------------
/test/parser_arithmetic_tests.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% Arithmetic tests for the Parser
4 | %%
5 | %%
6 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 | -module(parser_arithmetic_tests).
24 |
25 | -compile([export_all]).
26 |
27 | -include_lib("eunit/include/eunit.hrl").
28 | -include("parser_test_utils.hrl").
29 |
%% A binary '+' in the select list parses to a {'+', Lhs, Rhs} node.
select_arithmetic_test() ->
    ?sql_comp_assert_match(
       "select temperature + 1 from details", select,
       [{fields, [{'+', {identifier, <<"temperature">>}, {integer, 1}}]},
        {tables, <<"details">>},
        {where, []}]).
41 |
%% '*' and '/' bind tighter than '+' and '-', left to right.
arithmetic_precedence_test() ->
    ?sql_comp_assert_match(
       "select 1 * 2 + 3 / 4 - 5 * 6 from dual", select,
       [{fields,
         [{'-',
           {'+',
            {'*', {integer,1}, {integer,2}},
            {'/', {integer,3}, {integer,4}}},
           {'*', {integer,5}, {integer,6}}}]},
        {tables, <<"dual">>},
        {where, []}]).

%% Parentheses override operator precedence.
parens_precedence_test() ->
    ?sql_comp_assert_match(
       "select 1 * (2 + 3) / (4 - 5) * 6 from dual", select,
       [{fields,
         [{'*',
           {'/',
            {'*', {integer,1},
             {'+', {integer,2}, {integer,3}}},
            {'-', {integer,4}, {integer,5}}},
           {integer,6}}]},
        {tables, <<"dual">>},
        {where, []}]).

%% A unary minus over a parenthesised expression becomes {negate, {expr, _}}.
negated_parens_test() ->
    ?sql_comp_assert_match(
       "select - (2 + 3) from dual", select,
       [{fields,
         [{negate, {expr, {'+', {integer,2}, {integer,3}}}}]},
        {tables, <<"dual">>},
        {where, []}]).
79 |
80 |
%% Function calls are not allowed in a WHERE clause.
no_functions_in_where_test() ->
    ?sql_comp_fail("select * from dual where sin(4) > 4").

%% Aggregate calls combine with arithmetic under normal precedence rules.
window_aggregate_fn_arithmetic_1_test() ->
    ?sql_comp_assert_match(
      "SELECT AVG(temperature) + 1 - 2 * 3 / 4 FROM details", select,
      [{fields,
        [{'-',
          {'+',
           {{window_agg_fn,'AVG'}, [{identifier,[<<"temperature">>]}]},
           {integer,1}},
          {'/', {'*', {integer,2}, {integer,3}}, {integer,4}}}]},
       {tables, <<"details">>}]).

%% Arithmetic may appear inside an aggregate's argument.
window_aggregate_fn_arithmetic_2_test() ->
    ?sql_comp_assert_match(
      "SELECT AVG((temperature * 2) + 32) FROM details", select,
      [{fields,
        [{{window_agg_fn,'AVG'},
          [{'+',
            {expr, {'*', {identifier,<<"temperature">>}, {integer,2}}},
            {integer,32}}]}]},
       {tables, <<"details">>}]).

%% Two aggregates combined with arithmetic in one select item.
window_aggregate_fn_arithmetic_3_test() ->
    ?sql_comp_assert_match(
      "SELECT COUNT(x) + 1 / AVG(y) FROM details", select,
      [{fields,
        [{'+',
          {{window_agg_fn,'COUNT'}, [{identifier,[<<"x">>]}]},
          {'/',
           {integer,1},
           {{window_agg_fn,'AVG'}, [{identifier,[<<"y">>]}]}}}]},
       {tables, <<"details">>}]).
117 |
--------------------------------------------------------------------------------
/test/parser_canonicalise_where_tests.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% Canonical WHERE clause tests for the Parser
4 | %%
5 | %%
6 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 |
24 | -module(parser_canonicalise_where_tests).
25 |
26 | -include_lib("eunit/include/eunit.hrl").
27 | -include("parser_test_utils.hrl").
28 |
29 | %%
30 | %% canonicalise WHERE clauses tests
31 | %%
32 |
%% ?where_test(Input, Expected): the canonicaliser orders the operands of
%% and_/or_ deterministically.  An already-canonical clause is unchanged.
canonicalise_where_1_test() ->
    ?where_test(
       {or_, {'<', "alpha", {integer, 2}}, {'>', "beta", {integer, 3}}},
       {or_, {'<', "alpha", {integer, 2}}, {'>', "beta", {integer, 3}}}).

%% Swapped operands are put back into canonical order.
canonicalise_where_2_test() ->
    ?where_test(
       {or_, {'>', "beta", {integer, 3}}, {'<', "alpha", {integer, 2}}},
       {or_, {'<', "alpha", {integer, 2}}, {'>', "beta", {integer, 3}}}).

canonicalise_where_2a_test() ->
    ?where_test(
       {or_, {'>=', "beta", {integer, 3}}, {'<', "alpha", {integer, 2}}},
       {or_, {'<', "alpha", {integer, 2}}, {'>=', "beta", {integer, 3}}}).

canonicalise_where_2b_test() ->
    ?where_test(
       {or_, {'>', "beta", {integer, 3}}, {'<=', "alpha", {integer, 2}}},
       {or_, {'<=', "alpha", {integer, 2}}, {'>', "beta", {integer, 3}}}).

canonicalise_where_3_test() ->
    ?where_test(
       {and_, {'>', "beta", {integer, 3}}, {'<', "alpha", {integer, 2}}},
       {and_, {'<', "alpha", {integer, 2}}, {'>', "beta", {integer, 3}}}).

%% Nested clauses are reordered too.
canonicalise_where_4_test() ->
    ?where_test(
       {or_,
        {and_, {'>', "beta", {integer, 3}}, {'<', "alpha", {integer, 2}}},
        {'=', "time", {integer, 1234}}},
       {or_,
        {'=', "time", {integer, 1234}},
        {and_, {'<', "alpha", {integer, 2}}, {'>', "beta", {integer, 3}}}}).
99 |
100 | %%
101 | %% these are the ones that matter
102 | %% all the ands float to the front which means
103 | %% the query rewriter can walk them and rearrange them
104 | %%
%% Canonicalisation nests and_ clauses so ands float to the front of the
%% expression, with the or_ pushed to the innermost position.
canonicalise_where_5_test() ->
    ?where_test(
       {and_,
        {or_, {'>', "beta", {integer, 3}}, {'<', "alpha", {integer, 2}}},
        {and_, {'>', "gamma", {integer, 3}}, {'<', "delta", {integer, 2}}}},
       {and_,
        {'<', "delta", {integer, 2}},
        {and_,
         {'>', "gamma", {integer, 3}},
         {or_, {'<', "alpha", {integer, 2}}, {'>', "beta", {integer, 3}}}}}).

%% Four anded conditions flatten into a right-nested and_ chain.
canonicalise_where_6_test() ->
    ?where_test(
       {and_,
        {and_, {'>', "beta6", {integer, 3}}, {'<', "alpha6", {integer, 2}}},
        {and_, {'>', "gamma6", {integer, 3}}, {'<', "delta6", {integer, 2}}}},
       {and_,
        {'<', "alpha6", {integer, 2}},
        {and_,
         {'<', "delta6", {integer, 2}},
         {and_, {'>', "beta6", {integer, 3}}, {'>', "gamma6", {integer, 3}}}}}).

%% Deeper mix of and_/or_: ands are hoisted, the or_ ends up innermost.
canonicalise_where_7_test() ->
    ?where_test(
       {and_,
        {and_,
         {or_, {'>', "beta7", {integer, 3}}, {'<', "alpha7", {integer, 2}}},
         {and_, {'>', "gamma7", {integer, 3}}, {'<', "delta7", {integer, 2}}}},
        {and_, {'>', "epsilon7", {integer, 3}}, {'<', "zeta7", {integer, 2}}}},
       {and_,
        {'<', "delta7", {integer, 2}},
        {and_,
         {'<', "zeta7", {integer, 2}},
         {and_,
          {'>', "epsilon7", {integer, 3}},
          {and_,
           {'>', "gamma7", {integer, 3}},
           {or_, {'<', "alpha7", {integer, 2}}, {'>', "beta7", {integer, 3}}}}}}}).
182 |
--------------------------------------------------------------------------------
/test/parser_delete_tests.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% DELETE command tests for the Parser
4 | %%
5 | %%
6 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 |
24 | -module(parser_delete_tests).
25 |
26 | -include_lib("eunit/include/eunit.hrl").
27 | -include("parser_test_utils.hrl").
28 |
%% DELETE parses to a table name plus the WHERE conditions.
delete_sql_test() ->
    ?sql_comp_assert_match(
       "delete from argle where bish > 1", delete,
       [{table, <<"argle">>},
        {where, [{'>', <<"bish">>, {integer, 1}}]}]).
38 |
39 |
--------------------------------------------------------------------------------
/test/parser_describe_tests.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% Parser tests for the DESCRIBE command
4 | %%
5 | %%
6 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 |
24 | -module(parser_describe_tests).
25 |
26 | -include_lib("eunit/include/eunit.hrl").
27 | -include("parser_test_utils.hrl").
28 |
%% DESCRIBE yields the table identifier; quoting and keyword case do not
%% change the result.
simple_describe_test() ->
    Expected = [{identifier, <<"GeoCheckins">>}],
    ?sql_comp_assert("describe GeoCheckins", describe, Expected).

uppercase_quoted_describe_test() ->
    Expected = [{identifier, <<"GeoCheckins">>}],
    ?sql_comp_assert("DESCRIBE \"GeoCheckins\"", describe, Expected).
36 |
--------------------------------------------------------------------------------
/test/parser_explain_tests.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% EXPLAIN command tests for the Parser
4 | %%
5 | %%
6 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 |
24 | -module(parser_explain_tests).
25 |
26 | -include_lib("eunit/include/eunit.hrl").
27 | -include("parser_test_utils.hrl").
28 |
%% EXPLAIN wraps a SELECT; the parsed proplist mirrors the select's
%% fields/tables/where.
explain_sql_test() ->
    ?sql_comp_assert_match(
       "explain select * from argle", explain,
       [{fields, [{identifier, [<<"*">>]}]},
        {tables, <<"argle">>},
        {where, []}]).

%% A single trailing semicolon is accepted.
explain_sql_with_semicolon_test() ->
    ?sql_comp_assert_match(
       "explain select * from argle;", explain,
       [{fields, [{identifier, [<<"*">>]}]},
        {tables, <<"argle">>},
        {where, []}]).

%% Semicolons inside quoted names/literals do not terminate the statement.
explain_sql_with_semicolons_in_quotes_test() ->
    ?sql_comp_assert_match(
       "explain select * from \"table;name\" where ';' = asdf;", explain,
       [{fields, [{identifier, [<<"*">>]}]},
        {tables, <<"table;name">>},
        {where, [{'=', <<"asdf">>, {binary, <<";">>}}]}]).

%% Anything after the terminating semicolon is rejected.
explain_sql_semicolon_second_statement_test() ->
    ?sql_comp_fail("explain select * from asdf; select * from asdf").

explain_sql_multiple_semicolon_test() ->
    ?sql_comp_fail("explain select * from asdf;;").

select_quoted_sql_test() ->
    ?sql_comp_assert_match(
       "explain select * from \"argle\"", explain,
       [{fields, [{identifier, [<<"*">>]}]},
        {tables, <<"argle">>},
        {where, []}]).

%% Quoting lets a reserved word be used as a table name.
select_quoted_keyword_sql_test() ->
    ?sql_comp_assert_match(
       "explain select * from \"select\"", explain,
       [{fields, [{identifier, [<<"*">>]}]},
        {tables, <<"select">>},
        {where, []}]).

%% Doubled double-quotes inside a quoted name are an escaped quote.
select_nested_quotes_sql_test() ->
    ?sql_comp_assert_match(
       "explain select * from \"some \"\"quotes\"\" in me\"", explain,
       [{fields, [{identifier, [<<"*">>]}]},
        {tables, <<"some \"quotes\" in me">>},
        {where, []}]).

%% Comma-separated table lists are not supported.
select_from_lists_sql_test() ->
    ?sql_comp_fail("explain select * from events, errors").

select_from_lists_with_where_sql_test() ->
    ?sql_comp_fail("explain select foo from events, errors where x = y").

select_fields_from_lists_sql_test() ->
    ?sql_comp_assert_match(
       "explain select hip, hop, dont, stop from events", explain,
       [{fields, [{identifier, [<<"hip">>]},
                  {identifier, [<<"hop">>]},
                  {identifier, [<<"dont">>]},
                  {identifier, [<<"stop">>]}]},
        {tables, <<"events">>},
        {where, []}]).

select_quoted_spaces_sql_test() ->
    ?sql_comp_assert_match(
       "explain select * from \"table with spaces\"", explain,
       [{fields, [{identifier, [<<"*">>]}]},
        {tables, <<"table with spaces">>},
        {where, []}]).

%% Escapes in both quoted identifiers and literals; note the expected
%% or_ operands are in canonical order, not query order.
select_quoted_escape_sql_test() ->
    ?sql_comp_assert_match(
       "explain select * from \"table with spaces\" where "
       "\"co\"\"or\" = 'klingon''name' or "
       "\"co\"\"or\" = '\"'", explain,
       [{fields, [{identifier, [<<"*">>]}]},
        {tables, <<"table with spaces">>},
        {where, [{or_,
                  {'=', <<"co\"or">>, {binary, <<"\"">>}},
                  {'=', <<"co\"or">>, {binary, <<"klingon'name">>}}}]}]).
140 |
--------------------------------------------------------------------------------
/test/parser_function_tests.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% Tests for user-defined functions for the Parser
4 | %%
5 | %%
6 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 |
24 | -module(parser_function_tests).
25 |
26 | -include_lib("eunit/include/eunit.hrl").
27 | -include("parser_test_utils.hrl").
28 |
%% User-defined functions are rejected by the parser at any arity and with
%% any mix of argument types.  The repeated lex-then-parse pipeline is
%% factored into parse/1 so each test states only its query string.
function_arity_0_test() ->
    ?assertMatch({error, {0, riak_ql_parser, <<_/binary>>}},
                 parse("select f from a WHERE myfun() = a")).

function_identifier_arity_1_test() ->
    ?assertMatch({error, {0, riak_ql_parser, <<_/binary>>}},
                 parse("select f from a WHERE myfun(a) = a")).

function_identifier_arity_2_test() ->
    ?assertMatch({error, {0, riak_ql_parser, <<_/binary>>}},
                 parse("select f from a WHERE myfun(a, b) = a")).

function_val_arity_1_test() ->
    ?assertMatch({error, {0, riak_ql_parser, <<_/binary>>}},
                 parse("select f from a WHERE myfun('a') = a")).

function_val_arity_2_test() ->
    ?assertMatch({error, {0, riak_ql_parser, <<_/binary>>}},
                 parse("select f from a WHERE myfun('a', 'b') = a")).

function_val_arity_3_test() ->
    ?assertMatch({error, {0, riak_ql_parser, <<_/binary>>}},
                 parse("select f from a WHERE myfun('a', 'b', 'c') = a")).

function_val_arity_10_test() ->
    ?assertMatch({error, {0, riak_ql_parser, <<_/binary>>}},
                 parse("select f from a WHERE myfun('a', 'b', 'b', 'b', 'b', 'b', 'b', 'b', 'b', 'b') = a")).

function_val_and_identifier_mix_1_test() ->
    ?assertMatch({error, {0, riak_ql_parser, <<_/binary>>}},
                 parse("select f from a WHERE myfun('a', 10, b, 3.5) = a")).

function_val_and_identifier_mix_2_test() ->
    ?assertMatch({error, {0, riak_ql_parser, <<_/binary>>}},
                 parse("select f from a WHERE myfun('a', 10, b, 3.5, true) = a")).

function_val_and_identifier_mix_3_test() ->
    ?assertMatch({error, {0, riak_ql_parser, <<_/binary>>}},
                 parse("select f from a WHERE myfun('a', 10, b, 3.5, false) = a")).

%% The error message names the offending function.
function_call_error_message_test() ->
    ?assertMatch({error, {0, riak_ql_parser,
                          <<"Function not supported - 'myfun'.">>}},
                 parse("select f from a WHERE myfun('a') = a")).

%% The innermost unsupported function is the one reported.
function_as_arg_test() ->
    ?assertMatch({error, {0, riak_ql_parser,
                          <<"Function not supported - 'herfun'.">>}},
                 parse("select f from a WHERE myfun(hisfun(herfun(a))) = 'a'")).

%% Lex and parse a query string in one step (test helper).
parse(Sql) ->
    riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(Sql)).
101 |
--------------------------------------------------------------------------------
/test/parser_insert_tests.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% Insert tests for the Parser
4 | %%
5 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
6 | %%
7 | %% This file is provided to you under the Apache License,
8 | %% Version 2.0 (the "License"); you may not use this file
9 | %% except in compliance with the License. You may obtain
10 | %% a copy of the License at
11 | %%
12 | %% http://www.apache.org/licenses/LICENSE-2.0
13 | %%
14 | %% Unless required by applicable law or agreed to in writing,
15 | %% software distributed under the License is distributed on an
16 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
17 | %% KIND, either express or implied. See the License for the
18 | %% specific language governing permissions and limitations
19 | %% under the License.
20 | %%
21 | %% -------------------------------------------------------------------
22 | -module(parser_insert_tests).
23 |
24 | -include_lib("eunit/include/eunit.hrl").
25 |
%% Every test inserts one value into column "col" of table "mytab"; the
%% expected parse tree differs only in the typed value, so the shared
%% scaffolding is factored into assert_insert_value/2.
%% (Assertion failures now report from the helper; each test still fails
%% independently with its own input visible in the message.)
assert_insert_value(Sql, TypedValue) ->
    ?assertEqual(
       {insert, [{table, <<"mytab">>},
                 {fields, [{identifier, [<<"col">>]}]},
                 {values, [[TypedValue]]}]},
       riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(Sql))).

insert_boolean_true_test() ->
    assert_insert_value("INSERT INTO mytab (col) VALUES (true)",
                        {boolean, true}).

insert_boolean_false_test() ->
    assert_insert_value("INSERT INTO mytab (col) VALUES (false)",
                        {boolean, false}).

insert_double_test() ->
    assert_insert_value("INSERT INTO mytab (col) VALUES (3.5)",
                        {float, 3.5}).

insert_varchar_test() ->
    assert_insert_value("INSERT INTO mytab (col) VALUES ('qwerty')",
                        {binary, <<"qwerty">>}).

insert_sint64_test() ->
    assert_insert_value("INSERT INTO mytab (col) VALUES (22)",
                        {integer, 22}).

%% NULL keeps its original spelling in the parse tree.
insert_null_test() ->
    assert_insert_value("INSERT INTO mytab (col) VALUES (NULL)",
                        {null, <<"NULL">>}).

insert_null_case_insensitive_test() ->
    assert_insert_value("INSERT INTO mytab (col) VALUES (NuLl)",
                        {null, <<"NuLl">>}).

%% A quoted 'null' is a varchar value, not the NULL keyword.
insert_literal_null_test() ->
    assert_insert_value("INSERT INTO mytab (col) VALUES ('null')",
                        {binary, <<"null">>}).
105 |
106 | insert_identifier_test() ->
107 | Insert_sql =
108 | "INSERT INTO mytab (col) VALUES (john)",
109 | ?assertEqual(
110 | {insert,[{table,<<"mytab">>},
111 | {fields,[{identifier,[<<"col">>]}]},
112 | {values,[[{identifier,<<"john">>}]]}]},
113 | riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(Insert_sql))
114 | ).
115 |
116 |
--------------------------------------------------------------------------------
/test/parser_select_aggregate_tests.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% Window Aggregation function tests for the Parser
4 | %%
5 | %%
6 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 |
24 | -module(parser_select_aggregate_tests).
25 |
26 | -include_lib("eunit/include/eunit.hrl").
27 | -include("parser_test_utils.hrl").
28 |
29 | window_aggregate_fn_1_test() ->
30 | ?sql_comp_assert_match(
31 | "select avg(temp) from details", select,
32 | [{fields, [
33 | {{window_agg_fn, 'AVG'},
34 | [{identifier, [<<"temp">>]}]}
35 | ]},
36 | {tables, <<"details">>}
37 | ]
38 | ).
39 |
40 | window_aggregate_fn_1a_test() ->
41 | ?sql_comp_assert_match(
42 | "select mean(temp) from details", select,
43 | [{fields, [
44 | {{window_agg_fn, 'MEAN'},
45 | [{identifier, [<<"temp">>]}]}
46 | ]},
47 | {tables, <<"details">>}
48 | ]
49 | ).
50 |
51 | window_aggregate_fn_2_test() ->
52 | ?sql_comp_assert_match(
53 | "select avg(temp), sum(counts) from details", select,
54 | [{fields, [
55 | {{window_agg_fn, 'AVG'},
56 | [{identifier, [<<"temp">>]}]},
57 | {{window_agg_fn, 'SUM'},
58 | [{identifier, [<<"counts">>]}]}
59 | ]},
60 | {tables, <<"details">>}
61 | ]
62 | ).
63 |
64 | window_aggregate_fn_wildcard_count_test() ->
65 | ?sql_comp_assert_match(
66 | "select count(*) from details", select,
67 | [{fields, [
68 | {{window_agg_fn, 'COUNT'},
69 | [{identifier, [<<"*">>]}]}
70 | ]},
71 | {tables, <<"details">>}
72 | ]
73 | ).
74 |
75 | window_aggregate_fn_capitalisation_test() ->
76 | ?sql_comp_assert_match(
77 | "select aVg(temp) from details", select,
78 | [{fields, [
79 | {{window_agg_fn, 'AVG'},
80 | [{identifier, [<<"temp">>]}]}
81 | ]},
82 | {tables, <<"details">>}
83 | ]
84 | ).
85 |
86 | window_aggregate_fn_all_funs_test() ->
87 | ?sql_comp_assert_match(
88 | "select avg(temp), sum(counts), count(counts), min(counts), "
89 | "max(counts), stddev(counts) from details", select,
90 | [{fields, [
91 | {{window_agg_fn, 'AVG'},
92 | [{identifier, [<<"temp">>]}]},
93 | {{window_agg_fn, 'SUM'},
94 | [{identifier, [<<"counts">>]}]},
95 | {{window_agg_fn, 'COUNT'},
96 | [{identifier, [<<"counts">>]}]},
97 | {{window_agg_fn, 'MIN'},
98 | [{identifier, [<<"counts">>]}]},
99 | {{window_agg_fn, 'MAX'},
100 | [{identifier, [<<"counts">>]}]},
101 | {{window_agg_fn, 'STDDEV'},
102 | [{identifier, [<<"counts">>]}]}
103 | ]},
104 | {tables, <<"details">>}
105 | ]
106 | ).
107 |
108 |
109 | window_aggregate_fn_arithmetic_2_test() ->
110 | ?sql_comp_assert_match(
111 | "select aVg(temperature) + count(temperature) from details", select,
112 | [{fields, [
113 | {'+',
114 | {{window_agg_fn, 'AVG'},
115 | [{identifier, [<<"temperature">>]}]},
116 | {{window_agg_fn, 'COUNT'},
117 | [{identifier, [<<"temperature">>]}]}}
118 | ]},
119 | {tables, <<"details">>}
120 | ]
121 | ).
122 |
123 | window_aggregate_fn_arithmetic_3_test() ->
124 | ?sql_comp_assert_match(
125 | "select aVg(temperature + 1) + count(temperature / distance) from details", select,
126 | [{fields, [
127 | {'+',
128 | {{window_agg_fn, 'AVG'}, [{'+', {identifier, <<"temperature">>}, {integer, 1}}]},
129 | {{window_agg_fn, 'COUNT'}, [{'/', {identifier, <<"temperature">>}, {identifier, <<"distance">>}}]}
130 | }]
131 | },
132 | {tables, <<"details">>}
133 | ]
134 | ).
135 |
136 | %%
137 | %% TS 1.1 fail tests
138 | %%
139 |
140 | window_aggregate_fn_not_supported_test() ->
141 | ?sql_comp_fail("select bingo(temp) from details").
142 |
143 | window_aggregate_fn_wildcard_fail_test() ->
144 | ?sql_comp_fail("select avg(*) from details").
145 |
--------------------------------------------------------------------------------
/test/parser_select_where_tests.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% More WHERE clause tests for the Parser
4 | %%
5 | %%
6 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 |
24 | -module(parser_select_where_tests).
25 |
26 | -include_lib("eunit/include/eunit.hrl").
27 | -include("parser_test_utils.hrl").
28 |
29 | select_where_1_sql_test() ->
30 | ?sql_comp_assert_match("select value from response_times "
31 | "where time > '2013-08-12 23:32:01' and time < '2013-08-13 12:34:56'", select,
32 | [{fields, [
33 | {identifier, [<<"value">>]}
34 | ]},
35 | {tables, <<"response_times">>},
36 | {where, [
37 | {and_,
38 | {'<', <<"time">>, {binary,<<"2013-08-13 12:34:56">>}},
39 | {'>', <<"time">>, {binary, <<"2013-08-12 23:32:01">>}}
40 | }
41 | ]}
42 | ]).
43 |
44 | select_where_1_reverse_sql_test() ->
45 | ?sql_comp_assert_match("select value from response_times "
46 | "where '2013-08-12 23:32:01' < time and '2013-08-13 12:34:56' > time", select,
47 | [{fields, [
48 | {identifier, [<<"value">>]}
49 | ]},
50 | {tables, <<"response_times">>},
51 | {where, [
52 | {and_,
53 | {'<', <<"time">>, {binary,<<"2013-08-13 12:34:56">>}},
54 | {'>', <<"time">>, {binary, <<"2013-08-12 23:32:01">>}}
55 | }
56 | ]}
57 | ]).
58 |
59 | select_where_3_sql_test() ->
60 | ?sql_comp_assert_match("select value from response_times where time > 1388534400", select,
61 | [{fields, [
62 | {identifier, [<<"value">>]}
63 | ]},
64 | {tables, <<"response_times">>},
65 | {where, [
66 | {'>', <<"time">>, {integer, 1388534400}}
67 | ]}
68 | ]).
69 |
70 | select_where_4_sql_test() ->
71 | ?sql_comp_assert_match("select value from response_times where time > 1388534400s", select,
72 | [{fields, [
73 | {identifier, [<<"value">>]}
74 | ]},
75 | {tables, <<"response_times">>},
76 | {where, [
77 | {'>', <<"time">>, {integer, 1388534400000}}
78 | ]}
79 | ]).
80 |
81 | select_where_5_sql_test() ->
82 | ?sql_comp_assert_match("select * from events where time = 1400497861762723 "
83 | "and sequence_number = 2321", select,
84 | [{fields, [
85 | {identifier, [<<"*">>]}
86 | ]},
87 | {tables, <<"events">>},
88 | {where, [
89 | {and_,
90 | {'=', <<"sequence_number">>, {integer, 2321}},
91 | {'=', <<"time">>, {integer, 1400497861762723}}
92 | }
93 | ]}
94 | ]).
95 |
96 | select_where_8_sql_test() ->
97 | ?sql_comp_assert_match("select * from events where state = 'NY'", select,
98 | [{fields, [
99 | {identifier, [<<"*">>]}
100 | ]},
101 | {tables, <<"events">>},
102 | {where, [
103 | {'=', <<"state">>, {binary, <<"NY">>}}
104 | ]}
105 | ]).
106 |
107 | select_where_approxmatch_sql_test() ->
108 | ?sql_comp_fail("select * from log_lines where line =~ /error/i").
109 |
110 | select_where_10_sql_test() ->
111 | ?sql_comp_assert_match("select * from events where customer_id = 23 and type = 'click10'", select,
112 | [{fields, [
113 | {identifier, [<<"*">>]}
114 | ]},
115 | {tables, <<"events">>},
116 | {where, [
117 | {and_,
118 | {'=', <<"customer_id">>, {integer, 23}},
119 | {'=', <<"type">>, {binary, <<"click10">>}}
120 | }
121 | ]}
122 | ]).
123 |
124 | select_where_11_sql_test() ->
125 | ?sql_comp_assert_match("select * from response_times where value > 500", select,
126 | [{fields, [
127 | {identifier, [<<"*">>]}
128 | ]},
129 | {tables, <<"response_times">>},
130 | {where, [
131 | {'>', <<"value">>, {integer, 500}}
132 | ]}
133 | ]).
134 |
135 | select_where_11a_sql_test() ->
136 | ?sql_comp_assert_match("select * from response_times where value >= 500", select,
137 | [{fields, [
138 | {identifier, [<<"*">>]}
139 | ]},
140 | {tables, <<"response_times">>},
141 | {where, [
142 | {'>=', <<"value">>, {integer, 500}}
143 | ]}
144 | ]).
145 |
146 | select_where_11b_sql_test() ->
147 | ?sql_comp_assert_match("select * from response_times where value <= 500", select,
148 | [{fields, [
149 | {identifier, [<<"*">>]}
150 | ]},
151 | {tables, <<"response_times">>},
152 | {where, [
153 | {'<=', <<"value">>, {integer, 500}}
154 | ]}
155 | ]).
156 |
157 | select_where_not_approx_sql_test() ->
158 | ?sql_comp_fail("select * from events where email !~ /.*gmail.*/").
159 |
160 | select_where_ne_sql_test() ->
161 | ?sql_comp_fail("select * from nagios_checks where status <> 0").
162 |
163 | select_where_14_sql_test() ->
164 | ?sql_comp_assert_match("select * from events where signed_in = false", select,
165 | [{fields, [
166 | {identifier, [<<"*">>]}
167 | ]},
168 | {tables, <<"events">>},
169 | {where, [
170 | {'=', <<"signed_in">>, {boolean, false}}
171 | ]}
172 | ]).
173 |
174 | select_where_15_sql_test() ->
175 | ?sql_comp_assert_match("select * from events where signed_in = -3", select,
176 | [{fields, [
177 | {identifier, [<<"*">>]}
178 | ]},
179 | {tables, <<"events">>},
180 | {where, [
181 | {'=', <<"signed_in">>, {integer, -3}}
182 | ]}
183 | ]).
184 |
185 | select_where_approx_or_approx_sql_test() ->
186 | ?sql_comp_fail("select * from events where (email =~ /.*gmail.*/ or " ++
187 | "email =~ /.*yahoo.*/) and state = 'ny'").
188 |
189 | select_where_letters_nos_in_strings_1a_test() ->
190 | ?sql_comp_assert_match("select * from events where user = 'user 1'", select,
191 | [{fields, [
192 | {identifier, [<<"*">>]}
193 | ]},
194 | {tables, <<"events">>},
195 | {where, [
196 | {'=', <<"user">>, {binary, <<"user 1">>}}
197 | ]}
198 | ]).
199 |
200 | select_where_letters_nos_in_strings_2a_test() ->
201 | ?sql_comp_assert_match(
202 | "select weather from GeoCheckin where time > 2000 and time < 8000 and user = 'user_1'", select,
203 | [{fields, [
204 | {identifier, [<<"weather">>]}
205 | ]},
206 | {tables, <<"GeoCheckin">>},
207 | {where, [
208 | {and_,
209 | {'=', <<"user">>, {binary, <<"user_1">>}},
210 | {and_,
211 | {'<', <<"time">>, {integer, 8000}},
212 | {'>', <<"time">>, {integer, 2000}}
213 | }
214 | }
215 | ]}
216 | ]).
217 |
218 | select_where_single_quotes_test() ->
219 | ?sql_comp_assert_match(
220 | "select weather from GeoCheckin where user = 'user_1' and location = 'San Francisco'", select,
221 | [{fields, [
222 | {identifier, [<<"weather">>]}
223 | ]},
224 | {tables, <<"GeoCheckin">>},
225 | {where, [
226 | {and_,
227 | {'=', <<"location">>, {binary, <<"San Francisco">>}},
228 | {'=', <<"user">>, {binary, <<"user_1">>}}
229 | }
230 | ]}
231 | ]).
232 |
233 | select_where_ors_at_start_test() ->
234 | ?sql_comp_assert_match(
235 | "select * FROM tsall2 WHERE "
236 | "d3 = 1.0 OR d3 = 2.0 "
237 | "AND vc1nn != '2' AND vc2nn != '3' AND 0 < ts1nn AND ts1nn < 1", select,
238 | [{fields, [
239 | {identifier, [<<"*">>]}
240 | ]},
241 | {tables, <<"tsall2">>},
242 | {where, [
243 | {or_,
244 | {'=', <<"d3">>, {float, 1.0}},
245 | {and_,
246 | {'<', <<"ts1nn">>, {integer, 1}},
247 | {and_,
248 | {'>', <<"ts1nn">>, {integer, 0}},
249 | {and_,
250 | {'!=', <<"vc2nn">>, {binary, <<"3">>}},
251 | {and_,
252 | {'!=', <<"vc1nn">>, {binary, <<"2">>}},
253 | {'=', <<"d3">>, {float, 2.0}}
254 | }}}}}
255 | ]}
256 | ]).
257 |
258 | select_where_ors_at_end_test() ->
259 | ?sql_comp_assert_match(
260 | "select * FROM tsall2 WHERE "
261 | "d3 = 1.0 OR d3 = 2.0 "
262 | "AND vc1nn != '2' AND vc2nn != '3' AND 0 < ts1nn AND ts1nn < 1 "
263 | "OR d3 = 3.0 OR d3 = 4.0", select,
264 | [{fields, [
265 | {identifier, [<<"*">>]}
266 | ]},
267 | {tables, <<"tsall2">>},
268 | {where, [
269 | {or_,
270 | {'=',<<"d3">>,{float,4.0}},
271 | {or_,
272 | {'=',<<"d3">>,{float,3.0}},
273 | {or_,
274 | {'=', <<"d3">>, {float, 1.0}},
275 | {and_,
276 | {'<', <<"ts1nn">>, {integer, 1}},
277 | {and_,
278 | {'>', <<"ts1nn">>, {integer, 0}},
279 | {and_,
280 | {'!=', <<"vc2nn">>, {binary, <<"3">>}},
281 | {and_,
282 | {'!=', <<"vc1nn">>, {binary, <<"2">>}},
283 | {'=', <<"d3">>, {float, 2.0}}
284 | }}}}}}}
285 | ]}
286 | ]).
287 |
288 | select_where_letters_nos_in_strings_2b_test() ->
289 | ?sql_comp_assert_match("select weather from GeoCheckin where time > 2000 and time < 8000 and user = 'user_1'", select,
290 | [{fields, [
291 | {identifier, [<<"weather">>]}
292 | ]},
293 | {tables, <<"GeoCheckin">>},
294 | {where, [
295 | {and_,
296 | {'=', <<"user">>, {binary, <<"user_1">>}},
297 | {and_,
298 | {'<', <<"time">>, {integer, 8000}},
299 | {'>', <<"time">>, {integer, 2000}}
300 | }
301 | }
302 | ]}
303 | ]).
304 |
305 | select_where_brackets_1_test() ->
306 | ?sql_comp_assert_match("select weather from GeoCheckin where (time > 2000 and time < 8000) and user = 'user_1'", select,
307 | [{fields, [
308 | {identifier, [<<"weather">>]}
309 | ]},
310 | {tables, <<"GeoCheckin">>},
311 | {where, [
312 | {and_,
313 | {'=', <<"user">>, {binary, <<"user_1">>}},
314 | {and_,
315 | {'<', <<"time">>, {integer, 8000}},
316 | {'>', <<"time">>, {integer, 2000}}
317 | }
318 | }
319 | ]}
320 | ]).
321 |
322 | select_where_brackets_2_test() ->
323 | ?sql_comp_assert_match("select weather from GeoCheckin where user = 'user_1' and (time > 2000 and time < 8000)", select,
324 | [{fields, [
325 | {identifier, [<<"weather">>]}
326 | ]},
327 | {tables, <<"GeoCheckin">>},
328 | {where, [
329 | {and_,
330 | {'=', <<"user">>, {binary, <<"user_1">>}},
331 | {and_,
332 | {'<', <<"time">>, {integer, 8000}},
333 | {'>', <<"time">>, {integer, 2000}}
334 | }
335 | }
336 | ]}
337 | ]).
338 |
339 | select_where_brackets_2a_test() ->
340 | ?sql_comp_assert_match("select weather from GeoCheckin where user = 'user_1' and (time > 2000 and (time < 8000))", select,
341 | [{fields, [
342 | {identifier, [<<"weather">>]}
343 | ]},
344 | {tables, <<"GeoCheckin">>},
345 | {where, [
346 | {and_,
347 | {'=', <<"user">>, {binary, <<"user_1">>}},
348 | {and_,
349 | {'<', <<"time">>, {integer, 8000}},
350 | {'>', <<"time">>, {integer, 2000}}
351 | }
352 | }
353 | ]}
354 | ]).
355 |
356 |
357 | select_field_to_field_forbidden_test() ->
358 | ?sql_comp_fail("select * from table where time = time").
359 |
360 | select_quoted_where_sql_test() ->
361 | ?sql_comp_assert_match("select * from \"table with spaces\" where \"color spaces\" = 'someone had painted it blue'", select,
362 | [{fields, [
363 | {identifier, [<<"*">>]}
364 | ]},
365 | {tables, <<"table with spaces">>},
366 | {where, [
367 | {'=', <<"color spaces">>, {binary, <<"someone had painted it blue">>}}
368 | ]}
369 | ]).
370 |
371 | select_where_field_is_null_test() ->
372 | ?sql_comp_assert_match("select weather from GeoCheckin where user IS NULL", select,
373 | [{fields, [
374 | {identifier, [<<"weather">>]}
375 | ]},
376 | {tables, <<"GeoCheckin">>},
377 | {where, [
378 | {is_null, {identifier, <<"user">>}}
379 | ]}
380 | ]).
381 |
382 | select_where_field_is_not_null_test() ->
383 | ?sql_comp_assert_match("select weather from GeoCheckin where user IS NOT NULL", select,
384 | [{fields, [
385 | {identifier, [<<"weather">>]}
386 | ]},
387 | {tables, <<"GeoCheckin">>},
388 | {where, [
389 | {is_not_null, {identifier, <<"user">>}}
390 | ]}
391 | ]).
392 |
--------------------------------------------------------------------------------
/test/parser_show_create_table_tests.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% SHOW CREATE TABLE tests for the Parser
4 | %%
5 | %%
6 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 |
24 | -module(parser_show_create_table_tests).
25 |
26 | -include_lib("eunit/include/eunit.hrl").
27 | -include("parser_test_utils.hrl").
28 |
29 | simple_describe_test() ->
30 | ?sql_comp_assert("show create table GeoCheckins", show_create_table,
31 | [{identifier, <<"GeoCheckins">>}]).
32 |
33 | uppercase_quoted_describe_test() ->
34 | ?sql_comp_assert("SHOW CREATE TABLE \"GeoCheckins\"", show_create_table,
35 | [{identifier, <<"GeoCheckins">>}]).
--------------------------------------------------------------------------------
/test/parser_show_tables_tests.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% Parser tests for the SHOW TABLES command
4 | %%
5 | %%
6 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 |
24 | -module(parser_show_tables_tests).
25 |
26 | -include_lib("eunit/include/eunit.hrl").
27 | -include("parser_test_utils.hrl").
28 |
29 | simple_show_tables_test() ->
30 | ?sql_comp_assert("show tables", show_tables, []).
31 |
32 | uppercase_show_tables_describe_test() ->
33 | ?sql_comp_assert("SHOW TABLES", show_tables, []).
34 |
--------------------------------------------------------------------------------
/test/parser_test_utils.hrl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
4 | %%
5 | %% This file is provided to you under the Apache License,
6 | %% Version 2.0 (the "License"); you may not use this file
7 | %% except in compliance with the License. You may obtain
8 | %% a copy of the License at
9 | %%
10 | %% http://www.apache.org/licenses/LICENSE-2.0
11 | %%
12 | %% Unless required by applicable law or agreed to in writing,
13 | %% software distributed under the License is distributed on an
14 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 | %% KIND, either express or implied. See the License for the
16 | %% specific language governing permissions and limitations
17 | %% under the License.
18 | %%
19 | %% -------------------------------------------------------------------
20 |
21 | -include_lib("eunit/include/eunit.hrl").
22 |
23 | -include("riak_ql_ddl.hrl").
24 |
25 | -compile([export_all]).
26 |
27 | -define(sql_comp_assert(String, Type, Expected),
28 | Toks = riak_ql_lexer:get_tokens(String),
29 | {Type, Got} = riak_ql_parser:ql_parse(Toks),
30 | ?assertEqual(Expected, Got)).
31 |
32 | -define(where_test(Uncanonical, Expected),
33 | Got = riak_ql_parser:canonicalise_where(Uncanonical),
34 | ?assertEqual(Expected, Got)).
35 |
36 | -define(sql_comp_assert_match(String, Type, Expected),
37 | Toks = riak_ql_lexer:get_tokens(String),
38 | {Type, Got} = riak_ql_parser:ql_parse(Toks),
39 | lists:foreach(
40 | fun({Key, Value}) -> ?assertEqual(Value, proplists:get_value(Key, Got)) end,
41 | Expected)).
42 |
43 | -define(sql_comp_fail(QL_string),
44 | Toks = riak_ql_lexer:get_tokens(QL_string),
45 | Got = riak_ql_parser:ql_parse(Toks),
46 | ?assertMatch({error, _}, Got)).
47 |
--------------------------------------------------------------------------------
/test/parser_tests.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% General Parser Tests
4 | %%
5 | %%
6 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 |
24 | -module(parser_tests).
25 |
26 | -include_lib("eunit/include/eunit.hrl").
27 | -include("parser_test_utils.hrl").
28 |
29 |
30 | select_sql_case_insensitive_1_test() ->
31 | ?sql_comp_assert_match("SELECT * from argle", select,
32 | [{fields, [
33 | {identifier, [<<"*">>]}
34 | ]}
35 | ]).
36 |
37 | select_sql_case_insensitive_2_test() ->
38 | ?sql_comp_assert_match("seLEct * from argle", select,
39 | [{fields, [
40 | {identifier, [<<"*">>]}
41 | ]}
42 | ]).
43 |
44 | sql_first_char_is_newline_test() ->
45 | ?sql_comp_assert_match("\nselect * from argle", select,
46 | [{fields, [
47 | {identifier, [<<"*">>]}
48 | ]}
49 | ]).
50 |
51 | %% RTS-645
52 | flubber_test() ->
53 | ?assertEqual(
54 | {error, {0, riak_ql_parser,
55 | <<"Used f as a measure of time in 1f. Only s, m, h and d are allowed.">>}},
56 | riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(
57 | "SELECT * FROM ts_X_subquery "
58 | "WHERE d > 0 AND d < 1 f = 'f' "
59 | "AND s='s' AND ts > 0 AND ts < 100"))
60 | ).
61 |
62 | time_unit_seconds_test() ->
63 | ?sql_comp_assert_match("SELECT * FROM mytable WHERE time > 10s AND time < 20s", select,
64 | [{fields, [
65 | {identifier, [<<"*">>]}
66 | ]},
67 | {where, [
68 | {and_,
69 | {'<',<<"time">>,{integer,20 * 1000}},
70 | {'>',<<"time">>,{integer,10 * 1000}}}
71 | ]}
72 | ]).
73 |
74 | time_unit_minutes_test() ->
75 | ?sql_comp_assert_match("SELECT * FROM mytable WHERE time > 10m AND time < 20m", select,
76 | [{fields, [
77 | {identifier, [<<"*">>]}
78 | ]},
79 | {where, [
80 | {and_,
81 | {'<',<<"time">>,{integer,20 * 60 * 1000}},
82 | {'>',<<"time">>,{integer,10 * 60 * 1000}}}
83 | ]}
84 | ]).
85 |
86 | time_unit_seconds_and_minutes_test() ->
87 | ?sql_comp_assert_match("SELECT * FROM mytable WHERE time > 10s AND time < 20m", select,
88 | [{fields, [
89 | {identifier, [<<"*">>]}
90 | ]},
91 | {where, [
92 | {and_,
93 | {'<',<<"time">>,{integer,20 * 60 * 1000}},
94 | {'>',<<"time">>,{integer,10 * 1000}}}
95 | ]}
96 | ]).
97 |
98 | time_unit_hours_test() ->
99 | ?sql_comp_assert_match("SELECT * FROM mytable WHERE time > 10h AND time < 20h", select,
100 | [{fields, [
101 | {identifier, [<<"*">>]}
102 | ]},
103 | {where, [
104 | {and_,
105 | {'<',<<"time">>,{integer,20 * 60 * 60 * 1000}},
106 | {'>',<<"time">>,{integer,10 * 60 * 60 * 1000}}}
107 | ]}
108 | ]).
109 |
110 | time_unit_days_test() ->
111 | ?sql_comp_assert_match("SELECT * FROM mytable WHERE time > 10d AND time < 20d", select,
112 | [{fields, [
113 | {identifier, [<<"*">>]}
114 | ]},
115 | {where, [
116 | {and_,
117 | {'<',<<"time">>,{integer,20 * 60 * 60 * 24 * 1000}},
118 | {'>',<<"time">>,{integer,10 * 60 * 60 * 24 * 1000}}}
119 | ]}
120 | ]).
121 |
122 | time_unit_invalid_1_test() ->
123 | ?assertMatch(
124 | {error, {0, riak_ql_parser, <<_/binary>>}},
125 | riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(
126 | "SELECT * FROM mytable WHERE time > 10y AND time < 20y"))
127 | ).
128 |
129 | time_unit_invalid_2_test() ->
130 | ?assertMatch(
131 | {error, {0, riak_ql_parser, <<_/binary>>}},
132 | riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(
133 | "SELECT * FROM mytable WHERE time > 10mo AND time < 20mo"))
134 | ).
135 |
136 | time_unit_whitespace_test() ->
137 | ?sql_comp_assert_match("SELECT * FROM mytable WHERE time > 10 d AND time < 20\td", select,
138 | [{fields, [
139 | {identifier, [<<"*">>]}
140 | ]},
141 | {where, [
142 | {and_,
143 | {'<',<<"time">>,{integer,20 * 60 * 60 * 24 * 1000}},
144 | {'>',<<"time">>,{integer,10 * 60 * 60 * 24 * 1000}}}
145 | ]}
146 | ]).
147 |
148 | time_unit_case_insensitive_test() ->
149 | ?assertMatch(
150 | {select, _},
151 | riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(
152 | "SELECT * FROM mytable WHERE time > 10S "
153 | "AND time < 20M AND time > 15H and time < 4D"))
154 | ).
155 |
156 | left_hand_side_literal_equals_test() ->
157 | ?sql_comp_assert_match("SELECT * FROM mytable WHERE 10 = age", select,
158 | [{fields, [
159 | {identifier, [<<"*">>]}
160 | ]},
161 | {where, [
162 | {'=', <<"age">>, {integer, 10}}
163 | ]}
164 | ]).
165 |
166 | left_hand_side_literal_not_equals_test() ->
167 | ?sql_comp_assert_match("SELECT * FROM mytable WHERE 10 != age", select,
168 | [{fields, [
169 | {identifier, [<<"*">>]}
170 | ]},
171 | {where, [
172 | {'!=', <<"age">>, {integer, 10}}
173 | ]}
174 | ]).
175 |
176 | %% RTS-788
177 | %% an infinite loop was occurring when two where clauses were the same
178 | %% i.e. time = 10 and time = 10
179 | infinite_loop_test_() ->
180 | {timeout, 0.2,
181 | fun() ->
182 | ?assertMatch(
183 | {select, _},
184 | riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(
185 | "Select myseries, temperature from GeoCheckin2 "
186 | "where time > 1234567 and time > 1234567 "
187 | "and myfamily = 'family1' and myseries = 'series1' "))
188 | )
189 | end}.
190 |
191 | remove_duplicate_clauses_1_test() ->
192 | ?assertEqual(
193 | riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(
194 | "SELECT * FROM mytab "
195 | "WHERE time > 1234567 ")),
196 | riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(
197 | "SELECT * FROM mytab "
198 | "WHERE time > 1234567 AND time > 1234567"))
199 | ).
200 |
201 | remove_duplicate_clauses_2_test() ->
202 | ?assertEqual(
203 | riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(
204 | "SELECT * FROM mytab "
205 | "WHERE time > 1234567 ")),
206 | riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(
207 | "SELECT * FROM mytab "
208 | "WHERE time > 1234567 AND time > 1234567 AND time > 1234567 "))
209 | ).
210 |
211 | remove_duplicate_clauses_3_test() ->
212 | ?assertEqual(
213 | riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(
214 | "SELECT * FROM mytab "
215 | "WHERE time > 1234567 ")),
216 | riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(
217 | "SELECT * FROM mytab "
218 | "WHERE time > 1234567 AND time > 1234567 OR time > 1234567 "))
219 | ).
220 |
221 | remove_duplicate_clauses_4_test() ->
222 | ?assertEqual(
223 | riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(
224 | "SELECT * FROM mytab "
225 | "WHERE time > 1234567 ")),
226 | riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(
227 | "SELECT * FROM mytab "
228 | "WHERE time > 1234567 AND (time > 1234567 OR time > 1234567) "))
229 | ).
230 |
231 | %% This fails. de-duping does not yet go through the entire tree and
232 | %% pull out duplicates
233 | %% remove_duplicate_clauses_5_test() ->
234 | %% ?assertEqual(
235 | %% riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(
236 | %% "SELECT * FROM mytab "
237 | %% "WHERE time > 1234567 "
238 | %% "AND (localtion > 'derby' OR time > 'sheffield') "
239 | %% "AND weather = 'raining' ")),
240 | %% riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(
241 | %% "SELECT * FROM mytab "
242 | %% "WHERE time > 1234567 "
243 | %% "AND (localtion > 'derby' OR time > 'sheffield') "
244 | %% "AND weather = 'raining' "
245 | %% "AND time > 1234567 "))
246 | %% ).
247 |
248 | concatenated_unquoted_strings_test() ->
249 | String = "select * from response_times where cats = be a st",
250 | Expected = error,
251 | Got = case riak_ql_parser:ql_parse(riak_ql_lexer:get_tokens(String)) of
252 | {error, _Err} ->
253 | error;
254 | {_, Other} -> {should_not_compile, Other}
255 | end,
256 | ?assertEqual(Expected, Got).
257 |
258 | %%
259 | %% Regression tests
260 | %%
261 |
%% RTS-433: a four-clause AND chain must parse, and the canonicalised
%% where-clause below shows the expected shape: right-nested {and_, _, _}
%% pairs carrying the comparisons in reverse order relative to the query
%% text (series, family, date <=, date >=).
rts_433_regression_test() ->
    ?sql_comp_assert_match("select * from HardDrivesV14 where date >= 123 "
                           "and date <= 567 "
                           "and family = 'Hitachi HDS5C4040ALE630' "
                           "and series = 'true'", select,
                           [{fields, [
                                      {identifier, [<<"*">>]}
                                     ]},
                            {where, [
                                     {and_,
                                      {'=', <<"series">>, {binary, <<"true">>}},
                                      {and_,
                                       {'=', <<"family">>, {binary, <<"Hitachi HDS5C4040ALE630">>}},
                                       {and_,
                                        {'<=', <<"date">>, {integer, 567}},
                                        {'>=', <<"date">>, {integer, 123}}
                                       }
                                      }
                                     }
                                    ]}
                           ]).
283 |
--------------------------------------------------------------------------------
/test/query_ddl.erl:
--------------------------------------------------------------------------------
1 | %% -------------------------------------------------------------------
2 | %%
3 | %% query_ddl: a test suite for queries against DDLs
4 | %%
5 | %%
6 | %% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
7 | %%
8 | %% This file is provided to you under the Apache License,
9 | %% Version 2.0 (the "License"); you may not use this file
10 | %% except in compliance with the License. You may obtain
11 | %% a copy of the License at
12 | %%
13 | %% http://www.apache.org/licenses/LICENSE-2.0
14 | %%
15 | %% Unless required by applicable law or agreed to in writing,
16 | %% software distributed under the License is distributed on an
17 | %% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
18 | %% KIND, either express or implied. See the License for the
19 | %% specific language governing permissions and limitations
20 | %% under the License.
21 | %%
22 | %% -------------------------------------------------------------------
23 | -module(query_ddl).
24 |
25 | -include_lib("eunit/include/eunit.hrl").
26 |
%% Outcome flags passed to run_test/4: whether the query is expected to
%% validate against the DDL.
-define(VALID, true).
-define(INVALID, false).

%%
%% If you don't use indirection here
%% (i.e. if you tried to have a single macro and pass in true/false as an
%% argument) the compiler detects that some code branches can't run and
%% gives an error
%%
%% which is why we have a valid and an invalid test macro
%%

%% Declares an eunit test named `Name` asserting that SQLQuery validates
%% against the DDL produced by compiling CreateTable.
-define(valid_query_test(Name, CreateTable, SQLQuery),
        Name() -> run_test(Name, CreateTable, SQLQuery, ?VALID)).

%% Declares an eunit test named `Name` asserting that SQLQuery is
%% rejected by the DDL produced by compiling CreateTable.
-define(invalid_query_test(Name, CreateTable, SQLQuery),
        Name() -> run_test(Name, CreateTable, SQLQuery, ?INVALID)).
42 |
%% Shared driver behind the ?valid_query_test / ?invalid_query_test
%% macros.  Lexes and parses CreateTable into a DDL, compiles it to a
%% module, parses SQLQuery, and asserts that the verdict of
%% riak_ql_ddl:is_query_valid/3 matches IsValid.  Any lex/parse failure,
%% non-SELECT result, or DDL compilation failure fails the test and
%% prints a diagnostic via ?debugFmt.
run_test(Name, CreateTable, SQLQuery, IsValid) ->
    Lexed = riak_ql_lexer:get_tokens(CreateTable),
    {ddl, DDL, _Props} = riak_ql_parser:ql_parse(Lexed),
    case riak_ql_ddl_compiler:compile_and_load_from_tmp(DDL) of
        {module, Module} ->
            Lexed2 = riak_ql_lexer:get_tokens(SQLQuery),
            Qry = riak_ql_parser:ql_parse(Lexed2),
            case Qry of
                %% only SELECT queries are expected; validate against the DDL
                {select, Q} -> case riak_ql_ddl:is_query_valid(Module, DDL, riak_ql_ddl:parsed_sql_to_query(Q)) of
                                   true ->
                                       case IsValid of
                                           true ->
                                               ?assert(true);
                                           false ->
                                               %% validator accepted a query we expected it to reject
                                               ?debugFmt("Query in ~p should not be valid", [Name]),
                                               ?assert(false)
                                       end;
                                   {false, E} ->
                                       case IsValid of
                                           true ->
                                               %% validator rejected a query we expected it to accept
                                               ?debugFmt("Test ~p failed with query syntax error of ~p~n",
                                                         [Name, E]),
                                               ?assert(false);
                                           false ->
                                               ?assert(true)
                                       end
                               end;
                %% the query did not parse as a SELECT at all
                Err -> ?debugFmt("Test ~p failed with error ~p~n", [Name, Err]),
                       ?assert(false)
            end;
        _Other ->
            %% DDL compilation itself failed
            ?debugFmt("~n~p compilation failed:~n~p", [Name, _Other]),
            ?assert(false)
    end.
77 |
78 |
%% A table with one extra column of each scalar type (timestamp, boolean,
%% double, sint64, varchar) beyond the key fields, so each type can be
%% exercised by a single comparison appended to ?SQL below.
-define(STANDARDTABLE,
        "CREATE TABLE GeoCheckin"
        " (geohash varchar not null,"
        " user varchar not null,"
        " time timestamp not null,"
        " mytimestamp timestamp not null,"
        " myboolean boolean not null,"
        " mydouble double not null,"
        " mysint64 sint64 not null,"
        " myvarchar varchar not null,"
        " PRIMARY KEY ((geohash, user, quantum(time, 15, 'm')), geohash, user, time))").

%% Query prefix covering the full partition key of ?STANDARDTABLE.
%% NB: it deliberately ends with "and " -- every test appends exactly one
%% more comparison to complete the WHERE clause.
-define(SQL, "SELECT * FROM GeoCheckin WHERE "
             "geohash = 'erk' and user = 'berk' and time > 1 and time < 1000 and ").
93 |
%% Timestamps: every comparison operator (=, !=, <, <=, >, >=) against an
%% integer literal is valid for a timestamp column.

?valid_query_test(timestamp_1_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mytimestamp = 3").

?valid_query_test(timestamp_2_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mytimestamp != 3").

?valid_query_test(timestamp_3_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mytimestamp < 3").

?valid_query_test(timestamp_4_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mytimestamp <= 3").

?valid_query_test(timestamp_5_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mytimestamp > 3").

?valid_query_test(timestamp_6_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mytimestamp >= 3").
119 |
%% booleans: (in)equality against a boolean literal of any letter case is
%% valid; ordering comparisons (<, <=, >, >=) and non-boolean literals
%% are invalid.

?valid_query_test(boolean_1_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "myboolean = true").

?valid_query_test(boolean_1a_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "myboolean = True").

?valid_query_test(boolean_1b_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "myboolean = false").

?valid_query_test(boolean_1c_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "myboolean = False").

?invalid_query_test(boolean_1d_test,
                    ?STANDARDTABLE,
                    ?SQL ++ "myboolean = 'yardle'").

?valid_query_test(boolean_2_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "myboolean != true").

?invalid_query_test(boolean_3_test,
                    ?STANDARDTABLE,
                    ?SQL ++ "myboolean < 3.4").

?invalid_query_test(boolean_3a_test,
                    ?STANDARDTABLE,
                    ?SQL ++ "myboolean < true").

?invalid_query_test(boolean_4_test,
                    ?STANDARDTABLE,
                    ?SQL ++ "myboolean <= 3.4").

?invalid_query_test(boolean_4a_test,
                    ?STANDARDTABLE,
                    ?SQL ++ "myboolean <= false").

?invalid_query_test(boolean_5_test,
                    ?STANDARDTABLE,
                    ?SQL ++ "myboolean > 3.4").

?invalid_query_test(boolean_5a_test,
                    ?STANDARDTABLE,
                    ?SQL ++ "myboolean > true").

?invalid_query_test(boolean_6_test,
                    ?STANDARDTABLE,
                    ?SQL ++ "myboolean >= 3.4").

?invalid_query_test(boolean_6a_test,
                    ?STANDARDTABLE,
                    ?SQL ++ "myboolean >= fALse").
177 |
%% Doubles: every comparison operator against a float literal is valid
%% for a double column.

?valid_query_test(double_1_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mydouble = 3.4").

?valid_query_test(double_2_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mydouble != 3.4").

?valid_query_test(double_3_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mydouble < 3.4").

?valid_query_test(double_4_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mydouble <= 3.4").

?valid_query_test(double_5_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mydouble > 3.4").

?valid_query_test(double_6_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mydouble >= 3.4").
203 |
%% sint64s: every comparison operator against an integer literal is valid
%% for a sint64 column.

?valid_query_test(sint64_1_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mysint64 = 3").

?valid_query_test(sint64_2_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mysint64 != 3").

?valid_query_test(sint64_3_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mysint64 < 3").

?valid_query_test(sint64_4_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mysint64 <= 3").

?valid_query_test(sint64_5_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mysint64 > 3").

?valid_query_test(sint64_6_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "mysint64 >= 3").
229 |
%% varchars: (in)equality against a quoted string is valid; ordering
%% comparisons against a numeric literal are invalid.

?valid_query_test(varchar_1_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "myvarchar = 'eert'").

?valid_query_test(varchar_2_test,
                  ?STANDARDTABLE,
                  ?SQL ++ "myvarchar != 'wertetr'").

?invalid_query_test(varchar_3_test,
                    ?STANDARDTABLE,
                    ?SQL ++ "myvarchar < 3.4").

?invalid_query_test(varchar_4_test,
                    ?STANDARDTABLE,
                    ?SQL ++ "myvarchar <= 3.4").

?invalid_query_test(varchar_5_test,
                    ?STANDARDTABLE,
                    ?SQL ++ "myvarchar > 3.4").

?invalid_query_test(varchar_6_test,
                    ?STANDARDTABLE,
                    ?SQL ++ "myvarchar >= 3.4").
255 |
256 | %% identity hash tests
257 |
%% The compiled DDL module must expose a plaintext identity string that
%% round-trips the table name, the fields with their types, and both the
%% primary and local keys.
identity_hash_test() ->
    Tokens = riak_ql_lexer:get_tokens(?STANDARDTABLE),
    {ddl, DDL, _Props} = riak_ql_parser:ql_parse(Tokens),
    {module, Mod} = riak_ql_ddl_compiler:compile_and_load_from_tmp(DDL),
    Expected = "TABLE: "
               "GeoCheckin: "
               "FIELDS: "
               "geohash varchar not null: "
               "user varchar not null: "
               "time timestamp not null: "
               "mytimestamp timestamp not null: "
               "myboolean boolean not null: "
               "mydouble double not null: "
               "mysint64 sint64 not null: "
               "myvarchar varchar not null: "
               "PRIMARY KEY: geohash: user: riak_ql_quanta quantum time 15 m timestamp: "
               "LOCAL KEY: geohash: user: time",
    ?assertEqual(Expected, Mod:get_identity_plaintext_DEBUG()).
278 |
--------------------------------------------------------------------------------
/tools.mk:
--------------------------------------------------------------------------------
# Path to the rebar executable; overridable from the environment or the
# including Makefile.
REBAR ?= ./rebar

# These targets are commands, not files -- always run their recipes.
.PHONY: compile-no-deps test docs xref dialyzer-run dialyzer-quick dialyzer \
	cleanplt
5 |
# Compile only this project, skipping dependencies.
compile-no-deps:
	${REBAR} compile skip_deps=true

#test: compile
#	${REBAR} eunit skip_deps=true
## overridden in ../Makefile, with special care to remove ./eunit/* etc.

# Generate edoc documentation.
docs:
	${REBAR} doc skip_deps=true

# Run xref cross-reference analysis on the compiled code.
xref: compile
	${REBAR} xref skip_deps=true
18 |
# Shared (cross-project) dialyzer PLT, built from $(DIALYZER_APPS)
# (expected to be set by the including Makefile).
PLT ?= $(HOME)/.combo_dialyzer_plt
# Project-local PLT built from deps/*/ebin.
LOCAL_PLT = .local_dialyzer_plt
DIALYZER_FLAGS ?= -Wunmatched_returns

# Build or refresh the shared PLT.  `test $$? -ne 1` succeeds unless
# dialyzer exited with status 1 (an error), so a warnings-only run does
# not fail the build.
${PLT}: compile
	@if [ -f $(PLT) ]; then \
		dialyzer --check_plt --plt $(PLT) --apps $(DIALYZER_APPS) && \
		dialyzer --add_to_plt --plt $(PLT) --output_plt $(PLT) --apps $(DIALYZER_APPS) ; test $$? -ne 1; \
	else \
		dialyzer --build_plt --output_plt $(PLT) --apps $(DIALYZER_APPS); test $$? -ne 1; \
	fi
30 |
# Build or refresh the project-local PLT from deps/*/ebin.  A no-op when
# there is no deps directory.  As above, only dialyzer exit status 1 is
# treated as failure.
${LOCAL_PLT}: compile
	@if [ -d deps ]; then \
		if [ -f $(LOCAL_PLT) ]; then \
			dialyzer --check_plt --plt $(LOCAL_PLT) deps/*/ebin && \
			dialyzer --add_to_plt --plt $(LOCAL_PLT) --output_plt $(LOCAL_PLT) deps/*/ebin ; test $$? -ne 1; \
		else \
			dialyzer --build_plt --output_plt $(LOCAL_PLT) deps/*/ebin ; test $$? -ne 1; \
		fi \
	fi
40 |
# Run dialyzer against ebin using whichever PLTs exist, filtering known
# warnings through dialyzer.ignore-warnings (see the commentary below).
# NOTE(review): `egrep` is deprecated in modern GNU grep in favour of
# `grep -E`; kept as-is here to avoid changing behaviour on older hosts.
dialyzer-run:
	@echo "==> $(shell basename $(shell pwd)) (dialyzer)"
# The bulk of the code below deals with the dialyzer.ignore-warnings file
# which contains strings to ignore if output by dialyzer.
# Typically the strings include line numbers. Using them exactly is hard
# to maintain as the code changes. This approach instead ignores the line
# numbers, but takes into account the number of times a string is listed
# for a given file. So if one string is listed once, for example, and it
# appears twice in the warnings, the user is alerted. It is possible but
# unlikely that this approach could mask a warning if one ignored warning
# is removed and two warnings of the same kind appear in the file, for
# example. But it is a trade-off that seems worth it.
# Details of the cryptic commands:
#   - Remove line numbers from dialyzer.ignore-warnings
#   - Pre-pend duplicate count to each warning with sort | uniq -c
#   - Remove annoying white space around duplicate count
#   - Save in dialyer.ignore-warnings.tmp
#   - Do the same to dialyzer_warnings
#   - Remove matches from dialyzer.ignore-warnings.tmp from output
#   - Remove duplicate count
#   - Escape regex special chars to use lines as regex patterns
#   - Add pattern to match any line number (file.erl:\d+:)
#   - Anchor to match the entire line (^entire line$)
#   - Save in dialyzer_unhandled_warnings
#   - Output matches for those patterns found in the original warnings
	@if [ -f $(LOCAL_PLT) ]; then \
		PLTS="$(PLT) $(LOCAL_PLT)"; \
	else \
		PLTS=$(PLT); \
	fi; \
	if [ -f dialyzer.ignore-warnings ]; then \
		if [ $$(grep -cvE '[^[:space:]]' dialyzer.ignore-warnings) -ne 0 ]; then \
			echo "ERROR: dialyzer.ignore-warnings contains a blank/empty line, this will match all messages!"; \
			exit 1; \
		fi; \
		dialyzer $(DIALYZER_FLAGS) --plts $${PLTS} -c ebin > dialyzer_warnings ; \
		cat dialyzer.ignore-warnings \
		| sed -E 's/^([^:]+:)[^:]+:/\1/' \
		| sort \
		| uniq -c \
		| sed -E '/.*\.erl: /!s/^[[:space:]]*[0-9]+[[:space:]]*//' \
		> dialyzer.ignore-warnings.tmp ; \
		egrep -v "^[[:space:]]*(done|Checking|Proceeding|Compiling)" dialyzer_warnings \
		| sed -E 's/^([^:]+:)[^:]+:/\1/' \
		| sort \
		| uniq -c \
		| sed -E '/.*\.erl: /!s/^[[:space:]]*[0-9]+[[:space:]]*//' \
		| grep -F -f dialyzer.ignore-warnings.tmp -v \
		| sed -E 's/^[[:space:]]*[0-9]+[[:space:]]*//' \
		| sed -E 's/([]\^:+?|()*.$${}\[])/\\\1/g' \
		| sed -E 's/(\\\.erl\\\:)/\1\\d+:/g' \
		| sed -E 's/^(.*)$$/^\1$$/g' \
		> dialyzer_unhandled_warnings ; \
		rm dialyzer.ignore-warnings.tmp; \
		if [ $$(cat dialyzer_unhandled_warnings | wc -l) -gt 0 ]; then \
			egrep -f dialyzer_unhandled_warnings dialyzer_warnings ; \
			found_warnings=1; \
		fi; \
		[ "$$found_warnings" != 1 ] ; \
	else \
		dialyzer $(DIALYZER_FLAGS) --plts $${PLTS} -c ebin; \
	fi
103 |
# Dialyze without rebuilding the PLTs (assumes they are already current).
dialyzer-quick: compile-no-deps dialyzer-run

# Full dialyze: bring both PLTs up to date first, then run the analysis.
dialyzer: ${PLT} ${LOCAL_PLT} dialyzer-run
107 |
# Delete both dialyzer PLTs.  Rebuilding them takes several minutes,
# hence the 5-second grace period before anything is removed.
# $(RM) (i.e. `rm -f`) makes the target idempotent: a PLT that is
# already absent no longer aborts make with an error, which a bare
# `rm` would.
cleanplt:
	@echo
	@echo "Are you sure? It takes several minutes to re-build."
	@echo Deleting $(PLT) and $(LOCAL_PLT) in 5 seconds.
	@echo
	sleep 5
	$(RM) $(PLT)
	$(RM) $(LOCAL_PLT)
116 |
117 |
--------------------------------------------------------------------------------