├── .github
│   └── workflows
│       └── maven.yml
├── .gitignore
├── LICENSE.md
├── README.md
├── java-crud-api.iml
├── lgpl-3.0.txt
├── pom.xml
└── src
    ├── main
    │   └── java
    │       ├── android
    │       │   └── util
    │       │       └── Base64.java
    │       ├── com
    │       │   └── ivanceras
    │       │       └── fluent
    │       │           └── sql
    │       │               ├── Breakdown.java
    │       │               └── SQL.java
    │       └── eu
    │           └── hadeco
    │               └── crudapi
    │                   ├── ApiConfig.java
    │                   ├── CrudApiHandler.java
    │                   ├── RequestHandler.java
    │                   └── TableMeta.java
    └── test
        ├── java
        │   ├── com
        │   │   └── ivanceras
        │   │       └── fluent
        │   │           ├── AllTests.java
        │   │           ├── CTest.java
        │   │           ├── TestComplexQuery.java
        │   │           ├── TestQuery2HiveSQL.java
        │   │           ├── TestSQL1.java
        │   │           ├── TestSQLBuilderDelete.java
        │   │           ├── TestSQLBuilderEquality.java
        │   │           ├── TestSQLBuilderFunctions.java
        │   │           ├── TestSQLBuilderIn.java
        │   │           ├── TestSQLBuilderInsert.java
        │   │           ├── TestSQLBuilderMoreComplexFunctions.java
        │   │           ├── TestSQLBuilderNamedColumns.java
        │   │           ├── TestSQLBuilderRecursive.java
        │   │           ├── TestSQLBuilderSelect.java
        │   │           ├── TestSQLBuilderUpdate.java
        │   │           ├── TestSQLBuilderWithRecursive.java
        │   │           ├── TestSQLFieldsComma.java
        │   │           ├── TestSQLOrderBy.java
        │   │           ├── TestSimpleComplexFunctions.java
        │   │           ├── TestStaticCreate.java
        │   │           ├── TestStaticSelects.java
        │   │           └── TestStringBuilderTechniques.java
        │   └── eu
        │       └── hadeco
        │           └── crudapi
        │               ├── MysqlTest.java
        │               ├── OracleTest.java
        │               ├── OrderedTestRunner.java
        │               ├── PostgresqlTest.java
        │               ├── TestApi.java
        │               ├── TestBase.java
        │               └── Tests.java
        └── resources
            ├── blog_mysql.sql
            ├── blog_oracle.sql
            ├── blog_postgresql.sql
            ├── blog_sqlite.sql
            └── blog_sqlserver.sql
/.github/workflows/maven.yml:
--------------------------------------------------------------------------------
1 | name: Java CI
2 |
3 | on: [push]
4 |
5 | jobs:
6 |   build:
7 | 
8 |     runs-on: ubuntu-latest
9 | 
10 |     steps:
11 |     - uses: actions/checkout@v2
12 |     - name: Set up JDK 1.8
13 |       uses: actions/setup-java@v1
14 |       with:
15 |         java-version: 1.8
16 |     - name: Build with Maven
17 |       run: mvn -B package --file pom.xml
18 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .vscode
2 | .classpath
3 | .project
4 | .settings
5 | .idea
6 | target
7 | server.jar
8 | crudtest.db
9 | unittest.bat
10 | **/PostgresqlTest.java
11 | **/SqliteTest.java
12 | **/SqlServerTest.java
13 | **/MysqlTest.java
14 | **/OracleTest.java
15 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | ```
2 | GNU LESSER GENERAL PUBLIC LICENSE
3 | Version 3, 29 June 2007
4 |
5 | Copyright (C) 2007 Free Software Foundation, Inc.
6 | Everyone is permitted to copy and distribute verbatim copies
7 | of this license document, but changing it is not allowed.
8 |
9 |
10 | This version of the GNU Lesser General Public License incorporates
11 | the terms and conditions of version 3 of the GNU General Public
12 | License, supplemented by the additional permissions listed below.
13 |
14 | 0. Additional Definitions.
15 |
16 | As used herein, "this License" refers to version 3 of the GNU Lesser
17 | General Public License, and the "GNU GPL" refers to version 3 of the GNU
18 | General Public License.
19 |
20 | "The Library" refers to a covered work governed by this License,
21 | other than an Application or a Combined Work as defined below.
22 |
23 | An "Application" is any work that makes use of an interface provided
24 | by the Library, but which is not otherwise based on the Library.
25 | Defining a subclass of a class defined by the Library is deemed a mode
26 | of using an interface provided by the Library.
27 |
28 | A "Combined Work" is a work produced by combining or linking an
29 | Application with the Library. The particular version of the Library
30 | with which the Combined Work was made is also called the "Linked
31 | Version".
32 |
33 | The "Minimal Corresponding Source" for a Combined Work means the
34 | Corresponding Source for the Combined Work, excluding any source code
35 | for portions of the Combined Work that, considered in isolation, are
36 | based on the Application, and not on the Linked Version.
37 |
38 | The "Corresponding Application Code" for a Combined Work means the
39 | object code and/or source code for the Application, including any data
40 | and utility programs needed for reproducing the Combined Work from the
41 | Application, but excluding the System Libraries of the Combined Work.
42 |
43 | 1. Exception to Section 3 of the GNU GPL.
44 |
45 | You may convey a covered work under sections 3 and 4 of this License
46 | without being bound by section 3 of the GNU GPL.
47 |
48 | 2. Conveying Modified Versions.
49 |
50 | If you modify a copy of the Library, and, in your modifications, a
51 | facility refers to a function or data to be supplied by an Application
52 | that uses the facility (other than as an argument passed when the
53 | facility is invoked), then you may convey a copy of the modified
54 | version:
55 |
56 | a) under this License, provided that you make a good faith effort to
57 | ensure that, in the event an Application does not supply the
58 | function or data, the facility still operates, and performs
59 | whatever part of its purpose remains meaningful, or
60 |
61 | b) under the GNU GPL, with none of the additional permissions of
62 | this License applicable to that copy.
63 |
64 | 3. Object Code Incorporating Material from Library Header Files.
65 |
66 | The object code form of an Application may incorporate material from
67 | a header file that is part of the Library. You may convey such object
68 | code under terms of your choice, provided that, if the incorporated
69 | material is not limited to numerical parameters, data structure
70 | layouts and accessors, or small macros, inline functions and templates
71 | (ten or fewer lines in length), you do both of the following:
72 |
73 | a) Give prominent notice with each copy of the object code that the
74 | Library is used in it and that the Library and its use are
75 | covered by this License.
76 |
77 | b) Accompany the object code with a copy of the GNU GPL and this license
78 | document.
79 |
80 | 4. Combined Works.
81 |
82 | You may convey a Combined Work under terms of your choice that,
83 | taken together, effectively do not restrict modification of the
84 | portions of the Library contained in the Combined Work and reverse
85 | engineering for debugging such modifications, if you also do each of
86 | the following:
87 |
88 | a) Give prominent notice with each copy of the Combined Work that
89 | the Library is used in it and that the Library and its use are
90 | covered by this License.
91 |
92 | b) Accompany the Combined Work with a copy of the GNU GPL and this license
93 | document.
94 |
95 | c) For a Combined Work that displays copyright notices during
96 | execution, include the copyright notice for the Library among
97 | these notices, as well as a reference directing the user to the
98 | copies of the GNU GPL and this license document.
99 |
100 | d) Do one of the following:
101 |
102 | 0) Convey the Minimal Corresponding Source under the terms of this
103 | License, and the Corresponding Application Code in a form
104 | suitable for, and under terms that permit, the user to
105 | recombine or relink the Application with a modified version of
106 | the Linked Version to produce a modified Combined Work, in the
107 | manner specified by section 6 of the GNU GPL for conveying
108 | Corresponding Source.
109 |
110 | 1) Use a suitable shared library mechanism for linking with the
111 | Library. A suitable mechanism is one that (a) uses at run time
112 | a copy of the Library already present on the user's computer
113 | system, and (b) will operate properly with a modified version
114 | of the Library that is interface-compatible with the Linked
115 | Version.
116 |
117 | e) Provide Installation Information, but only if you would otherwise
118 | be required to provide such information under section 6 of the
119 | GNU GPL, and only to the extent that such information is
120 | necessary to install and execute a modified version of the
121 | Combined Work produced by recombining or relinking the
122 | Application with a modified version of the Linked Version. (If
123 | you use option 4d0, the Installation Information must accompany
124 | the Minimal Corresponding Source and Corresponding Application
125 | Code. If you use option 4d1, you must provide the Installation
126 | Information in the manner specified by section 6 of the GNU GPL
127 | for conveying Corresponding Source.)
128 |
129 | 5. Combined Libraries.
130 |
131 | You may place library facilities that are a work based on the
132 | Library side by side in a single library together with other library
133 | facilities that are not Applications and are not covered by this
134 | License, and convey such a combined library under terms of your
135 | choice, if you do both of the following:
136 |
137 | a) Accompany the combined library with a copy of the same work based
138 | on the Library, uncombined with any other library facilities,
139 | conveyed under the terms of this License.
140 |
141 | b) Give prominent notice with the combined library that part of it
142 | is a work based on the Library, and explaining where to find the
143 | accompanying uncombined form of the same work.
144 |
145 | 6. Revised Versions of the GNU Lesser General Public License.
146 |
147 | The Free Software Foundation may publish revised and/or new versions
148 | of the GNU Lesser General Public License from time to time. Such new
149 | versions will be similar in spirit to the present version, but may
150 | differ in detail to address new problems or concerns.
151 |
152 | Each version is given a distinguishing version number. If the
153 | Library as you received it specifies that a certain numbered version
154 | of the GNU Lesser General Public License "or any later version"
155 | applies to it, you have the option of following the terms and
156 | conditions either of that published version or of any later version
157 | published by the Free Software Foundation. If the Library as you
158 | received it does not specify a version number of the GNU Lesser
159 | General Public License, you may choose any version of the GNU Lesser
160 | General Public License ever published by the Free Software Foundation.
161 |
162 | If the Library as you received it specifies that a proxy can decide
163 | whether future versions of the GNU Lesser General Public License shall
164 | apply, that proxy's public statement of acceptance of any version is
165 | permanent authorization for you to choose that version for the
166 | Library.
167 | ```
168 |
--------------------------------------------------------------------------------
/java-crud-api.iml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/lgpl-3.0.txt:
--------------------------------------------------------------------------------
1 | GNU LESSER GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 |
9 | This version of the GNU Lesser General Public License incorporates
10 | the terms and conditions of version 3 of the GNU General Public
11 | License, supplemented by the additional permissions listed below.
12 |
13 | 0. Additional Definitions.
14 |
15 | As used herein, "this License" refers to version 3 of the GNU Lesser
16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU
17 | General Public License.
18 |
19 | "The Library" refers to a covered work governed by this License,
20 | other than an Application or a Combined Work as defined below.
21 |
22 | An "Application" is any work that makes use of an interface provided
23 | by the Library, but which is not otherwise based on the Library.
24 | Defining a subclass of a class defined by the Library is deemed a mode
25 | of using an interface provided by the Library.
26 |
27 | A "Combined Work" is a work produced by combining or linking an
28 | Application with the Library. The particular version of the Library
29 | with which the Combined Work was made is also called the "Linked
30 | Version".
31 |
32 | The "Minimal Corresponding Source" for a Combined Work means the
33 | Corresponding Source for the Combined Work, excluding any source code
34 | for portions of the Combined Work that, considered in isolation, are
35 | based on the Application, and not on the Linked Version.
36 |
37 | The "Corresponding Application Code" for a Combined Work means the
38 | object code and/or source code for the Application, including any data
39 | and utility programs needed for reproducing the Combined Work from the
40 | Application, but excluding the System Libraries of the Combined Work.
41 |
42 | 1. Exception to Section 3 of the GNU GPL.
43 |
44 | You may convey a covered work under sections 3 and 4 of this License
45 | without being bound by section 3 of the GNU GPL.
46 |
47 | 2. Conveying Modified Versions.
48 |
49 | If you modify a copy of the Library, and, in your modifications, a
50 | facility refers to a function or data to be supplied by an Application
51 | that uses the facility (other than as an argument passed when the
52 | facility is invoked), then you may convey a copy of the modified
53 | version:
54 |
55 | a) under this License, provided that you make a good faith effort to
56 | ensure that, in the event an Application does not supply the
57 | function or data, the facility still operates, and performs
58 | whatever part of its purpose remains meaningful, or
59 |
60 | b) under the GNU GPL, with none of the additional permissions of
61 | this License applicable to that copy.
62 |
63 | 3. Object Code Incorporating Material from Library Header Files.
64 |
65 | The object code form of an Application may incorporate material from
66 | a header file that is part of the Library. You may convey such object
67 | code under terms of your choice, provided that, if the incorporated
68 | material is not limited to numerical parameters, data structure
69 | layouts and accessors, or small macros, inline functions and templates
70 | (ten or fewer lines in length), you do both of the following:
71 |
72 | a) Give prominent notice with each copy of the object code that the
73 | Library is used in it and that the Library and its use are
74 | covered by this License.
75 |
76 | b) Accompany the object code with a copy of the GNU GPL and this license
77 | document.
78 |
79 | 4. Combined Works.
80 |
81 | You may convey a Combined Work under terms of your choice that,
82 | taken together, effectively do not restrict modification of the
83 | portions of the Library contained in the Combined Work and reverse
84 | engineering for debugging such modifications, if you also do each of
85 | the following:
86 |
87 | a) Give prominent notice with each copy of the Combined Work that
88 | the Library is used in it and that the Library and its use are
89 | covered by this License.
90 |
91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license
92 | document.
93 |
94 | c) For a Combined Work that displays copyright notices during
95 | execution, include the copyright notice for the Library among
96 | these notices, as well as a reference directing the user to the
97 | copies of the GNU GPL and this license document.
98 |
99 | d) Do one of the following:
100 |
101 | 0) Convey the Minimal Corresponding Source under the terms of this
102 | License, and the Corresponding Application Code in a form
103 | suitable for, and under terms that permit, the user to
104 | recombine or relink the Application with a modified version of
105 | the Linked Version to produce a modified Combined Work, in the
106 | manner specified by section 6 of the GNU GPL for conveying
107 | Corresponding Source.
108 |
109 | 1) Use a suitable shared library mechanism for linking with the
110 | Library. A suitable mechanism is one that (a) uses at run time
111 | a copy of the Library already present on the user's computer
112 | system, and (b) will operate properly with a modified version
113 | of the Library that is interface-compatible with the Linked
114 | Version.
115 |
116 | e) Provide Installation Information, but only if you would otherwise
117 | be required to provide such information under section 6 of the
118 | GNU GPL, and only to the extent that such information is
119 | necessary to install and execute a modified version of the
120 | Combined Work produced by recombining or relinking the
121 | Application with a modified version of the Linked Version. (If
122 | you use option 4d0, the Installation Information must accompany
123 | the Minimal Corresponding Source and Corresponding Application
124 | Code. If you use option 4d1, you must provide the Installation
125 | Information in the manner specified by section 6 of the GNU GPL
126 | for conveying Corresponding Source.)
127 |
128 | 5. Combined Libraries.
129 |
130 | You may place library facilities that are a work based on the
131 | Library side by side in a single library together with other library
132 | facilities that are not Applications and are not covered by this
133 | License, and convey such a combined library under terms of your
134 | choice, if you do both of the following:
135 |
136 | a) Accompany the combined library with a copy of the same work based
137 | on the Library, uncombined with any other library facilities,
138 | conveyed under the terms of this License.
139 |
140 | b) Give prominent notice with the combined library that part of it
141 | is a work based on the Library, and explaining where to find the
142 | accompanying uncombined form of the same work.
143 |
144 | 6. Revised Versions of the GNU Lesser General Public License.
145 |
146 | The Free Software Foundation may publish revised and/or new versions
147 | of the GNU Lesser General Public License from time to time. Such new
148 | versions will be similar in spirit to the present version, but may
149 | differ in detail to address new problems or concerns.
150 |
151 | Each version is given a distinguishing version number. If the
152 | Library as you received it specifies that a certain numbered version
153 | of the GNU Lesser General Public License "or any later version"
154 | applies to it, you have the option of following the terms and
155 | conditions either of that published version or of any later version
156 | published by the Free Software Foundation. If the Library as you
157 | received it does not specify a version number of the GNU Lesser
158 | General Public License, you may choose any version of the GNU Lesser
159 | General Public License ever published by the Free Software Foundation.
160 |
161 | If the Library as you received it specifies that a proxy can decide
162 | whether future versions of the GNU Lesser General Public License shall
163 | apply, that proxy's public statement of acceptance of any version is
164 | permanent authorization for you to choose that version for the
165 | Library.
166 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
3 | 4.0.0
4 | eu.hadeco
5 | java-crud-api
6 | jar
7 | 1.1
8 | Java CRUD API
9 | https://github.com/kolchagov/java-crud-api
10 |
11 |
12 |
13 | github
14 | GitHub kolchagov Apache Maven Packages
15 | https://maven.pkg.github.com/kolchagov/java-crud-api
16 |
17 |
18 |
19 |
20 | UTF-8
21 | 1.7
22 | 1.8
23 |
24 | 9.4.41.v20210516
25 | 4.0.3
26 | 1.7.30
27 | 2.8.5
28 | 8.0.30
29 |
30 |
31 |
32 | com.zaxxer
33 | HikariCP
34 | ${hikari.version}
35 |
36 |
37 | org.slf4j
38 | slf4j-nop
39 | ${slf4j.version}
40 |
41 |
42 | com.google.code.gson
43 | gson
44 | ${gson.version}
45 |
46 |
47 | org.eclipse.jetty
48 | jetty-server
49 | ${jetty.version}
50 |
51 |
52 | org.eclipse.jetty
53 | jetty-servlet
54 | ${jetty.version}
55 |
56 |
57 | org.eclipse.jetty
58 | jetty-servlets
59 | ${jetty.version}
60 |
61 |
62 | org.eclipse.jetty
63 | jetty-xml
64 | ${jetty.version}
65 |
66 |
67 |
68 | net.sf.json-lib
69 | json-lib
70 | 2.4
71 | jdk15
72 |
73 |
74 |
75 | xom
76 | xom
77 | 1.3.7
78 |
79 |
80 |
81 |
82 | org.springframework
83 | spring-jdbc
84 | 4.3.30.RELEASE
85 | test
86 |
87 |
88 |
89 |
90 |
91 | mysql
92 | mysql-connector-java
93 | ${mysql.version}
94 |
95 |
96 |
97 |
98 |
99 | org.postgresql
100 | postgresql
101 | 42.4.1
102 | test
103 |
104 |
105 |
106 |
107 |
108 | org.xerial
109 | sqlite-jdbc
110 | 3.36.0.2
111 | test
112 |
113 |
114 |
115 | com.microsoft.sqlserver
116 | mssql-jdbc
117 | 6.4.0.jre7
118 |
119 | test
120 |
121 |
122 |
123 | org.springframework
124 | spring-test
125 | 3.2.18.RELEASE
126 | test
127 |
128 |
129 | org.springframework
130 | spring-web
131 | 3.2.18.RELEASE
132 | test
133 |
134 |
135 | net.javacrumbs.json-unit
136 | json-unit
137 | 1.31.1
138 | test
139 |
140 |
141 | javassist
142 | javassist
143 | 3.12.1.GA
144 | test
145 |
146 |
147 | junit
148 | junit
149 | 4.13.2
150 | test
151 |
152 |
153 |
154 |
155 |
156 |
161 |
167 |
168 |
172 |
173 |
179 |
180 |
181 |
182 |
183 |
184 |
185 | org.codehaus.mojo
186 | exec-maven-plugin
187 | 1.6.0
188 |
189 |
190 | default-cli
191 |
192 | java
193 |
194 |
195 | eu.hadeco.crudapi.CrudApiHandler
196 |
197 |
198 |
199 |
200 |
201 | maven-assembly-plugin
202 |
203 | server
204 | false
205 |
206 |
207 | eu.hadeco.crudapi.CrudApiHandler
208 |
209 |
210 |
211 | jar-with-dependencies
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
--------------------------------------------------------------------------------
/src/main/java/com/ivanceras/fluent/sql/Breakdown.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent.sql;
2 |
3 | import java.util.LinkedList;
4 |
5 | /**
6 |  * Breakdown class.
7 | *
8 | * @author ivankol
9 | * @version $Id: $Id
10 | */
11 | public class Breakdown {
12 |
13 | boolean doComma = false;
14 | private StringBuilder sql;
15 |     private LinkedList<Object> parameters;
16 |
17 | /**
18 | * Constructor for Breakdown.
19 | *
20 | * @param sql a {@link java.lang.String} object.
21 | * @param parameters a {@link java.util.LinkedList} object.
22 | */
23 |     public Breakdown(String sql, LinkedList<Object> parameters) {
24 | this.sql = new StringBuilder(sql);
25 | this.parameters = parameters;
26 | }
27 |
28 | /**
29 | * Constructor for Breakdown.
30 | */
31 | public Breakdown() {
32 | this.sql = new StringBuilder();
33 | this.parameters = new LinkedList<>();
34 | }
35 |
36 | /**
37 |      * Getter for the field sql.
38 | *
39 | * @return a {@link java.lang.String} object.
40 | */
41 | public String getSql() {
42 | return sql.toString();
43 | }
44 |
45 | /**
46 | * append.
47 | *
48 | * @param sb a {@link java.lang.StringBuilder} object.
49 | */
50 | public void append(StringBuilder sb) {
51 | this.sql.append(sb);
52 | }
53 |
54 | /**
55 | * append.
56 | *
57 | * @param sb a {@link java.lang.String} object.
58 | */
59 | public void append(String sb) {
60 | this.sql.append(sb);
61 | }
62 |
63 | /**
64 | * appendSp.
65 | *
66 | * @param sb a {@link java.lang.String} object.
67 | */
68 | public void appendSp(String sb) {
69 | append(sb + " ");
70 | }
71 |
72 |
73 | /**
74 | * addParameter.
75 | *
76 | * @param parameter a {@link java.lang.Object} object.
77 | */
78 | public void addParameter(Object parameter) {
79 | this.parameters.add(parameter);
80 | }
81 |
82 | /**
83 | * line.
84 | */
85 | public void line() {
86 | line(0);
87 | }
88 |
89 | /**
90 | * line.
91 | *
92 | * @param tabs a int.
93 | */
94 | public void line(int tabs) {
95 | append("\n");
96 | tabs(tabs);
97 | }
98 |
99 | /**
100 | * tabs.
101 | *
102 | * @param tabs a int.
103 | */
104 | public void tabs(int tabs) {
105 | for (int i = 0; i < tabs; i++) {
106 | append("\t");
107 | }
108 | }
109 |
110 | /**
111 |      * Getter for the field parameters.
112 | *
113 | * @return an array of {@link java.lang.Object} objects.
114 | */
115 | public Object[] getParameters() {
116 | return parameters.toArray(new Object[parameters.size()]);
117 | }
118 |
119 |
120 | }
121 |
--------------------------------------------------------------------------------
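
Usage note: Breakdown is the object the fluent SQL builder's build() hands back in the tests further below — it accumulates the rendered SQL text and the positional parameters side by side. A minimal sketch of driving it directly (normally it is produced by SQL.build(); the query text here is illustrative):

    import com.ivanceras.fluent.sql.Breakdown;

    import java.util.Arrays;

    public class BreakdownSketch {
        public static void main(String[] args) {
            Breakdown bd = new Breakdown();
            bd.appendSp("SELECT * FROM products");  // appends the text plus a trailing space
            bd.appendSp("WHERE price > ?");
            bd.addParameter(100);                   // collected for a PreparedStatement
            System.out.println(bd.getSql());                          // the accumulated SQL text
            System.out.println(Arrays.toString(bd.getParameters()));  // [100]
        }
    }
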
/src/main/java/eu/hadeco/crudapi/ApiConfig.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. I.Kolchagov, All rights reserved.
3 | * Contact: I.Kolchagov (kolchagov (at) gmail.com)
4 | *
5 | * The contents of this file is licensed under the terms of LGPLv3 license.
6 |  * You may read the included file 'lgpl-3.0.txt'
7 | * or https://www.gnu.org/licenses/lgpl-3.0.txt
8 | *
9 | * Software distributed under the License is distributed on an "AS IS" basis,
10 | * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
11 | * for the specific language governing rights and limitations under the License.
12 | *
13 | * The project uses 'fluentsql' internally, licensed under Apache Public License v2.0.
14 | * https://github.com/ivanceras/fluentsql/blob/master/LICENSE.txt
15 | *
16 | */
17 | package eu.hadeco.crudapi;
18 |
19 | import com.zaxxer.hikari.HikariConfig;
20 | import com.zaxxer.hikari.HikariDataSource;
21 | import eu.hadeco.crudapi.RequestHandler.Actions;
22 |
23 | import java.sql.Connection;
24 | import java.sql.SQLException;
25 | import java.util.Map;
26 | import java.util.Properties;
27 | import javax.servlet.http.HttpServletRequest;
28 |
29 | /**
30 | * Extend this class to provide customization
31 | *
32 | * @author ivankol
33 | * @version $Id: $Id
34 | */
35 | public class ApiConfig implements AutoCloseable {
36 |
37 |     /** Constant DERBY="org.apache.derby.jdbc.ClientDataSource" */
38 |     public static final String DERBY = "org.apache.derby.jdbc.ClientDataSource";
39 |     /** Constant JAYBIRD="org.firebirdsql.pool.FBSimpleDataSource" */
40 |     public static final String JAYBIRD = "org.firebirdsql.pool.FBSimpleDataSource";
41 |     /** Constant H2="org.h2.jdbcx.JdbcDataSource" */
42 |     public static final String H2 = "org.h2.jdbcx.JdbcDataSource";
43 |     /** Constant HSQLDB="org.hsqldb.jdbc.JDBCDataSource" */
44 |     public static final String HSQLDB = "org.hsqldb.jdbc.JDBCDataSource";
45 |     /** Constant IBM_JCC="com.ibm.db2.jcc.DB2SimpleDataSource" */
46 |     public static final String IBM_JCC = "com.ibm.db2.jcc.DB2SimpleDataSource";
47 |     /** Constant IBM_INFORMIX="com.informix.jdbcx.IfxDataSource" */
48 |     public static final String IBM_INFORMIX = "com.informix.jdbcx.IfxDataSource";
49 |     /** Constant MICROSOFT="com.microsoft.sqlserver.jdbc.SQLServerD"{trunked} */
50 |     public static final String MICROSOFT = "com.microsoft.sqlserver.jdbc.SQLServerDataSource";
51 |     /** Constant CONNECTOR_J="com.mysql.jdbc.jdbc2.optional.MysqlData"{trunked} */
52 |     public static final String CONNECTOR_J = "com.mysql.jdbc.jdbc2.optional.MysqlDataSource";
53 |     /** Constant MYSQL="com.mysql.jdbc.jdbc2.optional.MysqlData"{trunked} */
54 |     public static final String MYSQL = "com.mysql.cj.jdbc.MysqlDataSource";
55 |     /** Constant MARIADB="org.mariadb.jdbc.MariaDbDataSource" */
56 |     public static final String MARIADB = "org.mariadb.jdbc.MariaDbDataSource";
57 |     /** Constant ORACLE="oracle.jdbc.pool.OracleDataSource" */
58 |     public static final String ORACLE = "oracle.jdbc.pool.OracleDataSource";
59 |     /** Constant ORIENTDB="com.orientechnologies.orient.jdbc.Orien"{trunked} */
60 |     public static final String ORIENTDB = "com.orientechnologies.orient.jdbc.OrientDataSource";
61 |     /** Constant PGJDBC_NG="com.impossibl.postgres.jdbc.PGDataSourc"{trunked} */
62 |     public static final String PGJDBC_NG = "com.impossibl.postgres.jdbc.PGDataSource";
63 |     /** Constant POSTGRESQL="org.postgresql.ds.PGSimpleDataSource" */
64 |     public static final String POSTGRESQL = "org.postgresql.ds.PGSimpleDataSource";
65 |     /** Constant SAP="com.sap.dbtech.jdbc.DriverSapDB" */
66 |     public static final String SAP = "com.sap.dbtech.jdbc.DriverSapDB";
67 |     /** Constant XERIAL="org.sqlite.SQLiteDataSource" */
68 |     public static final String XERIAL = "org.sqlite.SQLiteDataSource";
69 |     /** Constant JCONNECT="com.sybase.jdbc4.jdbc.SybDataSource" */
70 |     public static final String JCONNECT = "com.sybase.jdbc4.jdbc.SybDataSource";
71 |
72 | private static final int CACHE_TO = 1 * 60 * 1000; //1min
73 |     //contains the table metadata map to gain some performance; refreshed every minute (see CACHE_TO)
74 |     private static Map<String, TableMeta> cachedTableMeta = null;
75 | private static long cacheTimestamp = 0;
76 | private final HikariDataSource dataSource;
77 | private final Properties properties;
78 |
79 | /**
80 | * Default constructor with Hikari properties
81 | *
82 | * @param hikariDatasourceProperties a {@link java.util.Properties} object.
83 | */
84 | public ApiConfig(Properties hikariDatasourceProperties) {
85 | properties = hikariDatasourceProperties;
86 | dataSource = new HikariDataSource(new HikariConfig(properties));
87 | }
88 |
89 | ApiConfig(HikariDataSource dataSource) {
90 | properties = null;
91 | this.dataSource = dataSource;
92 | }
93 |
94 | /**
95 | * Verbose configuration
96 | *
97 | * @param user a {@link java.lang.String} object.
98 | * @param password a {@link java.lang.String} object.
99 | * @param databaseName a {@link java.lang.String} object.
100 | * @param portNumber a {@link java.lang.String} object.
101 | * @param serverHostName a {@link java.lang.String} object.
102 | * @param datasourceClassName a {@link java.lang.String} object.
103 | */
104 | public ApiConfig(String user, String password, String databaseName, String portNumber, String serverHostName, String datasourceClassName) {
105 | properties = new Properties();
106 | properties.put("dataSource.user", user);
107 | properties.put("dataSource.password", password);
108 | properties.put("dataSource.databaseName", databaseName);
109 | properties.put("dataSource.serverName", serverHostName);
110 | properties.put("dataSource.portNumber", portNumber);
111 | properties.put("dataSourceClassName", datasourceClassName);
112 | properties.put("dataSource.useUnicode", "true");
113 | properties.put("dataSource.characterEncoding", "utf8");
114 | dataSource = new HikariDataSource(new HikariConfig(properties));
115 | }
116 |
117 | /**
118 | * Verbose configuration, with default port
119 | *
120 | * @param user a {@link java.lang.String} object.
121 | * @param password a {@link java.lang.String} object.
122 | * @param databaseName a {@link java.lang.String} object.
123 | * @param serverHostName a {@link java.lang.String} object.
124 | * @param datasourceClassName a {@link java.lang.String} object.
125 | */
126 | public ApiConfig(String user, String password, String databaseName, String serverHostName, String datasourceClassName) {
127 | properties = new Properties();
128 | if (XERIAL.equals(datasourceClassName)) {
129 | //dataSource props doesn't work with sqlite... why?
130 | properties.put("jdbcUrl", "jdbc:sqlite:" + databaseName);
131 | } else {
132 | properties.put("dataSource.user", user);
133 | properties.put("dataSource.password", password);
134 | properties.put("dataSource.serverName", serverHostName);
135 | properties.put("dataSourceClassName", datasourceClassName);
136 | }
137 | properties.put("dataSource.databaseName", databaseName);
138 | if (MYSQL.equals(datasourceClassName)) {
139 | //this is no longer supported in Mysql Connector Java 8, but utf8 is now default
140 | // properties.put("dataSource.useUnicode", "true");
141 | //This removes warning messages by explicitly set SSL to false.
142 | //if you need SSL, set this to true and provide trust store as required by MySQL
143 | properties.put("dataSource.useSSL", "false");
144 | properties.put("dataSource.characterEncoding", "utf8");
145 | }
146 | if (isPSQL()) {
147 | //allows proper handling of timestamps like "2013-12-11 10:09:08"
148 | properties.put("dataSource.stringType", "unspecified");
149 | }
150 | if (ORACLE.equals(datasourceClassName)) {
151 | properties.remove("dataSourceClassName");
152 | properties.setProperty("DriverClassName", "oracle.jdbc.OracleDriver");
153 | String jdbcUrl = String.format("jdbc:oracle:thin:@%s:%d:%s", serverHostName, 1521, databaseName);
154 | properties.setProperty("jdbcUrl", jdbcUrl);
155 | }
156 | final HikariConfig hikariConfig = new HikariConfig(properties);
157 | // hikariConfig.setConnectionTestQuery("SELECT 1");
158 | hikariConfig.setMaximumPoolSize(10); //tweak connections according to your needs
159 | // hikariConfig.setMaxLifetime(5*60*1000);
160 | // hikariConfig.setMinimumIdle(5);
161 | dataSource = new HikariDataSource(hikariConfig);
162 | }
163 |
164 | /**
165 | * Default constructor for MySQL server
166 | *
167 | * @param user
168 | * @param password
169 | * @param databaseName
170 | * @param serverHostName
171 | */
172 | ApiConfig(String user, String password, String databaseName, String serverHostName) {
173 | this(user, password, databaseName, serverHostName, MYSQL);
174 | }
175 |
176 | /**
177 | * Returns cached tableMeta map but clears the references
178 | *
179 | * @return null if cache is expired or cleared
180 | */
181 |     public static Map<String, TableMeta> getCachedTableMeta() {
182 | if (System.currentTimeMillis() > cacheTimestamp + CACHE_TO) {
183 | cachedTableMeta = null;
184 | } else {
185 | for (TableMeta tableMeta : cachedTableMeta.values()) {
186 | tableMeta.clearReferencedTables();
187 | }
188 | }
189 | return cachedTableMeta;
190 | }
191 |
192 | /**
193 | * Caches tableMeta map - this provides huge performance boost, as reading
194 | * this data is expensive
195 | *
196 | * @param cachedTableMeta a {@link java.util.Map} object.
197 | */
198 |     public static void setCachedTableMeta(Map<String, TableMeta> cachedTableMeta) {
199 | ApiConfig.cachedTableMeta = cachedTableMeta;
200 | cacheTimestamp = System.currentTimeMillis();
201 | }
202 |
203 | /**
204 | * Resets the tableMeta cache.
205 | */
206 | public static void clearCachedTableMeta() {
207 | cachedTableMeta = null;
208 | cacheTimestamp = 0;
209 | }
210 |
211 | /**
212 | * getConnection.
213 | *
214 | * @return a {@link java.sql.Connection} object.
215 | * @throws java.sql.SQLException if any.
216 | */
217 | public Connection getConnection() throws SQLException {
218 | return dataSource.getConnection();
219 | }
220 |
221 | /**
222 | * tableAuthorizer.
223 | *
224 | * @param action root actions: "list" (GET), "create" (POST); ID actions:
225 | * "read" (GET), "update" (PUT), "delete" (DELETE), "increment" (PATCH)
226 | * @param database a {@link java.lang.String} object.
227 | * @param table a {@link java.lang.String} object.
228 | * @return a boolean.
229 | */
230 | public boolean tableAuthorizer(Actions action, String database, String table) {
231 | return true;
232 | }
233 |
234 | /**
235 | * recordFilter.
236 | *
237 | * @param action root actions: "list" (GET), "create" (POST); ID actions:
238 | * "read" (GET), "update" (PUT), "delete" (DELETE), "increment" (PATCH)
239 | * @param database a {@link java.lang.String} object.
240 | * @param table a {@link java.lang.String} object.
241 |      * @return additional filter expressions to apply (e.g. "id,neq,13") or null
242 | */
243 | public String[] recordFilter(Actions action, String database, String table) {
244 | return null;
245 | }
246 |
247 | /**
248 | * columnAuthorizer.
249 | *
250 | * @param action root actions: "list" (GET), "create" (POST); ID actions:
251 | * "read" (GET), "update" (PUT), "delete" (DELETE), "increment" (PATCH)
252 | * @param database a {@link java.lang.String} object.
253 | * @param table a {@link java.lang.String} object.
254 | * @param column a {@link java.lang.String} object.
255 | * @return a boolean.
256 | */
257 | public boolean columnAuthorizer(Actions action, String database, String table, String column) {
258 | return true;
259 | }
260 |
261 | /**
262 | * tenancyFunction.
263 | *
264 | * @param action root actions: "list" (GET), "create" (POST); ID actions:
265 | * "read" (GET), "update" (PUT), "delete" (DELETE), "increment" (PATCH)
266 | * @param database a {@link java.lang.String} object.
267 | * @param table a {@link java.lang.String} object.
268 | * @param column a {@link java.lang.String} object.
269 | * @return a {@link java.lang.Object} object.
270 | */
271 | public Object tenancyFunction(Actions action, String database, String table, String column) {
272 | return null;
273 | }
274 |
275 | /**
276 |      * Processes the input value and returns the sanitized result
277 | *
278 | * @param action root actions: "list" (GET), "create" (POST); ID actions:
279 | * "read" (GET), "update" (PUT), "delete" (DELETE), "increment" (PATCH)
280 | * @param database a {@link java.lang.String} object.
281 | * @param table a {@link java.lang.String} object.
282 | * @param column a {@link java.lang.String} object.
283 | * @param type SQL type as read from JDBC metadata
284 | * @param value a {@link java.lang.Object} object.
285 | * @param context a {@link javax.servlet.http.HttpServletRequest} object.
286 | * @return sanitized value
287 | */
288 | public Object inputSanitizer(Actions action, String database, String table, String column, String type, Object value, HttpServletRequest context) {
289 | return value;
290 | }
291 |
292 | /**
293 |      * Validates the input. Returns true if validation passes, or a String reason
294 |      * for the failed validation
295 | *
296 | * @param action root actions: "list" (GET), "create" (POST); ID actions:
297 | * "read" (GET), "update" (PUT), "delete" (DELETE), "increment" (PATCH)
298 | * @param database a {@link java.lang.String} object.
299 | * @param table a {@link java.lang.String} object.
300 | * @param column a {@link java.lang.String} object.
301 | * @param type SQL type as read from JDBC metadata
302 | * @param value a {@link java.lang.Object} object.
303 | * @param context a {@link javax.servlet.http.HttpServletRequest} object.
304 | * @return Boolean.true if value is valid or String to be reported to the
305 | * client
306 | */
307 | public Object inputValidator(Actions action, String database, String table, String column, String type, Object value, HttpServletRequest context) {
308 | return true;
309 | }
310 |
311 | /**
312 | * Can be used to manipulate the action or input map, right before DB
313 | * operation. (e.g. soft delete operations)
314 | *
315 | * @param action a {@link eu.hadeco.crudapi.RequestHandler.Actions} object.
316 | * @param database a {@link java.lang.String} object.
317 | * @param table a {@link java.lang.String} object.
318 | * @param ids an array of {@link java.lang.String} objects.
319 | * @param input a {@link java.util.Map} object.
320 | * @return a {@link eu.hadeco.crudapi.RequestHandler.Actions} object.
321 | */
322 |     public Actions before(Actions action, String database, String table, String[] ids, Map<String, Object> input) {
323 | return action;
324 | }
325 |
326 | /**
327 | * isMsSQL.
328 | *
329 | * @return a boolean.
330 | */
331 | public final boolean isMsSQL() {
332 | return MICROSOFT.equals(properties.get("dataSourceClassName"));
333 | }
334 |
335 | /**
336 | * isOracle.
337 | *
338 | * @return a boolean.
339 | */
340 | public final boolean isOracle() {
341 | return ORACLE.equals(properties.get("dataSourceClassName")) || properties.getProperty("jdbcUrl", "").startsWith("jdbc:oracle");
342 | }
343 |
344 | /**
345 | * isPSQL.
346 | *
347 | * @return a boolean.
348 | */
349 | public final boolean isPSQL() {
350 | return POSTGRESQL.equals(properties.get("dataSourceClassName"));
351 | }
352 |
353 | /**
354 | * isXERIAL.
355 | *
356 | * @return a boolean.
357 | */
358 | public final boolean isXERIAL() {
359 | return XERIAL.equals(properties.get("dataSourceClassName"));
360 | }
361 |
362 | /** {@inheritDoc} */
363 | @Override
364 | public String toString() {
365 | return properties.toString();
366 | }
367 |
368 | /** {@inheritDoc} */
369 | @Override
370 | public final void close() {
371 | dataSource.close();
372 | }
373 | }
374 |
--------------------------------------------------------------------------------
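
Usage note: ApiConfig is meant to be subclassed so authorization, filtering and sanitization hooks can be plugged into the request pipeline. A minimal sketch, assuming a local PostgreSQL database (the credentials, database name and table names below are placeholders, not part of the project):

    import eu.hadeco.crudapi.ApiConfig;
    import eu.hadeco.crudapi.RequestHandler.Actions;

    public class MyApiConfig extends ApiConfig {

        public MyApiConfig() {
            // verbose constructor with default port; POSTGRESQL is the data-source constant defined above
            super("dbuser", "dbpass", "blog", "localhost", ApiConfig.POSTGRESQL);
        }

        @Override
        public boolean tableAuthorizer(Actions action, String database, String table) {
            // hide an internal table from the API (illustrative rule)
            return !"schema_migrations".equals(table);
        }

        @Override
        public boolean columnAuthorizer(Actions action, String database, String table, String column) {
            // never expose password columns through list requests
            return !("password".equals(column) && Actions.LIST.equals(action));
        }
    }
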
/src/main/java/eu/hadeco/crudapi/CrudApiHandler.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. I.Kolchagov, All rights reserved.
3 | * Contact: I.Kolchagov (kolchagov (at) gmail.com)
4 | *
5 | * The contents of this file is licensed under the terms of LGPLv3 license.
6 |  * You may read the included file 'lgpl-3.0.txt'
7 | * or https://www.gnu.org/licenses/lgpl-3.0.txt
8 | *
9 | * Software distributed under the License is distributed on an "AS IS" basis,
10 | * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
11 | * for the specific language governing rights and limitations under the License.
12 | *
13 | * The project uses 'fluentsql' internally, licensed under Apache Public License v2.0.
14 | * https://github.com/ivanceras/fluentsql/blob/master/LICENSE.txt
15 | *
16 | */
17 |
18 | package eu.hadeco.crudapi;
19 |
20 | import org.eclipse.jetty.server.*;
21 | import org.eclipse.jetty.server.handler.AbstractHandler;
22 |
23 | import javax.servlet.ServletException;
24 | import javax.servlet.http.HttpServletRequest;
25 | import javax.servlet.http.HttpServletResponse;
26 | import java.io.IOException;
27 | import java.util.Map;
28 | import java.util.regex.Pattern;
29 |
30 | import static eu.hadeco.crudapi.ApiConfig.XERIAL;
31 |
32 | class CrudApiHandler extends AbstractHandler {
33 |     static final Pattern TAG_FILTER = Pattern.compile("(<[^>]*>)"); // markup-tag filter (assumed pattern; the regex body is not preserved in this listing)
34 | private final ApiConfig apiConfig;
35 |
36 | private CrudApiHandler() throws IOException {
37 | //this is configuration example from tests!
38 | apiConfig = new ApiConfig("root","root", "crudtest.db", "localhost", XERIAL) {
39 | @Override
40 | public boolean columnAuthorizer(RequestHandler.Actions action, String database, String table, String column) {
41 | return !("password".equals(column) && RequestHandler.Actions.LIST.equals(action));
42 | }
43 |
44 | @Override
45 | public String[] recordFilter(RequestHandler.Actions action, String database, String table) {
46 | return "posts".equals(table) ? new String[]{"id,neq,13"} : null;
47 | }
48 |
49 | @Override
50 | public Object tenancyFunction(RequestHandler.Actions action, String database, String table, String column) {
51 | return "users".equals(table) && "id".equals(column) ? 1 : null;
52 | }
53 |
54 | @Override
55 | public Object inputSanitizer(RequestHandler.Actions action, String database, String table, String column, String type, Object value, HttpServletRequest context) {
56 | return value instanceof String ? TAG_FILTER.matcher(((String) value)).replaceAll("") : value;
57 | }
58 |
59 | @Override
60 | public Object inputValidator(RequestHandler.Actions action, String database, String table, String column, String type, Object value, HttpServletRequest context) {
61 | // ($column=='category_id' && !is_numeric($value))?'must be numeric':true;
62 | return "category_id".equals(column) && !(value instanceof Long) ? "must be numeric" : true;
63 | }
64 |
65 | @Override
66 |             public RequestHandler.Actions before(RequestHandler.Actions action, String database, String table, String[] ids, Map<String, Object> input) {
67 | if ("products".equals(table)) {
68 | if (action == RequestHandler.Actions.CREATE) {
69 | input.put("created_at", "2013-12-11 10:09:08");
70 | } else if (action == RequestHandler.Actions.DELETE) {
71 | action = RequestHandler.Actions.UPDATE;
72 | input.put("deleted_at", "2013-12-11 11:10:09");
73 | }
74 | }
75 | return action;
76 | }
77 | };
78 | }
79 |
80 | /**
81 | * main.
82 | *
83 | * @param args an array of {@link java.lang.String} objects.
84 | * @throws java.lang.Exception if any.
85 | */
86 | public static void main(String[] args) throws Exception {
87 | HttpConfiguration config = new HttpConfiguration();
88 | config.setSendServerVersion(false);
89 | HttpConnectionFactory factory = new HttpConnectionFactory(config);
90 | Server server = new Server();
91 | ServerConnector connector = new ServerConnector(server, factory);
92 | server.setConnectors(new Connector[]{connector});
93 | connector.setHost("localhost");
94 | connector.setPort(8080);
95 | server.addConnector(connector);
96 | CrudApiHandler crudApiHandler = new CrudApiHandler();
97 | server.setHandler(crudApiHandler);
98 | server.start();
99 | server.join();
100 | }
101 |
102 | /** {@inheritDoc} */
103 | @Override
104 | public void handle(String target, Request baseReq, HttpServletRequest req, HttpServletResponse resp)
105 | throws IOException, ServletException {
106 | RequestHandler.handle(req, resp, apiConfig);
107 | baseReq.setHandled(true);
108 | }
109 |
110 | }
111 |
--------------------------------------------------------------------------------
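
Usage note: CrudApiHandler above is the embedded-Jetty entry point; the actual work happens in the single call RequestHandler.handle(request, response, apiConfig). A minimal sketch of the same delegation from a plain servlet container (servlet name, URL mapping and credentials are illustrative assumptions):

    import eu.hadeco.crudapi.ApiConfig;
    import eu.hadeco.crudapi.RequestHandler;

    import javax.servlet.ServletException;
    import javax.servlet.annotation.WebServlet;
    import javax.servlet.http.HttpServlet;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;
    import java.io.IOException;

    @WebServlet("/api/*")
    public class CrudApiServlet extends HttpServlet {

        // one pooled ApiConfig per servlet; uses the MySQL constructor shown in ApiConfig
        private final ApiConfig apiConfig =
                new ApiConfig("dbuser", "dbpass", "blog", "localhost", ApiConfig.MYSQL);

        @Override
        protected void service(HttpServletRequest req, HttpServletResponse resp)
                throws ServletException, IOException {
            // same delegation as CrudApiHandler.handle() above
            RequestHandler.handle(req, resp, apiConfig);
        }
    }
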
/src/main/java/eu/hadeco/crudapi/TableMeta.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. I.Kolchagov, All rights reserved.
3 | * Contact: I.Kolchagov (kolchagov (at) gmail.com)
4 | *
5 | * The contents of this file is licensed under the terms of LGPLv3 license.
6 |  * You may read the included file 'lgpl-3.0.txt'
7 | * or https://www.gnu.org/licenses/lgpl-3.0.txt
8 | *
9 | * Software distributed under the License is distributed on an "AS IS" basis,
10 | * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
11 | * for the specific language governing rights and limitations under the License.
12 | *
13 | * The project uses 'fluentsql' internally, licensed under Apache Public License v2.0.
14 | * https://github.com/ivanceras/fluentsql/blob/master/LICENSE.txt
15 | *
16 | */
17 |
18 | package eu.hadeco.crudapi;
19 |
20 | import java.util.*;
21 |
22 | class TableMeta {
23 | private final String table;
24 |     private final Map<String, TableMeta> referencedTables;
25 |     private final Set<String> foreignKeys;
26 |     private final Map<String, String> foreignToPrimaryKeys;
27 |     private final Map<String, String> referencedTablePrimaryKeys;
28 |     private final Map<String, String> primaryToForeignKeys;
29 | private String primaryKey, referedFromKey, referedToKey;
30 |
31 | /**
32 | * Constructor for TableMeta.
33 | *
34 | * @param table a {@link java.lang.String} object.
35 | */
36 | public TableMeta(String table) {
37 | this.table = table;
38 | foreignKeys = new HashSet<>();
39 | referencedTablePrimaryKeys = new HashMap<>();
40 | foreignToPrimaryKeys = new HashMap<>();
41 | primaryToForeignKeys = new HashMap<>();
42 | referencedTables = new HashMap<>();
43 | }
44 |
45 | /**
46 | * getName.
47 | *
48 | * @return a {@link java.lang.String} object.
49 | */
50 | public String getName() {
51 | return table;
52 | }
53 |
54 | /**
55 |      * Getter for the field primaryKey.
56 | *
57 | * @return a {@link java.lang.String} object.
58 | */
59 | public String getPrimaryKey() {
60 | return primaryKey;
61 | }
62 |
63 | /**
64 |      * Setter for the field primaryKey.
65 | *
66 | * @param primaryKey a {@link java.lang.String} object.
67 | */
68 | public void setPrimaryKey(String primaryKey) {
69 | this.primaryKey = String.format("%s.%s", table, primaryKey);
70 | }
71 |
72 | /**
73 |      * Getter for the field referedToKey.
74 | *
75 | * @return a {@link java.lang.String} object.
76 | */
77 | public String getReferedToKey() {
78 | return referedToKey;
79 | }
80 |
81 | /**
82 | * getRelatedTableKeys.
83 | *
84 | * @return a {@link java.util.Set} object.
85 | */
86 |     public Set<String> getRelatedTableKeys() {
87 |         Set<String> result = new HashSet<>();
88 | for (TableMeta tableMeta : referencedTables.values()) {
89 | result.add(tableMeta.getReferedToKey());
90 | }
91 | return result;
92 | }
93 |
94 | /**
95 |      * Getter for the field foreignKeys.
96 | *
97 | * @return a {@link java.util.Set} object.
98 | */
99 |     public Set<String> getForeignKeys() {
100 | return foreignKeys;
101 | }
102 |
103 | /**
104 | * clearReferencedTables.
105 | */
106 | public void clearReferencedTables() {
107 | referencedTables.clear();
108 | referedFromKey = null;
109 | }
110 |
111 | /**
112 | * addReferencedTable.
113 | *
114 | * @param otherTable a {@link eu.hadeco.crudapi.TableMeta} object.
115 | */
116 | public void addReferencedTable(TableMeta otherTable) {
117 | otherTable.setReferencesFrom(this);
118 | referencedTables.put(otherTable.table, otherTable);
119 | }
120 |
121 | private void setReferencesFrom(TableMeta otherTable) {
122 | if (hasReferenceTo(otherTable.table)) {
123 | //other table has primary key, referenced by this table's foreign key
124 | referedToKey = getReferencedPrimaryKey(otherTable.table);
125 | referedFromKey = primaryToForeignKeys.get(referedToKey);
126 | } else {
127 | referedFromKey = otherTable.getReferencedPrimaryKey(table);
128 | referedToKey = otherTable.primaryToForeignKeys.get(referedFromKey);
129 | }
130 | }
131 |
132 | /**
133 |      * Getter for the field referencedTables.
134 | *
135 | * @return a {@link java.util.Map} object.
136 | */
137 |     public Map<String, TableMeta> getReferencedTables() {
138 | return referencedTables;
139 | }
140 |
141 | /**
142 | * addForeignKeys.
143 | *
144 | * @param pkTable a {@link java.lang.String} object.
145 | * @param pk a {@link java.lang.String} object.
146 | * @param fkTable a {@link java.lang.String} object.
147 | * @param fk a {@link java.lang.String} object.
148 | */
149 | public void addForeignKeys(String pkTable, String pk, String fkTable, String fk) {
150 | String primaryKey = String.format("%s.%s", pkTable, pk);
151 | String foreignKey = String.format("%s.%s", fkTable, fk);
152 | foreignToPrimaryKeys.put(foreignKey, primaryKey);
153 | primaryToForeignKeys.put(primaryKey, foreignKey);
154 | foreignKeys.add(foreignKey);
155 | referencedTablePrimaryKeys.put(pkTable, primaryKey);
156 | }
157 |
158 | private String getReferencedPrimaryKey(String table) {
159 | return referencedTablePrimaryKeys.get(table);
160 | }
161 |
162 | /**
163 | * hasReferenceTo.
164 | *
165 | * @param table a {@link java.lang.String} object.
166 | * @return a boolean.
167 | */
168 | public boolean hasReferenceTo(String table) {
169 | return referencedTablePrimaryKeys.containsKey(table);
170 | }
171 |
172 | /**
173 | * isIntermediateFor.
174 | *
175 | * @param leftTable a {@link java.lang.String} object.
176 | * @param rightTable a {@link java.lang.String} object.
177 | * @return a boolean.
178 | */
179 | public boolean isIntermediateFor(String leftTable, String rightTable) {
180 | return referencedTablePrimaryKeys.containsKey(leftTable) && referencedTablePrimaryKeys.containsKey(rightTable);
181 | }
182 |
183 | /** {@inheritDoc} */
184 | @Override
185 | public String toString() {
186 | final String tableName = String.format("%s: %s", table, foreignToPrimaryKeys);
187 | return String.format("%s, relation: %s", tableName, getRelation());
188 | }
189 |
190 | /**
191 |      * Returns an entry with key pair: thisTable.foreignKey, referringTable.primaryKey
192 | *
193 | * @return key pair Entry or null
194 | */
195 |     public Map.Entry<String, String> getRelation() {
196 | return referedFromKey == null ? null : new AbstractMap.SimpleEntry<>(referedFromKey, referedToKey);
197 | }
198 |
199 | /**
200 | * getRelationsJson.
201 | *
202 | * @return a {@link java.lang.String} object.
203 | */
204 | public String getRelationsJson() {
205 |         Map.Entry<String, String> relation = getRelation();
206 | if (relation == null) {
207 | return "";
208 | } else {
209 | final String fromColumn = relation.getKey().substring(table.length() + 1);
210 | return String.format("\"relations\":{\"%s\":\"%s\"},", fromColumn, relation.getValue());
211 | }
212 | }
213 |
214 | }
215 |
--------------------------------------------------------------------------------
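
Usage note: TableMeta is the package-private metadata holder that ApiConfig caches per table: addForeignKeys() records primary/foreign key pairs and addReferencedTable() wires two tables together so that getRelation() and getRelationsJson() can describe the join. A small sketch of that interaction (same package; the table and column names are illustrative):

    package eu.hadeco.crudapi;

    // illustrative exercise of the package-private TableMeta API shown above
    public class TableMetaSketch {
        public static void main(String[] args) {
            TableMeta users = new TableMeta("users");
            users.setPrimaryKey("id");                       // stored as "users.id"

            TableMeta posts = new TableMeta("posts");
            posts.setPrimaryKey("id");
            // posts.user_id references users.id
            posts.addForeignKeys("users", "id", "posts", "user_id");

            // wiring the tables fills in the refered-from/refered-to keys on "posts"
            users.addReferencedTable(posts);

            System.out.println(posts.getRelation());         // posts.user_id=users.id
            System.out.println(posts.getRelationsJson());    // "relations":{"user_id":"users.id"},
        }
    }
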
/src/test/java/com/ivanceras/fluent/AllTests.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import org.junit.runner.RunWith;
4 | import org.junit.runners.Suite;
5 | import org.junit.runners.Suite.SuiteClasses;
6 |
7 | @RunWith(Suite.class)
8 | @SuiteClasses({TestComplexQuery.class, TestQuery2HiveSQL.class,
9 | TestSimpleComplexFunctions.class, TestSQL1.class,
10 | TestSQLBuilderDelete.class, TestSQLBuilderEquality.class,
11 | TestSQLBuilderFunctions.class, TestSQLBuilderIn.class,
12 | TestSQLBuilderInsert.class, TestSQLBuilderMoreComplexFunctions.class,
13 | TestSQLBuilderNamedColumns.class, TestSQLBuilderRecursive.class,
14 | TestSQLBuilderSelect.class, TestSQLBuilderUpdate.class,
15 | TestSQLBuilderWithRecursive.class, TestSQLFieldsComma.class,
16 | TestSQLOrderBy.class, TestStaticCreate.class, TestStaticSelects.class,
17 | TestStringBuilderTechniques.class})
18 | public class AllTests {
19 |
20 | }
21 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/CTest.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import static org.junit.Assert.assertEquals;
4 |
5 | public class CTest {
6 |
7 |
8 | public static void cassertEquals(String expected, String actual) {
9 | String cleansedExpected = cleanUpSpaces(expected).toLowerCase();
10 | String cleansedActual = cleanUpSpaces(actual).toLowerCase();
11 | System.out.println("cleansed expected:\n" + cleansedExpected);
12 | System.out.println("cleansed actual: \n" + cleansedActual);
13 | assertEquals(cleansedExpected, cleansedActual);
14 | }
15 |
16 | public static String cleanUpSpaces(String str) {
17 | return str.trim().replaceAll("\\s+", " ");
18 | }
19 |
20 | }
21 |
--------------------------------------------------------------------------------
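
Usage note: CTest is the comparison helper the fluent-SQL tests rely on — it collapses runs of whitespace and lower-cases both strings before asserting, so expected SQL can be written with free-form spacing. For example (hypothetical strings):

    import com.ivanceras.fluent.CTest;

    public class CTestSketch {
        public static void main(String[] args) {
            // passes: the two strings differ only in whitespace and letter case
            CTest.cassertEquals("  SELECT *   FROM users ", "select * from users");
        }
    }
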
/src/test/java/com/ivanceras/fluent/TestComplexQuery.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 |
4 | import com.ivanceras.fluent.sql.SQL;
5 | import org.junit.*;
6 |
7 | import static com.ivanceras.fluent.sql.SQL.Statics.*;
8 |
9 |
10 | public class TestComplexQuery {
11 |
12 | @BeforeClass
13 | public static void setUpBeforeClass() throws Exception {
14 | }
15 |
16 | @AfterClass
17 | public static void tearDownAfterClass() throws Exception {
18 | }
19 |
20 | @Before
21 | public void setUp() throws Exception {
22 | }
23 |
24 | @After
25 | public void tearDown() throws Exception {
26 | }
27 |
28 | @Test
29 | public void test() {
30 | String expected = "" +
31 | " SELECT Customers.CustomerName , Orders.OrderID " +
32 | " FROM Customers " +
33 | " INNER JOIN Orders " +
34 | " ON Customers.CustomerID = Orders.CustomerID " +
35 | " ORDER BY Customers.CustomerName ";
36 | String actual = SELECT("Customers.CustomerName", "Orders.OrderID")
37 | .FROM("Customers")
38 | .INNER_JOIN("Orders")
39 | .ON("Customers.CustomerID", "Orders.CustomerID")
40 | .ORDER_BY("Customers.CustomerName").build().getSql();
41 | CTest.cassertEquals(expected, actual);
42 | }
43 |
44 | @Test
45 | public void test2() {
46 | String expected =
47 | " WITH LatestOrders AS (" +
48 | " SELECT MAX ( ID ) " +
49 | " FROM dbo.Orders " +
50 | " GROUP BY CustomerID" +
51 | " ) " +
52 | " SELECT " +
53 | " Customers.* , " +
54 | " Orders.OrderTime AS LatestOrderTime , " +
55 | " ( SELECT COUNT ( * ) " +
56 | " FROM dbo.OrderItems " +
57 | " WHERE OrderID IN " +
58 | " ( SELECT ID FROM dbo.Orders WHERE CustomerID = Customers.ID ) " +
59 | " ) AS TotalItemsPurchased " +
60 | " FROM dbo.Customers " +
61 | " INNER JOIN dbo.Orders " +
62 | " ON Customers.ID = Orders.CustomerID " +
63 | " WHERE " +
64 | " Orders.ID IN ( SELECT ID FROM LatestOrders ) ";
65 |
66 | SQL sql = WITH("LatestOrders", SELECT(MAX("ID"))
67 | .FROM("dbo.Orders")
68 | .GROUP_BY("CustomerID")
69 | )
70 | .append(SELECT()
71 | .FIELD("Customers.*")
72 | .FIELD("Orders.OrderTime").AS("LatestOrderTime")
73 | .FIELD(SELECT(COUNT("*"))
74 | .FROM("dbo.OrderItems")
75 | .WHERE("OrderID").IN(SELECT("ID")
76 | .FROM("dbo.Orders")
77 | .WHERE("CustomerID").EQUAL_TO_FIELD("Customers.ID"))
78 |
79 | ).AS("TotalItemsPurchased")
80 | .FROM("dbo.Customers")
81 | .INNER_JOIN("dbo.Orders")
82 | .ON("Customers.ID", "Orders.CustomerID")
83 | .WHERE("Orders.ID").IN(SELECT("ID").FROM("LatestOrders")));
84 |
85 | String actual = sql.build().getSql();
86 |
87 | System.out.println("expected: \n" + expected);
88 | System.out.println("actual: \n" + actual);
89 | CTest.cassertEquals(expected, actual);
90 | }
91 |
92 |
93 | }
94 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/TestQuery2HiveSQL.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import org.junit.*;
4 |
5 | import static com.ivanceras.fluent.sql.SQL.Statics.SELECT;
6 | import static com.ivanceras.fluent.sql.SQL.Statics.SUM;
7 |
8 | public class TestQuery2HiveSQL {
9 |
10 | @BeforeClass
11 | public static void setUpBeforeClass() throws Exception {
12 | }
13 |
14 | @AfterClass
15 | public static void tearDownAfterClass() throws Exception {
16 |
17 | }
18 |
19 |
20 | @Before
21 | public void setUp() throws Exception {
22 |
23 | }
24 |
25 | @After
26 | public void tearDown() throws Exception {
27 | }
28 |
29 | @Test
30 | public void test() {
31 |
32 | String expected = "SELECT * FROM products WHERE price IS NOT NULL";
33 | String actual = SELECT("*").FROM("products").WHERE("price").IS_NOT_NULL().build().getSql();
34 | CTest.cassertEquals(expected, actual);
35 | }
36 |
37 |
38 | @Test
39 | public void test2() {
40 |
41 | String expected = "SELECT SUM ( price ) FROM products WHERE price IS NOT NULL";
42 | String actual = SELECT(SUM("price")).FROM("products").WHERE("price").IS_NOT_NULL().build().getSql();
43 | CTest.cassertEquals(expected, actual);
44 | }
45 |
46 |
47 | }
48 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/TestSQL1.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import com.ivanceras.fluent.sql.Breakdown;
4 | import com.ivanceras.fluent.sql.SQL;
5 | import org.junit.Test;
6 |
7 | import static com.ivanceras.fluent.sql.SQL.Statics.*;
8 | import static org.junit.Assert.assertArrayEquals;
9 |
10 | public class TestSQL1 {
11 |
12 | @Test
13 | public void test() {
14 | String expected = "" +
15 | " SELECT Customers.CustomerName , Orders.OrderID " +
16 | " FROM Customers " +
17 | " INNER JOIN Orders " +
18 | " ON Customers.CustomerID = Orders.CustomerID " +
19 | " ORDER BY Customers.CustomerName ";
20 | String actual = SELECT("Customers.CustomerName", "Orders.OrderID")
21 | .FROM("Customers")
22 | .INNER_JOIN("Orders")
23 | .ON("Customers.CustomerID", "Orders.CustomerID")
24 | .ORDER_BY("Customers.CustomerName").build().getSql();
25 | CTest.cassertEquals(expected, actual);
26 | }
27 |
28 | @Test
29 | public void test2() {
30 | String expected =
31 | " WITH LatestOrders AS (" +
32 | " SELECT MAX ( ID ) " +
33 | " FROM dbo.Orders " +
34 | " GROUP BY CustomerID" +
35 | " ) " +
36 | " SELECT " +
37 | " Customers.* , " +
38 | " Orders.OrderTime AS LatestOrderTime , " +
39 | " ( SELECT COUNT ( * ) " +
40 | " FROM dbo.OrderItems " +
41 | " WHERE OrderID IN " +
42 | " ( SELECT ID FROM dbo.Orders WHERE CustomerID = Customers.ID ) )" +
43 | " AS TotalItemsPurchased " +
44 | " FROM dbo.Customers " +
45 | " INNER JOIN dbo.Orders " +
46 | " ON Customers.ID = Orders.CustomerID " +
47 | " WHERE " +
48 | " Orders.ID IN ( SELECT ID FROM LatestOrders ) ";
49 |
50 | SQL sql =
51 | WITH("LatestOrders",
52 | SELECT(MAX("ID"))
53 | .FROM("dbo.Orders")
54 | .GROUP_BY("CustomerID")
55 | )
56 | .append(SELECT()
57 | .FIELD("Customers.*")
58 |
59 | .FIELD("Orders.OrderTime").AS("LatestOrderTime")
60 |
61 | .FIELD(SELECT(COUNT("*"))
62 | .FROM("dbo.OrderItems")
63 | .WHERE("OrderID").IN(SELECT("ID")
64 | .FROM("dbo.Orders")
65 | .WHERE("CustomerID").EQUAL_TO_FIELD("Customers.ID"))
66 |
67 | ).AS("TotalItemsPurchased")
68 | .FROM("dbo.Customers")
69 | .INNER_JOIN("dbo.Orders")
70 | .ON("Customers.ID", "Orders.CustomerID")
71 | .WHERE("Orders.ID").IN(SELECT("ID").FROM("LatestOrders")));
72 |
73 | String actual = sql.build().getSql();
74 |
75 | System.out.println("expected: \n" + expected);
76 | System.out.println("actual: \n" + actual);
77 | CTest.cassertEquals(expected, actual);
78 | }
79 |
80 | @Test
81 | public void testDelete() {
82 | String expected = "DELETE FROM products WHERE price = ?";
83 | String actual = DELETE().FROM("products").WHERE("price").EQUAL_TO(10).build().getSql();
84 | String actual2 = DELETE().FROM("products").WHERE("price").EQUAL_TO("10").build().getSql();
85 | CTest.cassertEquals(expected, actual);
86 | CTest.cassertEquals(expected, actual2);
87 | }
88 |
89 |
90 | @Test
91 | public void testRecursiveComplexFunctions() {
92 | String expected =
93 | " WITH LatestOrders AS (" +
94 | " SELECT CustomerName , SUM ( COUNT ( ID ) ) ," +
95 | " COUNT ( MAX ( n_items ) ) " +
96 | " " +
97 | " FROM dbo.Orders" +
98 | " RIGHT JOIN Customers" +
99 | " on Orders.Customer_ID = Customers.ID " +
100 | " LEFT JOIN Persons" +
101 | " ON Persons.name = Customer.name" +
102 | " AND Persons.lastName = Customer.lastName" +
103 | " GROUP BY CustomerID" +
104 | " ) " +
105 | " SELECT " +
106 | " Customers.* , " +
107 | " Orders.OrderTime AS LatestOrderTime , " +
108 | " ( SELECT COUNT ( * ) " +
109 | " FROM dbo.OrderItems " +
110 | " WHERE OrderID IN " +
111 | " ( SELECT ID FROM dbo.Orders WHERE CustomerID = Customers.ID ) " +
112 | " ) AS TotalItemsPurchased " +
113 | " FROM dbo.Customers " +
114 | " INNER JOIN dbo.Orders " +
115 | " USING ID" +
116 | " WHERE " +
117 | " Orders.n_items > ? " +
118 | " AND Orders.ID IN ( SELECT ID FROM LatestOrders ) ";
119 |
120 | SQL sql = WITH("LatestOrders",
121 | SELECT("CustomerName")
122 | .FN(SUM(COUNT("ID")))
123 | .FN(COUNT(MAX("n_items")))
124 | .FROM("dbo.Orders")
125 | .RIGHT_JOIN("Customers")
126 | .ON("Orders.customer_ID", "Customers.ID")
127 | .LEFT_JOIN("Persons")
128 | .ON("Persons.name", "Customer.name")
129 | .AND_ON("Persons.lastName", "Customer.lastName")
130 | .GROUP_BY("CustomerID")
131 | ).append(SELECT()
132 | .FIELD("Customers.*")
133 | .FIELD("Orders.OrderTime").AS("LatestOrderTime")
134 | .FIELD(SELECT(COUNT("*"))
135 | .FROM("dbo.OrderItems")
136 | .WHERE("OrderID").IN(
137 | SELECT("ID")
138 | .FROM("dbo.Orders")
139 | .WHERE("CustomerID").EQUAL_TO_FIELD("Customers.ID"))
140 |
141 | ).AS("TotalItemsPurchased")
142 | .FROM("dbo.Customers")
143 | .INNER_JOIN("dbo.Orders")
144 | .USING("ID")
145 | .WHERE("Orders.n_items").GREATER_THAN(0)
146 | .AND("Orders.ID").IN(SELECT("ID").FROM("LatestOrders")));
147 |
148 | Breakdown actual = sql.build();
149 |
150 | System.out.println("expected: \n" + expected);
151 | System.out.println("actual: \n" + actual.getSql());
152 | CTest.cassertEquals(expected, actual.getSql());
153 | }
154 |
155 | @Test
156 | public void testInsert() {
157 | String expected = "INSERT INTO films ( SELECT * FROM tmp_films WHERE date_prod < ? )";
158 |         Breakdown actual = INSERT().INTO("films").FIELD(SELECT("*").FROM("tmp_films").WHERE("date_prod").LESS_THAN("2004-05-07")).build();
159 | CTest.cassertEquals(expected, actual.getSql());
160 |         assertArrayEquals(new Object[]{"2004-05-07"}, actual.getParameters());
161 | }
162 |
163 |
164 | }
165 |
--------------------------------------------------------------------------------
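
A Breakdown, as returned by build() in the tests above, pairs the rendered SQL (with '?' placeholders) with the parameter values collected by EQUAL_TO, LESS_THAN, GREATER_THAN, IN and the like. A sketch of how one could be handed to plain JDBC; the connection variable, the products table and the java.sql imports are assumed here, together with the usual SQL.Statics static import:

Breakdown bd = SELECT("*").FROM("products").WHERE("price").GREATER_THAN(10).build();
try (PreparedStatement ps = connection.prepareStatement(bd.getSql())) {
    Object[] params = bd.getParameters();
    for (int i = 0; i < params.length; i++) {
        ps.setObject(i + 1, params[i]); // JDBC parameter indexes are 1-based
    }
    try (ResultSet rs = ps.executeQuery()) {
        while (rs.next()) {
            // read the columns of each row as needed
        }
    }
}
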
/src/test/java/com/ivanceras/fluent/TestSQLBuilderDelete.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import org.junit.*;
4 |
5 | import static com.ivanceras.fluent.sql.SQL.Statics.DELETE;
6 |
7 | public class TestSQLBuilderDelete {
8 |
9 | @BeforeClass
10 | public static void setUpBeforeClass() throws Exception {
11 | }
12 |
13 | @AfterClass
14 | public static void tearDownAfterClass() throws Exception {
15 | }
16 |
17 | @Before
18 | public void setUp() throws Exception {
19 | }
20 |
21 | @After
22 | public void tearDown() throws Exception {
23 | }
24 |
25 | @Test
26 | public void test() {
27 | String expected = "DELETE FROM products WHERE price IS NOT NULL";
28 | String actual = DELETE().FROM("products").WHERE("price").IS_NOT_NULL().build().getSql();
29 | CTest.cassertEquals(expected, actual);
30 | }
31 |
32 | @Test
33 | public void test2() {
34 | String expected = "DELETE FROM products WHERE price = ? ";
35 | String actual2 = DELETE().FROM("products").WHERE("price").EQUAL_TO("10").build().getSql();
36 | CTest.cassertEquals(expected, actual2);
37 |
38 | }
39 |
40 | }
41 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/TestSQLBuilderEquality.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import org.junit.*;
4 |
5 | import static com.ivanceras.fluent.sql.SQL.Statics.DELETE;
6 |
7 |
8 | public class TestSQLBuilderEquality {
9 |
10 |
11 | @BeforeClass
12 | public static void setUpBeforeClass() throws Exception {
13 | }
14 |
15 | @AfterClass
16 | public static void tearDownAfterClass() throws Exception {
17 | }
18 |
19 | @Before
20 | public void setUp() throws Exception {
21 | }
22 |
23 | @After
24 | public void tearDown() throws Exception {
25 | }
26 |
27 | @Test
28 | public void test() {
29 | String expected = "DELETE FROM products WHERE price = ?";
30 | String actual = DELETE().FROM("products").WHERE("price").EQUAL_TO(10).build().getSql();
31 | String actual2 = DELETE().FROM("products").WHERE("price").EQUAL_TO("10").build().getSql();
32 | CTest.cassertEquals(expected, actual);
33 | CTest.cassertEquals(expected, actual2);
34 | }
35 |
36 | }
37 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/TestSQLBuilderFunctions.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import com.ivanceras.fluent.sql.Breakdown;
4 | import com.ivanceras.fluent.sql.SQL;
5 | import org.junit.*;
6 |
7 | import static com.ivanceras.fluent.sql.SQL.Statics.*;
8 |
9 | public class TestSQLBuilderFunctions {
10 |
11 | @BeforeClass
12 | public static void setUpBeforeClass() throws Exception {
13 | }
14 |
15 | @AfterClass
16 | public static void tearDownAfterClass() throws Exception {
17 | }
18 |
19 | @Before
20 | public void setUp() throws Exception {
21 | }
22 |
23 | @After
24 | public void tearDown() throws Exception {
25 | }
26 |
27 | @Test
28 | public void testRecursiveComplexFunctions() {
29 | String expected =
30 | " WITH LatestOrders AS (" +
31 | " SELECT CustomerName , SUM ( COUNT ( ID ) ) ," +
32 | " COUNT ( MAX ( n_items ) ) " +
33 | " " +
34 | " FROM dbo.Orders" +
35 | " RIGHT JOIN Customers" +
36 | " on Orders.Customer_ID = Customers.ID " +
37 | " LEFT JOIN Persons" +
38 | " ON Persons.name = Customer.name" +
39 | " AND Persons.lastName = Customer.lastName" +
40 | " GROUP BY CustomerID" +
41 | " ) " +
42 | " SELECT " +
43 | " Customers.* , " +
44 | " Orders.OrderTime AS LatestOrderTime , " +
45 | " ( SELECT COUNT ( * ) " +
46 | " FROM dbo.OrderItems " +
47 | " WHERE OrderID IN " +
48 | " ( SELECT ID FROM dbo.Orders WHERE CustomerID = Customers.ID ) " +
49 | " ) AS TotalItemsPurchased " +
50 | " FROM dbo.Customers " +
51 | " INNER JOIN dbo.Orders " +
52 | " USING ID" +
53 | " WHERE " +
54 | " Orders.n_items > ? " +
55 | " AND Orders.ID IN ( SELECT ID FROM LatestOrders ) ";
56 |
57 | SQL sql = WITH("LatestOrders",
58 | SELECT("CustomerName")
59 | .FN(SUM(COUNT("ID")))
60 | .FN(COUNT(MAX("n_items")))
61 | .FROM("dbo.Orders")
62 | .RIGHT_JOIN("Customers")
63 | .ON("Orders.customer_ID", "Customers.ID")
64 | .LEFT_JOIN("Persons")
65 | .ON("Persons.name", "Customer.name")
66 | .AND_ON("Persons.lastName", "Customer.lastName")
67 | .GROUP_BY("CustomerID")
68 | );
69 | SQL sql2 = SELECT()
70 | .FIELD("Customers.*")
71 | .FIELD("Orders.OrderTime").AS("LatestOrderTime")
72 | .FIELD(SELECT(COUNT("*"))
73 | .FROM("dbo.OrderItems")
74 | .WHERE("OrderID").IN(
75 | SELECT("ID")
76 | .FROM("dbo.Orders")
77 | .WHERE("CustomerID").EQUAL_TO_FIELD("Customers.ID"))
78 |
79 | ).AS("TotalItemsPurchased")
80 | .FROM("dbo.Customers")
81 | .INNER_JOIN("dbo.Orders")
82 | .USING("ID")
83 | .WHERE("Orders.n_items").GREATER_THAN(0)
84 | .AND("Orders.ID").IN(SELECT("ID").FROM("LatestOrders"));
85 |
86 | sql.append(sql2);
87 |
88 | Breakdown actual = sql.build();
89 | System.out.println("expected: \n" + expected);
90 | System.out.println("actual: \n" + actual.getSql());
91 | CTest.cassertEquals(expected, actual.getSql());
92 | }
93 |
94 | }
95 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/TestSQLBuilderIn.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import com.ivanceras.fluent.sql.Breakdown;
4 | import com.ivanceras.fluent.sql.SQL;
5 | import org.junit.*;
6 |
7 | import static com.ivanceras.fluent.sql.SQL.Statics.SELECT;
8 | import static com.ivanceras.fluent.sql.SQL.Statics.WITH_RECURSIVE;
9 | import static org.junit.Assert.assertArrayEquals;
10 |
11 |
12 | public class TestSQLBuilderIn {
13 |
14 |
15 | @BeforeClass
16 | public static void setUpBeforeClass() throws Exception {
17 | }
18 |
19 | @AfterClass
20 | public static void tearDownAfterClass() throws Exception {
21 | }
22 |
23 |
24 | @Before
25 | public void setUp() throws Exception {
26 | }
27 |
28 | @After
29 | public void tearDown() throws Exception {
30 | }
31 |
32 | @Test
33 | public void testNotIn() {
34 | String expected = "SELECT * FROM User WHERE name NOT IN ( ? , ? , ? )";
35 | Breakdown result = SELECT("*").FROM("User").WHERE("name").NOT_IN("lee", "cesar", "anthony").build();
36 | CTest.cassertEquals(expected, result.getSql());
37 | assertArrayEquals(new Object[]{"lee", "cesar", "anthony"}, result.getParameters());
38 | }
39 |
40 | @Test
41 | public void testIn() {
42 | String expected = "SELECT * FROM User WHERE name IN ( ? , ? , ? )";
43 | Breakdown result = SELECT("*").FROM("User").WHERE("name").IN("lee", "cesar", "anthony").build();
44 | CTest.cassertEquals(expected, result.getSql());
45 | assertArrayEquals(new Object[]{"lee", "cesar", "anthony"}, result.getParameters());
46 | }
47 |
48 | @Test
49 | public void test2() {
50 | String expected = "SELECT * FROM User WHERE name = ? ORDER BY name , lastname , password DESC";
51 | SQL sql = SELECT("*").FROM("User").WHERE("name").EQUAL_TO("lee").ORDER_BY("name", "lastname").FIELD("password").DESC();
52 | Breakdown result = sql.build();
53 | System.out.println(result.getSql());
54 | CTest.cassertEquals(expected, result.getSql());
55 | assertArrayEquals(new Object[]{"lee"}, result.getParameters());
56 | }
57 |
58 | @Test
59 | public void testOriginal() {
60 | String expected = "" +
61 | " WITH RECURSIVE child AS " +
62 | " ( SELECT Option.option_id , " +
63 | " Option.clause , " +
64 | " Option.description , " +
65 | " Option.dax_clause , " +
66 | " Option.parent_option_id " +
67 | " FROM dax.Option " +
68 | " WHERE parent_option_id = ? " +
69 | " UNION SELECT Option.option_id , " +
70 | " Option.clause , " +
71 | " Option.description , " +
72 | " Option.dax_clause , " +
73 | " Option.parent_option_id " +
74 | " FROM dax.Option " +
75 | " INNER JOIN child ON Option.parent_option_id = child.option_id ) " +
76 | " SELECT * " +
77 | " FROM child INTERSECT " +
78 | " SELECT * " +
79 | " FROM dax.option " +
80 | " WHERE option_id NOT IN " +
81 | " ( SELECT DISTINCT parent_option_id " +
82 | " FROM dax.Option " +
83 | " WHERE parent_option_id IS NOT NULL ) ";
84 |
85 |
86 | SQL sql = WITH_RECURSIVE("child",
87 | SELECT()
88 | .FIELD("Option.option_id")
89 | .FIELD("Option.clause")
90 | .FIELD("Option.description")
91 | .FIELD("Option.dax_clause")
92 | .FIELD("Option.parent_option_id")
93 | .FROM("dax.Option")
94 | .WHERE("parent_option_id").EQUAL_TO("Personal-Information")
95 | .UNION(
96 | SELECT()
97 | .FIELD("Option.option_id")
98 | .FIELD("Option.clause")
99 | .FIELD("Option.description")
100 | .FIELD("Option.dax_clause")
101 | .FIELD("Option.parent_option_id")
102 | .FROM("dax.Option")
103 | .INNER_JOIN("child")
104 | .ON("Option.parent_option_id", "child.option_id")
105 | )
106 | ).append(
107 | SELECT("*")
108 | .FROM("child")
109 | .INTERSECT(
110 | SELECT("*")
111 | .FROM("dax.Option")
112 | .WHERE("option_id").NOT_IN(
113 | SELECT()
114 | .DISTINCT("parent_option_id")
115 | .FROM("dax.Option")
116 | .WHERE("parent_option_id").IS_NOT_NULL()
117 | )
118 | ));
119 | Breakdown bd = sql.build();
120 | CTest.cassertEquals(expected, bd.getSql());
121 | }
122 |
123 | }
124 |
--------------------------------------------------------------------------------
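
As the tests above exercise, IN and NOT_IN accept either literal values, each rendered as a bound '?', or a nested SELECT, which is inlined as a parenthesized subquery whose own parameters are collected too. A small sketch with hypothetical User and Role tables (output shown roughly, since cassertEquals ignores spacing and case):

// Literal values: one placeholder per value, parameters kept in the order given.
Breakdown byValues = SELECT("*").FROM("User").WHERE("name").IN("lee", "cesar").build();
// byValues.getSql()        -> "SELECT * FROM User WHERE name IN ( ? , ? )"
// byValues.getParameters() -> ["lee", "cesar"]

// Subquery form: the nested SELECT is rendered inline and its EQUAL_TO value is bound.
Breakdown bySubquery = SELECT("*").FROM("User")
        .WHERE("id").IN(SELECT("user_id").FROM("Role").WHERE("name").EQUAL_TO("admin"))
        .build();
// bySubquery.getSql() ends with "WHERE id IN ( SELECT user_id FROM Role WHERE name = ? )"
// bySubquery.getParameters() -> ["admin"]
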
/src/test/java/com/ivanceras/fluent/TestSQLBuilderInsert.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import com.ivanceras.fluent.sql.Breakdown;
4 | import org.junit.*;
5 |
6 | import static com.ivanceras.fluent.sql.SQL.Statics.INSERT;
7 | import static com.ivanceras.fluent.sql.SQL.Statics.SELECT;
8 | import static org.junit.Assert.assertArrayEquals;
9 |
10 | public class TestSQLBuilderInsert {
11 |
12 | @BeforeClass
13 | public static void setUpBeforeClass() throws Exception {
14 | }
15 |
16 | @AfterClass
17 | public static void tearDownAfterClass() throws Exception {
18 | }
19 |
20 |
21 | @Before
22 | public void setUp() throws Exception {
23 | }
24 |
25 | @After
26 | public void tearDown() throws Exception {
27 | }
28 |
29 | @Test
30 | public void test() {
31 | String expected = "INSERT INTO films ( SELECT * FROM tmp_films WHERE date_prod < ? )";
32 |         Breakdown actual = INSERT().INTO("films").FIELD(SELECT("*").FROM("tmp_films").WHERE("date_prod").LESS_THAN("2004-05-07")).build();
33 | CTest.cassertEquals(expected, actual.getSql());
34 |         assertArrayEquals(new Object[]{"2004-05-07"}, actual.getParameters());
35 | }
36 |
37 | }
38 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/TestSQLBuilderMoreComplexFunctions.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import com.ivanceras.fluent.sql.Breakdown;
4 | import org.junit.*;
5 |
6 | import static com.ivanceras.fluent.sql.SQL.Statics.*;
7 |
8 | public class TestSQLBuilderMoreComplexFunctions {
9 |
10 | @BeforeClass
11 | public static void setUpBeforeClass() throws Exception {
12 | }
13 |
14 | @AfterClass
15 | public static void tearDownAfterClass() throws Exception {
16 | }
17 |
18 | @Before
19 | public void setUp() throws Exception {
20 | }
21 |
22 | @After
23 | public void tearDown() throws Exception {
24 | }
25 |
26 | @Test
27 | public void testRecursiveComplexFunctions() {
28 | String expected =
29 | " SELECT " +
30 | " ( SELECT CustomerName , SUM ( COUNT ( ID ) ) ," +
31 | " COUNT ( MAX ( n_items ) )" +
32 | " FROM dbo.Orders" +
33 | " RIGHT JOIN Customers" +
34 | " on Orders.Customer_ID = Customers.ID " +
35 | " LEFT JOIN Persons" +
36 | " ON Persons.name = Customer.name" +
37 | " AND Persons.lastName = Customer.lastName" +
38 | " GROUP BY CustomerID " +
39 | " ) AS LatestOrders ," +
40 | " Customers.* , " +
41 | " Orders.OrderTime AS LatestOrderTime , " +
42 | " ( SELECT COUNT ( * ) " +
43 | " FROM dbo.OrderItems " +
44 | " WHERE OrderID IN " +
45 | " ( SELECT ID FROM dbo.Orders WHERE CustomerID = Customers.ID ) " +
46 | " ) AS TotalItemsPurchased " +
47 | " FROM dbo.Customers " +
48 | " INNER JOIN dbo.Orders " +
49 | " USING ID" +
50 | " WHERE " +
51 | " Orders.n_items > ? " +
52 | " AND Orders.ID IN ( SELECT ID FROM LatestOrders )" +
53 | " ORDER BY ID DESC " +
54 | " LIMIT 100 " +
55 | " OFFSET 20";
56 |
57 | Breakdown actual =
58 | SELECT()
59 | .FIELD(SELECT("CustomerName")
60 | .FN(SUM(COUNT("ID")))
61 | .FN(COUNT(MAX("n_items")))
62 | .FROM("dbo.Orders")
63 | .RIGHT_JOIN("Customers")
64 | .ON("Orders.customer_ID", "Customers.ID")
65 | .LEFT_JOIN("Persons")
66 | .ON("Persons.name", "Customer.name")
67 | .AND_ON("Persons.lastName", "Customer.lastName")
68 | .GROUP_BY("CustomerID")
69 | )
70 | .AS("LatestOrders")
71 | .FIELD("Customers.*")
72 | .FIELD("Orders.OrderTime").AS("LatestOrderTime")
73 | .FIELD(SELECT(COUNT("*"))
74 | .FROM("dbo.OrderItems")
75 | .WHERE("OrderID").IN(
76 | SELECT("ID")
77 | .FROM("dbo.Orders")
78 | .WHERE("CustomerID").EQUAL_TO_FIELD("Customers.ID"))
79 |
80 | ).AS("TotalItemsPurchased")
81 | .FROM("dbo.Customers")
82 | .INNER_JOIN("dbo.Orders")
83 | .USING("ID")
84 | .WHERE("Orders.n_items").GREATER_THAN(0)
85 | .AND("Orders.ID").IN(SELECT("ID").FROM("LatestOrders"))
86 | .ORDER_BY("ID").DESC()
87 | .LIMIT(100)
88 | .OFFSET(20)
89 | .build();
90 | System.out.println("expected: \n" + expected);
91 | System.out.println("actual: \n" + actual.getSql());
92 | CTest.cassertEquals(expected, actual.getSql());
93 | }
94 |
95 | }
96 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/TestSQLBuilderNamedColumns.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import org.junit.After;
4 | import org.junit.AfterClass;
5 | import org.junit.BeforeClass;
6 | import org.junit.Test;
7 |
8 | import static com.ivanceras.fluent.sql.SQL.Statics.SELECT;
9 |
10 |
11 | public class TestSQLBuilderNamedColumns {
12 |
13 | @BeforeClass
14 | public static void setUpBeforeClass() throws Exception {
15 | }
16 |
17 | @AfterClass
18 | public static void tearDownAfterClass() throws Exception {
19 | }
20 |
21 | @After
22 | public void tearDown() throws Exception {
23 | }
24 |
25 | @Test
26 | public void test() {
27 | String expected = "SELECT * FROM prefmgr WHERE prefmgr.name IS NOT NULL";
28 | String actual = SELECT("*").FROM("prefmgr").WHERE("prefmgr.name").IS_NOT_NULL().build().getSql();
29 | CTest.cassertEquals(expected, actual);
30 | }
31 |
32 | /**
33 | * name = "prefmgr.name";
34 | * description = "prefmgr.description";
35 | * prefmgr_id = "prefmgr.prefmgr_id";
36 | * preftype = "prefmgr.preftype";
37 | * pid = "prefmgr.pid";
38 | * aid = "prefmgr.aid";
39 | * cid = "prefmgr.cid";
40 | * size = "prefmgr.size";
41 | */
42 | @Test
43 | public void testMultiField() {
44 | String expected = "SELECT prefmgr.name , prefmgr.description FROM ads.prefmgr WHERE prefmgr.name IS NOT NULL";
45 | String actual = SELECT("prefmgr.name", "prefmgr.description")
46 | .FROM("ads.prefmgr")
47 | .WHERE("prefmgr.name").IS_NOT_NULL().build().getSql();
48 | CTest.cassertEquals(expected, actual);
49 | }
50 |
51 | @Test
52 | public void testMultiFieldDistinct() {
53 | String expected = "SELECT DISTINCT prefmgr.name , prefmgr.description FROM ads.prefmgr WHERE prefmgr.name IS NOT NULL";
54 | String actual = SELECT().DISTINCT("prefmgr.name", "prefmgr.description")
55 | .FROM("ads.prefmgr")
56 | .WHERE("prefmgr.name").IS_NOT_NULL().build().getSql();
57 | CTest.cassertEquals(expected, actual);
58 | }
59 |
60 | @Test
61 | public void testMultiFieldDistinctOn() {
62 | String expected = "SELECT DISTINCT ON ( prefmgr.name , prefmgr.description ) prefmgr.name , prefmgr.description FROM ads.prefmgr WHERE prefmgr.name IS NOT NULL";
63 | String actual = SELECT()
64 | .DISTINCT_ON("prefmgr.name", "prefmgr.description")
65 | .FIELD("prefmgr.name", "prefmgr.description")
66 | .FROM("ads.prefmgr")
67 | .WHERE("prefmgr.name").IS_NOT_NULL().build().getSql();
68 | CTest.cassertEquals(expected, actual);
69 | }
70 |
71 | @Test
72 | public void testMultiFieldAsColumn() {
73 | String expected = "SELECT prefmgr.name AS prefmgr_name , prefmgr.description FROM ads.prefmgr WHERE prefmgr.name IS NOT NULL";
74 | String actual = SELECT("prefmgr.name").AS("prefmgr_name")
75 | .FIELD("prefmgr.description")
76 | .FROM("ads.prefmgr")
77 | .WHERE("prefmgr.name").IS_NOT_NULL().build().getSql();
78 | CTest.cassertEquals(expected, actual);
79 | }
80 |
81 | @Test
82 | public void testMultiFieldSQLAsColumn() {
83 | String expected = "SELECT prefmgr.name AS prefmgr_name , ( SELECT 1 from dual ) as one_dual , prefmgr.description FROM ads.prefmgr WHERE prefmgr.name IS NOT NULL";
84 | String actual = SELECT("prefmgr.name").AS("prefmgr_name")
85 | .FIELD(
86 | SELECT("1")
87 | .FROM("dual")
88 | ).AS("one_dual")
89 |
90 | .FIELD("prefmgr.description")
91 | .FROM("ads.prefmgr")
92 | .WHERE("prefmgr.name").IS_NOT_NULL().build().getSql();
93 | CTest.cassertEquals(expected, actual);
94 | }
95 |
96 |
97 | }
98 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/TestSQLBuilderRecursive.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import com.ivanceras.fluent.sql.Breakdown;
4 | import com.ivanceras.fluent.sql.SQL;
5 | import org.junit.After;
6 | import org.junit.AfterClass;
7 | import org.junit.BeforeClass;
8 | import org.junit.Test;
9 |
10 | import static com.ivanceras.fluent.sql.SQL.Statics.*;
11 | import static org.junit.Assert.assertArrayEquals;
12 |
13 |
14 | public class TestSQLBuilderRecursive {
15 |
16 |
17 | @BeforeClass
18 | public static void setUpBeforeClass() throws Exception {
19 | }
20 |
21 | @AfterClass
22 | public static void tearDownAfterClass() throws Exception {
23 | }
24 |
25 | @After
26 | public void tearDown() throws Exception {
27 | }
28 |
29 | @Test
30 | public void testMultipleTables() {
31 | String expected =
32 | " WITH LatestOrders ( sum_count_idm , count_max_items , cname , colour ) AS (" +
33 | " SELECT " +
34 | " CustomerName , " +
35 | " SUM ( COUNT ( ID ) ) ," +
36 | " COUNT ( MAX ( n_items ) ) , " +
37 | " ? as color" +
38 | " FROM dbo.Orders , customers , persons" +
39 | " RIGHT JOIN Customers" +
40 | " on Orders.Customer_ID = Customers.ID " +
41 | " LEFT JOIN Persons" +
42 | " ON Persons.name = Customer.name" +
43 | " AND Persons.lastName = Customer.lastName" +
44 | " GROUP BY CustomerID" +
45 | " ) " +
46 | " SELECT " +
47 | " Customers.* , " +
48 | " Orders.OrderTime AS LatestOrderTime , " +
49 | " ( SELECT COUNT ( * ) " +
50 | " FROM dbo.OrderItems " +
51 | " WHERE OrderID IN " +
52 | " ( SELECT ID FROM dbo.Orders WHERE CustomerID = Customers.ID ) " +
53 | " ) AS TotalItemsPurchased " +
54 | " FROM dbo.Customers , people , pg_tables " +
55 | " INNER JOIN dbo.Orders " +
56 | " USING ID" +
57 | " WHERE " +
58 | " Orders.n_items > ? " +
59 | " AND Orders.ID IN ( SELECT ID FROM LatestOrders )";
60 |
61 | SQL sql = WITH("LatestOrders")
62 | .openParen()
63 | .FIELD("sum_count_idm", "count_max_items", "cname", "colour")
64 | .closeParen()
65 | .AS()
66 | .FIELD(
67 | SELECT("CustomerName")
68 | .FN(SUM(COUNT("ID")))
69 | .FN(COUNT(MAX("n_items")))
70 | .VALUE("Red").AS("color")
71 | .FROM("dbo.Orders", "customers", "persons")
72 | .RIGHT_JOIN("Customers")
73 | .ON("Orders.customer_ID", "Customers.ID")
74 | .LEFT_JOIN("Persons")
75 | .ON("Persons.name", "Customer.name")
76 | .AND_ON("Persons.lastName", "Customer.lastName")
77 | .GROUP_BY("CustomerID")
78 | )
79 | .append(
80 | SELECT()
81 | .FIELD("Customers.*")
82 | .FIELD("Orders.OrderTime").AS("LatestOrderTime")
83 | .FIELD(SELECT(COUNT("*"))
84 | .FROM("dbo.OrderItems")
85 | .WHERE("OrderID").IN(
86 | SELECT("ID")
87 | .FROM("dbo.Orders")
88 | .WHERE("CustomerID").EQUAL_TO_FIELD("Customers.ID"))
89 |
90 | ).AS("TotalItemsPurchased")
91 | .FROM("dbo.Customers", "people", "pg_tables")
92 | .INNER_JOIN("dbo.Orders")
93 | .USING("ID")
94 | .WHERE("Orders.n_items").GREATER_THAN(0)
95 | .AND("Orders.ID").IN(SELECT("ID").FROM("LatestOrders"))
96 | );
97 |
98 | Breakdown actual = sql.build();
99 | System.out.println("expected: \n" + expected);
100 | System.out.println("actual: \n" + actual.getSql());
101 | CTest.cassertEquals(expected, actual.getSql());
102 | for (Object p : actual.getParameters()) {
103 | System.out.println(p.toString());
104 | }
105 | assertArrayEquals(new Object[]{"Red", 0}, actual.getParameters());
106 | }
107 |
108 | @Test
109 | public void testWhereInQuery() {
110 | String expected =
111 | " WITH LatestOrders AS (" +
112 | " SELECT CustomerName , " +
113 | " SUM ( COUNT ( ID ) ) ," +
114 | " COUNT ( MAX ( n_items ) ) , " +
115 | " Red as color" +
116 | " FROM dbo.Orders" +
117 | " RIGHT JOIN Customers" +
118 | " on Orders.Customer_ID = Customers.ID " +
119 | " LEFT JOIN Persons" +
120 | " ON Persons.name = Customer.name" +
121 | " AND Persons.lastName = Customer.lastName" +
122 | " GROUP BY CustomerID" +
123 | " ) " +
124 | " SELECT " +
125 | " Customers.* , " +
126 | " Orders.OrderTime AS LatestOrderTime , " +
127 | " ( SELECT COUNT ( * ) " +
128 | " FROM dbo.OrderItems " +
129 | " WHERE OrderID IN " +
130 | " ( SELECT ID FROM dbo.Orders WHERE CustomerID = Customers.ID ) " +
131 | " ) AS TotalItemsPurchased " +
132 | " FROM dbo.Customers " +
133 | " INNER JOIN dbo.Orders " +
134 | " USING ID" +
135 | " WHERE " +
136 | " Orders.ID IN ( SELECT ID FROM LatestOrders )" +
137 | " AND Orders.n_items > ? ";
138 |
139 | SQL sql = WITH("LatestOrders",
140 | SELECT("CustomerName")
141 | .FN(SUM(COUNT("ID")))
142 | .FN(COUNT(MAX("n_items")))
143 | .FIELD("Red").AS("color")
144 | .FROM("dbo.Orders")
145 | .RIGHT_JOIN("Customers")
146 | .ON("Orders.customer_ID", "Customers.ID")
147 | .LEFT_JOIN("Persons")
148 | .ON("Persons.name", "Customer.name")
149 | .AND_ON("Persons.lastName", "Customer.lastName")
150 | .GROUP_BY("CustomerID")
151 | )
152 | .append(SELECT()
153 | .FIELD("Customers.*")
154 | .FIELD("Orders.OrderTime").AS("LatestOrderTime")
155 | .FIELD(SELECT(COUNT("*"))
156 | .FROM("dbo.OrderItems")
157 | .WHERE("OrderID").IN(
158 | SELECT("ID")
159 | .FROM("dbo.Orders")
160 | .WHERE("CustomerID").EQUAL_TO_FIELD("Customers.ID"))
161 |
162 | ).AS("TotalItemsPurchased")
163 | .FROM("dbo.Customers")
164 | .INNER_JOIN("dbo.Orders")
165 | .USING("ID")
166 | .WHERE("Orders.ID").IN(SELECT("ID").FROM("LatestOrders"))
167 | .AND("Orders.n_items").GREATER_THAN(0)
168 | );
169 |
170 | Breakdown actual = sql.build();
171 |
172 | System.out.println("expected: \n" + expected);
173 | System.out.println("actual: \n" + actual.getSql());
174 | CTest.cassertEquals(expected, actual.getSql());
175 | for (Object p : actual.getParameters()) {
176 | System.out.println(p.toString());
177 | }
178 | // assertArrayEquals(new Object[]{"Red",0} , actual.getParameters());
179 | }
180 |
181 | }
182 |
--------------------------------------------------------------------------------
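
One detail the two tests above pin down: VALUE("Red").AS("color") binds the value, yielding "? as color" plus a "Red" entry in the parameters, while FIELD("Red").AS("color") emits the bare token "Red as color" and binds nothing. A compact sketch of the difference (output shown roughly, whitespace and case aside):

Breakdown bound = SELECT("CustomerName").VALUE("Red").AS("color").FROM("dbo.Orders").build();
// bound.getSql()        -> "SELECT CustomerName , ? as color FROM dbo.Orders"
// bound.getParameters() -> ["Red"]

Breakdown literal = SELECT("CustomerName").FIELD("Red").AS("color").FROM("dbo.Orders").build();
// literal.getSql()        -> "SELECT CustomerName , Red as color FROM dbo.Orders"
// literal.getParameters() contains no "Red" entry
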
/src/test/java/com/ivanceras/fluent/TestSQLBuilderSelect.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import com.ivanceras.fluent.sql.Breakdown;
4 | import org.junit.*;
5 |
6 | import static com.ivanceras.fluent.sql.SQL.Statics.SELECT;
7 | import static org.junit.Assert.assertArrayEquals;
8 |
9 | public class TestSQLBuilderSelect {
10 |
11 | @BeforeClass
12 | public static void setUpBeforeClass() throws Exception {
13 | }
14 |
15 | @AfterClass
16 | public static void tearDownAfterClass() throws Exception {
17 | }
18 |
19 | @Before
20 | public void setUp() throws Exception {
21 | }
22 |
23 | @After
24 | public void tearDown() throws Exception {
25 | }
26 |
27 | @Test
28 | public void test() {
29 | String expected = "SELECT * FROM products WHERE price IS NOT NULL";
30 | String actual = SELECT("*").FROM("products").WHERE("price").IS_NOT_NULL().build().getSql();
31 | CTest.cassertEquals(expected, actual);
32 | }
33 |
34 | @Test
35 | public void test2() {
36 | String expected = "SELECT name FROM products WHERE price >= ? ";
37 | Breakdown actual2 = SELECT("name").FROM("products").WHERE("price").GREATER_THAN_OR_EQUAL("10").build();
38 | CTest.cassertEquals(expected, actual2.getSql());
39 | assertArrayEquals(new Object[]{"10"}, actual2.getParameters());
40 |
41 | }
42 |
43 | @Test
44 | public void test4() {
45 | String expected = "SELECT name FROM products WHERE price > ? LIMIT 10 OFFSET 1";
46 | String actual2 = SELECT("name").FROM("products").WHERE("price").GREATER_THAN("10").LIMIT(10).OFFSET(1).build().getSql();
47 | CTest.cassertEquals(expected, actual2);
48 |
49 | }
50 |
51 | @Test
52 | public void test3() {
53 | String expected = "SELECT name FROM products WHERE price < ? ";
54 | String actual2 = SELECT("name").FROM("products").WHERE("price").LESS_THAN("10").build().getSql();
55 | CTest.cassertEquals(expected, actual2);
56 |
57 | }
58 |
59 | @Test
60 | public void test5() {
61 | String expected = "SELECT name FROM products LEFT JOIN item USING item_id , name WHERE price > ? LIMIT 10 OFFSET 1";
62 | String actual2 = SELECT("name").FROM("products").LEFT_JOIN("item").USING("item_id", "name").WHERE("price").GREATER_THAN("10").LIMIT(10).OFFSET(1).build().getSql();
63 | CTest.cassertEquals(expected, actual2);
64 |
65 | }
66 |
67 | @Test
68 | public void test6() {
69 | String expected = "SELECT name" +
70 | " FROM products" +
71 | " LEFT JOIN item" +
72 | " ON item_id = product_id" +
73 | " AND products.name = item.name" +
74 | " WHERE price > ?" +
75 | " LIMIT 10" +
76 | " OFFSET 1";
77 | String actual2 = SELECT("name")
78 | .FROM("products")
79 | .LEFT_JOIN("item")
80 | .ON("item_id", "product_id")
81 | .AND_ON("products.name", "item.name")
82 | .WHERE("price").GREATER_THAN("10")
83 | .LIMIT(10)
84 | .OFFSET(1)
85 | .build()
86 | .getSql();
87 |
88 | CTest.cassertEquals(expected, actual2);
89 |
90 | }
91 |
92 |
93 | }
94 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/TestSQLBuilderUpdate.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import com.ivanceras.fluent.sql.Breakdown;
4 | import org.junit.*;
5 |
6 | import static com.ivanceras.fluent.sql.SQL.Statics.UPDATE;
7 | import static org.junit.Assert.assertArrayEquals;
8 |
9 | public class TestSQLBuilderUpdate {
10 |
11 | @BeforeClass
12 | public static void setUpBeforeClass() throws Exception {
13 | }
14 |
15 | @AfterClass
16 | public static void tearDownAfterClass() throws Exception {
17 | }
18 |
19 | @Before
20 | public void setUp() throws Exception {
21 | }
22 |
23 | @After
24 | public void tearDown() throws Exception {
25 | }
26 |
27 | @Test
28 | public void test() {
29 | String expected = "UPDATE films SET kind = ? WHERE kind = ?";
30 | Breakdown actual = UPDATE("films").SET("kind").EQUAL("Dramatic").WHERE("kind").EQUAL_TO("Drama").build();
31 | Object[] expectedParam = new Object[]{"Dramatic", "Drama"};
32 | CTest.cassertEquals(expected, actual.getSql());
33 | assertArrayEquals(expectedParam, actual.getParameters());
34 | }
35 |
36 | }
37 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/TestSQLBuilderWithRecursive.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import com.ivanceras.fluent.sql.Breakdown;
4 | import com.ivanceras.fluent.sql.SQL;
5 | import org.junit.*;
6 |
7 | import static com.ivanceras.fluent.sql.SQL.Statics.*;
8 | import static org.junit.Assert.assertArrayEquals;
9 |
10 |
11 | public class TestSQLBuilderWithRecursive {
12 |
13 | @BeforeClass
14 | public static void setUpBeforeClass() throws Exception {
15 | }
16 |
17 | @AfterClass
18 | public static void tearDownAfterClass() throws Exception {
19 | }
20 |
21 | @Before
22 | public void setUp() throws Exception {
23 | }
24 |
25 | @After
26 | public void tearDown() throws Exception {
27 | }
28 |
29 | @Test
30 | public void testOriginal() {
31 | String expected = "" +
32 | " WITH RECURSIVE child AS " +
33 | " ( SELECT Option.option_id , " +
34 | " Option.clause , " +
35 | " Option.description , " +
36 | " Option.dax_clause , " +
37 | " Option.parent_option_id " +
38 | " FROM dax.Option )";
39 |
40 |
41 | Breakdown bd = WITH_RECURSIVE("child",
42 | SELECT()
43 | .FIELD("Option.option_id")
44 | .FIELD("Option.clause")
45 | .FIELD("Option.description")
46 | .FIELD("Option.dax_clause")
47 | .FIELD("Option.parent_option_id")
48 | .FROM("dax.Option")
49 | ).build();
50 | CTest.cassertEquals(expected, bd.getSql());
51 | }
52 |
53 | @Test
54 | public void testRecursiveComplexFunctions() {
55 | String expected =
56 | " WITH RECURSIVE LatestOrders ( sum_count_idm , count_max_items , cname , colour ) " +
57 | " AS (" +
58 | " SELECT customername , SUM ( COUNT ( ID ) ) ," +
59 | " COUNT ( MAX ( n_items ) ) , " +
60 | " ? as color" +
61 | " FROM dbo.Orders" +
62 | " RIGHT JOIN Customers" +
63 | " on Orders.Customer_ID = Customers.ID " +
64 | " LEFT JOIN Persons" +
65 | " ON Persons.name = Customer.name" +
66 | " AND Persons.lastName = Customer.lastName" +
67 | " GROUP BY CustomerID " +
68 | " ) " +
69 | " SELECT " +
70 | " Customers.* , " +
71 | " Orders.OrderTime AS LatestOrderTime , " +
72 | " ( SELECT COUNT ( * ) " +
73 | " FROM dbo.OrderItems " +
74 | " WHERE OrderID IN " +
75 | " ( SELECT ID FROM dbo.Orders WHERE CustomerID = Customers.ID ) ) " +
76 | " AS TotalItemsPurchased " +
77 | " FROM dbo.Customers " +
78 | " INNER JOIN dbo.Orders " +
79 | " USING ID" +
80 | " WHERE " +
81 | " Orders.n_items > ? " +
82 | " AND Orders.ID IN ( SELECT ID FROM LatestOrders )";
83 |
84 | SQL sql1 = WITH_RECURSIVE("LatestOrders")
85 | .openParen().FIELD("sum_count_idm", "count_max_items", "cname", "colour").closeParen()
86 | .AS()
87 | .FIELD((SELECT("CustomerName")
88 | .FN(SUM(COUNT("ID")))
89 | .FN(COUNT(MAX("n_items")))
90 | .VALUE("Red").AS("color")
91 | .FROM("dbo.Orders")
92 | .RIGHT_JOIN("Customers")
93 | .ON("Orders.customer_ID", "Customers.ID")
94 | .LEFT_JOIN("Persons")
95 | .ON("Persons.name", "Customer.name")
96 | .AND_ON("Persons.lastName", "Customer.lastName")
97 | .GROUP_BY("CustomerID")
98 | ))
99 | .append(
100 | SELECT()
101 | .FIELD("Customers.*")
102 | .FIELD("Orders.OrderTime").AS("LatestOrderTime")
103 | .FIELD(SELECT(COUNT("*"))
104 | .FROM("dbo.OrderItems")
105 | .WHERE("OrderID").IN(
106 | SELECT("ID")
107 | .FROM("dbo.Orders")
108 | .WHERE("CustomerID").EQUAL_TO_FIELD("Customers.ID"))
109 |
110 | ).AS("TotalItemsPurchased")
111 | .FROM("dbo.Customers")
112 | .INNER_JOIN("dbo.Orders")
113 | .USING("ID")
114 | .WHERE("Orders.n_items").GREATER_THAN(0)
115 | .AND("Orders.ID").IN(SELECT("ID").FROM("LatestOrders")));
116 |
117 | Breakdown actual = sql1.build();
118 | System.out.println("expected: \n" + expected);
119 | System.out.println("actual: \n" + actual.getSql());
120 | CTest.cassertEquals(expected, actual.getSql());
121 | for (Object p : actual.getParameters()) {
122 | System.out.println(p.toString());
123 | }
124 | assertArrayEquals(new Object[]{"Red", 0}, actual.getParameters());
125 | }
126 |
127 |
128 | }
129 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/TestSQLFieldsComma.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import org.junit.Test;
4 |
5 | import static com.ivanceras.fluent.sql.SQL.Statics.SELECT;
6 |
7 | public class TestSQLFieldsComma {
8 |
9 | @Test
10 | public void testFieldsAutoComma() {
11 | String expected = "" +
12 | " SELECT Customers.CustomerName , Orders.OrderID " +
13 | " FROM Customers ";
14 |
15 | String actual = SELECT()
16 | .FIELD("Customers.CustomerName")
17 | .FIELD("Orders.OrderID")
18 | .FROM("Customers").build().getSql();
19 |
20 | CTest.cassertEquals(expected, actual);
21 | }
22 |
23 |
24 | @Test
25 | public void testFieldsAutoCommaGroupBy() {
26 | String expected = "" +
27 | " SELECT Customers.CustomerName , Orders.OrderID " +
28 | " FROM Customers "
29 | + " GROUP BY CustomerName , Date";
30 |
31 | String actual = SELECT()
32 | .FIELD("Customers.CustomerName")
33 | .FIELD("Orders.OrderID")
34 | .FROM("Customers")
35 | .GROUP_BY("CustomerName", "Date")
36 | .build().getSql();
37 |
38 | CTest.cassertEquals(expected, actual);
39 | }
40 |
41 | }
42 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/TestSQLOrderBy.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import com.ivanceras.fluent.sql.Breakdown;
4 | import com.ivanceras.fluent.sql.SQL;
5 | import org.junit.*;
6 |
7 | import static com.ivanceras.fluent.sql.SQL.Statics.SELECT;
8 |
9 | public class TestSQLOrderBy {
10 |
11 | @BeforeClass
12 | public static void setUpBeforeClass() throws Exception {
13 | }
14 |
15 | @AfterClass
16 | public static void tearDownAfterClass() throws Exception {
17 | }
18 |
19 | @Before
20 | public void setUp() throws Exception {
21 | }
22 |
23 | @After
24 | public void tearDown() throws Exception {
25 | }
26 |
27 | @Test
28 | public void test() {
29 | String expected = "" +
30 | " SELECT Customers.CustomerName , Orders.OrderID " +
31 | " FROM Customers " +
32 | " ORDER BY name desc , description";
33 |
34 |
35 | SQL sql = SELECT("Customers.CustomerName", "Orders.OrderID")
36 | .FROM("Customers")
37 | .ORDER_BY("name").DESC()
38 | .FIELD("description");
39 | Breakdown bk = sql.build();
40 | CTest.cassertEquals(expected, bk.getSql());
41 | }
42 |
43 | }
44 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/TestSimpleComplexFunctions.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import com.ivanceras.fluent.sql.Breakdown;
4 | import com.ivanceras.fluent.sql.SQL;
5 | import org.junit.*;
6 |
7 | import static com.ivanceras.fluent.sql.SQL.Statics.*;
8 |
9 |
10 | public class TestSimpleComplexFunctions {
11 |
12 |
13 | @BeforeClass
14 | public static void setUpBeforeClass() throws Exception {
15 | }
16 |
17 | @AfterClass
18 | public static void tearDownAfterClass() throws Exception {
19 | }
20 |
21 | @Before
22 | public void setUp() throws Exception {
23 | }
24 |
25 | @After
26 | public void tearDown() throws Exception {
27 | }
28 |
29 | @Test
30 | public void testRecursiveComplexFunctions() {
31 | String expected =
32 | " WITH LatestOrders AS (" +
33 | " SELECT CustomerName , SUM ( COUNT ( ID ) ) ," +
34 | " COUNT ( MAX ( n_items ) ) " +
35 | " " +
36 | " FROM dbo.Orders" +
37 | " RIGHT JOIN Customers" +
38 | " on Orders.Customer_ID = Customers.ID " +
39 | " LEFT JOIN Persons" +
40 | " ON Persons.name = Customer.name" +
41 | " AND Persons.lastName = Customer.lastName" +
42 | " GROUP BY CustomerID" +
43 | " ) " +
44 | " SELECT " +
45 | " Customers.* , " +
46 | " Orders.OrderTime AS LatestOrderTime , " +
47 | " ( SELECT COUNT ( * ) " +
48 | " FROM dbo.OrderItems " +
49 | " WHERE OrderID IN " +
50 | " ( SELECT ID FROM dbo.Orders WHERE CustomerID = Customers.ID ) ) " +
51 | " AS TotalItemsPurchased " +
52 | " FROM dbo.Customers " +
53 | " INNER JOIN dbo.Orders " +
54 | " USING ID" +
55 | " WHERE " +
56 | " Orders.n_items > ? " +
57 | " AND Orders.ID IN ( SELECT ID FROM LatestOrders ) ";
58 |
59 | SQL sql = WITH("LatestOrders",
60 | SELECT("CustomerName")
61 | .FN(SUM(COUNT("ID")))
62 | .FN(COUNT(MAX("n_items")))
63 | .FROM("dbo.Orders")
64 | .RIGHT_JOIN("Customers")
65 | .ON("Orders.customer_ID", "Customers.ID")
66 | .LEFT_JOIN("Persons")
67 | .ON("Persons.name", "Customer.name")
68 | .AND_ON("Persons.lastName", "Customer.lastName")
69 | .GROUP_BY("CustomerID")
70 | ).append(SELECT()
71 | .FIELD("Customers.*")
72 | .FIELD("Orders.OrderTime").AS("LatestOrderTime")
73 | .FIELD(SELECT(COUNT("*"))
74 | .FROM("dbo.OrderItems")
75 | .WHERE("OrderID").IN(
76 | SELECT("ID")
77 | .FROM("dbo.Orders")
78 | .WHERE("CustomerID").EQUAL_TO_FIELD("Customers.ID"))
79 |
80 | ).AS("TotalItemsPurchased")
81 | .FROM("dbo.Customers")
82 | .INNER_JOIN("dbo.Orders")
83 | .USING("ID")
84 | .WHERE("Orders.n_items").GREATER_THAN(0)
85 | .AND("Orders.ID").IN(SELECT("ID").FROM("LatestOrders")));
86 |
87 | Breakdown actual = sql.build();
88 |
89 | System.out.println("expected: \n" + expected);
90 | System.out.println("actual: \n" + actual.getSql());
91 | CTest.cassertEquals(expected, actual.getSql());
92 | }
93 |
94 | }
95 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/TestStaticCreate.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 |
4 | import com.ivanceras.fluent.sql.SQL;
5 | import org.junit.*;
6 |
7 | import static com.ivanceras.fluent.sql.SQL.Statics.CREATE_TABLE;
8 |
9 | public class TestStaticCreate {
10 |
11 |
12 | @BeforeClass
13 | public static void setUpBeforeClass() throws Exception {
14 | }
15 |
16 | @AfterClass
17 | public static void tearDownAfterClass() throws Exception {
18 | }
19 |
20 |
21 | @Before
22 | public void setUp() throws Exception {
23 | }
24 |
25 | @After
26 | public void tearDown() throws Exception {
27 |
28 | }
29 |
30 | @Test
31 | public void testReferences() {
32 | String expected = "CREATE TABLE portal.user\n" +
33 | "(\n" +
34 | " name character varying (60) ,\n" +
35 | " password character varying ,\n" +
36 | " firstname character varying ,\n" +
37 | " lastname character varying ,\n" +
38 | " email character varying ,\n" +
39 | " user_id character varying NOT NULL ,\n" +
40 | " photo character varying ,\n" +
41 | " CONSTRAINT user_pkey PRIMARY KEY ( user_id ) ,\n" +
42 | " CONSTRAINT unique_name UNIQUE ( name )\n" +
43 | ")";
44 |
45 | SQL sql = CREATE_TABLE("portal.user")
46 | .openParen()
47 | .FIELD("name").keyword("character varying").keyword("(60)").comma()
48 | .FIELD("password").keyword("character varying").comma()
49 | .FIELD("firstname").keyword("character varying").comma()
50 | .FIELD("lastname").keyword("character varying").comma()
51 | .FIELD("email").keyword("character varying").comma()
52 | .FIELD("user_id").keyword("character varying").NOT_NULL().comma()
53 | .FIELD("photo").keyword("character varying").comma()
54 | .CONSTRAINT("user_pkey").PRIMARY_KEY("user_id").comma()
55 | .CONSTRAINT("unique_name").UNIQUE("name")
56 | .closeParen();
57 | System.out.println(sql.build().getSql());
58 | CTest.cassertEquals(expected, sql.build().getSql());
59 | }
60 |
61 | }
62 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/TestStaticSelects.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 |
4 | import com.ivanceras.fluent.sql.SQL;
5 | import org.junit.*;
6 |
7 | import static com.ivanceras.fluent.sql.SQL.Statics.*;
8 |
9 | public class TestStaticSelects {
10 |
11 | @BeforeClass
12 | public static void setUpBeforeClass() throws Exception {
13 | }
14 |
15 | @AfterClass
16 | public static void tearDownAfterClass() throws Exception {
17 | }
18 |
19 |
20 | @Before
21 | public void setUp() throws Exception {
22 | }
23 |
24 | @After
25 | public void tearDown() throws Exception {
26 |
27 | }
28 |
29 | @Test
30 | public void test() {
31 | String expected =
32 | "SELECT *" +
33 | " FROM Users";
34 |
35 | SQL sql = SELECT("*").FROM("Users");
36 | System.out.println(sql.build().getSql());
37 | CTest.cassertEquals(expected, sql.build().getSql());
38 | }
39 |
40 | @Test
41 | public void test2() {
42 | String expected =
43 | "SELECT *" +
44 | " FROM Users";
45 | SQL sql2 = SELECT("*").FROM("Users");
46 | System.out.println(sql2.build().getSql());
47 | CTest.cassertEquals(expected, sql2.build().getSql());
48 | }
49 |
50 | @Test
51 | public void test3() {
52 | String expected = " SELECT *" +
53 | " FROM Users" +
54 | " WHERE name = ?";
55 | SQL sql3 = SELECT("*").FROM("Users").WHERE("name").EQUAL_TO("Lee");
56 | System.out.println(sql3.build().getSql());
57 | CTest.cassertEquals(expected, sql3.build().getSql());
58 | }
59 |
60 | @Test
61 | public void test4() {
62 | String expected = "SELECT MAX ( SUM ( length ) )" +
63 | " FROM Users" +
64 | " WHERE name = ?";
65 | SQL sql4 = SELECT(MAX(SUM("length"))).FROM("Users").WHERE("name").EQUAL_TO("Lee");
66 | System.out.println(sql4.build().getSql());
67 | CTest.cassertEquals(expected, sql4.build().getSql());
68 | }
69 |
70 | @Test
71 | public void test5() {
72 | String expected = "SELECT SUM ( SUM ( SUM ( length ) ) ) FROM Users WHERE name = ?";
73 | SQL sql5 = SELECT(SUM(SUM(SUM("length")))).FROM("Users").WHERE("name").EQUAL_TO("Lee");
74 | System.out.println(sql5.build().getSql());
75 | CTest.cassertEquals(expected, sql5.build().getSql());
76 | }
77 |
78 | @Test
79 | public void test6() {
80 | String expected =
81 | " SELECT SUM ( SUM ( SUM ( length ) ) )" +
82 | " FROM Users" +
83 | " WHERE name = ?";
84 | SQL sql6 = SELECT(SUM(SUM(SUM("length")))).FROM("Users").WHERE("name").EQUAL_TO("Lee");
85 | System.out.println(sql6.build().getSql());
86 | CTest.cassertEquals(expected, sql6.build().getSql());
87 | }
88 |
89 | @Test
90 | public void test7() {
91 | String expected =
92 | " SELECT SUM ( SUM ( SUM ( length ) ) ) , " +
93 | " MAX ( MAX ( SELECT" +
94 | " FROM dual ) )" +
95 | " FROM Users" +
96 | " LEFT JOIN Role" +
97 | " USING name , firstname" +
98 | " RIGHT JOIN User_role" +
99 | " ON" +
100 | " id = id" +
101 | " WHERE name = ?" +
102 | " AND description IN ( ? , ? , ? )" +
103 | " UNION SELECT DISTINCT name ," +
104 | " description" +
105 | " WHERE name IN ( SUM ( SUM ( lee ) ) ) GROUP BY name , description" +
106 | " ORDER BY name DESC LIMIT 10 OFFSET 20";
107 |
108 | SQL sql7 = SELECT(SUM(SUM(SUM("length")))
109 | .FN(MAX(MAX(SELECT().FROM("dual"))))
110 | .FROM("Users")
111 | .LEFT_JOIN("Role")
112 | .USING("name", "firstname")
113 | .RIGHT_JOIN("User_role")
114 | .ON("id", "id")
115 | .WHERE("name").EQUAL_TO("Lee")
116 | .AND("description").IN("desc1", "desc2", "desc3")
117 | .UNION(
118 | SELECT()
119 | .DISTINCT("name", "description")
120 | .WHERE("name").IN(SUM(SUM("lee"))))
121 | .GROUP_BY("name", "description")
122 | .ORDER_BY("name").DESC()
123 | .LIMIT(10)
124 | .OFFSET(20)
125 | );
126 | System.out.println(sql7.build().getSql());
127 | CTest.cassertEquals(expected, sql7.build().getSql());
128 | }
129 |
130 |
131 | @Test
132 | public void test8() {
133 | String expected =
134 | " SELECT SUM ( MAX ( MIN ( length ) ) ) , AVG ( SELECT" +
135 | " FROM dual )" +
136 | " FROM Users" +
137 | " LEFT JOIN Role" +
138 | " USING name , firstname" +
139 | " RIGHT JOIN User_role" +
140 | " ON" +
141 | " id = id" +
142 | " WHERE name = ?" +
143 | " AND description IN ( ? , ? , ? )" +
144 | " UNION SELECT DISTINCT name ," +
145 | " description" +
146 | " WHERE name IN ( SUM ( SUM ( lee ) ) ) " +
147 | " AND SUM ( ID ) > ? " +
148 | " GROUP BY lower ( name ) , description " +
149 | " HAVING name = ?" +
150 | " ORDER BY name DESC LIMIT 10 OFFSET 30 ";
151 |
152 | SQL sql8 = SELECT(SUM(MAX(MIN("length")))
153 | .FN(AVG(SELECT().FROM("dual"))))
154 | .FROM("Users")
155 | .LEFT_JOIN("Role")
156 | .USING("name", "firstname")
157 | .RIGHT_JOIN("User_role")
158 | .ON("id", "id")
159 | .WHERE("name").EQUAL_TO("Lee")
160 | .AND("description").IN("desc1", "desc2", "desc3")
161 | .UNION(
162 | SELECT()
163 | .DISTINCT("name", "description")
164 | .WHERE("name").IN(SUM(SUM("lee")))
165 | .AND(SUM("ID")).GREATER_THAN(2)
166 | .GROUP_BY(LOWER("name"))
167 | .FIELD("description")
168 | .HAVING("name").EQUAL_TO("lee")
169 | .ORDER_BY("name").DESC()
170 | .LIMIT(10)
171 | .OFFSET(30)
172 | );
173 | System.out.println(sql8.build().getSql());
174 | CTest.cassertEquals(expected, sql8.build().getSql());
175 | }
176 |
177 | @Test
178 | public void test9() {
179 | String expected =
180 | " SELECT name , SUM ( MAX ( MIN ( length ) ) ) , AVG ( SELECT" +
181 | " FROM dual ) " +
182 | " FROM Users" +
183 | " LEFT JOIN Role" +
184 | " USING name , firstname" +
185 | " RIGHT JOIN User_role" +
186 | " ON" +
187 | " id = id" +
188 | " WHERE name = ?" +
189 | " AND description IN ( ? , ? , ? )" +
190 | " UNION SELECT DISTINCT name ," +
191 | " description" +
192 | " WHERE name IN ( SELECT SUM ( SUM ( lee ) ) ) " +
193 | " GROUP BY lower ( name ) , description " +
194 | " HAVING name = ?" +
195 | " ORDER BY name DESC LIMIT 10 OFFSET 30 ";
196 |
197 | SQL sql9 = SELECT()
198 | .FIELD("name")
199 | .FN(SUM(MAX(MIN("length"))))
200 | .FN(AVG(SELECT().FROM("dual")))
201 | .FROM("Users")
202 | .LEFT_JOIN("Role")
203 | .USING("name", "firstname")
204 | .RIGHT_JOIN("User_role")
205 | .ON("id", "id")
206 | .WHERE("name").EQUAL_TO("Lee")
207 | .AND("description").IN("desc1", "desc2", "desc3")
208 | .UNION(
209 | SELECT()
210 | .DISTINCT("name", "description")
211 | .WHERE("name").IN(SELECT(SUM(SUM("lee"))))
212 | .GROUP_BY(LOWER("name"))
213 | .FIELD("description")
214 | .HAVING("name").EQUAL_TO("lee")
215 | .ORDER_BY("name").DESC()
216 | .LIMIT(10)
217 | .OFFSET(30)
218 | );
219 | System.out.println(sql9.build().getSql());
220 | CTest.cassertEquals(expected, sql9.build().getSql());
221 | }
222 | }
223 |
--------------------------------------------------------------------------------
/src/test/java/com/ivanceras/fluent/TestStringBuilderTechniques.java:
--------------------------------------------------------------------------------
1 | package com.ivanceras.fluent;
2 |
3 | import com.ivanceras.fluent.sql.Breakdown;
4 | import org.junit.*;
5 |
6 | public class TestStringBuilderTechniques {
7 |
8 | private int iterations = 500;
9 |
10 | @BeforeClass
11 | public static void setUpBeforeClass() throws Exception {
12 | }
13 |
14 | @AfterClass
15 | public static void tearDownAfterClass() throws Exception {
16 | }
17 |
18 | @Before
19 | public void setUp() throws Exception {
20 | }
21 |
22 | @After
23 | public void tearDown() throws Exception {
24 | }
25 |
26 | @Test
27 | public void test() {
28 | long t1 = System.currentTimeMillis();
29 | StringBuilder sb = new StringBuilder();
30 | for (int i = 0; i < iterations; i++) {
31 | sb.append(buildString(i));
32 | }
33 | long t2 = System.currentTimeMillis();
34 | StringBuilder sb2 = new StringBuilder();
35 | for (int i = 0; i < iterations; i++) {
36 | buildPassString(sb2, i);
37 | }
38 | long t3 = System.currentTimeMillis();
39 | // System.out.println(sb);
40 | long first = t2 - t1;
41 | long second = t3 - t2;
42 | System.out.println("First: took " + first + " ms ");
43 | System.out.println("Second: took " + second + " ms ");
44 | //assertTrue(second < first);
45 | }
46 |
47 | @Test
48 | public void testBreakdown() {
49 | long t1 = System.currentTimeMillis();
50 | Breakdown bk1 = new Breakdown();
51 | for (int i = 0; i < iterations; i++) {
52 | Breakdown iterBk = buildBreakdown(i);
53 | bk1.append(iterBk.getSql());
54 | for (Object p : iterBk.getParameters()) {
55 | bk1.addParameter(p);
56 | }
57 | }
58 | long t2 = System.currentTimeMillis();
59 | Breakdown bk2 = new Breakdown();
60 | for (int i = 0; i < iterations; i++) {
61 | buildPassBreakdown(bk2, i);
62 | }
63 | long t3 = System.currentTimeMillis();
64 | long first = t2 - t1;
65 | long second = t3 - t2;
66 | System.out.println("Breakdown First: took " + first + " ms ");
67 | System.out.println("Breakdown Second: took " + second + " ms ");
68 | // assertTrue(second < first); //this is not always true
69 | }
70 |
71 | private StringBuilder buildString(int n) {
72 | StringBuilder sb = new StringBuilder();
73 | for (int i = 0; i < n; i++) {
74 | sb.append("StringBuilder ").append(n).append(" - ").append(i).append("\n");
75 | }
76 | return sb;
77 | }
78 |
79 | private void buildPassString(StringBuilder sb, int n) {
80 | for (int i = 0; i < n; i++) {
81 | sb.append("StringBuilder ").append(n).append(" - ").append(i).append("\n");
82 | }
83 | }
84 |
85 | private void buildPassBreakdown(Breakdown bk, int n) {
86 | for (int i = 0; i < n; i++) {
87 | bk.append("StringBuilder " + n + " - " + i + "\n");
88 |             bk.addParameter(Integer.valueOf(i));
89 |             bk.addParameter(Integer.valueOf(n));
90 | }
91 | }
92 |
93 | private Breakdown buildBreakdown(int n) {
94 | Breakdown bk = new Breakdown();
95 | for (int i = 0; i < n; i++) {
96 | bk.append("StringBuilder " + n + " - " + i + "\n");
97 |             bk.addParameter(Integer.valueOf(i));
98 |             bk.addParameter(Integer.valueOf(n));
99 | }
100 | return bk;
101 | }
102 | }
103 |
--------------------------------------------------------------------------------
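
The two benchmarks above time their loops with System.currentTimeMillis(), whose resolution is coarse for work this small, which is presumably why both assertTrue checks ended up commented out. If tighter numbers were ever wanted, System.nanoTime() is the usual elapsed-time alternative, e.g.:

long start = System.nanoTime();
StringBuilder sb = new StringBuilder();
for (int i = 0; i < 500; i++) {
    sb.append("StringBuilder ").append(i).append('\n');
}
long elapsedMicros = (System.nanoTime() - start) / 1_000L;
System.out.println("took " + elapsedMicros + " us");
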
/src/test/java/eu/hadeco/crudapi/MysqlTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. I.Kolchagov, All rights reserved.
3 | * Contact: I.Kolchagov (kolchagov (at) gmail.com)
4 | *
5 |  * The contents of this file are licensed under the terms of the LGPLv3 license.
6 |  * You may read the included file 'lgpl-3.0.txt'
7 | * or https://www.gnu.org/licenses/lgpl-3.0.txt
8 | *
9 | * Software distributed under the License is distributed on an "AS IS" basis,
10 | * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
11 | * for the specific language governing rights and limitations under the License.
12 | *
13 | * The project uses 'fluentsql' internally, licensed under Apache Public License v2.0.
14 | * https://github.com/ivanceras/fluentsql/blob/master/LICENSE.txt
15 | *
16 | */
17 |
18 | package eu.hadeco.crudapi;
19 |
20 | import org.junit.BeforeClass;
21 |
22 | import java.io.BufferedReader;
23 | import java.io.IOException;
24 | import java.io.InputStream;
25 | import java.io.InputStreamReader;
26 | import java.sql.Connection;
27 | import java.sql.SQLException;
28 | import java.sql.Statement;
29 |
30 | public class MysqlTest extends Tests {
31 |
32 | static {
33 | //configure test parameters here
34 | USER = "";
35 | PASS = "";
36 | DB_NAME = "";
37 | SERVER_NAME = "";
38 | SERVER_CLASS = ApiConfig.MYSQL;
39 | }
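   | // Illustrative local values only (assumption, not taken from the original file), e.g.:
   | //   USER = "root"; PASS = "secret"; DB_NAME = "crudtest"; SERVER_NAME = "localhost";
   | // With the empty defaults above, TestBase.setupClass() skips the suite via Assume.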
40 |
41 | @BeforeClass
42 | public static void setupTestClass() {
43 | setupClass();
44 | }
45 |
46 | @Override
47 | public String getEngineName() {
48 | return "MySQL";
49 | }
50 |
51 | @Override
52 | public boolean checkVersion(Connection link) throws SQLException {
53 | boolean isSupported = false;
54 | final String databaseProductVersion = link.getMetaData().getDatabaseProductVersion();
55 | System.out.println(databaseProductVersion);
56 | final String[] versionInfo = databaseProductVersion.split("\\.");
57 | if (versionInfo.length > 2) {
58 | int majorVersion = Integer.valueOf(versionInfo[0]);
59 | isSupported = majorVersion >= 5;
60 | }
61 | return isSupported;
62 | }
63 |
64 | @Override
65 | public int getCapabilities(Connection link) {
66 | int capabilities = 0;
67 | try {
68 | final String databaseProductVersion = link.getMetaData().getDatabaseProductVersion();
69 | final String[] versionInfo = databaseProductVersion.split("\\.");
70 | if (versionInfo.length > 2) {
71 | int minorVersion = Integer.valueOf(versionInfo[1]);
72 | if (minorVersion >= 6) {
73 | capabilities = capabilities | GIS;
74 | }
75 | if (minorVersion >= 7) {
76 | capabilities = capabilities | JSON;
77 | }
78 | }
79 | } catch (SQLException e) {
80 | e.printStackTrace();
81 | }
82 | return capabilities;
83 | }
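   | // Illustrative example (not from the original sources): a product version such as
   | // "5.7.21" splits into {5,7,21}; minor version 7 enables both GIS (>=5.6) and JSON (>=5.7) above.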
84 |
85 | @Override
86 | public void seedDatabase(Connection con, int capabilities) throws SQLException {
87 | try (InputStream stream = MysqlTest.class.getClassLoader().getResourceAsStream("blog_mysql.sql")) {
88 | try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream, "utf8"))) {
89 | executeSQLScript(capabilities, con, reader);
90 | }
91 | } catch (IOException e) {
92 | e.printStackTrace();
93 | }
94 | }
95 |
96 | private void executeSQLScript(int capabilities, Connection conn, BufferedReader reader) throws IOException, SQLException {
97 | StringBuilder sb = new StringBuilder();
98 | conn.setAutoCommit(false);
99 | Statement stmt = conn.createStatement();
100 | while (reader.ready()) {
101 | readSqlStatement(reader, sb);
102 | if (sb.length() > 0) {
103 | String line = sb.toString().trim();
104 | if ((capabilities & JSON) == 0) {
105 | line = line.replaceAll("JSON NOT NULL", "text NOT NULL");
106 | }
107 | if ((capabilities & GIS) == 0) {
108 | line = line.replaceAll("(POINT|POLYGON)( NOT)? NULL", "text\u0002 NULL");
109 | line = line.replaceAll("ST_GeomFromText", "concat");
110 | }
111 | try {
112 | stmt.addBatch(line);
113 | } catch (SQLException ex) {
114 | System.out.println("error line: " + line);
115 | throw ex;
116 | }
117 | }
118 | }
119 | executeBatch(conn, stmt);
120 | conn.setAutoCommit(true);
121 | }
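   | // Illustrative effect of the JSON fallback above (column taken from blog_mysql.sql):
   | // when the server lacks JSON support, "`properties` JSON NOT NULL" is rewritten to
   | // "`properties` text NOT NULL" before the statement is added to the batch.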
122 | }
123 |
--------------------------------------------------------------------------------
/src/test/java/eu/hadeco/crudapi/OracleTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. I.Kolchagov, All rights reserved.
3 | * Contact: I.Kolchagov (kolchagov (at) gmail.com)
4 | *
5 | * The contents of this file are licensed under the terms of the LGPLv3 license.
6 | * You may read the included file 'lgpl-3.0.txt'
7 | * or https://www.gnu.org/licenses/lgpl-3.0.txt
8 | *
9 | * Software distributed under the License is distributed on an "AS IS" basis,
10 | * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
11 | * for the specific language governing rights and limitations under the License.
12 | *
13 | * The project uses 'fluentsql' internally, licensed under Apache Public License v2.0.
14 | * https://github.com/ivanceras/fluentsql/blob/master/LICENSE.txt
15 | *
16 | */
17 |
18 | package eu.hadeco.crudapi;
19 |
20 | import org.junit.BeforeClass;
21 |
22 | import java.io.BufferedReader;
23 | import java.io.IOException;
24 | import java.io.InputStream;
25 | import java.io.InputStreamReader;
26 | import java.sql.CallableStatement;
27 | import java.sql.Connection;
28 | import java.sql.SQLException;
29 | import java.sql.Statement;
30 |
31 | public class OracleTest extends Tests {
32 |
33 | static {
34 | //configure test parameters here
35 | USER = "";
36 | PASS = "";
37 | DB_NAME = "xe";
38 | SERVER_NAME = "localhost";
39 | SERVER_CLASS = ApiConfig.ORACLE;
40 | }
41 |
42 | @BeforeClass
43 | public static void setupTestClass() {
44 | setupClass();
45 | }
46 |
47 | @Override
48 | public String getEngineName() {
49 | return "Oracle";
50 | }
51 |
52 | @Override
53 | public boolean checkVersion(Connection link) throws SQLException {
54 | final String databaseProductString = link.getMetaData().getDatabaseProductVersion();
55 | final String releaseKeyword = "Release ";
56 | int releaseIdx = databaseProductString.lastIndexOf(releaseKeyword) + releaseKeyword.length();
57 | boolean isSupported = releaseIdx > 0;
58 | if (isSupported) {
59 | String[] version = databaseProductString.substring(releaseIdx).split("\\.");
60 | final Integer major = Integer.valueOf(version[0]);
61 | final Integer minor = Integer.valueOf(version[1]);
62 | isSupported = major == 11 ? minor >= 2 : major > 11; //minimum supported JDBC version is 11.2
63 | }
64 | return isSupported;
65 | }
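   | // Illustrative banner (assumed format, not taken from this project): a version string like
   | // "Oracle Database 11g Express Edition Release 11.2.0.2.0" parses to major=11, minor=2,
   | // which checkVersion above accepts as the 11.2 minimum.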
66 |
67 | @Override
68 | public int getCapabilities(Connection link) {
69 | //Oracle Express does not ship a Java VM in the database, and the WKT conversion routines need one because the
70 | // feature is implemented as Java stored procedures. So these WKT routines are not supported on Express Edition.
71 | // https://stackoverflow.com/questions/44832223/oracle-converting-sdo-geometry-to-wkt
72 | int capabilities = 0;
73 | try {
74 | final String databaseProductVersion = link.getMetaData().getDatabaseProductVersion();
75 | if (databaseProductVersion.toLowerCase().contains("express edition")) {
76 | capabilities = JSON;
77 | } else {
78 | capabilities = JSON | GIS;
79 | }
80 | } catch (SQLException e) {
81 | e.printStackTrace();
82 | }
83 | return capabilities;
84 | }
85 |
86 | @Override
87 | public void seedDatabase(Connection con, int capabilities) throws SQLException {
88 | try (InputStream stream = OracleTest.class.getClassLoader().getResourceAsStream("blog_oracle.sql")) {
89 | try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream, "utf8"))) {
90 | executeSQLScript(capabilities, con, reader);
91 | }
92 | } catch (IOException e) {
93 | e.printStackTrace();
94 | }
95 | }
96 |
97 | protected void readSqlStatement(BufferedReader reader, StringBuilder sb) throws IOException {
98 | sb.setLength(0);
99 | //read until ; reached
100 | while (true) {
101 | String line = reader.readLine(); if (line == null) break; line = line.trim(); //guard against EOF
102 | //skip comments
103 | if (line.startsWith("--")) { //NOI18N
104 | break;
105 | }
106 | if (line.startsWith("/")) break;
107 | if (!line.isEmpty())
108 | sb.append(line).append(' ');
109 | }
110 | }
111 |
112 | private void executeSQLScript(int capabilities, Connection conn, BufferedReader reader) throws IOException, SQLException {
113 | StringBuilder sb = new StringBuilder();
114 | dropAllDataObjects(conn);
115 | conn.setAutoCommit(false);
116 | Statement stmt = conn.createStatement();
117 | while (reader.ready()) {
118 | readSqlStatement(reader, sb);
119 | if (sb.length() > 0) {
120 | String line = sb.toString().trim();
121 | //System.out.println(line);
122 | if ((capabilities & GIS) == 0) {
123 | line = line.replaceAll("(SDO_GEOMETRY)( NOT)? NULL", "varchar(255) NULL");
124 | line = line.replaceAll("SDO_GEOMETRY\\('(.*)'\\)", "'$1'");
125 | }
126 | try {
127 | stmt.addBatch(line);
128 | } catch (SQLException ex) {
129 | System.out.println("error line: " + line);
130 | throw ex;
131 | }
132 | }
133 | }
134 | executeBatch(conn, stmt);
135 | conn.setAutoCommit(true);
136 | }
137 |
138 | /**
139 | * Drops all DB objects for current schema (user)
140 | * @param conn DB connection
141 | */
142 | private void dropAllDataObjects(Connection conn) {
143 | try (CallableStatement stmt = conn.prepareCall("BEGIN FOR cur_rec IN (SELECT object_name, object_type FROM user_objects WHERE object_type IN ('TABLE', 'VIEW', 'PACKAGE', 'PROCEDURE', 'FUNCTION', 'SEQUENCE', 'SYNONYM', 'PACKAGE BODY' )) LOOP BEGIN IF cur_rec.object_type = 'TABLE' THEN EXECUTE IMMEDIATE 'DROP ' || cur_rec.object_type || ' \"' || cur_rec.object_name || '\" CASCADE CONSTRAINTS'; ELSE EXECUTE IMMEDIATE 'DROP ' || cur_rec.object_type || ' \"' || cur_rec.object_name || '\"'; END IF; EXCEPTION WHEN OTHERS THEN DBMS_OUTPUT.put_line ( 'FAILED: DROP ' || cur_rec.object_type || ' \"' || cur_rec.object_name || '\"' ); END; END LOOP; END;")) {
144 | stmt.execute();
145 | } catch (SQLException e) {
146 | e.printStackTrace();
147 | }
148 | }
149 |
150 | }
151 |
--------------------------------------------------------------------------------
/src/test/java/eu/hadeco/crudapi/OrderedTestRunner.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. I.Kolchagov, All rights reserved.
3 | * Contact: I.Kolchagov (kolchagov (at) gmail.com)
4 | *
5 | * The contents of this file are licensed under the terms of the LGPLv3 license.
6 | * You may read the included file 'lgpl-3.0.txt'
7 | * or https://www.gnu.org/licenses/lgpl-3.0.txt
8 | *
9 | * Software distributed under the License is distributed on an "AS IS" basis,
10 | * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
11 | * for the specific language governing rights and limitations under the License.
12 | *
13 | * The project uses 'fluentsql' internally, licensed under Apache Public License v2.0.
14 | * https://github.com/ivanceras/fluentsql/blob/master/LICENSE.txt
15 | *
16 | */
17 |
18 | package eu.hadeco.crudapi;/*
19 | * Copyright (C) <2014>
20 | *
21 | *
22 | * Permission is hereby granted, free of charge, to any person obtaining a copy
23 | * of this software and associated documentation files (the "Software"), to deal
24 | * in the Software without restriction, including without limitation the rights
25 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
26 | * copies of the Software, and to permit persons to whom the Software is
27 | * furnished to do so, subject to the following conditions:
28 | *
29 | * The above copyright notice and this permission notice shall be included in
30 | * all copies or substantial portions of the Software.
31 | *
32 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
33 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
34 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
35 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
36 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
37 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
38 | * SOFTWARE.
39 | */
40 |
41 | import javassist.ClassPool;
42 | import javassist.CtClass;
43 | import javassist.CtMethod;
44 | import javassist.NotFoundException;
45 | import org.junit.runners.BlockJUnit4ClassRunner;
46 | import org.junit.runners.model.FrameworkMethod;
47 | import org.junit.runners.model.InitializationError;
48 |
49 | import java.util.*;
50 |
51 | /**
52 | * A test runner that runs tests according to their position in the source file
53 | * of the test class.
54 | *
55 | * @author Michele Bonazza
56 | */
57 | public class OrderedTestRunner extends BlockJUnit4ClassRunner {
58 |
59 | /**
60 | * Creates a new runner
61 | *
62 | * @param clazz the class being tested
63 | * @throws InitializationError if something goes wrong
64 | */
65 | public OrderedTestRunner(Class<?> clazz) throws InitializationError {
66 | super(clazz);
67 | }
68 |
69 | /*
70 | * (non-Javadoc)
71 | *
72 | * @see org.junit.runners.BlockJUnit4ClassRunner#computeTestMethods()
73 | */
74 | @Override
75 | protected List<FrameworkMethod> computeTestMethods() {
76 | // get all methods to be tested
77 | List<FrameworkMethod> toSort = super.computeTestMethods();
78 |
79 | if (toSort.isEmpty())
80 | return toSort;
81 |
82 | // a map containing <line number in source, test method> entries
83 | final Map<Integer, FrameworkMethod> testMethods = new TreeMap<>();
84 |
85 | // check that all methods here are declared in the same class, we don't
86 | // deal with test methods from superclasses that haven't been overridden
87 | Class<?> clazz = getDeclaringClass(toSort);
88 | if (clazz == null) {
89 | // fail explicitly
90 | System.err
91 | .println("OrderedTestRunner can only run test classes that"
92 | + " don't have test methods inherited from superclasses");
93 | return Collections.emptyList();
94 | }
95 |
96 | // use Javassist to figure out line numbers for methods
97 | ClassPool pool = ClassPool.getDefault();
98 | try {
99 | CtClass cc = pool.get(clazz.getName());
100 | // all methods in toSort are declared in the same class, we checked
101 | for (FrameworkMethod m : toSort) {
102 | String methodName = m.getName();
103 | CtMethod method = cc.getDeclaredMethod(methodName);
104 | testMethods.put(method.getMethodInfo().getLineNumber(0), m);
105 | }
106 | } catch (NotFoundException e) {
107 | e.printStackTrace();
108 | }
109 |
110 | return new ArrayList<>(testMethods.values());
111 | }
112 |
113 | private Class<?> getDeclaringClass(List<FrameworkMethod> methods) {
114 | // methods can't be empty, it's been checked
115 | Class<?> clazz = methods.get(0).getMethod().getDeclaringClass();
116 |
117 | for (int i = 1; i < methods.size(); i++) {
118 | if (!methods.get(i).getMethod().getDeclaringClass().equals(clazz)) {
119 | // they must be all in the same class
120 | return null;
121 | }
122 | }
123 |
124 | return clazz;
125 | }
126 | }
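   | // Usage sketch (illustrative; the class and method names below are hypothetical): a suite
   | // opts in with JUnit 4's @RunWith so its @Test methods execute in declaration order, e.g.
   | //
   | //   @RunWith(OrderedTestRunner.class)
   | //   public class MyOrderedTests {
   | //       @Test public void createsRecord() { /* runs first */ }
   | //       @Test public void readsRecord()   { /* runs second */ }
   | //   }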
--------------------------------------------------------------------------------
/src/test/java/eu/hadeco/crudapi/PostgresqlTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. I.Kolchagov, All rights reserved.
3 | * Contact: I.Kolchagov (kolchagov (at) gmail.com)
4 | *
5 | * The contents of this file is licensed under the terms of LGPLv3 license.
6 | * You may read the the included file 'lgpl-3.0.txt'
7 | * or https://www.gnu.org/licenses/lgpl-3.0.txt
8 | *
9 | * Software distributed under the License is distributed on an "AS IS" basis,
10 | * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
11 | * for the specific language governing rights and limitations under the License.
12 | *
13 | * The project uses 'fluentsql' internally, licensed under Apache Public License v2.0.
14 | * https://github.com/ivanceras/fluentsql/blob/master/LICENSE.txt
15 | *
16 | */
17 |
18 | package eu.hadeco.crudapi;
19 |
20 | import org.junit.BeforeClass;
21 |
22 | import java.io.BufferedReader;
23 | import java.io.IOException;
24 | import java.io.InputStream;
25 | import java.io.InputStreamReader;
26 | import java.sql.Connection;
27 | import java.sql.SQLException;
28 | import java.sql.Statement;
29 |
30 | public class PostgresqlTest extends Tests {
31 | static {
32 | //configure test parameters here
33 | USER = "";
34 | PASS = "";
35 | DB_NAME = "";
36 | SERVER_NAME = "";
37 | SERVER_CLASS = ApiConfig.POSTGRESQL;
38 | }
39 |
40 | @BeforeClass
41 | public static void setupTestClass() {
42 | setupClass();
43 | }
44 |
45 | @Override
46 | public String getEngineName() {
47 | return "PostgreSQL";
48 | }
49 |
50 | @Override
51 | public boolean checkVersion(Connection link) throws SQLException {
52 | boolean isSupported = false;
53 | final String databaseProductVersion = link.getMetaData().getDatabaseProductVersion();
54 | System.out.println(databaseProductVersion);
55 | final String[] versionInfo = databaseProductVersion.split("\\.");
56 | if (versionInfo.length > 1) {
57 | int majorVersion = Integer.valueOf(versionInfo[0]);
58 | int minorVersion = Integer.valueOf(versionInfo[1]);
59 | isSupported = majorVersion == 9 ? minorVersion >= 1 : majorVersion > 9; //minimum supported version is 9.1
60 | }
61 | return isSupported;
62 | }
63 |
64 | @Override
65 | public int getCapabilities(Connection link) {
66 | int capabilities = 0;
67 | //todo: check capabilities
68 | return capabilities;
69 | }
70 |
71 | @Override
72 | public void seedDatabase(Connection con, int capabilities) throws SQLException {
73 | try (InputStream stream = PostgresqlTest.class.getClassLoader().getResourceAsStream("blog_postgresql.sql")) {
74 | try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream, "utf8"))) {
75 | executeSQLScript(capabilities, con, reader);
76 | }
77 | } catch (IOException e) {
78 | e.printStackTrace();
79 | }
80 | }
81 |
82 | private void executeSQLScript(int capabilities, Connection conn, BufferedReader reader) throws IOException, SQLException {
83 | StringBuilder sb = new StringBuilder();
84 | conn.setAutoCommit(false);
85 | Statement stmt = conn.createStatement();
86 | while (reader.ready()) {
87 | readSqlStatement(reader, sb);
88 | if (sb.length() > 0) {
89 | String line = sb.toString();
90 | if ((capabilities & JSON) == 0) {
91 | line = line.replaceAll("jsonb NOT NULL", "text NOT NULL");
92 | }
93 | if ((capabilities & GIS) == 0) {
94 | line = line.replaceAll("(POINT|POLYGON)( NOT)? NULL", "text\u0002 NULL");
95 | line = line.replaceAll("ST_GeomFromText", "concat");
96 | line = line.replaceAll("CREATE EXTENSION IF NOT EXISTS postgis;", "");
97 | line = line.replaceAll("geometry", "text");
98 | }
99 | try {
100 | //note: statements are executed one by one here; addBatch/commit remain commented out
101 | // stmt.addBatch(line);
102 | stmt.execute(line);
103 | // conn.commit();
104 | } catch (SQLException ex) {
105 | System.out.println("error line: " + line);
106 | throw ex;
107 | }
108 | }
109 | }
110 | executeBatch(conn, stmt);
111 | conn.setAutoCommit(true);
112 | }
113 |
114 | }
115 |
--------------------------------------------------------------------------------
/src/test/java/eu/hadeco/crudapi/TestApi.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. I.Kolchagov, All rights reserved.
3 | * Contact: I.Kolchagov (kolchagov (at) gmail.com)
4 | *
5 | * The contents of this file are licensed under the terms of the LGPLv3 license.
6 | * You may read the included file 'lgpl-3.0.txt'
7 | * or https://www.gnu.org/licenses/lgpl-3.0.txt
8 | *
9 | * Software distributed under the License is distributed on an "AS IS" basis,
10 | * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
11 | * for the specific language governing rights and limitations under the License.
12 | *
13 | * The project uses 'fluentsql' internally, licensed under Apache Public License v2.0.
14 | * https://github.com/ivanceras/fluentsql/blob/master/LICENSE.txt
15 | *
16 | */
17 |
18 | package eu.hadeco.crudapi;
19 |
20 |
21 | import com.google.gson.JsonParseException;
22 | import com.google.gson.JsonParser;
23 | import net.javacrumbs.jsonunit.JsonAssert;
24 | import org.springframework.mock.web.MockHttpServletRequest;
25 | import org.springframework.mock.web.MockHttpServletResponse;
26 | import org.springframework.util.MultiValueMap;
27 | import org.springframework.web.util.UriComponents;
28 | import org.springframework.web.util.UriComponentsBuilder;
29 |
30 | import java.io.UnsupportedEncodingException;
31 | import java.net.URLDecoder;
32 | import java.sql.Connection;
33 | import java.util.List;
34 |
35 | import static org.junit.Assert.assertEquals;
36 | import static org.junit.Assert.fail;
37 |
38 | public class TestApi {
39 | private static final JsonParser parser = new JsonParser();
40 | private final TestBase test;
41 | private String baseUrl;
42 | private String method;
43 | private String data;
44 |
45 | TestApi(TestBase test) {
46 | this.test = test;
47 | }
48 |
49 |
50 | private void setBaseUrl(String relativeUrlWithParams) {
51 | baseUrl = String.format("http://localhost:8080%s", relativeUrlWithParams);
52 | }
53 |
54 | public void expect(String expected) {
55 | expect(true, expected);
56 | }
57 |
58 | public void expect(boolean isOkResponse, String expected) {
59 | try {
60 | final MockHttpServletRequest req = getMockHttpServletRequest();
61 | final MockHttpServletResponse resp = new MockHttpServletResponse();
62 | //todo check why some tests are failing without it
63 | try (Connection link = test.connect()) {
64 | final ApiConfig apiConfig = TestBase.getApiConfig();
65 | RequestHandler.handle(req, resp, apiConfig);
66 | String actual = resp.getContentAsString();
67 | assertEquals("expected ok response, got: " + actual, isOkResponse, resp.getStatus() < 400);
68 | if (expected != null) {
69 | if (isOkResponse) {
70 | try {
71 | JsonAssert.assertJsonEquals(expected, actual);
72 | } catch(AssertionError ex) {
73 | System.out.println("*** expected and actual JSON output:");
74 | System.out.println(expected);
75 | System.out.println(actual);
76 | throw ex;
77 | }
78 | } else {
79 | assertEquals(expected, actual);
80 | }
81 | }
82 | }
83 | } catch (Exception e) {
84 | e.printStackTrace();
85 | fail(e.getMessage());
86 | } finally {
87 | this.data = null;
88 | }
89 | }
90 |
91 | public void expectAny() {
92 | expect(true, null);
93 | }
94 |
95 | private MockHttpServletRequest getMockHttpServletRequest() throws UnsupportedEncodingException {
96 | final MockHttpServletRequest req = new MockHttpServletRequest();
97 | req.setServerName("localhost");
98 | req.setMethod(method);
99 | final UriComponents build = UriComponentsBuilder.fromUriString(this.baseUrl).build();
100 | req.setPathInfo(build.getPath());
101 | req.setQueryString(build.getQuery());
102 | setParamsFromBuild(req, build);
103 | if (data != null) {
104 | try {
105 | if (data.endsWith("__is_null")) throw new JsonParseException("");
106 | //invalid json test expects json content
107 | if (!"{\"}".equals(data)) {
108 | parser.parse(data);
109 | }
110 | req.setContentType("application/json");
111 | } catch (JsonParseException ignored) {
112 | req.setContentType("application/x-www-form-urlencoded");
113 | final String url = "/?" + URLDecoder.decode(data, "utf8");
114 | setParamsFromBuild(req, UriComponentsBuilder.fromUriString(url).build());
115 | }
116 | req.setContent(data.getBytes("utf8"));
117 | }
118 | return req;
119 | }
120 |
121 | private void setParamsFromBuild(MockHttpServletRequest req, UriComponents build) {
122 | MultiValueMap<String, String> parameters = build.getQueryParams();
123 | for (String param : parameters.keySet()) {
124 | final List<String> values = parameters.get(param);
125 | req.setParameter(param, values.toArray(new String[values.size()]));
126 | }
127 | if(!parameters.containsKey("transform")) req.setParameter("transform", new String[] {"0"});
128 | }
129 |
130 | public void get(String relativeUrlWithParams) {
131 | setBaseUrl(relativeUrlWithParams);
132 | this.method = "GET";
133 | }
134 |
135 | public void post(String url, String data) {
136 | prepareRequestWithData("POST", url, data);
137 | }
138 |
139 | private void prepareRequestWithData(String method, String url, String data) {
140 | setBaseUrl(url);
141 | this.method = method;
142 | this.data = data;
143 | }
144 |
145 | public void put(String url, String data) {
146 | prepareRequestWithData("PUT", url, data);
147 | }
148 |
149 | public void delete(String url) {
150 | setBaseUrl(url);
151 | this.method = "DELETE";
152 | }
153 |
154 | public void options(String url) {
155 | setBaseUrl(url);
156 | this.method = "OPTIONS";
157 | }
158 |
159 | public void patch(String url, String data) {
160 | prepareRequestWithData("PATCH", url, data);
161 | }
162 | }
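   | // Usage sketch (illustrative; the URL and JSON below are hypothetical): a TestBase subclass
   | // drives a request and asserts on the mocked servlet response, e.g.
   | //
   | //   TestApi api = new TestApi(this);   // 'this' is a TestBase instance
   | //   api.get("/categories/1");
   | //   api.expect("{\"id\":1,\"name\":\"announcement\",\"icon\":null}");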
163 |
--------------------------------------------------------------------------------
/src/test/java/eu/hadeco/crudapi/TestBase.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. I.Kolchagov, All rights reserved.
3 | * Contact: I.Kolchagov (kolchagov (at) gmail.com)
4 | *
5 | * The contents of this file are licensed under the terms of the LGPLv3 license.
6 | * You may read the included file 'lgpl-3.0.txt'
7 | * or https://www.gnu.org/licenses/lgpl-3.0.txt
8 | *
9 | * Software distributed under the License is distributed on an "AS IS" basis,
10 | * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
11 | * for the specific language governing rights and limitations under the License.
12 | *
13 | * The project uses 'fluentsql' internally, licensed under Apache Public License v2.0.
14 | * https://github.com/ivanceras/fluentsql/blob/master/LICENSE.txt
15 | *
16 | */
17 |
18 | package eu.hadeco.crudapi;
19 |
20 | import org.junit.AfterClass;
21 | import org.junit.Assume;
22 |
23 | import javax.servlet.http.HttpServletRequest;
24 | import java.io.BufferedReader;
25 | import java.io.IOException;
26 | import java.sql.Connection;
27 | import java.sql.SQLException;
28 | import java.sql.Statement;
29 | import java.util.Map;
30 | import java.util.logging.Logger;
31 |
32 | import static eu.hadeco.crudapi.CrudApiHandler.TAG_FILTER;
33 |
34 | public abstract class TestBase {
35 |
36 | //configure test DB parameters in the corresponding test classes!!!
37 | protected static String USER = null;
38 | protected static String PASS = null;
39 | protected static String DB_NAME = null;
40 | protected static String SERVER_NAME = null;
41 | protected static String SERVER_CLASS = null;
42 |
43 | public static final int GIS = 1;
44 | public static final int JSON = 2;
45 | public static final Logger LOG = Logger.getLogger(TestBase.class.getName());
46 |
47 | protected static ApiConfig apiConfig;
48 |
49 | public static ApiConfig getApiConfig() {
50 | return apiConfig;
51 | }
52 |
53 | @AfterClass
54 | public static void tearDown() {
55 | ApiConfig.clearCachedTableMeta();
56 | apiConfig = null; //prepare for next Test from the suite
57 | }
58 |
59 | /**
60 | * Initialize ApiConfig here
61 | */
62 | public static void setupClass() {
63 | if (apiConfig == null) {
64 | if(hasEmptyConfigurationParameters(USER, PASS, DB_NAME, SERVER_NAME, SERVER_CLASS)) {
65 | LOG.severe("Server class is not configured!");
66 | Assume.assumeTrue(false);
67 | }
68 | apiConfig = new ApiConfig(USER, PASS, DB_NAME, SERVER_NAME, SERVER_CLASS) {
69 | @Override
70 | public boolean columnAuthorizer(RequestHandler.Actions action, String database, String table, String column) {
71 | return !("password".equals(column) && RequestHandler.Actions.LIST.equals(action));
72 | }
73 |
74 | @Override
75 | public String[] recordFilter(RequestHandler.Actions action, String database, String table) {
76 | return "posts".equals(table) ? new String[]{"id,neq,13"} : null;
77 | }
78 |
79 | @Override
80 | public Object tenancyFunction(RequestHandler.Actions action, String database, String table, String column) {
81 | return "users".equals(table) && "id".equals(column) ? 1 : null;
82 | }
83 |
84 | @Override
85 | public Object inputSanitizer(RequestHandler.Actions action, String database, String table, String column, String type, Object value, HttpServletRequest context) {
86 | return value instanceof String ? TAG_FILTER.matcher(((String) value)).replaceAll("") : value;
87 | }
88 |
89 | @Override
90 | public Object inputValidator(RequestHandler.Actions action, String database, String table, String column, String type, Object value, HttpServletRequest context) {
91 | // ($column=='category_id' && !is_numeric($value))?'must be numeric':true;
92 | return "category_id".equals(column) && !(value instanceof Long) ? "must be numeric" : true;
93 | }
94 |
95 | @Override
96 | public RequestHandler.Actions before(RequestHandler.Actions action, String database, String table, String[] ids, Map input) {
97 | if ("products".equals(table)) {
98 | if (action == RequestHandler.Actions.CREATE) {
99 | input.put("created_at", "2013-12-11 10:09:08");
100 | } else if (action == RequestHandler.Actions.DELETE) {
101 | action = RequestHandler.Actions.UPDATE;
102 | input.put("deleted_at", "2013-12-11 11:10:09");
103 | }
104 | }
105 | return action;
106 | }
107 | };
108 | }
109 | }
110 |
111 | private static boolean isNumeric(Object value){
112 | return (value instanceof Long || value instanceof Double);
113 | }
114 |
115 | private static boolean hasEmptyConfigurationParameters(String... parameters) {
116 | for (String parameter : parameters) {
117 | if(parameter == null || parameter.isEmpty()) {
118 | return true;
119 | }
120 | }
121 | return false;
122 | }
123 |
124 | public abstract String getEngineName();
125 |
126 | public Connection connect() throws SQLException {
127 | return apiConfig.getConnection();
128 | }
129 |
130 | public abstract boolean checkVersion(Connection link) throws SQLException;
131 |
132 | public abstract int getCapabilities(Connection link);
133 |
134 | public abstract void seedDatabase(Connection connection, int capabilities) throws SQLException;
135 |
136 | protected void readSqlStatement(BufferedReader reader, StringBuilder sb) throws IOException {
137 | sb.setLength(0);
138 | //read until ; reached
139 | while (true) {
140 | String line = reader.readLine(); if (line == null) break; line = line.trim(); //guard against EOF
141 | //skip comments
142 | if(line.equals("GO")) break;
143 | if (line.startsWith("--")) { //NOI18N
144 | break;
145 | }
146 | if(!line.isEmpty())
147 | sb.append(line).append(' ');
148 | if (line.endsWith(";")) { //NOI18N
149 | break;
150 | }
151 | }
152 | }
153 |
154 | protected void executeBatch(Connection conn, Statement stmt) throws SQLException {
155 | try {
156 | stmt.executeBatch();
157 | conn.commit();
158 | } catch (SQLException ex) {
159 | LOG.severe(ex.getMessage());
160 | throw ex;
161 | }
162 | }
163 | }
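   | // Note (summary, not original source text): engine-specific suites such as MysqlTest,
   | // PostgresqlTest and OracleTest extend this class, fill the connection constants in a
   | // static block, implement the four abstract hooks above, and call setupClass() from a
   | // @BeforeClass method so a shared ApiConfig with the test callbacks is built once.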
--------------------------------------------------------------------------------
/src/test/resources/blog_mysql.sql:
--------------------------------------------------------------------------------
1 | -- Adminer 4.2.4 MySQL dump
2 |
3 | SET NAMES utf8;
4 | SET time_zone = '+00:00';
5 | SET foreign_key_checks = 0;
6 | SET sql_mode = 'NO_AUTO_VALUE_ON_ZERO';
7 |
8 | DROP TABLE IF EXISTS `categories`;
9 | CREATE TABLE `categories` (
10 | `id` int(11) NOT NULL AUTO_INCREMENT,
11 | `name` varchar(255) NOT NULL,
12 | `icon` blob NULL,
13 | PRIMARY KEY (`id`)
14 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
15 |
16 | INSERT INTO `categories` (`id`, `name`, `icon`) VALUES
17 | (1, 'announcement', NULL),
18 | (2, 'article', NULL);
19 |
20 | DROP TABLE IF EXISTS `comments`;
21 | CREATE TABLE `comments` (
22 | `id` int(11) NOT NULL AUTO_INCREMENT,
23 | `post_id` int(11) NOT NULL,
24 | `message` varchar(255) NOT NULL,
25 | PRIMARY KEY (`id`),
26 | KEY `post_id` (`post_id`),
27 | CONSTRAINT `comments_ibfk_1` FOREIGN KEY (`post_id`) REFERENCES `posts` (`id`)
28 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
29 |
30 | INSERT INTO `comments` (`id`, `post_id`, `message`) VALUES
31 | (1, 1, 'great'),
32 | (2, 1, 'fantastic'),
33 | (3, 2, 'thank you'),
34 | (4, 2, 'awesome');
35 |
36 | DROP TABLE IF EXISTS `posts`;
37 | CREATE TABLE `posts` (
38 | `id` int(11) NOT NULL AUTO_INCREMENT,
39 | `user_id` int(11) NOT NULL,
40 | `category_id` int(11) NOT NULL,
41 | `content` varchar(255) NOT NULL,
42 | PRIMARY KEY (`id`),
43 | KEY `category_id` (`category_id`),
44 | KEY `user_id` (`user_id`),
45 | CONSTRAINT `posts_ibfk_3` FOREIGN KEY (`category_id`) REFERENCES `categories` (`id`),
46 | CONSTRAINT `posts_ibfk_4` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`)
47 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
48 |
49 | INSERT INTO `posts` (`id`, `user_id`, `category_id`, `content`) VALUES
50 | (1, 1, 1, 'blog started'),
51 | (2, 1, 2, 'It works!');
52 |
53 | DROP TABLE IF EXISTS `post_tags`;
54 | CREATE TABLE `post_tags` (
55 | `id` int(11) NOT NULL AUTO_INCREMENT,
56 | `post_id` int(11) NOT NULL,
57 | `tag_id` int(11) NOT NULL,
58 | PRIMARY KEY (`id`),
59 | KEY `post_id` (`post_id`),
60 | KEY `tag_id` (`tag_id`),
61 | CONSTRAINT `post_tags_ibfk_1` FOREIGN KEY (`post_id`) REFERENCES `posts` (`id`),
62 | CONSTRAINT `post_tags_ibfk_2` FOREIGN KEY (`tag_id`) REFERENCES `tags` (`id`)
63 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
64 |
65 | INSERT INTO `post_tags` (`id`, `post_id`, `tag_id`) VALUES
66 | (1, 1, 1),
67 | (2, 1, 2),
68 | (3, 2, 1),
69 | (4, 2, 2);
70 |
71 | DROP TABLE IF EXISTS `tags`;
72 | CREATE TABLE `tags` (
73 | `id` int(11) NOT NULL AUTO_INCREMENT,
74 | `name` varchar(255) NOT NULL,
75 | PRIMARY KEY (`id`)
76 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
77 |
78 | INSERT INTO `tags` (`id`, `name`) VALUES
79 | (1, 'funny'),
80 | (2, 'important');
81 |
82 | DROP TABLE IF EXISTS `users`;
83 | CREATE TABLE `users` (
84 | `id` int(11) NOT NULL AUTO_INCREMENT,
85 | `username` varchar(255) NOT NULL,
86 | `password` varchar(255) NOT NULL,
87 | `location` point NULL,
88 | PRIMARY KEY (`id`)
89 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
90 |
91 | INSERT INTO `users` (`id`, `username`, `password`, `location`) VALUES
92 | (1, 'user1', 'pass1', null),
93 | (2, 'user2', 'pass2', null);
94 |
95 | DROP TABLE IF EXISTS `countries`;
96 | CREATE TABLE `countries` (
97 | `id` int(11) NOT NULL AUTO_INCREMENT,
98 | `name` varchar(255) NOT NULL,
99 | `shape` polygon NOT NULL,
100 | PRIMARY KEY (`id`)
101 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
102 |
103 | INSERT INTO `countries` (`id`, `name`, `shape`) VALUES
104 | (1, 'Left', ST_GeomFromText('POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))')),
105 | (2, 'Right', ST_GeomFromText('POLYGON ((70 10, 80 40, 60 40, 50 20, 70 10))'));
106 |
107 | DROP TABLE IF EXISTS `events`;
108 | CREATE TABLE `events` (
109 | `id` int(11) NOT NULL AUTO_INCREMENT,
110 | `name` varchar(255) NOT NULL,
111 | `datetime` datetime NOT NULL,
112 | `visitors` int(11) NOT NULL,
113 | PRIMARY KEY (`id`)
114 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
115 |
116 | INSERT INTO `events` (`id`, `name`, `datetime`, `visitors`) VALUES
117 | (1, 'Launch', '2016-01-01 13:01:01', 0);
118 |
119 | DROP VIEW IF EXISTS `tag_usage`;
120 | CREATE VIEW `tag_usage` AS select `name`, count(`name`) AS `count` from `tags`, `post_tags` where `tags`.`id` = `post_tags`.`tag_id` group by `name` order by `count` desc, `name`;
121 |
122 | DROP TABLE IF EXISTS `products`;
123 | CREATE TABLE `products` (
124 | `id` int(11) NOT NULL AUTO_INCREMENT,
125 | `name` varchar(255) NOT NULL,
126 | `price` decimal(10,2) NOT NULL,
127 | `properties` JSON NOT NULL,
128 | `created_at` datetime NOT NULL,
129 | `deleted_at` datetime NULL,
130 | PRIMARY KEY (`id`)
131 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
132 |
133 | INSERT INTO `products` (`id`, `name`, `price`, `properties`, `created_at`) VALUES
134 | (1, 'Calculator', '23.01', '{"depth":false,"model":"TRX-120","width":100,"height":null}', '1970-01-01 01:01:01');
135 |
136 | DROP TABLE IF EXISTS `barcodes`;
137 | CREATE TABLE `barcodes` (
138 | `id` int(11) NOT NULL AUTO_INCREMENT,
139 | `product_id` int(11) NOT NULL,
140 | `hex` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL,
141 | `bin` varbinary(255) NOT NULL,
142 | PRIMARY KEY (`id`),
143 | CONSTRAINT `barcodes_ibfk_1` FOREIGN KEY (`product_id`) REFERENCES `products` (`id`)
144 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
145 |
146 | INSERT INTO `barcodes` (`id`, `product_id`, `hex`, `bin`) VALUES
147 | (1, 1, '00ff01', UNHEX('00ff01'));
148 | -- parameters table definition
149 |
150 | DROP TABLE IF EXISTS `parameters`;
151 | CREATE TABLE `parameters` (
152 | `key` varchar(255) NOT NULL,
153 | `value` text,
154 | PRIMARY KEY (`key`)
155 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
156 | -- 2016-11-05 13:11:47
157 |
--------------------------------------------------------------------------------
/src/test/resources/blog_oracle.sql:
--------------------------------------------------------------------------------
1 | alter session set nls_date_format = 'YYYY-MM-DD HH24:MI:SS'
2 | /
3 | alter session set ddl_lock_timeout = 900
4 | /
5 |
6 |
7 | CREATE TABLE categories (
8 | id number(11) NOT NULL,
9 | name varchar(255) NOT NULL,
10 | icon blob NULL,
11 | PRIMARY KEY (id)
12 | )
13 | /
14 |
15 |
16 | CREATE SEQUENCE categories_seq start with 1 increment by 1 nocycle
17 | /
18 |
19 | CREATE OR REPLACE TRIGGER categories_autoinc
20 | BEFORE INSERT ON categories
21 | FOR EACH ROW
22 | declare
23 | v_next_id number;
24 | BEGIN
25 | v_next_id := categories_seq.NEXTVAL;
26 | if :new.id is not null then
27 | v_next_id := :new.id;
28 | end if;
29 | :new.id := v_next_id;
30 | END;
31 | /
32 |
33 | INSERT INTO categories (id, name, icon) VALUES (1, 'announcement', NULL)
34 | /
35 | INSERT INTO categories (id, name, icon) VALUES (2, 'article', NULL)
36 | /
37 |
38 |
39 | CREATE TABLE users (
40 | id number(11) NOT NULL,
41 | username varchar(255) NOT NULL,
42 | password varchar(255) NOT NULL,
43 | location SDO_GEOMETRY NULL,
44 | PRIMARY KEY (id)
45 | )
46 | /
47 |
48 | CREATE SEQUENCE users_seq start with 1 increment by 1 nocycle
49 | /
50 |
51 | CREATE OR REPLACE TRIGGER users_autoinc
52 | BEFORE INSERT ON users
53 | FOR EACH ROW
54 | declare
55 | v_next_id number;
56 | BEGIN
57 | v_next_id := users_seq.NEXTVAL;
58 | if :new.id is not null then
59 | v_next_id := :new.id;
60 | end if;
61 | :new.id := v_next_id;
62 | END;
63 | /
64 |
65 | INSERT INTO users (id, username, password, location) VALUES (1, 'user1', 'pass1', null)
66 | /
67 | INSERT INTO users (id, username, password, location) VALUES (2, 'user2', 'pass2', null)
68 | /
69 |
70 |
71 | CREATE TABLE posts (
72 | id number(11) NOT NULL,
73 | user_id number(11) NOT NULL,
74 | category_id number(11) NOT NULL,
75 | content varchar(255) NOT NULL,
76 | PRIMARY KEY (id),
77 | CONSTRAINT posts_ibfk_3 FOREIGN KEY (category_id) REFERENCES categories (id),
78 | CONSTRAINT posts_ibfk_4 FOREIGN KEY (user_id) REFERENCES users (id)
79 | )
80 | /
81 |
82 | CREATE SEQUENCE posts_seq start with 1 increment by 1 nocycle
83 | /
84 |
85 | CREATE OR REPLACE TRIGGER posts_autoinc
86 | BEFORE INSERT ON posts
87 | FOR EACH ROW
88 | declare
89 | v_next_id number;
90 | BEGIN
91 | v_next_id := posts_seq.NEXTVAL;
92 | if :new.id is not null then
93 | v_next_id := :new.id;
94 | end if;
95 | :new.id := v_next_id;
96 | END;
97 | /
98 |
99 | INSERT INTO posts (id, user_id, category_id, content) VALUES (1, 1, 1, 'blog started')
100 | /
101 | INSERT INTO posts (id, user_id, category_id, content) VALUES (2, 1, 2, 'It works!')
102 | /
103 |
104 | CREATE TABLE comments (
105 | id number(11) NOT NULL,
106 | post_id number(11) NOT NULL,
107 | message varchar(255) NOT NULL,
108 | PRIMARY KEY (id),
109 | CONSTRAINT comments_ibfk_1 FOREIGN KEY (post_id) REFERENCES posts (id)
110 | )
111 | /
112 |
113 | CREATE SEQUENCE comments_seq start with 1 increment by 1 nocycle
114 | /
115 |
116 | CREATE OR REPLACE TRIGGER comments_autoinc
117 | BEFORE INSERT ON comments
118 | FOR EACH ROW
119 | declare
120 | v_next_id number;
121 | BEGIN
122 | v_next_id := comments_seq.NEXTVAL;
123 | if :new.id is not null then
124 | v_next_id := :new.id;
125 | end if;
126 | :new.id := v_next_id;
127 | END;
128 | /
129 |
130 | INSERT INTO comments (id, post_id, message) VALUES (1, 1, 'great')
131 | /
132 | INSERT INTO comments (id, post_id, message) VALUES (2, 1, 'fantastic')
133 | /
134 | INSERT INTO comments (id, post_id, message) VALUES (3, 2, 'thank you')
135 | /
136 | INSERT INTO comments (id, post_id, message) VALUES (4, 2, 'awesome')
137 | /
138 |
139 |
140 | CREATE TABLE tags (
141 | id number(11) NOT NULL,
142 | name varchar(255) NOT NULL,
143 | PRIMARY KEY (id)
144 | )
145 | /
146 |
147 | CREATE SEQUENCE tags_seq start with 1 increment by 1 nocycle
148 | /
149 |
150 | CREATE OR REPLACE TRIGGER tags_autoinc
151 | BEFORE INSERT ON tags
152 | FOR EACH ROW
153 | declare
154 | v_next_id number;
155 | BEGIN
156 | v_next_id := tags_seq.NEXTVAL;
157 | if :new.id is not null then
158 | v_next_id := :new.id;
159 | end if;
160 | :new.id := v_next_id;
161 | END;
162 | /
163 |
164 | INSERT INTO tags (id, name) VALUES (1, 'funny')
165 | /
166 | INSERT INTO tags (id, name) VALUES (2, 'important')
167 | /
168 |
169 |
170 | CREATE TABLE post_tags (
171 | id number(11) NOT NULL,
172 | post_id number(11) NOT NULL,
173 | tag_id number(11) NOT NULL,
174 | PRIMARY KEY (id),
175 | CONSTRAINT post_tags_ibfk_1 FOREIGN KEY (post_id) REFERENCES posts (id),
176 | CONSTRAINT post_tags_ibfk_2 FOREIGN KEY (tag_id) REFERENCES tags (id)
177 | )
178 | /
179 |
180 | CREATE SEQUENCE post_tags_seq start with 1 increment by 1 nocycle
181 | /
182 |
183 | CREATE OR REPLACE TRIGGER post_tags_autoinc
184 | BEFORE INSERT ON post_tags
185 | FOR EACH ROW
186 | declare
187 | v_next_id number;
188 | BEGIN
189 | v_next_id := post_tags_seq.NEXTVAL;
190 | if :new.id is not null then
191 | v_next_id := :new.id;
192 | end if;
193 | :new.id := v_next_id;
194 | END;
195 | /
196 |
197 | INSERT INTO post_tags (id, post_id, tag_id) VALUES (1, 1, 1)
198 | /
199 | INSERT INTO post_tags (id, post_id, tag_id) VALUES (2, 1, 2)
200 | /
201 | INSERT INTO post_tags (id, post_id, tag_id) VALUES (3, 2, 1)
202 | /
203 | INSERT INTO post_tags (id, post_id, tag_id) VALUES (4, 2, 2)
204 | /
205 |
206 | CREATE TABLE countries (
207 | id number(11) NOT NULL,
208 | name varchar(255) NOT NULL,
209 | shape SDO_GEOMETRY NOT NULL,
210 | PRIMARY KEY (id)
211 | )
212 | /
213 |
214 | CREATE SEQUENCE countries_seq start with 1 increment by 1 nocycle
215 | /
216 |
217 | CREATE OR REPLACE TRIGGER countries_autoinc
218 | BEFORE INSERT ON countries
219 | FOR EACH ROW
220 | declare
221 | v_next_id number;
222 | BEGIN
223 | v_next_id := countries_seq.NEXTVAL;
224 | if :new.id is not null then
225 | v_next_id := :new.id;
226 | end if;
227 | :new.id := v_next_id;
228 | END;
229 | /
230 |
231 | INSERT INTO countries (id, name, shape) VALUES (1, 'Left', SDO_GEOMETRY('POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))'))
232 | /
233 | INSERT INTO countries (id, name, shape) VALUES (2, 'Right', SDO_GEOMETRY('POLYGON ((70 10, 80 40, 60 40, 50 20, 70 10))'))
234 | /
235 |
236 |
237 | CREATE TABLE events (
238 | id number(11) NOT NULL,
239 | name varchar(255) NOT NULL,
240 | datetime date NOT NULL,
241 | visitors number(11) NOT NULL,
242 | PRIMARY KEY (id)
243 | )
244 | /
245 |
246 | CREATE SEQUENCE events_seq start with 1 increment by 1 nocycle
247 | /
248 |
249 | CREATE OR REPLACE TRIGGER events_autoinc
250 | BEFORE INSERT ON events
251 | FOR EACH ROW
252 | declare
253 | v_next_id number;
254 | BEGIN
255 | v_next_id := events_seq.NEXTVAL;
256 | if :new.id is not null then
257 | v_next_id := :new.id;
258 | end if;
259 | :new.id := v_next_id;
260 | END;
261 | /
262 |
263 |
264 | INSERT INTO events (id, name, datetime, visitors) VALUES (1, 'Launch', '2016-01-01 13:01:01', 0)
265 | /
266 |
267 | CREATE VIEW tag_usage AS select name, count(name) AS count from tags, post_tags where tags.id = post_tags.tag_id group by name order by count desc, name
268 | /
269 |
270 |
271 |
272 | CREATE TABLE products (
273 | id number(11) NOT NULL,
274 | name varchar(255) NOT NULL,
275 | price decimal(10,2) NOT NULL,
276 | properties clob NOT NULL,
277 | created_at date NOT NULL,
278 | deleted_at date NULL,
279 | PRIMARY KEY (id)
280 | )
281 | /
282 |
283 | CREATE SEQUENCE products_seq start with 1 increment by 1 nocycle
284 | /
285 |
286 | CREATE OR REPLACE TRIGGER products_autoinc
287 | BEFORE INSERT ON products
288 | FOR EACH ROW
289 | declare
290 | v_next_id number;
291 | BEGIN
292 | v_next_id := products_seq.NEXTVAL;
293 | if :new.id is not null then
294 | v_next_id := :new.id;
295 | end if;
296 | :new.id := v_next_id;
297 | END;
298 | /
299 |
300 | INSERT INTO products (id, name, price, properties, created_at) VALUES (1, 'Calculator', '23.01', '{"depth":false,"model":"TRX-120","width":100,"height":null}', '1970-01-01 01:01:01')
301 | /
302 |
303 |
304 | CREATE TABLE barcodes (
305 | id number(11) NOT NULL,
306 | product_id number(11) NOT NULL,
307 | hex varchar(255) NOT NULL,
308 | bin blob NOT NULL,
309 | PRIMARY KEY (id),
310 | CONSTRAINT barcodes_ibfk_1 FOREIGN KEY (product_id) REFERENCES products (id)
311 | )
312 | /
313 |
314 | CREATE SEQUENCE barcodes_seq start with 1 increment by 1 nocycle
315 | /
316 |
317 | CREATE OR REPLACE TRIGGER barcodes_autoinc
318 | BEFORE INSERT ON barcodes
319 | FOR EACH ROW
320 | declare
321 | v_next_id number;
322 | BEGIN
323 | v_next_id := barcodes_seq.NEXTVAL;
324 | if :new.id is not null then
325 | v_next_id := :new.id;
326 | end if;
327 | :new.id := v_next_id;
328 | END;
329 | /
330 |
331 | INSERT INTO barcodes (id, product_id, hex, bin) VALUES (1, 1, '00ff01', HEXTORAW('00ff01'))
332 | /
--------------------------------------------------------------------------------
/src/test/resources/blog_postgresql.sql:
--------------------------------------------------------------------------------
1 | --
2 | -- PostgreSQL database dump
3 | --
4 |
5 | SET statement_timeout = 0;
6 | SET client_encoding = 'UTF8';
7 | SET standard_conforming_strings = on;
8 | SET check_function_bodies = false;
9 | SET client_min_messages = warning;
10 |
11 | SET search_path = public, pg_catalog;
12 |
13 | SET default_tablespace = '';
14 |
15 | SET default_with_oids = false;
16 |
17 | --
18 | -- Drop everything
19 | --
20 |
21 | DROP TABLE IF EXISTS categories CASCADE;
22 | DROP TABLE IF EXISTS comments CASCADE;
23 | DROP TABLE IF EXISTS post_tags CASCADE;
24 | DROP TABLE IF EXISTS posts CASCADE;
25 | DROP TABLE IF EXISTS tags CASCADE;
26 | DROP TABLE IF EXISTS users CASCADE;
27 | DROP TABLE IF EXISTS countries CASCADE;
28 | DROP TABLE IF EXISTS events CASCADE;
29 | DROP VIEW IF EXISTS tag_usage;
30 | DROP TABLE IF EXISTS products CASCADE;
31 | DROP TABLE IF EXISTS barcodes CASCADE;
32 | DROP TABLE IF EXISTS PARAMETERS;
33 |
34 | CREATE TABLE PARAMETERS (
35 | key varchar(255) NOT NULL,
36 | value text,
37 | PRIMARY KEY (key)
38 | );
39 | --
40 | -- Enables the Postgis extension
41 | --
42 |
43 | CREATE EXTENSION IF NOT EXISTS postgis;
44 |
45 | --
46 | -- Name: categories; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
47 | --
48 |
49 | CREATE TABLE categories (
50 | id serial NOT NULL,
51 | name character varying(255) NOT NULL,
52 | icon bytea
53 | );
54 |
55 |
56 | --
57 | -- Name: comments; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
58 | --
59 |
60 | CREATE TABLE comments (
61 | id serial NOT NULL,
62 | post_id integer NOT NULL,
63 | message character varying(255) NOT NULL
64 | );
65 |
66 |
67 | --
68 | -- Name: post_tags; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
69 | --
70 |
71 | CREATE TABLE post_tags (
72 | id serial NOT NULL,
73 | post_id integer NOT NULL,
74 | tag_id integer NOT NULL
75 | );
76 |
77 |
78 | --
79 | -- Name: posts; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
80 | --
81 |
82 | CREATE TABLE posts (
83 | id serial NOT NULL,
84 | user_id integer NOT NULL,
85 | category_id integer NOT NULL,
86 | content character varying(255) NOT NULL
87 | );
88 |
89 |
90 | --
91 | -- Name: tags; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
92 | --
93 |
94 | CREATE TABLE tags (
95 | id serial NOT NULL,
96 | name character varying(255) NOT NULL
97 | );
98 |
99 |
100 | --
101 | -- Name: users; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
102 | --
103 |
104 | CREATE TABLE users (
105 | id serial NOT NULL,
106 | username character varying(255) NOT NULL,
107 | password character varying(255) NOT NULL,
108 | location geometry NULL
109 | );
110 |
111 | --
112 | -- Name: countries; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
113 | --
114 |
115 | CREATE TABLE countries (
116 | id serial NOT NULL,
117 | name character varying(255) NOT NULL,
118 | shape geometry NOT NULL
119 | );
120 |
121 | --
122 | -- Name: events; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
123 | --
124 |
125 | CREATE TABLE events (
126 | id serial NOT NULL,
127 | name character varying(255) NOT NULL,
128 | datetime timestamp NOT NULL,
129 | visitors integer NOT NULL
130 | );
131 |
132 | --
133 | -- Name: tag_usage; Type: VIEW; Schema: public; Owner: postgres; Tablespace:
134 | --
135 |
136 | CREATE VIEW "tag_usage" AS select "name", count("name") AS "count" from "tags", "post_tags" where "tags"."id" = "post_tags"."tag_id" group by "name" order by "count" desc, "name";
137 |
138 | --
139 | -- Name: products; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
140 | --
141 |
142 | CREATE TABLE products (
143 | id serial NOT NULL,
144 | name character varying(255) NOT NULL,
145 | price decimal(10,2) NOT NULL,
146 | properties jsonb NOT NULL,
147 | created_at timestamp NOT NULL,
148 | deleted_at timestamp NULL
149 | );
150 |
151 | --
152 | -- Name: barcodes; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
153 | --
154 |
155 | CREATE TABLE barcodes (
156 | id serial NOT NULL,
157 | product_id integer NOT NULL,
158 | hex character varying(255) NOT NULL,
159 | bin bytea NOT NULL
160 | );
161 |
162 | --
163 | -- Data for Name: categories; Type: TABLE DATA; Schema: public; Owner: postgres
164 | --
165 |
166 | INSERT INTO "categories" ("name", "icon") VALUES
167 | ('announcement', NULL),
168 | ('article', NULL);
169 |
170 | --
171 | -- Data for Name: comments; Type: TABLE DATA; Schema: public; Owner: postgres
172 | --
173 |
174 | INSERT INTO "comments" ("post_id", "message") VALUES
175 | (1, 'great'),
176 | (1, 'fantastic'),
177 | (2, 'thank you'),
178 | (2, 'awesome');
179 |
180 | --
181 | -- Data for Name: post_tags; Type: TABLE DATA; Schema: public; Owner: postgres
182 | --
183 |
184 | INSERT INTO "post_tags" ("post_id", "tag_id") VALUES
185 | (1, 1),
186 | (1, 2),
187 | (2, 1),
188 | (2, 2);
189 |
190 | --
191 | -- Data for Name: posts; Type: TABLE DATA; Schema: public; Owner: postgres
192 | --
193 |
194 | INSERT INTO "posts" ("user_id", "category_id", "content") VALUES
195 | (1, 1, 'blog started'),
196 | (1, 2, 'It works!');
197 |
198 | --
199 | -- Data for Name: tags; Type: TABLE DATA; Schema: public; Owner: postgres
200 | --
201 |
202 | INSERT INTO "tags" ("name") VALUES
203 | ('funny'),
204 | ('important');
205 |
206 | --
207 | -- Data for Name: users; Type: TABLE DATA; Schema: public; Owner: postgres
208 | --
209 |
210 | INSERT INTO "users" ("username", "password", "location") VALUES
211 | ('user1', 'pass1', NULL),
212 | ('user2', 'pass2', NULL);
213 |
214 | --
215 | -- Data for Name: countries; Type: TABLE DATA; Schema: public; Owner: postgres
216 | --
217 |
218 | INSERT INTO "countries" ("name", "shape") VALUES
219 | ('Left', ST_GeomFromText('POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))')),
220 | ('Right', ST_GeomFromText('POLYGON ((70 10, 80 40, 60 40, 50 20, 70 10))'));
221 |
222 | --
223 | -- Data for Name: events; Type: TABLE DATA; Schema: public; Owner: postgres
224 | --
225 |
226 | INSERT INTO "events" ("name", "datetime", "visitors") VALUES
227 | ('Launch', '2016-01-01 13:01:01', 0);
228 |
229 | --
230 | -- Data for Name: products; Type: TABLE DATA; Schema: public; Owner: postgres
231 | --
232 |
233 | INSERT INTO "products" ("name", "price", "properties", "created_at") VALUES
234 | ('Calculator', '23.01', '{"depth":false,"model":"TRX-120","width":100,"height":null}', '1970-01-01 01:01:01');
235 |
236 | --
237 | -- Data for Name: barcodes; Type: TABLE DATA; Schema: public; Owner: postgres
238 | --
239 |
240 | INSERT INTO "barcodes" ("product_id", "hex", "bin") VALUES
241 | (1, '00ff01', E'\\x00ff01');
242 |
243 | --
244 | -- Name: categories_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
245 | --
246 |
247 | ALTER TABLE ONLY categories
248 | ADD CONSTRAINT categories_pkey PRIMARY KEY (id);
249 |
250 |
251 | --
252 | -- Name: comments_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
253 | --
254 |
255 | ALTER TABLE ONLY comments
256 | ADD CONSTRAINT comments_pkey PRIMARY KEY (id);
257 |
258 |
259 | --
260 | -- Name: post_tags_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
261 | --
262 |
263 | ALTER TABLE ONLY post_tags
264 | ADD CONSTRAINT post_tags_pkey PRIMARY KEY (id);
265 |
266 |
267 | --
268 | -- Name: post_tags_post_id_tag_id_key; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
269 | --
270 |
271 | ALTER TABLE ONLY post_tags
272 | ADD CONSTRAINT post_tags_post_id_tag_id_key UNIQUE (post_id, tag_id);
273 |
274 |
275 | --
276 | -- Name: posts_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
277 | --
278 |
279 | ALTER TABLE ONLY posts
280 | ADD CONSTRAINT posts_pkey PRIMARY KEY (id);
281 |
282 |
283 | --
284 | -- Name: tags_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
285 | --
286 |
287 | ALTER TABLE ONLY tags
288 | ADD CONSTRAINT tags_pkey PRIMARY KEY (id);
289 |
290 |
291 | --
292 | -- Name: users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
293 | --
294 |
295 | ALTER TABLE ONLY users
296 | ADD CONSTRAINT users_pkey PRIMARY KEY (id);
297 |
298 | --
299 | -- Name: countries_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
300 | --
301 |
302 | ALTER TABLE ONLY countries
303 | ADD CONSTRAINT countries_pkey PRIMARY KEY (id);
304 |
305 |
306 | --
307 | -- Name: events_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
308 | --
309 |
310 | ALTER TABLE ONLY events
311 | ADD CONSTRAINT events_pkey PRIMARY KEY (id);
312 |
313 |
314 | --
315 | -- Name: products_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
316 | --
317 |
318 | ALTER TABLE ONLY products
319 | ADD CONSTRAINT products_pkey PRIMARY KEY (id);
320 |
321 |
322 | --
323 | -- Name: barcodes_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
324 | --
325 |
326 | ALTER TABLE ONLY barcodes
327 | ADD CONSTRAINT barcodes_pkey PRIMARY KEY (id);
328 |
329 |
330 | --
331 | -- Name: comments_post_id_idx; Type: INDEX; Schema: public; Owner: postgres; Tablespace:
332 | --
333 |
334 | CREATE INDEX comments_post_id_idx ON comments USING btree (post_id);
335 |
336 |
337 | --
338 | -- Name: post_tags_post_id_idx; Type: INDEX; Schema: public; Owner: postgres; Tablespace:
339 | --
340 |
341 | CREATE INDEX post_tags_post_id_idx ON post_tags USING btree (post_id);
342 |
343 |
344 | --
345 | -- Name: post_tags_tag_id_idx; Type: INDEX; Schema: public; Owner: postgres; Tablespace:
346 | --
347 |
348 | CREATE INDEX post_tags_tag_id_idx ON post_tags USING btree (tag_id);
349 |
350 |
351 | --
352 | -- Name: posts_category_id_idx; Type: INDEX; Schema: public; Owner: postgres; Tablespace:
353 | --
354 |
355 | CREATE INDEX posts_category_id_idx ON posts USING btree (category_id);
356 |
357 |
358 | --
359 | -- Name: posts_user_id_idx; Type: INDEX; Schema: public; Owner: postgres; Tablespace:
360 | --
361 |
362 | CREATE INDEX posts_user_id_idx ON posts USING btree (user_id);
363 |
364 |
365 | --
366 | -- Name: barcodes_product_id_idx; Type: INDEX; Schema: public; Owner: postgres; Tablespace:
367 | --
368 |
369 | CREATE INDEX barcodes_product_id_idx ON barcodes USING btree (product_id);
370 |
371 |
372 | --
373 | -- Name: comments_post_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres
374 | --
375 |
376 | ALTER TABLE ONLY comments
377 | ADD CONSTRAINT comments_post_id_fkey FOREIGN KEY (post_id) REFERENCES posts(id);
378 |
379 |
380 | --
381 | -- Name: post_tags_post_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres
382 | --
383 |
384 | ALTER TABLE ONLY post_tags
385 | ADD CONSTRAINT post_tags_post_id_fkey FOREIGN KEY (post_id) REFERENCES posts(id);
386 |
387 |
388 | --
389 | -- Name: post_tags_tag_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres
390 | --
391 |
392 | ALTER TABLE ONLY post_tags
393 | ADD CONSTRAINT post_tags_tag_id_fkey FOREIGN KEY (tag_id) REFERENCES tags(id);
394 |
395 |
396 | --
397 | -- Name: posts_category_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres
398 | --
399 |
400 | ALTER TABLE ONLY posts
401 | ADD CONSTRAINT posts_category_id_fkey FOREIGN KEY (category_id) REFERENCES categories(id);
402 |
403 |
404 | --
405 | -- Name: posts_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres
406 | --
407 |
408 | ALTER TABLE ONLY posts
409 | ADD CONSTRAINT posts_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id);
410 |
411 |
412 | --
413 | -- Name: barcodes_product_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres
414 | --
415 |
416 | ALTER TABLE ONLY barcodes
417 | ADD CONSTRAINT barcodes_product_id_fkey FOREIGN KEY (product_id) REFERENCES products(id);
418 |
419 |
420 | --
421 | -- PostgreSQL database dump complete
422 | --
423 |
--------------------------------------------------------------------------------
/src/test/resources/blog_sqlite.sql:
--------------------------------------------------------------------------------
1 | -- Adminer 4.2.4 SQLite 3 dump
2 |
3 | DROP TABLE IF EXISTS "categories";
4 | CREATE TABLE "categories" (
5 | "id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
6 | "name" text(255) NOT NULL,
7 | "icon" data NULL
8 | );
9 |
10 | INSERT INTO "categories" ("id", "name", "icon") VALUES (1, 'announcement', NULL);
11 | INSERT INTO "categories" ("id", "name", "icon") VALUES (2, 'article', NULL);
12 |
13 | DROP TABLE IF EXISTS "comments";
14 | CREATE TABLE "comments" (
15 | "id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
16 | "post_id" integer NOT NULL,
17 | "message" text NOT NULL,
18 | FOREIGN KEY ("post_id") REFERENCES "posts" ("id")
19 | );
20 |
21 | CREATE INDEX "comments_post_id" ON "comments" ("post_id");
22 |
23 | INSERT INTO "comments" ("id", "post_id", "message") VALUES (1, 1, 'great');
24 | INSERT INTO "comments" ("id", "post_id", "message") VALUES (2, 1, 'fantastic');
25 | INSERT INTO "comments" ("id", "post_id", "message") VALUES (3, 2, 'thank you');
26 | INSERT INTO "comments" ("id", "post_id", "message") VALUES (4, 2, 'awesome');
27 |
28 | DROP TABLE IF EXISTS "post_tags";
29 | CREATE TABLE "post_tags" (
30 | "id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
31 | "post_id" integer NOT NULL,
32 | "tag_id" integer NOT NULL,
33 | FOREIGN KEY ("tag_id") REFERENCES "tags" ("id"),
34 | FOREIGN KEY ("post_id") REFERENCES "posts" ("id") ON DELETE RESTRICT ON UPDATE RESTRICT
35 | );
36 |
37 | CREATE UNIQUE INDEX "post_tags_post_id_tag_id" ON "post_tags" ("post_id", "tag_id");
38 |
39 | INSERT INTO "post_tags" ("id", "post_id", "tag_id") VALUES (1, 1, 1);
40 | INSERT INTO "post_tags" ("id", "post_id", "tag_id") VALUES (2, 1, 2);
41 | INSERT INTO "post_tags" ("id", "post_id", "tag_id") VALUES (3, 2, 1);
42 | INSERT INTO "post_tags" ("id", "post_id", "tag_id") VALUES (4, 2, 2);
43 |
44 | DROP TABLE IF EXISTS "posts";
45 | CREATE TABLE "posts" (
46 | "id" integer NOT NULL PRIMARY KEY AUTOINCREMENT,
47 | "user_id" integer NOT NULL,
48 | "category_id" integer NOT NULL,
49 | "content" text NOT NULL,
50 | FOREIGN KEY ("user_id") REFERENCES "users" ("id") ON DELETE RESTRICT ON UPDATE RESTRICT,
51 | FOREIGN KEY ("category_id") REFERENCES "categories" ("id") ON DELETE RESTRICT ON UPDATE RESTRICT
52 | );
53 |
54 | CREATE INDEX "posts_user_id" ON "posts" ("user_id");
55 |
56 | CREATE INDEX "posts_category_id" ON "posts" ("category_id");
57 |
58 | INSERT INTO "posts" ("id", "user_id", "category_id", "content") VALUES (1, 1, 1, 'blog started');
59 | INSERT INTO "posts" ("id", "user_id", "category_id", "content") VALUES (2, 1, 2, 'It works!');
60 |
61 | DROP TABLE IF EXISTS "tags";
62 | CREATE TABLE `tags` (
63 | `id` integer NOT NULL PRIMARY KEY AUTOINCREMENT,
64 | `name` text(255) NOT NULL
65 | );
66 |
67 | INSERT INTO "tags" ("id", "name") VALUES (1, 'funny');
68 | INSERT INTO "tags" ("id", "name") VALUES (2, 'important');
69 |
70 | DROP TABLE IF EXISTS "users";
71 | CREATE TABLE `users` (
72 | `id` integer NOT NULL PRIMARY KEY AUTOINCREMENT,
73 | `username` text(255) NOT NULL,
74 | `password` text(255) NOT NULL,
75 | `location` geometry NULL
76 | );
77 |
78 | INSERT INTO "users" ("id", "username", "password", "location") VALUES (1, 'user1', 'pass1', NULL);
79 | INSERT INTO "users" ("id", "username", "password", "location") VALUES (2, 'user2', 'pass2', NULL);
80 |
81 | DROP TABLE IF EXISTS `countries`;
82 | CREATE TABLE `countries` (
83 | `id` integer NOT NULL PRIMARY KEY AUTOINCREMENT,
84 | `name` text(255) NOT NULL,
85 | `shape` geometry NOT NULL
86 | );
87 |
88 | INSERT INTO `countries` (`id`, `name`, `shape`) VALUES (1, 'Left', 'POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))');
89 | INSERT INTO `countries` (`id`, `name`, `shape`) VALUES (2, 'Right', 'POLYGON ((70 10, 80 40, 60 40, 50 20, 70 10))');
90 |
91 | DROP TABLE IF EXISTS `events`;
92 | CREATE TABLE `events` (
93 | `id` integer NOT NULL PRIMARY KEY AUTOINCREMENT,
94 | `name` text(255) NOT NULL,
95 | `datetime` datetime NOT NULL,
96 | `visitors` integer NOT NULL
97 | );
98 |
99 | INSERT INTO `events` (`id`, `name`, `datetime`, `visitors`) VALUES (1, 'Launch', '2016-01-01 13:01:01', 0);
100 |
101 | DROP VIEW IF EXISTS `tag_usage`;
102 | CREATE VIEW `tag_usage` AS select `name`, count(`name`) AS `count` from `tags`, `post_tags` where `tags`.`id` = `post_tags`.`tag_id` group by `name` order by `count` desc, `name`;
103 |
104 | DROP TABLE IF EXISTS `products`;
105 | CREATE TABLE `products` (
106 | `id` integer NOT NULL PRIMARY KEY AUTOINCREMENT,
107 | `name` text(255) NOT NULL,
108 | `price` text(12) NOT NULL,
109 | `properties` json NOT NULL,
110 | `created_at` datetime NOT NULL,
111 | `deleted_at` datetime NULL
112 | );
113 |
114 | INSERT INTO `products` (`id`, `name`, `price`, `properties`, `created_at`) VALUES (1, 'Calculator', '23.01', '{"depth":false,"model":"TRX-120","width":100,"height":null}', '1970-01-01 01:01:01');
115 |
116 | DROP TABLE IF EXISTS `barcodes`;
117 | CREATE TABLE `barcodes` (
118 | `id` integer NOT NULL PRIMARY KEY AUTOINCREMENT,
119 | `product_id` integer NOT NULL,
120 | `hex` text(255) NOT NULL,
121 | `bin` binary(255) NOT NULL
122 | );
123 |
124 | INSERT INTO `barcodes` (`id`, `product_id`, `hex`, `bin`) VALUES (1, 1, '00ff01', X'00ff01');
125 |
126 | DROP TABLE IF EXISTS `parameters`;
127 | CREATE TABLE `parameters` (
128 | `key` varchar(255) NOT NULL,
129 | `value` text,
130 | PRIMARY KEY (`key`)
131 | );
132 | --
133 |
--------------------------------------------------------------------------------
/src/test/resources/blog_sqlserver.sql:
--------------------------------------------------------------------------------
1 | IF (OBJECT_ID('FK_barcodes_products', 'F') IS NOT NULL)
2 | BEGIN
3 | ALTER TABLE [barcodes] DROP CONSTRAINT [FK_barcodes_products]
4 | END
5 | GO
6 | IF (OBJECT_ID('FK_posts_users', 'F') IS NOT NULL)
7 | BEGIN
8 | ALTER TABLE [posts] DROP CONSTRAINT [FK_posts_users]
9 | END
10 | GO
11 | IF (OBJECT_ID('FK_posts_categories', 'F') IS NOT NULL)
12 | BEGIN
13 | ALTER TABLE [posts] DROP CONSTRAINT [FK_posts_categories]
14 | END
15 | GO
16 | IF (OBJECT_ID('FK_post_tags_tags', 'F') IS NOT NULL)
17 | BEGIN
18 | ALTER TABLE [post_tags] DROP CONSTRAINT [FK_post_tags_tags]
19 | END
20 | GO
21 | IF (OBJECT_ID('FK_post_tags_posts', 'F') IS NOT NULL)
22 | BEGIN
23 | ALTER TABLE [post_tags] DROP CONSTRAINT [FK_post_tags_posts]
24 | END
25 | GO
26 | IF (OBJECT_ID('FK_comments_posts', 'F') IS NOT NULL)
27 | BEGIN
28 | ALTER TABLE [comments] DROP CONSTRAINT [FK_comments_posts]
29 | END
30 | GO
31 | IF (OBJECT_ID('barcodes', 'U') IS NOT NULL)
32 | BEGIN
33 | DROP TABLE [barcodes]
34 | END
35 | GO
36 | IF (OBJECT_ID('products', 'U') IS NOT NULL)
37 | BEGIN
38 | DROP TABLE [products]
39 | END
40 | GO
41 | IF (OBJECT_ID('events', 'U') IS NOT NULL)
42 | BEGIN
43 | DROP TABLE [events]
44 | END
45 | GO
46 | IF (OBJECT_ID('countries', 'U') IS NOT NULL)
47 | BEGIN
48 | DROP TABLE [countries]
49 | END
50 | GO
51 | IF (OBJECT_ID('users', 'U') IS NOT NULL)
52 | BEGIN
53 | DROP TABLE [users]
54 | END
55 | GO
56 | IF (OBJECT_ID('tags', 'U') IS NOT NULL)
57 | BEGIN
58 | DROP TABLE [tags]
59 | END
60 | GO
61 | IF (OBJECT_ID('posts', 'U') IS NOT NULL)
62 | BEGIN
63 | DROP TABLE [posts]
64 | END
65 | GO
66 | IF (OBJECT_ID('post_tags', 'U') IS NOT NULL)
67 | BEGIN
68 | DROP TABLE [post_tags]
69 | END
70 | GO
71 | IF (OBJECT_ID('comments', 'U') IS NOT NULL)
72 | BEGIN
73 | DROP TABLE [comments]
74 | END
75 | GO
76 | IF (OBJECT_ID('categories', 'U') IS NOT NULL)
77 | BEGIN
78 | DROP TABLE [categories]
79 | END
80 | GO
81 | IF (OBJECT_ID('tag_usage', 'V') IS NOT NULL)
82 | BEGIN
83 | DROP VIEW [tag_usage]
84 | END
85 | GO
86 | IF (OBJECT_ID('dbo.parameters', 'U') IS NOT NULL)
87 | BEGIN
88 |     DROP TABLE [dbo].[parameters]
89 | END
90 | CREATE TABLE [dbo].[parameters] (
91 | "key" varchar(255) NOT NULL,
92 | value text,
93 | PRIMARY KEY ("key")
94 | )
95 | GO
96 | CREATE TABLE [categories](
97 | [id] [int] IDENTITY,
98 | [name] [nvarchar](max) NOT NULL,
99 | [icon] [varbinary](max) NULL,
100 | PRIMARY KEY CLUSTERED
101 | (
102 | [id] ASC
103 | )WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
104 | ) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
105 |
106 | GO
107 | SET ANSI_NULLS ON
108 | GO
109 | SET QUOTED_IDENTIFIER ON
110 | GO
111 | CREATE TABLE [comments](
112 | [id] [int] IDENTITY,
113 | [post_id] [int] NOT NULL,
114 | [message] [nvarchar](max) NOT NULL,
115 | PRIMARY KEY CLUSTERED
116 | (
117 | [id] ASC
118 | )WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
119 | ) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
120 |
121 | GO
122 | SET ANSI_NULLS ON
123 | GO
124 | SET QUOTED_IDENTIFIER ON
125 | GO
126 | CREATE TABLE [post_tags](
127 | [id] [int] IDENTITY,
128 | [post_id] [int] NOT NULL,
129 | [tag_id] [int] NOT NULL,
130 | PRIMARY KEY CLUSTERED
131 | (
132 | [id] ASC
133 | )WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
134 | ) ON [PRIMARY]
135 |
136 | GO
137 | SET ANSI_NULLS ON
138 | GO
139 | SET QUOTED_IDENTIFIER ON
140 | GO
141 | CREATE TABLE [posts](
142 | [id] [int] IDENTITY,
143 | [user_id] [int] NOT NULL,
144 | [category_id] [int] NOT NULL,
145 | [content] [nvarchar](max) NOT NULL,
146 | PRIMARY KEY CLUSTERED
147 | (
148 | [id] ASC
149 | )WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
150 | ) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
151 |
152 | GO
153 | SET ANSI_NULLS ON
154 | GO
155 | SET QUOTED_IDENTIFIER ON
156 | GO
157 | CREATE TABLE [tags](
158 | [id] [int] IDENTITY,
159 | [name] [nvarchar](max) NOT NULL,
160 | PRIMARY KEY CLUSTERED
161 | (
162 | [id] ASC
163 | )WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
164 | ) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
165 |
166 | GO
167 | SET ANSI_NULLS ON
168 | GO
169 | SET QUOTED_IDENTIFIER ON
170 | GO
171 | CREATE TABLE [users](
172 | [id] [int] IDENTITY,
173 | [username] [nvarchar](max) NOT NULL,
174 | [password] [nvarchar](max) NOT NULL,
175 | [location] [geometry] NULL,
176 | CONSTRAINT [PK_users] PRIMARY KEY CLUSTERED
177 | (
178 | [id] ASC
179 | )WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
180 | ) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
181 |
182 | GO
183 | SET ANSI_NULLS ON
184 | GO
185 | SET QUOTED_IDENTIFIER ON
186 | GO
187 | CREATE TABLE [countries](
188 | [id] [int] IDENTITY,
189 | [name] [nvarchar](max) NOT NULL,
190 | [shape] [geometry] NOT NULL,
191 | CONSTRAINT [PK_countries] PRIMARY KEY CLUSTERED
192 | (
193 | [id] ASC
194 | )WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
195 | ) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
196 |
197 | GO
198 | SET ANSI_NULLS ON
199 | GO
200 | SET QUOTED_IDENTIFIER ON
201 | GO
202 | CREATE TABLE [events](
203 | [id] [int] IDENTITY,
204 | [name] [nvarchar](max) NOT NULL,
205 | [datetime] [datetime2](0) NOT NULL,
206 | [visitors] [int] NOT NULL,
207 | CONSTRAINT [PK_events] PRIMARY KEY CLUSTERED
208 | (
209 | [id] ASC
210 | )WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
211 | ) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
212 |
213 | GO
214 | SET ANSI_NULLS ON
215 | GO
216 | SET QUOTED_IDENTIFIER ON
217 | GO
218 | CREATE VIEW [tag_usage]
219 | AS
220 | SELECT top 100 PERCENT name, COUNT(name) AS [count] FROM tags, post_tags WHERE tags.id = post_tags.tag_id GROUP BY name ORDER BY [count] DESC, name
221 |
222 | GO
223 | SET ANSI_NULLS ON
224 | GO
225 | SET QUOTED_IDENTIFIER ON
226 | GO
227 | CREATE TABLE [products](
228 | [id] [int] IDENTITY,
229 | [name] [nvarchar](max) NOT NULL,
230 | [price] [decimal](10,2) NOT NULL,
231 | [properties] [xml] NOT NULL,
232 | [created_at] [datetime2](0) NOT NULL,
233 | [deleted_at] [datetime2](0) NULL,
234 | CONSTRAINT [PK_products] PRIMARY KEY CLUSTERED
235 | (
236 | [id] ASC
237 | )WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
238 | ) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
239 |
240 | GO
241 | SET ANSI_NULLS ON
242 | GO
243 | SET QUOTED_IDENTIFIER ON
244 | GO
245 | CREATE TABLE [barcodes](
246 | [id] [int] IDENTITY,
247 | [product_id] [int] NOT NULL,
248 | [hex] [nvarchar](max) NOT NULL,
249 | [bin] [varbinary](max) NOT NULL,
250 | CONSTRAINT [PK_barcodes] PRIMARY KEY CLUSTERED
251 | (
252 | [id] ASC
253 | )WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
254 | ) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
255 |
256 | GO
257 | SET IDENTITY_INSERT [categories] ON
258 | GO
259 | INSERT [categories] ([id], [name], [icon]) VALUES (1, N'announcement', NULL)
260 | GO
261 | INSERT [categories] ([id], [name], [icon]) VALUES (2, N'article', NULL)
262 | GO
263 | SET IDENTITY_INSERT [categories] OFF
264 | GO
265 | SET IDENTITY_INSERT [comments] ON
266 | GO
267 | INSERT [comments] ([id], [post_id], [message]) VALUES (1, 1, N'great')
268 | GO
269 | INSERT [comments] ([id], [post_id], [message]) VALUES (2, 1, N'fantastic')
270 | GO
271 | INSERT [comments] ([id], [post_id], [message]) VALUES (3, 2, N'thank you')
272 | GO
273 | INSERT [comments] ([id], [post_id], [message]) VALUES (4, 2, N'awesome')
274 | GO
275 | SET IDENTITY_INSERT [comments] OFF
276 | GO
277 | SET IDENTITY_INSERT [post_tags] ON
278 | GO
279 | INSERT [post_tags] ([id], [post_id], [tag_id]) VALUES (1, 1, 1)
280 | GO
281 | INSERT [post_tags] ([id], [post_id], [tag_id]) VALUES (2, 1, 2)
282 | GO
283 | INSERT [post_tags] ([id], [post_id], [tag_id]) VALUES (3, 2, 1)
284 | GO
285 | INSERT [post_tags] ([id], [post_id], [tag_id]) VALUES (4, 2, 2)
286 | GO
287 | SET IDENTITY_INSERT [post_tags] OFF
288 | GO
289 | SET IDENTITY_INSERT [posts] ON
290 | GO
291 | INSERT [posts] ([id], [user_id], [category_id], [content]) VALUES (1, 1, 1, N'blog started')
292 | GO
293 | INSERT [posts] ([id], [user_id], [category_id], [content]) VALUES (2, 1, 2, N'It works!')
294 | GO
295 | SET IDENTITY_INSERT [posts] OFF
296 | GO
297 | SET IDENTITY_INSERT [tags] ON
298 | GO
299 | INSERT [tags] ([id], [name]) VALUES (1, N'funny')
300 | GO
301 | INSERT [tags] ([id], [name]) VALUES (2, N'important')
302 | GO
303 | SET IDENTITY_INSERT [tags] OFF
304 | GO
305 | SET IDENTITY_INSERT [users] ON
306 | GO
307 | INSERT [users] ([id], [username], [password], [location]) VALUES (1, N'user1', N'pass1', NULL)
308 | GO
309 | INSERT [users] ([id], [username], [password], [location]) VALUES (2, N'user2', N'pass2', NULL)
310 | GO
311 | SET IDENTITY_INSERT [users] OFF
312 | GO
313 | SET IDENTITY_INSERT [countries] ON
314 | GO
315 | INSERT [countries] ([id], [name], [shape]) VALUES (1, N'Left', N'POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))')
316 | GO
317 | INSERT [countries] ([id], [name], [shape]) VALUES (2, N'Right', N'POLYGON ((70 10, 80 40, 60 40, 50 20, 70 10))')
318 | GO
319 | SET IDENTITY_INSERT [countries] OFF
320 | GO
321 | SET IDENTITY_INSERT [events] ON
322 | GO
323 | INSERT [events] ([id], [name], [datetime], [visitors]) VALUES (1, N'Launch', N'2016-01-01 13:01:01', 0)
324 | GO
325 | SET IDENTITY_INSERT [events] OFF
326 | GO
327 | SET IDENTITY_INSERT [products] ON
328 | GO
329 | INSERT [products] ([id], [name], [price], [properties], [created_at]) VALUES (1, N'Calculator', N'23.01', N'<root><depth>false</depth><model>TRX-120</model><width>100</width><height></height></root>', '1970-01-01 01:01:01')
330 | GO
331 | SET IDENTITY_INSERT [products] OFF
332 | GO
333 | SET IDENTITY_INSERT [barcodes] ON
334 | GO
335 | INSERT [barcodes] ([id], [product_id], [hex], [bin]) VALUES (1, 1, N'00ff01', 0x00ff01)
336 | GO
337 | SET IDENTITY_INSERT [barcodes] OFF
338 | GO
339 | ALTER TABLE [comments] WITH CHECK ADD CONSTRAINT [FK_comments_posts] FOREIGN KEY([post_id])
340 | REFERENCES [posts] ([id])
341 | GO
342 | ALTER TABLE [comments] CHECK CONSTRAINT [FK_comments_posts]
343 | GO
344 | ALTER TABLE [post_tags] WITH CHECK ADD CONSTRAINT [FK_post_tags_posts] FOREIGN KEY([post_id])
345 | REFERENCES [posts] ([id])
346 | GO
347 | ALTER TABLE [post_tags] CHECK CONSTRAINT [FK_post_tags_posts]
348 | GO
349 | ALTER TABLE [post_tags] WITH CHECK ADD CONSTRAINT [FK_post_tags_tags] FOREIGN KEY([tag_id])
350 | REFERENCES [tags] ([id])
351 | GO
352 | ALTER TABLE [post_tags] CHECK CONSTRAINT [FK_post_tags_tags]
353 | GO
354 | ALTER TABLE [posts] WITH CHECK ADD CONSTRAINT [FK_posts_categories] FOREIGN KEY([category_id])
355 | REFERENCES [categories] ([id])
356 | GO
357 | ALTER TABLE [posts] CHECK CONSTRAINT [FK_posts_categories]
358 | GO
359 | ALTER TABLE [posts] WITH CHECK ADD CONSTRAINT [FK_posts_users] FOREIGN KEY([user_id])
360 | REFERENCES [users] ([id])
361 | GO
362 | ALTER TABLE [posts] CHECK CONSTRAINT [FK_posts_users]
363 | GO
364 | ALTER TABLE [barcodes] WITH CHECK ADD CONSTRAINT [FK_barcodes_products] FOREIGN KEY([product_id])
365 | REFERENCES [products] ([id])
366 | GO
367 | ALTER TABLE [barcodes] CHECK CONSTRAINT [FK_barcodes_products]
368 | GO
--------------------------------------------------------------------------------