├── .gitignore
├── LICENSE
├── README.md
├── build.sbt
├── core
│   ├── build.sbt
│   └── src
│       ├── main
│       │   └── scala
│       │       └── com
│       │           └── datasift
│       │               └── dropwizard
│       │                   └── scala
│       │                       ├── ScalaApplication.scala
│       │                       └── ScalaBundle.scala
│       └── test
│           ├── resources
│           │   ├── migrations.xml
│           │   └── test-conf.yml
│           └── scala
│               └── com
│                   └── datasift
│                       └── dropwizard
│                           └── scala
│                               └── ScalaApplicationSpecIT.scala
├── jdbi
│   ├── build.sbt
│   └── src
│       ├── main
│       │   ├── java
│       │   │   └── com
│       │   │       └── datasift
│       │   │           └── dropwizard
│       │   │               └── jdbi
│       │   │                   └── tweak
│       │   │                       └── BindProduct.java
│       │   └── scala
│       │       └── com
│       │           └── datasift
│       │               └── dropwizard
│       │                   └── scala
│       │                       └── jdbi
│       │                           ├── JDBI.scala
│       │                           ├── ResultSetDSL.scala
│       │                           ├── SimpleMapper.scala
│       │                           ├── package.scala
│       │                           └── tweak
│       │                               ├── BigDecimalArgumentFactory.scala
│       │                               ├── BindProductFactory.scala
│       │                               ├── IterableContainerFactory.scala
│       │                               ├── OptionArgumentFactory.scala
│       │                               ├── OptionContainerFactory.scala
│       │                               └── ProductResultSetMapperFactory.scala
│       └── test
│           └── scala
│               └── com
│                   └── datasift
│                       └── dropwizard
│                           └── scala
│                               └── jdbi
│                                   └── tweak
│                                       ├── BigDecimalArgumentFactorySpec.scala
│                                       ├── IterableContainerFactorySpec.scala
│                                       ├── JDBIWrapperSpec.scala
│                                       └── OptionContainerFactorySpec.scala
├── jersey
│   ├── build.sbt
│   └── src
│       └── main
│           └── scala
│               └── com
│                   └── datasift
│                       └── dropwizard
│                           └── scala
│                               └── jersey
│                                   ├── ParameterizedMessageBodyWriter.scala
│                                   ├── inject
│                                   │   ├── CollectionParamFactoryProvider.scala
│                                   │   ├── CollectionParameterExtractor.scala
│                                   │   ├── EitherMessageBodyWriter.scala
│                                   │   ├── OptionMessageBodyWriter.scala
│                                   │   ├── ParamConverters.scala
│                                   │   ├── ScalaInjectionBinder.scala
│                                   │   ├── ScalaParamConverters.scala
│                                   │   ├── ScalaParamInjectionResolver.scala
│                                   │   ├── TryMessageBodyWriter.scala
│                                   │   └── ValueFactory.scala
│                                   └── params
│                                       ├── BooleanParam.scala
│                                       ├── IntParam.scala
│                                       └── LongParam.scala
├── metrics
│   ├── build.sbt
│   └── src
│       ├── main
│       │   └── scala
│       │       └── com
│       │           └── datasift
│       │               └── dropwizard
│       │                   └── scala
│       │                       └── metrics.scala
│       └── test
│           └── scala
│               └── com
│                   └── datasift
│                       └── dropwizard
│                           └── scala
│                               └── MetricImplicitsSpec.scala
├── project
│   ├── Versions.scala
│   ├── build.properties
│   └── plugins.sbt
├── test
│   ├── build.sbt
│   └── src
│       └── main
│           └── scala
│               └── com
│                   └── datasift
│                       └── dropwizard
│                           └── scala
│                               └── test
│                                   ├── ApplicationTest.scala
│                                   ├── BeforeAndAfterAllMulti.scala
│                                   ├── LiquibaseTest.scala
│                                   ├── MySQLTest.scala
│                                   └── NotInitializedException.scala
├── validation
│   ├── build.sbt
│   └── src
│       ├── main
│       │   └── scala
│       │       └── com
│       │           └── datasift
│       │               └── dropwizard
│       │                   └── scala
│       │                       └── validation
│       │                           ├── ScalaValidatorsBundle.scala
│       │                           ├── constraints
│       │                           │   └── package.scala
│       │                           └── validators
│       │                               ├── SizeValidatorForScalaCollection.scala
│       │                               └── SizeValidatorForScalaOption.scala
│       └── test
│           └── scala
│               └── com
│                   └── datasift
│                       └── dropwizard
│                           └── scala
│                               └── validation
│                                   └── ScalaValidatorsSpec.scala
└── version.sbt
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | /lib/
3 | target/
4 | atlassian-ide-plugin.xml
5 | dependency-reduced-pom.xml
6 | *.iml
7 | *.ipr
8 | .settings/
9 | .classpath
10 | .project
11 |
12 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Dropwizard Scala
2 | ================
3 |
4 | *Scala support for [Dropwizard](http://dropwizard.io).*
5 |
6 | **This project is no longer actively maintained. If you wish to continue using it, you're welcome to fork it and continue development in line with the Apache 2.0 license terms.**
7 |
8 | Usage
9 | -----
10 |
11 | Just add a dependency on `dropwizard-scala-core` and, optionally, `dropwizard-scala-jdbi` to your project:
12 |
13 | ### SBT
14 |
15 | ```scala
16 | libraryDependencies += "com.datasift.dropwizard.scala" %% "dropwizard-scala-core" % "1.0.0-1"
17 | ```
18 |
19 | ### Maven
20 |
21 | Include the `dropwizard-scala-core` artifact in your POM:
22 |
23 | ```xml
24 | <dependency>
25 |     <groupId>com.datasift.dropwizard.scala</groupId>
26 |     <artifactId>dropwizard-scala-core_2.10.2</artifactId>
27 |     <version>1.0.0-1</version>
28 | </dependency>
29 | ```
30 |
31 | It's good practice to keep your Scala version as a global property that you
32 | can reuse elsewhere to keep your POM consistent:
33 |
34 | ```xml
35 | <properties>
36 |     <scala.version>2.10.2</scala.version>
37 |     <dropwizard.version>1.0.0</dropwizard.version>
38 |     <dropwizard.scala.version>${dropwizard.version}-1</dropwizard.scala.version>
39 | </properties>
40 |
41 | <dependencies>
42 |     <dependency>
43 |         <groupId>com.datasift.dropwizard.scala</groupId>
44 |         <artifactId>dropwizard-scala-core_${scala.version}</artifactId>
45 |         <version>${dropwizard.scala.version}</version>
46 |     </dependency>
47 | </dependencies>
48 | ```
49 |
50 | Core
51 | ----
52 |
53 | * A base `ScalaApplication` trait, allowing applications to be defined as
54 |   singleton objects:
55 |
56 | ```scala
57 | import io.dropwizard.Configuration
58 | import io.dropwizard.setup.{Bootstrap, Environment}
59 | import com.datasift.dropwizard.scala.ScalaApplication
60 | import com.datasift.dropwizard.scala.validation.constraints._
59 |
60 | class MyConfiguration extends Configuration {
61 | @NotEmpty val greeting: String = "Hello, %s!"
62 | @NotNull val greeters: List[String] = Nil
63 | }
64 |
65 | object MyApplication extends ScalaApplication[MyConfiguration] {
66 | def init(bootstrap: Bootstrap[MyConfiguration]) {
67 |
68 | }
69 |
70 | def run(conf: MyConfiguration, env: Environment) {
71 |
72 | }
73 | }
74 | ```
75 |
76 | When you build an application like this, the `ScalaBundle` is automatically
77 | added, providing everything else described here.
78 |
79 | * Jackson support for Scala collections, `Option` and case classes,
80 |   enabling (de)serialization of Scala collections/case classes in
81 |   configurations and within Jersey request/response entities (see sketch below).
82 |
83 | * `log4s` is provided automatically, via a transitive dependency. To use it,
84 | simply `import org.log4s._`. See http://github.com/log4s/log4s for more
85 | details.
86 |
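As a quick illustration of what this enables (the `Greeting` class is illustrative, not part of this project; the `ScalaBundle` performs the module registration for you on Dropwizard's own mapper):

```scala
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule

case class Greeting(message: String, recipients: List[String], sender: Option[String])

// a standalone mapper, configured the same way ScalaBundle configures Dropwizard's
val mapper = new ObjectMapper().registerModule(DefaultScalaModule)

val json = mapper.writeValueAsString(Greeting("Hello!", List("Nick"), None))
// yields: {"message":"Hello!","recipients":["Nick"],"sender":null}

val parsed = mapper.readValue(json, classOf[Greeting])
```
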
87 | Metrics
88 | -------
89 |
90 | * A more idiomatic API for metrics is provided by `com.datasift.dropwizard.scala.metrics._`.
91 |
92 | ```scala
93 | import com.codahale.metrics._
94 | import com.datasift.dropwizard.scala.metrics._
95 |
96 | class MyApplication extends ScalaApplication[MyConfiguration] {
97 | def run (conf: MyConfiguration, env: Environment) {
98 | env.metrics.gauge("things.current_time") {
99 | System.currentTimeMillis()
100 | }
101 |
102 | env.metrics.timer("things.some_timer") {
103 | // do something and time the execution
104 | }
105 | }
106 | }
107 |
108 | ```
109 |
110 | Jersey
111 | ------
112 |
113 | * Support for `Option` in resource method parameters and for request/response
114 | entities.
115 |
116 | * Support for `Either[L, R]` in resource method parameters, where `L` and `R`
117 |   are both types Jersey supports for parameters. By convention, it will
118 |   attempt to decode the parameter into the right side as an `R` first and, if
119 |   that fails, into the left side as an `L` (see the sketch after this list).
120 |
121 | * Support for `Seq[A]`, `List[A]`, `Vector[A]`, `IndexedSeq[A]` and `Set[A]`
122 | in resource method parameters, where `A` is any non-collection type that
123 | Jersey supports for parameters. This is the same limitation imposed on Java
124 | collections.
125 |
126 | * Support for `BigInt` and `BigDecimal` in resource method parameters and
127 | request/response entities.
128 |
129 | * Support for Scala's native `Boolean`, `Int` and `Long` types in resource
130 | method parameters via the `BooleanParam`, `IntParam` and `LongParam` wrapper
131 | types.
132 |
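Taken together, these let a resource use Scala types directly. A rough sketch (the resource, its paths and the greeting format are illustrative, not part of this library):

```scala
import javax.ws.rs.{GET, Path, Produces, QueryParam}
import javax.ws.rs.core.MediaType

@Path("/greetings")
@Produces(Array(MediaType.APPLICATION_JSON))
class GreetingsResource {

  // a None return is rendered as a 404 Not Found response
  @GET @Path("/maybe")
  def maybe(@QueryParam("name") name: Option[String]): Option[String] =
    name.map(n => s"Hello, $n!")

  // decoded as the right side (Integer) first, falling back to the left (String)
  @GET @Path("/by_id_or_name")
  def byIdOrName(@QueryParam("q") q: Either[String, Integer]): String =
    q.fold(name => s"looked up by name: $name", id => s"looked up by id: $id")

  // repeated ?names=... query parameters are collected into a Scala collection
  @GET @Path("/all")
  def all(@QueryParam("names") names: Seq[String]): List[String] =
    names.map(n => s"Hello, $n!").toList
}
```
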
133 | JDBI
134 | ----
135 |
136 | * Scala collections and `Option` as the return type for a result set (i.e.
137 | multiple rows of results).
138 |
139 | Note: when returning a single row as an `Option`, you must use the
140 | `@SingleValueResult` annotation:
141 |
142 | ```scala
143 | @SqlQuery("select i from tbl limit 1")
144 | @SingleValueResult
145 | def headOption: Option[Int]
146 | ```
147 |
148 | * Support for the `BigDecimal` and `Option` types as parameters and result
149 | column types.
150 |
151 | * Support for returning a row as a case class or tuple (see the sketch
152 |   after this list), with the following constraints:
153 |
154 |     * selected columns must match up with constructor parameters
155 |       _positionally_.
156 |     * only the first defined public constructor will be used if multiple
157 |       constructors are defined.
158 |     * parameter types must be directly mappable from their SQL types,
159 |       without the use of a mapper. The only exceptions to this rule are
160 |       `Option` and `scala.BigDecimal`, which are natively supported.
161 |
162 | * case classes and tuples as parameters using the `BindProduct` annotation:
163 |
164 | ```scala
165 | @SqlUpdate("insert into tbl (a, b, c, d) values (:x.a, :x.b, :y._1, :y._2)")
166 | def insert(@BindProduct("x") x: Thing, @BindProduct("y") y: (Int, String))
167 | ```
168 |
169 |   Note: `BindProduct` will bind to any no-arg method or field, preferring
170 |   no-arg methods when both exist.
171 |
172 | * A more idiomatic JDBI API:
173 |
174 | ```scala
175 | import com.datasift.dropwizard.scala.jdbi._
176 |
177 | val db = JDBI(environment, conf.db, "db")
178 | val dao = db.daoFor[MyDAO]
179 | val result: Int = db.inTransaction {
180 | handle: Handle => handle.attach[MyDAO].myQuery(123)
181 | }
182 | ```
183 |
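As referenced above, a sketch of mapping rows straight onto a case class (the `users` table and `User` class are illustrative, not part of this library):

```scala
import org.skife.jdbi.v2.sqlobject.{Bind, SqlQuery}
import org.skife.jdbi.v2.sqlobject.customizers.SingleValueResult

// column order in each query lines up with the constructor positionally:
// (id, name, balance) => User(id, name, balance)
case class User(id: Long, name: String, balance: Option[BigDecimal])

trait UserDao {

  @SqlQuery("SELECT id, name, balance FROM users")
  def all: Seq[User]

  // a single row, so Option requires @SingleValueResult
  @SqlQuery("SELECT id, name, balance FROM users WHERE id = :id")
  @SingleValueResult
  def find(@Bind("id") id: Long): Option[User]
}
```
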
184 | To enable Scala integration for JDBI, you will need to add an extra dependency:
185 |
186 | ### SBT
187 |
188 | ```scala
189 | libraryDependencies += "com.datasift.dropwizard.scala" %% "dropwizard-scala-jdbi" % "1.0.0-1"
190 | ```
191 |
192 | ### Maven
193 |
194 | ```xml
195 | <dependency>
196 |     <groupId>com.datasift.dropwizard.scala</groupId>
197 |     <artifactId>dropwizard-scala-jdbi_${scala.version}</artifactId>
198 |     <version>${dropwizard.scala.version}</version>
199 | </dependency>
200 | ```
201 |
202 | Validation
203 | ----------
204 |
205 | * Support for all JSR-303 and Hibernate Validator constraints on Scala types.
206 | In particular, support is added for `@NotEmpty` and `@Size` on Scala
207 | collections. All other constraint annotations work on Scala types out of
208 | the box.
209 |
210 | * Validation of Scala `case class` properties using JSR-303 and Hibernate
211 | Validator constraints. To validate a `case class`, you will need to use the
212 | wrapper constraints defined in `com.datasift.dropwizard.scala.validation.constraints`:
213 |
214 | ```scala
215 | import com.datasift.dropwizard.scala.validation.constraints._
216 |
217 | class MyConfiguration extends Configuration {
218 | @NotEmpty val names: List[String] = Nil
219 | @Min(0) val age: Int = 20
220 | }
221 | ```
222 |
223 | ### Limitations
224 |
225 | In order to cascade validation using `@Valid` on collection types, Hibernate
226 | requires that the collection provide a Java `Iterator`. Since Scala collections
227 | don't provide this, they cannot cascade validation.
228 |
229 | In the following example, only `MyConfiguration` is validated. `Person` values
230 | held in the `people` collection are not validated, though the size of `people`
231 | is.
232 |
233 | ```scala
234 | case class MyConfiguration(@Valid @NotEmpty people: List[Person])
235 | extends Configuration
236 |
237 | case class Person(@NotEmpty name: String, @Min(0) age: Int)
238 | ```
239 |
240 | Test
241 | ----
242 |
243 | This module provides some utilities to aid testing with ScalaTest (see the
244 | sketch at the end of this section). Note: this module is by far the least
245 | mature, and its API is subject to change. Comments and suggestions are welcome.
246 |
247 | See `core/src/test/**/ScalaApplicationSpecIT` for examples of all of these
248 | components in action.
249 |
250 | * `BeforeAndAfterAllMulti` - a utility trait that allows multiple functions to
251 | be registered to run `before` and `after` tests, executing the `after`
252 | functions in the reverse order to their associated `before` functions.
253 | This behaves similarly to Dropwizard's lifecycle management, except it's
254 | managing the lifecycle of test dependencies.
255 |
256 | All of the `*Test` utilities below require that your test class extend this
257 | trait.
258 |
259 | * `ApplicationTest` - runs tests in the context of a running Dropwizard
260 | Application:
261 |
262 | ```scala
263 | val app =
264 | ApplicationTest(this, configFilePath) {
265 | MyApplication
266 | }
267 | ```
268 |
269 | The returned object contains the following utility methods to work with the
270 | application:
271 |
272 | * `configuration: Try[C]` - the application's configuration.
273 | * `application: Try[A]` - the application object itself.
274 |   * `environment: Try[Environment]` - the application's `Environment`.
275 | * `server: Try[Server]` - the application's Jetty `Server`.
276 | * `newClient(name: String): Try[Client]` - a helper to construct a Jersey
277 | `Client` that connects to the application.
278 |
279 | * `MySQLTest` - runs tests in the context of a running MySQL server:
280 |
281 | ```scala
282 | val mysql = MySQLTest(this, dataSourceFactory.getUrl) {
283 | dataSourceFactory.build(new MetricRegistry, "test")
284 | }
285 | ```
286 |
287 | The returned object contains the following utility methods to work with the
288 | MySQL server:
289 |
290 | * `dataSource: Try[ManagedDataSource]` - the `DataSource` used to create
291 | the database instance.
292 | * `baseDir: Try[File]` - the base directory for the MySQL server's data.
293 |
294 | Note: to use this object, you will need to add a dependency on
295 | `mysql:mysql-connector-mxj:5.0.12`.
296 |
297 | * `LiquibaseTest` - runs tests in the context of a database migration:
298 |
299 | ```scala
300 | val migrations = LiquibaseTest(
301 | this, LiquibaseTest.Config(migrationsFilePath)) {
302 | dataSourceFactory.build(new MetricRegistry, "migrations")
303 | }
304 | ```
305 |
306 | The returned object contains the following utility methods to work with the
307 | Liquibase context:
308 |
309 | * `dataSource: Try[ManagedDataSource]` - the `DataSource` used to connect
310 | to the database instance.
311 | * `liquibase: Try[CloseableLiquibase]` - the Liquibase context that ran the
312 | migrations.
313 |
314 |
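As a sketch of how these pieces fit together in a ScalaTest spec (reusing the `MyApplication` object from the Core section; the config path is illustrative):

```scala
import org.scalatest.FlatSpec
import com.datasift.dropwizard.scala.test.{ApplicationTest, BeforeAndAfterAllMulti}

class MyApplicationSpec extends FlatSpec with BeforeAndAfterAllMulti {

  // starts the application before the tests and stops it afterwards
  val app = ApplicationTest(this, "path/to/config.yml") {
    MyApplication
  }

  "MyApplication" should "start and expose its configuration" in {
    assert(app.configuration.isSuccess)
  }
}
```
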
--------------------------------------------------------------------------------
/build.sbt:
--------------------------------------------------------------------------------
1 | import com.typesafe.sbt.pgp.PgpKeys
2 | import Keys._
3 | import sbtrelease.ReleasePlugin.autoImport._
4 | import sbtrelease._
5 |
6 | def repository(isSnapshot: Boolean) = {
7 | val nexus = "https://oss.sonatype.org/"
8 | if (isSnapshot)
9 | Some("snapshots" at nexus + "content/repositories/snapshots")
10 | else
11 | Some("releases" at nexus + "service/local/staging/deploy/maven2")
12 | }
13 |
14 | lazy val commonSettings = Seq(
15 |
16 | description := "Scala language integration for the Dropwizard project.",
17 | homepage := Option(url("http://github.com/datasift/dropwizard-scala")),
18 | startYear := Option(2014),
19 | licenses += ("Apache License 2.0", url("http://www.apache.org/licenses/LICENSE-2.0.html")),
20 | organization := "com.datasift.dropwizard.scala",
21 | scmInfo := Option(ScmInfo(
22 | browseUrl = url("http://github.com/dropwizard/dropwizard-scala/"),
23 | connection = "git://github.com/dropwizard/dropwizard-scala.git",
24 | devConnection = Option("git@github.com:dropwizard/dropwizard-scala.git")
25 | )),
26 | scalaVersion := "2.12.2",
27 | crossScalaVersions := Seq("2.11.11", "2.12.2"),
28 | scalacOptions ++=
29 | "-deprecation" ::
30 | "-unchecked" ::
31 | "-language:implicitConversions" ::
32 | "-language:higherKinds" ::
33 | "-feature" ::
34 | Nil,
35 |
36 | javacOptions ++= "-source" :: "1.8" :: "-target" :: "1.8" :: Nil,
37 | resolvers in ThisBuild ++= Seq(
38 | "Local Maven Repository" at "file://" + Path.userHome.absolutePath + "/.m2/repository",
39 | "Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots"
40 | ),
41 | libraryDependencies ++= Seq(
42 | "org.log4s" %% "log4s" % "1.3.4",
43 | "org.scalatest" %% "scalatest" % Versions.scalaTest % "test",
44 | "org.mockito" % "mockito-core" % Versions.mockito % "test"
45 | ),
46 | publishMavenStyle := true,
47 | publishTo := isSnapshot(repository).value,
48 | publishArtifact in Test := false,
49 | pomIncludeRepository := { _ => false },
50 | pomExtra := {
51 |   <developers>
52 |     <developer>
53 |       <id>nicktelford</id>
54 |       <name>Nick Telford</name>
55 |       <email>nick.telford@gmail.com</email>
56 |     </developer>
57 |   </developers>
58 | },
59 | unmanagedSourceDirectories in Compile +=
60 | (sourceDirectory in Compile).value / ("scala_" + scalaBinaryVersion.value),
61 | PgpKeys.useGpg := true,
62 | PgpKeys.useGpgAgent := true,
63 | releaseCrossBuild := true,
64 | releasePublishArtifactsAction := PgpKeys.publishSigned.value,
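// version scheme: <dropwizard version>-<N>; a release keeps the current version
// as-is and the next version bumps the qualifier (e.g. 1.0.0-1 becomes 1.0.0-2)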
65 | releaseVersion := identity[String],
66 | releaseNextVersion := { Version(_).map { v =>
67 | v.withoutQualifier.string + "-" + v.qualifier
68 | .flatMap(x => scala.util.Try(x.stripPrefix("-").toInt).toOption)
69 | .map(_ + 1).getOrElse(1)
70 | }.getOrElse(versionFormatError(version.value)) }
71 | )
72 |
73 | lazy val core = (project in file("core"))
74 | .settings(
75 | name := "Dropwizard scala core",
76 | normalizedName := "dropwizard-scala-core",
77 | commonSettings
78 | )
79 | .dependsOn(jersey, validation, metrics, test % "test", jdbi % "test")
80 |
81 | lazy val jersey = (project in file("jersey"))
82 | .settings(
83 | name := "Dropwizard scala jersey",
84 | normalizedName := "dropwizard-scala-jersey",
85 | commonSettings
86 | )
87 |
88 | lazy val validation = (project in file("validation"))
89 | .settings(
90 | name := "Dropwizard scala validation",
91 | normalizedName := "dropwizard-scala-validation",
92 | commonSettings
93 | )
94 |
95 | lazy val jdbi = (project in file("jdbi"))
96 | .settings(
97 | name := "Dropwizard scala jdbi",
98 | normalizedName := "dropwizard-scala-jdbi",
99 | commonSettings
100 | )
101 |
102 | lazy val metrics = (project in file("metrics"))
103 | .settings(
104 | name := "Dropwizard scala metrics",
105 | normalizedName := "dropwizard-scala-metrics",
106 | commonSettings
107 | )
108 |
109 | lazy val test = (project in file("test"))
110 | .settings(
111 | name := "Dropwizard scala test",
112 | normalizedName := "dropwizard-scala-test",
113 | commonSettings
114 | )
115 |
116 | lazy val parent = (project in file("."))
117 | .settings(
118 | commonSettings,
119 | name := "Dropwizard scala parent",
120 | normalizedName := "dropwizard-scala-parent",
121 | publishArtifact := false,
122 | Keys.`package` := file(""),
123 | packageBin in Global := file(""),
124 | packagedArtifacts := Map()
125 | )
126 | .aggregate(core, jersey, jdbi, validation, metrics, test)
127 |
128 |
--------------------------------------------------------------------------------
/core/build.sbt:
--------------------------------------------------------------------------------
1 | libraryDependencies ++= Seq(
2 | "io.dropwizard" % "dropwizard-core" % Versions.dropwizard,
3 | "com.fasterxml.jackson.module" %% "jackson-module-scala" % Versions.jackson,
4 | "io.dropwizard" % "dropwizard-client" % Versions.dropwizard % Test,
5 | "io.dropwizard" % "dropwizard-migrations" % Versions.dropwizard % Test,
6 | "mysql" % "mysql-connector-mxj" % "5.0.12" % Test
7 | )
8 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/datasift/dropwizard/scala/ScalaApplication.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala
2 |
3 | import io.dropwizard.setup.Bootstrap
4 | import io.dropwizard.{Application, Configuration}
5 |
6 | /** Base trait for Dropwizard applications built in Scala.
7 | *
8 | * Building a Dropwizard Application in Scala has never been easier:
9 | * {{{
10 | * import io.dropwizard.setup.{Bootstrap, Environment}
11 | * import com.datasift.dropwizard.scala.ScalaApplication
12 | *
13 | * object MyApplication extends ScalaApplication[MyConfiguration] {
14 | * def init(bootstrap: Bootstrap[MyConfiguration]) {
15 | * // (optional) initialization code goes here
16 | * }
17 | *
18 | * def run(configuration: MyConfiguration, environment: Environment) {
19 | * // application code goes here
20 | * }
21 | * }
22 | * }}}
23 | *
24 | * Applications derived from this trait will automatically have the
25 | * [[com.datasift.dropwizard.scala.ScalaBundle]] added to them during initialization.
26 | **/
27 | trait ScalaApplication[A <: Configuration] extends Application[A] {
28 |
29 | // when the application is a singleton object, we need to strip the trailing dollar from its name
30 | override def getName: String = getClass.getSimpleName.stripSuffix("$")
31 |
32 | /** Entry point for this Dropwizard [[io.dropwizard.Application]].
33 | *
34 | * @param args the command-line arguments the program was invoked with.
35 | */
36 | final def main(args: Array[String]) {
37 | run(args: _*)
38 | }
39 |
40 | /** Service initialization.
41 | *
42 | * Ensures that [[com.datasift.dropwizard.scala.ScalaBundle]] is always included in Scala
43 | * services.
44 | *
45 | * To customize initialization behaviour, override `ScalaApplication#init(Bootstrap)`.
46 | *
47 | * @param bootstrap Service Bootstrap environment.
48 | */
49 | override final def initialize(bootstrap: Bootstrap[A]) {
50 | bootstrap.addBundle(new ScalaBundle)
51 | init(bootstrap)
52 | }
53 |
54 | /** Service initialization.
55 | *
56 | * @param bootstrap Service Bootstrap environment.
57 | */
58 | def init(bootstrap: Bootstrap[A]) {
59 | // do nothing extra by default, override to add additional initialization behavior
60 | }
61 | }
62 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/datasift/dropwizard/scala/ScalaBundle.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala
2 |
3 | import com.datasift.dropwizard.scala.jersey.inject.{EitherMessageBodyWriter, TryMessageBodyWriter, ScalaInjectionBinder, OptionMessageBodyWriter}
4 | import com.fasterxml.jackson.databind.introspect.{JacksonAnnotationIntrospector, AnnotationIntrospectorPair}
5 | import com.fasterxml.jackson.module.scala.introspect.ScalaAnnotationIntrospector
6 | import io.dropwizard.setup.{Bootstrap, Environment}
7 | import io.dropwizard.Bundle
8 |
9 | import com.fasterxml.jackson.module.scala.DefaultScalaModule
10 | import com.datasift.dropwizard.scala.validation.ScalaValidatorsBundle
11 |
12 | /** Provides Scala support to core Dropwizard functionality. */
13 | class ScalaBundle extends Bundle {
14 |
15 | val validatorsBundle = new ScalaValidatorsBundle
16 |
17 | override def initialize(bootstrap: Bootstrap[_]) {
18 | val mapper = bootstrap.getObjectMapper
19 | mapper.registerModule(new DefaultScalaModule)
20 | mapper.setAnnotationIntrospector(new AnnotationIntrospectorPair(
21 | ScalaAnnotationIntrospector,
22 | new JacksonAnnotationIntrospector))
23 | validatorsBundle.initialize(bootstrap)
24 | }
25 |
26 | override def run(environment: Environment) {
27 | val jersey = environment.jersey()
28 | jersey.register(new OptionMessageBodyWriter)
29 | jersey.register(new TryMessageBodyWriter)
30 | jersey.register(new EitherMessageBodyWriter)
31 | jersey.register(new ScalaInjectionBinder)
32 | validatorsBundle.run(environment)
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/core/src/test/resources/migrations.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 |
3 | <databaseChangeLog
4 |         xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
5 |         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
6 |         xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
7 |             http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.1.xsd">
8 |
9 |     <!-- schema inferred from usage in ScalaApplicationSpecIT: a table "tbl"
10 |          with a decimal column "d" and a nullable varchar column "o" -->
11 |     <changeSet id="1" author="dropwizard-scala">
12 |         <createTable tableName="tbl">
13 |             <column name="d" type="decimal(20,10)"/>
14 |             <column name="o" type="varchar(255)"/>
15 |         </createTable>
16 |     </changeSet>
17 |
18 |
19 |
20 | </databaseChangeLog>
--------------------------------------------------------------------------------
/core/src/test/resources/test-conf.yml:
--------------------------------------------------------------------------------
1 | greeting: "Hello, %s"
2 | names:
3 | - Nick
4 | - Chris
5 | - Fiona
6 |
7 | db:
8 | driverClass: com.mysql.jdbc.Driver
9 | user: root
10 | password: ""
11 | url: jdbc:mysql:mxj://localhost:3309/test?server.basedir=./target/mysql&createDatabaseIfNotExist=true&server.initialize-user=true
12 |
--------------------------------------------------------------------------------
/core/src/test/scala/com/datasift/dropwizard/scala/ScalaApplicationSpecIT.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala
2 |
3 | import java.io.File
4 | import java.util.UUID
5 | import javax.ws.rs.client.Entity
6 |
7 | import com.codahale.metrics.MetricRegistry
8 | import com.datasift.dropwizard.jdbi.tweak.BindProduct
9 | import com.datasift.dropwizard.scala.test.{ApplicationTest, BeforeAndAfterAllMulti, LiquibaseTest, MySQLTest}
10 | import io.dropwizard.db.DataSourceFactory
11 | import org.scalatest.FlatSpec
12 | import com.datasift.dropwizard.scala.validation.constraints._
13 | import io.dropwizard.setup.Environment
14 | import io.dropwizard.Configuration
15 | import com.google.common.io.Resources
16 | import javax.ws.rs._
17 | import javax.ws.rs.core.{Form, MediaType}
18 |
19 | import org.skife.jdbi.v2.sqlobject.customizers.SingleValueResult
20 | import org.skife.jdbi.v2.sqlobject.{Bind, SqlQuery, SqlUpdate}
21 |
22 | import scala.util.{Success, Try}
23 |
24 | case class ScalaTestConfiguration(
25 | @NotEmpty greeting: Option[String] = None,
26 | @NotEmpty @Size(max = 5) names: List[String] = Nil,
27 | @NotNull @Valid db: DataSourceFactory
28 | ) extends Configuration
29 |
30 | @Consumes(Array(MediaType.APPLICATION_JSON))
31 | @Produces(Array(MediaType.APPLICATION_JSON))
32 | @Path("/") class ScalaTestResource(db: TestDAO, greeting: String, names: List[String]) {
33 |
34 | @GET def greet = greetWithList(names)
35 |
36 | @GET @Path("/maybe")
37 | def greetOrNotFound(@QueryParam("name") name: Option[String]): Option[List[String]] =
38 | name.map(greeting.format(_)).map(List(_))
39 |
40 | @GET @Path("/option")
41 | def greetWithOption(@QueryParam("name") name: Option[String]): List[String] =
42 | name.map(greeting.format(_)).toList
43 |
44 | @GET @Path("/option_echo_complex")
45 | def echoComplexOption(@QueryParam("uuid") uuid: Option[UUID]): List[Long] =
46 | uuid.map(n => List(n.getMostSignificantBits, n.getLeastSignificantBits)).getOrElse(Nil)
47 |
48 | @GET @Path("/list")
49 | def greetWithList(@QueryParam("names") names: List[String]): List[String] =
50 | greetNames(names)
51 |
52 | @GET @Path("/set")
53 | def greetWithSet(@QueryParam("names") names: Set[String]): List[String] =
54 | greetNames(names)
55 |
56 | @GET @Path("/vector")
57 | def greetWithVector(@QueryParam("names") names: Vector[String]): List[String] =
58 | greetNames(names)
59 |
60 | @GET @Path("/seq")
61 | def greetWithSeq(@QueryParam("names") names: Seq[String]): List[String] =
62 | greetNames(names)
63 |
64 | @GET @Path("/complex")
65 | def complexQuery(@QueryParam("names") names: Set[java.math.BigDecimal]): Option[Int] =
66 | names.headOption.map(_ multiply new java.math.BigDecimal(2)).map(_.intValue)
67 |
68 | @GET @Path("/complex_scala")
69 | def complexQueryScala(@QueryParam("names") names: Set[BigDecimal]): Option[Int] =
70 | names.headOption.map(_ * BigDecimal(2)).map(_.toInt)
71 |
72 | @GET @Path("/bigint")
73 | def bigint(@QueryParam("int") int: BigInt): Int = int.intValue
74 |
75 | @GET @Path("/either")
76 | def either(@QueryParam("name") name: Either[String, Integer]): Either[Throwable, String] = {
77 | name match {
78 | case Left(v) => Right(greeting.format(v))
79 | case Right(v) => Left(new Exception("Int"))
80 | }
81 | }
82 |
83 | @GET @Path("/try")
84 | def tryA(@QueryParam("name") name: Option[String]): Try[String] = Try {
85 | greeting.format(name.get)
86 | }
87 |
88 | @POST @Path("/db/separate") @Consumes(Array(MediaType.APPLICATION_FORM_URLENCODED))
89 | def insertSeparate(@FormParam("decimal") decimal: BigDecimal,
90 | @FormParam("option") option: Option[String]): Int = {
91 | db.insert(decimal, option)
92 | }
93 |
94 | @POST @Path("/db/row")
95 | def insertRow(row: Row): Int = {
96 | db.insert(row)
97 | }
98 |
99 | @POST @Path("/db/rowT")
100 | def insertRow(row: (BigDecimal, Option[String])): Int = {
101 | db.insertT(row)
102 | }
103 |
104 | @GET @Path("/db/row")
105 | def getRow: Option[Row] = {
106 | db.get()
107 | }
108 |
109 | private def greetNames(names: Iterable[String]): List[String] =
110 | names.map(greeting.format(_)).toList
111 |
112 | }
113 |
114 | object ScalaTestApp extends ScalaApplication[ScalaTestConfiguration] {
115 | import jdbi._
116 | def run(configuration: ScalaTestConfiguration, environment: Environment) {
117 | val dao = JDBI(environment, configuration.db, "test").daoFor[TestDAO]
118 | environment.jersey
119 | .register(new ScalaTestResource(dao, configuration.greeting.get, configuration.names))
120 | }
121 | }
122 |
123 | trait TestDAO {
124 |
125 | @SqlUpdate("INSERT INTO tbl (d, o) VALUES (:d, :o)")
126 | def insert(@Bind("d") x: BigDecimal,
127 | @Bind("o") y: Option[String]): Int
128 |
129 | @SqlUpdate("INSERT INTO tbl (d, o) VALUES (:row.d, :row.o)")
130 | def insert(@BindProduct("row") row: Row): Int
131 |
132 | @SqlUpdate("INSERT INTO tbl (d, o) VALUES (:row._1, :row._2)")
133 | def insertT(@BindProduct("row") row: (BigDecimal, Option[String])): Int
134 |
135 | @SingleValueResult
136 | @SqlQuery("SELECT d, o FROM tbl")
137 | def get(): Option[Row]
138 |
139 | @SqlQuery("select d from tbl")
140 | def debug(): String
141 | }
142 |
143 | case class Row(d: BigDecimal, o: Option[String])
144 |
145 | class ScalaApplicationSpecIT extends FlatSpec with BeforeAndAfterAllMulti {
146 |
147 | val fixture = "Homer" :: "Bart" :: "Lisa" :: Nil
148 | val dsFactory = new DataSourceFactory()
149 | dsFactory.setUrl("jdbc:mysql:mxj://localhost:3309/test?server.basedir=./target/mysql&createDatabaseIfNotExist=true&server.initialize-user=true")
150 | dsFactory.setDriverClass("com.mysql.jdbc.Driver")
151 | dsFactory.setUser("root")
152 |
153 | val db = MySQLTest(this, dsFactory.getUrl) {
154 | dsFactory.build(new MetricRegistry, "test")
155 | }
156 |
157 | val liquibase = LiquibaseTest(this, LiquibaseTest.Config(file = new File(Resources.getResource("migrations.xml").toURI).getAbsolutePath)) {
158 | dsFactory.build(new MetricRegistry, "migrations")
159 | }
160 |
161 | val app =
162 | ApplicationTest(
163 | this, new File(Resources.getResource("test-conf.yml").toURI).getAbsolutePath) {
164 | ScalaTestApp
165 | }
166 |
167 | lazy val client = app.newClient("test")
168 |
169 | def request() = for {
170 | client <- client
171 | server <- app.server
172 | } yield { client.target(server.getURI) }
173 |
174 | def request(target: String) = for {
175 | client <- client
176 | server <- app.server
177 | } yield { client.target(server.getURI.resolve(target)) }
178 |
179 | "GET /" should "greet with configured names" in {
180 | val expected = app.configuration
181 | .map(conf => conf.names.map(conf.greeting.getOrElse("%s").format(_)))
182 | val result = request().map(_.request().get(classOf[List[String]]))
183 | assert(result === expected)
184 | }
185 |
186 | "GET /list" should "not greet anyone when no names supplied" in {
187 | val expected = Success(List.empty[String])
188 | val result = request("/list").map {
189 | _.queryParam("names")
190 | .request(MediaType.APPLICATION_JSON)
191 | .get(classOf[List[String]])
192 | }
193 | assert(result === expected)
194 | }
195 |
196 | it should "greet with supplied names" in {
197 | val expected = app.configuration
198 | .map(conf => fixture.map(conf.greeting.getOrElse("%s").format(_)))
199 | val result = request("/list").map {
200 | _.queryParam("names", fixture: _*)
201 | .request(MediaType.APPLICATION_JSON)
202 | .get(classOf[List[String]])
203 | }
204 | assert(result === expected)
205 | }
206 |
207 | "GET /seq" should "greet with supplied names" in {
208 | val expected = app.configuration
209 | .map(conf => fixture.map(conf.greeting.getOrElse("%s").format(_)).toSeq)
210 | val result = request("/seq").map {
211 | _.queryParam("names", fixture: _*)
212 | .request(MediaType.APPLICATION_JSON)
213 | .get(classOf[Seq[String]])
214 | }
215 | assert(result === expected)
216 | }
217 |
218 | "GET /vector" should "greet with supplied names" in {
219 | val expected = app.configuration
220 | .map(conf => fixture.map(conf.greeting.getOrElse("%s").format(_)).toVector)
221 | val result = request("/vector").map {
222 | _.queryParam("names", fixture: _*)
223 | .request(MediaType.APPLICATION_JSON)
224 | .get(classOf[Vector[String]])
225 | }
226 | assert(result === expected)
227 | }
228 |
229 | "GET /set" should "greet with supplied names" in {
230 | val expected = app.configuration
231 | .map(conf => fixture.map(conf.greeting.getOrElse("%s").format(_)).toSet)
232 | val result = request("/set").map {
233 | _.queryParam("names", (fixture ++ fixture): _*)
234 | .request(MediaType.APPLICATION_JSON)
235 | .get(classOf[Set[String]])
236 | }
237 | assert(result === expected)
238 | }
239 |
240 | "GET /option" should "greet with supplied name" in {
241 | val expected = app.configuration
242 | .map(conf => fixture.map(conf.greeting.getOrElse("%s").format(_)).headOption.toIterable)
243 | val result = request("/option").map {
244 | _.queryParam("name", fixture.head)
245 | .request(MediaType.APPLICATION_JSON)
246 | .get(classOf[Iterable[String]])
247 | }
248 | assert(result === expected)
249 | }
250 |
251 | it should "not greet when no name supplied" in {
252 | val expected = Success(Iterable.empty[String])
253 | val result = request("/option").map {
254 | _.queryParam("name", null)
255 | .request(MediaType.APPLICATION_JSON)
256 | .get(classOf[Iterable[String]])
257 | }
258 | assert(result === expected)
259 | }
260 |
261 | "GET /option_echo_complex" should "echo UUID long list" in {
262 | val expected = Success(List(101L, 102L))
263 | val uuid = new UUID(101L, 102L)
264 |
265 | val result = request("/option_echo_complex").map {
266 | _.queryParam("uuid", uuid.toString)
267 | .request(MediaType.APPLICATION_JSON)
268 | .get(classOf[List[Long]])
269 | }
270 | assert(result === expected)
271 | }
272 |
273 | it should "echo empty list when no UUID supplied" in {
274 | val expected = Success(Nil)
275 | val result = request("/option_echo_complex").map {
276 | _.queryParam("uuid", null)
277 | .request(MediaType.APPLICATION_JSON)
278 | .get(classOf[List[Long]])
279 | }
280 | assert(result === expected)
281 | }
282 |
283 | "GET /maybe" should "greet with supplied name" in {
284 | val expected = app.configuration
285 | .map(conf => fixture.map(conf.greeting.getOrElse("%s").format(_)).headOption.toIterable)
286 | val result = request("/maybe").map {
287 | _.queryParam("name", fixture.head)
288 | .request(MediaType.APPLICATION_JSON)
289 | .get(classOf[Iterable[String]])
290 | }
291 | assert(result === expected)
292 | }
293 |
294 | it should "present Not Found error when no name supplied" in {
295 | val result = request("/maybe").map {
296 | _.queryParam("name", null)
297 | .request(MediaType.APPLICATION_JSON)
298 | .get(classOf[Iterable[String]])
299 | }
300 | assert(result.isFailure)
301 | assert(result.recover { case t: NotFoundException => Nil}.isSuccess)
302 | }
303 |
304 | "GET /complex" should "yield results" in {
305 | val fixture: Set[java.math.BigDecimal] =
306 | Set(new java.math.BigDecimal(1), new java.math.BigDecimal(2))
307 | val expected = 2
308 | val result = request("/complex").map {
309 | _.queryParam("names", fixture.toSeq: _*)
310 | .request(MediaType.APPLICATION_JSON)
311 | .get(classOf[Int])
312 | }
313 | assert(result === Success(expected))
314 | }
315 |
316 | "GET /complex_scala" should "yield results" in {
317 | val fixture: Set[BigDecimal] = Set(BigDecimal(1), BigDecimal(2))
318 | val expected = 2
319 | val result = request("/complex_scala").map {
320 | _.queryParam("names", fixture.toSeq: _*)
321 | .request(MediaType.APPLICATION_JSON)
322 | .get(classOf[Int])
323 | }
324 | assert(result === Success(expected))
325 | }
326 |
327 | "GET /either" should "produce failure" in {
328 | val result = request("/either").map {
329 | _.queryParam("name", 2.toString)
330 | .request(MediaType.APPLICATION_JSON)
331 | .get(classOf[String])
332 | }
333 | assert(result.isFailure)
334 | }
335 |
336 | it should "yield result" in {
337 | val expected = app.configuration
338 | .map(_.greeting.getOrElse("%s").format(fixture.head))
339 | val result = request("/either").map {
340 | _.queryParam("name", fixture.head)
341 | .request(MediaType.APPLICATION_JSON)
342 | .get(classOf[String])
343 | }
344 | assert(result === expected)
345 | }
346 |
347 | "GET /bigint" should "yield the number" in {
348 | val fixture = BigInt(500)
349 | val expected = Success(500)
350 | val result = request("/bigint").map {
351 | _.queryParam("int", fixture.toString)
352 | .request(MediaType.APPLICATION_JSON)
353 | .get(classOf[Int])
354 | }
355 | assert(result === expected)
356 | }
357 |
358 | "GET /try" should "yield the result on success" in {
359 | val expected = app.configuration
360 | .map(_.greeting.getOrElse("%s").format(fixture.head))
361 | val result = request("/try").map {
362 | _.queryParam("name", fixture.head)
363 | .request(MediaType.APPLICATION_JSON)
364 | .get(classOf[String])
365 | }
366 | assert(result === expected)
367 | }
368 |
369 | it should "yield an error, on error" in {
370 | val result = request("/try").map {
371 | _.request(MediaType.APPLICATION_JSON)
372 | .get(classOf[String])
373 | }
374 | assert(result.isFailure)
375 | assert(result.failed.get.isInstanceOf[InternalServerErrorException])
376 | }
377 |
378 | "POST /db/separate" should "write data" in {
379 | val form = new Form()
380 | .param("decimal", BigDecimal(12345.678).toString)
381 | .param("option", "Nick")
382 | val result = request("/db/separate").map {
383 | _.request(MediaType.APPLICATION_FORM_URLENCODED)
384 | .accept(MediaType.APPLICATION_JSON)
385 | .post(Entity.form(form), classOf[Int])
386 | }
387 | assert(result === Success(1))
388 | }
389 |
390 | "POST /db/row" should "write whole row" in {
391 | val result = request("/db/row").map {
392 | _.request(MediaType.APPLICATION_JSON)
393 | .post(Entity.json(Row(BigDecimal(12345.678), Option("Nick"))), classOf[Int])
394 | }
395 |
396 | assert(result === Success(1))
397 | }
398 |
399 | "POST /db/rowT" should "write whole row" in {
400 | val result = request("/db/rowT").map {
401 | _.request(MediaType.APPLICATION_JSON)
402 | .post(Entity.json((BigDecimal(12345.678), Option("Nick"))), classOf[Int])
403 | }
404 |
405 | assert(result === Success(1))
406 | }
407 |
408 | "GET /db/row" should "get whole row" in {
409 | val result = request("/db/row").map {
410 | _.request().get(classOf[Row])
411 | }
412 |
413 | assert(result === Success(Row(BigDecimal(12345.678), Option("Nick"))))
414 | }
415 | }
416 |
--------------------------------------------------------------------------------
/jdbi/build.sbt:
--------------------------------------------------------------------------------
1 | libraryDependencies ++= Seq(
2 | "io.dropwizard" % "dropwizard-jdbi" % Versions.dropwizard
3 | )
4 |
5 |
--------------------------------------------------------------------------------
/jdbi/src/main/java/com/datasift/dropwizard/jdbi/tweak/BindProduct.java:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.jdbi.tweak;
2 |
3 | import com.datasift.dropwizard.scala.jdbi.tweak.BindProductFactory;
4 | import org.skife.jdbi.v2.sqlobject.BindingAnnotation;
5 |
6 | import java.lang.annotation.ElementType;
7 | import java.lang.annotation.Retention;
8 | import java.lang.annotation.RetentionPolicy;
9 | import java.lang.annotation.Target;
10 |
11 | @Retention(RetentionPolicy.RUNTIME)
12 | @Target({ElementType.PARAMETER})
13 | @BindingAnnotation(BindProductFactory.class)
14 | public @interface BindProduct {
15 | String value() default "__jdbi_bare__";
16 | }
17 |
--------------------------------------------------------------------------------
/jdbi/src/main/scala/com/datasift/dropwizard/scala/jdbi/JDBI.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jdbi
2 |
3 | import io.dropwizard.db.DataSourceFactory
4 | import io.dropwizard.setup.Environment
5 | import io.dropwizard.jdbi.DBIFactory
6 | import org.skife.jdbi.v2._
7 |
8 | import com.datasift.dropwizard.scala.jdbi.tweak._
9 |
10 | /** Factory object for [[org.skife.jdbi.v2.DBI]] instances. */
11 | object JDBI {
12 |
13 | /** Creates a [[org.skife.jdbi.v2.DBI]] from the given configuration.
14 | *
15 | * The name of this instance will be the JDBC URL of the database.
16 | *
17 | * @param env environment to manage the database connection lifecycle.
18 | * @param conf configuration for the database connection.
19 | * @return a configured and managed [[org.skife.jdbi.v2.DBI]] instance.
20 | */
21 | def apply(env: Environment, conf: DataSourceFactory): DBI = {
22 | apply(env, conf, conf.getUrl)
23 | }
24 |
25 | /** Creates a [[org.skife.jdbi.v2.DBI]] from the given configuration.
26 | *
27 | * @param env environment to manage the database connection lifecycle.
28 | * @param conf configuration for the database connection.
29 | * @param name the name of this DBI instance.
30 | * @return a configured and managed [[org.skife.jdbi.v2.DBI]] instance.
31 | */
32 | def apply(env: Environment, conf: DataSourceFactory, name: String): DBI = {
33 | val dbi = new DBIFactory().build(env, conf, name)
34 |
35 | // register scala type factories
36 | dbi.registerArgumentFactory(new BigDecimalArgumentFactory)
37 | dbi.registerArgumentFactory(new OptionArgumentFactory(conf.getDriverClass))
38 | dbi.registerContainerFactory(new OptionContainerFactory)
39 | dbi.registerContainerFactory(new IterableContainerFactory[scala.collection.Seq])
40 | dbi.registerContainerFactory(new IterableContainerFactory[scala.collection.Set])
41 | dbi.registerMapper(new ProductResultSetMapperFactory)
42 | dbi
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/jdbi/src/main/scala/com/datasift/dropwizard/scala/jdbi/ResultSetDSL.scala:
--------------------------------------------------------------------------------
1 |
2 | package com.datasift.dropwizard.scala.jdbi
3 |
4 | import java.sql.{ResultSet, Timestamp}
5 | import java.util.Date
6 |
7 |
8 | /** Define some implicits to allow a more concise DSL using Tuples for extracting results from an SQL result set. */
9 | object ResultSetDSL {
10 |
11 | implicit def getString(krs: (ResultSet, String)): String =
12 | krs._1.getString(krs._2)
13 |
14 | implicit def getInt(krs: (ResultSet, String)): Int =
15 | krs._1.getInt(krs._2)
16 |
17 | implicit def getLong(krs: (ResultSet, String)): Long =
18 | krs._1.getLong(krs._2)
19 |
20 | implicit def getBoolean(krs: (ResultSet, String)): Boolean =
21 | krs._1.getBoolean(krs._2)
22 |
23 | implicit def getBytes(krs: (ResultSet, String)): Array[Byte] =
24 | krs._1.getBytes(krs._2)
25 |
26 | implicit def getDate(krs: (ResultSet, String)): Date =
27 | krs._1.getDate(krs._2)
28 |
29 | implicit def getTimestamp(krs: (ResultSet, String)): Timestamp =
30 | krs._1.getTimestamp(krs._2)
31 | }
32 |
33 |
--------------------------------------------------------------------------------
/jdbi/src/main/scala/com/datasift/dropwizard/scala/jdbi/SimpleMapper.scala:
--------------------------------------------------------------------------------
1 |
2 | package com.datasift.dropwizard.scala.jdbi
3 |
4 | import java.sql.ResultSet
5 |
6 | import org.skife.jdbi.v2.StatementContext
7 | import org.skife.jdbi.v2.tweak.ResultSetMapper
8 |
9 |
10 | /**
11 | * A simple mapper that only depends on the result set
12 | *
13 | * Example -
14 | * SimpleMapper[(String, String)](r => (r.getString("firstname"), r.getString("lastname")))
15 | *
16 |  * If used with [[ResultSetDSL]], the above example can be further simplified -
17 | * SimpleMapper[(String, String)](r => (r -> "firstname", r -> "lastname"))
18 | */
19 | class SimpleMapper[A](f: ResultSet => A) extends ResultSetMapper[A] {
20 | def map(idx: Int, rs: ResultSet, ctx: StatementContext): A = f(rs)
21 | }
22 |
23 |
--------------------------------------------------------------------------------
/jdbi/src/main/scala/com/datasift/dropwizard/scala/jdbi/package.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala
2 |
3 | import scala.reflect._
4 |
5 | import org.skife.jdbi.v2._
6 | import org.skife.jdbi.v2.sqlobject.mixins.Transactional
7 | import org.skife.jdbi.v2.tweak.HandleCallback
8 |
9 | /** Global definitions and implicits for JDBI. */
10 | package object jdbi {
11 |
12 | implicit final def JDBIWrapper(db: DBI) = new JDBIWrapper(db)
13 |
14 | /** Provides idiomatic Scala enhancements to the JDBI API.
15 | *
16 | * Examples -
17 | *
18 | * dbi.open[DAO] to open a handle and attach a new sql object of the specified type to that handle
19 | *
20 | * dbi.daoFor[DAO] to create a new sql object which obtains and releases connections
21 | * from this dbi instance as it needs them
22 | *
23 | * When in scope, you can create transactions using a for-comprehension. For instance -
24 | * {{{
25 | * for { handle <- dbi.transaction
26 | * dao1 <- handle.attachable[Dao1]
27 | * ...
28 | * daoN <- handle.attachable[DaoN] } yield {
29 | *
30 | * dao1.some_function()
31 | * ...
32 | * daoN.some_other_function()
33 | * }
34 | * }}}
35 | *
36 | * @param db the [[org.skife.jdbi.v2.DBI]] instance to wrap.
37 | */
38 | class JDBIWrapper private[jdbi](db: DBI) {
39 |
40 | /** Creates a typed DAO instance.
41 | *
42 | * @tparam T type of the DAO to create.
43 | * @return a DAO instance for the specified type.
44 | */
45 | def open[T : ClassTag]: T = db.open[T](classTag[T].runtimeClass.asInstanceOf[Class[T]])
46 |
47 | /** Creates an on-demand typed DAO instance.
48 | *
49 | * @tparam T type of the DAO to create.
50 | * @return an on-demand DAO instance for the specified type.
51 | */
52 | def daoFor[T : ClassTag]: T = db.onDemand[T](classTag[T].runtimeClass.asInstanceOf[Class[T]])
53 |
54 | /** Executes the given function within a transaction.
55 | *
56 | * @tparam A the return type of the function to execute.
57 | * @param f the function to execute within the transaction.
58 | * @return the result of the function.
59 | * @throws Exception if an Exception is thrown by the function, the transaction will be
60 | * rolled-back.
61 | */
62 | def inTransaction[A](f: (Handle, TransactionStatus) => A): A = {
63 | db.inTransaction(new TransactionCallback[A] {
64 | def inTransaction(handle: Handle, status: TransactionStatus): A = f(handle, status)
65 | })
66 | }
67 |
68 | /** Executes the given function within a transaction.
69 | *
70 | * @tparam A the return type of the function to execute.
71 | * @param f the function to execute within the transaction.
72 | * @return the result of the function.
73 | * @throws Exception if an Exception is thrown by the function, the transaction will be
74 | * rolled-back.
75 | */
76 | def inTransaction[A](f: Handle => A): A = {
77 | db.inTransaction(new TransactionCallback[A] {
78 | def inTransaction(handle: Handle, status: TransactionStatus): A = f(handle)
79 | })
80 | }
81 |
82 | /** Executes the given function within a transaction of the given isolation level.
83 | *
84 | * @tparam A the return type of the function to execute.
85 | * @param isolation the isolation level for the transaction.
86 | * @param f the function to execute within the transaction.
87 | * @return the result of the function.
88 | * @throws Exception if an Exception is thrown by the function, the transaction will be
89 | * rolled-back.
90 | * @deprecated This method cannot be called. See https://issues.scala-lang.org/browse/SI-8021
91 | * Use JDBIWrapper#inTransactionWithIsolation.
92 | */
93 | def inTransaction[A](isolation: TransactionIsolationLevel)
94 | (f: (Handle, TransactionStatus) => A): A = {
95 | db.inTransaction(isolation, new TransactionCallback[A] {
96 | def inTransaction(handle: Handle, status: TransactionStatus): A = f(handle, status)
97 | })
98 | }
99 |
100 | /** Executes the given function within a transaction of the given isolation level.
101 | *
102 | * @tparam A the return type of the function to execute.
103 | * @param isolation the isolation level for the transaction.
104 | * @param f the function to execute within the transaction.
105 | * @return the result of the function.
106 | * @throws Exception if an Exception is thrown by the function, the transaction will be
107 | * rolled-back.
108 | * @deprecated This method cannot be called. See https://issues.scala-lang.org/browse/SI-8021
109 | * Use JDBIWrapper#inTransactionWithIsolation.
110 | */
111 | def inTransaction[A](isolation: TransactionIsolationLevel)
112 | (f: Handle => A): A = {
113 | db.inTransaction(isolation, new TransactionCallback[A] {
114 | def inTransaction(handle: Handle, status: TransactionStatus): A = f(handle)
115 | })
116 | }
117 |
118 | /** Executes the given function within a transaction of the given isolation level.
119 | * This method has been added to break the ambiguity of the methods above.
120 | *
121 | * @tparam A the return type of the function to execute.
122 | * @param isolation the isolation level for the transaction.
123 | * @param f the function to execute within the transaction.
124 | * @return the result of the function.
125 | * @throws Exception if an Exception is thrown by the function, the transaction will be
126 | * rolled-back.
127 | */
128 | def inTransactionWithIsolation[A](isolation: TransactionIsolationLevel)(f: (Handle, TransactionStatus) => A): A = {
129 | db.inTransaction(isolation, new TransactionCallback[A] {
130 | def inTransaction(handle: Handle, status: TransactionStatus): A = f(handle, status)
131 | })
132 | }
133 |
134 | /** Applies the given function with a DBI [[org.skife.jdbi.v2.Handle]].
135 | *
136 | * @tparam A the return type of the function to apply.
137 | * @param f the function to apply the handle to.
138 | * @return the result of applying the function.
139 | * @throws Exception if an Exception is thrown by the function.
140 | */
141 | def withHandle[A](f: Handle => A): A = {
142 | db.withHandle(new HandleCallback[A] {
143 | def withHandle(handle: Handle): A = f(handle)
144 | })
145 | }
146 |
147 | /** Extends this DBI to support for-comprehensions for transactions. */
148 | def transaction: JDBITransactionWrapper =
149 | new JDBITransactionWrapper(this)
150 | }
151 |
152 | /** Provides for-comprehension support for composable transactions */
153 | class JDBITransactionWrapper private[jdbi] (dbi: JDBIWrapper) {
154 | def map[A](f: Handle => A): A = dbi.inTransaction(f)
155 | def flatMap[A](f: Handle => A): A = map(f)
156 | def foreach(f: Handle => Unit): Unit = map(f)
157 | }
158 |
159 | implicit final def HandleWrapper(handle: Handle): HandleWrapper = new HandleWrapper(handle)
160 |
161 | /** Provides idiomatic Scala enhancements to the JDBI API.
162 | *
163 | * @param handle the [[org.skife.jdbi.v2.Handle]] instance to wrap.
164 | */
165 | class HandleWrapper private[jdbi] (handle: Handle) {
166 |
167 | /** Creates a typed DAO instance attached to this [[org.skife.jdbi.v2.Handle]].
168 | *
169 | * @tparam A type of the DAO to create.
170 | * @return a DAO instance for the specified type.
171 | */
172 | def attach[A : ClassTag]: A = {
173 | handle.attach(classTag[A].runtimeClass.asInstanceOf[Class[A]])
174 | }
175 |
176 | /** Extends this [[org.skife.jdbi.v2.Handle]] to support the creation of typed DAOs through for-comprehensions. */
177 | def attachable[A : ClassTag]: HandleDaoWrapper[A] =
178 | new HandleDaoWrapper[A](handle, classTag[A].runtimeClass.asInstanceOf[Class[A]])
179 |
180 | /** Executes the given function within a transaction.
181 | *
182 | * @tparam A the return type of the function to execute.
183 | * @param f the function to execute within the transaction.
184 | * @return the result of the function.
185 | * @throws Exception if an Exception is thrown by the function, the transaction will be
186 | * rolled-back.
187 | */
188 | def inTransaction[A](f: Handle => A): A = {
189 | handle.inTransaction(new TransactionCallback[A] {
190 | def inTransaction(conn: Handle, status: TransactionStatus): A = f(conn)
191 | })
192 | }
193 |
194 | /** Executes the given function within a transaction.
195 | *
196 | * @tparam A the return type of the function to execute.
197 | * @param f the function to execute within the transaction.
198 | * @return the result of the function.
199 | * @throws Exception if an Exception is thrown by the function, the transaction will be
200 | * rolled-back.
201 | */
202 | def inTransaction[A](f: (Handle, TransactionStatus) => A): A = {
203 | handle.inTransaction(new TransactionCallback[A] {
204 | def inTransaction(conn: Handle, status: TransactionStatus): A = f(conn, status)
205 | })
206 | }
207 |
208 | /** Executes the given function within a transaction.
209 | *
210 | * @tparam A the return type of the function to execute.
211 | * @param isolation the isolation level for the transaction.
212 | * @param f the function to execute within the transaction.
213 | * @return the result of the function.
214 | * @throws Exception if an Exception is thrown by the function, the transaction will be
215 | * rolled-back.
216 | */
217 | def inTransaction[A](isolation: TransactionIsolationLevel)
218 | (f: Handle => A): A = {
219 | handle.inTransaction(isolation, new TransactionCallback[A] {
220 | def inTransaction(conn: Handle, status: TransactionStatus): A = f(conn)
221 | })
222 | }
223 |
224 | /** Executes the given function within a transaction.
225 | *
226 | * @tparam A the return type of the function to execute.
227 | * @param isolation the isolation level for the transaction.
228 | * @param f the function to execute within the transaction.
229 | * @return the result of the function.
230 | * @throws Exception if an Exception is thrown by the function, the transaction will be
231 | * rolled-back.
232 | */
233 | def inTransaction[A](isolation: TransactionIsolationLevel)
234 | (f: (Handle, TransactionStatus) => A): A = {
235 | handle.inTransaction(isolation, new TransactionCallback[A] {
236 | def inTransaction(conn: Handle, status: TransactionStatus): A = f(conn, status)
237 | })
238 | }
239 |
240 | }
241 |
242 | class HandleDaoWrapper[A] private [jdbi] (handle: Handle, clazz: Class[A]) {
243 | //require(handle.isInTransaction, "handle must be in a transaction")
244 | def map[B](f: A => B): B = f(handle.attach(clazz))
245 | def flatMap[B](f: A => B): B = map(f)
246 | def foreach(f: A => Unit): Unit = map(f)
247 | }
248 |
249 | implicit final def TransactionalWrapper[A <: Transactional[A]](transactional: A): TransactionalWrapper[A] =
250 | new TransactionalWrapper[A](transactional)
251 |
252 | /** Provides enhancements to the Dropwizard jDBI API for transactional DAOs.
253 | *
254 | * @param transactional the [[org.skife.jdbi.v2.sqlobject.mixins.Transactional]] object to wrap.
255 | */
256 | class TransactionalWrapper[A <: Transactional[A]] private[jdbi] (transactional: A) {
257 |
258 | /** Executes the given function within a transaction of the given isolation level.
259 | *
260 | * @tparam B the type of the result of the function being executed.
261 | * @param isolation the isolation level for the transaction.
262 | * @param f the function on this object to execute within the transaction.
263 | * @return the result of the function being executed.
264 | * @throws Exception if an Exception is thrown by the function, the transaction will be
265 | * rolled-back.
266 | */
267 | def inTransaction[B](isolation: TransactionIsolationLevel)
268 | (f: A => B): B = {
269 | transactional.inTransaction[B](isolation, new Transaction[B, A] {
270 | def inTransaction(tx: A, status: TransactionStatus): B = f(tx)
271 | })
272 | }
273 |
274 | /** Executes the given function within a transaction of the given isolation level.
275 | *
276 | * @tparam B the type of the result of the function being executed.
277 | * @param isolation the isolation level for the transaction.
278 | * @param f the function on this object to execute within the transaction.
279 | * @return the result of the function being executed.
280 | * @throws Exception if an Exception is thrown by the function, the transaction will be
281 | * rolled-back.
282 | */
283 | def inTransaction[B](isolation: TransactionIsolationLevel)
284 | (f: (A, TransactionStatus) => B): B = {
285 | transactional.inTransaction[B](isolation, new Transaction[B, A] {
286 | def inTransaction(tx: A, status: TransactionStatus): B = f(tx, status)
287 | })
288 | }
289 |
290 | /** Executes the given function within a transaction.
291 | *
292 | * @tparam B the type of the result of the function being executed.
293 | * @param f the function on this object to execute within the transaction.
294 | * @return the result of the function being executed.
295 | * @throws Exception if an Exception is thrown by the function, the transaction will be
296 | * rolled-back.
297 | */
298 | def inTransaction[B](f: A => B): B = {
299 | transactional.inTransaction[B](new Transaction[B, A] {
300 | def inTransaction(tx: A, status: TransactionStatus): B = f(tx)
301 | })
302 | }
303 |
304 |
305 | /** Executes the given function within a transaction.
306 | *
307 | * @tparam B the type of the result of the function being executed.
308 | * @param f the function on this object to execute within the transaction.
309 | * @return the result of the function being executed.
310 | * @throws Exception if an Exception is thrown by the function, the transaction will be
311 | * rolled-back.
312 | */
313 | def inTransaction[B](f: (A, TransactionStatus) => B): B = {
314 | transactional.inTransaction[B](new Transaction[B, A] {
315 | def inTransaction(tx: A, status: TransactionStatus): B = f(tx, status)
316 | })
317 | }
318 | }
319 | }
320 |
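Note: a sketch of these enrichments in use. `UserDao` and its query are assumptions for illustration, not part of the source:

    import com.datasift.dropwizard.scala.jdbi._
    import org.skife.jdbi.v2.{DBI, Handle}
    import org.skife.jdbi.v2.sqlobject.SqlQuery

    trait UserDao {
      @SqlQuery("select count(*) from users")
      def countUsers(): Long
    }

    def example(dbi: DBI): Long = {
      val dao = dbi.daoFor[UserDao]                // on-demand SQL object
      dbi.inTransaction { (h: Handle) => h.attach[UserDao].countUsers() }

      // composing DAOs within a single transaction via a for-comprehension:
      for {
        handle <- dbi.transaction
        users  <- handle.attachable[UserDao]
      } yield users.countUsers()
    }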
--------------------------------------------------------------------------------
/jdbi/src/main/scala/com/datasift/dropwizard/scala/jdbi/tweak/BigDecimalArgumentFactory.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jdbi.tweak
2 |
3 | import java.sql.{Types, PreparedStatement}
4 |
5 | import org.skife.jdbi.v2.StatementContext
6 | import org.skife.jdbi.v2.tweak.{Argument, ArgumentFactory}
7 |
8 | class BigDecimalArgumentFactory extends ArgumentFactory[BigDecimal] {
9 |
10 | override def accepts(expectedType: Class[_],
11 | value: Any,
12 | ctx: StatementContext): Boolean =
13 | value.isInstanceOf[BigDecimal]
14 |
15 | override def build(expectedType: Class[_],
16 | value: BigDecimal,
17 | ctx: StatementContext): Argument =
18 | new BigDecimalArgument(value)
19 | }
20 |
21 | class BigDecimalArgument(value: BigDecimal) extends Argument {
22 |
23 | override def apply(position: Int,
24 | statement: PreparedStatement,
25 | ctx: StatementContext): Unit = value match {
26 | case null => statement.setNull(position, Types.NUMERIC)
27 | case _ => statement.setBigDecimal(position, value.bigDecimal)
28 | }
29 |
30 | override def toString = String.valueOf(value)
31 | }
32 |
33 |
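Note: a registration sketch, assuming a `DBI` instance named `dbi`. Once registered, `scala.math.BigDecimal` values should bind like any other argument:

    dbi.registerArgumentFactory(new BigDecimalArgumentFactory)

    // afterwards, scala.math.BigDecimal binds directly, e.g.:
    //   handle.createStatement("insert into prices (amount) values (:amount)")
    //     .bind("amount", BigDecimal("19.99"))
    //     .execute()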
--------------------------------------------------------------------------------
/jdbi/src/main/scala/com/datasift/dropwizard/scala/jdbi/tweak/BindProductFactory.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jdbi.tweak
2 |
3 | import com.datasift.dropwizard.jdbi.tweak.BindProduct
4 | import org.skife.jdbi.v2.SQLStatement
5 | import org.skife.jdbi.v2.sqlobject.{Binder, BinderFactory}
6 |
7 | class BindProductFactory extends BinderFactory[BindProduct] {
8 | override def build(annotation: BindProduct): Binder[BindProduct, Product] = {
9 | new Binder[BindProduct, Product] {
10 | override def bind(q: SQLStatement[_],
11 | bind: BindProduct,
12 | arg: Product): Unit = {
13 | val prefix = if (bind.value == "__jdbi_bare__") "" else bind.value + "."
14 | val fields = arg.getClass.getDeclaredFields
15 | val methods = arg.getClass.getDeclaredMethods.collect {
16 | case m if m.getParameterCount == 0 => m.getName -> m
17 | }.toMap
18 |
19 | for {
20 | field <- fields
21 | } {
22 | val name = field.getName
23 | val value = methods.get(name)
24 | .map(_.invoke(arg))
25 | .getOrElse(field.get(arg)) match {
26 | case None => null
27 | case Some(x) => x
28 | case x => x
29 | }
30 |
31 | q.bind(prefix + name, value)
32 | }
33 | }
34 | }
35 | }
36 | }
37 |
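Note: a hypothetical DAO showing the intended use; `User` and the "users" table are assumptions. Each field of the bound product becomes a named parameter under the annotation's prefix (or bare when the default value is used), and `Option` fields bind `None` as SQL NULL:

    import org.skife.jdbi.v2.sqlobject.SqlUpdate
    import com.datasift.dropwizard.jdbi.tweak.BindProduct

    case class User(name: String, email: Option[String])

    trait UserDao {
      @SqlUpdate("insert into users (name, email) values (:u.name, :u.email)")
      def insert(@BindProduct("u") user: User): Int
    }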
--------------------------------------------------------------------------------
/jdbi/src/main/scala/com/datasift/dropwizard/scala/jdbi/tweak/IterableContainerFactory.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jdbi.tweak
2 |
3 | import org.skife.jdbi.v2.ContainerBuilder
4 | import org.skife.jdbi.v2.tweak.ContainerFactory
5 | import scala.collection.generic.CanBuildFrom
6 | import scala.reflect.ClassTag
7 |
8 | /** A [[org.skife.jdbi.v2.tweak.ContainerFactory]] for Scala collections.
9 | *
10 | * @tparam CC the collection type to build.
11 | * @param tag type tag for collection for reification of generic type.
12 | * @param cbf functional dependency for collection builder.
13 | */
14 | class IterableContainerFactory[CC[_] <: Iterable[_]]
15 | (implicit tag: ClassTag[CC[_]], cbf: CanBuildFrom[CC[_], Any, CC[Any]])
16 | extends ContainerFactory[CC[Any]] {
17 |
18 | def accepts(clazz: Class[_]): Boolean = tag.runtimeClass.isAssignableFrom(clazz)
19 |
20 | def newContainerBuilderFor(clazz: Class[_]): ContainerBuilder[CC[Any]] = {
21 | new ContainerBuilder[CC[Any]] {
22 |
23 | val builder = cbf()
24 |
25 | def add(it: Any): ContainerBuilder[CC[Any]] = {
26 | builder += it
27 | this
28 | }
29 |
30 | def build(): CC[Any] = builder.result()
31 | }
32 | }
33 | }
34 |
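Note: a registration sketch, assuming a `DBI` instance named `dbi`; the `ClassTag` and `CanBuildFrom` are supplied implicitly by the standard library:

    dbi.registerContainerFactory(new IterableContainerFactory[Seq])
    dbi.registerContainerFactory(new IterableContainerFactory[Set])

    // an SQL-object query may then declare a Scala collection as its return type, e.g.:
    //   @SqlQuery("select name from users") def names: Seq[String]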
--------------------------------------------------------------------------------
/jdbi/src/main/scala/com/datasift/dropwizard/scala/jdbi/tweak/OptionArgumentFactory.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jdbi.tweak
2 |
3 | import java.sql.{PreparedStatement, Types}
4 |
5 | import org.skife.jdbi.v2.tweak.{ArgumentFactory, Argument}
6 | import org.skife.jdbi.v2.StatementContext
7 |
8 | class OptionArgumentFactory(driver: String) extends ArgumentFactory[Option[_]] {
9 |
10 | def accepts(expectedType: Class[_], value: Any, ctx: StatementContext): Boolean = {
11 | value.isInstanceOf[Option[_]]
12 | }
13 |
14 | def build(expectedType: Class[_], value: Option[_], ctx: StatementContext): Argument = {
15 | driver match {
16 | case "com.microsoft.sqlserver.jdbc.SQLServerDriver" => new Argument {
17 | def apply(position: Int, statement: PreparedStatement, ctx: StatementContext): Unit = {
18 | statement.setObject(position, value.orNull)
19 | }
20 | }
21 | case _ => new Argument {
22 | def apply(position: Int, statement: PreparedStatement, ctx: StatementContext): Unit = {
23 | value match {
24 | case Some(value) => statement.setObject(position, value)
25 | case None => statement.setNull(position, Types.OTHER)
26 | }
27 | }
28 | }
29 | }
30 | }
31 | }
32 |
33 |
34 |
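Note: the driver class name only selects the binding strategy for `None`: the SQL Server driver gets `setObject(position, null)`, while all other drivers get `setNull(position, Types.OTHER)`. A registration sketch, with an assumed driver class:

    dbi.registerArgumentFactory(new OptionArgumentFactory("org.postgresql.Driver"))
    // Some(x) binds x; None binds SQL NULL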
--------------------------------------------------------------------------------
/jdbi/src/main/scala/com/datasift/dropwizard/scala/jdbi/tweak/OptionContainerFactory.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jdbi.tweak
2 |
3 | import org.skife.jdbi.v2.ContainerBuilder
4 | import org.skife.jdbi.v2.tweak.ContainerFactory
5 |
6 | /** A [[org.skife.jdbi.v2.tweak.ContainerFactory]] for Scala Options. */
7 | class OptionContainerFactory extends ContainerFactory[Option[Any]] {
8 |
9 | def accepts(clazz: Class[_]): Boolean =
10 | classOf[Option[_]].isAssignableFrom(clazz)
11 |
12 | def newContainerBuilderFor(clazz: Class[_]): ContainerBuilder[Option[Any]] = {
13 | new ContainerBuilder[Option[Any]] {
14 |
15 | var option: Option[Any] = None
16 |
17 | def add(it: Any): ContainerBuilder[Option[Any]] = {
18 | option = Option(it)
19 | this
20 | }
21 |
22 | def build(): Option[Any] = option
23 | }
24 | }
25 | }
26 |
27 |
28 |
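Note: with this factory registered, single-row lookups can return `Option` directly; if a query yields multiple rows, the last one wins (see the spec later in this module). A sketch with assumed names:

    dbi.registerContainerFactory(new OptionContainerFactory)

    //   @SqlQuery("select email from users where id = :id")
    //   def emailFor(@Bind("id") id: Long): Option[String]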
--------------------------------------------------------------------------------
/jdbi/src/main/scala/com/datasift/dropwizard/scala/jdbi/tweak/ProductResultSetMapperFactory.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jdbi.tweak
2 |
3 | import java.lang.reflect.{InvocationTargetException, Constructor}
4 | import java.sql.{Time, Timestamp, ResultSet}
5 | import java.util.Date
6 |
7 | import org.skife.jdbi.v2.tweak.ResultSetMapper
8 | import org.skife.jdbi.v2.{StatementContext, ResultSetMapperFactory}
9 |
10 | class ProductResultSetMapperFactory extends ResultSetMapperFactory {
11 |
12 | override def accepts(tpe: Class[_],
13 | ctx: StatementContext): Boolean =
14 | classOf[Product].isAssignableFrom(tpe)
15 |
16 | override def mapperFor(tpe: Class[_],
17 | ctx: StatementContext): ResultSetMapper[_] = {
18 | new ProductResultSetMapper(tpe.asInstanceOf[Class[_ <: Product]])
19 | }
20 | }
21 |
22 | class ProductResultSetMapper[A <: Product](tpe: Class[A])
23 | extends ResultSetMapper[A] {
24 |
25 | // use the first declared constructor (for a case class, its primary constructor)
26 | private[this] val ctor = tpe.getConstructors.head.asInstanceOf[Constructor[A]]
27 |
28 | override def map(index: Int,
29 | rs: ResultSet,
30 | ctx: StatementContext): A = {
31 | // use constructor parameter types to determine type to marshall column
32 | // from
33 | val types = ctor.getParameterTypes
34 | val values = for {
35 | (t, idx) <- types.zipWithIndex
36 | } yield {
37 | val i = idx + 1
38 | t match {
39 | // todo: do we need to explicitly match on the java variations of these types too?
40 | case _ if t.isAssignableFrom(classOf[Option[_]]) =>
41 | Option(rs.getObject(i))
42 | case _ if !t.isPrimitive && (rs.getObject(i) == null) =>
43 | null
44 | case _ if t.isAssignableFrom(classOf[Boolean]) =>
45 | java.lang.Boolean.valueOf(rs.getBoolean(i))
46 | case _ if t.isAssignableFrom(classOf[Byte]) =>
47 | java.lang.Byte.valueOf(rs.getByte(i))
48 | case _ if t.isAssignableFrom(classOf[Short]) =>
49 | java.lang.Short.valueOf(rs.getShort(i))
50 | case _ if t.isAssignableFrom(classOf[Int]) =>
51 | java.lang.Integer.valueOf(rs.getInt(i))
52 | case _ if t.isAssignableFrom(classOf[Long]) =>
53 | java.lang.Long.valueOf(rs.getLong(i))
54 | case _ if t.isAssignableFrom(classOf[Double]) =>
55 | java.lang.Double.valueOf(rs.getDouble(i))
56 | case _ if t.isAssignableFrom(classOf[Float]) =>
57 | java.lang.Float.valueOf(rs.getFloat(i))
58 | case _ if t.isAssignableFrom(classOf[java.math.BigDecimal]) =>
59 | rs.getBigDecimal(i)
60 | case _ if t.isAssignableFrom(classOf[BigDecimal]) =>
61 | BigDecimal(rs.getBigDecimal(i))
62 | case _ if t.isAssignableFrom(classOf[Timestamp]) =>
63 | rs.getTimestamp(i)
64 | case _ if t.isAssignableFrom(classOf[Time]) =>
65 | rs.getTime(i)
66 | case _ if t.isAssignableFrom(classOf[Date]) =>
67 | rs.getDate(i)
68 | case _ if t.isAssignableFrom(classOf[String]) =>
69 | rs.getString(i)
70 | case _ if t.isEnum =>
71 | Option(rs.getString(i))
72 | .flatMap(x => t.getEnumConstants.find(_.asInstanceOf[Enum[_]].name() == x))
73 | .orNull
74 | case _ => rs.getObject(i)
75 | }
76 | }
77 |
78 | try {
79 | ctor.newInstance(values: _*)
80 | } catch {
81 | case e: IllegalAccessException => throw new IllegalArgumentException(
82 | s"Constructor for ${tpe.getSimpleName} inaccessible", e)
83 | case e: InvocationTargetException =>
84 | throw new IllegalArgumentException("Constructor threw Exception", e)
85 | case e: InstantiationException => throw new IllegalArgumentException(
86 | s"Cannot create instances of abstract class: ${tpe.getSimpleName}",
87 | e)
88 | case e: ExceptionInInitializerError =>
89 | throw new IllegalArgumentException(
90 | s"Failed to initialize object of type: ${tpe.getSimpleName}", e)
91 | }
92 | }
93 | }
94 |
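Note: a registration sketch. Rows are mapped positionally onto the first constructor's parameters, so the column order in the query must match the case class (names below are assumed):

    dbi.registerMapper(new ProductResultSetMapperFactory)

    // case class UserRow(id: Long, name: String, email: Option[String])
    //   @SqlQuery("select id, name, email from users")
    //   def all: java.util.List[UserRow]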
--------------------------------------------------------------------------------
/jdbi/src/test/scala/com/datasift/dropwizard/scala/jdbi/tweak/BigDecimalArgumentFactorySpec.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jdbi.tweak
2 |
3 | import java.sql.{Types, PreparedStatement}
4 |
5 | import org.scalatest.FlatSpec
6 | import org.scalatest.mockito.MockitoSugar
7 | import org.skife.jdbi.v2.StatementContext
8 | import org.mockito.Mockito._
9 | import org.mockito.ArgumentMatchers.{eq => equalTo}
10 |
11 | class BigDecimalArgumentFactorySpec extends FlatSpec with MockitoSugar {
12 |
13 | val factory = new BigDecimalArgumentFactory
14 | val ctx = mock[StatementContext]
15 |
16 | "BigDecimalArgumentFactory" should
17 | "accept scala.math.BigDecimal arguments" in {
18 |
19 | assert(factory.accepts(classOf[BigDecimal], BigDecimal(123.456), ctx))
20 | }
21 |
22 | it should "reject java.math.BigDecimal arguments" in {
23 | assert(!factory.accepts(
24 | classOf[java.math.BigDecimal], new java.math.BigDecimal(123.456), ctx))
25 | }
26 |
27 | it should "reject scala.math.BigInt" in {
28 | assert(!factory.accepts(classOf[BigInt], BigInt(123), ctx))
29 | }
30 |
31 | it should "bind an argument for scala.math.BigDecimal" in {
32 | val stmt = mock[PreparedStatement]
33 | val arg = factory.build(classOf[BigDecimal], BigDecimal(123.456), ctx)
34 | arg.apply(1, stmt, ctx)
35 |
36 | verify(stmt).setBigDecimal(
37 | equalTo(1),
38 | equalTo(BigDecimal(123.456).bigDecimal))
39 | verifyNoMoreInteractions(stmt)
40 | }
41 |
42 | it should "bind null when null argument" in {
43 | val stmt = mock[PreparedStatement]
44 | val arg = factory.build(classOf[BigDecimal], null, ctx)
45 | arg.apply(1, stmt, ctx)
46 |
47 | verify(stmt).setNull(equalTo(1), equalTo(Types.NUMERIC))
48 | verifyNoMoreInteractions(stmt)
49 | }
50 |
51 | }
52 |
--------------------------------------------------------------------------------
/jdbi/src/test/scala/com/datasift/dropwizard/scala/jdbi/tweak/IterableContainerFactorySpec.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jdbi.tweak
2 |
3 | import org.scalatest.FlatSpec
4 |
5 | import scala.collection.immutable.{SortedSet, HashSet}
6 |
7 | /**
8 | * Tests [[com.datasift.dropwizard.scala.jdbi.tweak.IterableContainerFactory]]
9 | */
10 | class IterableContainerFactorySpec extends FlatSpec {
11 |
12 | "IterableContainerFactory for Seqs" should "accept Seqs" in {
13 | assert(new IterableContainerFactory[Seq].accepts(classOf[Seq[Int]]))
14 | }
15 |
16 | it should "accept Lists" in {
17 | assert(new IterableContainerFactory[Seq].accepts(classOf[List[Int]]))
18 | }
19 |
20 | it should "accept Vectors" in {
21 | assert(new IterableContainerFactory[Seq].accepts(classOf[Vector[Int]]))
22 | }
23 |
24 | it should "build an empty Seq" in {
25 | assert(new IterableContainerFactory[Seq].newContainerBuilderFor(classOf[Int])
26 | .build().isEmpty)
27 | }
28 |
29 | it should "build a Seq of Ints on demand" in {
30 | assert(new IterableContainerFactory[Seq].newContainerBuilderFor(classOf[Int]).add(123)
31 | .build() === Seq(123))
32 | }
33 |
34 | it should "build a Seq of Strings on demand" in {
35 | assert(new IterableContainerFactory[Seq].newContainerBuilderFor(classOf[String]).add("abc").add("def")
36 | .build() === Seq("abc", "def"))
37 | }
38 |
39 |
40 | "IterableContainerFactory for Sets" should "accept Sets" in {
41 | assert(new IterableContainerFactory[Set].accepts(classOf[Set[Int]]))
42 | }
43 |
44 | it should "accept SortedSets" in {
45 | assert(new IterableContainerFactory[Set].accepts(classOf[SortedSet[Int]]))
46 | }
47 |
48 | it should "accept HashSets" in {
49 | assert(new IterableContainerFactory[Set].accepts(classOf[HashSet[Int]]))
50 | }
51 |
52 | it should "build an empty Set" in {
53 | assert(new IterableContainerFactory[Set].newContainerBuilderFor(classOf[Int])
54 | .build().isEmpty)
55 | }
56 |
57 | it should "build a Set of Ints on demand" in {
58 | assert(new IterableContainerFactory[Set].newContainerBuilderFor(classOf[Int]).add(123)
59 | .build() === Set(123))
60 | }
61 |
62 | it should "build a Set of Strings on demand" in {
63 | assert(new IterableContainerFactory[Set].newContainerBuilderFor(classOf[String]).add("abc").add("def")
64 | .build() === Set("abc", "def"))
65 | }
66 |
67 | "IterableContainerFactory for mutable Seqs" should "accept mutable Seqs" in {
68 | assert(new IterableContainerFactory[scala.collection.mutable.Seq].accepts(classOf[scala.collection.mutable.Seq[Int]]))
69 | }
70 |
71 | it should "accept LinkedLists" in {
72 | assert(new IterableContainerFactory[scala.collection.mutable.Seq].accepts(classOf[scala.collection.mutable.LinkedList[Int]]))
73 | }
74 |
75 | it should "accept Buffers" in {
76 | assert(new IterableContainerFactory[scala.collection.mutable.Seq].accepts(classOf[scala.collection.mutable.Buffer[Int]]))
77 | }
78 |
79 | it should "build an empty Seq" in {
80 | assert(new IterableContainerFactory[scala.collection.mutable.Seq].newContainerBuilderFor(classOf[Int])
81 | .build().isEmpty)
82 | }
83 |
84 | it should "build a Seq of Ints on demand" in {
85 | assert(new IterableContainerFactory[scala.collection.mutable.Seq].newContainerBuilderFor(classOf[Int]).add(123)
86 | .build() === Seq(123))
87 | }
88 |
89 | it should "build a Seq of Strings on demand" in {
90 | assert(new IterableContainerFactory[scala.collection.mutable.Seq].newContainerBuilderFor(classOf[String]).add("abc").add("def")
91 | .build() === Seq("abc", "def"))
92 | }
93 |
94 |
95 | "IterableContainerFactory for mutable Sets" should "accept mutable Sets" in {
96 | assert(new IterableContainerFactory[scala.collection.mutable.Set].accepts(classOf[scala.collection.mutable.Set[Int]]))
97 | }
98 |
99 | it should "accept mutable HashSets" in {
100 | assert(new IterableContainerFactory[scala.collection.mutable.Set].accepts(classOf[scala.collection.mutable.HashSet[Int]]))
101 | }
102 |
103 | it should "build an empty Set" in {
104 | assert(new IterableContainerFactory[scala.collection.mutable.Set].newContainerBuilderFor(classOf[Int])
105 | .build().isEmpty)
106 | }
107 |
108 | it should "build a Set of Ints on demand" in {
109 | assert(new IterableContainerFactory[scala.collection.mutable.Set].newContainerBuilderFor(classOf[Int]).add(123)
110 | .build() === Set(123))
111 | }
112 |
113 | it should "build a Set of Strings on demand" in {
114 | assert(new IterableContainerFactory[scala.collection.mutable.Set].newContainerBuilderFor(classOf[String]).add("abc").add("def")
115 | .build() === Set("abc", "def"))
116 | }
117 |
118 | }
119 |
--------------------------------------------------------------------------------
/jdbi/src/test/scala/com/datasift/dropwizard/scala/jdbi/tweak/JDBIWrapperSpec.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jdbi.tweak
2 |
3 | import org.scalatest.FlatSpec
4 | import org.scalatest.mockito.MockitoSugar
5 | import org.mockito.Mockito._
6 | import org.mockito.ArgumentMatchers.{eq => equalTo}
7 | import com.datasift.dropwizard.scala.jdbi.`package`.JDBIWrapper
8 | import org.skife.jdbi.v2.TransactionIsolationLevel
9 | import org.skife.jdbi.v2.TransactionStatus
10 | import org.skife.jdbi.v2.Handle
11 |
12 | class JDBIWrapperSpec extends FlatSpec with MockitoSugar {
13 |
14 | val factory = new BigDecimalArgumentFactory
15 |
16 | "JDBIWrapper.inTransactionWithIsolation" should
17 | "be able to use isolation levels" in {
18 |
19 | // This overload is ambiguous and fails to compile (see SI-8021):
20 | // mock[JDBIWrapper].inTransaction(TransactionIsolationLevel.SERIALIZABLE) {
21 | // (h: Handle, status: TransactionStatus) =>
22 | // assert(true)
23 | // }
24 |
25 | mock[JDBIWrapper].inTransactionWithIsolation(TransactionIsolationLevel.SERIALIZABLE) {
26 | (h: Handle, status: TransactionStatus) =>
27 | assert(true)
28 | }
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/jdbi/src/test/scala/com/datasift/dropwizard/scala/jdbi/tweak/OptionContainerFactorySpec.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jdbi.tweak
2 |
3 | import org.scalatest.FlatSpec
4 |
5 | /**
6 | * Tests [[com.datasift.dropwizard.scala.jdbi.tweak.OptionContainerFactory]]
7 | */
8 | class OptionContainerFactorySpec extends FlatSpec {
9 |
10 | val factory = new OptionContainerFactory
11 |
12 | "OptionContainerFactory for Ints" should "Accepts Options" in {
13 | assert(factory.accepts(classOf[Option[Int]]))
14 | }
15 |
16 | it should "not accept Lists" in {
17 | assert(!factory.accepts(classOf[List[Int]]))
18 | }
19 |
20 | it should "build a None by default" in {
21 | assert(factory.newContainerBuilderFor(classOf[Int])
22 | .build() === None)
23 | }
24 |
25 | it should "Builds a Some of an Int on demand" in {
26 | assert(factory.newContainerBuilderFor(classOf[Int]).add(123)
27 | .build() === Some(123))
28 | }
29 |
30 | it should "Builds a Some of the last Int on demand" in {
31 | assert(factory.newContainerBuilderFor(classOf[Int]).add(123).add(456)
32 | .build() === Some(456))
33 | }
34 |
35 |
36 | "OptionContainerFactory for Strings" should "accept Options" in {
37 | assert(factory.accepts(classOf[Option[String]]))
38 | }
39 |
40 | it should "Doesn't accept Lists" in {
41 | assert(!factory.accepts(classOf[List[String]]))
42 | }
43 |
44 | it should "Builds a None by default" in {
45 | assert(factory.newContainerBuilderFor(classOf[String])
46 | .build() === None)
47 | }
48 |
49 | it should "Builds a Some of a String on demand" in {
50 | assert(factory.newContainerBuilderFor(classOf[String]).add("abc")
51 | .build() === Some("abc"))
52 | }
53 |
54 | it should "Builds a Some of the last String on demand" in {
55 | assert(factory.newContainerBuilderFor(classOf[String]).add("abc").add("def")
56 | .build() === Some("def"))
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/jersey/build.sbt:
--------------------------------------------------------------------------------
1 | libraryDependencies ++= Seq(
2 | "io.dropwizard" % "dropwizard-jersey" % Versions.dropwizard
3 | )
4 |
5 |
--------------------------------------------------------------------------------
/jersey/src/main/scala/com/datasift/dropwizard/scala/jersey/ParameterizedMessageBodyWriter.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jersey
2 |
3 | import java.lang.annotation.Annotation
4 | import java.lang.reflect.{ParameterizedType, Type}
5 | import javax.inject.{Provider, Inject}
6 | import javax.ws.rs.core.MediaType
7 | import javax.ws.rs.ext.MessageBodyWriter
8 |
9 | import org.glassfish.jersey.message.MessageBodyWorkers
10 |
11 | import scala.reflect.{ClassTag, classTag}
12 |
13 | abstract class ParameterizedMessageBodyWriter[A: ClassTag]
14 | extends MessageBodyWriter[A] {
15 |
16 | @Inject
17 | private var mbw: Provider[MessageBodyWorkers] = null
18 | private val klass = classTag[A].runtimeClass
19 |
20 | protected def getWriter(rawClass: Class[_],
21 | tpe: Type,
22 | annotations: Array[Annotation],
23 | mediaType: MediaType): Option[MessageBodyWriter[Any]] = {
24 | Option(mbw.get.getMessageBodyWriter(
25 | rawClass.asInstanceOf[Class[Any]],
26 | tpe,
27 | annotations,
28 | mediaType))
29 | }
30 |
31 | protected def getTypeArgument(tpe: Type, idx: Int): Option[Type] =
32 | tpe match {
33 | case t: ParameterizedType => Option(t.getActualTypeArguments()(idx))
34 | case _ => None
35 | }
36 |
37 | override def getSize(value: A,
38 | rawType: Class[_],
39 | genericType: Type,
40 | annotations: Array[Annotation],
41 | mediaType: MediaType): Long = 0
42 |
43 | override def isWriteable(rawType: Class[_],
44 | genericType: Type,
45 | annotations: Array[Annotation],
46 | mediaType: MediaType): Boolean = {
47 | klass.isAssignableFrom(rawType)
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/jersey/src/main/scala/com/datasift/dropwizard/scala/jersey/inject/CollectionParamFactoryProvider.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jersey.inject
2 |
3 | import javax.inject.Inject
4 |
5 | import org.glassfish.hk2.api.{Factory, ServiceLocator}
6 | import org.glassfish.jersey.server.internal.inject._
7 | import org.glassfish.jersey.server.model.Parameter
8 |
9 | abstract class CollectionParamFactoryProvider @Inject()(mpep: MultivaluedParameterExtractorProvider,
10 | locator: ServiceLocator,
11 | source: Parameter.Source)
12 | extends AbstractValueFactoryProvider(mpep, locator, source) {
13 |
14 | override def createValueFactory(parameter: Parameter): Factory[_] = {
15 | val name = Option(parameter.getSourceName)
16 | val defaultValue = Option(parameter.getDefaultValue)
17 | val tpe = parameter.getType
18 | val klass = parameter.getRawType
19 | val annotations = parameter.getAnnotations
20 |
21 | name.filter(_.nonEmpty).flatMap { name =>
22 | ParamConverters.getFirstConverter(locator, tpe, annotations)
23 | .map(x => x.fromString(_))
24 | .flatMap(buildExtractor(klass, name, defaultValue, _))
25 | .map(buildFactory(_, !parameter.isEncoded))
26 | }.orNull
27 | }
28 |
29 | protected def buildFactory(mpe: MultivaluedParameterExtractor[_],
30 | decode: Boolean): AbstractContainerRequestValueFactory[_]
31 |
32 | private def buildExtractor(klass: Class[_],
33 | name: String,
34 | defaultValue: Option[String],
35 | conv: String => Any): Option[MultivaluedParameterExtractor[_]] = Option {
36 | if (klass == classOf[Seq[_]])
37 | CollectionParameterExtractor[Seq](name, defaultValue, conv)
38 | else if (klass == classOf[List[_]])
39 | CollectionParameterExtractor[List](name, defaultValue, conv)
40 | else if (klass == classOf[Vector[_]])
41 | CollectionParameterExtractor[Vector](name, defaultValue, conv)
42 | else if (klass == classOf[IndexedSeq[_]])
43 | CollectionParameterExtractor[IndexedSeq](name, defaultValue, conv)
44 | else if (klass == classOf[Set[_]])
45 | CollectionParameterExtractor[Set](name, defaultValue, conv)
46 | else null
47 | }
48 | }
49 |
50 | class CollectionQueryParamFactoryProvider @Inject()(mpep: MultivaluedParameterExtractorProvider,
51 | locator: ServiceLocator)
52 | extends CollectionParamFactoryProvider(mpep, locator, Parameter.Source.QUERY) {
53 | override protected def buildFactory(extractor: MultivaluedParameterExtractor[_],
54 | decode: Boolean): AbstractContainerRequestValueFactory[_] = {
55 | new QueryParamValueFactory(extractor, decode)
56 | }
57 | }
58 |
59 | class CollectionHeaderParamFactoryProvider @Inject()(mpep: MultivaluedParameterExtractorProvider,
60 | locator: ServiceLocator)
61 | extends CollectionParamFactoryProvider(mpep, locator, Parameter.Source.HEADER) {
62 | override protected def buildFactory(extractor: MultivaluedParameterExtractor[_],
63 | decode: Boolean): AbstractContainerRequestValueFactory[_] = {
64 | new HeaderParamValueFactory(extractor, decode)
65 | }
66 | }
67 |
68 | class CollectionFormParamFactoryProvider @Inject()(mpep: MultivaluedParameterExtractorProvider,
69 | locator: ServiceLocator)
70 | extends CollectionParamFactoryProvider(mpep, locator, Parameter.Source.FORM) {
71 | override protected def buildFactory(extractor: MultivaluedParameterExtractor[_],
72 | decode: Boolean): AbstractContainerRequestValueFactory[_] = {
73 | new FormParamValueFactory(extractor, decode)
74 | }
75 | }
76 |
--------------------------------------------------------------------------------
/jersey/src/main/scala/com/datasift/dropwizard/scala/jersey/inject/CollectionParameterExtractor.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jersey.inject
2 |
3 | import org.glassfish.jersey.server.internal.inject.MultivaluedParameterExtractor
4 |
5 | import collection.generic.CanBuildFrom
6 | import javax.ws.rs.core.MultivaluedMap
7 |
8 | import scala.collection.JavaConverters.iterableAsScalaIterableConverter
9 | import scala.annotation.unchecked.{uncheckedVariance => uV}
10 | import scala.reflect.ClassTag
11 |
12 | object CollectionParameterExtractor {
13 |
14 | def apply[Col[_] <: TraversableOnce[_]]
15 | (name: String,
16 | defaultValue: Option[String],
17 | fromString: String => Any)
18 | (implicit bf: CanBuildFrom[Nothing, Any, Col[Any @uV]],
19 | ct: ClassTag[Col[_]]): MultivaluedParameterExtractor[_] = {
20 | new CollectionParameterExtractor[Any, Col](name, defaultValue, fromString)
21 | }
22 | }
23 |
24 | /** A parameter extractor for Scala collections with elements decoded by a function.
25 | *
26 | * @tparam A type of the elements in the collection.
27 | * @tparam Col type of the collection to extract.
28 | * @param name the name of the parameter to extract the collection for.
29 | * @param defaultValue the default value of the collection for when the parameter does not exist.
30 | * @param fromString a function to parse collection elements from a string
31 | * @param bf the implicit builder for the collection type.
32 | * @see [[MultivaluedParameterExtractor]]
33 | */
34 | class CollectionParameterExtractor[A, Col[_] <: TraversableOnce[_]]
35 | (name: String,
36 | defaultValue: Option[String],
37 | fromString: String => A)
38 | (implicit bf: CanBuildFrom[Nothing, A, Col[A @uV]])
39 | extends MultivaluedParameterExtractor[Col[A]] {
40 |
41 | private val default = defaultValue.toIterable
42 |
43 | override def getName = name
44 |
45 | override def getDefaultValueString = defaultValue.orNull
46 |
47 | override def extract(parameters: MultivaluedMap[String, String]): Col[A] = {
48 | val t = Option(parameters.get(name))
49 | .map(_.asScala)
50 | .getOrElse(default)
51 | .map(fromString)
52 |
53 | val b = bf()
54 | b.sizeHint(t)
55 | b ++= t
56 | b.result()
57 | }
58 | }
59 |
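Note: a minimal self-contained sketch of the extractor in isolation (the parameter name and values are illustrative):

    import javax.ws.rs.core.MultivaluedHashMap

    val extractor =
      new CollectionParameterExtractor[String, Set]("tag", None, identity)
    val params = new MultivaluedHashMap[String, String]()
    params.add("tag", "scala")
    params.add("tag", "jdbi")
    // extractor.extract(params) == Set("scala", "jdbi")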
--------------------------------------------------------------------------------
/jersey/src/main/scala/com/datasift/dropwizard/scala/jersey/inject/EitherMessageBodyWriter.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jersey.inject
2 |
3 | import java.io.OutputStream
4 | import java.lang.annotation.Annotation
5 | import java.lang.reflect.Type
6 | import javax.ws.rs.WebApplicationException
7 | import javax.ws.rs.core.{MultivaluedMap, MediaType}
8 |
9 | import com.datasift.dropwizard.scala.jersey.ParameterizedMessageBodyWriter
10 |
11 | class EitherMessageBodyWriter
12 | extends ParameterizedMessageBodyWriter[Either[_, _]] {
13 |
14 | override def writeTo(option: Either[_, _],
15 | rawClass: Class[_],
16 | genericType: Type,
17 | annotations: Array[Annotation],
18 | mediaType: MediaType,
19 | httpHeaders: MultivaluedMap[String, AnyRef],
20 | entityStream: OutputStream): Unit = option match {
21 | case Left(left)
22 | if classOf[WebApplicationException].isAssignableFrom(left.getClass) =>
23 | throw left.asInstanceOf[WebApplicationException]
24 | case Left(left) if classOf[Throwable].isAssignableFrom(left.getClass) =>
25 | throw left.asInstanceOf[Throwable]
26 | case Left(left) =>
27 | val klass = left.getClass
28 | getTypeArgument(genericType, 0).foreach { tpe =>
29 | getWriter(klass, tpe, annotations, mediaType).foreach {
30 | _.writeTo(
31 | left.asInstanceOf[Any],
32 | klass,
33 | tpe,
34 | annotations,
35 | mediaType,
36 | httpHeaders,
37 | entityStream
38 | )
39 | }
40 | }
41 | case Right(right) =>
42 | val klass = right.getClass
43 | getTypeArgument(genericType, 1).foreach { tpe =>
44 | getWriter(klass, tpe, annotations, mediaType).foreach { writer =>
45 | writer.writeTo(
46 | right.asInstanceOf[Any],
47 | klass,
48 | tpe,
49 | annotations,
50 | mediaType,
51 | httpHeaders,
52 | entityStream)
53 | }
54 | }
55 | }
56 | }
57 |
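Note: behavior sketch. A `Left` that is a `Throwable` is thrown (with `WebApplicationException` passing through unwrapped); any other `Left`, and every `Right`, is delegated to the writer for the corresponding type argument. A hypothetical resource:

    import javax.ws.rs.{GET, NotFoundException, Path, QueryParam}

    @Path("/answers")
    class AnswerResource {
      @GET def lookup(@QueryParam("q") q: String): Either[NotFoundException, String] =
        if (q == "life") Right("42") else Left(new NotFoundException)
    }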
--------------------------------------------------------------------------------
/jersey/src/main/scala/com/datasift/dropwizard/scala/jersey/inject/OptionMessageBodyWriter.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jersey.inject
2 |
3 | import java.io.OutputStream
4 | import java.lang.annotation.Annotation
5 | import java.lang.reflect.Type
6 | import javax.ws.rs.NotFoundException
7 | import javax.ws.rs.core.{MultivaluedMap, MediaType}
8 |
9 | import com.datasift.dropwizard.scala.jersey.ParameterizedMessageBodyWriter
10 |
11 | class OptionMessageBodyWriter
12 | extends ParameterizedMessageBodyWriter[Option[_]] {
13 |
14 | override def writeTo(option: Option[_],
15 | rawType: Class[_],
16 | genericType: Type,
17 | annotations: Array[Annotation],
18 | mediaType: MediaType,
19 | httpHeaders: MultivaluedMap[String, AnyRef],
20 | entityStream: OutputStream): Unit = option match {
21 | case None => throw new NotFoundException
22 | case Some(data) =>
23 | val klass = data.getClass
24 | getTypeArgument(genericType, 0).foreach { tpe =>
25 | getWriter(klass, tpe, annotations, mediaType).foreach {
26 | _.writeTo(
27 | data.asInstanceOf[Any],
28 | klass,
29 | tpe,
30 | annotations,
31 | mediaType,
32 | httpHeaders,
33 | entityStream)
34 | }
35 | }
36 | }
37 | }
38 |
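Note: behavior sketch: `None` yields a 404 Not Found, while `Some(x)` is delegated to the writer for the type argument. A hypothetical resource:

    import javax.ws.rs.{GET, Path, QueryParam}

    @Path("/echo")
    class EchoResource {
      @GET def echo(@QueryParam("msg") msg: String): Option[String] =
        Option(msg).filter(_.nonEmpty)   // empty or missing => 404
    }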
--------------------------------------------------------------------------------
/jersey/src/main/scala/com/datasift/dropwizard/scala/jersey/inject/ParamConverters.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jersey.inject
2 |
3 | import java.lang.annotation.Annotation
4 | import java.lang.reflect.Type
5 | import javax.ws.rs.ext.{ParamConverterProvider, ParamConverter}
6 |
7 | import org.glassfish.hk2.api.ServiceLocator
8 | import org.glassfish.jersey.internal.inject.Providers
9 | import org.glassfish.jersey.internal.util.ReflectionHelper
10 | import org.glassfish.jersey.internal.util.collection.ClassTypePair
11 |
12 | import collection.JavaConverters._
13 |
14 | object ParamConverters {
15 |
16 | object Identity extends ParamConverter[String] {
17 | override def fromString(value: String): String = value
18 | override def toString(value: String): String = value
19 | }
20 |
21 | def getConverter(locator: ServiceLocator,
22 | ctp: ClassTypePair,
23 | annotations: Array[Annotation]): Option[ParamConverter[_]] = {
24 | if (ctp.rawClass == classOf[String]) {
25 | Option(Identity)
26 | } else {
27 | Providers.getProviders(locator, classOf[ParamConverterProvider]).asScala
28 | .flatMap { provider =>
29 | Option(provider.getConverter(ctp.rawClass, ctp.`type`, annotations)
30 | .asInstanceOf[ParamConverter[Any]]
31 | )
32 | }
33 | .headOption
34 | }
35 | }
36 |
37 | def getFirstConverter(locator: ServiceLocator,
38 | tpe: Type,
39 | annotations: Array[Annotation]): Option[ParamConverter[_]] = {
40 | ReflectionHelper.getTypeArgumentAndClass(tpe)
41 | .asScala
42 | .headOption
43 | .flatMap(getConverter(locator, _, annotations))
44 | }
45 |
46 | def getConverters(locator: ServiceLocator,
47 | ctps: Seq[ClassTypePair],
48 | annotations: Array[Annotation]): List[ParamConverter[_]] = {
49 | ctps.flatMap(getConverter(locator, _, annotations)).toList
50 | }
51 |
52 | def getConverters(locator: ServiceLocator,
53 | tpe: Type,
54 | annotations: Array[Annotation]): List[ParamConverter[_]] = {
55 | val args = ReflectionHelper.getTypeArgumentAndClass(tpe).asScala.toList
56 | val conv = getConverters(locator, args, annotations)
57 | if (conv.size == args.size) conv
58 | else Nil
59 | }
60 |
61 | }
62 |
--------------------------------------------------------------------------------
/jersey/src/main/scala/com/datasift/dropwizard/scala/jersey/inject/ScalaInjectionBinder.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jersey.inject
2 |
3 | import javax.inject.Singleton
4 | import javax.ws.rs.ext.ParamConverterProvider
5 | import javax.ws.rs.{FormParam, HeaderParam, QueryParam}
6 |
7 | import org.glassfish.hk2.api.{InjectionResolver, TypeLiteral}
8 | import org.glassfish.hk2.utilities.binding.AbstractBinder
9 | import org.glassfish.jersey.server.spi.internal.ValueFactoryProvider
10 |
11 | import ScalaParamInjectionResolver._
12 |
13 | class ScalaInjectionBinder extends AbstractBinder {
14 | override def configure(): Unit = {
15 | bind(classOf[CollectionQueryParamFactoryProvider])
16 | .to(classOf[ValueFactoryProvider])
17 | .in(classOf[Singleton])
18 | bind(classOf[CollectionFormParamFactoryProvider])
19 | .to(classOf[ValueFactoryProvider])
20 | .in(classOf[Singleton])
21 | bind(classOf[CollectionHeaderParamFactoryProvider])
22 | .to(classOf[ValueFactoryProvider])
23 | .in(classOf[Singleton])
24 |
25 | bind(classOf[QueryParamInjectionResolver])
26 | .to(new TypeLiteral[InjectionResolver[QueryParam]] {})
27 | .in(classOf[Singleton])
28 |
29 | bind(classOf[FormParamInjectionResolver])
30 | .to(new TypeLiteral[InjectionResolver[FormParam]] {})
31 | .in(classOf[Singleton])
32 |
33 | bind(classOf[HeaderParamInjectionResolver])
34 | .to(new TypeLiteral[InjectionResolver[HeaderParam]] {})
35 | .in(classOf[Singleton])
36 |
37 | bind(classOf[ScalaParamConvertersProvider])
38 | .to(classOf[ParamConverterProvider])
39 | .in(classOf[Singleton])
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/jersey/src/main/scala/com/datasift/dropwizard/scala/jersey/inject/ScalaParamConverters.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jersey.inject
2 |
3 | import java.lang.annotation.Annotation
4 | import java.lang.reflect.Type
5 | import javax.inject.Inject
6 | import javax.ws.rs.ext.{ParamConverterProvider, ParamConverter}
7 |
8 | import org.glassfish.hk2.api.ServiceLocator
9 |
10 | import scala.util.{Failure, Success, Try}
11 |
12 | class BigDecimalParamConverter extends ParamConverter[BigDecimal] {
13 | override def fromString(value: String): BigDecimal = BigDecimal(value)
14 | override def toString(value: BigDecimal): String = value.toString
15 | }
16 |
17 | class BigIntParamConverter extends ParamConverter[BigInt] {
18 | override def fromString(value: String): BigInt = BigInt(value)
19 | override def toString(value: BigInt): String = value.toString
20 | }
21 |
22 | class EitherParamConverter[L, R](left: ParamConverter[L],
23 | right: ParamConverter[R])
24 | extends ParamConverter[Either[L, R]] {
25 |
26 | override def fromString(value: String): Either[L, R] = {
27 | Try(right.fromString(value)) match {
28 | case Success(v) => Right(v)
29 | case Failure(_) => Left(left.fromString(value))
30 | }
31 | }
32 |
33 | override def toString(value: Either[L, R]): String = value match {
34 | case Right(v) => right.toString(v)
35 | case Left(v) => left.toString(v)
36 | }
37 | }
38 |
39 | class OptionParamConverter[A](conv: ParamConverter[A])
40 | extends ParamConverter[Option[A]] {
41 |
42 | override def fromString(value: String): Option[A] = {
43 | Option(value).map(conv.fromString)
44 | }
45 |
46 | override def toString(value: Option[A]): String = {
47 | value.map(conv.toString).getOrElse("")
48 | }
49 | }
50 |
51 | class ScalaParamConvertersProvider @Inject() (locator: ServiceLocator)
52 | extends ParamConverterProvider {
53 |
54 | override def getConverter[T](rawType: Class[T],
55 | genericType: Type,
56 | annotations: Array[Annotation]): ParamConverter[T] = {
57 | if (rawType == classOf[BigDecimal])
58 | (new BigDecimalParamConverter).asInstanceOf[ParamConverter[T]]
59 | else if (rawType == classOf[BigInt])
60 | (new BigIntParamConverter).asInstanceOf[ParamConverter[T]]
61 | else if (rawType == classOf[Option[_]]) {
62 | ParamConverters.getFirstConverter(locator, genericType, annotations)
63 | .map(new OptionParamConverter(_).asInstanceOf[ParamConverter[T]])
64 | .orNull
65 | }
66 | else if (rawType == classOf[Either[_, _]]) {
67 | ParamConverters.getConverters(locator, genericType, annotations) match {
68 | case left :: right :: Nil =>
69 | new EitherParamConverter(left, right).asInstanceOf[ParamConverter[T]]
70 | case _ => null
71 | }
72 | } else null
73 | }
74 | }
75 |
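Note: once this provider is bound (see ScalaInjectionBinder), resources can accept Scala-friendly parameter types. A hypothetical resource: `Option[String]` uses the identity converter, `Option[BigDecimal]` the BigDecimalParamConverter above:

    import javax.ws.rs.{GET, Path, QueryParam}

    @Path("/search")
    class SearchResource {
      @GET def search(@QueryParam("q") q: Option[String],
                      @QueryParam("max") max: Option[BigDecimal]): String =
        s"q=$q max=$max"
    }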
--------------------------------------------------------------------------------
/jersey/src/main/scala/com/datasift/dropwizard/scala/jersey/inject/ScalaParamInjectionResolver.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jersey.inject
2 |
3 | import javax.ws.rs.{HeaderParam, FormParam, QueryParam}
4 |
5 | import org.glassfish.jersey.server.internal.inject.ParamInjectionResolver
6 |
7 | object ScalaParamInjectionResolver {
8 |
9 | class QueryParamInjectionResolver
10 | extends ParamInjectionResolver[QueryParam](
11 | classOf[CollectionQueryParamFactoryProvider])
12 |
13 | class FormParamInjectionResolver
14 | extends ParamInjectionResolver[FormParam](
15 | classOf[CollectionFormParamFactoryProvider])
16 |
17 | class HeaderParamInjectionResolver
18 | extends ParamInjectionResolver[HeaderParam](
19 | classOf[CollectionHeaderParamFactoryProvider])
20 |
21 | }
22 |
--------------------------------------------------------------------------------
/jersey/src/main/scala/com/datasift/dropwizard/scala/jersey/inject/TryMessageBodyWriter.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jersey.inject
2 |
3 | import java.io.OutputStream
4 | import java.lang.annotation.Annotation
5 | import java.lang.reflect.Type
6 | import javax.ws.rs.WebApplicationException
7 | import javax.ws.rs.core.{MultivaluedMap, MediaType}
8 |
9 | import com.datasift.dropwizard.scala.jersey.ParameterizedMessageBodyWriter
10 |
11 | import scala.util.{Success, Failure, Try}
12 |
13 | class TryMessageBodyWriter extends ParameterizedMessageBodyWriter[Try[_]] {
14 |
15 | override def writeTo(value: Try[_],
16 | rawClass: Class[_],
17 | genericType: Type,
18 | annotations: Array[Annotation],
19 | mediaType: MediaType,
20 | httpHeaders: MultivaluedMap[String, AnyRef],
21 | entityStream: OutputStream): Unit = value match {
22 | case Failure(t) if t.isInstanceOf[WebApplicationException] => throw t
23 | case Failure(t) => throw new WebApplicationException(t)
24 | case Success(data) =>
25 | val klass = data.getClass
26 | getTypeArgument(genericType, 0).foreach { innerGenericType =>
27 | getWriter(klass, innerGenericType, annotations, mediaType).foreach {
28 | _.writeTo(
29 | data,
30 | klass,
31 | innerGenericType,
32 | annotations,
33 | mediaType,
34 | httpHeaders,
35 | entityStream)
36 | }
37 | }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/jersey/src/main/scala/com/datasift/dropwizard/scala/jersey/inject/ValueFactory.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jersey.inject
2 |
3 | import javax.ws.rs.ProcessingException
4 | import javax.ws.rs.core.Form
5 |
6 | import org.glassfish.jersey.internal.inject.ExtractorException
7 | import org.glassfish.jersey.server.ParamException._
8 | import org.glassfish.jersey.server.internal.inject.{MultivaluedParameterExtractor, AbstractContainerRequestValueFactory}
9 |
10 | class QueryParamValueFactory(extractor: MultivaluedParameterExtractor[_],
11 | decode: Boolean)
12 | extends AbstractContainerRequestValueFactory[AnyRef] {
13 |
14 | override def provide(): AnyRef = try {
15 | val parameters = getContainerRequest
16 | .getUriInfo
17 | .getQueryParameters(decode)
18 |
19 | extractor.extract(parameters).asInstanceOf[AnyRef]
20 | } catch {
21 | case e: ExtractorException => throw new QueryParamException(
22 | e.getCause, extractor.getName, extractor.getDefaultValueString)
23 | }
24 | }
25 |
26 | class FormParamValueFactory(extractor: MultivaluedParameterExtractor[_],
27 | decode: Boolean)
28 | extends AbstractContainerRequestValueFactory[AnyRef] {
29 |
30 | override def provide(): AnyRef = try {
31 | getContainerRequest.bufferEntity()
32 | val form = getContainerRequest.readEntity(classOf[Form])
33 | extractor.extract(form.asMap()).asInstanceOf[AnyRef]
34 | } catch {
35 | case e: ProcessingException => throw new FormParamException(
36 | e.getCause, extractor.getName, extractor.getDefaultValueString)
37 | }
38 | }
39 |
40 | class HeaderParamValueFactory(extractor: MultivaluedParameterExtractor[_],
41 | decode: Boolean)
42 | extends AbstractContainerRequestValueFactory[AnyRef] {
43 |
44 | override def provide(): AnyRef = try {
45 | extractor.extract(getContainerRequest.getHeaders).asInstanceOf[AnyRef]
46 | } catch {
47 | case e: ExtractorException => throw new HeaderParamException(
48 | e.getCause, extractor.getName, extractor.getDefaultValueString)
49 | }
50 | }
51 |
52 |
--------------------------------------------------------------------------------
/jersey/src/main/scala/com/datasift/dropwizard/scala/jersey/params/BooleanParam.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jersey.params
2 |
3 | import io.dropwizard.jersey.params.AbstractParam
4 |
5 | /** Factory object for [[BooleanParam]]. */
6 | object BooleanParam {
7 |
8 | /** Creates a parameter extractor for the given value. */
9 | def apply(value: Boolean): AbstractParam[Boolean] = BooleanParam(value.toString)
10 | }
11 |
12 | /** Parameter extractor for [[scala.Boolean]].
13 | *
14 | * @param s the input data to extract the [[scala.Boolean]] from.
15 | *
16 | * @see [[io.dropwizard.jersey.params.AbstractParam]]
17 | */
18 | case class BooleanParam(s: String) extends AbstractParam[Boolean](s) {
19 |
20 | protected def parse(input: String) = input.toBoolean
21 | }
22 |
--------------------------------------------------------------------------------
/jersey/src/main/scala/com/datasift/dropwizard/scala/jersey/params/IntParam.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jersey.params
2 |
3 | import io.dropwizard.jersey.params.AbstractParam
4 |
5 | /** Factory object for [[IntParam]]. */
6 | object IntParam {
7 |
8 | /** Creates a parameter extractor for the given value. */
9 | def apply(value: Int): AbstractParam[Int] = IntParam(value.toString)
10 | }
11 |
12 | /** Parameter extractor for [[scala.Int]].
13 | *
14 | * @param s the input data to extract the [[scala.Int]] from.
15 | *
16 | * @see [[io.dropwizard.jersey.params.AbstractParam]]
17 | */
18 | case class IntParam(s: String) extends AbstractParam[Int](s) {
19 |
20 |   protected def parse(input: String) = input.toInt
21 | }
22 |
--------------------------------------------------------------------------------
/jersey/src/main/scala/com/datasift/dropwizard/scala/jersey/params/LongParam.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.jersey.params
2 |
3 | import io.dropwizard.jersey.params.AbstractParam
4 |
5 | /** Factory object for [[LongParam]]. */
6 | object LongParam {
7 |
8 | /** Creates a parameter extractor for the given value. */
9 | def apply(value: Long): AbstractParam[Long] = LongParam(value.toString)
10 | }
11 |
12 | /** Parameter extractor for [[scala.Long]].
13 | *
14 | * @param s the input data to extract the [[scala.Long]] from.
15 | *
16 | * @see [[io.dropwizard.jersey.params.AbstractParam]]
17 | */
18 | case class LongParam(s: String) extends AbstractParam[Long](s) {
19 |   protected def parse(input: String) = input.toLong
20 | }
21 |
--------------------------------------------------------------------------------
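
These param types plug into resource methods the same way as Dropwizard's built-in Java params: Jersey instantiates them from the raw request string via the single-String constructor, and a parse failure surfaces as a 400 Bad Request. A minimal sketch of query-parameter usage; RepeatResource, its path, and the parameter names are illustrative only:

    import javax.ws.rs.core.MediaType
    import javax.ws.rs.{GET, Path, Produces, QueryParam}

    import com.datasift.dropwizard.scala.jersey.params.IntParam

    @Path("/repeat")
    @Produces(Array(MediaType.TEXT_PLAIN))
    class RepeatResource {

      // IntParam wraps the raw query string; AbstractParam#get yields the parsed Int
      @GET
      def repeat(@QueryParam("times") times: IntParam): String =
        "ping" * times.get
    }
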
/metrics/build.sbt:
--------------------------------------------------------------------------------
1 | libraryDependencies ++= Seq(
2 | "io.dropwizard" % "dropwizard-core" % Versions.dropwizard
3 | )
4 |
5 |
--------------------------------------------------------------------------------
/metrics/src/main/scala/com/datasift/dropwizard/scala/metrics.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala
2 |
3 | import com.codahale.metrics._
4 | import io.dropwizard.util.Duration
5 |
6 | import java.util.concurrent.TimeUnit
7 |
8 | object metrics {
9 |
10 | implicit final def TimerWrapper(t: Timer) = new TimerWrapper(t)
11 | implicit final def CounterWrapper(c: Counter) = new CounterWrapper(c)
12 | implicit final def HistogramWrapper(h: Histogram) = new HistogramWrapper(h)
13 | implicit final def MetricRegistryWrapper(r: MetricRegistry) = new MetricRegistryWrapper(r)
14 | implicit final def GaugeWrapper[A](g: Gauge[A]) = new GaugeWrapper[A](g)
15 |
16 | class MetricRegistryWrapper private[metrics] (r: MetricRegistry) {
17 |
18 | def gauge[A](name: String)
19 | (f: => A): Gauge[A] = r.register(name, new Gauge[A] {
20 | override def getValue = f
21 | })
22 |
23 | def gauge[A](name: String, timeout: Long, timeoutUnit: TimeUnit)
24 | (f: => A): CachedGauge[A] = {
25 | r.register(name, new CachedGauge[A](timeout, timeoutUnit) {
26 | override def loadValue() = f
27 | })
28 | }
29 |
30 | def gauge[A](name: String, timeout: Duration)
31 | (f: => A): CachedGauge[A] = {
32 | gauge(name, timeout.getQuantity, timeout.getUnit)(f)
33 | }
34 |
35 | def gauge[A](name: String, clock: Clock, timeout: Long, timeoutUnit: TimeUnit)
36 | (f: => A): CachedGauge[A] = {
37 | r.register(name, new CachedGauge[A](clock, timeout, timeoutUnit) {
38 | override def loadValue() = f
39 | })
40 | }
41 |
42 | def gauge[A](name: String, clock: Clock, timeout: Duration)
43 | (f: => A): CachedGauge[A] = {
44 | gauge(name, clock, timeout.getQuantity, timeout.getUnit)(f)
45 | }
46 | }
47 |
48 | class GaugeWrapper[A] private[metrics] (g: Gauge[A]) {
49 |
50 | def map[B](f: A => B): Gauge[B] = new DerivativeGauge[A, B](g) {
51 | override def transform(value: A): B = f(value)
52 | }
53 | }
54 |
55 | class TimerWrapper private[metrics] (t: Timer) {
56 |
57 | def timed[A](f: => A): A = {
58 | val ctx = t.time()
59 | val res = try {
60 | f
61 | } finally {
62 | ctx.stop()
63 | }
64 | res
65 | }
66 | }
67 |
68 | class CounterWrapper private[metrics] (c: Counter) {
69 |
70 | def +=(delta: Long): Counter = {
71 | c.inc(delta)
72 | c
73 | }
74 |
75 | def -=(delta: Long): Counter = {
76 | c.dec(delta)
77 | c
78 | }
79 | }
80 |
81 | class HistogramWrapper private[metrics] (h: Histogram) {
82 |
83 | def +=(value: Long): Histogram = {
84 | h.update(value)
85 | h
86 | }
87 |
88 | def +=(value: Int): Histogram = {
89 | h.update(value)
90 | h
91 | }
92 |
93 | def snapshot = h.getSnapshot
94 | }
95 | }
96 |
--------------------------------------------------------------------------------
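
Importing com.datasift.dropwizard.scala.metrics._ brings these enrichments into scope wherever metrics are recorded. A minimal usage sketch; the metric names and helper methods below are illustrative, not part of the library:

    import scala.collection.mutable

    import com.codahale.metrics.MetricRegistry
    import com.datasift.dropwizard.scala.metrics._

    object MetricsUsage {

      private val pendingWork = mutable.Queue.empty[Array[Byte]]
      private def process(payload: Array[Byte]): Unit = ()

      val registry = new MetricRegistry

      // by-name gauge: the block is re-evaluated on every read
      registry.gauge("example.queue.depth") { pendingWork.size }

      // counters and histograms gain += / -= operators
      private val requests = registry.counter("example.requests")
      private val sizes = registry.histogram("example.sizes")

      def handle(payload: Array[Byte]): Unit = {
        requests += 1
        sizes += payload.length
        // timed runs the block, stopping the timer even if it throws
        registry.timer("example.latency").timed(process(payload))
      }
    }
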
/metrics/src/test/scala/com/datasift/dropwizard/scala/MetricImplicitsSpec.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala
2 |
3 | import com.codahale.metrics._
4 | import com.datasift.dropwizard.scala.metrics._
5 | import io.dropwizard.util.Duration
6 | import org.scalatest.FlatSpec
7 |
8 | import scala.collection.JavaConverters.mapAsScalaMapConverter
9 |
10 | class MetricImplicitsSpec extends FlatSpec {
11 |
12 | class TestClock(var tick: Long = 0) extends Clock {
13 | override def getTick = tick
14 | }
15 |
16 | "Timer" should "time execution of a function" in {
17 |
18 |
19 | val clock = new TestClock(0)
20 | val timer = new Timer(new UniformReservoir(), clock)
21 | timer.timed { clock.tick += 1 }
22 |
23 | assert(clock.tick === 1)
24 | }
25 |
26 | "Counter" should "increment using +=" in {
27 | val counter = new Counter()
28 | counter += 250
29 | assert(counter.getCount === 250)
30 | }
31 |
32 | it should "decrement using -=" in {
33 | val counter = new Counter()
34 | counter.inc(1000)
35 | counter -= 250
36 | assert(counter.getCount === 750)
37 | }
38 |
39 | it should "chain += and -=" in {
40 | val counter = new Counter()
41 | counter += 500 -= 125
42 | assert(counter.getCount === 375)
43 | }
44 |
45 | "Histogram" should "add Longs with +=" in {
46 | val histogram = new Histogram(new UniformReservoir())
47 | val long = Int.MaxValue.toLong * 200
48 | histogram += long
49 | assert(histogram.getCount === 1)
50 | assert(histogram.getSnapshot.getValues === Array(long))
51 | }
52 |
53 | it should "add Ints with +=" in {
54 | val histogram = new Histogram(new UniformReservoir())
55 | val int = 5678
56 | histogram += int
57 | assert(histogram.getCount === 1)
58 | assert(histogram.getSnapshot.getValues === Array(int))
59 | }
60 |
61 | "MetricRegistry" should "register a gauge for an arbitrary function" in {
62 | val registry = new MetricRegistry
63 | var value = 5
64 | val gauge = registry.gauge("test.gauge")(value)
65 | assert(registry.getGauges.asScala === Map("test.gauge" -> gauge))
66 | assert(gauge.getValue === value)
67 | value = 15
68 | assert(gauge.getValue === value)
69 | }
70 |
71 | it should "register a cached gauge for an arbitrary function" in {
72 | val registry = new MetricRegistry
73 | var value = 5
74 | val clock = new TestClock(0)
75 | val gauge = registry.gauge("test.gauge", clock, Duration.nanoseconds(5))(value)
76 | assert(registry.getGauges.asScala === Map("test.gauge" -> gauge))
77 | assert(gauge.getValue === value)
78 | val oldValue = value
79 | value = 50
80 | assert(gauge.getValue === oldValue)
81 | clock.tick = 4
82 | assert(gauge.getValue === oldValue)
83 | clock.tick = 5
84 | assert(gauge.getValue === value)
85 | }
86 |
87 | "Gauge" should "be transformable by another function" in {
88 | val registry = new MetricRegistry
89 | val gauge = registry.gauge("test.gauge")(50)
90 | val transformed = gauge.map(2 * _)
91 | assert(gauge.getValue === 50)
92 | assert(transformed.getValue === 100)
93 | }
94 | }
95 |
--------------------------------------------------------------------------------
/project/Versions.scala:
--------------------------------------------------------------------------------
1 |
2 | object Versions {
3 |
4 | val dropwizard = "1.3.7"
5 | val jackson = "2.9.7"
6 | val mockito = "2.7.12"
7 | val scalaTest = "3.0.3"
8 |
9 | }
10 |
11 |
12 |
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=1.2.6
2 |
3 |
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | // make releases
2 | addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.10")
3 |
4 | // sign releases
5 | addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.2")
6 |
7 | // release to Sonatype OSS
8 | addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.3")
9 |
--------------------------------------------------------------------------------
/test/build.sbt:
--------------------------------------------------------------------------------
1 | libraryDependencies ++= Seq(
2 | "io.dropwizard" % "dropwizard-core" % Versions.dropwizard,
3 | "io.dropwizard" % "dropwizard-client" % Versions.dropwizard,
4 |   "org.scalatest" %% "scalatest" % Versions.scalaTest,
5 | "io.dropwizard" % "dropwizard-migrations" % Versions.dropwizard % "optional",
6 | "mysql" % "mysql-connector-mxj" % "5.0.12" % "optional"
7 | )
8 |
9 |
--------------------------------------------------------------------------------
/test/src/main/scala/com/datasift/dropwizard/scala/test/ApplicationTest.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.test
2 |
3 | import javax.ws.rs.client.{WebTarget, Client}
4 |
5 | import io.dropwizard.cli.ServerCommand
6 | import io.dropwizard.client.JerseyClientBuilder
7 | import io.dropwizard.lifecycle.ServerLifecycleListener
8 | import io.dropwizard.setup.{Bootstrap, Environment}
9 | import io.dropwizard.{Application, Configuration}
10 | import net.sourceforge.argparse4j.inf.Namespace
11 | import org.eclipse.jetty.server.Server
12 |
13 | import scala.util.{Failure, Try}
14 | import scala.collection.JavaConverters._
15 |
16 | object ApplicationTest {
17 |
18 | def apply[C <: Configuration]
19 | (suite: BeforeAndAfterAllMulti,
20 | configPath: String,
21 | args: Map[String, AnyRef] = Map.empty)
22 | (newApp: => Application[C]): ApplicationTest[C] =
23 | new ApplicationTest[C](suite, configPath, args)(newApp)
24 |
25 | }
26 |
27 | class ApplicationTest[C <: Configuration]
28 | (suite: BeforeAndAfterAllMulti,
29 | configPath: String,
30 | args: Map[String, AnyRef] = Map.empty)
31 | (newApp: => Application[C]) {
32 |
33 | private var _configuration: Try[C] = Failure(NotInitializedException)
34 | private var _application: Try[Application[C]] = Failure(NotInitializedException)
35 | private var _environment: Try[Environment] = Failure(NotInitializedException)
36 | private var _server: Try[Server] = Failure(NotInitializedException)
37 |
38 | def configuration: Try[C] = _configuration
39 | def application: Try[Application[C]] = _application
40 | def environment: Try[Environment] = _environment
41 | def server: Try[Server] = _server
42 |
43 | def newClient(name: String): Try[Client] =
44 | environment.map(new JerseyClientBuilder(_).build(name))
45 |
46 | suite.beforeAll {
47 |     _application = Try(newApp)
48 |     _application.foreach { app =>
49 | val bootstrap = new Bootstrap[C](app) {
50 | override def run(configuration: C, environment: Environment): Unit = {
51 | _environment = Try(environment)
52 | _configuration = Try(configuration)
53 | super.run(configuration, environment)
54 | environment.lifecycle.addServerLifecycleListener(
55 | new ServerLifecycleListener {
56 | override def serverStarted(server: Server): Unit = {
57 | _server = Try(server)
58 | }
59 | }
60 | )
61 | }
62 | }
63 |
64 | app.initialize(bootstrap)
65 | val command = new ServerCommand[C](app)
66 |       val ns = new Namespace((args + ("file" -> configPath)).asJava)
67 | command.run(bootstrap, ns)
68 | }
69 | }
70 |
71 | suite.afterAll {
72 | server.foreach(_.stop())
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
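
A suite drives this by mixing in BeforeAndAfterAllMulti (defined next), which collects the fixture's beforeAll/afterAll hooks and runs them around the whole suite. A minimal sketch, assuming an illustrative no-op application and a config file on an illustrative path:

    import org.scalatest.FlatSpec

    import io.dropwizard.setup.Environment
    import io.dropwizard.{Application, Configuration}
    import com.datasift.dropwizard.scala.test.{ApplicationTest, BeforeAndAfterAllMulti}

    // illustrative application under test
    class MyApplication extends Application[Configuration] {
      override def run(configuration: Configuration, environment: Environment): Unit = ()
    }

    class MyApplicationSpec extends FlatSpec with BeforeAndAfterAllMulti {

      // boots the full Dropwizard app before the suite, stops it afterwards
      val app = ApplicationTest(this, "src/test/resources/test-conf.yml") {
        new MyApplication
      }

      "MyApplication" should "expose its server and environment once started" in {
        assert(app.server.isSuccess)
        assert(app.environment.isSuccess)
      }
    }
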
/test/src/main/scala/com/datasift/dropwizard/scala/test/BeforeAndAfterAllMulti.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.test
2 |
3 | import org.scalatest.{Suite, BeforeAndAfterAll}
4 |
5 | import collection.mutable
6 |
7 | trait BeforeAndAfterAllMulti extends BeforeAndAfterAll { this: Suite =>
8 |
9 |   private val before: mutable.Buffer[() => Unit] = mutable.Buffer.empty
10 |   private val after: mutable.Buffer[() => Unit] = mutable.Buffer.empty
11 |
12 | def beforeAll(f: => Unit): Unit = {
13 | before.append(() => f)
14 | }
15 |
16 | def afterAll(f: => Unit): Unit = {
17 | after.prepend(() => f)
18 | }
19 |
20 | override def beforeAll(): Unit = {
21 | before.foreach(_())
22 | }
23 |
24 | override def afterAll(): Unit = {
25 | after.foreach(_())
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
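
Because beforeAll blocks are appended and afterAll blocks are prepended, setup runs in registration order while teardown runs in reverse, giving composed fixtures stack discipline: the last fixture started is the first torn down. A minimal sketch of that ordering:

    import org.scalatest.FlatSpec

    import com.datasift.dropwizard.scala.test.BeforeAndAfterAllMulti

    class OrderingSpec extends FlatSpec with BeforeAndAfterAllMulti {

      beforeAll { println("start database") } // runs first
      beforeAll { println("start server") }   // runs second

      afterAll { println("stop database") }   // runs last
      afterAll { println("stop server") }     // runs first

      "fixtures" should "nest like a stack" in succeed
    }
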
/test/src/main/scala/com/datasift/dropwizard/scala/test/LiquibaseTest.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.test
2 |
3 | import java.util.Date
4 |
5 | import io.dropwizard.db.ManagedDataSource
6 | import io.dropwizard.migrations.{CloseableLiquibase, CloseableLiquibaseWithFileSystemMigrationsFile}
7 |
8 | import scala.util.{Failure, Try}
9 |
10 | object LiquibaseTest {
11 |
12 | def apply(suite: BeforeAndAfterAllMulti,
13 | config: Config = Config())
14 | (newDataSource: => ManagedDataSource): LiquibaseTest =
15 | new LiquibaseTest(suite, config)(newDataSource)
16 |
17 | case class Config(file: String = "migrations.xml",
18 | contexts: Seq[String] = Seq.empty)
19 | }
20 |
21 | class LiquibaseTest(suite: BeforeAndAfterAllMulti,
22 | config: LiquibaseTest.Config = LiquibaseTest.Config())
23 | (newDataSource: => ManagedDataSource) {
24 |
25 | private var _dataSource: Try[ManagedDataSource] =
26 | Failure(NotInitializedException)
27 | private var _liquibase: Try[CloseableLiquibase] =
28 | Failure(NotInitializedException)
29 |
30 | def dataSource: Try[ManagedDataSource] = _dataSource
31 | def liquibase: Try[CloseableLiquibase] = _liquibase
32 |
33 | suite.beforeAll {
34 | _dataSource = Try(newDataSource)
35 | _liquibase = _dataSource
36 | .flatMap(ds => Try(new CloseableLiquibaseWithFileSystemMigrationsFile(ds, config.file)))
37 |
38 | _liquibase.foreach(_.update(config.contexts.mkString(",")))
39 | }
40 |
41 | suite.afterAll {
42 | _liquibase.foreach { liquibase =>
43 | liquibase.rollback(new Date(0), config.contexts.mkString(","))
44 | liquibase.close()
45 | }
46 | }
47 |
48 | }
49 |
--------------------------------------------------------------------------------
/test/src/main/scala/com/datasift/dropwizard/scala/test/MySQLTest.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.test
2 |
3 | import java.io.{IOException, File}
4 | import java.nio.file.attribute.BasicFileAttributes
5 | import java.nio.file.{FileVisitResult, SimpleFileVisitor, Path, Files}
6 |
7 | import com.mysql.management.driverlaunched.ServerLauncherSocketFactory
8 | import io.dropwizard.db.ManagedDataSource
9 |
10 | import scala.util.{Failure, Try}
11 |
12 | object MySQLTest {
13 |
14 | private val UriRegex =
15 | "^jdbc:mysql:mxj://[^:]+:\\d+/[^?]+?.*server\\.basedir=([^&]+).*$".r
16 |
17 | def apply(suite: BeforeAndAfterAllMulti, connectionURI: => String)
18 | (newDataSource: => ManagedDataSource): MySQLTest =
19 | new MySQLTest(suite, connectionURI)(newDataSource)
20 | }
21 |
22 | class MySQLTest(suite: BeforeAndAfterAllMulti, connectionURI: => String)
23 | (newDataSource: => ManagedDataSource) {
24 |
25 | import MySQLTest.UriRegex
26 |
27 |   private var _dataSource: Try[ManagedDataSource] = Failure(NotInitializedException)
28 |   private var _baseDir: Try[File] = Failure(NotInitializedException)
29 |
30 | def dataSource: Try[ManagedDataSource] = _dataSource
31 | def baseDir: Try[File] = _baseDir
32 |
33 | suite.beforeAll {
34 | _dataSource = Try(newDataSource)
35 | _baseDir = Try(connectionURI match {
36 | case UriRegex(dir) => new File(dir)
37 | })
38 | _dataSource.foreach(_.getConnection)
39 | }
40 |
41 | suite.afterAll {
42 | _baseDir.foreach { baseDir =>
43 | if (!ServerLauncherSocketFactory.shutdown(baseDir, null)) {
44 | deleteRecursively(baseDir.toPath)
45 | }
46 | }
47 | }
48 |
49 | private def deleteRecursively(path: Path) {
50 | Files.walkFileTree(path, new SimpleFileVisitor[Path]() {
51 | override def visitFile(file: Path,
52 | attrs: BasicFileAttributes): FileVisitResult = {
53 | Files.delete(file)
54 | FileVisitResult.CONTINUE
55 | }
56 |
57 | override def postVisitDirectory(dir: Path,
58 | ex: IOException): FileVisitResult = {
59 | Files.delete(dir)
60 | FileVisitResult.CONTINUE
61 | }
62 | })
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
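
MySQLTest and LiquibaseTest compose through the same suite hooks: register the embedded MySQL fixture first so the server is up before migrations run, and the reverse teardown order rolls migrations back before the server is shut down. A minimal sketch; the URI, credentials, and file paths are illustrative:

    import org.scalatest.FlatSpec

    import com.codahale.metrics.MetricRegistry
    import com.datasift.dropwizard.scala.test.{BeforeAndAfterAllMulti, LiquibaseTest, MySQLTest}
    import io.dropwizard.db.{DataSourceFactory, ManagedDataSource}

    class DatabaseSpec extends FlatSpec with BeforeAndAfterAllMulti {

      // server.basedir is what MySQLTest parses out of the URI for cleanup
      val uri = "jdbc:mysql:mxj://localhost:3309/test?server.basedir=target/mysql"

      def newDataSource: ManagedDataSource = {
        val factory = new DataSourceFactory
        factory.setDriverClass("com.mysql.jdbc.Driver")
        factory.setUrl(uri)
        factory.setUser("test")
        factory.build(new MetricRegistry, "test")
      }

      val mysql = MySQLTest(this, uri)(newDataSource)
      val migrations = LiquibaseTest(
        this, LiquibaseTest.Config(file = "src/test/resources/migrations.xml"))(newDataSource)

      "the schema" should "migrate cleanly" in {
        assert(migrations.liquibase.isSuccess)
      }
    }
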
/test/src/main/scala/com/datasift/dropwizard/scala/test/NotInitializedException.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.test
2 |
3 | import scala.util.control.NoStackTrace
4 |
5 | case object NotInitializedException extends Exception with NoStackTrace
6 |
--------------------------------------------------------------------------------
/validation/build.sbt:
--------------------------------------------------------------------------------
1 | libraryDependencies ++= Seq(
2 | "io.dropwizard" % "dropwizard-core" % Versions.dropwizard,
3 | "io.dropwizard" % "dropwizard-validation" % Versions.dropwizard
4 | )
5 |
6 |
--------------------------------------------------------------------------------
/validation/src/main/scala/com/datasift/dropwizard/scala/validation/ScalaValidatorsBundle.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.validation
2 |
3 | import io.dropwizard.Bundle
4 | import io.dropwizard.setup.{Bootstrap, Environment}
5 | import com.datasift.dropwizard.scala.validation.validators._
6 | import org.hibernate.validator.internal.engine.ValidatorFactoryImpl
7 | import org.hibernate.validator.internal.metadata.core.ConstraintHelper
8 |
9 | import scala.reflect._
10 | import scala.collection.JavaConverters.seqAsJavaListConverter
11 |
12 | import java.util
13 | import java.lang.annotation.Annotation
14 | import javax.validation.ConstraintValidator
15 | import javax.validation.constraints.Size
16 |
17 | object ScalaValidatorsBundle {
18 |
19 | val sizeValidators = List(
20 | classOf[SizeValidatorForScalaCollection],
21 | classOf[SizeValidatorForScalaOption])
22 | }
23 |
24 | /** Adds support for Scala collections to the validation framework. */
25 | class ScalaValidatorsBundle extends Bundle {
26 |
27 | import ScalaValidatorsBundle._
28 |
29 | def initialize(bootstrap: Bootstrap[_]) {
30 | bootstrap.getValidatorFactory match {
31 | case factory: ValidatorFactoryImpl => {
32 |         // we need reflection here to reach the private ConstraintHelper, where validator mappings are registered
33 |         val constraintHelperField = classOf[ValidatorFactoryImpl].getDeclaredField("constraintHelper")
34 |         constraintHelperField.setAccessible(true)
35 |         val constraintHelper = constraintHelperField.get(factory).asInstanceOf[ConstraintHelper]
36 |
37 | // add custom constraint mappings
38 | addValidators[Size](constraintHelper, sizeValidators)
39 | }
40 | case _ => // ignore unrecognized implementations
41 | }
42 | }
43 |
44 | def run(environment: Environment) {
45 |
46 | }
47 |
48 | private def addValidators[A <: Annotation : ClassTag](helper: ConstraintHelper,
49 | validators: List[Class[_ <: ConstraintValidator[A, _]]]) {
50 | val annoClass = classTag[A].runtimeClass.asInstanceOf[Class[A]]
51 | val allValidators = new util.LinkedList[Class[_ <: ConstraintValidator[A, _]]](validators.asJava)
52 |
53 | // ensure we don't replace existing validators
54 | allValidators.addAll(helper.getAllValidatorClasses(annoClass))
55 |
56 | helper.putValidatorClasses(annoClass, allValidators, false)
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
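
The bundle is installed like any other Dropwizard bundle, during initialize and before any configuration is validated. A minimal sketch; MyConfiguration and MyApplication are illustrative stand-ins:

    import io.dropwizard.setup.{Bootstrap, Environment}
    import io.dropwizard.{Application, Configuration}

    import com.datasift.dropwizard.scala.validation.ScalaValidatorsBundle

    class MyConfiguration extends Configuration

    class MyApplication extends Application[MyConfiguration] {

      override def initialize(bootstrap: Bootstrap[MyConfiguration]): Unit = {
        // registers the Scala-aware @Size validators with the validator factory
        bootstrap.addBundle(new ScalaValidatorsBundle)
      }

      override def run(configuration: MyConfiguration, environment: Environment): Unit = ()
    }
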
/validation/src/main/scala/com/datasift/dropwizard/scala/validation/constraints/package.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.validation
2 |
3 | import scala.annotation.meta.{param, field}
4 | import javax.validation.{constraints => jsr}
5 | import org.hibernate.validator.{constraints => hibernate}
6 |
7 | /** Type aliased constraints for case classes. */
8 | package object constraints {
9 |
10 | // JSR-303 constraints
11 | type AssertFalse = jsr.AssertFalse @field @param
12 | type AssertTrue = jsr.AssertTrue @field @param
13 | type DecimalMax = jsr.DecimalMax @field @param
14 | type DecimalMin = jsr.DecimalMin @field @param
15 | type Digits = jsr.Digits @field @param
16 | type Future = jsr.Future @field @param
17 | type Max = jsr.Max @field @param
18 | type Min = jsr.Min @field @param
19 | type NotNull = jsr.NotNull @field @param
20 | type Null = jsr.Null @field @param
21 | type Past = jsr.Past @field @param
22 | type Pattern = jsr.Pattern @field @param
23 | type Size = jsr.Size @field @param
24 | type Valid = javax.validation.Valid @field @param
25 |
26 | // extra Hibernate Validator constraints
27 | type ConstraintComposition = hibernate.ConstraintComposition
28 | type CreditCardNumber = hibernate.CreditCardNumber @field @param
29 | type Email = hibernate.Email @field @param
30 | type Length = hibernate.Length @field @param
31 | type Mod10Check = hibernate.Mod10Check @field @param
32 | type Mod11Check = hibernate.Mod11Check @field @param
33 | type NotBlank = hibernate.NotBlank @field @param
34 | type NotEmpty = hibernate.NotEmpty @field @param
35 | type Range = hibernate.Range @field @param
36 | type SafeHtml = hibernate.SafeHtml @field @param
37 | type ScriptAssert = hibernate.ScriptAssert
38 | type URL = hibernate.URL @field @param
39 | }
40 |
--------------------------------------------------------------------------------
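
The @field @param meta-annotations make each alias land on both the generated field and the constructor parameter, so constraints written inline on case-class parameters are visible to Hibernate Validator. A minimal sketch; ServerConfig and its fields are illustrative:

    import com.datasift.dropwizard.scala.validation.constraints._

    // a plain JSR-303 annotation on a case-class parameter would only
    // annotate the parameter; these aliases also annotate the field,
    // which is where the validator looks
    case class ServerConfig(
      @NotEmpty host: String,
      @Min(1) @Max(65535) port: Int,
      @Size(max = 10) aliases: List[String] = Nil
    )
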
/validation/src/main/scala/com/datasift/dropwizard/scala/validation/validators/SizeValidatorForScalaCollection.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.validation.validators
2 |
3 | import org.hibernate.validator.internal.util.logging.LoggerFactory
4 | import javax.validation.{ConstraintValidatorContext, ConstraintValidator}
5 | import javax.validation.constraints.Size
6 | import scala.collection.GenTraversableOnce
7 |
8 | object SizeValidatorForScalaCollection {
9 | val log = LoggerFactory.make()
10 | }
11 |
12 | class SizeValidatorForScalaCollection extends ConstraintValidator[Size, GenTraversableOnce[_]] {
13 |
14 | import SizeValidatorForScalaCollection.log
15 |
16 | var min = 0
17 | var max = 0
18 |
19 | def initialize(parameters: Size) {
20 | min = parameters.min()
21 | max = parameters.max()
22 |
23 | if (min < 0) throw log.getMinCannotBeNegativeException
24 | if (max < 0) throw log.getMaxCannotBeNegativeException
25 | if (max < min) throw log.getLengthCannotBeNegativeException
26 | }
27 |
28 | def isValid(value: GenTraversableOnce[_], context: ConstraintValidatorContext): Boolean = {
29 | if (value == null) {
30 | true
31 | } else {
32 | val size = value.size
33 | size >= min && size <= max
34 | }
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/validation/src/main/scala/com/datasift/dropwizard/scala/validation/validators/SizeValidatorForScalaOption.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.validation.validators
2 |
3 | import org.hibernate.validator.internal.util.logging.LoggerFactory
4 | import javax.validation.{ConstraintValidatorContext, ConstraintValidator}
5 | import javax.validation.constraints.Size
6 |
7 | object SizeValidatorForScalaOption {
8 | val log = LoggerFactory.make()
9 | }
10 |
11 | class SizeValidatorForScalaOption extends ConstraintValidator[Size, Option[_]] {
12 |
13 |   import SizeValidatorForScalaOption.log
14 |
15 | var min = 0
16 | var max = 0
17 |
18 | def initialize(parameters: Size) {
19 | min = parameters.min()
20 | max = parameters.max()
21 |
22 | if (min < 0) throw log.getMinCannotBeNegativeException
23 | if (max < 0) throw log.getMaxCannotBeNegativeException
24 | if (max < min) throw log.getLengthCannotBeNegativeException
25 | }
26 |
27 | def isValid(value: Option[_], context: ConstraintValidatorContext): Boolean = value match {
28 | case null => true
29 | case None if min == 0 => true
30 |     case Some(_) if min <= 1 && max >= 1 => true
31 | case _ => false
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/validation/src/test/scala/com/datasift/dropwizard/scala/validation/ScalaValidatorsSpec.scala:
--------------------------------------------------------------------------------
1 | package com.datasift.dropwizard.scala.validation
2 |
3 | import org.scalatest.FlatSpec
4 |
5 | import io.dropwizard.setup.{Environment, Bootstrap}
6 | import io.dropwizard.{Configuration, Application}
7 | import com.datasift.dropwizard.scala.validation.constraints._
8 |
9 | import scala.collection.JavaConverters.asScalaSetConverter
10 |
11 | import javax.validation.Validator
12 |
13 | class ScalaValidatorsSpec extends FlatSpec {
14 |
15 | case class Fixture (
16 | @NotEmpty mandatoryOption: Option[Int] = Option(5),
17 | @NotEmpty mandatoryList: List[Int] = List(1, 2, 3),
18 |
19 | @Size(min = 1, max = 3) shortList: List[Int] = List(1, 2),
20 |
21 | @AssertTrue beTrue: Boolean = true,
22 | @AssertFalse beFalse: Boolean = false,
23 |
24 | @Min(5) minInt: Int = 10,
25 | @Max(15) maxInt: Int = 10,
26 |
27 | @Min(5) minBigInt: BigInt = BigInt(10),
28 | @Max(15) maxBigInt: BigInt = BigInt(10),
29 |
30 | @DecimalMin("5") minDecimalBigInt: BigInt = BigInt(10),
31 | @DecimalMax("15") maxDecimalBigInt: BigInt = BigInt(10)
32 | )
33 |
34 | object MockApplication extends Application[Configuration] {
35 | override def run(configuration: Configuration, environment: Environment) {}
36 | override def initialize(bootstrap: Bootstrap[Configuration]) {}
37 | }
38 |
39 | val bootstrap = new Bootstrap[Configuration](MockApplication)
40 |
41 | new ScalaValidatorsBundle().initialize(bootstrap)
42 | val validator: Validator = bootstrap.getValidatorFactory.getValidator
43 |
44 | "@NotEmpty Option" should "not permit None" in {
45 | val violations = validator.validate(Fixture(mandatoryOption = None)).asScala
46 | assert(violations.size === 1)
47 | }
48 |
49 | it should "permit Some" in {
50 | assert(validator.validate(Fixture(mandatoryOption = Option(5))).asScala.isEmpty)
51 | }
52 |
53 |
54 | "@NotEmpty List" should "not permit empty List" in {
55 | val violations = validator.validate(Fixture(mandatoryList = Nil)).asScala
56 | assert(violations.size === 1)
57 | }
58 |
59 | it should "permit List with elements" in {
60 | assert(validator.validate(Fixture(mandatoryList = List(5))).asScala.isEmpty)
61 | }
62 |
63 |
64 | "@Size List" should "permit in-range" in {
65 | assert(validator.validate(Fixture(shortList = List(1, 2))).asScala.isEmpty)
66 | }
67 |
68 | it should "not permit too few" in {
69 | val violations = validator.validate(Fixture(shortList = Nil)).asScala
70 | assert(violations.size === 1)
71 | }
72 |
73 | it should "not permit too many" in {
74 | val violations = validator.validate(Fixture(shortList = List(1, 2, 3, 4, 5))).asScala
75 | assert(violations.size === 1)
76 | }
77 |
78 |
79 | "@AssertTrue Boolean" should "permit true" in {
80 | assert(validator.validate(Fixture(beTrue = true)).asScala.isEmpty)
81 | }
82 |
83 | it should "not permit false" in {
84 | assert(validator.validate(Fixture(beTrue = false)).asScala.size === 1)
85 | }
86 |
87 |
88 | "@AssertFalse Boolean" should "permit false" in {
89 | assert(validator.validate(Fixture(beFalse = false)).asScala.isEmpty)
90 | }
91 |
92 | it should "not permit true" in {
93 | assert(validator.validate(Fixture(beFalse = true)).asScala.size === 1)
94 | }
95 |
96 |
97 | "@Min Int" should "permit equal" in {
98 | assert(validator.validate(Fixture(minInt = 5)).asScala.isEmpty)
99 | }
100 |
101 | it should "permit larger" in {
102 | assert(validator.validate(Fixture(minInt = 500)).asScala.isEmpty)
103 | }
104 |
105 | it should "not permit smaller" in {
106 | assert(validator.validate(Fixture(minInt = 4)).asScala.size === 1)
107 | }
108 |
109 |
110 | "@Max Int" should "permit equal" in {
111 | assert(validator.validate(Fixture(maxInt = 15)).asScala.isEmpty)
112 | }
113 |
114 | it should "permit smaller" in {
115 | assert(validator.validate(Fixture(maxInt = 5)).asScala.isEmpty)
116 | }
117 |
118 | it should "not permit larger" in {
119 | assert(validator.validate(Fixture(maxInt = 500)).asScala.size === 1)
120 | }
121 |
122 |
123 | "@Min BigInt" should "permit equal" in {
124 | assert(validator.validate(Fixture(minBigInt = BigInt(5))).asScala.isEmpty)
125 | }
126 |
127 | it should "permit larger" in {
128 | assert(validator.validate(Fixture(minBigInt = BigInt(500))).asScala.isEmpty)
129 | }
130 |
131 | it should "not permit smaller" in {
132 | assert(validator.validate(Fixture(minBigInt = BigInt(4))).asScala.size === 1)
133 | }
134 |
135 |
136 | "@Max BigInt" should "permit equal" in {
137 | assert(validator.validate(Fixture(maxBigInt = BigInt(15))).asScala.isEmpty)
138 | }
139 |
140 | it should "permit smaller" in {
141 | assert(validator.validate(Fixture(maxBigInt = BigInt(5))).asScala.isEmpty)
142 | }
143 |
144 | it should "not permit larger" in {
145 | assert(validator.validate(Fixture(maxBigInt = BigInt(500))).asScala.size === 1)
146 | }
147 |
148 |
149 | "@DecimalMin BigInt" should "permit equal" in {
150 | assert(validator.validate(Fixture(minDecimalBigInt = BigInt(5))).asScala.isEmpty)
151 | }
152 |
153 | it should "permit larger" in {
154 | assert(validator.validate(Fixture(minDecimalBigInt = BigInt(500))).asScala.isEmpty)
155 | }
156 |
157 | it should "not permit smaller" in {
158 | assert(validator.validate(Fixture(minDecimalBigInt = BigInt(4))).asScala.size === 1)
159 | }
160 |
161 |
162 | "@DecimalMax BigInt" should "permit equal" in {
163 | assert(validator.validate(Fixture(maxDecimalBigInt = BigInt(15))).asScala.isEmpty)
164 | }
165 |
166 | it should "permit smaller" in {
167 | assert(validator.validate(Fixture(maxDecimalBigInt = BigInt(5))).asScala.isEmpty)
168 | }
169 |
170 | it should "not permit larger" in {
171 | assert(validator.validate(Fixture(maxDecimalBigInt = BigInt(500))).asScala.size === 1)
172 | }
173 | }
174 |
--------------------------------------------------------------------------------
/version.sbt:
--------------------------------------------------------------------------------
1 | version in ThisBuild := "1.3.7-2"
2 |
--------------------------------------------------------------------------------