├── .gitignore
├── LICENSE
├── README.md
├── build.sbt
├── core
│   └── src
│       ├── main
│       │   └── scala
│       │       └── io
│       │           └── github
│       │               └── netvl
│       │                   └── picopickle
│       │                       ├── backend.scala
│       │                       ├── backends
│       │                       │   └── collections
│       │                       │       ├── CollectionsBackend.scala
│       │                       │       ├── CollectionsBackendComponent.scala
│       │                       │       └── CollectionsPickler.scala
│       │                       ├── collections.scala
│       │                       ├── converters.scala
│       │                       ├── defaults.scala
│       │                       ├── discriminator.scala
│       │                       ├── exceptions.scala
│       │                       ├── nulls.scala
│       │                       ├── objectkeys.scala
│       │                       ├── pickler.scala
│       │                       ├── primitives.scala
│       │                       ├── shapeless.scala
│       │                       ├── types.scala
│       │                       ├── utils
│       │                       │   └── DoubleOrStringNumberRepr.scala
│       │                       └── values.scala
│       └── test
│           └── scala
│               └── io
│                   └── github
│                       └── netvl
│                           └── picopickle
│                               ├── ConvertersTestBase.scala
│                               ├── Fixtures.scala
│                               └── backends
│                                   └── collections
│                                       └── CollectionsConvertersTest.scala
├── jawn
│   └── src
│       ├── main
│       │   └── scala
│       │       └── io
│       │           └── github
│       │               └── netvl
│       │                   └── picopickle
│       │                       └── backends
│       │                           └── jawn
│       │                               ├── ast.scala
│       │                               └── json.scala
│       └── test
│           └── scala
│               └── io
│                   └── github
│                       └── netvl
│                           └── picopickle
│                               └── backends
│                                   └── jawn
│                                       └── JsonConvertersTest.scala
├── mongodb
│   └── src
│       ├── main
│       │   └── scala
│       │       └── io
│       │           └── github
│       │               └── netvl
│       │                   └── picopickle
│       │                       └── backends
│       │                           └── mongodb
│       │                               ├── backend.scala
│       │                               └── bson.scala
│       └── test
│           └── scala
│               └── io
│                   └── github
│                       └── netvl
│                           └── picopickle
│                               └── backends
│                                   └── mongodb
│                                       └── MongodbBsonConvertersTest.scala
├── notes
│   ├── 0.1.0.markdown
│   ├── 0.1.1.markdown
│   ├── 0.1.2.markdown
│   ├── 0.1.3.markdown
│   ├── 0.2.0.markdown
│   ├── 0.2.1.markdown
│   ├── 0.3.0.markdown
│   └── about.markdown
└── project
    ├── TestGeneration.scala
    ├── Versions.scala
    ├── build.properties
    ├── build.sbt
    ├── plugins.sbt
    └── tests
        └── Pickler.yml
/.gitignore:
--------------------------------------------------------------------------------
1 | target/
2 | .idea/
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2015 Qubell
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | picopickle 0.3.2
2 | ================
3 |
4 | **Unmaintained: use various alternatives like Circe, uPickle or other format-specific libs.**
5 |
6 | picopickle is a serialization library for Scala. Its main features are:
7 |
8 | * Small and almost dependency-less (the core library depends only on [shapeless]).
9 | * Extensibility: you can define your own serializers for your types and you can create
10 |   custom *backends*, that is, you can use the same library for different serialization formats
11 |   (collections, JSON, BSON, etc.); other parts of the serialization behavior like null handling
12 |   can also be customized.
13 | * Flexibility and convenience: the default serialization format is fine for most uses, but it can
14 | be customized almost arbitrarily with support from a convenient converters DSL.
15 | * Static serialization without reflection: shapeless [`Generic`][Generic] macros are used to
16 | provide serializers for arbitrary types, which means that no reflection is used.
17 |
18 | [shapeless]: https://github.com/milessabin/shapeless
19 | [Generic]: https://github.com/milessabin/shapeless/wiki/Feature-overview:-shapeless-2.0.0#generic-representation-of-sealed-families-of-case-classes
20 |
21 | Contents
22 | --------
23 |
24 | * [Getting started](#getting-started)
25 | + [Backend dependencies](#backend-dependencies)
26 | * [Serialization mechanism](#serialization-mechanism)
27 | * [Usage](#usage)
28 | + [Basic usage](#basic-usage)
29 | + [Serializer objects](#serializer-objects)
30 | + [Custom picklers](#custom-picklers)
31 | + [Backends](#backends)
32 | + [Extending backends](#extending-backends)
33 | + [Creating custom serializers](#creating-custom-serializers)
34 | + [Extractors and backend conversion implicits](#extractors-and-backend-conversion-implicits)
35 | * [Converters](#converters)
36 | * [Supported types](#supported-types)
37 | + [Primitives and basic types](#primitives-and-basic-types)
38 | + [Numbers and accuracy](#numbers-and-accuracy)
39 | + [Tuples](#tuples)
40 | + [Collections](#collections)
41 | + [Map serialization with non-string keys](#map-serialization-with-non-string-keys)
42 | + [Sealed trait hierarchies](#sealed-trait-hierarchies)
43 | + [Changing the discriminator key](#changing-the-discriminator-key)
44 | + [Serialization of optional fields](#serialization-of-optional-fields)
45 | + [Renaming fields and sealed trait variants](#renaming-fields-and-sealed-trait-variants)
46 | + [Default values of case class fields](#default-values-of-case-class-fields)
47 | + [Varargs](#varargs)
48 | + [Nulls](#nulls)
49 | + [Accurate numbers serialization](#accurate-numbers-serialization)
50 | + [Value classes](#value-classes)
51 | * [Official backends](#official-backends)
52 | + [Collections pickler](#collections-pickler)
53 | + [JSON pickler](#json-pickler)
54 | + [BSON pickler](#bson-pickler)
55 | * [Error handling](#error-handling)
56 | * [Limitations](#limitations)
57 | * [Changelog](#changelog)
58 |
59 | Getting started
60 | ----------------------------------------------
61 |
62 | The library is published to Maven Central, so you can just add the following line
63 | to your `build.sbt` file in order to use the core library:
64 |
65 | ```scala
66 | libraryDependencies += "io.github.netvl.picopickle" %% "picopickle-core" % "0.3.2"
67 | ```
68 |
69 | The library is compiled for Scala versions 2.10, 2.11 and 2.12. If you use 2.10, however,
70 | you will need to add the [Macro Paradise] compiler plugin because shapeless macros depend on it:
71 |
72 | ```scala
73 | libraryDependencies += compilerPlugin("org.scalamacros" %% "paradise" % "2.0.1" cross CrossVersion.full)
74 | // or
75 | addCompilerPlugin("org.scalamacros" %% "paradise" % "2.0.1" cross CrossVersion.full)
76 | ```
77 |
78 | Scala 2.11 and 2.12 users do not need this, as all relevant macro support is already present in these versions.
79 |
80 | [Macro Paradise]: http://docs.scala-lang.org/overviews/macros/paradise.html
81 |
82 | ### Backend dependencies
83 |
84 | Picopickle supports different *backends*. A backend defines the target serialization format,
85 | for example, JSON, BSON or just regular collections. The core library provides collections
86 | backend, and an additional JSON backend based on [Jawn] parser is available as
87 | `picopickle-backend-jawn`:
88 |
89 | ```scala
90 | libraryDependencies += "io.github.netvl.picopickle" %% "picopickle-backend-jawn" % "0.3.2"
91 | ```
92 |
93 | The Jawn backend uses the Jawn parser (naturally!) to read JSON strings, but it uses a custom
94 | renderer to print the JSON AST as a string in order to keep dependencies to a minimum. This
95 | renderer is very basic and does not support pretty-printing; this is likely to be fixed in one
96 | of the future versions.
97 |
98 | You can create your own backends to support your own data formats; more information on how
99 | to do it is available below. It is likely that more officially supported backends will be
100 | available later.
101 |
102 | [Jawn]: https://github.com/non/jawn
103 |
104 | Serialization mechanism
105 | --------------------------------------------------------------
106 |
107 | picopickle uses the fairly standard typeclass approach, where the way a type is serialized
108 | or deserialized is defined through implicit instances (called `Reader[T]` and `Writer[T]` in picopickle)
109 | in scope. The library defines corresponding instances for a lot of standard types:
110 |
111 | * primitives and other basic types: `Byte`, `Short`, `Int`, `Long`, `Float`, `Double`, `Boolean`,
112 | `Char`, `String`, `Unit`, `Null`, `Symbol`, `Option[T]`, `Either[A, B]`;
113 | * tuples (currently generated as a part of build process for lengths from 1 to 22);
114 | * most of standard Scala collections;
115 | * sealed trait hierarchies: case classes and case objects, possibly implementing some sealed trait,
116 | and the sealed trait itself.
117 |
118 | Serializers for sealed trait hierarchies are derived automatically with the help of shapeless
119 | `LabelledGeneric` type class. The library defines several generic instances for the core shapeless
120 | types (`HList` and `Coproduct`), and shapeless does the hard work of inspecting case classes
121 | and sealed traits.
122 |
123 | Since sealed trait hierarchies are equivalent to algebraic data types, their representation
124 | with the shapeless type is fairly natural: each case class/case object is represented
125 | by a `HList` of corresponding field types labelled with field names, and the whole hierarchy
126 | is represented by a `Coproduct` of the corresponding types which implement the sealed trait.
127 |
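| As an illustration, here is a small sketch of how such a hierarchy maps onto the shapeless
| types (the representations in the comments are schematic; this is not code generated by the
| library):
|
| ```scala
| import shapeless._
|
| sealed trait Shape
| case class Circle(radius: Double) extends Shape
| case object Empty extends Shape
|
| // LabelledGeneric[Circle].Repr is, schematically, an HList of the field types
| // tagged with the field names:
| //   Double with KeyTag["radius", Double] :: HNil
| // LabelledGeneric[Shape].Repr is a Coproduct of the subtypes tagged with their names:
| //   Circle :+: Empty.type :+: CNil
| val gen  = LabelledGeneric[Circle]
| val repr = gen.to(Circle(1.0)) // an HList; gen.from(repr) restores the case class
| ```
|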
128 | picopickle also supports recursive types, that is, when a case class eventually depends on
129 | itself or on the sealed trait it belongs to, for example:
130 |
131 | ```scala
132 | sealed trait Root
133 | case object A extends Root
134 | case class B(x: Int, b: Option[B]) extends Root // depends on itself
135 | case class C(next: Root) extends Root // depends on the sealed trait
136 | ```
137 |
138 | picopickle also supports default values and variable arguments in case classes and renaming of fields
139 | or sealed trait descendants with a bit of custom macros.
140 |
141 | Usage
142 | --------------------------
143 |
144 | ### Basic usage
145 |
146 | picopickle is structured using the cake pattern, that is, it consists of several traits providing
147 | parts of the functionality which are then combined into a single object called a *pickler*. It
148 | provides everything necessary for the serialization via a wildcard import:
149 |
150 | ```scala
151 | import some.package.SomePickler._
152 |
153 | write("Hello") shouldEqual SomePicklerBackend.StringValue("Hello")
154 | ```
155 |
156 | The core library and the Jawn backend library provide default picklers, so if you don't need
157 | any customization (e.g. you don't need to define custom serializers for your types) you can just
158 | import the internals of one of these picklers:
159 |
160 | ```scala
161 | import io.github.netvl.picopickle.backends.collections.CollectionsPickler._
162 |
163 | case class A(x: Int, y: String)
164 |
165 | write(A(10, "hi")) shouldEqual Map("x" -> 10, "y" -> "hi")
166 | read[A](Map("x" -> 10, "y" -> "hi")) shouldEqual A(10, "hi")
167 | ```
168 |
169 | The Jawn-based pickler also provides additional functions, `readString()`/`writeString()` and
170 | `readAst()`/`writeAst()`, which [de]serialize objects directly from and to strings and convert
171 | the JSON AST from and to strings, respectively:
172 |
173 | ```scala
174 | import io.github.netvl.picopickle.backends.jawn.JsonPickler._
175 |
176 | case class A(x: Int, y: String)
177 |
178 | writeString(A(10, "hi")) shouldEqual """{"x":10,"y":"hi"}"""
179 | readString[A]("""{"x":10,"y":"hi"}""") shouldEqual A(10, "hi")
180 | ```
181 |
182 | Currently the string JSON representation is not prettified (but prettification may be implemented in later versions).
183 |
184 | ### Serializer objects
185 |
186 | Sometimes you need to work with serialization and deserialization in the same piece of code (e.g. writing and reading
187 | data from a database). Then it is convenient to have `read` and `write` methods fixed for some specific type,
188 | both for correctness' sake and in order to instantiate the corresponding readers and writers in one place (which
189 | potentially may speed up the compilation).
190 |
191 | picopickle provides a special serializer class which can be constructed for any type which has `Reader` and `Writer`
192 | instances. This class provides `read` and `write` methods specified for the type which this serializer is created for:
193 |
194 | ```scala
195 | import io.github.netvl.picopickle.backends.collections.CollectionsPickler._
196 |
197 | case class A(x: Int, y: String)
198 |
199 | val aSerializer = serializer[A]
200 |
201 | aSerializer.write(A(10, "hi")) shouldEqual Map("x" -> 10, "y" -> "hi")
202 | // aSerializer.write("whatever") // won't compile - write() accepts values of type A only
203 |
204 | aSerializer.read(Map("x" -> 10, "y" -> "hi")) shouldEqual A(10, "hi")
205 | // val x: String = aSerializer.read("whatever") // won't compile - read() returns values of type A
206 | ```
207 |
208 | The Jawn-based pickler extends this class to provide `readString` and `writeString` methods:
209 |
210 | ```scala
211 | import io.github.netvl.picopickle.backends.jawn.JsonPickler._
212 |
213 | case class A(x: Int, y: String)
214 |
215 | val aSerializer = serializer[A]
216 |
217 | aSerializer.writeString(A(10, "hi")) shouldEqual """{"x":10,"y":"hi"}"""
218 | aSerializer.readString("""{"x":10,"y":"hi"}""") shouldEqual A(10, "hi")
219 | ```
220 |
221 | ### Custom picklers
222 |
223 | You may want to define custom serializers for some of your
224 | types. In that case you can define custom serializer instances in a trait which "depends" on
225 | `BackendComponent` and `TypesComponent` via a self-type annotation:
226 |
227 | ```scala
228 | import io.github.netvl.picopickle.{BackendComponent, TypesComponent}
229 |
230 | case class DefinedByInt(x: Int, y: String)
231 |
232 | trait CustomSerializers {
233 | this: BackendComponent with TypesComponent =>
234 |
235 | implicit val definedByIntWriter: Writer[DefinedByInt] = Writer {
236 | case DefinedByInt(x, _) => backend.makeNumber(x)
237 | }
238 |
239 | implicit val definedByIntReader: Reader[DefinedByInt] = Reader {
240 | case backend.Extract.Number(x) => DefinedByInt(x.intValue(), x.intValue().toString)
241 | }
242 | }
243 | ```
244 |
245 | Then this trait should be mixed into the corresponding pickler trait conveniently defined
246 | in the library in order to create the pickler object:
247 |
248 | ```scala
249 | import io.github.netvl.picopickle.backends.jawn.JsonPickler
250 |
251 | object CustomPickler extends JsonPickler with CustomSerializers
252 | ```
253 |
254 | You can also define the serializers directly in the pickler object if they are not supposed
255 | to be reused or if you only have one pickler object in your program:
256 |
257 | ```scala
258 | import io.github.netvl.picopickle.backends.jawn.JsonPickler
259 |
260 | object CustomPickler extends JsonPickler {
261 | implicit val definedByIntWriter: Writer[DefinedByInt] = Writer {
262 | case DefinedByInt(x, _) => backend.makeNumber(x)
263 | }
264 |
265 | implicit val definedByIntReader: Reader[DefinedByInt] = Reader {
266 | case backend.Extract.Number(x) => DefinedByInt(x.intValue(), x.intValue().toString)
267 | }
268 | }
269 | ```
270 |
271 | Alternatively, you can import internals of some pickler and define serializers anywhere, but then you need
272 | to add an import for wherever these serializers are defined in:
273 |
274 | ```scala
275 | object CustomPickler extends JsonPickler
276 |
277 | object CustomSerializers {
278 | import CustomPickler._
279 |
280 | implicit val definedByIntWriter: Writer[DefinedByInt] = Writer {
281 | case DefinedByInt(x, _) => backend.makeNumber(x)
282 | }
283 |
284 | implicit val definedByIntReader: Reader[DefinedByInt] = Reader {
285 | case backend.Extract.Number(x) => DefinedByInt(x.intValue(), x.intValue().toString)
286 | }
287 | }
288 |
289 | import CustomSerializers._
290 |
291 | CustomPickler.writeString(DefinedByInt(10, "10")) shouldEqual "10"
292 | ```
293 |
294 | This approach also ties the serializers to one concrete pickler, preventing you from reusing them with
295 | other kinds of picklers in your program.
296 |
297 | picopickle provides several utilities which help you write custom serializers and deserializers; first, however,
298 | we need to explain what backends are.
299 |
300 | ### Backends
301 |
302 | A *backend* in picopickle defines an intermediate AST called the *backend representation*, which is the medium
303 | into which values of other types are serialized. For example, for JSON it is the JSON AST, that is, a set of classes
304 | which together can be used to form any correct JSON object tree. Additionally, a backend provides
305 | methods to construct these values generically from basic Scala types and collections and to deconstruct
306 | these values back into these basic types.
307 |
308 | In general, a backend may be arbitrarily complex. It can consist of a lot of classes with various relationships
309 | between them and all the necessary methods to construct them. However, in order to provide the ability to
310 | serialize arbitrary types to arbitrary backend representations, some restrictions have to be put on the structure
311 | of the backend representation, that is, there has to be a minimal set of primitives supported
312 | by all backends. picopickle requires that all backends support a basic JSON-like tree AST, that is,
313 | objects keyed by strings, arrays indexed by integers, strings, numbers, booleans and null. Using these primitives,
314 | picopickle is able to provide serializers for basic primitive types and sealed trait hierarchies out of the box.
315 |
316 | The `Backend` trait is used to represent backends in Scala code. This trait contains abstract types which
317 | define the AST and a lot of methods to construct the AST from basic types. Each implementation of this trait
318 | should provide the following abstract types:
319 |
320 | ```scala
321 | type BValue
322 | type BObject <: BValue
323 | type BArray <: BValue
324 | type BString <: BValue
325 | type BNumber <: BValue
326 | type BBoolean <: BValue
327 | type BNull <: BValue
328 | ```
329 |
330 | Each implementation must also provide a set of methods for converting between these abstract types and basic Scala
331 | types. The mapping is as follows:
332 |
333 | ```
334 | BObject -> Map[String, BValue]
335 | BArray -> Vector[BValue]
336 | BString -> String
337 | BNumber -> Number
338 | BBoolean -> Boolean
339 | BNull -> Null
340 | ```
341 |
342 | That is, each backend should provide methods to convert from `BValue` to `Map[String, BValue]` and back etc. These
343 | methods can be divided into three groups:
344 |
345 | * those which convert Scala values to backend representation: prefixed with `make`;
346 | * those which convert backend representation to Scala values: prefixed with `from`;
347 | * those which extract concrete backend type (e.g. `BObject`, `BString`) from the abstract `BValue`: prefixed with `get`.
348 |
349 | The last group of methods returns `Option[...]` because these methods are partial by nature.
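|
| As an illustration, here is roughly what these conventions imply for the object type (a sketch
| of the signatures following the naming scheme above; see the actual `Backend` trait for the
| authoritative definitions):
|
| ```scala
| def makeObject(m: Map[String, BValue]): BObject      // Scala value -> backend representation
| def fromObject(obj: BObject): Map[String, BValue]    // backend representation -> Scala value
| def getObject(value: BValue): Option[BObject]        // partial extraction from the abstract BValue
| ```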
350 |
351 | There are also some convenience methods like `makeEmptyObject` or `getArrayValueAt` which can be defined via
352 | a conversion with the corresponding `from` method and then a query on the resulting Scala object, but these
353 | methods may query the underlying backend representation directly, saving on intermediate object construction.
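|
| For example, such a convenience method could be expressed through the basic ones like this
| (a sketch following the conventions above, not the library's actual code):
|
| ```scala
| // build an empty object via the basic constructor
| def makeEmptyObject: BObject = makeObject(Map.empty)
| // index into an array by converting it to a Vector first
| def getArrayValueAt(arr: BArray, index: Int): BValue = fromArray(arr)(index)
| ```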
354 |
355 | In order to create a custom backend you need to implement `Backend` trait first:
356 |
357 | ```scala
358 | object MyBackend extends Backend {
359 | type BValue = ...
360 | ...
361 | }
362 | ```
363 |
364 | Then you need to create a cake component for this backend; this component must implement `BackendComponent` trait:
365 |
366 | ```scala
367 | trait MyBackendComponent extends BackendComponent {
368 | override val backend = MyBackend
369 | }
370 | ```
371 |
372 | And finally you should extend `DefaultPickler`, mixing it with your backend component:
373 |
374 | ```scala
375 | trait MyPickler extends DefaultPickler with MyBackendComponent
376 | object MyPickler extends MyPickler
377 | ```
378 |
379 | Naturally, you can choose not to merge `DefaultPickler` fully into your pickler if you don't want to, for example,
380 | if you don't need the automatic materialization of writers for sealed trait hierarchies. In that case you can
381 | mix in only those traits you need. See `DefaultPickler` documentation to find out which components it consists of
382 | (**TODO**).
383 |
384 | After this, the `MyPickler.read` and `MyPickler.write` methods will work with your backend representation.
385 |
386 | ### Extending backends
387 |
388 | Sometimes the set of types and methods provided by the default `Backend` trait is simply not enough
389 | because the desired target representation supports more types. One example is BSON, which supports,
390 | aside from all the standard JSON types, also things like date and time, object ids, explicit 32-bit
391 | and 64-bit integers and byte arrays. Naturally, one would like to automatically serialize Scala types
392 | to the most efficient representation available.
393 |
394 | picopickle is extensible in this regard too. Because backends are just implementations of one trait,
395 | nothing prevents you from adding new concrete types to your backend implementation, defining new conversion
396 | functions and building your own serializers which make use of these functions:
397 |
398 | ```scala
399 | // Define a new backend
400 | object BsonBackend extends Backend {
401 | // implement required types
402 | override type BValue = BsonValue
403 | override type BObject = BsonDocument
404 | ...
405 |
406 | // add new types
407 | type BObjectId = BsonObjectId
408 | type BInt32 = BsonInt32
409 | type BInt64 = BsonInt64
410 | ...
411 |
412 | // add new conversion functions, possibly following the existing interface
413 | def fromObjectId(oid: BObjectId): ObjectId = ...
414 | def makeObjectId(oid: ObjectId): BObjectId = ...
415 | def getObjectId(value: BValue): Option[BObjectId] = ...
416 |
417 | def fromInt32(n: BInt32): Int = ...
418 | ...
419 |
420 | def fromInt64(n: BInt64): Long = ...
421 | ...
422 | }
423 |
424 | // define a backend component
425 | trait BsonBackendComponent extends BackendComponent {
426 | override val backend = BsonBackend
427 | }
428 |
429 | // define a trait with custom serializers
430 | trait BsonBackendSerializers {
431 | // it should depend on the concrete BsonBackendComponent, not on generic BackendComponent
432 | this: BsonBackendComponent with TypesComponent =>
433 |
434 | import backend._
435 |
436 | // and here we can use all custom functions defined in the custom backend
437 |
438 | implicit val objectIdReadWriter: ReadWriter[ObjectId] =
439 | ReadWriter.writing(backend.makeObjectId).reading {
440 | case bv: BObjectId => backend.fromObjectId(bv)
441 | }
442 |
443 | implicit val intReadWriter: ReadWriter[Int] =
444 | ReadWriter.writing(backend.makeInt32).reading {
445 | case bv: BInt32 => backend.fromInt32(bv)
446 | }
447 |
448 | ...
449 | }
450 |
451 | // finally, define the pickler trait by mixing it all together
452 | trait BsonPickler extends DefaultPickler with BsonBackendComponent with BsonBackendSerializers
453 | object BsonPickler extends BsonPickler
454 | ```
455 |
456 | Note that picklers defined in the custom trait will have a greater priority than picklers inherited
457 | from the `DefaultPickler` trait. Therefore, `intReadWriter` defined in the trait above
458 | will be used instead of the `intReader`/`intWriter` pair defined in `PrimitiveReadWritersComponent`
459 | which is inherited by `DefaultPickler`.
460 |
461 | You can find an example of this technique in the [official BSON backend implementation][bson-backend].
462 |
463 | [bson-backend]: https://github.com/netvl/picopickle/blob/master/mongodb/src/main/scala/io/github/netvl/picopickle/backends/mongodb/bson.scala
464 |
465 | ### Creating custom serializers
466 |
467 | picopickle defines `Writer` and `Reader` basic types in `TypesComponent` which are called *serializers*.
468 | They are responsible for converting arbitrary types to their backend representation and back, respectively.
469 | The most basic way to construct custom serializers is to use `apply` method on `Reader` and `Writer`
470 | companion objects, which take `PartialFunction[backend.BValue, T]` and `T => backend.BValue`,
471 | respectively (you can find examples of both above).
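|
| For instance, a minimal pair of serializers for a simple wrapper type could look like this
| (a sketch assuming a pickler's internals are in scope; `Tag` is a made-up example type):
|
| ```scala
| case class Tag(name: String)
|
| // Writer takes a total function from the type to the backend representation
| implicit val tagWriter: Writer[Tag] = Writer {
|   case Tag(name) => backend.makeString(name)
| }
|
| // Reader takes a partial function from the backend representation to the type
| implicit val tagReader: Reader[Tag] = Reader {
|   case backend.Extract.String(name) => Tag(name)
| }
| ```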
472 |
473 | (Terminology note: `Writer` and `Reader` are called *serializers*, while typed serialization objects described above,
474 | that is, the ones returned by the call of `serializer[T]` method, are called *serializer objects*. While related,
475 | these are different things. Serializer objects are completely optional, you won't have to use them if you don't want;
476 | on the other hand, serializers are the key entities in picopickle and you can't do away with them.)
477 |
478 | Any `Writer`, since it receives a total function, should be able to serialize any values of its corresponding type.
479 | `Reader`, however, can fail to match the backend representation. See below for more information on error
480 | handling in picopickle.
481 |
482 | `TypesComponent` also defines a combined serializer called `ReadWriter`:
483 |
484 | ```scala
485 | type ReadWriter[T] = Reader[T] with Writer[T]
486 | ```
487 |
488 | Its companion object also provides convenient facilities to create its instances. The example above can be
489 | rewritten with `ReadWriter` like this:
490 |
491 | ```scala
492 | implicit val definedByIntReadWriter: ReadWriter[DefinedByInt] = ReadWriter.reading {
493 | case backend.Extract.Number(x) => DefinedByInt(x.intValue(), x.intValue().toString)
494 | }.writing {
495 | case DefinedByInt(x, _) => backend.makeNumber(x)
496 | }
497 | ```
498 |
499 | You can switch the order of the `reading`/`writing` branches if you like.
500 |
501 | ### Extractors and backend conversion implicits
502 |
503 | `Backend` trait provides methods to create and deconstruct objects of backend representation: these are `make*`,
504 | `from*` and `get*` methods described above. To simplify writing custom serializers, however, picopickle
505 | provides a set of tools which help you writing conversions. The most basic of them are *extractors* and
506 | *backend conversion implicits*.
507 |
508 | The backend object contains several singleton objects with `unapply` methods which can be used to pattern-match
509 | on `backend.BValue` and obtain the low-level values out of it, for example, to get a `Map[String, backend.BValue]`
510 | out of `backend.BObject`, if the particular `backend.BValue` you're matching on is indeed a `backend.BObject`:
511 |
512 | ```scala
513 | backend.makeObject(...) match {
514 | case backend.Extract.Object(m) => // m is of type Map[String, backend.BValue]
515 | }
516 | ```
517 |
518 | There are extractors for all of the main backend representation variants:
519 | * `backend.Extract.Object`
520 | * `backend.Extract.Array`
521 | * `backend.Extract.String`
522 | * `backend.Extract.Number`
523 | * `backend.Extract.Boolean`
524 |
525 | Their `unapply` implementations simply call the corresponding `get*` and `from*` methods, like this:
526 |
527 | ```scala
528 | object Extractors {
529 | object String {
530 | def unapply(value: BValue): Option[String] = getString(value).map(fromString)
531 | }
532 | }
533 | ```
534 |
535 | The opposite conversion (from primitives to the backend representation) can be done with `make*` methods on the
536 | backend, but picopickle also provides a set of implicit decorators which provide `toBackend` method on all of
537 | the basic types. These decorators are defined in `backend.conversionImplicits` object:
538 |
539 | ```scala
540 | import backend.conversionImplicits._
541 |
542 | val s: backend.BString = "hello world".toBackend
543 |
544 | // the above is equivalent to this:
545 |
546 | val s: backend.BString = backend.makeString("hello world")
547 | ```
548 |
549 | These implicit methods are somewhat more convenient than `make*` functions.
550 |
551 | Converters
552 | ------------------------------------
553 |
554 | Low-level conversions, however, may be overly verbose to write. picopickle provides a declarative way of
555 | defining how the backend representation should be translated to the desired Scala objects and vice versa.
556 | This is done with *converters*.
557 |
558 | A converter looks much like a `ReadWriter`; however, it is parameterized by two types, source and target:
559 |
560 | ```scala
561 | trait Converter[-T, +U] {
562 | def toBackend(v: T): backend.BValue
563 | def isDefinedAt(bv: backend.BValue): Boolean
564 | def fromBackend(bv: backend.BValue): U
565 | }
566 | ```
567 |
568 | The converters library defines several implicit conversions which allow any converter to be used as the
569 | corresponding `Reader`, `Writer` or `ReadWriter`:
570 |
571 | ```scala
572 | Converter[T, _] -> Writer[T]
573 | Converter[_, U] -> Reader[U]
574 | Converter[T, T] -> ReadWriter[T]
575 | ```
576 |
577 | A converter which consumes and produces the same type is called an *identity* converter for that type. Naturally,
578 | only identity converters can be used as `ReadWriter`s. Identity converters have a convenient type alias
579 | `Converter.Id[T]`.
580 |
581 | The converters library also defines several combinators which allow combining converters to obtain new
582 | ones, and it also provides built-in converters for basic primitive types, objects and arrays.
583 |
584 | For example, here is how you can define a conversion for some case class manually:
585 |
586 | ```scala
587 | case class A(a: Boolean, b: Double)
588 |
589 | trait CustomSerializers extends JsonPickler {
590 | import shapeless._
591 | import converters._
592 |
593 | val aConverter: Converter.Id[A] = unlift(A.unapply) >>> obj {
594 | "a" -> bool ::
595 | "b" -> num.double ::
596 | HNil
597 | } >>> A.apply _
598 |
599 | val aReadWriter: ReadWriter[A] = aConverter // an implicit conversion is used here
600 | }
601 | ```
602 |
603 | Here `obj.apply` is used to define an identity converter for `Boolean :: Double :: HNil`,
604 | and `>>>` operations "prepend" and "append" a deconstructor and a constructor for class `A`:
605 |
606 | ```scala
607 | A.unapply : A => Option[(Boolean, Double)]
608 | unlift(A.unapply) : A => (Boolean, Double)
609 |
610 | A.apply _ : (Boolean, Double) => A
611 |
612 | obj {
613 | "a" -> bool ::
614 | "b" -> num.double ::
615 | HNil
616 | } : Converter.Id[Boolean :: Double :: HNil]
617 | ```
618 |
619 | `bool` and `num.double` are identity converters for `Boolean` and `Double`, respectively.
620 |
621 | `>>>` operations employ a little shapeless magic to convert functions like the ones above to functions
622 | which consume and produce `HList`s. There is also a `>>` combinator which does not use shapeless and "prepends"
623 | and "appends" a function of the corresponding type directly:
624 |
625 | ```scala
626 | (A => B) >> Converter[B, C] >> (C => D) -> Converter[A, D]
627 |
628 | // compare:
629 |
630 | (A => (T1, T2, ..., Tn)) >>> Converter.Id[T1 :: T2 :: ... :: Tn :: HNil] >>> ((T1, T2, ..., Tn) => A) -> Converter.Id[A]
631 | ```
632 |
633 | Note that this is very type-safe. For example, if you get the order or the types of fields in `obj` wrong, it won't compile.
634 |
635 | picopickle additionally provides a convenient implicit alias for `andThen` on functions, also called `>>`. Together with
636 | `>>` on converters this allows writing chains of transformations easily. For example, suppose you have an object which
637 | can be represented as an array of bytes. Then you want to serialize this byte array as a string in Base64 encoding.
638 | This can be written as follows:
639 |
640 | ```scala
641 | import java.util.Base64
642 | import java.nio.charset.StandardCharsets
643 |
644 | case class Data(s: String)
645 | object Data {
646 | def asBytes(d: Data) = d.s.getBytes(StandardCharsets.UTF_8)
647 | def fromBytes(b: Array[Byte]) = Data(new String(b, StandardCharsets.UTF_8))
648 | }
649 |
650 | val dataReadWriter: ReadWriter[Data] =
651 | Data.asBytes _ >>
652 | Base64.getEncoder.encodeToString _ >>
653 | str >>
654 | Base64.getDecoder.decode _ >>
655 | Data.fromBytes _
656 | ```
657 |
658 | The sequence of functions chained with `>>` naturally defines the transformation order in both directions.
659 |
660 | A similar thing is also possible for arrays. For example, you can serialize your case class as an array
661 | of fields:
662 |
663 | ```scala
664 | val aReadWriter: ReadWriter[A] = unlift(A.unapply) >>> arr(bool :: num.double :: HNil) >>> A.apply _
665 | ```
666 |
667 | Naturally, there are converters for homogeneous arrays and objects too - they allow mapping to Scala collections:
668 |
669 | ```scala
670 | val intListConv: Converter.Id[List[Int]] = arr.as[List].of(num.int)
671 | val vecTreeMapConv: Converter.Id[TreeMap[String, Vector[Double]]] = obj.as[TreeMap].to(arr.as[Vector].of(num.double))
672 | ```
673 |
674 | There is also a converter which delegates to `Reader` and `Writer` if corresponding implicit instances are available:
675 |
676 | ```scala
677 | val optionStringConv: Converter.Id[Option[String]] = value[Option[String]]
678 | ```
679 |
680 | You can find more on converters in their Scaladoc section (**TODO**).
681 |
682 | Supported types
683 | ----------------------------------------------
684 |
685 | By default picopickle provides a lot of serializers for various types which do their
686 | best to represent their respective types as closely as possible in the serialized form.
687 | These serializers are then mixed into a single pickler.
688 |
689 | The serializers are defined in a number of traits:
690 |
691 | ```
692 | io.github.netvl.picopickle.{CollectionReaders, CollectionWriters, CollectionReaderWritersComponent}
693 | io.github.netvl.picopickle.{ShapelessReaders, ShapelessWriters, ShapelessReaderWritersComponent}
694 | io.github.netvl.picopickle.{PrimitiveReaders, PrimitiveWriters, PrimitiveReaderWritersComponent}
695 | io.github.netvl.picopickle.{TupleReaders, TupleWriters, TupleReaderWritersComponent} // generated automatically
696 | ```
697 |
698 | Every serializer is an overridable `def` or `val`, so you can easily customize the serialization
699 | format by overriding the corresponding implicit definition with your own.
700 |
701 | Examples below use `JsonPickler`, so it is implicitly assumed that something like
702 |
703 | ```scala
704 | import io.github.netvl.picopickle.backends.jawn.JsonPickler._
705 | ```
706 |
707 | is present in the code.
708 |
709 | ### Primitives and basic types
710 |
711 | picopickle natively supports serialization of all primitive and basic types:
712 |
713 | ```scala
714 | writeString(1: Int) shouldEqual "1"
715 | writeString(2L: Long) shouldEqual "2"
716 | writeString(12.2: Double) shouldEqual "12.2"
717 | writeString('a') shouldEqual "\"a\""
718 | writeString("hello") shouldEqual "\"hello\""
719 | writeString(true) shouldEqual "true"
720 | writeString(false) shouldEqual "false"
721 | writeString(null) shouldEqual "null"
722 | writeString('symbol) shouldEqual "\"symbol\""
723 | ```
724 |
725 | By default characters are serialized as strings, but, for example, collections backend redefines this behavior.
726 |
727 | picopickle also can serialize `Option[T]` and `Either[L, R]` as long as there are serializers for their type
728 | parameters:
729 |
730 | ```scala
731 | writeString(Some(1)) shouldEqual "[1]"
732 | writeString(None) shouldEqual "[]"
733 |
734 | writeString(Left("hello")) shouldEqual """[0,"hello"]"""
735 | writeString(Right('goodbye)) shouldEqual """[1,"goodbye"]"""
736 | ```
737 |
738 | Optional values are also handled specially when they are part of a case class definition; see below for more
739 | explanation.
740 |
741 | Please note that `Either[L, R]` serialization format is not final and can change in future versions.
742 |
743 | ### Numbers and accuracy
744 |
745 | Most JSON libraries represent numbers as 64-bit floats, i.e. `Double`s, but some numerical values do not fit into
746 | `Double`, and rounding occurs:
747 |
748 | ```scala
749 | 80000000000000000.0 shouldEqual 80000000000000008.0 // does not throw
750 | ```
751 |
752 | In order to represent numbers as accurately as possible, picopickle by default serializes as strings all `Long`s
753 | which cannot be represented precisely as a `Double`:
754 |
755 | ```scala
756 | writeString(80000000000000000L) shouldEqual "\"80000000000000000\""
757 | writeString(Double.PositiveInfinity) shouldEqual "Infinity"
758 | ```
759 |
760 | The same mechanism will probably be used when `BigInt`/`BigDecimal` handlers are added.
761 |
762 | In some backends, however, this behavior can be overridden, as is done, for example, in the
763 | official BSON backend.
764 |
765 | ### Tuples
766 |
767 | Tuples are serialized as arrays:
768 |
769 | ```scala
770 | writeString((1, true, "a")) shouldEqual "[1,true,\"a\"]"
771 | ```
772 |
773 | The only exception is a tuple of zero items, usually called `Unit`. It is serialized as an empty object:
774 |
775 | ```scala
776 | writeString(()) shouldEqual "{}"
777 | ```
778 |
779 | Naturally, all elements of tuples must be serializable as well.
780 |
781 | Tuple serializer instances are generated as a part of the build process, and currently only
782 | tuples of length up to and including 22 are supported.
783 |
784 | ### Collections
785 |
786 | Most of the Scala collections library classes are supported, including all of the abstract ones below `Iterable`,
787 | as well as arrays:
788 |
789 | ```scala
790 | writeString(Iterable(1, 2, 3, 4)) shouldEqual "[1,2,3,4]"
791 | writeString(Seq(1, 2, 3, 4)) shouldEqual "[1,2,3,4]"
792 | writeString(Set(1, 2, 3, 4)) shouldEqual "[1,2,3,4]"
793 | writeString(Map(1 -> 2, 3 -> 4)) shouldEqual "[[1,2],[3,4]]"
794 |
795 | writeString(1 :: 2 :: 3 :: Nil) shouldEqual "[1,2,3]"
796 | writeString(Vector(1, 2, 3)) shouldEqual "[1,2,3]"
797 | writeString(TreeMap(1 -> 2, 3 -> 4)) shouldEqual "[[1,2],[3,4]]"
798 |
799 | writeString(Array(1, 2, 3)) shouldEqual "[1,2,3]"
800 | ```
801 |
802 | Mutable collections can be [de]serialized as well.
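|
| For example (an illustration assuming `mutable.ArrayBuffer` is among the supported concrete
| implementations):
|
| ```scala
| import scala.collection.mutable
|
| writeString(mutable.ArrayBuffer(1, 2, 3)) shouldEqual "[1,2,3]"
| readString[mutable.ArrayBuffer[Int]]("[1,2,3]") shouldEqual mutable.ArrayBuffer(1, 2, 3)
| ```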
803 |
804 | Maps are serialized like iterables of two-element tuples, that is, into arrays of two-element arrays. However,
805 | if the map has string keys (which is determined statically), it will be serialized as an object:
806 |
807 | ```scala
808 | writeString(Map("a" -> 1, "b" -> 2)) shouldEqual """{"a":1,"b":2}"""
809 | ```
810 |
811 | The above behavior of serializing maps with string keys is the default, but it can be extended. See below.
812 |
813 | If you're using abstract collection types like `Seq`, `Set` or `Map`, picopickle will work flawlessly. If you
814 | use concrete collection types, however, there could be problems. picopickle has a lot of instances for most of
815 | the main concrete implementations, but not for all of them. If you need something which is not present in the
816 | library, feel free to file an issue.
817 |
818 | ### Map serialization with non-string keys
819 |
820 | JSON-like languages usually don't allow using non-string values as object keys, and picopickle enforces this
821 | restriction by its `BObject` representation which requires string keys. However, this is sometimes overly restrictive,
822 | especially in a richly typed language like Scala, given the common patterns which follow from this.
823 |
824 | It is not unusual for Scala projects to have a newtype or several for `String`, for example, for different
825 | kinds of identifiers:
826 |
827 | ```scala
828 | case class PostId(id: String)
829 | case class UserId(id: String)
830 | ```
831 |
832 | Alternatively, it is possible to have a simple value class which does not wrap a `String` but which can easily
833 | be converted to and from a string:
834 |
835 | ```scala
836 | case class EntityPath(elems: Vector[String]) {
837 | override def toString = elems.mkString("/")
838 | }
839 | object EntityPath {
840 | def fromString(s: String) = EntityPath(s.split("/").toVector)
841 | }
842 | ```
843 |
844 | It is sometimes desirable to have these classes as keys in maps:
845 |
846 | ```scala
847 | type UserMap = Map[UserId, User]
848 | type EntityLocations = Map[EntityPath, Entity]
849 | ```
850 |
851 | One would naturally want these maps to have an object-based representation (instead of an array of arrays)
852 | because keys are easily converted to and from strings. In picopickle, however, only maps of type `Map[String, T]`
853 | can be directly serialized as objects.
854 |
855 | To allow this kind of pattern, picopickle provides a way to define custom converters for map keys. When a map
856 | with keys of type `T` is serialized or deserialized, and if there is an instance of type
857 | `ObjectKeyReader[T]`/`ObjectKeyWriter[T]`/`ObjectKeyReadWriter[T]` in scope, then it will be used to obtain
858 | a `String` from `T` (or vice versa) which will then be used as an object key:
859 |
860 | ```scala
861 | implicit val userIdObjectKeyReadWriter = ObjectKeyReadWriter(_.id, UserId)
862 |
863 | // below a `_.toString` conversion is implicitly used
864 | implicit val entityPathObjectKeyReadWriter = ObjectKeyReadWriter(EntityPath.fromString)
865 |
866 | write[UserMap](Map(UserId("u1") -> ..., UserId("u2") -> ...)) shouldEqual
867 | Map("u1" -> ..., "u2" -> ...)
868 |
869 | write[EntityLocations](Map(EntityPath(Vector("a", "b")) -> ..., EntityPath(Vector("a", "c")) -> ...)) shouldEqual
870 | Map("a/b" -> ..., "a/c" -> ...)
871 |
872 | // reading works just as well
873 | ```
874 |
875 | However, with this flexibility, in large codebases where one pickler is shared by lots of different classes
876 | it is easy to accidentally add a conversion which breaks the serialization format in some other part of the
877 | project. To allow controlling this, picopickle supports *disabling* automatic map serialization for unknown
878 | key types. You would then need to define an object key serializer for the particular type or explicitly allow
879 | maps with this type as a key to be serialized as an array of arrays. To do this, create a custom pickler and
880 | mix the `MapPicklingDisabledByDefault` trait into it:
881 |
882 | ```scala
883 | object CustomPickler extends CollectionsPickler with MapPicklingDisabledByDefault
884 |
885 | // won't compile because there is no ObjectKeyWriter[Int] in scope and serialization of maps
886 | // with Int keys is not allowed
887 | write(Map(1 -> "a", 2 -> "b"))
888 |
889 | // ---
890 |
891 | object CustomPickler extends CollectionsPickler with MapPicklingDisabledByDefault {
892 | implicit val intObjectKeyReadWriter = ObjectKeyReadWriter(_.toInt)
893 | }
894 |
895 | // works because we have defined an object key serializer for Int
896 | write(Map(1 -> "a", 2 -> "b")) shouldEqual Map("1" -> "a", "2" -> "b")
897 |
898 | // ---
899 |
900 | object CustomPickler extends CollectionsPickler with MapPicklingDisabledByDefault {
901 | implicit val intObjectKeyAllowed = allowMapPicklingWithKeyOfType[Int]
902 | }
903 |
904 | // works because we explicitly allowed maps of type Map[Int, T] to be serialized as an array of arrays
905 | write(Map(1 -> "a", 2 -> "b")) shouldEqual Vector(Vector(1, "a"), Vector(2, "b"))
906 | ```
907 |
908 | Note that currently, even if map pickling is allowed as in the above piece of code, introducing an object key
909 | serializer for the corresponding type will force picopickle to use it, allowing the kind of unexpected
910 | serialization format changes described above. However: first, this will be fixed in future versions; second,
911 | it still prevents one from *accidentally* serializing maps as arrays of arrays and then breaking the format
912 | by a deliberate introduction of a key serializer, which looks like the most likely way of introducing
913 | such breaking changes.
914 |
915 | ### Sealed trait hierarchies
916 |
917 | picopickle supports automatic serialization of sealed trait hierarchies (STH), that is, case classes and case
918 | objects, possibly inheriting a sealed trait. In other words, picopickle can serialize algebraic data types.
919 |
920 | The most trivial examples of STH are standalone case objects and case classes:
921 |
922 | ```scala
923 | case object A
924 | case class B(x: Int, y: A.type)
925 |
926 | writeString(A) shouldEqual "{}"
927 | writeString(B(10, A)) shouldEqual """{"x":10,"y":{}}"""
928 | ```
929 |
930 | By default picopickle serializes case classes as objects with keys being the names of the fields. Case objects
931 | are serialized as empty objects.
932 |
933 | Case classes and objects can have a sealed trait as their parent:
934 |
935 | ```scala
936 | sealed trait Root
937 | case object A extends Root
938 | case class B(x: Int, y: Boolean) extends Root
939 | case class C(name: String, y: Root) extends Root
940 | ```
941 |
942 | When you explicitly set the serialized type to `Root` (or pass a value of type `Root` but not of some concrete
943 | subclass), it will be serialized as an object with a *discriminator key*:
944 |
945 | ```scala
946 | writeString[Root](A) shouldEqual """{"$variant":"A"}"""
947 | writeString[Root](B(10, true)) shouldEqual """{"$variant":"B","x":10,"y":true}"""
948 | writeString[Root](C("me", A)) shouldEqual """{"$variant":"C","name":"me","y":{"$variant":"A"}}"""
949 | ```
950 |
951 | If you don't request `Root` explicitly, the classes will be serialized as if they were not a part of an STH:
952 |
953 | ```scala
954 | writeString(B(10, true)) shouldEqual """{"x":10,"y":true}"""
955 | ```
956 |
957 | Usually this is not a problem, however, because if you are working with a sealed trait, you usually have variables
958 | of its type, not of its subtypes.
959 |
960 | Sealed trait hierarchy serialization is implemented using implicitly materialized shapeless `LabelledGeneric`
961 | instances and a bit of custom macros which handle field renaming and default values (neither is supported by
962 | shapeless natively).
963 |
964 | ### Changing the discriminator key
965 |
966 | You can customize the discriminator key used by shapeless serializers by overriding
967 | `discriminatorKey` field defined in `io.github.netvl.picopickle.SealedTraitDiscriminator` trait
968 | (its default value is `"$variant"`):
969 |
970 | ```scala
971 | object CustomPickler extends JsonPickler {
972 | override val discriminatorKey = "$type"
973 | }
974 |
975 | // STH is from the example above
976 | CustomPickler.writeString[Root](A) shouldEqual """{"$type":"A"}"""
977 | ```
978 |
979 | Of course, you can extract it into a separate trait and mix it into different picklers if you want.
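|
| Such a trait could look like this (a sketch; `TypeDiscriminator` is a made-up name):
|
| ```scala
| import io.github.netvl.picopickle.SealedTraitDiscriminator
|
| trait TypeDiscriminator extends SealedTraitDiscriminator {
|   override val discriminatorKey = "$type"
| }
|
| object CustomJsonPickler extends JsonPickler with TypeDiscriminator
| ```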
980 |
981 | Alternatively, since 0.2.0 you can specify the discriminator key for the specific sealed trait
982 | hierarchy by putting a `@discriminator` annotation on the sealed trait:
983 |
984 | ```scala
985 | import io.github.netvl.picopickle.discriminator
986 |
987 | @discriminator("status") sealed trait Root
988 | case object Stopped extends Root
989 | case class Running(name: String) extends Root
990 |
991 | writeString[Root](Stopped) shouldEqual """{"status":"Stopped"}"""
992 | writeString[Root](Running("me")) shouldEqual """{"status":"Running","name":"me"}"""
993 | ```
994 |
995 | If `@discriminator` annotation is present, then its value will be used as discriminator key;
996 | otherwise, the default value from `discriminatorKey` pickler field will be used.
997 |
998 | ### Serialization of optional fields
999 |
1000 | If a case class has a field of type `Option[T]`, then this field is serialized in a different way than
1001 | a regular option: if the value of the field is `None`, then the corresponding key will be absent from the serialized
1002 | data, and if it is `Some(x)`, then the key will be present and its value will be just `x`, without an additional
1003 | layer of an array:
1004 |
1005 | ```scala
1006 | case class A(name: String, x: Option[Long])
1007 |
1008 | writeString(A("absent", None)) shouldEqual """{"name":"absent"}"""
1009 | writeString(A("present", Some(42L))) shouldEqual """{"name":"present","x":42}"""
1010 | ```
1011 |
1012 | This allows easy evolution of your data structures - you can always add an `Option`al field and the data serialized
1013 | before this update will still be deserialized correctly, putting a `None` into the new field.
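|
| For example, data written before the `x` field was added still reads fine (a quick
| illustration with the class above):
|
| ```scala
| readString[A]("""{"name":"legacy"}""") shouldEqual A("legacy", None)
| ```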
1014 |
1015 | If an optional field again contains an option:
1016 |
1017 | ```scala
1018 | case class A(x: Option[Option[Long]])
1019 | ```
1020 |
1021 | then the "outer" option is serialized as described in the above paragraph while the "inner" option is serialized
1022 | as a possibly empty array, just like options are serialized in other contexts:
1023 |
1024 | ```scala
1025 | writeString(A(None)) shouldEqual """{}"""
1026 | writeString(A(Some(None))) shouldEqual """{"x":[]}"""
1027 | writeString(A(Some(Some(10L)))) shouldEqual """{"x":[10]}"""
1028 | ```
1029 |
1030 | ### Renaming fields and sealed trait variants
1031 |
1032 | picopickle also provides an ability to rename fields and STH variant labels. This can be done by annotating
1033 | fields with `@key` annotation:
1034 |
1035 | ```scala
1036 | import io.github.netvl.picopickle.key
1037 |
1038 | sealed trait Root
1039 | @key("0") case object A extends Root
1040 | @key("1") case class B(@key("a") x: Int, @key("b") y: Boolean) extends Root
1041 |
1042 | writeString[Root](A) shouldEqual """{"$variant":"0"}"""
1043 | writeString[Root](B(10, false)) shouldEqual """{"$variant":"1","a":10,"b":false}"""
1044 | ```
1045 |
1046 | Keys are always strings, though.
1047 |
1048 | ### Default values of case class fields
1049 |
1050 | picopickle also respects default values defined in a case class, which simplifies changes in your data classes
1051 | even more. If a field has a default value and the serialized object does not contain the corresponding field,
1052 | the default value will be used:
1053 |
1054 | ```scala
1055 | case class A(n: Int = 11)
1056 |
1057 | readString[A]("""{"n":22}""") shouldEqual A(22)
1058 | readString[A]("""{}""") shouldEqual A()
1059 | ```
1060 |
1061 | As you can see, this mechanism naturally interferes with the optional fields handling. picopickle resolves
1062 | this conflict in the following way: if no value is present at the corresponding key and a default value is
1063 | set for the field, then it takes precedence over option handling. This affects a rather rare case when there
1064 | is an optional field with a default value other than `None`:
1065 |
1066 | ```scala
1067 | case class A(n: Option[Int] = Some(10))
1068 |
1069 | readString[A]("{}") shouldEqual A(Some(10)) // not A(None)
1070 | ```
1071 |
1072 | This is what is usually expected in such situations.
1073 |
1074 | ### Varargs
1075 |
1076 | Since version 0.2.0 picopickle supports reading and writing case classes with variable arguments. All of
1077 | the arguments passed to such a case class will be serialized as an array:
1078 |
1079 | ```scala
1080 | case class A(x: Int*)
1081 |
1082 | writeString(A(1, 2, 3)) shouldEqual """{"x":[1,2,3]}"""
1083 | ```
1084 |
1085 | Naturally, all elements of this array are serialized with their respective serializers.
1086 |
1087 | ### Nulls
1088 |
1089 | The `null` value, as is widely known, tends to cause problems, and it is discouraged in idiomatic Scala code.
1090 | Unfortunately, sometimes you need to interact with external systems which do use nulls, and JSON has a null
1091 | value as well. Because of this picopickle supports nulls (it even has `BNull` as one of the fundamental backend
1092 | types), but it also provides means to control how nulls should be handled.
1093 |
1094 | `Reader` and `Writer` traits do not contain any special logic to handle nulls. Instances of `Reader` and `Writer`
1095 | created through their companion objects, however, do have such logic: they delegate null handling to a `NullHandler`
1096 | instance provided by `NullHandlerComponent`. `NullHandler` is a trait of the following structure:
1097 |
1098 | ```scala
1099 | trait NullHandler {
1100 | def handlesNull: Boolean
1101 | def toBackend[T](value: T, cont: T => backend.BValue): backend.BValue
1102 | def fromBackend[T](value: backend.BValue, cont: backend.BValue => T): T
1103 | }
1104 | ```
1105 |
1106 | That is, it is a kind of preprocessor which inspects the passed value for nulls and can [de]serialize them
1107 | specially or prohibit their [de]serialization altogether.
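|
| For example, a permissive handler in the spirit of the default one could look roughly like
| this (a sketch; `makeNull` and `getNull` are assumed here to follow the backend naming
| conventions described earlier, and the object would live inside a component with access
| to `backend`):
|
| ```scala
| object PermissiveNullHandler extends NullHandler {
|   def handlesNull = true
|
|   // nulls are written unconditionally as the backend's null value
|   def toBackend[T](value: T, cont: T => backend.BValue): backend.BValue =
|     if (value == null) backend.makeNull else cont(value)
|
|   // backend nulls are read back unconditionally as Scala nulls
|   def fromBackend[T](value: backend.BValue, cont: backend.BValue => T): T =
|     if (backend.getNull(value).isDefined) null.asInstanceOf[T] else cont(value)
| }
| ```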
1108 |
1109 | By default picopickle allows nulls everywhere (`DefaultPickler` includes `DefaultNullHandlerComponent`). That is,
1110 | if a null is serialized, it will be represented unconditionally with `backend.BNull`, and `backend.BNull` will
1111 | be deserialized (again, unconditionally) as a `null`.
1112 |
1113 | There is another `NullHandlerComponent` implementation, namely `ProhibitiveNullHandlerComponent`, which disallows
1114 | serialization of nulls, throwing an exception if it encounters a null value either in a Scala object or in a
1115 | backend object. If you don't need to keep compatibility with some external system which uses null values, then
1116 | it may be sensible to extend the desired pickler, overriding the default null handler:
1117 |
1118 | ```scala
1119 | trait MyJsonPickler extends JsonPickler with ProhibitiveNullHandlerComponent
1120 | ```
1121 |
1122 | As long as you use `Reader`/`Writer` companion objects or converters to create your custom serializers,
1123 | the null handling behavior will be consistent for all types handled by your pickler.
1124 |
1125 | ### Accurate numbers serialization
1126 |
1127 | Some backends do not allow serializing some numbers accurately. For example, most JSON implementations
1128 | represent all numbers with 64-bit floating point numbers, i.e. `Double`s. Scala's `Long`, for instance,
1129 | can't be represented accurately with `Double` in its entirety. This is even more true for big integers and decimals.
1130 |
1131 | picopickle backends provide means to serialize arbitrary numbers as accurately as possible with these methods:
1132 |
1133 | ```scala
1134 | def makeNumberAccurately(n: Number): BValue
1135 | def fromNumberAccurately(value: BValue): Number
1136 | ```
1137 |
1138 | You can see that these methods take and return `BValue` instead of `BNumber`. Backend implementations can take
1139 | advantage of this and serialize long numbers as strings or in some other format in order to keep the precision.
1140 | Built-in serializers for numbers use these methods by default.
1141 |
1142 | picopickle also provides a special trait, `DoubleOrStringNumberRepr`, which provides methods to store a number
1143 | as a `BNumber` if it can be represented precisely in `Double`, or as a `BString` otherwise.
1144 | This trait is useful e.g. when writing a JSON-based backend.
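|
| The underlying precision check can be illustrated as follows (a standalone sketch of the
| idea, not the trait's actual code, ignoring the `Long.MinValue` edge case):
|
| ```scala
| // Doubles represent every integer with absolute value up to 2^53 exactly;
| // beyond that, gaps appear, so such Longs are better rendered as strings.
| def fitsInDouble(n: Long): Boolean = math.abs(n) <= (1L << 53)
|
| fitsInDouble(42L)                // true: can be stored as a number
| fitsInDouble(80000000000000000L) // false: stored as a string (cf. the example above)
| ```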
1145 |
1146 | ### Value classes
1147 |
1148 | With picopickle, you can opt in to serializing value classes (i.e. the ones extending the `AnyVal` class) directly
1149 | as values, bypassing the usual map representation of objects. To enable this behavior, extend your pickler with
1150 | `ValueClassReaderWritersComponent`:
1151 |
1152 | ```scala
1153 | object MyJsonPickler extends JsonPickler with ValueClassReaderWritersComponent
1154 | import MyJsonPickler._
1155 |
1156 | case class A(x: Int) extends AnyVal
1157 | writeString(A(10)) shouldEqual "10" // not """{"x":10}"""
1158 | ```
1159 |
1160 | Official backends
1161 | --------------------------------------------------
1162 |
1163 | ### Collections pickler
1164 |
1165 | picopickle has several "official" backends. One of them, provided by the `picopickle-core` library, allows serialization
1166 | into a tree of collections. This backend is available immediately with only the `core` dependency:
1167 |
1168 | ```scala
1169 | libraryDependencies += "io.github.netvl.picopickle" %% "picopickle-core" % "0.3.2"
1170 | ```
1171 |
1172 | In this backend the following AST mapping holds:
1173 |
1174 | ```
1175 | BValue -> Any
1176 | BObject -> Map[String, Any]
1177 | BArray -> Vector[Any]
1178 | BString -> String
1179 | BNumber -> Number
1180 | BBoolean -> Boolean
1181 | BNull -> Null
1182 | ```
1183 |
1184 | In this backend the backend representation coincides with the target medium, so no conversion methods except the
1185 | basic `read`/`write` are necessary.
1186 |
1187 | This backend also tweaks the default `Char` serializer to write and read characters as `Char`s, not
1188 | as `String`s (which is the default behavior).
1189 |
1190 | Note that everything else, even other collections, is still serialized as usual, so, for example, tuples are
1191 | represented as vectors and maps are represented as vectors of vectors:
1192 |
1193 | ```scala
1194 | write((2: Int, "abcde": String)) -> Vector(2, "abcde")
1195 | write(Map(1 -> 2, 3 -> 4)) -> Vector(Vector(1, 2), Vector(3, 4))
1196 | ```
1197 |
1198 | The collections pickler also does not use accurate number serialization because its backend representation is already
1199 | as accurate as possible.
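     |
     | A basic round trip through this backend looks like this (a sketch; `shouldEqual` is ScalaTest matcher syntax):
     |
     | ```scala
     | import io.github.netvl.picopickle.backends.collections.CollectionsPickler._
     |
     | case class Foo(x: Int, name: String)
     |
     | write(Foo(10, "hi")) shouldEqual Map("x" -> 10, "name" -> "hi")
     | read[Foo](Map("x" -> 10, "name" -> "hi")) shouldEqual Foo(10, "hi")
     | ```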
1200 |
1201 | ### JSON pickler
1202 |
1203 | Another official backend is used for conversion to and from JSON. JSON parsing is done with the [jawn] library;
1204 | JSON rendering, however, is custom. This backend is available in `picopickle-backend-jawn`:
1205 |
1206 | ```scala
1207 | libraryDependencies += "io.github.netvl.picopickle" %% "picopickle-backend-jawn" % "0.3.2"
1208 | ```
1209 |
1210 | This backend's AST is defined in `io.github.netvl.picopickle.backends.jawn.JsonAst` and consists of several
1211 | basic case classes corresponding to the basic JSON types. No additional utilities for JSON manipulation are provided;
1212 | use another library if you need them.
1213 |
1214 | The JSON backend additionally provides two pairs of methods: `readAst`/`writeAst`, which convert the JSON AST from and to
1215 | JSON rendered as a string, and `readString`/`writeString`, which [de]serialize directly from and to a string.
1216 | Usually the last pair is what you want when working with JSON serialization.
1217 |
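     | For example (a sketch; the order of fields in the output may differ):
     |
     | ```scala
     | import io.github.netvl.picopickle.backends.jawn.JsonPickler._
     |
     | case class Foo(x: Int, name: String)
     |
     | writeString(Foo(10, "hi")) shouldEqual """{"x":10,"name":"hi"}"""
     | readString[Foo]("""{"x":10,"name":"hi"}""") shouldEqual Foo(10, "hi")
     | ```
     |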
1218 | No support for streaming serialization is available, and it is not likely to appear in the future, both because of the
1219 | abstract nature of backends (not every backend supports streaming; the collections backend, for example, doesn't) and
1220 | because it would require a completely different architecture.
1221 |
1222 | ### BSON pickler
1223 |
1224 | Another official backend is used for conversion to and from the BSON AST, as defined by the [MongoDB BSON][bson] library.
1225 |
1226 | ```scala
1227 | libraryDependencies += "io.github.netvl.picopickle" %% "picopickle-backend-mongodb-bson" % "0.3.2"
1228 | ```
1229 |
1230 | In this backend the following AST mapping holds:
1231 |
1232 | ```
1233 | BValue -> BsonValue
1234 | BObject -> BsonDocument
1235 | BArray -> BsonArray
1236 | BString -> BsonString
1237 | BNumber -> BsonNumber
1238 | BBoolean -> BsonBoolean
1239 | BNull -> BsonNull
1240 | ```
1241 |
1242 | BSON backend also defines additional types as follows:
1243 |
1244 | ```
1245 | BObjectId -> BsonObjectId
1246 | BInt32 -> BsonInt32
1247 | BInt64 -> BsonInt64
1248 | BDouble -> BsonDouble
1249 | BDateTime -> BsonDateTime
1250 | BBinary -> BsonBinary
1251 | BSymbol -> BsonSymbol
1252 | ```
1253 |
1254 | Additional functions for conversion between core Scala types and these types are available in the
1255 | backend:
1256 |
1257 | ```scala
1258 | def fromBinary(bin: BBinary): Array[Byte]
1259 | def makeBinary(arr: Array[Byte]): BBinary
1260 | def getBinary(value: BValue): Option[BBinary]
1261 |
1262 | def fromObjectId(oid: BObjectId): ObjectId
1263 | def makeObjectId(oid: ObjectId): BObjectId
1264 | def getObjectId(value: BValue): Option[BObjectId]
1265 |
1266 | def fromDateTime(dt: BDateTime): Long
1267 | def makeDateTime(n: Long): BDateTime
1268 | def getDateTime(value: BValue): Option[BDateTime]
1269 |
1270 | def fromSymbol(sym: BSymbol): Symbol
1271 | def makeSymbol(sym: Symbol): BSymbol
1272 | def getSymbol(value: BValue): Option[BSymbol]
1273 |
1274 | def fromInt32(n: BInt32): Int
1275 | def makeInt32(n: Int): BInt32
1276 | def getInt32(value: BValue): Option[BInt32]
1277 |
1278 | def fromInt64(n: BInt64): Long
1279 | def makeInt64(n: Long): BInt64
1280 | def getInt64(value: BValue): Option[BInt64]
1281 |
1282 | def fromDouble(n: BDouble): Double
1283 | def makeDouble(n: Double): BDouble
1284 | def getDouble(value: BValue): Option[BDouble]
1285 | ```
1286 |
1287 | Corresponding extractors are available in the `backend.BsonExtract` object, and backend conversion
1288 | implicits are defined in `backend.bsonConversionImplicits`:
1289 |
1290 | ```scala
1291 | Reader {
1292 | case backend.BsonExtract.ObjectId(oid) =>
1293 | // oid: backend.BObjectId == org.bson.BsonObjectId
1294 | }
1295 |
1296 | import backend.bsonConversionImplicits._
1297 |
1298 | val bin: backend.BBinary = Array[Byte](1, 2, 3).toBackend
1299 | ```
1300 |
1301 | This backend overrides numerical readers and writers to serialize Scala numbers to the smallest
1302 | type possible, i.e. `Byte`, `Short` and `Int` are serialized as `BInt32`, `Long` is serialized
1303 | as `BInt64`, and `Float` and `Double` are serialized as `BDouble`. You can see that in this backend
1304 | there is no need to use additional measures to serialize numbers accurately.
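     |
     | For instance (a sketch, assuming the pickler object exposed by this module, referred to here as `BsonPickler`;
     | check the module sources for the exact name):
     |
     | ```scala
     | import io.github.netvl.picopickle.backends.mongodb.BsonPickler._
     |
     | write(10)   // a BsonInt32: Byte, Short and Int become BInt32
     | write(10L)  // a BsonInt64: Long becomes BInt64
     | write(1.5)  // a BsonDouble: Float and Double become BDouble
     | ```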
1305 |
1306 | This backend also provides serializers for `Array[Byte]`, `Symbol`, `Date` and `ObjectId` types
1307 | which are serialized as `BBinary`, `BSymbol`, `BDateTime` and `BObjectId`, respectively.
1308 |
1309 | And finally, this backend provides identity serializers for all subtypes of `BValue`, that is,
1310 | it serializes `BValue` as `BValue`, `BString` as `BString`, `BInt64` as `BInt64` and so on.
1311 |
1312 | [bson]: http://mongodb.github.io/mongo-java-driver/3.0/bson/
1313 |
1314 | Error handling
1315 | --------------------------------------------
1316 |
1317 | While serialization is straightforward and should never fail (if it does, it is most likely a bug in the library
1318 | or in some `Writer` implementation), deserialization is prone to errors because the serialized representation usually
1319 | has a free-form structure and is not statically mapped onto its Scala representation.
1320 |
1321 | picopickle has a special exception type which is thrown upon deserialization errors. This exception is defined
1322 | in `ExceptionsComponent` like this:
1323 |
1324 | ```scala
1325 | case class ReadException(message: String, data: backend.BValue, cause: Throwable = null)
1326 | extends BaseException(message, cause)
1327 |
1328 | object ReadException {
1329 | def apply(reading: String, expected: String, got: backend.BValue): ReadException =
1330 | ReadException(s"reading $reading, expected $expected, got $got", data = got)
1331 | }
1332 | ```
1333 |
1334 | When deserialization of some type is attempted over a backend representation which is incompatible with
1335 | the requested type, for most of the built-in deserializers the exception will contain a message describing
1336 | what was being read, what was expected and what was actually provided to the deserializer:
1337 |
1338 | ```scala
1339 | readString[Int]("""{"some":"thing"}""")
1340 | io.github.netvl.picopickle.ExceptionsComponent$ReadException: reading number, expected number or string containing a number, got JsonObject(Map(some -> JsonString(thing)))
1341 | ```
1342 |
1343 | You can participate in this exception handling with your own deserializers very easily. `Reader` and `ReadWriter`
1344 | have methods for creating deserializers which allow you to use custom messages for errors:
1345 |
1346 | ```scala
1347 | case class A(name: String)
1348 |
1349 | // Pre-defined message format, like above
1350 | Reader.reading[A] {
1351 | case backend.Extract.String(s) => A(s)
1352 | }.orThrowing(whenReading = "A", expected = "string")
1353 |
1354 | // Arbitrary custom message
1355 | Reader.reading[A] {
1356 | case backend.Extract.String(s) => A(s)
1357 | }.orThrowing(v => s"Got $v instead of string when reading A")
1358 |
1359 | // ReadWriters also can be customized
1360 | ReadWriter.writing[A](_.name.toBackend)
1361 | .reading { case backend.Extract.String(s) => A(s) }
1362 | .orThrowing(whenReading = "A", expected = "string")
1363 |
1364 | // Works in any order
1365 | ReadWriter.reading[A] { case backend.Extract.String(s) => A(s) }
1366 | .orThrowing(whenReading = "A", expected = "string")
1367 | .writing(_.name.toBackend)
1368 | ```
1369 |
1370 | In readers constructed in the above form, the error will be thrown when the partial function
1371 | used for reading is not defined on the incoming value. That is, the following reader
1372 | won't ever throw a `ReadException`:
1373 |
1374 | ```scala
1375 | Reader.reading[A] {
1376 | case value => A(backend.fromString(value.asInstanceOf[BString]))
1377 | }.orThrowing(whenReading = "A", expected = "string")
1378 | ```
1379 |
1380 | Instead, it will throw a `ClassCastException` if something that is not a string is supplied.
1381 |
1382 | If you still need to use a catch-all partial function for a reader, you can always throw a `ReadException`
1383 | yourself:
1384 |
1385 | ```scala
1386 | Reader[A] {
1387 | case value => if (value.isInstanceOf[BString]) A(backend.fromString(value.asInstanceOf[BString]))
1388 | else throw ReadException(reading = "A", expected = "string", got = value)
1389 | }
1390 | ```
1391 |
1392 | While the example above is absolutely contrived, there are legitimate use cases for it.
1393 |
1394 | Additional backend implementations may inherit `ExceptionsComponent.BaseException` to implement custom
1395 | errors. For example, this is done in the JSON backend to wrap a Jawn parse exception.
1396 |
1397 | Finally, the `Pickler` trait provides a `tryRead()` method which returns `Try[T]` instead of the `T` returned
1398 | by `read()`. This method never throws any exceptions and instead returns them as the `Failure` variant
1399 | of `Try[T]`. Serializer objects have such methods as well, as do official backends with custom
1400 | serialization methods, like Jawn's `tryReadString()`.
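     |
     | A quick sketch of the difference, using the JSON pickler:
     |
     | ```scala
     | import io.github.netvl.picopickle.backends.jawn.JsonPickler._
     |
     | tryReadString[Int]("10")      // Success(10)
     | tryReadString[Int]("\"no\"")  // Failure(ReadException(...))
     | ```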
1401 |
1402 |
1403 | Limitations
1404 | --------------------------------------
1405 |
1406 | picopickle does not support serializing `Any` in any form because it relies on static knowledge of the
1407 | types being serialized. As far as I can tell, though, its design does not in principle preclude writing
1408 | a reflection-based serializer for `Any`; there are no plans for one, however.
1409 |
1410 | It also seems that trying to serialize sealed trait hierarchies where the sealed trait itself has a type parameter
1411 | causes the compiler to die horribly. Regular parameterized case classes work fine, however.
1412 |
1413 | Object graphs with cycles are not supported and will cause stack overflows. This is not usually a problem,
1414 | because such graphs can only be constructed when at least a part of them is mutable (e.g. a `var` field
1415 | or a mutable collection), which is discouraged in idiomatic Scala code.
1416 |
1417 | Due to limitations of how Scala reflection/macros work, it is better not to define serializers in the same
1418 | place as the serialized classes themselves if these classes form a sealed trait hierarchy. For example, something like this
1419 | won't work:
1420 |
1421 | ```scala
1422 | object Serializers {
1423 | import SomePickler._
1424 |
1425 | sealed trait Root
1426 | case class A(x: Int) extends Root
1427 | case object B extends Root
1428 |
1429 | implicit val rootReadWriter = ReadWriter[Root]
1430 | }
1431 | ```
1432 |
1433 | This won't compile because it is impossible to inspect the sealed trait hierarchy of `Root` at the point where
1434 | a `LabelledGeneric` is materialized (in the implicit parameters of the `ReadWriter[Root]` call). If you want to
1435 | pre-generate serializers for your classes, define them in a separate object:
1436 |
1437 | ```scala
1438 | object Classes {
1439 | sealed trait Root
1440 | case class A(x: Int) extends Root
1441 | case object B extends Root
1442 | }
1443 |
1444 | object Serializers {
1445 | import SomePickler._
1446 | import Classes._
1447 |
1448 | implicit val rootReadWriter = ReadWriter[Root]
1449 | }
1450 | ```
1451 |
1452 |
1453 | Plans
1454 | --------------------------
1455 |
1456 | * Consider adding support for more types
1457 | * Consider adding more converters (e.g. for tuples)
1458 | * Add proper support for error handling in conversions
1459 | * Add more tests
1460 | * Add more documentation
1461 |
1462 |
1463 | Changelog
1464 | ----------------------------------
1465 |
1466 | ### 0.3.2
1467 |
1468 | * Updated scala to 2.12.3
1469 |
1470 | ### 0.3.0
1471 |
1472 | * Updated scala to 2.11.8
1473 | * Added support for serializing value classes as values
1474 |
1475 | ### 0.2.1
1476 |
1477 | * Updated shapeless to 2.3.0, macroparadise to 2.1.0, jawn to 0.8.4, bson to 3.2.2, scala to 2.10.6
1478 | * Switched to macro-compat instead of hand-written macro API for 2.10 and 2.11
1479 |
1480 | ### 0.2.0
1481 |
1482 | * Updated shapeless to 2.2.3, jawn to 0.8.8, scala to 2.11.7
1483 | * Fixed support for varargs (consequence of shapeless update)
1484 | * Improved reader interface (added `readOrElse` method and changed existing code to depend on it)
1485 | * Added proper error handling (#2)
1486 | * Added new BSON-based backend (#6)
1487 | * Added support for changing the sealed trait hierarchy (STH) discriminator key on a per-STH basis (#7)
1488 |
1489 | ### 0.1.3
1490 |
1491 | * Added serializer object feature (#5)
1492 | * Added support for serializing arbitrary types as map keys provided there is a converter (#4)
1493 |
1494 | ### 0.1.2
1495 |
1496 | * Updated Scala 2.10 minor version (4 -> 5)
1497 |
1498 | ### 0.1.1
1499 |
1500 | * Fixed handling of classes with overloaded `apply` method in companions (#1)
1501 |
1502 | ### 0.1.0
1503 |
1504 | * More serializer instances
1505 | * Added generic handling for accurate numbers serialization
1506 | * Added collections backend
1507 | * Support for recursive types
1508 | * Added converters
1509 | * Improved API for custom serializers
1510 | * Added support for renaming fields and sealed trait variants
1511 | * Added support for default values in case classes
1512 | * Added proper support for nulls
1513 | * Added test generators
1514 | * Started adding tests
1515 |
1516 | ### 0.0.2
1517 |
1518 | * Added more instances for primitive types
1519 | * Improved API
1520 |
1521 | ### 0.0.1
1522 |
1523 | * Initial release
1524 |
--------------------------------------------------------------------------------
/build.sbt:
--------------------------------------------------------------------------------
1 | crossScalaVersions := Seq("2.10.6", "2.11.11", "2.12.3")
2 |
3 | val commonCommonSettings = Seq(
4 | organization := "io.github.netvl.picopickle",
5 | version := "0.3.2",
6 | scalaVersion := "2.12.3",
7 |
8 | autoAPIMappings := true
9 | )
10 |
11 | val commonSettings = commonCommonSettings ++ Seq(
12 | bintrayPackage := "picopickle",
13 | bintrayReleaseOnPublish in ThisBuild := false,
14 |
15 | licenses := Seq("MIT" -> url("https://raw.githubusercontent.com/netvl/picopickle/master/LICENSE")),
16 | homepage := Some(url("https://github.com/netvl/picopickle")),
17 |
18 | publishMavenStyle := true,
19 |
20 | pomExtra :=
21 |     <developers>
22 |       <developer>
23 |         <name>Vladimir Matveev</name>
24 |         <email>vladimir.matweev@gmail.com</email>
25 |         <url>https://github.com/netvl</url>
26 |       </developer>
27 |     </developers>
28 |     <scm>
29 |       <connection>scm:git:https://github.com/netvl/picopickle</connection>
30 |       <developerConnection>scm:git:git@github.com:netvl/picopickle.git</developerConnection>
31 |       <url>https://github.com/netvl/picopickle</url>
32 |     </scm>
33 | )
34 |
35 | def commonDependencies = Seq(
36 | "org.typelevel" %% "macro-compat" % Versions.macroCompat,
37 | compilerPlugin("org.scalamacros" % "paradise" % Versions.paradise cross CrossVersion.full),
38 |
39 | "com.chuusai" %% "shapeless" % Versions.shapeless,
40 | "org.scalatest" %% "scalatest" % Versions.scalatest % "test"
41 | )
42 |
43 | lazy val core = project
44 | .settings(commonSettings: _*)
45 | .settings(
46 | name := "picopickle-core",
47 |
48 | libraryDependencies ++= commonDependencies ++ Seq(
49 | "org.scala-lang" % "scala-reflect" % scalaVersion.value % "provided",
50 | "org.scala-lang" % "scala-compiler" % scalaVersion.value % "provided"
51 | ),
52 |
53 | sourceGenerators in Compile += task[Seq[File]] {
54 | val outFile = (sourceManaged in Compile).value / "io" / "github" / "netvl" / "picopickle" / "generated.scala"
55 |
56 | // TODO: this probably could be replaced with shapeless
57 | val tupleInstances = (1 to 22).map { i =>
58 | def mkCommaSeparated(f: Int => String) = (1 to i).map(f).mkString(", ")
59 | val types = mkCommaSeparated(j => s"T$j")
60 | val readers = mkCommaSeparated(j => s"r$j: Reader[T$j]")
61 | val writers = mkCommaSeparated(j => s"w$j: Writer[T$j]")
62 | val vars = mkCommaSeparated(j => s"x$j")
63 | val reads = mkCommaSeparated(j => s"r$j.read(x$j)")
64 | val writes = mkCommaSeparated(j => s"w$j.write(x$j)")
65 |
66 | val tupleReader =
67 | s"""
68 | | implicit def tuple${i}Reader[$types](implicit $readers): Reader[Tuple$i[$types]] =
69 | | Reader.reading {
70 | | case backend.Extract.Array(Vector($vars)) =>
71 | | Tuple$i($reads)
72 | | }.orThrowing(whenReading = "$i-tuple", expected = "array")
73 | """.stripMargin
74 |
75 | val tupleWriter =
76 | s"""
77 | | implicit def tuple${i}Writer[$types](implicit $writers): Writer[Tuple$i[$types]] =
78 | | Writer {
79 | | case Tuple$i($vars) => backend.makeArray(Vector($writes))
80 | | }
81 | """.stripMargin
82 |
83 | (tupleReader, tupleWriter)
84 | }
85 | val (tupleReaders, tupleWriters) = tupleInstances.unzip
86 |
87 | val generatedSource =
88 | s"""
89 | |package io.github.netvl.picopickle
90 | |
91 | |trait TupleReaders {
92 | | this: BackendComponent with TypesComponent =>
93 | |${tupleReaders.mkString("")}
94 | |}
95 | |
96 | |trait TupleWriters {
97 | | this: BackendComponent with TypesComponent =>
98 | |${tupleWriters.mkString("")}
99 | |}
100 | |
101 | |trait TupleReaderWritersComponent extends TupleReaders with TupleWriters {
102 | | this: BackendComponent with TypesComponent =>
103 | |}
104 | """.stripMargin
105 |
106 | IO.write(outFile, generatedSource, IO.utf8)
107 |
108 | Seq(outFile)
109 | },
110 |
111 | sourceGenerators in Test += TestGeneration.generatedFiles(sourceManaged in Test).taskValue
112 | )
113 |
114 | lazy val jawn = project
115 | .dependsOn(core % "compile->compile;test->test")
116 | .settings(commonSettings: _*)
117 | .settings(
118 | name := "picopickle-backend-jawn",
119 |
120 | sourceGenerators in Test += TestGeneration.generatedFiles(sourceManaged in Test).taskValue,
121 |
122 | libraryDependencies ++= commonDependencies ++ Seq(
123 | "org.spire-math" %% "jawn-parser" % Versions.jawn
124 | )
125 | )
126 |
127 | lazy val mongodb = project
128 | .dependsOn(core % "compile->compile;test->test")
129 | .settings(commonSettings: _*)
130 | .settings(
131 | name := "picopickle-backend-mongodb-bson",
132 |
133 | sourceGenerators in Test += TestGeneration.generatedFiles(sourceManaged in Test).taskValue,
134 |
135 | libraryDependencies ++= commonDependencies ++ Seq(
136 | "org.mongodb" % "bson" % Versions.mongodbBson
137 | )
138 | )
139 |
140 | lazy val root = (project in file("."))
141 | .aggregate(core, jawn, mongodb)
142 | .settings(commonCommonSettings: _*)
143 | .settings(unidocSettings: _*)
144 | .settings(site.settings ++ ghpages.settings: _*)
145 | .settings(
146 | name := "picopickle",
147 |
148 | site.addMappingsToSiteDir(mappings in (ScalaUnidoc, packageDoc), "latest/api"),
149 | git.remoteRepo := "git@github.com:netvl/picopickle.git",
150 |
151 | publish := {},
152 | publishLocal := {},
153 | packagedArtifacts := Map.empty
154 | )
155 |
--------------------------------------------------------------------------------
/core/src/main/scala/io/github/netvl/picopickle/backend.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle
2 |
3 | trait BackendComponent {
4 | val backend: Backend
5 | }
6 |
7 | trait Backend {
8 | type BValue
9 | type BObject <: BValue
10 | type BArray <: BValue
11 | type BString <: BValue
12 | type BNumber <: BValue
13 | type BBoolean <: BValue
14 | type BNull <: BValue
15 |
16 | def fromObject(obj: BObject): Map[String, BValue]
17 | def makeObject(m: Map[String, BValue]): BObject
18 | def getObject(value: BValue): Option[BObject]
19 |
20 | def getObjectKey(obj: BObject, key: String): Option[BValue]
21 | def setObjectKey(obj: BObject, key: String, value: BValue): BObject
22 | def containsObjectKey(obj: BObject, key: String): Boolean = getObjectKey(obj, key).isDefined
23 | def removeObjectKey(obj: BObject, key: String): BObject
24 | def makeEmptyObject: BObject = makeObject(Map.empty)
25 |
26 | def fromArray(arr: BArray): Vector[BValue]
27 | def makeArray(v: Vector[BValue]): BArray
28 | def getArray(value: BValue): Option[BArray]
29 |
30 | def getArrayLength(arr: BArray): Int
31 | def getArrayValueAt(arr: BArray, idx: Int): BValue
32 | def pushToArray(arr: BArray, value: BValue): BArray
33 | def makeEmptyArray: BArray = makeArray(Vector.empty)
34 |
35 | def fromString(str: BString): String
36 | def makeString(s: String): BString
37 | def getString(value: BValue): Option[BString]
38 |
39 | def fromNumber(num: BNumber): Number
40 | def makeNumber(n: Number): BNumber
41 | def getNumber(value: BValue): Option[BNumber]
42 |
43 | def makeNumberAccurately(n: Number): BValue
44 | def fromNumberAccurately: PartialFunction[BValue, Number]
45 | def fromNumberAccuratelyExpected: String
46 |
47 | def fromBoolean(bool: BBoolean): Boolean
48 | def makeBoolean(b: Boolean): BBoolean
49 | def getBoolean(value: BValue): Option[BBoolean]
50 |
51 | def makeNull: BNull
52 | def getNull(value: BValue): Option[BNull]
53 |
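   | // Extractors that match a backend value of the corresponding type and unwrap it into its plain Scala representation.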
54 | object Extract {
55 | object Object {
56 | def unapply(value: BValue): Option[Map[String, BValue]] = getObject(value).map(fromObject)
57 | }
58 |
59 | object Array {
60 | def unapply(value: BValue): Option[Vector[BValue]] = getArray(value).map(fromArray)
61 | }
62 |
63 | object String {
64 | def unapply(value: BValue): Option[String] = getString(value).map(fromString)
65 | }
66 |
67 | object Number {
68 | def unapply(value: BValue): Option[Number] = getNumber(value).map(fromNumber)
69 | }
70 |
71 | object Boolean {
72 | def unapply(value: BValue): Option[Boolean] = getBoolean(value).map(fromBoolean)
73 | }
74 | }
75 |
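   | // Extractors that match a backend value of the corresponding type and return it as is, without unwrapping.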
76 | object Get {
77 | object Object {
78 | def unapply(value: BValue): Option[BObject] = Backend.this.getObject(value)
79 | }
80 |
81 | object Array {
82 | def unapply(value: BValue): Option[BArray] = Backend.this.getArray(value)
83 | }
84 |
85 | object String {
86 | def unapply(value: BValue): Option[BString] = Backend.this.getString(value)
87 | }
88 |
89 | object Number {
90 | def unapply(value: BValue): Option[BNumber] = Backend.this.getNumber(value)
91 | }
92 |
93 | object Boolean {
94 | def unapply(value: BValue): Option[BBoolean] = Backend.this.getBoolean(value)
95 | }
96 |
97 | object Null {
98 | def unapply(value: BValue): Option[BNull] = Backend.this.getNull(value)
99 | }
100 | }
101 |
102 | object conversionImplicits {
103 | implicit class MapToBackendExt(val m: Map[String, BValue]) {
104 | def toBackend: BObject = makeObject(m)
105 | }
106 |
107 | implicit class VectorToBackendExt(val v: Vector[BValue]) {
108 | def toBackend: BArray = makeArray(v)
109 | }
110 |
111 | implicit class StringToBackendExt(val s: String) {
112 | def toBackend: BString = makeString(s)
113 | }
114 |
115 | implicit class NumberLikeToBackendExt[N](val n: N)(implicit conv: N => Number) {
116 | def toBackend: BNumber = makeNumber(n)
117 | def toBackendAccurately: BValue = makeNumberAccurately(n)
118 | }
119 |
120 | implicit class BooleanToBackendExt(val b: Boolean) {
121 | def toBackend: BBoolean = makeBoolean(b)
122 | }
123 | }
124 | }
125 |
126 |
127 |
--------------------------------------------------------------------------------
/core/src/main/scala/io/github/netvl/picopickle/backends/collections/CollectionsBackend.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle.backends.collections
2 |
3 | import shapeless.syntax.typeable._
4 |
5 | import io.github.netvl.picopickle.Backend
6 |
7 | object CollectionsBackend extends Backend {
8 | override type BValue = Any
9 | override type BObject = Map[String, Any]
10 | override type BArray = Vector[Any]
11 | override type BString = String
12 | override type BNumber = Number
13 | override type BBoolean = Boolean
14 | override type BNull = Null
15 |
16 | override def fromObject(obj: BObject): Map[String, BValue] = obj
17 | override def makeObject(m: Map[String, BValue]): BObject = m
18 | override def getObject(value: BValue): Option[BObject] = value.cast[Map[String, Any]]
19 |
20 | override def getObjectKey(obj: BObject, key: String): Option[BValue] = obj.get(key)
21 | override def setObjectKey(obj: BObject, key: String, value: BValue): BObject = obj + (key -> value)
22 | override def containsObjectKey(obj: BObject, key: String): Boolean = obj.contains(key)
23 | override def removeObjectKey(obj: BObject, key: String): BObject = obj - key
24 |
25 | override def fromArray(arr: BArray): Vector[BValue] = arr
26 | override def makeArray(v: Vector[BValue]): BArray = v
27 | override def getArray(value: BValue): Option[BArray] = value.cast[Vector[Any]]
28 | override def pushToArray(arr: BArray, value: BValue) = arr :+ value
29 |
30 | override def getArrayLength(arr: BArray): Int = arr.length
31 | override def getArrayValueAt(arr: BArray, idx: Int): BValue = arr(idx)
32 |
33 | override def fromString(str: BString): String = str
34 | override def makeString(s: String): BString = s
35 | override def getString(value: BValue): Option[BString] = value.cast[String]
36 |
37 | override def fromNumber(num: BNumber): Number = num
38 | override def makeNumber(n: Number): BNumber = n
39 | override def getNumber(value: BValue): Option[BNumber] = value.cast[Number]
40 |
41 | override def makeNumberAccurately(n: Number): BValue = n
42 | override def fromNumberAccurately: PartialFunction[BValue, Number] = {
43 | case value: Number => value
44 | }
45 | override def fromNumberAccuratelyExpected: String = "number"
46 |
47 | override def fromBoolean(bool: BBoolean): Boolean = bool
48 | override def makeBoolean(b: Boolean): BBoolean = b
49 | override def getBoolean(value: BValue): Option[BBoolean] = value.cast[Boolean]
50 |
51 | override def makeNull: BNull = null
52 | override def getNull(value: BValue): Option[BNull] = if (value == null) Some(null) else None
53 |
54 | def anyToValue(any: Any): BValue = any
55 | def valueToAny(value: BValue): Any = value
56 | }
57 |
--------------------------------------------------------------------------------
/core/src/main/scala/io/github/netvl/picopickle/backends/collections/CollectionsBackendComponent.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle.backends.collections
2 |
3 | import io.github.netvl.picopickle.BackendComponent
4 |
5 | trait CollectionsBackendComponent extends BackendComponent {
6 | override val backend = CollectionsBackend
7 | }
8 |
--------------------------------------------------------------------------------
/core/src/main/scala/io/github/netvl/picopickle/backends/collections/CollectionsPickler.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle.backends.collections
2 |
3 | import io.github.netvl.picopickle.DefaultPickler
4 |
5 | object CollectionsPickler extends CollectionsPickler
6 |
7 | trait CollectionsPickler extends DefaultPickler with CollectionsBackendComponent {
8 | override implicit val charWriter: Writer[Char] = Writer[Char](identity)
9 | override implicit val charReader: Reader[Char] = Reader.reading {
10 | case c: Char => c
11 | }.orThrowing(whenReading = "character", expected = "character")
12 | }
--------------------------------------------------------------------------------
/core/src/main/scala/io/github/netvl/picopickle/collections.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle
2 |
3 | import scala.annotation.implicitNotFound
4 | import scala.reflect.ClassTag
5 | import scala.{collection => coll}
6 | import scala.collection.{mutable => mut}
7 | import scala.collection.{immutable => imm}
8 | import scala.collection.generic.CanBuildFrom
9 | import scala.reflect.macros.whitebox
10 | import scala.language.higherKinds
11 | import scala.language.experimental.macros
12 |
13 | trait MapPicklingComponent {
14 | @implicitNotFound("Can't [un]pickle a map with keys of type ${T} either as an object or as an array of tuples; " +
15 | "either define an `ObjectKeyReadWriter[${T}]` converter or explicitly allow " +
16 | "serializing this map as an array of tuples by putting " +
17 | "an implicit value returned by `allowMapPicklingWithKeysOfType[${T}]` in scope")
18 | sealed class MapPicklingIsAllowed[T]
19 | protected object MapPicklingIsAllowed extends MapPicklingIsAllowed[Nothing]
20 | def allowMapPicklingWithKeysOfType[T]: MapPicklingIsAllowed[T] =
21 | MapPicklingIsAllowed.asInstanceOf[MapPicklingIsAllowed[T]]
22 |
23 | def mapPicklingIsAllowedByDefault[T]: MapPicklingIsAllowed[T]
24 | }
25 |
26 | trait MapPicklingEnabledByDefault extends MapPicklingComponent {
27 | // by default it is enabled for all key types
28 | override implicit def mapPicklingIsAllowedByDefault[T]: MapPicklingIsAllowed[T] =
29 | allowMapPicklingWithKeysOfType[T]
30 | }
31 |
32 | trait MapPicklingDisabledByDefault extends MapPicklingComponent {
33 | this: ObjectKeyTypesComponent =>
34 |
35 | // here it is disabled via an aborting macro
36 | override implicit def mapPicklingIsAllowedByDefault[T]: MapPicklingIsAllowed[T] =
37 | macro MapPicklingDisabledByDefaultMacros.killItself[T]
38 |
39 | // but another implicit value is defined for all keys which are readable/writable as object keys
40 | implicit def mapPicklingIsAllowedForAppropriateKeyTypes[T: ObjectKeyReader: ObjectKeyWriter] =
41 | allowMapPicklingWithKeysOfType[T]
42 | }
43 |
44 | @macrocompat.bundle
45 | class MapPicklingDisabledByDefaultMacros(val c: whitebox.Context) {
46 | def killItself[T: c.WeakTypeTag]: c.Expr[T] =
47 | c.abort(c.enclosingPosition, "aborting expansion of an offending implicit")
48 | }
49 |
50 | trait CollectionWriters {
51 | this: ObjectKeyTypesComponent with MapPicklingComponent with BackendComponent with TypesComponent =>
52 |
53 | protected final def mkIterableWriter[T, C[_] <: Iterable[_]](implicit w: Writer[T]): Writer[C[T]] =
54 | Writer { c =>
55 | backend.makeArray(c.iterator.asInstanceOf[Iterator[T]].map(e => w.write(e)).toVector)
56 | }
57 |
58 | protected final def mkMapWriter[A, B, M[K, V] <: coll.Map[K, V] with coll.MapLike[K, V, M[K, V]]]
59 | (implicit wa: Writer[A], wb: Writer[B], wab: Writer[(A, B)], kw: ObjectKeyWriter[A]): Writer[M[A, B]] =
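   | // If an ObjectKeyWriter is available for the key type, the map is written as a backend object;
   | // otherwise it falls back to an array of [key, value] entries.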
60 | if (kw != null) Writer.fromF0[M[A, B]] { (m: coll.MapLike[A, B, M[A, B]]) => {
61 | case Some(backend.Get.Object(obj)) => m.foldLeft(obj) { (acc, t) =>
62 | backend.setObjectKey(acc, kw.write(t._1), wb.write(t._2))
63 | }
64 | case None => backend.makeObject(m.map { case (k, v) => (kw.write(k), wb.write(v)) }.toMap)
65 | }}
66 | else Writer[M[A, B]] { (m: coll.MapLike[A, B, M[A, B]]) =>
67 | backend.makeArray(m.map(t => wab.write(t)).toVector)
68 | }
69 |
70 | implicit def iterableWriter[T: Writer]: Writer[Iterable[T]] = mkIterableWriter[T, Iterable]
71 |
72 | implicit def seqWriter[T: Writer]: Writer[coll.Seq[T]] = mkIterableWriter[T, coll.Seq]
73 | implicit def immSeqWriter[T: Writer]: Writer[imm.Seq[T]] = mkIterableWriter[T, imm.Seq]
74 | implicit def mutSeqWriter[T: Writer]: Writer[mut.Seq[T]] = mkIterableWriter[T, mut.Seq]
75 |
76 | implicit def setWriter[T: Writer]: Writer[coll.Set[T]] = mkIterableWriter[T, coll.Set]
77 | implicit def immSetWriter[T: Writer]: Writer[imm.Set[T]] = mkIterableWriter[T, imm.Set]
78 | implicit def mutSetWriter[T: Writer]: Writer[mut.Set[T]] = mkIterableWriter[T, mut.Set]
79 |
80 | implicit def indexedSeqWriter[T: Writer]: Writer[coll.IndexedSeq[T]] = mkIterableWriter[T, coll.IndexedSeq]
81 | implicit def immIndexedSeqWriter[T: Writer]: Writer[imm.IndexedSeq[T]] = mkIterableWriter[T, imm.IndexedSeq]
82 | implicit def mutIndexedSeqWriter[T: Writer]: Writer[mut.IndexedSeq[T]] = mkIterableWriter[T, mut.IndexedSeq]
83 |
84 | implicit def linearSeqWriter[T: Writer]: Writer[coll.LinearSeq[T]] = mkIterableWriter[T, coll.LinearSeq]
85 | implicit def immLinearSeqWriter[T: Writer]: Writer[imm.LinearSeq[T]] = mkIterableWriter[T, imm.LinearSeq]
86 | implicit def mutLinearSeqWriter[T: Writer]: Writer[mut.LinearSeq[T]] = mkIterableWriter[T, mut.LinearSeq]
87 |
88 | implicit def sortedSetWriter[T: Writer: Ordering]: Writer[coll.SortedSet[T]] = mkIterableWriter[T, coll.SortedSet]
89 | implicit def immSortedSetWriter[T: Writer: Ordering]: Writer[imm.SortedSet[T]] = mkIterableWriter[T, imm.SortedSet]
90 | implicit def mutSortedSetWriter[T: Writer: Ordering]: Writer[mut.SortedSet[T]] = mkIterableWriter[T, mut.SortedSet]
91 |
92 | implicit def queueWriter[T: Writer]: Writer[imm.Queue[T]] = mkIterableWriter[T, imm.Queue]
93 | implicit def vectorWriter[T: Writer]: Writer[imm.Vector[T]] = mkIterableWriter[T, imm.Vector]
94 | implicit def listWriter[T: Writer]: Writer[imm.List[T]] = mkIterableWriter[T, imm.List]
95 | implicit def streamWriter[T: Writer]: Writer[imm.Stream[T]] = mkIterableWriter[T, imm.Stream]
96 | implicit def listSetWriter[T: Writer]: Writer[imm.ListSet[T]] = mkIterableWriter[T, imm.ListSet]
97 | implicit def treeSetWriter[T: Writer: Ordering]: Writer[imm.TreeSet[T]] = mkIterableWriter[T, imm.TreeSet]
98 |
99 | implicit def immHashSetWriter[T: Writer]: Writer[imm.HashSet[T]] = mkIterableWriter[T, imm.HashSet]
100 | implicit def mutHashSetWriter[T: Writer]: Writer[mut.HashSet[T]] = mkIterableWriter[T, mut.HashSet]
101 |
102 | implicit def bufferWriter[T: Writer]: Writer[mut.Buffer[T]] = mkIterableWriter[T, mut.Buffer]
103 | implicit def arrayBufferWriter[T: Writer]: Writer[mut.ArrayBuffer[T]] = mkIterableWriter[T, mut.ArrayBuffer]
104 | implicit def linkedListWriter[T: Writer]: Writer[mut.LinkedList[T]] = mkIterableWriter[T, mut.LinkedList]
105 | implicit def linkedHashSetWriter[T: Writer]: Writer[mut.LinkedHashSet[T]] = mkIterableWriter[T, mut.LinkedHashSet]
106 |
107 | implicit def mapWriter[A: Writer, B: Writer](implicit allowed: MapPicklingIsAllowed[A],
108 | kw: ObjectKeyWriter[A] = null, wab: Writer[(A, B)]): Writer[coll.Map[A, B]] = mkMapWriter[A, B, coll.Map]
109 | implicit def immMapWriter[A: Writer, B: Writer](implicit allowed: MapPicklingIsAllowed[A],
110 | kw: ObjectKeyWriter[A] = null, wab: Writer[(A, B)]): Writer[imm.Map[A, B]] = mkMapWriter[A, B, imm.Map]
111 | implicit def mutMapWriter[A: Writer, B: Writer](implicit allowed: MapPicklingIsAllowed[A],
112 | kw: ObjectKeyWriter[A] = null, wab: Writer[(A, B)]): Writer[mut.Map[A, B]] = mkMapWriter[A, B, mut.Map]
113 |
114 | implicit def immHashMapWriter[A: Writer, B: Writer](implicit allowed: MapPicklingIsAllowed[A],
115 | kw: ObjectKeyWriter[A] = null, wab: Writer[(A, B)]): Writer[imm.HashMap[A, B]] = mkMapWriter[A, B, imm.HashMap]
116 | implicit def mutHashMapWriter[A: Writer, B: Writer](implicit allowed: MapPicklingIsAllowed[A],
117 | kw: ObjectKeyWriter[A] = null, wab: Writer[(A, B)]): Writer[mut.HashMap[A, B]] = mkMapWriter[A, B, mut.HashMap]
118 |
119 | implicit def treeMapWriter[A: Writer: Ordering, B: Writer](implicit allowed: MapPicklingIsAllowed[A],
120 | kw: ObjectKeyWriter[A] = null, wab: Writer[(A, B)]): Writer[imm.TreeMap[A, B]] = mkMapWriter[A, B, imm.TreeMap]
121 | implicit def listMapWriter[A: Writer, B: Writer](implicit allowed: MapPicklingIsAllowed[A],
122 | kw: ObjectKeyWriter[A] = null, wab: Writer[(A, B)]): Writer[imm.ListMap[A, B]] = mkMapWriter[A, B, imm.ListMap]
123 |
124 | implicit def linkedHashMapWriter[A: Writer, B: Writer](implicit allowed: MapPicklingIsAllowed[A],
125 | kw: ObjectKeyWriter[A] = null, wab: Writer[(A, B)]): Writer[mut.LinkedHashMap[A, B]] = mkMapWriter[A, B, mut.LinkedHashMap]
126 |
127 | implicit def arrayWriter[T: Writer]: Writer[Array[T]] = Writer {
128 | case arr => iterableWriter[T].write(arr)
129 | }
130 | }
131 |
132 | trait CollectionReaders {
133 | this: ObjectKeyTypesComponent with MapPicklingComponent with BackendComponent with TypesComponent =>
134 |
135 | protected final def mkIterableReader[T, C[_] <: Iterable[_]](implicit r: Reader[T],
136 | cbf: CanBuildFrom[C[T], T, C[T]]): Reader[C[T]] =
137 | Reader.reading {
138 | case backend.Extract.Array(arr) => arr.map(r.read).to[C]
139 | }.orThrowing(whenReading = "iterable", expected = "array")
140 |
141 | protected final def mkMapReader[A, B, M[_, _] <: coll.Map[_, _]]
142 | (implicit ra: Reader[A], rb: Reader[B], kr: ObjectKeyReader[A], rab: Reader[(A, B)],
143 | cbf: CanBuildFrom[M[A, B], (A, B), M[A, B]]) =
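   | // Mirrors mkMapWriter: reads from a backend object when an ObjectKeyReader is available,
   | // otherwise from an array of key-value entries.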
144 | if (kr != null) Reader.reading {
145 | case backend.Extract.Object(m) =>
146 | val builder = cbf.apply()
147 | m.foreach {
148 | case (k, v) => builder += (kr.read(k) -> rb.read(v))
149 | }
150 | builder.result()
151 | }.orThrowing(whenReading = "map with object keys", expected = "object") else Reader.reading {
152 | case backend.Extract.Array(arr) =>
153 | val builder = cbf.apply()
154 | arr.foreach { e => builder += rab.read(e) }
155 | builder.result()
156 | }.orThrowing(whenReading = "map", expected = "array")
157 |
158 | implicit def seqReader[T: Reader]: Reader[coll.Seq[T]] = mkIterableReader[T, coll.Seq]
159 | implicit def immSeqReader[T: Reader]: Reader[imm.Seq[T]] = mkIterableReader[T, imm.Seq]
160 | implicit def mutSeqReader[T: Reader]: Reader[mut.Seq[T]] = mkIterableReader[T, mut.Seq]
161 |
162 | implicit def setReader[T: Reader]: Reader[coll.Set[T]] = mkIterableReader[T, coll.Set]
163 | implicit def immSetReader[T: Reader]: Reader[imm.Set[T]] = mkIterableReader[T, imm.Set]
164 | implicit def mutSetReader[T: Reader]: Reader[mut.Set[T]] = mkIterableReader[T, mut.Set]
165 |
166 | implicit def indexedSeqReader[T: Reader]: Reader[coll.IndexedSeq[T]] = mkIterableReader[T, coll.IndexedSeq]
167 | implicit def immIndexedSeqReader[T: Reader]: Reader[imm.IndexedSeq[T]] = mkIterableReader[T, imm.IndexedSeq]
168 | implicit def mutIndexedSeqReader[T: Reader]: Reader[mut.IndexedSeq[T]] = mkIterableReader[T, mut.IndexedSeq]
169 |
170 | implicit def linearSeqReader[T: Reader]: Reader[coll.LinearSeq[T]] = mkIterableReader[T, coll.LinearSeq]
171 | implicit def immLinearSeqReader[T: Reader]: Reader[imm.LinearSeq[T]] = mkIterableReader[T, imm.LinearSeq]
172 | implicit def mutLinearSeqReader[T: Reader]: Reader[mut.LinearSeq[T]] = mkIterableReader[T, mut.LinearSeq]
173 |
174 | implicit def sortedSetReader[T: Reader: Ordering]: Reader[coll.SortedSet[T]] = mkIterableReader[T, coll.SortedSet]
175 | implicit def immSortedSetReader[T: Reader: Ordering]: Reader[imm.SortedSet[T]] = mkIterableReader[T, imm.SortedSet]
176 | implicit def mutSortedSetReader[T: Reader: Ordering]: Reader[mut.SortedSet[T]] = mkIterableReader[T, mut.SortedSet]
177 |
178 | implicit def queueReader[T: Reader]: Reader[imm.Queue[T]] = mkIterableReader[T, imm.Queue]
179 | implicit def vectorReader[T: Reader]: Reader[imm.Vector[T]] = mkIterableReader[T, imm.Vector]
180 | implicit def listReader[T: Reader]: Reader[imm.List[T]] = mkIterableReader[T, imm.List]
181 | implicit def streamReader[T: Reader]: Reader[imm.Stream[T]] = mkIterableReader[T, imm.Stream]
182 | implicit def listSetReader[T: Reader]: Reader[imm.ListSet[T]] = mkIterableReader[T, imm.ListSet]
183 | implicit def treeSetReader[T: Reader: Ordering]: Reader[imm.TreeSet[T]] = mkIterableReader[T, imm.TreeSet]
184 |
185 | implicit def immHashSetReader[T: Reader]: Reader[imm.HashSet[T]] = mkIterableReader[T, imm.HashSet]
186 | implicit def mutHashSetReader[T: Reader]: Reader[mut.HashSet[T]] = mkIterableReader[T, mut.HashSet]
187 |
188 | implicit def bufferReader[T: Reader]: Reader[mut.Buffer[T]] = mkIterableReader[T, mut.Buffer]
189 | implicit def arrayBufferReader[T: Reader]: Reader[mut.ArrayBuffer[T]] = mkIterableReader[T, mut.ArrayBuffer]
190 | implicit def linkedListReader[T: Reader]: Reader[mut.LinkedList[T]] = mkIterableReader[T, mut.LinkedList]
191 | implicit def linkedHashSetReader[T: Reader]: Reader[mut.LinkedHashSet[T]] = mkIterableReader[T, mut.LinkedHashSet]
192 |
193 | implicit def mapReader[A: Reader, B: Reader](implicit allowed: MapPicklingIsAllowed[A],
194 | kr: ObjectKeyReader[A] = null, rab: Reader[(A, B)]): Reader[coll.Map[A, B]] = mkMapReader[A, B, coll.Map]
195 | implicit def immMapReader[A: Reader, B: Reader](implicit allowed: MapPicklingIsAllowed[A],
196 | kr: ObjectKeyReader[A] = null, rab: Reader[(A, B)]): Reader[imm.Map[A, B]] = mkMapReader[A, B, imm.Map]
197 | implicit def mutMapReader[A: Reader, B: Reader](implicit allowed: MapPicklingIsAllowed[A],
198 | kr: ObjectKeyReader[A] = null, rab: Reader[(A, B)]): Reader[mut.Map[A, B]] = mkMapReader[A, B, mut.Map]
199 |
200 | implicit def immHashMapReader[A: Reader, B: Reader](implicit allowed: MapPicklingIsAllowed[A],
201 | kr: ObjectKeyReader[A] = null, rab: Reader[(A, B)]): Reader[imm.HashMap[A, B]] = mkMapReader[A, B, imm.HashMap]
202 | implicit def mutHashMapReader[A: Reader, B: Reader](implicit allowed: MapPicklingIsAllowed[A],
203 | kr: ObjectKeyReader[A] = null, rab: Reader[(A, B)]): Reader[mut.HashMap[A, B]] = mkMapReader[A, B, mut.HashMap]
204 |
205 | implicit def treeMapReader[A: Reader: Ordering, B: Reader](implicit allowed: MapPicklingIsAllowed[A],
206 | kr: ObjectKeyReader[A] = null, rab: Reader[(A, B)]): Reader[imm.TreeMap[A, B]] = mkMapReader[A, B, imm.TreeMap]
207 | implicit def listMapReader[A: Reader, B: Reader](implicit allowed: MapPicklingIsAllowed[A],
208 | kr: ObjectKeyReader[A] = null, rab: Reader[(A, B)]): Reader[imm.ListMap[A, B]] = mkMapReader[A, B, imm.ListMap]
209 |
210 | implicit def linkedHashMapReader[A: Reader, B: Reader](implicit allowed: MapPicklingIsAllowed[A],
211 | kr: ObjectKeyReader[A] = null, rab: Reader[(A, B)]): Reader[mut.LinkedHashMap[A, B]] = mkMapReader[A, B, mut.LinkedHashMap]
212 |
213 | implicit def arrayReader[T: ClassTag](implicit r: Reader[T]): Reader[Array[T]] = Reader.reading {
214 | case backend.Extract.Array(arr) => arr.map(r.read).toArray[T]
215 | }.orThrowing(whenReading = "array", expected = "array")
216 | }
217 |
218 | trait CollectionReaderWritersComponent extends CollectionReaders with CollectionWriters {
219 | this: ObjectKeyTypesComponent with MapPicklingComponent with BackendComponent with TypesComponent =>
220 | }
221 |
--------------------------------------------------------------------------------
/core/src/main/scala/io/github/netvl/picopickle/converters.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle
2 |
3 | import shapeless._
4 | import shapeless.ops.function.FnToProduct
5 |
6 | import scala.collection.breakOut
7 | import scala.collection.generic.CanBuildFrom
8 | import scala.language.{higherKinds, implicitConversions}
9 |
10 | trait ConvertersComponent {
11 | this: BackendComponent with TypesComponent =>
12 |
13 | object converters {
14 | trait Converter[-T, +U] {
15 | self =>
16 |
17 | def toBackend(v: T): backend.BValue
18 | def isDefinedAt(bv: backend.BValue): Boolean
19 | def fromBackend(bv: backend.BValue): U
20 |
21 | final def >>[U1](f: U => U1): Converter[T, U1] = this andThen f
22 | final def andThen[U1](f: U => U1): Converter[T, U1] = new Converter[T, U1] {
23 | override def toBackend(v: T): backend.BValue = self.toBackend(v)
24 | override def isDefinedAt(bv: backend.BValue): Boolean = self.isDefinedAt(bv)
25 | override def fromBackend(bv: backend.BValue): U1 = f(self.fromBackend(bv))
26 | }
27 |
28 | final def ||[U1 >: U](c: Converter[Nothing, U1]): Converter[T, U1] = this orElse c
29 | final def orElse[U1 >: U](c: Converter[Nothing, U1]): Converter[T, U1] = new Converter[T, U1] {
30 | override def toBackend(v: T): backend.BValue = self.toBackend(v)
31 | override def isDefinedAt(bv: backend.BValue): Boolean = self.isDefinedAt(bv) || c.isDefinedAt(bv)
32 | override def fromBackend(bv: backend.BValue): U1 = bv match {
33 | case _ if self.isDefinedAt(bv) => self.fromBackend(bv)
34 | case _ if c.isDefinedAt(bv) => c.fromBackend(bv)
35 | }
36 | }
37 |
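   | // Makes the converter total on reads: fromBackend yields None where the original converter is not defined.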
38 | final def lift: Converter[T, Option[U]] = new Converter[T, Option[U]] {
39 | override def toBackend(v: T): backend.BValue = self.toBackend(v)
40 | override def isDefinedAt(bv: backend.BValue): Boolean = true
41 | override def fromBackend(bv: backend.BValue): Option[U] =
42 | if (self.isDefinedAt(bv)) Some(self.fromBackend(bv))
43 | else None
44 | }
45 | }
46 | trait LowerPriorityImplicits {
47 | def converterAsWriter[T](c: Converter[T, Any]): Writer[T]
48 | def converterAsReader[U](c: Converter[Nothing, U]): Reader[U]
49 |
50 | implicit def converterAsReadWriter[T](c: Converter[T, T]): ReadWriter[T] =
51 | ReadWriter[T](converterAsReader(c), converterAsWriter(c))
52 | }
53 | object Converter extends LowerPriorityImplicits {
54 | type Id[T] = Converter[T, T]
55 |
56 | def onlyWriter[T](to: T => backend.BValue): Converter[T, Any] =
57 | apply(to) { case _ => throw new IllegalArgumentException }
58 |
59 | def onlyReader[U](from: PF[backend.BValue, U]): Converter[Nothing, U] =
60 | apply((_: Any) => throw new IllegalArgumentException)(from)
61 |
62 | def apply[T, U](to: T => backend.BValue)(from: PF[backend.BValue, U]): Converter[T, U] =
63 | new Converter[T, U] {
64 | override def toBackend(v: T): backend.BValue = to(v)
65 | override def isDefinedAt(bv: backend.BValue): Boolean = from.isDefinedAt(bv)
66 | override def fromBackend(bv: backend.BValue): U = from(bv)
67 | }
68 |
69 | implicit class ConverterHListOps[L, T](c: Converter[T, L]) {
70 | def >>>[F, V](f: F)(implicit ev: L <:< HList, tp: FnToProduct.Aux[F, L => V]): Converter[T, V] =
71 | this andThenUnpacked f
72 | def andThenUnpacked[F, V](f: F)(implicit ev: L <:< HList, tp: FnToProduct.Aux[F, L => V]): Converter[T, V] =
73 | c >> tp(f)
74 | }
75 |
76 | implicit def converterAsWriter[T](c: Converter[T, Any]): Writer[T] =
77 | Writer(c.toBackend)
78 |
79 | implicit def converterAsReader[U](c: Converter[Nothing, U]): Reader[U] =
80 | Reader {
81 | case bv if c.isDefinedAt(bv) => c.fromBackend(bv)
82 | }
83 | }
84 |
85 | def value[T](implicit wt: Writer[T], rt: Reader[T]): Converter.Id[T] =
86 | new Converter[T, T] {
87 | override def toBackend(v: T): backend.BValue = wt.write(v)
88 | override def isDefinedAt(bv: backend.BValue) = rt.canRead(bv)
89 | override def fromBackend(bv: backend.BValue) = rt.read(bv)
90 | }
91 |
92 | val `null`: Converter.Id[Null] = Converter[Null, Null](_ => backend.makeNull) {
93 | case backend.Get.Null(_) => null
94 | }
95 |
96 | val bool: Converter.Id[Boolean] = Converter(backend.makeBoolean) {
97 | case backend.Extract.Boolean(b) => b
98 | }
99 |
100 | val num: Converter.Id[Number] = Converter(backend.makeNumber) {
101 | case backend.Extract.Number(n) => n
102 | }
103 |
104 | val str: Converter.Id[String] = Converter(backend.makeString) {
105 | case backend.Extract.String(s) => s
106 | }
107 |
108 | trait ObjectMapping[CC <: HList] {
109 | type In <: HList
110 | type Out <: HList
111 |
112 | def toBackend(in: In, cc: CC, bo: backend.BObject): backend.BObject
113 | def isDefinedAt(cc: CC, bo: backend.BObject): Boolean
114 | def fromBackend(cc: CC, bo: backend.BObject): Option[Out]
115 | }
116 | object ObjectMapping {
117 | type Aux[CC <: HList, In0 <: HList, Out0 <: HList] = ObjectMapping[CC] { type In = In0; type Out = Out0 }
118 |
119 | implicit val hlistObjectMapping: Aux[HNil, HNil, HNil] = new ObjectMapping[HNil] {
120 | override type In = HNil
121 | override type Out = HNil
122 |
123 | override def toBackend(in: HNil, cc: HNil, bo: backend.BObject): backend.BObject = bo
124 | override def isDefinedAt(cc: HNil, bo: backend.BObject): Boolean = true
125 | override def fromBackend(cc: HNil, bo: backend.BObject): Option[HNil] = Some(HNil)
126 | }
127 |
128 | implicit def hconsObjectMapping[T, U, CS <: HList, TS <: HList, US <: HList]
129 | (implicit tm: Aux[CS, TS, US]): Aux[(String, Converter[T, U]) :: CS, T :: TS, U :: US] =
130 | new ObjectMapping[(String, Converter[T, U]) :: CS] {
131 | override type In = T :: TS
132 | override type Out = U :: US
133 |
134 | override def toBackend(in: T :: TS, cc: (String, Converter[T, U]) :: CS, bo: backend.BObject): backend.BObject =
135 | (in, cc) match {
136 | case (t :: ts, (k, c) :: cs) =>
137 | val nbo = backend.setObjectKey(bo, k, c.toBackend(t))
138 | tm.toBackend(ts, cs, nbo)
139 | }
140 |
141 | override def isDefinedAt(cc: (String, Converter[T, U]) :: CS, bo: backend.BObject): Boolean = cc match {
142 | case (k, c) :: cs => backend.getObjectKey(bo, k).exists(c.isDefinedAt) && tm.isDefinedAt(cs, bo)
143 | }
144 |
145 | override def fromBackend(s: (String, Converter[T, U]) :: CS, bo: backend.BObject): Option[U :: US] = s match {
146 | case (k, c) :: cc =>
147 | for {
148 | v <- backend.getObjectKey(bo, k)
149 | mv <- c.lift.fromBackend(v)
150 | mt <- tm.fromBackend(cc, bo)
151 | } yield mv :: mt
152 | }
153 | }
154 | }
155 |
156 | object obj {
157 | def apply[CC <: HList](converters: CC)(implicit m: ObjectMapping[CC]): Converter[m.In, m.Out] =
158 | new Converter[m.In, m.Out] {
159 | override def toBackend(v: m.In): backend.BValue =
160 | m.toBackend(v, converters, backend.makeEmptyObject)
161 |
162 | override def isDefinedAt(bv: backend.BValue): Boolean = bv match {
163 | case backend.Get.Object(bo) => m.isDefinedAt(converters, bo)
164 | case _ => false
165 | }
166 |
167 | override def fromBackend(bv: backend.BValue): m.Out = bv match {
168 | case backend.Get.Object(bo) => m.fromBackend(converters, bo).get
169 | }
170 | }
171 |
172 | def as[M[A, B] <: Map[A, B]] = new AsBuilder[M]
173 |
174 | class AsBuilder[M[A, B] <: Map[A, B]] {
175 | def to[V](conv: Converter.Id[V])(implicit cbf: CanBuildFrom[M[String, V], (String, V), M[String, V]]): Converter.Id[M[String, V]] =
176 | Converter[M[String, V], M[String, V]](m => backend.makeObject(m.mapValues(conv.toBackend))) {
177 | case backend.Extract.Object(obj) if obj.values.forall(conv.isDefinedAt) =>
178 | val b = cbf()
179 | b ++= obj.mapValues(conv.fromBackend)
180 | b.result()
181 | }
182 | }
183 | }
184 |
185 | trait ArrayMapping[CC <: HList] {
186 | type In
187 | type Out
188 |
189 | def toBackend(in: In, cc: CC, ba: backend.BArray): backend.BArray
190 | def isDefinedAt(cc: CC, ba: backend.BArray, idx: Int): Boolean
191 | def fromBackend(cc: CC, ba: backend.BArray, idx: Int): Option[Out]
192 | }
193 | object ArrayMapping {
194 | type Aux[CC <: HList, In0 <: HList, Out0 <: HList] = ArrayMapping[CC] { type In = In0; type Out = Out0 }
195 |
196 | implicit val hnilArrayMapping: Aux[HNil, HNil, HNil] = new ArrayMapping[HNil] {
197 | override type In = HNil
198 | override type Out = HNil
199 |
200 | override def toBackend(in: HNil, cc: HNil, ba: backend.BArray): backend.BArray = ba
201 | override def isDefinedAt(cc: HNil, ba: backend.BArray, idx: Int): Boolean = true
202 | override def fromBackend(cc: HNil, ba: backend.BArray, idx: Int): Option[HNil] = Some(HNil)
203 | }
204 |
205 | implicit def hconsArrayMapping[T, U, CS <: HList, TS <: HList, US <: HList](implicit tm: Aux[CS, TS, US])
206 | : Aux[Converter[T, U] :: CS, T :: TS, U :: US] =
207 | new ArrayMapping[Converter[T, U] :: CS] {
208 | override type In = T :: TS
209 | override type Out = U :: US
210 |
211 | override def toBackend(in: T :: TS, cc: Converter[T, U] :: CS,
212 | ba: backend.BArray): backend.BArray = (in, cc) match {
213 | case (t :: ts, c :: cs) =>
214 | val nba = backend.pushToArray(ba, c.toBackend(t))
215 | tm.toBackend(ts, cs, nba)
216 | }
217 |
218 | override def isDefinedAt(cc: Converter[T, U] :: CS, ba: backend.BArray, idx: Int): Boolean = cc match {
219 | case c :: cs if idx < backend.getArrayLength(ba) =>
220 | c.isDefinedAt(backend.getArrayValueAt(ba, idx)) && tm.isDefinedAt(cs, ba, idx + 1)
221 | case _ => false
222 | }
223 |
224 | override def fromBackend(cc: Converter[T, U] :: CS, ba: backend.BArray, idx: Int): Option[U :: US] = cc match {
225 | case c :: cs if idx < backend.getArrayLength(ba) =>
226 | val bv = backend.getArrayValueAt(ba, idx)
227 | for {
228 | mv <- c.lift.fromBackend(bv)
229 | mt <- tm.fromBackend(cs, ba, idx+1)
230 | } yield mv :: mt
231 | case _ => None
232 | }
233 | }
234 | }
235 |
236 | object arr {
237 | def apply[CC <: HList](converters: CC)(implicit m: ArrayMapping[CC]): Converter[m.In, m.Out] =
238 | new Converter[m.In, m.Out] {
239 | override def toBackend(v: m.In) = m.toBackend(v, converters, backend.makeEmptyArray)
240 |
241 | override def isDefinedAt(bv: backend.BValue) = bv match {
242 | case backend.Get.Array(ba) => m.isDefinedAt(converters, ba, 0)
243 | case _ => false
244 | }
245 |
246 | override def fromBackend(bv: backend.BValue) = bv match {
247 | case backend.Get.Array(ba) => m.fromBackend(converters, ba, 0).get
248 | }
249 | }
250 |
251 | def as[C[T] <: Traversable[T]] = new AsBuilder[C]
252 |
253 | class AsBuilder[C[T] <: Traversable[T]] {
254 | def of[U](conv: Converter.Id[U])(implicit cbf: CanBuildFrom[C[U], U, C[U]]): Converter.Id[C[U]] =
255 | Converter[C[U], C[U]](c => backend.makeArray(c.toVector.map(conv.toBackend)(breakOut))) {
256 | case backend.Extract.Array(arr) if arr.forall(conv.isDefinedAt) =>
257 | arr.map[U, C[U]](conv.fromBackend)(breakOut)
258 | }
259 | }
260 | }
261 |
262 | def unlift[T, U](f: T => Option[U]): T => U = t => f(t).get
263 |
264 | implicit class ConverterFunctionOps[V, T](f: V => T) {
265 | def >>[U](c: Converter[T, U]): Converter[V, U] =
266 | new Converter[V, U] {
267 | def isDefinedAt(bv: backend.BValue): Boolean = c.isDefinedAt(bv)
268 | def fromBackend(bv: backend.BValue): U = c.fromBackend(bv)
269 | def toBackend(v: V): backend.BValue = c.toBackend(f(v))
270 | }
271 |
272 | def >>[U](g: T => U): V => U = v => g(f(v))
273 | }
274 |
275 | implicit class ConverterProductFunctionOps[V, P <: Product](f: V => P) {
276 | def >>>[U, L <: HList](c: Converter[L, U])(implicit gen: Generic.Aux[P, L]): Converter[V, U] =
277 | new Converter[V, U] {
278 | def isDefinedAt(bv: backend.BValue): Boolean = c.isDefinedAt(bv)
279 | def fromBackend(bv: backend.BValue): U = c.fromBackend(bv)
280 | def toBackend(v: V): backend.BValue = c.toBackend(gen.to(f(v)))
281 | }
282 | }
283 |
284 | implicit class NumberConverterExt[U](m: Converter.Id[Number]) {
285 | private def conv[T](implicit f: T => Number): T => Number = f
286 |
287 | def byte: Converter.Id[Byte] = conv[Byte] >> m >> (_.byteValue)
288 | def short: Converter.Id[Short] = conv[Short] >> m >> (_.shortValue)
289 | def int: Converter.Id[Int] = conv[Int] >> m >> (_.intValue)
290 | def long: Converter.Id[Long] = conv[Long] >> m >> (_.longValue)
291 | def float: Converter.Id[Float] = conv[Float] >> m >> (_.floatValue)
292 | def double: Converter.Id[Double] = conv[Double] >> m >> (_.doubleValue)
293 | }
294 | }
295 | }
296 |
--------------------------------------------------------------------------------
/core/src/main/scala/io/github/netvl/picopickle/defaults.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle
2 |
3 | import scala.language.experimental.macros
4 | import scala.reflect.macros.whitebox
5 | import shapeless._
6 |
7 | import io.github.netvl.picopickle.SourceTypeTag.@@@
8 |
9 | trait DefaultValuesComponent {
10 |
11 | // I have *no* idea why this trait and its materialization do not work outside of a cake component
12 | // without explicit imports o_O
13 | trait DefaultValue {
14 | type T
15 | type K
16 | type V
17 | def value: Option[V]
18 | }
19 |
20 | object DefaultValue {
21 | type Aux[T0, K0, V0] = DefaultValue { type T = T0; type K = K0; type V = V0 }
22 | implicit def materializeDefaultValue[T, K <: Symbol, V]: DefaultValue.Aux[T, K, V] =
23 | macro DefaultValueMacros.materializeDefaultValueImpl[DefaultValue.Aux[T, K, V], T, K, V]
24 | }
25 | }
26 |
27 | @macrocompat.bundle
28 | class DefaultValueMacros(override val c: whitebox.Context) extends SingletonTypeMacros(c) {
29 | import c.universe._
30 |
31 | def materializeDefaultValueImpl[S, T: WeakTypeTag, K: WeakTypeTag, V: WeakTypeTag]: Tree = {
32 | val kTpe = weakTypeOf[K].dealias
33 | val fieldName = kTpe match {
34 | case SingletonSymbolType(s) => s
35 | case _ => c.abort(c.enclosingPosition, s"Type $kTpe is not a tagged symbol type")
36 | }
37 |
38 | val tTpe = weakTypeOf[T]
39 | val tCompanionSym = tTpe.typeSymbol.companion
40 | if (tCompanionSym == NoSymbol)
41 | c.abort(c.enclosingPosition, s"No companion symbol is available for type $tTpe")
42 |
43 | val ctorSym = tTpe.decl(termNames.CONSTRUCTOR).asTerm.alternatives.collectFirst {
44 | case ctor: MethodSymbol if ctor.isPrimaryConstructor => ctor
45 | }.getOrElse(c.abort(c.enclosingPosition, s"Could not find the primary constructor for type $tTpe"))
46 |
47 | val vTpe = weakTypeOf[V]
48 |
49 | val defaultMethodName = ctorSym.paramLists.headOption.flatMap { argSyms =>
50 | argSyms.map(_.asTerm).zipWithIndex.collect {
51 | case (p, i) if p.isParamWithDefault && p.name.toString == fieldName && p.typeSignature =:= vTpe =>
52 | TermName(s"$$lessinit$$greater$$default$$${i+1}") // TODO: there may be a more robust way to derive this synthetic name
53 | }.headOption
54 | }
55 |
56 | val invocation = defaultMethodName match {
57 | case Some(name) => q"_root_.scala.Some($tCompanionSym.$name)"
58 | case None => q"_root_.scala.None"
59 | }
60 |
61 | val generatedClassName = TypeName(s"DefaultValue$$${tTpe.typeSymbol.name}$$$fieldName")
62 | q"""
63 | {
64 | final class $generatedClassName extends DefaultValue {
65 | type T = $tTpe
66 | type K = $kTpe
67 | type V = $vTpe
68 | def value: _root_.scala.Option[$vTpe] = $invocation
69 | }
70 | new $generatedClassName
71 | }
72 | """
73 | }
74 | }
75 |
76 | object SourceTypeTag {
77 | def apply[U] = new Tagger[U]
78 |
79 | trait Tag[T]
80 | type @@@[+T, U] = T with Tag[U]
81 |
82 | class Tagger[U] {
83 | def attachTo[T](value: T): T @@@ U = value.asInstanceOf[T @@@ U]
84 | }
85 | }
86 |
87 | trait TagWithType[In, U] {
88 | type Out
89 | def wrap(source: In): Out
90 | def unwrap(source: Out): In
91 | }
92 |
93 | object TagWithType {
94 | def apply[In, U](implicit t: TagWithType[In, U]): Aux[In, U, t.Out] = t
95 |
96 | type Aux[In, U, Out0] = TagWithType[In, U] { type Out = Out0 }
97 |
98 | implicit def tagWithTypeHNil[U]: TagWithType.Aux[HNil, U, HNil] = new TagWithType[HNil, U] {
99 | type Out = HNil
100 | def wrap(source: HNil): HNil = HNil
101 | def unwrap(source: HNil): HNil = HNil
102 | }
103 |
104 | implicit def tagWithTypeHCons[U, H, T <: HList, O <: HList](implicit tt: TagWithType.Aux[T, U, O])
105 | : TagWithType.Aux[H :: T, U, (H @@@ U) :: O] =
106 | new TagWithType[H :: T, U] {
107 | type Out = (H @@@ U) :: O
108 | def wrap(source: H :: T) = source match {
109 | case h :: t => SourceTypeTag[U].attachTo(h) :: tt.wrap(t)
110 | }
111 | def unwrap(source: (H @@@ U) :: O): H :: T = source match {
112 | case h :: t => h :: tt.unwrap(t)
113 | }
114 | }
115 |
116 | implicit def tagWithTypeCNil[U]: TagWithType.Aux[CNil, U, CNil] = new TagWithType[CNil, U]{
117 | type Out = CNil
118 | // XXX: maybe this should throw something?
119 | def wrap(source: CNil): CNil = source
120 | def unwrap(source: CNil): CNil = source
121 | }
122 |
123 | implicit def tagWithTypeCCons[U, L, R <: Coproduct, O <: Coproduct](implicit tr: TagWithType.Aux[R, U, O])
124 | : TagWithType.Aux[L :+: R, U, (L @@@ U) :+: O] =
125 | new TagWithType[L :+: R, U] {
126 | type Out = (L @@@ U) :+: O
127 | def wrap(source: L :+: R): (L @@@ U) :+: O = source match {
128 | case Inl(left) => Inl(SourceTypeTag[U].attachTo(left))
129 | case Inr(right) => Inr(tr.wrap(right))
130 | }
131 | def unwrap(source: (L @@@ U) :+: O): L :+: R = source match {
132 | case Inl(left) => Inl(left)
133 | case Inr(right) => Inr(tr.unwrap(right))
134 | }
135 | }
136 | }
137 |
--------------------------------------------------------------------------------
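
A sketch of what this machinery enables, assuming the `CollectionsPickler` object (whose `DefaultPickler` mixes in `DefaultValuesComponent`); the representation shown is the collections backend's `Map`:

    import io.github.netvl.picopickle.backends.collections.CollectionsPickler._

    case class Server(host: String, port: Int = 8080)

    // the materialized DefaultValue lets the generic reader fall back to
    // the constructor default when the "port" key is absent
    read[Server](Map("host" -> "localhost"))  // Server("localhost", 8080)
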
/core/src/main/scala/io/github/netvl/picopickle/discriminator.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle
2 |
3 | import shapeless.{DefaultSymbolicLabelling, LabelledMacros}
4 |
5 | import scala.annotation.StaticAnnotation
6 | import scala.language.experimental.macros
7 | import scala.reflect.macros.whitebox
8 |
9 | class discriminator(k: String) extends StaticAnnotation
10 | class key(k: String) extends StaticAnnotation
11 |
12 | trait SealedTraitDiscriminatorComponent {
13 | def defaultDiscriminatorKey: String
14 |
15 | trait Discriminator {
16 | type T
17 | def value: Option[String]
18 | }
19 |
20 | object Discriminator {
21 | type Aux[T0] = Discriminator { type T = T0 }
22 | implicit def materializeDiscriminator[T]: Discriminator.Aux[T] =
23 | macro DiscriminatorMacros.materializeDiscriminatorImpl[Discriminator.Aux[T], T]
24 | }
25 | }
26 |
27 | trait DefaultSealedTraitDiscriminatorComponent extends SealedTraitDiscriminatorComponent {
28 | override lazy val defaultDiscriminatorKey: String = "$variant"
29 | }
30 |
31 | @macrocompat.bundle
32 | class DiscriminatorMacros(val c: whitebox.Context) {
33 | import c.universe._
34 |
35 | def materializeDiscriminatorImpl[S, T: WeakTypeTag]: Tree = {
36 | val tTpe = weakTypeOf[T]
37 | val tSym = tTpe.typeSymbol
38 |
39 | if (tSym.isClass && tSym.asClass.isSealed && tSym.asClass.isTrait) { // sealed trait
40 | val discriminatorValue = tSym.annotations
41 | .find(isDiscriminatorAnnotation)
42 | .flatMap(_.tree.children.tail.headOption)
43 | .collect { case Literal(Constant(s)) => s.toString }
44 |
45 | val discriminatorTree = discriminatorValue match {
46 | case Some(value) => q"_root_.scala.Some($value)"
47 | case None => q"_root_.scala.None"
48 | }
49 |
50 | val generatedClassName = TypeName(s"Discriminator$$${tSym.name}")
51 | q"""
52 | {
53 | final class $generatedClassName extends Discriminator {
54 | type T = $tTpe
55 | def value: _root_.scala.Option[_root_.scala.Predef.String] = $discriminatorTree
56 | }
57 | new $generatedClassName
58 | }
59 | """
60 | } else {
61 | c.abort(c.enclosingPosition, "Discriminators can only be obtained for sealed traits")
62 | }
63 | }
64 |
65 | def isDiscriminatorAnnotation(ann: Annotation): Boolean = ann.tree.tpe =:= typeOf[discriminator]
66 | }
67 |
68 | trait AnnotationSupportingSymbolicLabellingComponent {
69 | implicit def mkSymbolicLabelling[T]: DefaultSymbolicLabelling[T] =
70 | macro AnnotationSupportSymbolicLabelling.mkAnnotatedSymbolicLabellingImpl[T]
71 | }
72 |
73 | // Extracted almost entirely from shapeless and tweaked to support custom annotations
74 | @macrocompat.bundle
75 | class AnnotationSupportSymbolicLabelling(override val c: whitebox.Context) extends LabelledMacros(c) {
76 | import c.universe._
77 |
78 | def mkAnnotatedSymbolicLabellingImpl[T](implicit tTag: c.WeakTypeTag[T]): Tree = {
79 | val tTpe = weakTypeOf[T]
80 | val labels: List[String] =
81 | if (isProduct(tTpe)) fieldSymbolsOf(tTpe).map(obtainKeyOfField(_, tTpe))
82 | else if (isCoproduct(tTpe)) ctorsOf(tTpe).map(obtainKeyOfType)
83 | else c.abort(c.enclosingPosition, s"$tTpe is not case class like or the root of a sealed family of types")
84 |
85 | val labelTpes = labels.map(SingletonSymbolType.apply)
86 | val labelValues = labels.map(mkSingletonSymbol)
87 |
88 | val labelsTpe = mkHListTpe(labelTpes)
89 | val labelsValue =
90 | labelValues.foldRight(q"_root_.shapeless.HNil": Tree) {
91 | case (elem, acc) => q"_root_.shapeless.::($elem, $acc)"
92 | }
93 |
94 | q"""
95 | new _root_.shapeless.DefaultSymbolicLabelling[$tTpe] {
96 | type Out = $labelsTpe
97 | def apply(): $labelsTpe = $labelsValue
98 | } : _root_.shapeless.DefaultSymbolicLabelling.Aux[$tTpe, $labelsTpe]
99 | """
100 | }
101 |
102 | def isKeyAnnotation(ann: Annotation): Boolean = ann.tree.tpe =:= typeOf[key]
103 |
104 | def obtainKeyOfSym(sym: Symbol) = {
105 | sym.annotations
106 | .find(isKeyAnnotation)
107 | .flatMap(_.tree.children.tail.headOption)
108 | .collect { case Literal(Constant(s)) => s.toString }
109 | .getOrElse(nameAsString(sym.name))
110 | }
111 |
112 | def obtainKeyOfType(tpe: Type): String = obtainKeyOfSym(tpe.typeSymbol)
113 |
114 | def obtainKeyOfField(sym: Symbol, tpe: Type): String = {
115 | tpe
116 | .decls
117 | .collect { case d if d.name == termNames.CONSTRUCTOR => d.asMethod }
118 | .flatMap(_.paramLists.flatten)
119 | .filter(_.name == sym.name) // don't know if this is a good idea but I see no other way
120 | .flatMap(_.annotations)
121 | .find(isKeyAnnotation)
122 | .flatMap(_.tree.children.tail.headOption)
123 | .collect { case Literal(Constant(s)) => s.toString }
124 | .getOrElse(nameAsString(sym.name))
125 | }
126 |
127 | def fieldSymbolsOf(tpe: Type): List[TermSymbol] =
128 | tpe.decls.toList collect {
129 | case sym: TermSymbol if isCaseAccessorLike(sym) => sym
130 | }
131 | }
132 |
--------------------------------------------------------------------------------
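
Both annotations in action, again assuming the collections backend pickler (the representations shown are illustrative):

    import io.github.netvl.picopickle.{discriminator, key}
    import io.github.netvl.picopickle.backends.collections.CollectionsPickler._

    @discriminator("kind") sealed trait Shape
    case class Circle(radius: Double) extends Shape
    @key("box") case class Rect(w: Double, h: Double) extends Shape

    // @discriminator overrides the default "$variant" key for the whole hierarchy;
    // @key overrides the name stored for a single variant
    write[Shape](Circle(1.0))  // Map("kind" -> "Circle", "radius" -> 1.0)
    write[Shape](Rect(2, 3))   // Map("kind" -> "box", "w" -> 2.0, "h" -> 3.0)
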
/core/src/main/scala/io/github/netvl/picopickle/exceptions.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle
2 |
3 | trait ExceptionsComponent {
4 | this: BackendComponent =>
5 |
6 | abstract class BaseException(message: String, cause: Throwable)
7 | extends RuntimeException(message, cause)
8 |
9 | case class ReadException(message: String, data: backend.BValue, cause: Throwable = null)
10 | extends BaseException(message, cause)
11 |
12 | object ReadException {
13 | def apply(reading: String, expected: String, got: backend.BValue): ReadException =
14 | ReadException(s"reading $reading, expected $expected, got $got", data = got)
15 | }
16 | }
17 |
18 |
--------------------------------------------------------------------------------
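
`ReadException` is what surfaces through `tryRead` (defined in pickler.scala below) when the input does not match. A small sketch with the collections backend; the message text is illustrative:

    import io.github.netvl.picopickle.backends.collections.CollectionsPickler._

    read[Int](42)            // 42
    tryRead[Int](Map.empty)  // Failure(ReadException("reading int, expected number, got Map()"))
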
/core/src/main/scala/io/github/netvl/picopickle/nulls.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle
2 |
3 | trait NullHandlerComponent {
4 | this: TypesComponent with BackendComponent =>
5 |
6 | def nullHandler: NullHandler
7 |
8 | trait NullHandler {
9 | def handlesNull: Boolean
10 | def toBackend[T](value: T, cont: T => backend.BValue): backend.BValue
11 | def fromBackend[T](value: backend.BValue, cont: backend.BValue => T): T
12 |
13 | def canRead(value: backend.BValue, cont: backend.BValue => Boolean): Boolean = value match {
14 | case backend.Get.Null(_) => handlesNull
15 | case nv => cont(nv)
16 | }
17 | }
18 | }
19 |
20 | trait DefaultNullHandlerComponent extends NullHandlerComponent {
21 | this: TypesComponent with BackendComponent =>
22 |
23 | override def nullHandler: NullHandler = new NullHandler {
24 | override def handlesNull: Boolean = true
25 |
26 | override def fromBackend[T](value: backend.BValue, cont: backend.BValue => T): T = value match {
27 | case backend.Get.Null(_) => null.asInstanceOf[T]
28 | case _ => cont(value)
29 | }
30 |
31 | override def toBackend[T](value: T, cont: T => backend.BValue): backend.BValue = value match {
32 | case null => backend.makeNull
33 | case _ => cont(value)
34 | }
35 | }
36 | }
37 |
38 | trait ProhibitiveNullHandlerComponent extends NullHandlerComponent {
39 | this: TypesComponent with BackendComponent with ExceptionsComponent =>
40 |
41 | override def nullHandler: NullHandler = new NullHandler {
42 | override def handlesNull: Boolean = false
43 |
44 | override def fromBackend[T](value: backend.BValue, cont: backend.BValue => T): T = value match {
45 | case backend.Get.Null(_) => throw ReadException("null values are prohibited", value)
46 | case _ => cont(value)
47 | }
48 |
49 | override def toBackend[T](value: T, cont: T => backend.BValue): backend.BValue = value match {
50 | case null => throw new IllegalArgumentException("null values are prohibited")
51 | case _ => cont(value)
52 | }
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
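
Swapping the null-handling strategy is a matter of stacking a different component onto a pickler; a hypothetical strict variant of the collections pickler:

    import io.github.netvl.picopickle.ProhibitiveNullHandlerComponent
    import io.github.netvl.picopickle.backends.collections.CollectionsPickler

    // the rightmost mixin's nullHandler wins in the linearization
    object StrictPickler extends CollectionsPickler with ProhibitiveNullHandlerComponent

    StrictPickler.write[String](null)  // throws IllegalArgumentException instead of writing a null
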
/core/src/main/scala/io/github/netvl/picopickle/objectkeys.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle
2 |
3 | import scala.annotation.implicitNotFound
4 | import scala.{collection => coll}
5 | import scala.collection.{mutable => mut, immutable => imm}
6 |
7 | trait ObjectKeyTypesComponent {
8 | @implicitNotFound("Don't know how to write ${T} as a map key; make sure that an implicit `ObjectKeyWriter[${T}]` is in scope")
9 | trait ObjectKeyWriter[T] {
10 | def write(value: T): String
11 | }
12 |
13 | object ObjectKeyWriter {
14 | def apply[T](conv: T => String): ObjectKeyWriter[T] = new ObjectKeyWriter[T] {
15 | override def write(value: T): String = conv(value)
16 | }
17 | }
18 |
19 | @implicitNotFound("Don't know how to read ${T} as a map key; make sure that an implicit `ObjectKeyReader[${T}]` is in scope")
20 | trait ObjectKeyReader[T] {
21 | def read(value: String): T
22 | }
23 |
24 | object ObjectKeyReader {
25 | def apply[T](conv: String => T): ObjectKeyReader[T] = new ObjectKeyReader[T] {
26 | override def read(value: String): T = conv(value)
27 | }
28 | }
29 |
30 | type ObjectKeyReadWriter[T] = ObjectKeyReader[T] with ObjectKeyWriter[T]
31 |
32 | object ObjectKeyReadWriter {
33 | def apply[T](from: String => T): ObjectKeyReadWriter[T] = apply((t: T) => t.toString, from)
34 |
35 | def apply[T](to: T => String, from: String => T): ObjectKeyReadWriter[T] = new ObjectKeyReader[T] with ObjectKeyWriter[T] {
36 | override def write(value: T): String = to(value)
37 | override def read(value: String): T = from(value)
38 | }
39 |
40 | def apply[T](implicit r: ObjectKeyReader[T], w: ObjectKeyWriter[T]) = new ObjectKeyReader[T] with ObjectKeyWriter[T] {
41 | override def write(value: T): String = w.write(value)
42 | override def read(value: String): T = r.read(value)
43 | }
44 | }
45 |
46 | }
47 |
48 | trait ObjectKeyWritersComponent {
49 | this: ObjectKeyTypesComponent =>
50 |
51 | implicit val stringObjectKeyWriter: ObjectKeyWriter[String] = ObjectKeyWriter(identity)
52 | }
53 |
54 | trait ObjectKeyReadersComponent {
55 | this: ObjectKeyTypesComponent =>
56 |
57 | implicit val stringObjectKeyReader: ObjectKeyReader[String] = ObjectKeyReader(identity)
58 | }
59 |
60 | trait ObjectKeyReaderWritersComponent extends ObjectKeyReadersComponent with ObjectKeyWritersComponent {
61 | this: ObjectKeyTypesComponent =>
62 | }
63 |
--------------------------------------------------------------------------------
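
Only strings are supported as object keys out of the box; other key types opt in with an implicit instance. A sketch with the collections backend, using the single-argument `ObjectKeyReadWriter` overload (which falls back to `toString` for writing):

    import io.github.netvl.picopickle.backends.collections.CollectionsPickler._

    implicit val intKeyRW: ObjectKeyReadWriter[Int] = ObjectKeyReadWriter(_.toInt)

    write(Map(1 -> "one", 2 -> "two"))         // Map("1" -> "one", "2" -> "two")
    read[Map[Int, String]](Map("1" -> "one"))  // Map(1 -> "one")
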
/core/src/main/scala/io/github/netvl/picopickle/pickler.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle
2 |
3 | import scala.util.Try
4 |
5 | trait Pickler {
6 | self: BackendComponent with TypesComponent =>
7 | def read[T: Reader](value: backend.BValue): T
8 | def write[T: Writer](value: T): backend.BValue
9 |
10 | def tryRead[T: Reader](value: backend.BValue): Try[T] = Try(read(value))
11 |
12 | class Serializer[T: Reader: Writer] {
13 | def read(value: backend.BValue): T = self.read(value)
14 | def tryRead(value: backend.BValue): Try[T] = self.tryRead(value)
15 |
16 | def write(value: T): backend.BValue = self.write(value)
17 | }
18 | def serializer[T: Reader: Writer] = new Serializer[T]
19 | }
20 |
21 | trait DefaultPickler
22 | extends Pickler
23 | with ExceptionsComponent
24 | with ShapelessReaderWritersComponent
25 | with DefaultValuesComponent
26 | with DefaultNullHandlerComponent
27 | with AnnotationSupportingSymbolicLabellingComponent
28 | with DefaultSealedTraitDiscriminatorComponent
29 | with PrimitiveReaderWritersComponent
30 | with CollectionReaderWritersComponent
31 | with ObjectKeyTypesComponent
32 | with ObjectKeyReaderWritersComponent
33 | with MapPicklingComponent
34 | with MapPicklingEnabledByDefault
35 | with TupleReaderWritersComponent
36 | with ConvertersComponent
37 | with TypesComponent {
38 | this: BackendComponent =>
39 |
40 | override def read[T](value: backend.BValue)(implicit r: Reader[T]): T = r.read(value)
41 | override def write[T](value: T)(implicit w: Writer[T]): backend.BValue = w.write(value)
42 | }
43 |
44 |
45 |
--------------------------------------------------------------------------------
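
The `Serializer` helper captures the implicit `Reader` and `Writer` once instead of resolving them at every call site; a sketch with the collections backend:

    import io.github.netvl.picopickle.backends.collections.CollectionsPickler._

    case class User(name: String, age: Int)

    val userSerializer = serializer[User]
    val bv = userSerializer.write(User("alice", 33))  // Map("name" -> "alice", "age" -> 33)
    userSerializer.read(bv)                           // User("alice", 33)
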
/core/src/main/scala/io/github/netvl/picopickle/primitives.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle
2 |
3 | trait PrimitiveWriters {
4 | this: BackendComponent with TypesComponent =>
5 |
6 | implicit val unitWriter: Writer[Unit] = Writer {
7 | case _ => backend.makeEmptyObject
8 | }
9 |
10 | protected final def numWriter[T](implicit asNumber: T => Number): Writer[T] = Writer {
11 | case x => backend.makeNumberAccurately(asNumber(x))
12 | }
13 |
14 | implicit val byteWriter: Writer[Byte] = numWriter[Byte]
15 | implicit val shortWriter: Writer[Short] = numWriter[Short]
16 | implicit val intWriter: Writer[Int] = numWriter[Int]
17 | implicit val longWriter: Writer[Long] = numWriter[Long]
18 | implicit val floatWriter: Writer[Float] = numWriter[Float]
19 | implicit val doubleWriter: Writer[Double] = numWriter[Double]
20 |
21 | implicit val charWriter: Writer[Char] = Writer {
22 | case c => backend.makeString(c.toString)
23 | }
24 |
25 | implicit val booleanWriter: Writer[Boolean] = Writer {
26 | case b => backend.makeBoolean(b)
27 | }
28 |
29 | implicit val stringWriter: Writer[String] = Writer {
30 | case s => backend.makeString(s)
31 | }
32 |
33 | implicit def optionWriter[T](implicit w: Writer[T]): Writer[Option[T]] = Writer {
34 | case Some(value) => backend.makeArray(Vector(w.write(value)))
35 | case None => backend.makeArray(Vector.empty)
36 | }
37 | implicit def someWriter[T: Writer]: Writer[Some[T]] = Writer {
38 | case s => optionWriter[T].write(s)
39 | }
40 | implicit val noneWriter: Writer[None.type] = Writer {
41 | case None => optionWriter[Int].write(None)
42 | }
43 |
44 | implicit def eitherWriter[A, B](implicit wa: Writer[A], wb: Writer[B]): Writer[Either[A, B]] = Writer {
45 | case Left(l) => backend.makeArray(Vector(backend.makeNumber(0), wa.write(l)))
46 | case Right(r) => backend.makeArray(Vector(backend.makeNumber(1), wb.write(r)))
47 | }
48 | implicit def leftWriter[A: Writer, B: Writer]: Writer[Left[A, B]] = Writer {
49 | case l => eitherWriter[A, B].write(l)
50 | }
51 | implicit def rightWriter[A: Writer, B: Writer]: Writer[Right[A, B]] = Writer {
52 | case r => eitherWriter[A, B].write(r)
53 | }
54 |
55 | implicit val symbolWriter: Writer[Symbol] = Writer {
56 | case s => backend.makeString(s.name)
57 | }
58 |
59 | implicit val nullWriter: Writer[Null] = Writer {
60 | case null => backend.makeNull
61 | }
62 | }
63 |
64 | trait PrimitiveReaders {
65 | this: BackendComponent with TypesComponent with ExceptionsComponent =>
66 |
67 | // basic primitives
68 |
69 | implicit val unitReader: Reader[Unit] = Reader.reading {
70 | case backend.Extract.Object(m) if m.isEmpty => ()
71 | }.orThrowing(whenReading = "unit", expected = "empty object")
72 |
73 | protected final def numReader[T](name: String, f: Number => T): Reader[T] = Reader.reading {
74 | case n if backend.fromNumberAccurately.isDefinedAt(n) =>
75 | f(backend.fromNumberAccurately(n))
76 | }.orThrowing(whenReading = name, expected = backend.fromNumberAccuratelyExpected)
77 |
78 | implicit val byteReader: Reader[Byte] = numReader("byte", _.byteValue())
79 | implicit val shortReader: Reader[Short] = numReader("short", _.shortValue())
80 | implicit val intReader: Reader[Int] = numReader("int", _.intValue())
81 | implicit val longReader: Reader[Long] = numReader("long", _.longValue())
82 | implicit val floatReader: Reader[Float] = numReader("float", _.floatValue())
83 | implicit val doubleReader: Reader[Double] = numReader("double", _.doubleValue())
84 |
85 | implicit val charReader: Reader[Char] = Reader.reading {
86 | case backend.Extract.String(s) if s.nonEmpty => s.charAt(0)
87 | }.orThrowing(whenReading = "char", expected = "non-empty string")
88 |
89 | implicit val booleanReader: Reader[Boolean] = Reader.reading {
90 | case backend.Extract.Boolean(b) => b
91 | }.orThrowing(whenReading = "boolean", expected = "boolean")
92 |
93 | implicit val stringReader: Reader[String] = Reader.reading {
94 | case backend.Extract.String(s) => s
95 | }.orThrowing(whenReading = "string", expected = "string")
96 |
97 | // option
98 |
99 | implicit def optionReader[T](implicit r: Reader[T]): Reader[Option[T]] = Reader.reading {
100 | case backend.Extract.Array(arr) if arr.length <= 1 => arr.headOption.map(r.read)
101 | }.orThrowing(whenReading = "option", expected = "array")
102 |
103 | implicit def someReader[T](implicit r: Reader[T]): Reader[Some[T]] = Reader.reading {
104 | case backend.Extract.Array(arr) if arr.length == 1 => Some(r.read(arr.head))
105 | }.orThrowing(whenReading = "some", expected = "array with one element")
106 |
107 | implicit val noneReader: Reader[None.type] = Reader.reading {
108 | case backend.Extract.Array(arr) if arr.isEmpty => None
109 | }.orThrowing(whenReading = "none", expected = "empty array")
110 |
111 | // either
112 |
113 | implicit def eitherReader[A, B](implicit ra: Reader[A], rb: Reader[B]): Reader[Either[A, B]] = Reader.reading[Either[A, B]] {
114 | case backend.Extract.Array(Vector(backend.Extract.Number(n), bv)) if n.intValue() == 0 =>
115 | Left(ra.read(bv))
116 | case backend.Extract.Array(Vector(backend.Extract.Number(n), bv)) if n.intValue() == 1 =>
117 | Right(rb.read(bv))
118 | }.orThrowing(whenReading = "either", expected = "array with first element 0 or 1")
119 |
120 | implicit def leftReader[A, B](implicit ra: Reader[A]): Reader[Left[A, B]] = Reader.reading[Left[A, B]] {
121 | case backend.Extract.Array(Vector(backend.Extract.Number(n), bv)) if n.intValue() == 0 =>
122 | Left(ra.read(bv))
123 | }.orThrowing(whenReading = "left", expected = "array with first element 0")
124 |
125 | implicit def rightReader[A, B](implicit rb: Reader[B]): Reader[Right[A, B]] = Reader.reading[Right[A, B]] {
126 | case backend.Extract.Array(Vector(backend.Extract.Number(n), bv)) if n.intValue() == 1 =>
127 | Right(rb.read(bv))
128 | }.orThrowing(whenReading = "right", expected = "array with first element 1")
129 |
130 | implicit val symbolReader: Reader[Symbol] = Reader.reading {
131 | case backend.Extract.String(s) => Symbol(s)
132 | }.orThrowing(whenReading = "symbol", expected = "string")
133 |
134 | implicit val nullReader: Reader[Null] = Reader.reading {
135 | case backend.Get.Null(_) => null
136 | }.orThrowing(whenReading = "null", expected = "null")
137 | }
138 |
139 | trait PrimitiveReaderWritersComponent extends PrimitiveReaders with PrimitiveWriters {
140 | this: BackendComponent with TypesComponent with ExceptionsComponent =>
141 | }
142 |
--------------------------------------------------------------------------------
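
The encodings chosen above are easiest to see on concrete values; with the collections backend (representations illustrative):

    import io.github.netvl.picopickle.backends.collections.CollectionsPickler._

    write(Some(3): Option[Int])            // Vector(3)      -- options are 0/1-element arrays
    write(None: Option[Int])               // Vector()
    write(Left("e"): Either[String, Int])  // Vector(0, "e") -- eithers carry a 0/1 tag
    write('sym)                            // "sym"          -- symbols become strings
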
/core/src/main/scala/io/github/netvl/picopickle/shapeless.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle
2 |
3 | import io.github.netvl.picopickle.SourceTypeTag.@@@
4 | import shapeless._
5 | import shapeless.labelled._
6 |
7 | trait LowerPriorityShapelessWriters2 {
8 | this: BackendComponent with TypesComponent =>
9 |
10 | implicit def genericCoproductWriter[T, R <: Coproduct, RT <: Coproduct](implicit g: LabelledGeneric.Aux[T, R],
11 | rt: TagWithType.Aux[R, T, RT],
12 | wr: Lazy[Writer[RT]]): Writer[T] =
13 | Writer.fromF1 {
14 | case (v, bv) => wr.value.write0(rt.wrap(g.to(v)), bv)
15 | }
16 |
17 | implicit def genericHListWriter[T, R <: HList](implicit g: LabelledGeneric.Aux[T, R],
18 | wr: Lazy[Writer[R]]): Writer[T] =
19 | Writer.fromF1 {
20 | case (f, bv) => wr.value.write0(g.to(f), bv)
21 | }
22 | }
23 |
24 | trait LowerPriorityShapelessWriters extends LowerPriorityShapelessWriters2 {
25 | this: BackendComponent with TypesComponent =>
26 |
27 | implicit def fieldTypeWriter[K <: Symbol, V](implicit kw: Witness.Aux[K], vw: Lazy[Writer[V]]): Writer[FieldType[K, V]] =
28 | Writer.fromF0N { f => {
29 | case Some(backend.Get.Object(v)) => backend.setObjectKey(v, kw.value.name, vw.value.write(f))
30 | case None => backend.makeObject(Map(kw.value.name -> vw.value.write(f)))
31 | }}
32 | }
33 |
34 | trait ShapelessWriters extends LowerPriorityShapelessWriters {
35 | this: BackendComponent with TypesComponent with SealedTraitDiscriminatorComponent =>
36 |
37 | implicit def optionFieldTypeWriter[K <: Symbol, V](implicit kw: Witness.Aux[K],
38 | vw: Lazy[Writer[V]]): Writer[FieldType[K, Option[V]]] =
39 | Writer.fromF0N { f => {
40 | case Some(backend.Get.Object(v)) => (f: Option[V]) match {
41 | case Some(value) => backend.setObjectKey(v, kw.value.name, vw.value.write(value))
42 | case None => v: backend.BValue
43 | }
44 | case None => (f: Option[V]) match {
45 | case Some(value) => backend.makeObject(Map(kw.value.name -> vw.value.write(value)))
46 | case None => backend.makeEmptyObject
47 | }
48 | } }
49 |
50 | implicit def recordHeadWriter[H, T <: HList](implicit hw: Lazy[Writer[H]], tw: Lazy[Writer[T]],
51 | ev: H <:< FieldType[_, _]): Writer[H :: T] =
52 | Writer.fromF1 {
53 | case (h :: t, bv) => tw.value.write0(t, Some(hw.value.write0(h, bv)))
54 | }
55 |
56 | implicit val hnilWriter: Writer[HNil] =
57 | Writer.fromF0N { _ => {
58 | case Some(bv) => bv
59 | case None => backend.makeEmptyObject
60 | } }
61 |
62 | protected object ObjectOrEmpty {
63 | def unapply(bv: Option[backend.BValue]): Option[backend.BObject] = bv match {
64 | case Some(backend.Get.Object(obj)) => Some(obj)
65 | case None => Some(backend.makeEmptyObject)
66 | case _ => None
67 | }
68 | }
69 |
70 | implicit def coproductWriter[K <: Symbol, V, U, R <: Coproduct](implicit vw: Lazy[Writer[V]],
71 | tw: Lazy[Writer[R]],
72 | discriminator: Discriminator.Aux[U] = null,
73 | kw: Witness.Aux[K]): Writer[(FieldType[K, V] @@@ U) :+: R] =
74 | Writer.fromF1 {
75 | case (Inl(h), ObjectOrEmpty(obj)) =>
76 | val dkey = Option(discriminator).flatMap(_.value).getOrElse(defaultDiscriminatorKey)
77 | vw.value.write0(h, Some(backend.setObjectKey(obj, dkey, backend.makeString(kw.value.name))))
78 |
79 | case (Inr(t), ObjectOrEmpty(obj)) =>
80 | tw.value.write0(t, Some(obj))
81 | }
82 |
83 | implicit val cnilWriter: Writer[CNil] =
84 | Writer.fromF0N { _ => {
85 | case Some(obj) => obj // pass through the accumulated value
86 | // This is impossible, I believe
87 | case None => throw new IllegalStateException("Couldn't serialize a sealed trait")
88 | } }
89 | }
90 |
91 | trait LowerPriorityShapelessReaders2 {
92 | this: BackendComponent with TypesComponent =>
93 |
94 | implicit def genericReader[T, R, RT](implicit g: LabelledGeneric.Aux[T, R],
95 | rt: TagWithType.Aux[R, T, RT],
96 | rr: Lazy[Reader[RT]]): Reader[T] =
97 | rr.value.andThen(rt.unwrap _ andThen g.from)
98 | }
99 |
100 | trait LowerPriorityShapelessReaders extends LowerPriorityShapelessReaders2 {
101 | this: BackendComponent with TypesComponent with DefaultValuesComponent =>
102 |
103 | implicit def fieldTypeReaderTagged[K <: Symbol, V, T](implicit kw: Witness.Aux[K],
104 | vr: Lazy[Reader[V]],
105 | dv: DefaultValue.Aux[T, K, V]): Reader[FieldType[K, V] @@@ T] =
106 | Reader.reading {
107 | case backend.Get.Object(v) if backend.containsObjectKey(v, kw.value.name) || dv.value.isDefined =>
108 | val value = backend.getObjectKey(v, kw.value.name).map(vr.value.read).orElse(dv.value).get
109 | SourceTypeTag[T].attachTo(field[K](value))
110 | }.orThrowing(
111 | whenReading = s"case class field '${kw.value.name}'",
112 | expected = s"object with key '${kw.value.name}' or a default value for this field"
113 | )
114 | }
115 |
116 | trait ShapelessReaders extends LowerPriorityShapelessReaders {
117 | this: BackendComponent with TypesComponent with SealedTraitDiscriminatorComponent
118 | with DefaultValuesComponent with ExceptionsComponent =>
119 |
120 | implicit def optionFieldTypeReaderTagged[K <: Symbol, V, T](implicit kw: Witness.Aux[K],
121 | vr: Lazy[Reader[V]],
122 | dv: DefaultValue.Aux[T, K, Option[V]])
123 | : Reader[FieldType[K, Option[V]] @@@ T] =
124 | Reader.reading {
125 | case backend.Get.Object(v) =>
126 | val value = backend.getObjectKey(v, kw.value.name).map(vr.value.read).orElse(dv.value.flatten)
127 | SourceTypeTag[T].attachTo(field[K](value))
128 | }.orThrowing(whenReading = s"case class field '${kw.value.name}'", expected = "object")
129 |
130 | implicit def recordHeadReader[H, T <: HList](implicit hr: Lazy[Reader[H]], tr: Lazy[Reader[T]],
131 | ev: H <:< FieldType[_, _]): Reader[H :: T] =
132 | Reader.reading {
133 | case bv@backend.Get.Object(_) => hr.value.read(bv) :: tr.value.read(bv)
134 | }.orThrowing(whenReading = "case class", expected = "object")
135 |
136 | implicit val hnilReader: Reader[HNil] = Reader { case _ => HNil }
137 |
138 | protected class ObjectWithDiscriminatorExtractor(discriminator: String) {
139 | def unapply(value: backend.BValue): Option[String] =
140 | backend.Extract.Object.unapply(value)
141 | .flatMap(_.get(discriminator))
142 | .flatMap(backend.Extract.String.unapply)
143 | }
144 |
145 | implicit def coproductReader[K <: Symbol, V, U, R <: Coproduct](implicit vr: Lazy[Reader[V]],
146 | tr: Lazy[Reader[R]],
147 | discriminator: Discriminator.Aux[U] = null,
148 | kw: Witness.Aux[K]): Reader[(FieldType[K, V] @@@ U) :+: R] = {
149 | val dkey = Option(discriminator).flatMap(_.value).getOrElse(defaultDiscriminatorKey)
150 | val ObjectWithDiscriminator = new ObjectWithDiscriminatorExtractor(dkey)
151 | Reader.reading[(FieldType[K, V] @@@ U) :+: R] {
152 | case bv@ObjectWithDiscriminator(key) =>
153 | if (key == kw.value.name) Inl[FieldType[K, V] @@@ U, R](SourceTypeTag[U].attachTo(field[K](vr.value.read(bv))))
154 | else Inr[FieldType[K, V] @@@ U, R](tr.value.read(bv))
155 | }.orThrowing(
156 | whenReading = "sealed trait hierarchy member",
157 | expected = s"object with discriminator key '$defaultDiscriminatorKey'"
158 | )
159 | }
160 |
161 | implicit val cnilReader: Reader[CNil] = Reader {
162 | case bv =>
163 | throw ReadException(
164 | reading = "sealed trait hierarchy member",
165 | expected = s"object with discriminator key '$defaultDiscriminatorKey' equal to known value",
166 | got = bv
167 | )
168 | }
169 | }
170 |
171 | trait ShapelessReaderWritersComponent extends ShapelessReaders with ShapelessWriters {
172 | this: BackendComponent with TypesComponent with SealedTraitDiscriminatorComponent
173 | with DefaultValuesComponent with ExceptionsComponent =>
174 | }
175 |
--------------------------------------------------------------------------------
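
Put together, these instances derive picklers for entire sealed hierarchies, including recursive ones. A round-trip sketch with the collections backend and the default `$variant` discriminator:

    import io.github.netvl.picopickle.backends.collections.CollectionsPickler._

    sealed trait Tree
    case object Leaf extends Tree
    case class Node(left: Tree, right: Tree) extends Tree

    val bv = write[Tree](Node(Leaf, Leaf))
    // Map("$variant" -> "Node",
    //     "left"  -> Map("$variant" -> "Leaf"),
    //     "right" -> Map("$variant" -> "Leaf"))
    read[Tree](bv)  // Node(Leaf, Leaf)
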
/core/src/main/scala/io/github/netvl/picopickle/types.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle
2 |
3 | import scala.annotation.implicitNotFound
4 |
5 | /**
6 | * Contains basic types used by the library: [[TypesComponent#Reader Reader]] and
7 | * [[TypesComponent#Writer Writer]], and basic constructors for them.
8 | *
9 | * Mixed into every [[Pickler]] object.
10 | */
11 | trait TypesComponent {
12 | this: BackendComponent with ExceptionsComponent with NullHandlerComponent =>
13 | /**
14 | * Convenient alias for [[scala.PartialFunction PartialFunction]].
15 | */
16 | final type PF[-A, +B] = PartialFunction[A, B]
17 |
18 | /**
19 | * A type class trait for writing objects of the specified type to their backend representation.
20 | *
21 | * All serialization is done by implicit instances of this trait.
22 | *
23 | * Most clients don't need to subclass this trait directly; use [[Writer$ Writer]] companion
24 | * object to create writers.
25 | *
26 | * Serialization is a success-only process: correctly written serializers will always succeed in writing
27 | * their objects. It is expected that [[Writer]] instances can handle ''all'' values of their source types,
28 | * that is, writers are like total functions.
29 | *
30 | * @tparam T source type
31 | */
32 | @implicitNotFound("Don't know how to write ${T}; make sure that an implicit `Writer[${T}]` is in scope")
33 | trait Writer[T] {
34 | /**
35 | * Serializes the given value into its backend representation.
36 | *
37 | * This method also accepts an ''accumulator'' parameter which is used
38 | * when serializing complex objects which require multiple serializers
39 | * to work together (for example, serializing `HList`s to obtain a map).
40 | *
41 | * This method shouldn't be invoked directly as it is for internal use. Use [[write]] method
42 | * instead.
43 | *
44 | * @param value a value to be serialized
45 | * @param acc possibly absent accumulator
46 | * @return serialized representation of `value`
47 | */
48 | def write0(value: T, acc: Option[backend.BValue]): backend.BValue
49 |
50 | /**
51 | * Wraps [[Writer.write0 write0]] call, passing [[scala.None None]] as the second argument.
52 | *
53 | * Just a shorthand for `write0(value, None)`. This is the main method which should be used
54 | * for writing objects.
55 | *
56 | * @param value a value to be serialized
57 | * @return serialized representation of `value`
58 | */
59 | final def write(value: T): backend.BValue = write0(value, None)
60 | }
61 |
62 | /**
63 | * Contains various constructors for custom [[Writer Writers]].
64 | */
65 | object Writer {
66 | /**
67 | * Creates a new writer from a function of type `T => (Option[backend.BValue] => backend.BValue)`.
68 | *
69 | * Mostly intended for internal use. Regular clients should use [[Writer$.apply apply]] method.
70 | *
71 | * @param ff a function defining writer behavior
72 | * @tparam T source type
73 | * @return a writer delegating to the provided function
74 | */
75 | def fromF0[T](ff: T => (Option[backend.BValue] => backend.BValue)): Writer[T] =
76 | new Writer[T] {
77 | override def write0(value: T, acc: Option[backend.BValue]): backend.BValue =
78 | nullHandler.toBackend[T](value, ff(_)(acc))
79 | }
80 |
81 | /**
82 | * Same as [[Writer$.fromF0 fromF0]], but does not delegate null handling to `NullHandler`.
83 | *
84 | * Mostly intended for internal use. Regular clients should use [[Writer$.apply apply]] method.
85 | *
86 | * @param ff a function defining writer behavior
87 | * @tparam T source type
88 | * @return a writer delegating to the provided function
89 | */
90 | def fromF0N[T](ff: T => (Option[backend.BValue] => backend.BValue)): Writer[T] =
91 | new Writer[T] {
92 | override def write0(value: T, acc: Option[backend.BValue]): backend.BValue =
93 | ff(value)(acc)
94 | }
95 |
96 | /**
97 | * Creates a new writer from a function of type `(T, Option[backend.BValue]) => backend.BValue`.
98 | *
99 | * Mostly intended for internal use. Regular clients should use [[Writer$.apply apply]] method.
100 | *
101 | * @param ff a function defining writer behavior
102 | * @tparam T source type
103 | * @return a writer delegating to the provided function
104 | */
105 | def fromF1[T](ff: (T, Option[backend.BValue]) => backend.BValue): Writer[T] =
106 | new Writer[T] {
107 | override def write0(value: T, acc: Option[backend.BValue]): backend.BValue =
108 | nullHandler.toBackend[T](value, ff(_, acc))
109 | }
110 |
111 | /**
112 | * Creates a new writer from a function of type `T => backend.BValue`.
113 | *
114 | * This is the main constructor for custom writers. The writers returned by this function ignore
115 | * the accumulator argument (as most writers should).
116 | *
117 | * An example:
118 | * {{{
119 | * case class A(x: Int, y: String)
120 | *
121 | * import backendConversionImplicits._
122 | * implicit val aWriter: Writer[A] = Writer {
123 | * case A(x, y) => Map("a" -> x.toBackend, "b" -> y.toBackend).toBackend
124 | * }
125 | * }}}
126 | *
127 | * As manual construction of complex objects may quickly turn very unwieldy, it is recommended
128 | * to use [[io.github.netvl.picopickle.ConvertersComponent converters]] instead.
129 | *
130 | * @param ff a function defining writer behavior
131 | * @tparam T source type
132 | * @return a writer delegating to the provided function
133 | */
134 | def apply[T](ff: T => backend.BValue): Writer[T] =
135 | new Writer[T] {
136 | override def write0(value: T, acc: Option[backend.BValue]): backend.BValue =
137 | nullHandler.toBackend[T](value, ff)
138 | }
139 | }
140 |
141 | /**
142 | * A type class for reading a backend representation into an object of the specified type.
143 | *
144 | * All deserialization is done by implicit instances of this trait.
145 | *
146 | * Most clients don't need to subclass this trait directly; use [[Reader$ Reader]] companion object
147 | * to create readers.
148 | *
149 | * The deserialization process can fail if its input is not valid. Consequently, readers are more like
150 | * partial functions: they can fail on certain inputs.
151 | *
152 | * @tparam T target type
153 | */
154 | @implicitNotFound("Don't know how to read ${T}; make sure that an implicit `Reader[${T}]` is in scope")
155 | trait Reader[T] { source =>
156 | /**
157 | * Checks if this reader can handle the provided value.
158 | *
159 | * @param value a backend value
160 | * @return `true` if this reader can read from `value`, `false` otherwise
161 | */
162 | def canRead(value: backend.BValue): Boolean
163 |
164 | /**
165 | * Deserializes the value of the specified type from the provided backend value.
166 | *
167 | * This method should fail with an exception if [[Reader.canRead canRead]] returns `false`
168 | * for this value, and it should return the deserialized object otherwise.
169 | *
170 | * @param value a backend value
171 | * @return deserialized variant of `value`
172 | */
173 | def read(value: backend.BValue): T
174 |
175 | /**
176 | * Deserializes the value of the specified type from the provided backend value or applies the given
177 | * function if it is impossible.
178 | *
179 | * This method is equivalent to `if (this.canRead(value)) this.read(value) else fallback(value)`
180 | * (which is in fact its default implementation) but it can be overridden to avoid excessive
181 | * checks. See the [[PartialFunction.applyOrElse]] method for a longer explanation.
182 | *
183 | * Readers created via [[Reader.apply]] and [[Reader.reading]] override this method to employ the
184 | * underlying partial function's `applyOrElse` method. Consider overriding this method in your
185 | * readers if for some reason you don't use `Reader.apply` or `Reader.reading`.
186 | *
187 | * @param value a backend value
188 | * @param fallback a fallback function
189 | * @return deserialized variant of `value` or the result of `fallback` application
190 | */
191 | def readOrElse(value: backend.BValue, fallback: backend.BValue => T): T =
192 | if (this.canRead(value)) this.read(value) else fallback(value)
193 |
194 | /**
195 | * Combines this reader with the specified fallback reader which is used if this reader can't
196 | * handle the provided value based on its [[Reader.canRead canRead]] result.
197 | *
198 | * @param other the fallback reader
199 | * @return a reader which delegates to this reader if this reader can deserialize a value
200 | * or to the `other` reader otherwise
201 | */
202 | final def orElse(other: Reader[T]): Reader[T] = new Reader[T] {
203 | override def canRead(value: backend.BValue) =
204 | source.canRead(value) || other.canRead(value)
205 | override def read(value: backend.BValue): T =
206 | source.readOrElse(value, other.read)
207 | }
208 |
209 | /**
210 | * A shorthand for `this.orElse(Reader(other))`, where `other` is a partial function.
211 | * See [[Reader.apply]].
212 | *
213 | * @param other a partial function which is used to create the fallback reader
214 | * @return see another `orElse` method
215 | */
216 | final def orElse(other: PF[backend.BValue, T]): Reader[T] = this orElse Reader(other)
217 |
218 | /**
219 | * Returns a reader which applies the given function to the result of the deserialization.
220 | *
221 | * @param f a transformation function
222 | * @tparam U result type
223 | * @return a reader which reads a value of type `T` and then applies `f` to obtain a value of type `U`
224 | */
225 | final def andThen[U](f: T => U): Reader[U] = new Reader[U] {
226 | override def canRead(value: backend.BValue) = source.canRead(value)
227 | override def read(value: backend.BValue): U = f(source.read(value))
228 | override def readOrElse(value: backend.BValue, fallback: backend.BValue => U): U = {
229 | val result = source.readOrElse(value, checkFallback[T])
230 | if (!fallbackOccured(result)) f(result) else fallback(value)
231 | }
232 | }
233 | }
234 |
235 | private[this] val fallbackF: Any => Any = _ => fallbackF
236 | private def checkFallback[B] = fallbackF.asInstanceOf[Any => B]
237 | private def fallbackOccured[B](x: B) = fallbackF eq x.asInstanceOf[AnyRef]
238 |
239 | /**
240 | * Contains various constructors for custom [[Reader Readers]].
241 | */
242 | object Reader {
243 | /**
244 | * Creates a reader using the given partial function.
245 | *
246 | * This is the main constructor for custom readers. The provided partial function is used
247 | * to reconstruct a value from its backend representation. [[Reader.canRead canRead]] method
248 | * of the constructed reader delegates to [[scala.PartialFunction.isDefinedAt isDefinedAt]] on
249 | * the partial function.
250 | *
251 | * An example:
252 | * {{{
253 | * case class A(x: Int, y: String)
254 | *
255 | * implicit val aReader: Reader[A] = Reader {
256 | * case backend.Extract.Object(m) if m.contains("a") && m.contains("b") &&
257 | * backend.getNumber(m("a")).isDefined &&
258 | * backend.getString(m("b")).isDefined =>
259 | * A(
260 | * backend.Extract.Number.unapply(m("a")).get,
261 | * backend.Extract.String.unapply(m("b")).get
262 | * )
263 | * }
264 | * }}}
265 | *
266 | * As manual deconstruction of complex objects may quickly turn very unwieldy, it is recommended
267 | * to use [[io.github.netvl.picopickle.ConvertersComponent converters]] instead.
268 | *
269 | * @param f a partial function from backend representation to the target type
270 | * @tparam T target type
271 | * @return a reader delegating to the provided function.
272 | */
273 | def apply[T](f: PF[backend.BValue, T]): Reader[T] =
274 | new Reader[T] {
275 | override def canRead(value: backend.BValue) = nullHandler.canRead(value, f.isDefinedAt)
276 |
277 | override def read(value: backend.BValue): T =
278 | readOrElse(value, defaultReadError)
279 |
280 | override def readOrElse(value: backend.BValue, fallback: backend.BValue => T): T =
281 | nullHandler.fromBackend(value, v => f.applyOrElse(v, fallback))
282 | }
283 |
284 | def reading[T](f: PF[backend.BValue, T]): ReaderBuilder[T] = new ReaderBuilder[T](f)
285 |
286 | class ReaderBuilder[T](f: PF[backend.BValue, T]) {
287 | def orThrowing(fmt: backend.BValue => String): Reader[T] = Reader(f orElse {
288 | case value => customReadError(fmt)(value)
289 | })
290 | def orThrowing(whenReading: => String, expected: => String): Reader[T] = Reader(f orElse {
291 | case value => parameterizedReadError(whenReading, expected)(value)
292 | })
293 | }
294 | }
295 |
296 | type ReadWriter[T] = Reader[T] with Writer[T]
297 |
298 | object ReadWriter {
299 | def apply[T](implicit r: Reader[T], w: Writer[T]): ReadWriter[T] = new Reader[T] with Writer[T] {
300 | override def canRead(value: backend.BValue) = r.canRead(value)
301 | override def read(value: backend.BValue) = r.read(value)
302 | override def readOrElse(value: backend.BValue, fallback: backend.BValue => T) = r.readOrElse(value, fallback)
303 | override def write0(value: T, acc: Option[backend.BValue]) = w.write0(value, acc)
304 | }
305 |
306 | def reading[T](rf: PF[backend.BValue, T]) = new WriterBuilder(rf, defaultReadError)
307 | def writing[T](wf: T => backend.BValue) = new ReaderBuilder(wf)
308 |
309 | class WriterBuilder[T](rf: PF[backend.BValue, T], error: backend.BValue => Nothing) {
310 | def writing(wf: T => backend.BValue) = new PfReadWriter[T](rf, wf, error)
311 | def orThrowing(whenReading: => String, expected: => String) = new WriterBuilder[T](rf, parameterizedReadError(whenReading, expected))
312 | def orThrowing(fmt: backend.BValue => String) = new WriterBuilder[T](rf, customReadError(fmt))
313 | }
314 |
315 | class ReaderBuilder[T](wf: T => backend.BValue) {
316 | def reading(rf: PF[backend.BValue, T]) = new PfReadWriter[T](rf, wf, defaultReadError)
317 | }
318 |
319 | class PfReadWriter[T] private[ReadWriter] (rf: PF[backend.BValue, T],
320 | wf: T => backend.BValue,
321 | error: backend.BValue => Nothing) extends Reader[T] with Writer[T] {
322 | def orThrowing(whenReading: => String, expected: => String): ReadWriter[T] =
323 | new PfReadWriter[T](rf, wf, parameterizedReadError(whenReading, expected))
324 | def orThrowing(fmt: backend.BValue => String): ReadWriter[T] =
325 | new PfReadWriter[T](rf, wf, customReadError(fmt))
326 |
327 | override def canRead(value: backend.BValue) = nullHandler.canRead(value, rf.isDefinedAt)
328 |
329 | override def read(value: backend.BValue) =
330 | readOrElse(value, error)
331 |
332 | override def readOrElse(value: backend.BValue, fallback: backend.BValue => T) =
333 | nullHandler.fromBackend(value, v => rf.applyOrElse(v, fallback))
334 |
335 | override def write0(value: T, acc: Option[backend.BValue]) = nullHandler.toBackend(value, wf)
336 | }
337 | }
338 |
339 | private def defaultReadError(v: backend.BValue): Nothing =
340 | throw ReadException(s"unexpected backend value: $v", data = v)
341 | private def parameterizedReadError(reading: => String, expected: => String)(value: backend.BValue): Nothing =
342 | throw ReadException(reading, expected, value)
343 | private def customReadError(fmt: backend.BValue => String)(value: backend.BValue): Nothing =
344 | throw ReadException(fmt(value), data = value)
345 | }
346 |
347 |
--------------------------------------------------------------------------------
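
A sketch of the `ReadWriter` builder DSL defined above, assuming the collections backend; `reading`, `orThrowing` and `writing` chain exactly as `WriterBuilder`/`PfReadWriter` allow:

    import io.github.netvl.picopickle.backends.collections.CollectionsPickler._

    // a custom ReadWriter for an inclusive step-1 integer range,
    // stored as a two-element array of its bounds
    implicit val rangeRW: ReadWriter[Range] =
      ReadWriter.reading[Range] {
        case backend.Extract.Array(Vector(backend.Extract.Number(a), backend.Extract.Number(b))) =>
          a.intValue to b.intValue
      }.orThrowing(whenReading = "range", expected = "array of two numbers")
       .writing(r => backend.makeArray(Vector(backend.makeNumber(r.start), backend.makeNumber(r.end))))
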
/core/src/main/scala/io/github/netvl/picopickle/utils/DoubleOrStringNumberRepr.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle.utils
2 |
3 | import io.github.netvl.picopickle.Backend
4 |
5 | import scala.util.Try
6 |
7 | trait DoubleOrStringNumberRepr {
8 | this: Backend =>
9 |
10 | protected final val MaxLongInDouble: Long = 1L << 53 // Double has 53 bits of precision
11 |
12 | protected def numberToBackendNumberOrString(value: Any): BValue = value match {
13 | case x @ Double.PositiveInfinity => makeString(x.toString)
14 | case x @ Double.NegativeInfinity => makeString(x.toString)
15 | case x: Double if x.isNaN => makeString(x.toString)
16 | case x @ Float.PositiveInfinity => makeString(x.toString)
17 | case x @ Float.NegativeInfinity => makeString(x.toString)
18 | case x: Float if x.isNaN => makeString(x.toString)
19 | // those longs which do not fit into double
20 | case x: Long if x.abs > MaxLongInDouble => makeString(x.toString)
21 | case x: Number => makeNumber(x)
22 | }
23 |
24 | protected def doubleOrStringFromBackendNumberOrString: PartialFunction[BValue, Number] = {
25 | case Extract.Number(n) => n
26 | case Extract.String(s)
27 | if s == Double.PositiveInfinity.toString ||
28 | s == Double.NegativeInfinity.toString ||
29 | s == Double.NaN.toString => s.toDouble
30 | case Extract.String(s) if Try(s.toLong).isSuccess => s.toLong // handles big longs
31 | }
32 |
33 | protected def backendNumberOrStringExpected: String = "number or string containing a number"
34 | }
35 |
--------------------------------------------------------------------------------
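
This representation is what lets a double-based AST (like the jawn backend's below, whose numbers are `Double`) round-trip longs that exceed 53 bits of precision; a sketch assuming the jawn backend's `JsonPickler` object with its `writeString`/`readString` helpers:

    import io.github.netvl.picopickle.backends.jawn.JsonPickler._

    // 2^60 does not fit into a Double, so it is written as a JSON string
    writeString(1L << 60)                        // "\"1152921504606846976\""
    readString[Long]("\"1152921504606846976\"")  // 1152921504606846976L
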
/core/src/main/scala/io/github/netvl/picopickle/values.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle
2 |
3 | import shapeless._
4 | import shapeless.ops.hlist.IsHCons
5 |
6 | trait ValueClassReaders {
7 | this: TypesComponent =>
8 |
9 | implicit def valueClassReader[T <: AnyVal, R <: HList, V](implicit gen: Generic.Aux[T, R],
10 | isHCons: IsHCons.Aux[R, V, HNil],
11 | ev: (V :: HNil) =:= R,
12 | vr: Reader[V]): Reader[T] =
13 | vr.andThen { value => gen.from(value :: HNil) }
14 | }
15 |
16 | trait ValueClassWriters {
17 | this: TypesComponent =>
18 |
19 | implicit def valueClassWriter[T <: AnyVal, R <: HList, V](implicit gen: Generic.Aux[T, R],
20 | isHCons: IsHCons.Aux[R, V, HNil],
21 | vw: Writer[V]): Writer[T] =
22 | Writer(t => vw.write(gen.to(t).head))
23 | }
24 |
25 | trait ValueClassReaderWritersComponent extends ValueClassReaders with ValueClassWriters {
26 | this: TypesComponent =>
27 | }
28 |
--------------------------------------------------------------------------------
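
Value-class support is not part of `DefaultPickler` (see pickler.scala above), so it has to be mixed in explicitly. A hypothetical pickler, assuming the value-class instances take precedence over the generic case-class derivation:

    import io.github.netvl.picopickle.ValueClassReaderWritersComponent
    import io.github.netvl.picopickle.backends.collections.CollectionsPickler

    object ValuePickler extends CollectionsPickler with ValueClassReaderWritersComponent

    case class UserId(id: String) extends AnyVal

    // unwrapped to the underlying field rather than a one-key object
    ValuePickler.write(UserId("u-42"))  // "u-42", not Map("id" -> "u-42")
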
/core/src/test/scala/io/github/netvl/picopickle/ConvertersTestBase.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle
2 |
3 | import scala.collection.immutable.{TreeMap, TreeSet}
4 | import scala.collection.mutable
5 | import shapeless._
6 |
7 | import org.scalatest.{FreeSpec, Matchers}
8 |
9 | object ConvertersTestBase {
10 | object ComplexObjects {
11 | case class A(x: Int, y: String, z: B)
12 | case class B(a: Boolean, b: Double)
13 | }
14 | }
15 |
16 | trait ConvertersTestBase extends FreeSpec with Matchers with DefaultPickler {
17 | this: BackendComponent =>
18 |
19 | import backend._
20 | import conversionImplicits._
21 | import converters._
22 | import ConvertersTestBase.ComplexObjects._
23 |
24 | def backendName: String = "backend"
25 |
26 | "Converters" - {
27 | s"should convert to and from the $backendName representation" - {
28 | "null" in {
29 | `null`.isDefinedAt(makeNull) shouldBe true
30 | (`null`.fromBackend(makeNull): Any) shouldEqual (null: Any)
31 |
32 | `null`.isDefinedAt("something".toBackend) shouldBe false
33 |
34 | `null`.toBackend(null) shouldEqual (makeNull: Any)
35 | }
36 |
37 | "booleans" in {
38 | bool.isDefinedAt(true.toBackend) shouldBe true
39 | bool.fromBackend(true.toBackend) shouldBe true
40 |
41 | bool.isDefinedAt(false.toBackend) shouldBe true
42 | bool.fromBackend(false.toBackend) shouldBe false
43 |
44 | bool.isDefinedAt("something".toBackend) shouldBe false
45 |
46 | bool.toBackend(true) shouldBe true.toBackend
47 | bool.toBackend(false) shouldBe false.toBackend
48 | }
49 |
50 | "numbers" in {
51 | num.isDefinedAt((1: Int).toBackend) shouldBe true
52 | num.fromBackend((1: Int).toBackend) shouldEqual 1
53 |
54 | num.isDefinedAt((10.1: Double).toBackend) shouldBe true
55 | num.fromBackend((10.1: Double).toBackend) shouldEqual 10.1
56 |
57 | num.isDefinedAt("something".toBackend) shouldBe false
58 |
59 | num.toBackend(133: Long) shouldEqual 133L.toBackend
60 | num.toBackend(42.2f) shouldEqual 42.2f.toBackend
61 | }
62 |
63 | "strings" in {
64 | str.isDefinedAt("abcde".toBackend) shouldBe true
65 | str.fromBackend("abcde".toBackend) shouldEqual "abcde"
66 |
67 | str.isDefinedAt(12345.toBackend) shouldBe false
68 |
69 | str.toBackend("hello") shouldEqual "hello".toBackend
70 | }
71 |
72 | "objects" in {
73 | val m = obj {
74 | ("a" -> num.int) ::
75 | ("b" -> str) ::
76 | HNil
77 | }
78 |
79 | val t1 = Map("a" -> 123.toBackend, "b" -> "hello".toBackend).toBackend
80 | val t2 = Map("a" -> false.toBackend, "b" -> "hello".toBackend).toBackend
81 | val t3 = Map("b" -> "hello".toBackend).toBackend
82 | val t4 = Map("a" -> 342.toBackend, "b" -> "goodbye".toBackend, "c" -> false.toBackend).toBackend
83 |
84 | m.isDefinedAt(t1) shouldBe true
85 | m.fromBackend(t1) shouldEqual (123 :: "hello" :: HNil)
86 |
87 | m.isDefinedAt(t2) shouldBe false
88 | m.isDefinedAt(t3) shouldBe false
89 |
90 | m.isDefinedAt(t4) shouldBe true
91 | m.fromBackend(t4) shouldEqual (342 :: "goodbye" :: HNil)
92 |
93 | m.toBackend(234 :: "blabla" :: HNil) shouldEqual Map("a" -> 234.toBackend, "b" -> "blabla".toBackend).toBackend
94 | }
95 |
96 | "complex classes" in {
97 |
98 | val bc: Converter.Id[B] = unlift(B.unapply) >>> obj {
99 | "a" -> bool ::
100 | "b" -> num.double ::
101 | HNil
102 | } >>> B.apply _
103 |
104 | val ac: Converter.Id[A] = unlift(A.unapply) >>> obj {
105 | "x" -> num.int ::
106 | "y" -> str ::
107 | "z" -> bc ::
108 | HNil
109 | } >>> A.apply _
110 |
111 | val s = A(
112 | 10,
113 | "hello",
114 | B(true, 42.4)
115 | )
116 |
117 | val t = Map(
118 | "x" -> 10.toBackend,
119 | "y" -> "hello".toBackend,
120 | "z" -> Map(
121 | "a" -> true.toBackend,
122 | "b" -> 42.4.toBackend
123 | ).toBackend
124 | ).toBackend
125 |
126 | ac.isDefinedAt(t) shouldBe true
127 | ac.fromBackend(t) shouldEqual s
128 |
129 | ac.toBackend(s) shouldEqual t
130 | }
131 |
132 | "homogeneous arrays" in {
133 | val cv = arr.as[Vector] of num.int
134 | val cs = arr.as[Set] of num.int
135 |
136 | val c1 = Vector(1.toBackend, 2.toBackend, 3.toBackend).toBackend
137 | cv.isDefinedAt(c1) shouldBe true
138 | cv.fromBackend(c1) shouldEqual Vector(1, 2, 3)
139 | cv.toBackend(Vector(1, 2, 3)) shouldEqual c1
140 | cs.isDefinedAt(c1) shouldBe true
141 | cs.fromBackend(c1) shouldEqual Set(1, 2, 3)
142 | cs.toBackend(TreeSet(1, 2, 3)) shouldEqual c1
143 |
144 | val c2 = Vector("a".toBackend, "e".toBackend).toBackend
145 | cv.isDefinedAt(c2) shouldBe false
146 | cs.isDefinedAt(c2) shouldBe false
147 | }
148 |
149 | "heterogenous arrays" in {
150 | val ma = arr(str :: num :: arr.as[Set].of(bool) :: HNil)
151 | val me = arr(HNil: HNil)
152 |
153 | val c1 = Vector("a".toBackend, 1.toBackend, Vector(false.toBackend, true.toBackend).toBackend).toBackend
154 | val r1 = "a" :: (1: Number) :: Set(true, false) :: HNil
155 | ma.isDefinedAt(c1) shouldBe true
156 | ma.fromBackend(c1) shouldEqual r1
157 | ma.toBackend("a" :: (1: Number) :: TreeSet(false, true) :: HNil) shouldEqual c1
158 |
159 | val c2 = Vector("too small".toBackend).toBackend
160 | ma.isDefinedAt(c2) shouldBe false
161 |
162 | val c3 = Vector(
163 | "too large".toBackend, 1.toBackend, Vector(true.toBackend).toBackend, "a".toBackend,
164 | 34.toBackend, 22.9.toBackend, "zzz".toBackend
165 | ).toBackend
166 | val r3 = "too large" :: (1: Number) :: Set(true) :: HNil
167 | ma.isDefinedAt(c3) shouldBe true
168 | ma.fromBackend(c3) shouldEqual r3
169 | ma.toBackend(r3) shouldEqual Vector("too large".toBackend, 1.toBackend, Vector(true.toBackend).toBackend).toBackend
170 |
171 | val c4 = Vector("incorrect types".toBackend, true.toBackend, Vector(false.toBackend).toBackend).toBackend
172 | ma.isDefinedAt(c4) shouldBe false
173 |
174 | val c5 = Vector().toBackend // empty
175 | me.isDefinedAt(c1) shouldBe true
176 | me.fromBackend(c1) shouldEqual HNil
177 | me.isDefinedAt(c5) shouldBe true
178 | me.fromBackend(c5) shouldEqual HNil
179 | me.toBackend(HNil) shouldEqual c5
180 | }
181 |
182 | "object as map" in {
183 | val mm = obj.as[Map] to num.double
184 | val mt = obj.as[TreeMap] to num.double
185 |
186 | val t1 = Map.empty[String, BValue].toBackend
187 | mm.isDefinedAt(t1) shouldBe true
188 | mm.fromBackend(t1) shouldBe 'empty
189 | mm.toBackend(Map.empty) shouldEqual t1
190 | mt.isDefinedAt(t1) shouldBe true
191 | mt.fromBackend(t1) shouldBe 'empty
192 | mt.toBackend(TreeMap.empty) shouldEqual t1
193 |
194 | val t2 = Map[String, BValue]("a" -> 12.3.toBackend, "b" -> 13.4.toBackend).toBackend
195 | val s2m = Map("a" -> 12.3, "b" -> 13.4)
196 | val s2t = TreeMap("a" -> 12.3, "b" -> 13.4)
197 | mm.isDefinedAt(t2) shouldBe true
198 | mm.fromBackend(t2) shouldEqual s2m
199 | mm.toBackend(s2m) shouldEqual t2
200 | mt.isDefinedAt(t2) shouldBe true
201 | mt.fromBackend(t2) shouldEqual s2t
202 | mt.toBackend(s2t) shouldEqual t2
203 |
204 | val t3 = Map[String, BValue]("a" -> true.toBackend, "b" -> Vector(1.toBackend).toBackend).toBackend
205 | mm.isDefinedAt(t3) shouldBe false
206 | mt.isDefinedAt(t3) shouldBe false
207 | }
208 |
209 | "autoconverted classes" in {
210 |
211 | val m =
212 | {
213 | (k: String, vs: mutable.LinkedHashSet[A]) => k :: vs :: HNil
214 | }.tupled >> obj(
215 | "k" -> str ::
216 | "vs" -> arr.as[mutable.LinkedHashSet].of(converters.value[A]) ::
217 | HNil
218 | ) >> {
219 | case k :: vs :: HNil => (k, vs)
220 | }
221 |
222 | val t1 = Map(
223 | "k" -> "hello".toBackend,
224 | "vs" -> Vector(
225 | Map(
226 | "x" -> 10.toBackend,
227 | "y" -> "hello".toBackend,
228 | "z" -> Map(
229 | "a" -> true.toBackend,
230 | "b" -> 42.4.toBackend
231 | ).toBackend
232 | ).toBackend,
233 | Map(
234 | "x" -> 11.toBackend,
235 | "y" -> "bye".toBackend,
236 | "z" -> Map(
237 | "a" -> false.toBackend,
238 | "b" -> (-42.4).toBackend
239 | ).toBackend
240 | ).toBackend
241 | ).toBackend
242 | ).toBackend
243 | val r1 = ("hello", mutable.LinkedHashSet(A(10, "hello", B(true, 42.4)), A(11, "bye", B(false, -42.4))))
244 | m.isDefinedAt(t1) shouldBe true
245 | m.fromBackend(t1) shouldEqual r1
246 | m.toBackend(r1) shouldEqual t1
247 | }
248 | }
249 | }
250 | }
251 |
--------------------------------------------------------------------------------
/core/src/test/scala/io/github/netvl/picopickle/Fixtures.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle
2 |
3 | object Fixtures {
4 | object CaseClass {
5 | case class A(x: Int, y: String)
6 | }
7 |
8 | object CaseObject {
9 | case object A
10 | }
11 |
12 | object SealedTrait {
13 | sealed trait Root
14 | case class A(x: Int, y: String) extends Root
15 | case class B(a: Long, b: Vector[Double]) extends Root
16 | case object C extends Root
17 | }
18 |
19 | object Recursives {
20 | sealed trait Root
21 | case object A extends Root
22 | case class B(x: Int, b: Option[B]) extends Root
23 | case class C(next: Root) extends Root
24 | }
25 |
26 | object Renames {
27 | sealed trait Root
28 | @key("0") case object A extends Root
29 | case class B(x: Int, @key("zzz") y: String) extends Root
30 | }
31 |
32 | object CustomDiscriminatorKeys {
33 | @discriminator("whatever") sealed trait Root
34 | case object A extends Root
35 | case class B(x: Int) extends Root
36 | }
37 |
38 | object Defaults {
39 | sealed trait Root
40 | case class A(x: Int, name: String = "me", enabled: Boolean = false) extends Root
41 | }
42 |
43 | object WithOverloadedApplyMethod {
44 | case class A(x: Int, y: String, z: Double = 12.3)
45 |
46 | object A {
47 | def apply(s: String): A = A(s.toInt, s, s.toDouble)
48 | }
49 | }
50 |
51 | object WithOverloadedConstructor {
52 | case class A(x: Int, y: String, z: Double = 12.3) {
53 | def this(s: String) = this(s.toInt, s, s.toDouble)
54 | }
55 | }
56 |
57 | object WithVarargs {
58 | case class A(x: Int, y: String*)
59 | }
60 |
61 | object ValueClass {
62 | case class A(value: String) extends AnyVal
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/core/src/test/scala/io/github/netvl/picopickle/backends/collections/CollectionsConvertersTest.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle.backends.collections
2 |
3 | import io.github.netvl.picopickle.ConvertersTestBase
4 |
5 | class CollectionsConvertersTest extends ConvertersTestBase with CollectionsPickler {
6 | override lazy val backendName: String = "collections"
7 | }
8 |
--------------------------------------------------------------------------------
/jawn/src/main/scala/io/github/netvl/picopickle/backends/jawn/ast.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle.backends.jawn
2 |
3 | import jawn.{FContext, Facade}
4 | import shapeless.syntax.typeable._
5 |
6 | import io.github.netvl.picopickle.Backend
7 | import io.github.netvl.picopickle.utils.DoubleOrStringNumberRepr
8 |
9 | import scala.annotation.switch
10 | import scala.collection.mutable
11 |
12 | object JsonAst {
13 | sealed trait JsonValue
14 |
15 | case class JsonObject(values: Map[String, JsonValue]) extends JsonValue
16 | case class JsonArray(values: Vector[JsonValue]) extends JsonValue
17 | case class JsonString(value: String) extends JsonValue
18 | case class JsonNumber(value: Double) extends JsonValue
19 |
20 | sealed trait JsonBoolean extends JsonValue
21 | case object JsonTrue extends JsonBoolean
22 | case object JsonFalse extends JsonBoolean
23 |
24 | case object JsonNull extends JsonValue
25 |
26 | object Backend extends Backend with DoubleOrStringNumberRepr {
27 | override type BValue = JsonValue
28 | override type BObject = JsonObject
29 | override type BArray = JsonArray
30 | override type BString = JsonString
31 | override type BNumber = JsonNumber
32 | override type BBoolean = JsonBoolean
33 | override type BNull = JsonNull.type
34 |
35 | override def fromObject(obj: BObject): Map[String, BValue] = obj.values
36 | override def makeObject(m: Map[String, BValue]): BObject = JsonObject(m)
37 | override def getObject(value: BValue): Option[BObject] = value.cast[JsonObject]
38 |
39 | override def getObjectKey(obj: BObject, key: String): Option[BValue] =
40 | obj.values.get(key)
41 | override def setObjectKey(obj: BObject, key: String, value: BValue): BObject =
42 | obj.copy(values = obj.values + (key -> value))
43 | override def containsObjectKey(obj: BObject, key: String): Boolean =
44 | obj.values.contains(key)
45 | override def removeObjectKey(obj: BObject, key: String): BObject =
46 | obj.copy(values = obj.values - key)
47 |
48 | override def fromArray(arr: BArray): Vector[BValue] = arr.values
49 | override def makeArray(v: Vector[BValue]): BArray = JsonArray(v)
50 | override def getArray(value: BValue): Option[BArray] = value.cast[JsonArray]
51 | override def pushToArray(arr: BArray, value: BValue) = JsonArray(arr.values :+ value)
52 |
53 | override def getArrayLength(arr: BArray): Int = arr.values.length
54 | override def getArrayValueAt(arr: BArray, idx: Int): BValue = arr.values(idx)
55 |
56 | override def fromString(str: BString): String = str.value
57 | override def makeString(s: String): BString = JsonString(s)
58 | override def getString(value: BValue): Option[BString] = value.cast[JsonString]
59 |
60 | override def fromNumber(num: BNumber): Number = num.value
61 | override def makeNumber(n: Number): BNumber = JsonNumber(n.doubleValue())
62 | override def getNumber(value: BValue): Option[BNumber] = value.cast[JsonNumber]
63 |
64 | override def makeNumberAccurately(n: Number): BValue = numberToBackendNumberOrString(n)
65 | override def fromNumberAccurately: PartialFunction[BValue, Number] = doubleOrStringFromBackendNumberOrString
66 | override def fromNumberAccuratelyExpected: String = backendNumberOrStringExpected
67 |
68 | override def fromBoolean(bool: BBoolean): Boolean = bool match {
69 | case JsonTrue => true
70 | case JsonFalse => false
71 | }
72 | override def makeBoolean(b: Boolean): BBoolean = if (b) JsonTrue else JsonFalse
73 | override def getBoolean(value: BValue): Option[BBoolean] = value.cast[JsonBoolean]
74 |
75 | override def makeNull: BNull = JsonNull
76 | override def getNull(value: BValue): Option[BNull] = value.cast[JsonNull.type]
77 | }
78 | }
79 |
80 | object JawnFacade extends MutableFacade[JsonAst.JsonValue] {
81 | import JsonAst._
82 |
83 | override def jobject(vs: mutable.Builder[(String, JsonValue), Map[String, JsonValue]]): JsonValue =
84 | JsonObject(vs.result())
85 | override def jarray(vs: mutable.Builder[JsonValue, Vector[JsonValue]]): JsonValue =
86 | JsonArray(vs.result())
87 |
88 | override def jnum(s: CharSequence, decIndex: Int, expIndex: Int): JsonValue = JsonNumber(s.toString.toDouble)
89 |
90 | override def jstring(s: CharSequence): JsonValue = JsonString(s.toString)
91 |
92 | override def jtrue(): JsonValue = JsonTrue
93 | override def jfalse(): JsonValue = JsonFalse
94 |
95 | override def jnull(): JsonValue = JsonNull
96 | }
97 |
98 | private[jawn] trait MutableFacade[J] extends Facade[J] {
99 | def jobject(vs: mutable.Builder[(String, J), Map[String, J]]): J
100 | def jarray(vs: mutable.Builder[J, Vector[J]]): J
101 |
102 | override def singleContext(): FContext[J] = new FContext[J] {
103 | private var value: J = _
104 | override def isObj: Boolean = false
105 | override def add(s: CharSequence): Unit = value = jstring(s)
106 | override def add(v: J): Unit = value = v
107 | override def finish: J = value
108 | }
109 |
110 | override def objectContext(): FContext[J] = new FContext[J] {
111 | private var key: String = null
112 | private val builder = Map.newBuilder[String, J]
113 | override def isObj: Boolean = true
114 | override def add(s: CharSequence): Unit =
115 | if (key == null) key = s.toString
116 | else {
117 | builder += key -> jstring(s)
118 | key = null
119 | }
120 | override def add(v: J): Unit = {
121 | builder += key -> v
122 | key = null
123 | }
124 | override def finish: J = jobject(builder)
125 | }
126 |
127 | override def arrayContext(): FContext[J] = new FContext[J] {
128 | private val builder = Vector.newBuilder[J]
129 | override def isObj: Boolean = false
130 | override def add(s: CharSequence): Unit = builder += jstring(s)
131 | override def add(v: J): Unit = builder += v
132 | override def finish: J = jarray(builder)
133 | }
134 | }
135 |
136 | // Heavily based on upickle's (https://github.com/lihaoyi/upickle) JSON renderer
137 | object JsonRenderer {
138 | import JsonAst._
139 |
140 | def render(value: JsonValue): String = {
141 | val sb = new StringBuilder
142 | render(sb, value)
143 | sb.toString()
144 | }
145 |
146 | private def render(sb: StringBuilder, value: JsonValue): Unit = {
147 | value match {
148 | case JsonNull => sb.append("null")
149 | case JsonTrue => sb.append("true")
150 | case JsonFalse => sb.append("false")
151 | case JsonNumber(n) => sb.append(if (n.isWhole()) n.toLong.toString else n.toString)
152 | case JsonString(s) => renderString(sb, s)
153 | case JsonArray(arr) => renderArray(sb, arr)
154 | case JsonObject(obj) => renderObject(sb, obj)
155 | }
156 | }
157 |
158 | private def renderArray(sb: StringBuilder, arr: Vector[JsonValue]): Unit = {
159 | if (arr.isEmpty) sb.append("[]")
160 | else {
161 | val it = arr.iterator
162 | sb.append("[")
163 | render(sb, it.next())
164 | while (it.hasNext) {
165 | sb.append(",")
166 | render(sb, it.next())
167 | }
168 | sb.append("]")
169 | }
170 | }
171 |
172 | private def renderObject(sb: StringBuilder, obj: Map[String, JsonValue]): Unit = {
173 | if (obj.isEmpty) sb.append("{}")
174 | else {
175 | val it = obj.iterator
176 | sb.append("{")
177 | val (k0, v0) = it.next()
178 | renderString(sb, k0)
179 | sb.append(":")
180 | render(sb, v0)
181 | while (it.hasNext) {
182 | val (k, v) = it.next()
183 | sb.append(",")
184 | renderString(sb, k)
185 | sb.append(":")
186 | render(sb, v)
187 | }
188 | sb.append("}")
189 | }
190 | }
191 |
192 | private def renderString(sb: StringBuilder, s: String): Unit = {
193 | sb.append('"')
194 | var i = 0
195 | val len = s.length
196 | while (i < len) {
197 | (s.charAt(i): @switch) match {
198 | case '"' => sb.append("\\\"")
199 | case '\\' => sb.append("\\\\")
200 | case '\b' => sb.append("\\b")
201 | case '\f' => sb.append("\\f")
202 | case '\n' => sb.append("\\n")
203 | case '\r' => sb.append("\\r")
204 | case '\t' => sb.append("\\t")
205 | case c =>
206 | if (c < ' ') sb.append(f"\\u${c.toInt}%04x")
207 | else sb.append(c)
208 | }
209 | i += 1
210 | }
211 | sb.append('"')
212 | }
213 | }
214 |
--------------------------------------------------------------------------------
/jawn/src/main/scala/io/github/netvl/picopickle/backends/jawn/json.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle.backends.jawn
2 |
3 | import io.github.netvl.picopickle._
4 |
5 | import scala.util.{Try, Success, Failure}
6 |
7 | trait JsonBackendComponent extends BackendComponent {
8 | override val backend = JsonAst.Backend
9 | }
10 |
11 | trait JsonExceptionsComponent extends ExceptionsComponent {
12 | this: BackendComponent =>
13 |
14 | case class JsonParseException(message: String, cause: Throwable)
15 | extends BaseException(message, cause)
16 | }
17 |
18 | trait JsonStringSerializationComponent {
19 | self: Pickler with TypesComponent with JsonBackendComponent with JsonExceptionsComponent =>
20 |
21 | def readAst(str: String): JsonAst.JsonValue = jawn.Parser.parseFromString(str)(JawnFacade) match {
22 | case Success(r) => r
23 | case Failure(e) => throw JsonParseException(s"invalid JSON: $str", e)
24 | }
25 | def writeAst(ast: JsonAst.JsonValue): String = JsonRenderer.render(ast)
26 |
27 | def readString[T: Reader](str: String): T = read[T](readAst(str))
28 | def tryReadString[T: Reader](str: String): Try[T] = Try(readString[T](str))
29 |
30 | def writeString[T: Writer](value: T): String = writeAst(write(value))
31 |
32 | class JsonSerializer[T: Reader: Writer] extends Serializer[T] {
33 | def readString(str: String): T = self.readString[T](str)
34 | def tryReadString(str: String): Try[T] = self.tryReadString[T](str)
35 |
36 | def writeString(value: T): String = self.writeString(value)
37 | }
38 |
39 | override def serializer[T: Reader: Writer]: JsonSerializer[T] = new JsonSerializer[T]
40 | }
41 |
42 | trait JsonPickler
43 | extends DefaultPickler
44 | with JsonBackendComponent
45 | with JsonStringSerializationComponent
46 | with JsonExceptionsComponent
47 |
48 | object JsonPickler extends JsonPickler
49 |
--------------------------------------------------------------------------------
/jawn/src/test/scala/io/github/netvl/picopickle/backends/jawn/JsonConvertersTest.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle.backends.jawn
2 |
3 | import io.github.netvl.picopickle.ConvertersTestBase
4 |
5 | class JsonConvertersTest extends ConvertersTestBase with JsonPickler {
6 | override lazy val backendName: String = "JSON"
7 | }
8 |
--------------------------------------------------------------------------------
/mongodb/src/main/scala/io/github/netvl/picopickle/backends/mongodb/backend.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle.backends.mongodb
2 |
3 | import io.github.netvl.picopickle.Backend
4 | import org.bson._
5 | import org.bson.types.ObjectId
6 | import scala.collection.convert.decorateAll._
7 | import shapeless.syntax.typeable._
8 |
9 | object MongodbBsonBackend extends Backend {
10 | override type BValue = BsonValue
11 | override type BObject = BsonDocument
12 | override type BArray = BsonArray
13 | override type BString = BsonString
14 | override type BNumber = BsonNumber
15 | override type BBoolean = BsonBoolean
16 | override type BNull = BsonNull
17 |
18 | type BObjectId = BsonObjectId
19 | type BInt32 = BsonInt32
20 | type BInt64 = BsonInt64
21 | type BDouble = BsonDouble
22 | type BDateTime = BsonDateTime
23 | type BBinary = BsonBinary
24 | type BSymbol = BsonSymbol
25 |
26 | // XXX: do we need to make copies instead of mutating the original values here?
27 |
28 | override def fromObject(obj: BObject): Map[String, BValue] = obj.asScala.toMap
29 | override def makeObject(m: Map[String, BValue]): BObject = m.foldLeft(new BsonDocument()) { case (d, (k, v)) => d.append(k, v) }
30 | override def getObject(value: BValue): Option[BObject] = value.cast[BsonDocument]
31 |
32 |   override def getObjectKey(obj: BObject, key: String): Option[BValue] = Option(obj.get(key)) // get returns null only when the key is absent: stored BSON values are never null
33 | override def setObjectKey(obj: BObject, key: String, value: BValue): BObject = obj.append(key, value)
34 | override def containsObjectKey(obj: BObject, key: String): Boolean = obj.containsKey(key)
35 | override def removeObjectKey(obj: BObject, key: String): BObject = { obj.remove(key); obj }
36 | override def makeEmptyObject: BObject = new BsonDocument()
37 |
38 | override def fromArray(arr: BArray): Vector[BValue] = arr.asScala.toVector
39 | override def makeArray(v: Vector[BValue]): BArray = new BsonArray(v.asJava)
40 | override def getArray(value: BValue): Option[BArray] = value.cast[BsonArray]
41 |
42 | override def getArrayLength(arr: BArray): Int = arr.size()
43 | override def getArrayValueAt(arr: BArray, idx: Int): BValue = arr.get(idx)
44 | override def pushToArray(arr: BArray, value: BValue): BArray = { arr.add(value); arr }
45 | override def makeEmptyArray: BArray = new BsonArray()
46 |
47 | def fromBinary(bin: BBinary): Array[Byte] = bin.getData
48 | def makeBinary(arr: Array[Byte]): BBinary = new BsonBinary(arr)
49 | def getBinary(value: BValue): Option[BBinary] = value.cast[BBinary]
50 |
51 | def fromObjectId(oid: BObjectId): ObjectId = oid.getValue
52 | def makeObjectId(oid: ObjectId): BObjectId = new BsonObjectId(oid)
53 | def getObjectId(value: BValue): Option[BObjectId] = value.cast[BsonObjectId]
54 |
55 | def fromDateTime(dt: BDateTime): Long = dt.getValue
56 | def makeDateTime(n: Long): BDateTime = new BsonDateTime(n)
57 | def getDateTime(value: BValue): Option[BDateTime] = value.cast[BsonDateTime]
58 |
59 | override def fromString(str: BString): String = str.getValue
60 | override def makeString(s: String): BString = new BsonString(s)
61 | override def getString(value: BValue): Option[BString] = value.cast[BsonString]
62 |
63 | def fromSymbol(sym: BSymbol): Symbol = Symbol(sym.getSymbol)
64 | def makeSymbol(sym: Symbol): BSymbol = new BsonSymbol(sym.name)
65 | def getSymbol(value: BValue): Option[BSymbol] = value.cast[BsonSymbol]
66 |
67 | def fromInt32(n: BInt32): Int = n.getValue
68 | def makeInt32(n: Int): BInt32 = new BsonInt32(n)
69 | def getInt32(value: BValue): Option[BsonInt32] = value.cast[BsonInt32]
70 |
71 | def fromInt64(n: BInt64): Long = n.getValue
72 | def makeInt64(n: Long): BInt64 = new BsonInt64(n)
73 | def getInt64(value: BValue): Option[BsonInt64] = value.cast[BsonInt64]
74 |
75 | def fromDouble(n: BDouble): Double = n.getValue
76 | def makeDouble(n: Double): BDouble = new BsonDouble(n)
77 | def getDouble(value: BValue): Option[BsonDouble] = value.cast[BsonDouble]
78 |
79 | override def fromNumber(num: BNumber): Number = fromNumberAccurately(num)
80 | override def makeNumber(n: Number): BNumber = makeNumberAccurately(n).asInstanceOf[BNumber]
81 | override def getNumber(value: BValue): Option[BNumber] = value.cast[BsonNumber]
82 |
83 | override def makeNumberAccurately(n: Number): BValue = n match {
84 | case (_: java.lang.Byte | _: java.lang.Short | _: java.lang.Integer) => new BsonInt32(n.intValue())
85 | case _: java.lang.Long => new BsonInt64(n.longValue())
86 | case _: java.lang.Float | _: java.lang.Double => new BsonDouble(n.doubleValue())
87 | case _ => new BsonDouble(n.doubleValue()) // FIXME: there are other types which should be handled properly
88 | }
89 | override def fromNumberAccurately: PartialFunction[BValue, Number] = {
90 | case n: BsonInt32 => n.intValue()
91 | case n: BsonInt64 => n.longValue()
92 | case n: BsonDouble => n.doubleValue()
93 | }
94 | override def fromNumberAccuratelyExpected: String = "number"
95 |
96 | def fromBoolean(bool: BBoolean): Boolean = bool.getValue
97 | def makeBoolean(b: Boolean): BBoolean = new BsonBoolean(b)
98 | def getBoolean(value: BValue): Option[BBoolean] = value.cast[BsonBoolean]
99 |
100 | def makeNull: BNull = new BsonNull
101 | def getNull(value: BValue): Option[BNull] = value.cast[BsonNull]
102 |
103 | object BsonExtract {
104 | object ObjectId {
105 | def unapply(value: BValue): Option[ObjectId] = getObjectId(value).map(fromObjectId)
106 | }
107 |
108 | object Int32 {
109 | def unapply(value: BValue): Option[Int] = getInt32(value).map(fromInt32)
110 | }
111 |
112 | object Int64 {
113 | def unapply(value: BValue): Option[Long] = getInt64(value).map(fromInt64)
114 | }
115 |
116 | object Double {
117 | def unapply(value: BValue): Option[Double] = getDouble(value).map(fromDouble)
118 | }
119 |
120 | object DateTime {
121 | def unapply(value: BValue): Option[Long] = getDateTime(value).map(fromDateTime)
122 | }
123 |
124 | object Binary {
125 | def unapply(value: BValue): Option[Array[Byte]] = getBinary(value).map(fromBinary)
126 | }
127 |
128 | object Symbol {
129 | def unapply(value: BValue): Option[Symbol] = getSymbol(value).map(fromSymbol)
130 | }
131 | }
132 |
133 | object bsonConversionImplicits {
134 | implicit class ObjectIdToBackendExt(val oid: ObjectId) {
135 | def toBackend: BObjectId = makeObjectId(oid)
136 | }
137 |
138 | implicit class BinaryToBackendExt(val arr: Array[Byte]) {
139 | def toBackend: BBinary = makeBinary(arr)
140 | }
141 |
142 | implicit class SymbolToBackendExt(val sym: Symbol) {
143 | def toBackend: BSymbol = makeSymbol(sym)
144 | }
145 | }
146 | }
147 |
--------------------------------------------------------------------------------
/mongodb/src/main/scala/io/github/netvl/picopickle/backends/mongodb/bson.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle.backends.mongodb
2 |
3 | import java.util.Date
4 |
5 | import _root_.io.github.netvl.picopickle.{TypesComponent, DefaultPickler, ExceptionsComponent, BackendComponent}
6 | import org.bson._
7 | import org.bson.types.ObjectId
8 |
9 | import scala.reflect.{ClassTag, classTag}
10 |
11 | trait MongodbBsonBackendComponent extends BackendComponent {
12 | override val backend = MongodbBsonBackend
13 | }
14 |
15 | trait MongodbBsonSerializersComponent {
16 | this: MongodbBsonBackendComponent with TypesComponent =>
17 |
18 | private def identityBsonReadWriter[T <: backend.BValue : ClassTag] =
19 | ReadWriter.writing[T](identity).reading { case value: T => value }
20 | .orThrowing(whenReading = classTag[T].runtimeClass.getSimpleName, expected = classTag[T].runtimeClass.getSimpleName)
21 |
22 | implicit val bsonValueReadWriter: ReadWriter[BsonValue] =
23 | ReadWriter.writing[BsonValue](identity).reading(PartialFunction(identity))
24 |
25 | implicit val bsonDocumentReadWriter: ReadWriter[BsonDocument] = identityBsonReadWriter[BsonDocument]
26 | implicit val bsonArrayReadWriter: ReadWriter[BsonArray] = identityBsonReadWriter[BsonArray]
27 | implicit val bsonStringReadWriter: ReadWriter[BsonString] = identityBsonReadWriter[BsonString]
28 | implicit val bsonNumberReadWriter: ReadWriter[BsonNumber] = identityBsonReadWriter[BsonNumber]
29 | implicit val bsonBooleanReadWriter: ReadWriter[BsonBoolean] = identityBsonReadWriter[BsonBoolean]
30 | implicit val bsonNullReadWriter: ReadWriter[BsonNull] = identityBsonReadWriter[BsonNull]
31 | implicit val bsonObjectIdReadWriter: ReadWriter[BsonObjectId] = identityBsonReadWriter[BsonObjectId]
32 | implicit val bsonInt32ReadWriter: ReadWriter[BsonInt32] = identityBsonReadWriter[BsonInt32]
33 | implicit val bsonInt64ReadWriter: ReadWriter[BsonInt64] = identityBsonReadWriter[BsonInt64]
34 | implicit val bsonDoubleReadWriter: ReadWriter[BsonDouble] = identityBsonReadWriter[BsonDouble]
35 | implicit val bsonDateTimeReadWriter: ReadWriter[BsonDateTime] = identityBsonReadWriter[BsonDateTime]
36 | implicit val bsonBinaryReadWriter: ReadWriter[BsonBinary] = identityBsonReadWriter[BsonBinary]
37 | implicit val bsonSymbolReadWriter: ReadWriter[BsonSymbol] = identityBsonReadWriter[BsonSymbol]
38 |
39 | // TODO: add a test for this
40 | implicit val dateReadWriter: ReadWriter[Date] = ReadWriter.writing[Date](d => backend.makeDateTime(d.getTime))
41 | .reading {
42 | case backend.BsonExtract.DateTime(ts) => new Date(ts)
43 | }.orThrowing(whenReading = "date", expected = "datetime")
44 |
45 | implicit val symbolReadWriter: ReadWriter[Symbol] = ReadWriter.writing(backend.makeSymbol)
46 | .reading {
47 | case backend.BsonExtract.Symbol(sym) => sym
48 | }.orThrowing(whenReading = "symbol", expected = "symbol")
49 |
50 | implicit val binaryReadWriter: ReadWriter[Array[Byte]] = ReadWriter.writing(backend.makeBinary)
51 | .reading {
52 | case backend.BsonExtract.Binary(arr) => arr
53 | }.orThrowing(whenReading = "array of bytes", expected = "binary")
54 |
55 | implicit val intReadWriter: ReadWriter[Int] = ReadWriter.writing(backend.makeInt32)
56 | .reading {
57 | case backend.BsonExtract.Int32(n) => n
58 | }.orThrowing(whenReading = "int", expected = "32-bit integer")
59 |
60 | implicit val longReadWriter: ReadWriter[Long] = ReadWriter.writing(backend.makeInt64)
61 | .reading {
62 | case backend.BsonExtract.Int64(n) => n
63 | }.orThrowing(whenReading = "long", expected = "64-bit integer")
64 |
65 | implicit val doubleReadWriter: ReadWriter[Double] = ReadWriter.writing(backend.makeDouble)
66 | .reading {
67 | case backend.BsonExtract.Double(n) => n
68 | }.orThrowing(whenReading = "double", expected = "double")
69 |
70 | implicit val objectIdReadWriter: ReadWriter[ObjectId] = ReadWriter.writing(backend.makeObjectId)
71 | .reading {
72 | case backend.BsonExtract.ObjectId(oid) => oid
73 | }.orThrowing(whenReading = "object id", expected = "object id")
74 | }
75 |
76 | trait MongodbBsonPickler
77 | extends DefaultPickler
78 | with MongodbBsonBackendComponent
79 | with MongodbBsonSerializersComponent
80 |
81 | object MongodbBsonPickler extends MongodbBsonPickler
82 |
--------------------------------------------------------------------------------
/mongodb/src/test/scala/io/github/netvl/picopickle/backends/mongodb/MongodbBsonConvertersTest.scala:
--------------------------------------------------------------------------------
1 | package io.github.netvl.picopickle.backends.mongodb
2 |
3 | import io.github.netvl.picopickle.ConvertersTestBase
4 |
5 | class MongodbBsonConvertersTest extends ConvertersTestBase with MongodbBsonPickler {
6 | override lazy val backendName: String = "MongoDB BSON"
7 | }
8 |
--------------------------------------------------------------------------------
/notes/0.1.0.markdown:
--------------------------------------------------------------------------------
1 | This is the first public release.
2 |
3 | picopickle is a lightweight shapeless-based serialization library. It is very extensible and flexible:
4 | it does not use reflection and supports almost arbitrary serialization formats - anything representable
5 | with JSON-like data types is supported out of the box with minimal shim code, and writing support
6 | for something more complex is straightforward.
7 |
8 | Currently picopickle supports serialization and deserialization to and from JSON and Scala collections,
9 | but more backends are planned. picopickle uses a very fast JSON parser, [jawn](https://github.com/non/jawn),
10 | to parse JSON data.
11 |
12 | What is available in this release:
13 |
14 | * serialization and deserialization of all basic Scala types: primitives, strings, symbols, options, eithers, etc.;
15 | * serialization and deserialization of almost all Scala collections;
16 | * serialization and deserialization of almost arbitrary sealed trait hierarchies, i.e. case classes and case objects,
17 | possibly implementing a sealed trait;
18 | * case class serialization supports renaming the fields and classes, default values and optional fields, as well as
19 | circular type dependencies;
20 | * customizable nulls handling;
21 | * two backends - JSON and collections, allowing serialization and deserialization to and from JSON and collections,
22 | respectively;
23 | * a converters library which provides a DSL for writing custom serializers in a declarative way (a sketch follows below).
24 |
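25 | For illustration, here is a hedged sketch of a converter built with this DSL, modeled on the usage
26 | in this repository's `ConvertersTestBase` (it assumes the DSL names `obj`, `num` and the `>>`
27 | combinators are in scope via the pickler import, as they are in those tests):
28 | 
29 |     import io.github.netvl.picopickle.backends.jawn.JsonPickler._
30 |     import shapeless._
31 | 
32 |     case class Point(x: Double, y: Double)
33 | 
34 |     // The left-hand function maps a Point into an HList for writing; obj(...)
35 |     // describes the backend object; the right-hand function rebuilds a Point
36 |     // when reading.
37 |     val pointConverter =
38 |       { (p: Point) => p.x :: p.y :: HNil } >>
39 |       obj(
40 |         "x" -> num.double ::
41 |         "y" -> num.double ::
42 |         HNil
43 |       ) >> { case x :: y :: HNil => Point(x, y) }
44 | 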
25 | picopickle is heavily inspired by [upickle](https://github.com/lihaoyi/upickle), but upickle did not do everything
26 | I needed, and I couldn't manage to fix it - there were compiler errors I didn't know how to overcome. shapeless,
27 | however, provides a lot of tools, including lazy implicits, which made writing a serialization library much easier.
28 |
29 | You can find more in the [readme](https://github.com/netvl/picopickle#readme).
30 |
--------------------------------------------------------------------------------
/notes/0.1.1.markdown:
--------------------------------------------------------------------------------
1 | This is a bugfix release.
2 |
3 | * Fixed a problem with classes whose companion objects have overloaded `apply` methods.
--------------------------------------------------------------------------------
/notes/0.1.2.markdown:
--------------------------------------------------------------------------------
1 | Updated Scala 2.10 minor version (was 2.10.4, became 2.10.5). This matters because shapeless depends on the minor version of Scala 2.10.
--------------------------------------------------------------------------------
/notes/0.1.3.markdown:
--------------------------------------------------------------------------------
1 | Added two new features - serialization objects and object key serialization:
2 |
3 | * serialization objects allow one to "specialize" the generic `read()`/`write()` pickler
4 |   methods for some specific type, for convenience and type safety;
5 | * object key serialization makes it possible to serialize maps with arbitrary keys
6 |   as backend objects (which previously was possible only for string keys), provided that
7 |   a special serializer for the key type is present in implicit scope. It is also now
8 |   possible to disallow the default behavior of serializing maps as arrays of arrays
9 |   and enable it explicitly only for specific key types (see the sketch below).
10 |
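11 | A hedged sketch of both features, mirroring the generated tests in `project/tests/Pickler.yml`
12 | and the `serializer` method of the JSON backend (the import assumes the JSON pickler):
13 | 
14 |     import io.github.netvl.picopickle.backends.jawn.JsonPickler._
15 | 
16 |     // Object key serialization: with a key serializer for Int in scope,
17 |     // maps with Int keys are written as objects with string keys.
18 |     implicit val intObjectKeyRW = ObjectKeyReadWriter(_.toInt)
19 |     writeString(Map(1 -> "a", 2 -> "b"))  // {"1":"a","2":"b"}
20 | 
21 |     // Serialization object: read/write specialized for a single type.
22 |     case class User(name: String)
23 |     val userSerializer = serializer[User]
24 |     userSerializer.writeString(User("me"))  // {"name":"me"}
25 | 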
11 | More information can be found in the [readme](https://github.com/netvl/picopickle#readme).
--------------------------------------------------------------------------------
/notes/0.2.0.markdown:
--------------------------------------------------------------------------------
1 | This release contains a lot of changes and new features.
2 |
3 | ### Updated shapeless to 2.2.3, jawn to 0.8.8, Scala to 2.11.7.
4 |
5 | In particular, updating shapeless made it possible to use picopickle with case classes that take variable arguments.
6 |
7 | ### Improved reader interface - added `readOrElse` method and changed existing code to depend on it.
8 |
9 | `readOrElse` is borrowed from the `PartialFunction` trait, where it is called `applyOrElse`. It is an
10 | important method for optimization because it allows checking whether a function (reader) can be
11 | applied to a value and applying it to that value at the same time. Now the `Reader` trait has this
12 | method, and it is defined and used correctly by the built-in `Reader` combinators, in particular
13 | for error checks.
14 |
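15 | Below is a hedged, self-contained sketch of the idea, not picopickle's actual `Reader`
16 | definition (the trait and method names are illustrative):
17 | 
18 |     // The naive default shown here still tests and then applies; concrete
19 |     // readers can override readOrElse to pattern-match the value only once,
20 |     // which is the optimization applyOrElse enables for PartialFunction.
21 |     trait SimpleReader[B, T] {
22 |       def canRead(value: B): Boolean
23 |       def read(value: B): T
24 |       def readOrElse(value: B, default: B => T): T =
25 |         if (canRead(value)) read(value) else default(value)
26 |     }
27 | 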
15 | ### Added proper error handling.
16 |
17 | While writing data to backend representation is usually an error-free operation (if there is a writer
18 | for some type, it should handle all values of this type), reading data from the backend representation
19 | is a source of errors. This happens because the backend representation has much weaker typing guarantees
20 | than Scala code and can't correspond directly to Scala types.
21 |
22 | Previously picopickle didn't provide any special error handling. If a backend value couldn't be
23 | deserialized, picopickle would throw some obscure `MatchError` or `IllegalArgumentException`. Since
24 | 0.2.0 picopickle has a proper exception system: if a read error occurs, the exception contains
25 | much more information about what was expected and what was actually found. You can find more on this
26 | in the [readme][readme-error-handling].
27 |
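28 | In the JSON backend, such errors can also be captured with `tryReadString` (defined in this
29 | repository's `json.scala`), which wraps failures in a `Try`; a brief usage sketch:
30 | 
31 |     import io.github.netvl.picopickle.backends.jawn.JsonPickler._
32 |     import scala.util.{Failure, Success}
33 | 
34 |     // A type mismatch surfaces as a Failure carrying a descriptive
35 |     // exception instead of an obscure MatchError.
36 |     tryReadString[Int]("\"not a number\"") match {
37 |       case Success(n)  => println(s"read $n")
38 |       case Failure(ex) => println(s"read failed: ${ex.getMessage}")
39 |     }
40 | 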
28 | ### Added a new BSON-based backend.
29 |
30 | A new officially supported backend has been added. It uses [MongoDB BSON][mongodb-bson] data types
31 | as the backend representation.
32 |
33 | With this backend it is possible to use picopickle for serialization together with the official MongoDB drivers.
34 | 
35 | It also serves as an example of an extended backend implementation that supports more types than
36 | the basic backend.
37 |
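38 | A hedged usage sketch (the `write`/`read` calls and the `BsonValue` result type mirror the
39 | generated MongoDB tests in this repository):
40 | 
41 |     import io.github.netvl.picopickle.backends.mongodb.MongodbBsonPickler._
42 |     import org.bson._
43 | 
44 |     case class User(name: String, age: Int)
45 | 
46 |     // write produces a BsonValue (here a BsonDocument holding a BsonString
47 |     // and a BsonInt32 field); read converts it back.
48 |     val doc: BsonValue = write(User("alice", 30))
49 |     val user = read[User](doc)
50 | 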
38 | ### Added support for changing the STH discriminator key on a per-STH basis.
39 | 
40 | It is now possible to change the sealed trait hierarchy (STH) discriminator key for each sealed trait separately:
41 |
42 | @discriminator("status") sealed trait Root
43 | case object Stopped extends Root
44 | case class Running(name: String) extends Root
45 |
46 | writeString[Root](Stopped) shouldEqual """{"status":"Stopped"}"""
47 | writeString[Root](Running("me")) shouldEqual """{"status":"Running","name":"me"}"""
48 |
49 | This allows even more flexibility in defining serialization formats, especially when matching some
50 | existing interface.
51 |
52 | More information can be found in the [readme](https://github.com/netvl/picopickle#readme).
53 |
54 | [readme-error-handling]: https://github.com/netvl/picopickle#error-handling
55 | [mongodb-bson]: http://mongodb.github.io/mongo-java-driver/3.0/bson/
56 |
57 |
--------------------------------------------------------------------------------
/notes/0.2.1.markdown:
--------------------------------------------------------------------------------
1 | This release contains only internal changes and dependency version bumps.
2 |
3 | * Updated shapeless to 2.3.0, macroparadise to 2.1.0, jawn to 0.8.4, bson to 3.2.2, Scala to 2.10.6. The most
4 |   important update here is shapeless 2.3.0.
5 | * Following shapeless, switched to [macro-compat](https://github.com/milessabin/macro-compat) instead of a
6 |   hand-written macro compatibility API for 2.10 and 2.11. This should greatly simplify supporting custom macros.
7 |
--------------------------------------------------------------------------------
/notes/0.3.0.markdown:
--------------------------------------------------------------------------------
1 | This release contains only one small feature, but it is a minor version bump instead of a patch version
2 | because the previous version bump was incorrect - in 0.2.1 the shapeless dependency was updated, and the new
3 | version is binary incompatible with its predecessors.
4 |
5 | * Added support for serializing value classes, that is, classes extending `AnyVal`, as regular values. While this
6 |   was entirely possible to do manually in previous versions of picopickle, this release introduces tools to make
7 |   it automatic (see the sketch below). You can find more in the [readme][value-classes].
8 | * Updated Scala version to 2.11.8.
9 |
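10 | A hedged sketch, mirroring the generated value-class tests in `project/tests/Pickler.yml`
11 | (mixing in `ValueClassReaderWritersComponent` is exactly what those test fixtures do):
12 | 
13 |     import io.github.netvl.picopickle.ValueClassReaderWritersComponent
14 |     import io.github.netvl.picopickle.backends.jawn.JsonPickler
15 | 
16 |     object MyPickler extends JsonPickler with ValueClassReaderWritersComponent
17 |     import MyPickler._
18 | 
19 |     case class Name(value: String) extends AnyVal
20 | 
21 |     // The value class is written as its underlying value, not as an object.
22 |     writeString(Name("hi"))  // """"hi"""" - that is, the JSON string "hi"
23 | 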
10 | [value-classes]: https://github.com/netvl/picopickle#value-classes
11 |
--------------------------------------------------------------------------------
/notes/about.markdown:
--------------------------------------------------------------------------------
1 | [picopickle](https://github.com/netvl/picopickle) is a lightweight but powerful serialization library based on shapeless.
2 |
--------------------------------------------------------------------------------
/project/TestGeneration.scala:
--------------------------------------------------------------------------------
1 | import java.io.File
2 | import java.nio.charset.StandardCharsets
3 | import java.nio.file.Files
4 | import java.util
5 |
6 | import ImplicitUtils._
7 | import org.yaml.snakeyaml.Yaml
8 | import org.yaml.snakeyaml.constructor.SafeConstructor
9 | import sbt.Keys._
10 | import sbt._
11 | import shapeless._
12 | import shapeless.syntax.typeable._
13 |
14 | import scala.collection.convert.decorateAsScala._
15 | import scala.language.higherKinds
16 |
17 | sealed trait TestGenerator {
18 | def generate(config: Map[String, Any], variantName: String, input: Any): String
19 | }
20 | object TestGenerator {
21 | val Generators: Map[String, TestGenerator] = Map(
22 | "rw" -> RwTestGenerator
23 | )
24 | def forName(name: String): TestGenerator = Generators(name)
25 | }
26 |
27 | object RwTestGenerator extends TestGenerator {
28 | override def generate(config: Map[String, Any], variantName: String, input: Any): String = {
29 | require(variantName != "source", "variant name cannot be 'source'")
30 |
31 | val pattern = config("pattern").ecast[Vector[String]]
32 | val sourceIndex = pattern.indexOf("source")
33 | val variantIndex = pattern.indexOf(variantName)
34 |     require(sourceIndex >= 0, "pattern does not contain a 'source' entry")
35 | require(variantIndex >= 0, s"unknown variant: $variantName")
36 |
37 | input.ecast[Vector[Any]].map { check =>
38 | val (kind, argType, sourceArg0, expectedArg0) = check match {
39 | case `Map[String, Any]`(m) =>
40 | val items = m("items").ecast[Vector[String]]
41 | (m.getOrElse("kind", "rw").ecast[String], m.get("type").ecast[Option[String]], items(sourceIndex), items(variantIndex))
42 | case `Vector[Any]`(c) =>
43 | ("rw", None, c(sourceIndex).ecast[String], c(variantIndex).ecast[String])
44 | }
45 | val (sourceArg, expectedArg) = (sourceArg0.trim, expectedArg0.trim)
46 | val finalArgType = argType.fold("")(t => s"[$t]")
47 | val invocation = kind match {
48 | case "rw" => s"testRW$finalArgType"
49 | case "r" => s"testR$finalArgType"
50 | }
51 | def tooLong(s: String) = s.contains("\n") || s.length > 45
52 | if (tooLong(sourceArg) || tooLong(expectedArg))
53 | s"""|$invocation(
54 | |${Strings.reindent(sourceArg, 2)},
55 | |${Strings.reindent(expectedArg, 2)}
56 | |)""".stripMargin
57 | else s"$invocation($sourceArg, $expectedArg)"
58 | }.mkString("\n")
59 | }
60 | }
61 |
62 | case class TestCase(name: String,
63 | prepend: Option[String],
64 | additionalFixtureExtends: Option[String],
65 | items: Map[String, Any])
66 | case class TestVariant(name: String,
67 | targetProject: String,
68 | context: Map[String, String])
69 | case class TestDefinition(name: String,
70 | filePattern: String,
71 | template: String,
72 | indent: Int,
73 | variants: Vector[TestVariant],
74 | global: Map[String, Map[String, Any]],
75 | cases: Vector[TestCase])
76 |
77 | object TestGeneration {
78 | def parseCases(root: Any): Vector[TestCase] = {
79 | val v = root.ecast[Vector[Map[String, Any]]]
80 | v.map {
81 | case c =>
82 | TestCase(
83 | c("name").ecast[String],
84 | c.get("prepend").ecast[Option[String]],
85 | c.get("additionalFixtureExtends").map(_.ecast[String]),
86 | c - "name" - "prepend" - "additionalFixtureExtends"
87 | )
88 | }
89 | }
90 |
91 | def parseVariants(root: Any): Vector[TestVariant] = {
92 | val m = root.ecast[Map[String, Map[String, String]]]
93 | m.map {
94 | case (k, v) => TestVariant(k, v("targetProject"), v)
95 | }.toVector
96 | }
97 |
98 | def parseGlobal(root: Any): Map[String, Map[String, Any]] = root.ecast[Map[String, Map[String, Any]]]
99 |
100 | def parseDefinition(name: String, root: Any): TestDefinition = {
101 | val m = root.ecast[Map[String, Any]]
102 | TestDefinition(
103 | name,
104 | m("file-pattern").ecast[String],
105 | m("template").ecast[String],
106 | m("indent").ecast[Int],
107 | m("variants") |> parseVariants,
108 | m("global") |> parseGlobal,
109 | m("cases") |> parseCases
110 | )
111 | }
112 |
113 | def loadDefinitions(testsDir: File): Vector[TestDefinition] = {
114 | val yaml = new Yaml(new SafeConstructor)
115 | (testsDir ** "*.yml").get
116 | .map(_.toPath)
117 | .map(p => p.getFileName -> Files.readAllLines(p, StandardCharsets.UTF_8).asScala.mkString("\n"))
118 | .map { case (n, f) => n.toString.split("\\.")(0) -> YamlUtils.convertTree(yaml.load(f)) }
119 | .map { case (n, y) => parseDefinition(n, y) }
120 | .toVector
121 | }
122 |
123 | case class EvaluatedDefinition(projectName: String, fileName: String, packageName: String, body: String)
124 |
125 | def evaluateDefinitionIn(projectName: String, streams: TaskStreams, definition: TestDefinition): Option[EvaluatedDefinition] = {
126 | definition.variants.find(_.targetProject == projectName).map { variant =>
127 | val projectName = variant.targetProject
128 | val fileName = definition.filePattern.interpolate(Map("name" -> variant.context("name")))
129 | val packageName = variant.context("package")
130 | val body = definition.template.interpolate(variant.context + ("cases" -> runGenerators(definition, variant)))
131 | EvaluatedDefinition(projectName, fileName, packageName, body)
132 | } <| {
133 | case None =>
134 | streams.log.warn(s"No variant of test ${definition.name}.yml for project $projectName was found")
135 | case _ =>
136 | }
137 | }
138 |
139 | def runGenerators(definition: TestDefinition, variant: TestVariant): String = {
140 | val generatedTests = definition.cases.map(runGenerators(definition, variant, _))
141 | generatedTests.map(Strings.reindent(_, definition.indent)).mkString("\n\n")
142 | }
143 |
144 | def runGenerators(definition: TestDefinition, variant: TestVariant, testCase: TestCase): String = {
145 | val body = testCase.items.toVector.map {
146 | case (generatorName, generatorInput) =>
147 | val gen = TestGenerator.forName(generatorName)
148 | val (moreConfig, items) = generatorInput match {
149 | case `Map[String, Any]`(m) =>
150 | (m.getOrElse("config", Map.empty).ecast[Map[String, Any]], m("input").ecast[Vector[Any]])
151 | case `Vector[Any]`(c) =>
152 | (Map.empty[String, Any], c)
153 | }
154 | gen.generate(definition.global.getOrElse(generatorName, Map.empty) ++ moreConfig, variant.name, items)
155 | }.mkString
156 |
157 | val finalBodyUnindented = testCase.prepend
158 | .map(_.interpolate(variant.context))
159 | .fold(body)(_ + "\n" + body)
160 | val finalBody = Strings.reindent(finalBodyUnindented, 2)
161 | s"""|"${testCase.name}" in new Fixture ${testCase.additionalFixtureExtends.map(_ + " ").getOrElse("")}{
162 | |$finalBody
163 | |}""".stripMargin
164 | }
165 |
166 | def generatedFiles(sourceRoot: SettingKey[File]) = Def.task[Seq[File]] {
167 | val projectId = thisProject.value.id
168 | val testDefinitions = loadDefinitions((baseDirectory in ThisBuild).value / "project" / "tests")
169 | testDefinitions.flatMap(evaluateDefinitionIn(projectId, streams.value, _)).map { definition =>
170 | val pkg = definition.packageName.replace('.', File.separatorChar)
171 | val targetFile = sourceRoot.value / pkg / definition.fileName
172 | IO.write(targetFile, definition.body, StandardCharsets.UTF_8)
173 | targetFile
174 | }
175 | }
176 | }
177 |
178 | object Strings {
179 | def interpolate(s: String, context: Map[String, Any]): String = {
180 | val result = new StringBuilder
181 | var i = 0
182 | while (i < s.length) {
183 | val j = s.indexOf('$', i)
184 | if (j == -1) {
185 | result ++= s.substring(i)
186 | i = s.length
187 | } else if (j < s.length-1) {
188 | result ++= s.substring(i, j)
189 | if (s(j+1) == '$') {
190 | result += '$'
191 | i = j+2
192 | } else if (s(j+1) == '{') {
193 | val k = s.indexOf('}', j+2)
194 | if (k != -1) {
195 | val key = s.substring(j+2, k)
196 | result ++= context(key).toString
197 | i = k+1
198 | } else {
199 | result ++= "${"
200 | i = j+2
201 | }
202 | } else {
203 | result += '$'
204 | i = j+1
205 | }
206 | } else {
207 | result += '$'
208 | i = s.length
209 | }
210 | }
211 | result.toString()
212 | }
213 |
214 | def reindent(s: String, indent: Int): String = s.replaceAll("(?m)^", " " * indent)
215 | }
216 |
217 | object YamlUtils {
218 | def convertTree(root: Any): Any = root match {
219 | case m: util.Map[Any @unchecked, Any @unchecked] => m.asScala.toMap.map {
220 | case (k, v) => convertTree(k) -> convertTree(v)
221 | }
222 | case s: util.Set[Any @unchecked] => s.asScala.toSet.map(convertTree)
223 | case c: util.List[Any @unchecked] => c.asScala.toVector.map(convertTree)
224 | case n: Int => n
225 | case n: Long => n
226 | case n: Double => n
227 | case s: String => s
228 | case b: Boolean => b
229 | }
230 | }
231 |
232 | object ImplicitUtils {
233 | val `Map[String, Any]` = TypeCase[Map[String, Any]]
234 | val `Vector[Any]` = TypeCase[Vector[Any]]
235 |
236 | implicit class StringExt(val s: String) extends AnyVal {
237 | def interpolate(context: Map[String, Any]): String = Strings.interpolate(s, context)
238 | }
239 |
240 | implicit class AnyExt[T: Manifest](val t: T) {
241 | def ecast[U: Typeable: Manifest]: U = t.cast[U].getOrElse(
242 | throw new ClassCastException(s"Cannot cast ${manifest[T]} to ${manifest[U]}")
243 | )
244 |
245 | def |>[U](f: T => U): U = f(t)
246 |
247 | def <|[U](f: T => U): T = { f(t); t }
248 | }
249 |
250 | }
251 |
--------------------------------------------------------------------------------
/project/Versions.scala:
--------------------------------------------------------------------------------
1 | object Versions {
2 | val shapeless = "2.3.2"
3 | val paradise = "2.1.0"
4 | val macroCompat = "1.1.1"
5 | val scalatest = "3.0.1"
6 | val jawn = "0.11.0"
7 | val mongodbBson = "3.2.2"
8 | }
9 |
10 |
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version = 0.13.11
2 |
--------------------------------------------------------------------------------
/project/build.sbt:
--------------------------------------------------------------------------------
1 | scalaVersion := "2.10.5"
2 |
3 | libraryDependencies ++= Seq(
4 | "com.chuusai" %% "shapeless" % "2.3.0",
5 | "org.yaml" % "snakeyaml" % "1.15"
6 | )
7 |
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | logLevel := Level.Warn
2 |
3 | resolvers += Resolver.url(
4 | "bintray-sbt-plugin-releases",
5 | url("http://dl.bintray.com/content/sbt/sbt-plugin-releases")
6 | )(Resolver.ivyStylePatterns)
7 | addSbtPlugin("me.lessis" % "bintray-sbt" % "0.3.0")
8 |
9 | addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.2")
10 |
11 | resolvers += "jgit-repo" at "http://download.eclipse.org/jgit/maven"
12 | addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "0.8.1")
13 | addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.5.3")
14 |
--------------------------------------------------------------------------------
/project/tests/Pickler.yml:
--------------------------------------------------------------------------------
1 | file-pattern: ${name}PicklerTest.scala
2 | template: |
3 | package ${package}
4 |
5 | import scala.collection.immutable.ListMap
6 | import org.scalatest.{FreeSpec, Matchers}
7 | import io.github.netvl.picopickle.ValueClassReaderWritersComponent
8 | import io.github.netvl.picopickle.Fixtures._
9 | ${additionalImports}
10 |
11 | class ${name}PicklerTest extends FreeSpec with Matchers {
12 | trait Fixture extends ${picklerClass} {
13 | def testRW[T: Reader: Writer](t: T, a: ${targetType}): Unit = {
14 | val s = ${write}(t)
15 | s shouldEqual a
16 | val u = ${read}[T](s)
17 | u shouldEqual t
18 | }
19 |
20 | def testR[T: Reader](t: T, a: ${targetType}): Unit = {
21 | val u = ${read}[T](a)
22 | u shouldEqual t
23 | }
24 | }
25 |
26 | "A ${descriptiveName} pickler" - {
27 | "should serialize and deserialize" - {
28 | ${cases}
29 | }
30 | }
31 | }
32 | indent: 6
33 | variants:
34 | collections:
35 | name: Collections
36 | descriptiveName: collections
37 | write: write
38 | read: read
39 | targetType: Any
40 | package: io.github.netvl.picopickle.backends.collections
41 | picklerClass: CollectionsPickler
42 | targetProject: core
43 | additionalImports: ''
44 | json:
45 | name: Json
46 | descriptiveName: JSON
47 | write: writeString
48 | read: readString
49 | targetType: String
50 | package: io.github.netvl.picopickle.backends.jawn
51 | picklerClass: JsonPickler
52 | targetProject: jawn
53 | additionalImports: ''
54 | mongodb:
55 | name: MongodbBson
56 | descriptiveName: MongoDB BSON
57 | write: write
58 | read: read
59 | targetType: BsonValue
60 | package: io.github.netvl.picopickle.backends.mongodb
61 | picklerClass: MongodbBsonPickler
62 | targetProject: mongodb
63 | additionalImports: |
64 | import scala.collection.convert.decorateAsJava._
65 | import org.bson._
66 | global:
67 | rw:
68 | pattern:
69 | - source
70 | - collections
71 | - json
72 | - mongodb
73 | cases:
74 | - name: numbers to numbers
75 | rw:
76 | - - '1: Byte'
77 | - '1: Byte'
78 | - '"1"'
79 | - "new BsonInt32(1)"
80 | - - '2: Short'
81 | - '2: Short'
82 | - '"2"'
83 | - "new BsonInt32(2)"
84 | - - '3: Int'
85 | - '3: Int'
86 | - '"3"'
87 | - "new BsonInt32(3)"
88 | - - '4: Long'
89 | - '4: Long'
90 | - '"4"'
91 | - "new BsonInt64(4)"
92 | - - '5: Float'
93 | - '5: Float'
94 | - '"5"'
95 | - "new BsonDouble(5)"
96 | - - '6: Double'
97 | - '6: Double'
98 | - '"6"'
99 | - "new BsonDouble(6)"
100 | - name: characters
101 | rw:
102 | input:
103 | - - "'a'"
104 | - "'a'"
105 | - '""""a""""'
106 | - 'new BsonString("a")'
107 | - name: string to string
108 | rw:
109 | - - '"hello world"'
110 | - '"hello world"'
111 | - '""""hello world""""'
112 | - 'new BsonString("hello world")'
113 | - name: null to null
114 | rw:
115 | - - 'null'
116 | - 'null'
117 | - '"null"'
118 | - 'new BsonNull'
119 | - name: unit to an empty object
120 | rw:
121 | - - '()'
122 | - 'Map()'
123 | - '"{}"'
124 | - 'new BsonDocument()'
125 | - name: boolean to boolean
126 | rw:
127 | - - 'true'
128 | - 'true'
129 | - '"true"'
130 | - 'BsonBoolean.TRUE'
131 | - - 'false'
132 | - 'false'
133 | - '"false"'
134 | - 'BsonBoolean.FALSE'
135 | - name: collection to an array
136 | rw:
137 | - - 'Seq("a", "b")'
138 | - 'Vector("a", "b")'
139 | - '"""["a","b"]"""'
140 | - 'new BsonArray(Vector(new BsonString("a"), new BsonString("b")).asJava)'
141 | - name: option to an array
142 | rw:
143 | - - 'Option("a")'
144 | - 'Vector("a")'
145 | - '"""["a"]"""'
146 | - 'new BsonArray(Vector(new BsonString("a")).asJava)'
147 | - - 'None'
148 | - 'Vector.empty'
149 | - '"""[]"""'
150 | - 'new BsonArray()'
151 | - name: map to an array of arrays
152 | rw:
153 | - - 'ListMap(1 -> 2, 3 -> 4)'
154 | - 'Vector(Vector(1, 2), Vector(3, 4))'
155 | - '"""[[1,2],[3,4]]"""'
156 | - |
157 | new BsonArray(Vector(
158 | new BsonArray(Vector(new BsonInt32(1), new BsonInt32(2)).asJava),
159 | new BsonArray(Vector(new BsonInt32(3), new BsonInt32(4)).asJava)
160 | ).asJava)
161 | - name: map with string keys to an object
162 | rw:
163 | - - 'ListMap("a" -> 1, "b" -> 2)'
164 | - 'Map("a" -> 1, "b" -> 2)'
165 | - '"""{"a":1,"b":2}"""'
166 | - 'new BsonDocument().append("a", new BsonInt32(1)).append("b", new BsonInt32(2))'
167 | - name: case class to an object
168 | prepend: |
169 | import CaseClass._
170 | rw:
171 | - - 'A(10, "hi")'
172 | - 'Map("x" -> 10, "y" -> "hi")'
173 | - '"""{"x":10,"y":"hi"}"""'
174 | - 'new BsonDocument().append("x", new BsonInt32(10)).append("y", new BsonString("hi"))'
175 | - name: value class to a value
176 | prepend: |
177 | import ValueClass._
178 | additionalFixtureExtends: with ValueClassReaderWritersComponent
179 | rw:
180 | - - 'A("hi")'
181 | - '"hi"'
182 | - '""""hi""""'
183 | - 'new BsonString("hi")'
184 | - name: case object to an empty object
185 | prepend: |
186 | import CaseObject._
187 | rw:
188 | - - 'A'
189 | - 'Map()'
190 | - '"{}"'
191 | - 'new BsonDocument()'
192 | - name: sealed trait hierarchy to an object with a discriminator key
193 | prepend: |
194 | import SealedTrait._
195 | rw:
196 | - type: Root
197 | items:
198 | - 'A(12, "hello")'
199 | - |
200 | Map(
201 | "$variant" -> "A",
202 | "x" -> 12,
203 | "y" -> "hello"
204 | )
205 | - '"""{"$variant":"A","x":12,"y":"hello"}"""'
206 | - |
207 | new BsonDocument()
208 | .append("$variant", new BsonString("A"))
209 | .append("x", new BsonInt32(12))
210 | .append("y", new BsonString("hello"))
211 | - type: Root
212 | items:
213 | - 'B(42L, Vector(1.0, 2.0, 3.0))'
214 | - |
215 | Map(
216 | "$variant" -> "B",
217 | "a" -> 42L,
218 | "b" -> Vector(1.0, 2.0, 3.0)
219 | )
220 | - '"""{"$variant":"B","a":42,"b":[1,2,3]}"""'
221 | - |
222 | new BsonDocument()
223 | .append("$variant", new BsonString("B"))
224 | .append("a", new BsonInt64(42))
225 | .append("b", new BsonArray(Vector(
226 | new BsonDouble(1.0), new BsonDouble(2.0), new BsonDouble(3.0)
227 | ).asJava))
228 | - type: Root
229 | items:
230 | - 'C'
231 | - 'Map("$variant" -> "C")'
232 | - '"""{"$variant":"C"}"""'
233 | - 'new BsonDocument().append("$variant", new BsonString("C"))'
234 | - name: recursive types
235 | prepend: |
236 | import Recursives._
237 | rw:
238 | - type: Root
239 | items:
240 | - 'A'
241 | - 'Map("$variant" -> "A")'
242 | - '"""{"$variant":"A"}"""'
243 | - 'new BsonDocument().append("$variant", new BsonString("A"))'
244 | - type: Root
245 | items:
246 | - 'B(1, Some(B(2, Some(B(3, None)))))'
247 | - |
248 | Map(
249 | "$variant" -> "B",
250 | "x" -> 1,
251 | "b" -> Map(
252 | "x" -> 2,
253 | "b" -> Map(
254 | "x" -> 3
255 | )
256 | )
257 | )
258 | - '"""{"$variant":"B","x":1,"b":{"x":2,"b":{"x":3}}}"""'
259 | - |
260 | new BsonDocument()
261 | .append("$variant", new BsonString("B"))
262 | .append("x", new BsonInt32(1))
263 | .append("b", new BsonDocument()
264 | .append("x", new BsonInt32(2))
265 | .append("b", new BsonDocument()
266 | .append("x", new BsonInt32(3))
267 | )
268 | )
269 | - type: Root
270 | items:
271 | - 'C(A)'
272 | - |
273 | Map(
274 | "$variant" -> "C",
275 | "next" -> Map(
276 | "$variant" -> "A"
277 | )
278 | )
279 | - '"""{"$variant":"C","next":{"$variant":"A"}}"""'
280 | - |
281 | new BsonDocument()
282 | .append("$variant", new BsonString("C"))
283 | .append("next", new BsonDocument()
284 | .append("$variant", new BsonString("A"))
285 | )
286 | - name: fields and classes renamed with annotations
287 | prepend: |
288 | import Renames._
289 | rw:
290 | - type: Root
291 | items:
292 | - 'A'
293 | - 'Map("$variant" -> "0")'
294 | - '"""{"$variant":"0"}"""'
295 | - 'new BsonDocument().append("$variant", new BsonString("0"))'
296 | - type: Root
297 | items:
298 | - 'B(12, "hello")'
299 | - |
300 | Map(
301 | "$variant" -> "B",
302 | "x" -> 12,
303 | "zzz" -> "hello"
304 | )
305 | - '"""{"$variant":"B","x":12,"zzz":"hello"}"""'
306 | - |
307 | new BsonDocument()
308 | .append("$variant", new BsonString("B"))
309 | .append("x", new BsonInt32(12))
310 | .append("zzz", new BsonString("hello"))
311 | - name: sealed trait hierarchy with renamed discriminator key
312 | prepend: |
313 | import CustomDiscriminatorKeys._
314 | rw:
315 | - type: Root
316 | items:
317 | - 'A'
318 | - 'Map("whatever" -> "A")'
319 | - '"""{"whatever":"A"}"""'
320 | - 'new BsonDocument().append("whatever", new BsonString("A"))'
321 | - type: Root
322 | items:
323 | - 'B(42)'
324 | - 'Map("whatever" -> "B", "x" -> 42)'
325 | - '"""{"whatever":"B","x":42}"""'
326 | - 'new BsonDocument().append("whatever", new BsonString("B")).append("x", new BsonInt32(42))'
327 | - name: case classes with default values
328 | prepend: |
329 | import Defaults._
330 | rw:
331 | - kind: r
332 | items:
333 | - 'A(10)'
334 | - 'Map("x" -> 10)'
335 | - '"""{"x":10}"""'
336 | - 'new BsonDocument().append("x", new BsonInt32(10))'
337 | - kind: r
338 | items:
339 | - 'A(10, "wow")'
340 | - 'Map("x" -> 10, "name" -> "wow")'
341 | - '"""{"x":10,"name":"wow"}"""'
342 | - 'new BsonDocument().append("x", new BsonInt32(10)).append("name", new BsonString("wow"))'
343 | - kind: r
344 | type: Root
345 | items:
346 | - 'A(10, enabled = true)'
347 | - 'Map("$variant" -> "A", "x" -> 10, "enabled" -> true)'
348 | - '"""{"$variant":"A","x":10,"enabled":true}"""'
349 | - |
350 | new BsonDocument()
351 | .append("$variant", new BsonString("A"))
352 | .append("x", new BsonInt32(10))
353 | .append("enabled", BsonBoolean.TRUE)
354 | - name: case classes with null fields
355 | prepend: |
356 | import CaseClass._
357 | rw:
358 | - - 'A(10, null)'
359 | - 'Map("x" -> 10, "y" -> null)'
360 | - '"""{"x":10,"y":null}"""'
361 | - 'new BsonDocument().append("x", new BsonInt32(10)).append("y", new BsonNull)'
362 | - name: case classes with an overloaded apply method in their companions
363 | prepend: |
364 | import WithOverloadedApplyMethod._
365 | rw:
366 | - - 'A(1, "2", 3.4)'
367 | - 'Map("x" -> 1, "y" -> "2", "z" -> 3.4)'
368 | - '"""{"x":1,"y":"2","z":3.4}"""'
369 | - |
370 | new BsonDocument()
371 | .append("x", new BsonInt32(1))
372 | .append("y", new BsonString("2"))
373 | .append("z", new BsonDouble(3.4))
374 | - name: case classes with an overloaded constructor
375 | prepend: |
376 | import WithOverloadedConstructor._
377 | rw:
378 | - - 'A(1, "2", 3.4)'
379 | - 'Map("x" -> 1, "y" -> "2", "z" -> 3.4)'
380 | - '"""{"x":1,"y":"2","z":3.4}"""'
381 | - |
382 | new BsonDocument()
383 | .append("x", new BsonInt32(1))
384 | .append("y", new BsonString("2"))
385 | .append("z", new BsonDouble(3.4))
386 | - name: maps with non-string keys but with a defined object key serializer
387 | prepend: |
388 | implicit val intObjectKeyRW = ObjectKeyReadWriter(_.toInt)
389 | rw:
390 | - - 'Map(1 -> "a", 2 -> "b")'
391 | - 'Map("1" -> "a", "2" -> "b")'
392 | - '"""{"1":"a","2":"b"}"""'
393 | - 'new BsonDocument().append("1", new BsonString("a")).append("2", new BsonString("b"))'
394 | - name: case classes with varargs
395 | prepend: |
396 | import WithVarargs._
397 | rw:
398 | - - 'A(1, "a", "b", "c")'
399 | - 'Map("x" -> 1, "y" -> Vector("a", "b", "c"))'
400 | - '"""{"x":1,"y":["a","b","c"]}"""'
401 | - |
402 | new BsonDocument()
403 | .append("x", new BsonInt32(1))
404 | .append("y", new BsonArray(Vector(new BsonString("a"), new BsonString("b"), new BsonString("c")).asJava))
405 |
--------------------------------------------------------------------------------