├── .gitignore ├── LICENSE ├── README.md ├── build.sbt ├── deploy.sh ├── doc ├── README.md ├── SUMMARY.md ├── book.json ├── macrame.md ├── playjsonext.md └── variants.md ├── install_dev.sh ├── play-json-extra ├── js │ └── src │ │ ├── main │ │ └── scala │ │ │ └── play │ │ │ └── json │ │ │ └── extra │ │ │ └── DateConstants.scala │ │ └── test │ │ └── scala │ │ └── play │ │ └── json │ │ └── extra │ │ ├── LocalDatetimeTests.scala │ │ └── MapManagementTest.scala ├── jvm │ └── src │ │ ├── main │ │ └── scala │ │ │ └── play │ │ │ └── json │ │ │ └── extra │ │ │ └── DateConstants.scala │ │ └── test │ │ └── scala │ │ └── play │ │ └── json │ │ └── extra │ │ ├── DefaultValueTest.scala │ │ ├── JsonTests.scala │ │ ├── KeyChangeTest.scala │ │ ├── MapManagementTest.scala │ │ ├── NoImport.scala │ │ ├── PlayJsonExtensionsTest.scala │ │ └── VariantsSpec.scala └── shared │ └── src │ ├── main │ └── scala │ │ └── play │ │ └── json │ │ └── extra │ │ ├── JsonFormat.scala │ │ ├── MacroCommons.scala │ │ ├── Picklers.scala │ │ ├── Variants.scala │ │ ├── extra.scala │ │ └── key.scala │ └── test │ └── scala │ └── play │ └── json │ └── extra │ └── JsonFormatterTests.scala └── project ├── Common.scala ├── Dependencies.scala ├── Resolvers.scala ├── build.properties └── plugins.sbt /.gitignore: -------------------------------------------------------------------------------- 1 | *.iml 2 | *.ipr 3 | *.pyc 4 | *.iws 5 | .idea/ 6 | \.tmp/ 7 | target/ 8 | vendor/ 9 | node_modules/ 10 | *.log 11 | .DS_Store 12 | /tmp/ 13 | ._.DS_Store 14 | currentLog 15 | node_modules/ 16 | *.log 17 | sbt 18 | _book/ 19 | tmp/ 20 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | 203 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | play-json-extra 2 | =============== 3 | 4 | I love working in Scala and ScalaJS. This library is my **opinionated** wrapper of several libraries merged to 5 | efficiently working in JSON word. 
6 | 7 | Main Features are: 8 | 9 | * Play JSON for ScalaJS 10 | * **JsonFormat** macro annotation for lazy people 11 | * Default field values populated for missing JSON fields 12 | * Field rewrite for Play JSON ("key") 13 | * +22 field case class formatter and more [Play Json Extension](http://play-json-extra.megl.io/playjsonext.html) 14 | * **Joda Datetime** for Scala and ScalaJS 15 | * [Strong Typed Enum](http://play-json-extra.megl.io/macrame.html) (String and Int) for Scala and ScalaJS 16 | * [Variant Types](http://play-json-extra.megl.io/variants.html) 17 | 18 | Full Documentation [here](http://play-json-extra.megl.io). 19 | 20 | ## Installing 21 | 22 | For the stable version (to be released): 23 | 24 | ```scala 25 | resolvers += "Sonatype releases" at "https://oss.sonatype.org/content/repositories/releases" 26 | 27 | libraryDependencies += "io.megl" %% "play-json-extra" % "2.4.3" 28 | ``` 29 | 30 | 31 | For the snapshot version: 32 | 33 | ```scala 34 | resolvers += "Sonatype snapshots" at "http://oss.sonatype.org/content/repositories/snapshots/" 35 | 36 | libraryDependencies += "io.megl" %% "play-json-extra" % "2.4.3-SNAPSHOT" 37 | ``` 38 | 39 | play-json-extra requires Scala 2.11.x 40 | 41 | ## Documentation 42 | 43 | Full Documentation [here](http://play-json-extra.megl.io). 44 | 45 | ## Thanks 46 | 47 | Thank you very much to: 48 | 49 | * Li Haoyi for [upickle](https://github.com/lihaoyi/upickle-pprint) and all his ScalaJS stuff 50 | * Jan Christopher Vogt for [Play JSON Extensions](https://github.com/cvogt/play-json-extensions) 51 | * Chris Neveu for [Macramé](https://github.com/ChrisNeveu/macrame) 52 | * Julien Richard-Foy for [Play JSON Variants](https://github.com/julienrf/play-json-variants) 53 | * The ScalaJS team 54 | * The Play Framework team 55 | -------------------------------------------------------------------------------- /build.sbt: -------------------------------------------------------------------------------- 1 | //publishTo := sonatypePublishTo.value 2 | 3 | Common.settings 4 | 5 | 6 | val playJsonExtra = crossProject.in(file("play-json-extra")) 7 | .settings(Common.settings: _*) 8 | .settings( 9 | name := s"play-json-extra", 10 | scalacOptions += "-language:reflectiveCalls", 11 | scalaVersion := Versions.scala, 12 | version := Versions.app, 13 | libraryDependencies ++= DependencyHelpers.compile(Library.scalastm, Library.scalaCompiler) ++ 14 | Seq( 15 | "com.typesafe.play" %%% "play-json" % Versions.playJson, 16 | "org.scalatest" %%% "scalatest" % Versions.scalaTestJS % Test, 17 | "org.specs2" %%% "specs2-core" % Versions.specs2 % Test, 18 | "com.lihaoyi" %%% "utest" % "0.6.3" % "test") ++ 19 | DependencyHelpers.provided(Library.scalaReflect) 20 | ) 21 | 22 | lazy val playJsonExtraJS = playJsonExtra.js 23 | 24 | lazy val playJsonExtraJVM = playJsonExtra.jvm 25 | 26 | lazy val root = 27 | project.in(file(".")).settings( 28 | publishArtifact := false, 29 | packagedArtifacts := Map.empty) // doesn't work - https://github.com/sbt/sbt-pgp/issues/42 30 | .aggregate(playJsonExtraJS, playJsonExtraJVM) 31 | 32 | -------------------------------------------------------------------------------- /deploy.sh: -------------------------------------------------------------------------------- 1 | cd doc && gitbook build && scp -r _book/* alberto@www.megl.io:/home/www/play-json-extra/ 2 | 3 | -------------------------------------------------------------------------------- /doc/README.md: -------------------------------------------------------------------------------- 1 | # Introduction
2 | 3 | I love working in Scala and ScalaJS. This library is my **opinionated** wrapper of several libraries merged to 4 | work efficiently in the JSON world. 5 | 6 | Main Features are: 7 | 8 | * Play JSON for ScalaJS 9 | * **JsonFormat** macro annotation for lazy people 10 | * Default field values populated for missing JSON fields 11 | * Field rewrite for Play JSON ("key") 12 | * +22 field case class formatter and more [Play Json Extension](playjsonext.md) 13 | * **Joda Datetime** for Scala and ScalaJS 14 | * Strong Typed Enum (String and Int) for Scala and ScalaJS 15 | * [Variant Types](variants.md) 16 | 17 | ## Installing 18 | 19 | For the stable version (to be released): 20 | 21 | ```scala 22 | resolvers += "Sonatype releases" at "https://oss.sonatype.org/content/repositories/releases" 23 | 24 | libraryDependencies += "io.megl" %% "play-json-extra" % "2.4.3" 25 | ``` 26 | 27 | 28 | For the snapshot version: 29 | 30 | ```scala 31 | resolvers += "Sonatype snapshots" at "http://oss.sonatype.org/content/repositories/snapshots/" 32 | 33 | libraryDependencies += "io.megl" %% "play-json-extra" % "2.4.3-SNAPSHOT" 34 | ``` 35 | 36 | 37 | ## Play JSON for ScalaJS 38 | 39 | You can share the same code for backend and frontend. 40 | 41 | > The implementation targets play-json 2.4.3 on Scala 2.11 42 | 43 | In the ScalaJS world, the [upickle](https://github.com/lihaoyi/upickle-pprint) library is used under the hood. 44 | 45 | ## JsonFormat annotation macro 46 | 47 | The **JsonFormat** annotation macro reduces the need to manually write Play JSON `Format` code. It automatically uses **all** 48 | the features of **Play Json Extra**. 49 | 50 | ```scala 51 | import play.json.extra.JsonFormat 52 | import play.json.extra.Picklers._ 53 | 54 | @JsonFormat 55 | case class TestDataTime(dt: DateTime, children: List[TestDataTime] = Nil) 56 | 57 | test("basic datetime test") { 58 | val dt = new DateTime(2015, 8, 11, 12, 1, 2, 3) 59 | val ts = TestDataTime(dt) 60 | val json = Json.toJson(ts) 61 | assert(Json.stringify(json) === """{"dt":"2015-08-11T12:01:02.003","children":[]}""") 62 | } 63 | ``` 64 | 65 | ## Default field values populated for missing JSON fields 66 | 67 | Default values are applied automatically when the corresponding fields are missing from the JSON. 68 | 69 | This allows much faster refactoring of your models when adding new fields with default values.
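For a quick, self-contained sketch of the round trip, assume a hypothetical `Server` model (the class, its field names, and the `DefaultsRoundTrip` object below are invented purely for illustration) together with `Jsonx.formatCaseClassUseDefaults`, the formatter the library's own `DefaultValueTest` uses to opt in to default values; the full documentation example follows.

```scala
import play.api.libs.json.{Format, Json}
import play.json.extra.{Jsonx, key}

// hypothetical model, used only to illustrate default handling and @key renaming
final case class Server(@key("host-name") host: String = "localhost", port: Int = 9000)

object DefaultsRoundTrip extends App {
  // formatCaseClassUseDefaults falls back to the declared defaults for any missing field
  implicit val serverFormat: Format[Server] = Jsonx.formatCaseClassUseDefaults[Server]

  // an empty JSON object deserializes to the default values
  assert(Json.parse("{}").as[Server] == Server())
  // fields present in the JSON win over the defaults
  assert(Json.parse("""{"port":8080}""").as[Server] == Server(port = 8080))
  // @key renames the field on the way out (and back in)
  assert(Json.stringify(Json.toJson(Server())) == """{"host-name":"localhost","port":9000}""")
}
```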
70 | 71 | ```scala 72 | case class DefaultValueTestClass(@key("ok-field") field:String="p", lp:List[String], 73 | l1:Option[Option[Int]]) 74 | 75 | test("json default serialize/deserialize") { 76 | import play.json.extra.implicits.optionWithNull 77 | 78 | implicit def fmt1 = Jsonx.formatCaseClass[DefaultValueTestClass] 79 | val t1=DefaultValueTestClass(lp=Nil, l1=None) 80 | 81 | assert(Json.stringify(Json.toJson(t1)) === """{"ok-field":"p","lp":[]}""") 82 | assert(t1 === Json.parse("{}").as[DefaultValueTestClass]) 83 | } 84 | ``` 85 | 86 | 87 | 88 | ## Field rewrite for Play JSON 89 | 90 | Field translation during the Serialization/Deserialization 91 | 92 | 93 | ```scala 94 | 95 | import play.json.extra.{JsonFormat, key} 96 | 97 | @JsonFormat 98 | case class DefaultTest(@key("ok1") a1: Int = 1, @key("ok2") a2: Int = 2, @key("ok3") a3: Int = 3, @key("ok4") a4: Int = 4, 99 | @key("ok5") a5: Int = 5, @key("ok6") a6: Int = 6, @key("ok7") a7: Int = 7, @key("ok8") a8: Int = 8, 100 | @key("ok9") a9: Int = 9, @key("ok10") a10: Int = 10, 101 | @key("ok11") a11: Int = 11, @key("ok12") a12: Int = 12, @key("ok13") a13: Int = 13, @key("ok14") a14: Int = 14, 102 | @key("ok15") a15: Int = 15, @key("ok16") a16: Int = 16, @key("ok17") a17: Int = 17, @key("ok18") a18: Int = 18, 103 | @key("ok19") a19: Int = 19, @key("ok20") a20: Int = 20, 104 | @key("ok21") a21: Int = 21, @key("ok22") a22: Int = 22, @key("ok23") a23: Int = 23, @key("ok24") a24: Int = 24, 105 | @key("ok25") a25: Int = 25, @key("ok26") a26: Int = 26, @key("ok27") a27: Int = 27, @key("ok28") a28: Int = 28, 106 | @key("ok29") a29: Int = 29 107 | ) 108 | 109 | 110 | test("default test key + >21 fields + des/ser") { 111 | val ts = DefaultTest() 112 | val json = Json.toJson(ts).as[JsObject] 113 | assert(json.fields.length === 29) 114 | val fieldNames=List( 115 | "ok1", "ok2", "ok3", "ok4", "ok5", "ok6", "ok7", "ok8", "ok9", "ok10", 116 | "ok10", "ok11", "ok12", "ok13", "ok14", "ok15", "ok16", "ok17", "ok18", "ok19", 117 | "ok20", "ok21", "ok22", "ok23", "ok24", "ok25", "ok26", "ok27", "ok28", "ok29" 118 | ) 119 | 120 | fieldNames.foreach(value => assert(json.fields.map(_._1).contains(value) === true)) 121 | val json2=Json.parse("""{"ok27":27,"ok16":16,"ok4":4,"ok25":25,"ok14":14,"ok10":10,"ok17":17,"ok21":21,"ok24":24,"ok28":28,"ok3":3,"ok20":20,"ok15":15,"ok7":7,"ok11":11,"ok1":1,"ok23":23,"ok12":12,"ok8":8,"ok29":29,"ok19":19,"ok2":2,"ok5":5,"ok26":26,"ok13":13,"ok18":18,"ok9":9,"ok6":6,"ok22":22}""") 122 | val ts2 = json2.as[DefaultTest] 123 | assert(ts2.a29 === 29) 124 | } 125 | 126 | ``` 127 | 128 | ## Joda Datetime for Scala and ScalaJS 129 | 130 | Finally a working implementation of Joda for Scala and ScalaJS. 
131 | 132 | Play JSON serialization/deserialization in both Scala and ScalaJS 133 | 134 | In scalaJS is a facade around [moment.js](http://momentjs.com/) 135 | 136 | ```scala 137 | test("basic datetime test") { 138 | val dt = new DateTime(2015, 8, 11, 12, 1, 2, 3) 139 | assert(dt.year.get === 2015) 140 | assert(dt.monthOfYear.get === 8) 141 | assert(dt.dayOfMonth.get === 1) 142 | assert(dt.hourOfDay.get === 12) 143 | assert(dt.minuteOfHour.get === 1) 144 | assert(dt.secondOfMinute.get === 2) 145 | assert(dt.millisOfSecond.get === 3) 146 | } 147 | 148 | test("play json serialization/deserialization") { 149 | val dt = new DateTime(2015, 8, 11, 12, 1, 2, 3) 150 | val ts=TestDataTime(dt) 151 | val json = Json.stringify(Json.toJson(ts)) 152 | println(json) 153 | val ts2=Json.parse(json).as[TestDataTime] 154 | println(s"${ts.toString} ${ts2.toString}") 155 | println(s"${ts.dt.toString()} ${ts2.dt.toString()}") 156 | 157 | assert(ts2.dt.year.get === 2015) 158 | assert(ts2.dt.monthOfYear.get === 8) 159 | assert(ts2.dt.dayOfMonth.get === 1) 160 | assert(ts2.dt.hourOfDay.get === 12) 161 | assert(ts2.dt.minuteOfHour.get === 1) 162 | assert(ts2.dt.secondOfMinute.get === 2) 163 | } 164 | ``` 165 | 166 | ## Strong Typed Enum (String and Int) for Scala and ScalaJS 167 | 168 | Everyone loves strong type enums interoperability in play JSON. 169 | 170 | Just import: 171 | 172 | ```scala 173 | import macrame.enum 174 | import play.json.extra.Picklers._ 175 | ``` 176 | 177 | For a String Enum: 178 | 179 | ```scala 180 | val yellowStr = "YELLOW" 181 | @enum class Color { 182 | Red 183 | Blue("BLUE") 184 | Yellow(yellowStr) 185 | } 186 | object Color extends EnumStringJSON[Color] { 187 | def asString(c:Color) = asStringImpl(c) 188 | def fromString(s:String) = fromStringImpl(s) 189 | 190 | } 191 | 192 | @JsonFormat 193 | case class ColorData(color:Color) 194 | 195 | val ts = ColorData(Color.Red) 196 | val json = Json.toJson(ts) 197 | val expectedResult="""{"color":"Red"}""" 198 | assert(Json.stringify(json) === expectedResult) 199 | val ts2 = Json.parse(expectedResult).as[ColorData] 200 | assert(ts === ts2) 201 | ``` 202 | 203 | For an Int Enum: 204 | 205 | ```scala 206 | @enum class Color { 207 | Red 208 | Blue 209 | Yellow 210 | } 211 | object Color extends EnumIntJSON[Color] { 212 | def asInt(c:Color) = asIntImpl (c) 213 | def fromInt(i:Int) = fromIntImpl(i) 214 | } 215 | 216 | @JsonFormat 217 | case class ColorData(color:Color) 218 | val ts = ColorData(Color.Red) 219 | val json = Json.toJson(ts) 220 | val expectedResult="""{"color":0}""" 221 | assert(Json.stringify(json) === expectedResult) 222 | val ts2 = Json.parse(expectedResult).as[ColorData] 223 | assert(ts === ts2) 224 | ``` 225 | 226 | ## Thanks 227 | 228 | Thank you very much to: 229 | 230 | * Li Haoyi for [upickle](https://github.com/lihaoyi/upickle-pprint) and all his scalaJS stuff 231 | * Jan Christopher Vogt for [Play JSON Extensions](https://github.com/cvogt/play-json-extensions) 232 | * Chris Neveu for [Macramé](https://github.com/ChrisNeveu/macrame) 233 | * Julien Richard-Foy for [Play JSON Variants](https://github.com/julienrf/play-json-variants) 234 | * The ScalaJS team 235 | * The Play Framework team -------------------------------------------------------------------------------- /doc/SUMMARY.md: -------------------------------------------------------------------------------- 1 | # Summary 2 | 3 | * [Play Json Extension](playjsonext.md) 4 | * [Strong Typed Enum](macrame.md) 5 | * [Variant Types](variants.md) 6 | 
-------------------------------------------------------------------------------- /doc/book.json: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": ["ga"], 3 | "pluginsConfig": { 4 | "ga": { 5 | "token": "UA-67327828-1" 6 | } 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /doc/macrame.md: -------------------------------------------------------------------------------- 1 | # Macramé 2 | Macrame provides macro-base replacements for parts of the Scala standard library. 3 | 4 | > This is a fork of the awesome [Macramé](https://github.com/ChrisNeveu/macrame) using the other feature of play.json.extra 5 | > and working for both Scala and ScalaJS 6 | 7 | ## Getting Macramé 8 | If you're using SBT, add the following to your build file. 9 | ```scala 10 | libraryDependencies ++= Seq( 11 | "com.chrisneveu" %% "macrame" % "1.0.1", 12 | compilerPlugin("org.scalamacros" % "paradise" % "2.1.0-M5" cross CrossVersion.full)) 13 | ``` 14 | 15 | ## API Documentation 16 | Full API documentation is available [here](http://chrisneveu.github.io/macrame/doc/1.0.0/#package). 17 | 18 | ## Enumerations 19 | Macramé provides an `@enum` macro annotation to replace Scala's `Enumeration` class. Unlike `Enumeration`, `@enum` classes are fully type-safe and provide exhaustiveness checks. `@enum` provides a much larger interface than `Enumeration` but it allows the user to select which functions to expose. 20 | 21 | ### Getting Started 22 | To understand how to use the `@enum` macro, we'll look at a simple `Color` enumeration and see what it expands to. 23 | ```scala 24 | @enum class Color { 25 | Red 26 | Blue 27 | Yellow 28 | } 29 | // Expands to: 30 | sealed abstract class Color extends Product with Serializable 31 | object Color extends EnumApi[Color] { 32 | case object Red 33 | case object Blue 34 | case object Yellow 35 | 36 | } 37 | ``` 38 | 39 | ### Using EnumApi 40 | In order to reduce boilerplate, the `@enum` macro defines a number of convenient functions on your enumeration type. Auto-generated functions are great but they often increase your API in undesired ways, exposing conversions to/from `String` that would be better hidden inside more principled conversions. 41 | 42 | To resolve this tension, `@enum` provides the *implementations* for automatically generated functions as `protected` members of the companion object, leaving you to expose these functions or use them to implement other functions as you wish. These functions can be found in [EnumApi](http://chrisneveu.github.io/macrame/doc/1.0.0/#macrame.EnumApi) (select "Visibility: All"). 43 | ```scala 44 | @enum class Color { 45 | Red 46 | Blue 47 | Yellow 48 | } 49 | object Color { 50 | def asString(color : Color) = asStringImpl(color) 51 | // Replicating Enumeration's partial String→Enumeration.Value conversion. 52 | def withName(s : String) = fromStringImpl(s) 53 | .getOrElse(throw new NoSuchElementException(s"No value found for '$s'")) 54 | } 55 | ``` 56 | 57 | ### Providing Custom String Representations 58 | As with `Enumeration`, you can provide custom `String` representations of your enum cases. These can be either a string literal or an identifier pointing to a string. You can mix-and-match the automatically generated representations with manual ones. 
59 | ```scala 60 | @enum class Color { 61 | Red("RED") 62 | Blue("BLUE") 63 | Yellow("YELLOW") 64 | } 65 | object Color { 66 | def asString(color : Color) = asStringImpl(color) 67 | def fromString(s : String) = fromStringImpl(s) 68 | } 69 | ``` 70 | 71 | ## Regular Expressions 72 | Via the `r` string interpolator, Macramé provides compile-time checked regular expressions. Interpolated variables are correctly escaped. 73 | ```scala 74 | val separator = "-" 75 | val PhoneNumber = r"""\d{3}$separator\d{4}""" 76 | ``` 77 | ## Selecting Members 78 | Often the key to good boilerplate-elimination, Macramé provides two functions to select members of objects: `members` and `memberMap`. When used inside the selected object, be sure to use type ascriptions otherwise these functions will try to contain themselves. 79 | ```scala 80 | @enum class Color { 81 | Red 82 | Blue 83 | Yellow 84 | } 85 | object Color { 86 | val values : List[Color] = members[Color](this) 87 | } 88 | ``` 89 | ## Debugging 90 | The `trace` macro can be very useful when figuring out why a macro won't work. It outputs to the console during compiliation. The format looks like this: 91 | ```console 92 | [info] /home/chris/Programming/scala/macrame/README.scala:70: trace output 93 | [info] immutable.this.List.apply[Color](this.Red, this.Blue, this.Yellow) 94 | [info] for position: 95 | [info] val values : List[Color] = trace(members[Color](this)) 96 | [info] ^ 97 | ``` 98 | -------------------------------------------------------------------------------- /doc/playjsonext.md: -------------------------------------------------------------------------------- 1 | Play-Json extensions 2 | ========================== 3 | 4 | > This is a fork of the awesome [Play JSON Extensions](https://github.com/cvogt/play-json-extensions) using the other feature of play.json.extra 5 | > and working for both Scala and ScalaJS 6 | 7 | 8 | ### De-/Serialize case classes of arbitrary size (23+ fields allowed) 9 | 10 | case class Foo( 11 | _1:Int,_2:Int,_3:Int,_4:Int,_5:Int, 12 | _21:Int,_22:Int,_23:Int,_24:Int,_25:Int, 13 | _31:Int,_32:Int,_33:Int,_34:Int,_35:Int, 14 | _41:Int,_42:Int,_43:Int,_44:Int,_45:Int, 15 | _51:Int,_52:Int,_53:Int,_54:Int,_55:Int 16 | ) 17 | 18 | val foo = Foo(1,2,3,4,5,1,2,3,4,5,1,2,3,4,5,1,2,3,4,5,1,2,3,4,5) 19 | 20 | 21 | #### Create explicit formatter 22 | import play.json.extra.Jsonx 23 | implicit def jsonFormat = Jsonx.formatCaseClass[Foo] 24 | 25 | // if your case class uses Option make sure you import 26 | // one of the below implicit Option Reads to avoid 27 | // "could not find implicit value for parameter helper: play.json.extra.OptionValidationDispatcher" 28 | 29 | // note: formatCaseClass catches IllegalArgumentException and turns them into JsError enclosing the stack trace as the message 30 | // this allows using require(...) 
in class constructors and still get JsErrors out of serialization 31 | 32 | #### Then use ordinary play-json 33 | val json = Json.toJson( foo ) 34 | assert(foo == json.as[Foo]) 35 | 36 | #### De-/Serialize tuples 37 | import play.json.extra.tuples._ 38 | val json = Json.parse("""[1,1.0,"Test"]""") 39 | val res = Json.fromJson[(Int,Double,String)](json) 40 | assert(JsSuccess((1,1.0,"Test")) === res) 41 | 42 | #### De-/Serialize single value classes 43 | case class Foo(i: Int) 44 | val json = Json.parse("1") 45 | val res = Json.fromJson[Foo](json) 46 | assert(JsSuccess(Foo(1)) === res) 47 | 48 | ### Option for play-json 2.4 49 | 50 | #### implicit Option Reads 51 | import play.json.extra.implicits.optionWithNull // play 2.4 suggested behavior 52 | // or 53 | import play.json.extra.implicits.optionNoError // play 2.3 behavior 54 | 55 | #### automatic option validation: `validateAuto` 56 | val json = (Json.parse("""{}""") \ "s") 57 | json.validateAuto[Option[String]] == JsResult(None) // works as expected correctly 58 | 59 | // play-json built-ins 60 | json.validate[Option[String]] // JsError: "'s' is undefined on object: {}" 61 | json.validateOpt[String] == JsResult(None) // manual alternative (provided here, built-into play-json >= 2.4.2) 62 | 63 | #### automatic formatting of sealed traits, delegating to formatters of the subclasses 64 | #### formatSealed uses orElse of subclass Reads in random order, careful in case of ambiguities of field-class correspondances 65 | sealed trait SomeAdt 66 | case object A extends SomeAdt 67 | final case class X(i: Int, s: String) extends SomeAdt 68 | object X{ 69 | implicit def jsonFormat: Format[X] = Jsonx.formatCaseClass[X] 70 | } 71 | object SomeAdt{ 72 | import SingletonEncoder.simpleName // required for formatSingleton 73 | import play.json.extra.formatSingleton // required if trait has object children 74 | implicit def jsonFormat: Format[SomeAdt] = Jsonx.formatSealed[SomeAdt] 75 | } 76 | 77 | Json.parse("""A""").as[SomeAdt] == A 78 | Json.parse("""{"i": 5, "s":"foo", "type": "X"}""").as[SomeAdt] == X(5,"foo") 79 | 80 | ### experimental features (will change) 81 | #### Serialization nirvana - formatAuto FULLY automatic de-serializer (note: needs more optimized internal implementation) 82 | 83 | sealed trait SomeAdt 84 | case object A extends SomeAdt 85 | final case class X(i: Int, s: String) extends SomeAdt 86 | object Baz 87 | case class Bar(a: Int, b:Float, foo: Baz.type, o: Option[Int]) 88 | case class Foo(_1:Bar,_11:SomeAdt, _2:String,_3:Int,_4:Int,_5:Int,_21:Int,_22:Int,_23:Int,_24:Int,_25:Int,_31:Int,_32:Int,_33:Int,_34:Int,_35:Int,_41:Int,_42:Int,_43:Int,_44:Int,_45:Int,_51:Int,_52:Int,_53:Int,_54:Int,_55:Int) 89 | val foo = Foo(Bar(5,1.0f, Baz, Some(4): Option[Int]),A,"sdf",3,4,5,1,2,3,4,5,1,2,3,4,5,1,2,3,4,5,1,2,3,4,5) 90 | val foo2 = Foo(Bar(5,1.0f, Baz, None: Option[Int]),X(5,"x"),"sdf",3,4,5,1,2,3,4,5,1,2,3,4,5,1,2,3,4,5,1,2,3,4,5) 91 | 92 | import play.json.extra.implicits.optionWithNull 93 | val fmt2: Format[Foo] = Jsonx.formatAuto[Foo] // not implicit to avoid infinite recursion 94 | 95 | { 96 | implicit def fmt3: Format[Foo] = fmt2 97 | val json = Json.toJson( foo ) 98 | assert(foo === json.as[Foo]) 99 | val json2 = Json.toJson( foo2 ) 100 | assert(foo2 === json2.as[Foo]) 101 | } -------------------------------------------------------------------------------- /doc/variants.md: -------------------------------------------------------------------------------- 1 | # Play JSON Variants 2 | 3 | > This is a fork of the awesome [Play JSON 
Variants](https://github.com/julienrf/play-json-variants) using the other feature of play.json.extra 4 | > and working for both Scala and ScalaJS 5 | 6 | This artifact provides a function `Variants.format[A]` that takes the root type `A` of a class hierarchy as its parameter and generates a Play `Format[A]` JSON serializer/deserializer that supports all the subtypes of `A`. 7 | 8 | For instance, consider the following class hierarchy: 9 | 10 | ```scala 11 | sealed trait Foo 12 | case class Bar(x: Int) extends Foo 13 | case class Baz(s: String) extends Foo 14 | case class Bah(s: String) extends Foo 15 | ``` 16 | 17 | How do you write a `Reads[Foo]` JSON deserializer able to build the right variant of `Foo` given a JSON value? The naive approach would be to write the following: 18 | 19 | ```scala 20 | import play.api.libs.json._ 21 | import play.api.libs.functional.syntax._ 22 | 23 | implicit val fooReads: Reads[Foo] = (__ \ "x").read[Int].map[Foo](Bar) | 24 | (__ \ "s").read[String].map[Foo](Baz) | 25 | (__ \ "s").read[String].map[Foo](Bah) 26 | ``` 27 | 28 | However, this wouldn’t work because the deserializer is unable to distinguish between `Baz` and `Bah` values: 29 | 30 | ```scala 31 | val json = Json.obj("s" -> "hello") 32 | val foo = json.validate[Foo] // Is it a `Baz` or a `Bah`? 33 | println(foo) // "Success(Baz(hello))" 34 | ``` 35 | 36 | Any JSON value containing a `String` field `s` is always considered to be a `Baz` value by the deserializer (though it could be a `Bah`), just because the `Baz` deserializer is tried before the `Bah` one. 37 | 38 | In order to differentiate between all the `Foo` variants, we need to add a field to the JSON representation of `Foo` values: 39 | 40 | ```scala 41 | val bahJson = Json.obj("s" -> "hello", "$variant" -> "Bah") // This is a `Bah` 42 | val bazJson = Json.obj("s" -> "bye", "$variant" -> "Baz") // This is a `Baz` 43 | val barJson = Json.obj("x" -> 42, "$variant" -> "Bar") // And this is a `Bar` 44 | ``` 45 | 46 | The deserializer can then be written as follows: 47 | 48 | ```scala 49 | implicit val fooReads: Reads[Foo] = (__ \ "$variant").read[String].flatMap[Foo] { 50 | case "Bar" => (__ \ "x").read[Int].map(Bar) 51 | case "Baz" => (__ \ "s").read[String].map(Baz) 52 | case "Bah" => (__ \ "s").read[String].map(Bah) 53 | } 54 | ``` 55 | 56 | Usage: 57 | 58 | ```scala 59 | bahJson.validate[Foo] // Success(Bah("hello")) 60 | bazJson.validate[Foo] // Success(Baz("bye")) 61 | ``` 62 | 63 | The above text introduced a problem and its solution, but that solution is very cumbersome: you don’t want to write the JSON serializer and deserializer of your data type hierarchy by hand every time. 64 | 65 | The purpose of this project is to generate this serializer and deserializer for you.
Just write the following and you are done: 66 | 67 | ```scala 68 | import julienrf.variants.Variants 69 | 70 | implicit val format: Format[Foo] = Variants.format[Foo] 71 | ``` 72 | 73 | You can also just generate a `Reads` or a `Writes`: 74 | 75 | ```scala 76 | import julienrf.variants.Variants 77 | 78 | implicit val reads: Reads[Foo] = Variants.reads[Foo] 79 | implicit val writes: Writes[Foo] = Variants.writes[Foo] 80 | ``` 81 | 82 | By default the field used to discriminate the target object’s type is named `$variant` but you can define your own logic: 83 | 84 | ```scala 85 | implicit val format: Format[Foo] = Variants.format[Foo]((__ \ "type").read[String]) 86 | implicit val reads: Reads[Foo] = Variants.reads[Foo]((__ \ "type").read[String]) 87 | implicit val writes: Writes[Foo] = Variants.writes[Foo]((__ \ "type").read[String]) 88 | ``` 89 | 90 | Or, you can transform the value of the JSON field into a valid class name: 91 | 92 | ```scala 93 | implicit val reads: Reads[Foo] = Variants.reads[Foo]((__ \ "type").read[String].map(_.capitalize)) 94 | ``` 95 | 96 | # How Does It Work? 97 | 98 | The `Variants.format[Foo]` is a Scala macro that takes as parameter the root type of a class hierarchy and expands to code equivalent to the hand-written version: it adds a `$variant` field to the default JSON serializer, containing the name of the variant, and uses it to deserialize values to the correct type. 99 | 100 | # Known Limitations 101 | 102 | * For now the macro expects its type parameter to be the root **sealed trait** of a class hierarchy made of **case classes** or **case objects** ; 103 | * Recursive types are not supported ; 104 | * Polymorphic types are not supported ; 105 | * Due to initialization order, your class hierarchy **must** be fully defined **before** `Variants.format` is used. 106 | -------------------------------------------------------------------------------- /install_dev.sh: -------------------------------------------------------------------------------- 1 | npm install gitbook-plugin-ga 2 | 3 | -------------------------------------------------------------------------------- /play-json-extra/js/src/main/scala/play/json/extra/DateConstants.scala: -------------------------------------------------------------------------------- 1 | package play.json.extra 2 | 3 | /** 4 | * Created by alberto on 08/09/15. 
5 | */ 6 | object DateConstants { 7 | //val millis="YYYY-MM-DDTHH:mm:ss.SSS" 8 | val millis = "YYYY-MM-DDTHH:mm:ss.SSSZZ" 9 | 10 | } 11 | -------------------------------------------------------------------------------- /play-json-extra/js/src/test/scala/play/json/extra/LocalDatetimeTests.scala: -------------------------------------------------------------------------------- 1 | package play.json.extra 2 | 3 | import java.time.LocalDateTime 4 | 5 | import org.scalatest.FunSuite 6 | import play.api.libs.json.Json 7 | 8 | 9 | class LocalDatetimeTests extends FunSuite { 10 | final case class TestDataTime(dt:LocalDateTime) 11 | implicit val jsonFmt=Json.format[TestDataTime] 12 | 13 | test("basic LocalDateTime test") { 14 | val dt = LocalDateTime.of(2015, 8, 11, 12, 1, 2, 3) 15 | assert(dt.getYear === 2015) 16 | assert(dt.getMonthValue === 8) 17 | assert(dt.getDayOfMonth === 11) 18 | assert(dt.getHour === 12) 19 | assert(dt.getMinute === 1) 20 | assert(dt.getSecond === 2) 21 | } 22 | 23 | test("play json serialization/deserialization") { 24 | val dt = LocalDateTime.of(2015, 8, 11, 12, 1, 2, 3) 25 | val ts=TestDataTime(dt) 26 | val json = Json.stringify(Json.toJson(ts)) 27 | // println(json) 28 | val ts2=Json.parse(json).as[TestDataTime] 29 | // println(s"${ts.toString} ${ts2.toString}") 30 | // println(s"${ts.dt.toString()} ${ts2.dt.toString()}") 31 | 32 | // assert(ts === ts2) 33 | assert(ts2.dt.getYear === 2015) 34 | assert(ts2.dt.getMonthValue === 8) 35 | assert(ts2.dt.getDayOfMonth === 11) 36 | assert(ts2.dt.getHour === 12) 37 | assert(ts2.dt.getMinute === 1) 38 | assert(ts2.dt.getSecond === 2) 39 | } 40 | 41 | // test("boopickle json serialization/deserialization") { 42 | // import boopickle._ 43 | // import boopickle.Default._ 44 | // import org.joda.time.LocalDateTime._ 45 | // val dt = new LocalDateTime(2015, 8, 11, 12, 1, 2, 3) 46 | // val ts=TestDataTime(dt) 47 | // val bb = Pickle.intoBytes(ts) 48 | // assert(bb.array().length == 1400) 49 | 50 | // val json = Json.stringify(Json.toJson(ts)) 51 | // println(json) 52 | // val ts2=Json.parse(json).as[TestDataTime] 53 | // println(s"${ts.toString} ${ts2.toString}") 54 | // 55 | // // assert(ts === ts2) 56 | // assert(ts2.dt.year.get === 2015) 57 | // assert(ts2.dt.monthOfYear.get === 8) 58 | // assert(ts2.dt.dayOfMonth.get === 1) 59 | // assert(ts2.dt.hourOfDay.get === 12) 60 | // assert(ts2.dt.minuteOfHour.get === 1) 61 | // assert(ts2.dt.secondOfMinute.get === 2) 62 | // } 63 | 64 | 65 | } -------------------------------------------------------------------------------- /play-json-extra/js/src/test/scala/play/json/extra/MapManagementTest.scala: -------------------------------------------------------------------------------- 1 | package play.json.extra 2 | 3 | import org.scalatest.FunSuite 4 | import play.api.libs.json._ 5 | 6 | final case class IndexSettings(version: Map[String, String]=Map.empty[String,String]) 7 | 8 | 9 | final case class ClusterIndex(state: Option[String]=None, aliases: List[String]=Nil) 10 | final case class Metadata(templates: Option[JsObject]=None, indices: Map[String, ClusterIndex]= Map.empty[String, ClusterIndex], 11 | repositories: Option[JsObject]=None) 12 | 13 | class MapManagementTest extends FunSuite { 14 | test("json default map serialize/deserialize") { 15 | import play.json.extra.implicits.optionWithNull 16 | 17 | implicit def fmt1 = Jsonx.formatCaseClassUseDefaults[IndexSettings] 18 | val t1=IndexSettings() 19 | 20 | assert(Json.stringify(Json.toJson(t1)) === """{"version":{}}""") 21 | 22 | assert(t1 === 
Json.parse("{}").as[IndexSettings]) 23 | 24 | implicit def ciJson = Jsonx.formatCaseClassUseDefaults[ClusterIndex] 25 | 26 | implicit def fmt2 = Jsonx.formatCaseClassUseDefaults[Metadata] 27 | val m1=Metadata() 28 | 29 | assert(Json.stringify(Json.toJson(m1)) === """{"indices":{}}""") 30 | 31 | assert(m1 === Json.parse("{}").as[Metadata]) 32 | 33 | } 34 | 35 | 36 | } 37 | -------------------------------------------------------------------------------- /play-json-extra/jvm/src/main/scala/play/json/extra/DateConstants.scala: -------------------------------------------------------------------------------- 1 | package play.json.extra 2 | 3 | object DateConstants { 4 | val millis = "yyyy-MM-dd'T'HH:mm:ss.SSSZZ" 5 | } 6 | -------------------------------------------------------------------------------- /play-json-extra/jvm/src/test/scala/play/json/extra/DefaultValueTest.scala: -------------------------------------------------------------------------------- 1 | package play.json.extra 2 | 3 | import org.scalatest.FunSuite 4 | import play.api.libs.json.{JsSuccess, Json} 5 | 6 | final case class DefaultValueTestClass(@key("ok-field") field:String="p", lp:List[String]=Nil, 7 | l1:Option[Option[Int]]) 8 | 9 | class DefaultValueTest extends FunSuite { 10 | test("json default serialize/deserialize") { 11 | import play.json.extra.implicits.optionWithNull 12 | 13 | implicit def fmt1 = Jsonx.formatCaseClassUseDefaults[DefaultValueTestClass] 14 | val t1=DefaultValueTestClass(lp=Nil, l1=None) 15 | 16 | // println(Json.stringify(Json.toJson(t1))) 17 | assert(Json.stringify(Json.toJson(t1)) === """{"ok-field":"p","lp":[]}""") 18 | 19 | assert(t1 === Json.parse("{}").as[DefaultValueTestClass]) 20 | 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /play-json-extra/jvm/src/test/scala/play/json/extra/JsonTests.scala: -------------------------------------------------------------------------------- 1 | package play.json.extra 2 | 3 | import org.scalatest.FunSuite 4 | import play.api.libs.json._ 5 | 6 | class JsonTests extends FunSuite{ 7 | test("json optionWithNull"){ 8 | object JsonTestClasses extends JsonTestClasses{ 9 | implicit def option[A](implicit reads: Reads[A]): Reads[Option[A]] = implicits.optionWithNull[A] 10 | } 11 | import JsonTestClasses._ 12 | 13 | assert((Json.parse("""{}""") \ "s").validate[Option[String]].isInstanceOf[JsError]) 14 | assert(Some("foo") === (Json.parse("""{"s": "foo"}""") \ "s").validate[Option[String]].get) 15 | assert(None === (Json.parse("""{}""") \ "s").validateOpt[String].get) 16 | assert(Some("foo") === (Json.parse("""{"s": "foo"}""") \ "s").validateOpt[String].get) 17 | assert(None === (Json.parse("""{}""") \ "s").validateAuto[Option[String]].get) 18 | assert(Some("foo") === (Json.parse("""{"s": "foo"}""") \ "s").validateAuto[Option[String]].get) 19 | 20 | assert(Json.fromJson[Option[String]](Json.parse("""5""")).isInstanceOf[JsError]) 21 | assert(Json.fromJson[Option[String]](Json.parse("""{}""")).isInstanceOf[JsError]) 22 | 23 | assert(Json.fromJson[B](Json.parse("""{"s": {}}""")).isInstanceOf[JsError]) 24 | 25 | assert(A("foo") === Json.fromJson[A](Json.parse("""{"s": "foo"}""")).get) 26 | assert(B(Some("foo")) === Json.fromJson[B](Json.parse("""{"s": "foo"}""")).get) 27 | assert(B(None) === Json.fromJson[B](Json.parse("""{"s": null}""")).get) 28 | assert(B(None) === Json.fromJson[B](Json.parse("""{}""")).get) 29 | assert(B(None) === Json.fromJson[B](Json.parse("""5""")).get) 30 | assert(B(None) === 
Json.fromJson[B](Json.parse("""null""")).get) 31 | 32 | assert(Json.fromJson[B](Json.parse("""{"s": {}}""")).isInstanceOf[JsError]) 33 | assert(A2("foo") === Json.fromJson[A2](Json.parse("""{"s": "foo"}""")).get) 34 | assert(B2(Some("foo")) === Json.fromJson[B2](Json.parse("""{"s": "foo"}""")).get) 35 | assert(B2(None) === Json.fromJson[B2](Json.parse("""{"s": null}""")).get) 36 | assert(B2(None) === Json.fromJson[B2](Json.parse("""{}""")).get) 37 | // assert(B2(None) === Json.fromJson[B2](Json.parse("""null""")).get) 38 | // assert(B2(None) === Json.fromJson[B2](Json.parse("""5""")).get) 39 | 40 | assert(Optional(None) === Json.fromJson[Optional](Json.parse("""{}""")).get) 41 | assert(Optional(Some(Mandatory(List("test")))) === Json.fromJson[Optional](Json.parse("""{"o":{"s":["test"]}}""")).get) 42 | assert(Json.parse("""{"o":{}}""").validate[Optional].isInstanceOf[JsError]) 43 | 44 | assert(Optional2(None) === Json.fromJson[Optional2](Json.parse("""{}""")).get) 45 | assert(Optional2(Some(Mandatory2(List("test")))) === Json.fromJson[Optional2](Json.parse("""{"o":{"s":["test"]}}""")).get) 46 | assert(Json.parse("""{"o":{}}""").validate[Optional2].isInstanceOf[JsError]) 47 | 48 | assert(ClassOuter(Nil) === Json.fromJson[ClassOuter](Json.parse("""{"outer": []}""")).get) 49 | assert(ClassOuter2(Nil) === Json.fromJson[ClassOuter2](Json.parse("""{"outer": []}""")).get) 50 | } 51 | 52 | test("json optionNoError"){ 53 | object JsonTestClasses extends JsonTestClasses{ 54 | implicit def option[A](implicit reads: Reads[A]): Reads[Option[A]] = implicits.optionNoError[A] 55 | } 56 | import JsonTestClasses._ 57 | 58 | assert((Json.parse("""{}""") \ "s").validate[Option[String]].isInstanceOf[JsError]) 59 | assert(Some("foo") === (Json.parse("""{"s": "foo"}""") \ "s").validate[Option[String]].get) 60 | assert(None === (Json.parse("""{}""") \ "s").validateOpt[String].get) 61 | assert(Some("foo") === (Json.parse("""{"s": "foo"}""") \ "s").validateOpt[String].get) 62 | assert(None === (Json.parse("""{}""") \ "s").validateAuto[Option[String]].get) 63 | assert(Some("foo") === (Json.parse("""{"s": "foo"}""") \ "s").validateAuto[Option[String]].get) 64 | 65 | assert(None === Json.fromJson[Option[String]](Json.parse("""5""")).get) 66 | assert(None === Json.fromJson[Option[String]](Json.parse("""{}""")).get) 67 | 68 | assert(Json.fromJson[B](Json.parse("""{"s": {}}""")).isInstanceOf[JsError]) 69 | assert(A("foo") === Json.fromJson[A](Json.parse("""{"s": "foo"}""")).get) 70 | assert(B(Some("foo")) === Json.fromJson[B](Json.parse("""{"s": "foo"}""")).get) 71 | assert(B(None) === Json.fromJson[B](Json.parse("""{"s": null}""")).get) 72 | assert(B(None) === Json.fromJson[B](Json.parse("""{}""")).get) 73 | assert(B(None) === Json.fromJson[B](Json.parse("""5""")).get) 74 | assert(B(None) === Json.fromJson[B](Json.parse("""null""")).get) 75 | 76 | assert(Json.fromJson[B2](Json.parse("""{"s": {}}""")).isInstanceOf[JsError]) 77 | assert(A2("foo") === Json.fromJson[A2](Json.parse("""{"s": "foo"}""")).get) 78 | assert(B2(Some("foo")) === Json.fromJson[B2](Json.parse("""{"s": "foo"}""")).get) 79 | assert(B2(None) === Json.fromJson[B2](Json.parse("""{"s": null}""")).get) 80 | assert(B2(None) === Json.fromJson[B2](Json.parse("""{}""")).get) 81 | // assert(B2(None) === Json.fromJson[B2](Json.parse("""5""")).get) 82 | // assert(B2(None) === Json.fromJson[B2](Json.parse("""null""")).get) 83 | 84 | assert(Optional(None) === Json.fromJson[Optional](Json.parse("""{}""")).get) 85 | assert(Optional(Some(Mandatory(List("test")))) === 
Json.fromJson[Optional](Json.parse("""{"o":{"s":["test"]}}""")).get) 86 | assert(Json.fromJson[Optional](Json.parse("""{"o":{}}""")).isInstanceOf[JsError]) 87 | 88 | assert(Optional2(None) === Json.fromJson[Optional2](Json.parse("""{}""")).get) 89 | assert(Optional2(Some(Mandatory2(List("test")))) === Json.fromJson[Optional2](Json.parse("""{"o":{"s":["test"]}}""")).get) 90 | assert(Json.parse("""{"o":{}}""").validate[Optional2].isInstanceOf[JsError]) 91 | 92 | assert(ClassOuter(Nil) === Json.fromJson[ClassOuter](Json.parse("""{"outer": []}""")).get) 93 | assert(ClassOuter2(Nil) === Json.fromJson[ClassOuter2](Json.parse("""{"outer": []}""")).get) 94 | } 95 | 96 | test("test formatInline"){ 97 | case class Foo(i: Int) 98 | implicit def fmt = Jsonx.formatInline[Foo] 99 | val f = Foo(1) 100 | assert(f === Json.parse("1").validate[Foo].get) 101 | assert(f === Json.toJson(f).validate[Foo].get) 102 | 103 | implicit def fmt2 = Jsonx.formatInline[Bar] 104 | val b = new Bar(1) 105 | assert(b === Json.parse("1").validate[Bar].get) 106 | assert(b === Json.toJson(b).validate[Bar].get) 107 | } 108 | case class DontInline(a: Int) 109 | object DontInline{ 110 | implicit def format = Jsonx.formatCaseClass[DontInline] 111 | } 112 | case class Inline(a: Int) 113 | test("formatAuto"){ 114 | sealed trait SomeAdt 115 | case object A extends SomeAdt 116 | case class X(i: Int, s: String/*, recursion: SomeAdt*/) extends SomeAdt 117 | object Baz 118 | case class Bar(a: Int, b:Float, foo: Baz.type, o: Option[Int]) 119 | case class Foo(_1:Bar,_11:SomeAdt, _2:String,_3:Int,_4:Int,_5:Int,_21:Int,_22:Int,_23:Int,_24:Int,_25:Int,_31:Int,_32:Int,_33:Int,_34:Int,_35:Int,_41:Int,_42:Int,_43:Int,_44:Int,_45:Int,_51:Int,_52:Int,_53:Int,_54:Int,_55:Int) 120 | val foo = Foo(Bar(5,1.0f, Baz, Some(4): Option[Int]),A,"sdf",3,4,5,1,2,3,4,5,1,2,3,4,5,1,2,3,4,5,1,2,3,4,5) 121 | val foo2 = Foo(Bar(5,1.0f, Baz, None: Option[Int]),X(5,"x"/*,X(4,"z",A)*/),"sdf",3,4,5,1,2,3,4,5,1,2,3,4,5,1,2,3,4,5,1,2,3,4,5) 122 | 123 | import play.json.extra.implicits.optionWithNull 124 | ;{ 125 | val fmt: Format[SomeAdt] = Jsonx.formatAuto[SomeAdt] 126 | };{ 127 | val fmt: Format[Option[SomeAdt]] = Jsonx.formatAuto[Option[SomeAdt]] 128 | };{ 129 | val fmt: Format[A.type] = Jsonx.formatAuto[A.type] 130 | };{ 131 | val fmt: Format[Option[A.type]] = Jsonx.formatAuto[Option[A.type]] 132 | };{ 133 | val fmt: Format[X] = Jsonx.formatAuto[X] 134 | };{ 135 | val fmt: Format[Option[X]] = Jsonx.formatAuto[Option[X]] 136 | };{ 137 | val fmt: Format[Baz.type] = Jsonx.formatAuto[Baz.type] 138 | };{ 139 | val fmt: Format[Option[Baz.type]] = Jsonx.formatAuto[Option[Baz.type]] 140 | };{ 141 | val fmt: Format[Bar] = Jsonx.formatAuto[Bar] 142 | };{ 143 | val fmt: Format[Option[Bar]] = Jsonx.formatAuto[Option[Bar]] 144 | };{ 145 | val fmt: Format[Int] = Jsonx.formatAuto[Int] 146 | };{ 147 | val fmt: Format[Option[Int]] = Jsonx.formatAuto[Option[Int]] 148 | };{ 149 | val fmt: Format[Foo] = Jsonx.formatAuto[Foo] 150 | };{ 151 | val fmt: Format[Option[Foo]] = Jsonx.formatAuto[Option[Foo]] 152 | } 153 | 154 | val fmt2: Format[Foo] = Jsonx.formatAuto[Foo] // not implicit to avoid infinite recursion 155 | 156 | { 157 | implicit def fmt3: Format[Foo] = fmt2 158 | val json = Json.toJson( foo ) 159 | assert(foo === json.as[Foo]) 160 | assert(Some(foo) === json.validateAuto[Option[Foo]].get) 161 | val json2 = Json.toJson( foo2 ) 162 | assert(foo2 === json2.as[Foo]) 163 | } 164 | 165 | def fmt3: Format[DontInline] = Jsonx.formatAuto[DontInline] 166 | def fmt4: Format[Inline] = 
Jsonx.formatAuto[Inline] 167 | assert("5" === Json.toJson( Inline(5) )(fmt4).toString) 168 | assert("""{"a":5}""" === Json.toJson( DontInline(5) )(fmt3).toString) 169 | 170 | } 171 | case class CaseClassWithDefaults(foobar: Int = 5) 172 | test("defaults error test"){ 173 | 174 | implicit val childFormat = Jsonx.formatCaseClassUseDefaults[CaseClassWithDefaults] 175 | 176 | val string = """{ "foobar" : 10 } """ 177 | val string2 = """{ "foobar": "test"} """ 178 | val string3 = """{} """ 179 | val json = Json.parse( string ) 180 | val json2 = Json.parse( string2 ) 181 | val json3 = Json.parse( string3 ) 182 | assert( json.validateAuto[CaseClassWithDefaults] === JsSuccess(CaseClassWithDefaults(10)) ) 183 | assert( json2.validateAuto[CaseClassWithDefaults].isInstanceOf[JsError] ) 184 | assert( json3.validateAuto[CaseClassWithDefaults] === JsSuccess(CaseClassWithDefaults(5)) ) 185 | } 186 | 187 | FailureTest // needed to initialize object 188 | } -------------------------------------------------------------------------------- /play-json-extra/jvm/src/test/scala/play/json/extra/KeyChangeTest.scala: -------------------------------------------------------------------------------- 1 | package play.json.extra 2 | 3 | import org.scalatest.FunSuite 4 | import play.api.libs.json.{JsSuccess, JsError, Json, Reads} 5 | 6 | final case class KeyTestClass(@key("ok-field") field:String="p") 7 | 8 | class KeyChangeTest extends FunSuite { 9 | test("json key serialize/deserialize") { 10 | 11 | implicit def fmt1 = Jsonx.formatCaseClassUseDefaults[KeyTestClass] 12 | val t1=KeyTestClass("t1") 13 | 14 | assert(Json.stringify(Json.toJson(t1)) === """{"ok-field":"t1"}""") 15 | 16 | assert(JsSuccess(t1) === Json.fromJson[KeyTestClass](Json.toJson(t1))) 17 | 18 | } 19 | 20 | 21 | } 22 | -------------------------------------------------------------------------------- /play-json-extra/jvm/src/test/scala/play/json/extra/MapManagementTest.scala: -------------------------------------------------------------------------------- 1 | package play.json.extra 2 | 3 | import org.scalatest.FunSuite 4 | import play.api.libs.json.{JsSuccess, JsError, Json, Reads} 5 | 6 | final case class IndexSettings(version: Map[String, String]=Map.empty[String,String]) 7 | 8 | 9 | class MapManagementTest extends FunSuite { 10 | test("json default map serialize/deserialize") { 11 | import play.json.extra.implicits.optionWithNull 12 | 13 | implicit def fmt1 = Jsonx.formatCaseClassUseDefaults[IndexSettings] 14 | val t1=IndexSettings() 15 | 16 | assert(Json.stringify(Json.toJson(t1)) === """{"version":{}}""") 17 | 18 | assert(t1 === Json.parse("{}").as[IndexSettings]) 19 | 20 | } 21 | 22 | 23 | } 24 | -------------------------------------------------------------------------------- /play-json-extra/jvm/src/test/scala/play/json/extra/NoImport.scala: -------------------------------------------------------------------------------- 1 | // test file without imports to avoid regressions with missing imports in macros 2 | sealed trait Modifier 3 | case object early extends Modifier 4 | case object mid extends Modifier 5 | case object late extends Modifier 6 | final case class Foo(i: Int) 7 | object a{ 8 | import play.json.extra.SingletonEncoder.simpleName 9 | import play.json.extra.implicits.formatSingleton 10 | implicit def jsonFormat = play.json.extra.Jsonx.formatSealed[Modifier] 11 | implicit def jsonFormat2 = play.json.extra.Jsonx.formatCaseClass[Foo] 12 | implicit def jsonFormat3 = play.json.extra.Jsonx.formatInline[Foo] 13 | } 14 | object b{ 15 | implicit 
def jsonFormat4 = play.json.extra.Jsonx.formatAuto[Foo] 16 | } 17 | -------------------------------------------------------------------------------- /play-json-extra/jvm/src/test/scala/play/json/extra/PlayJsonExtensionsTest.scala: -------------------------------------------------------------------------------- 1 | package play.json.extra 2 | 3 | import org.scalatest.FunSuite 4 | 5 | import play.api.libs.json._ 6 | 7 | case class RecursiveClass(o: Option[RecursiveClass], s:String) 8 | object RecursiveClass{ 9 | import implicits.optionWithNull 10 | implicit def jsonFormat: Format[RecursiveClass] = Jsonx.formatCaseClass[RecursiveClass] 11 | } 12 | sealed trait RecursiveAdt 13 | case class RecursiveChild(o: Option[RecursiveAdt], s:String) extends RecursiveAdt 14 | object RecursiveFormat{ 15 | import implicits.optionWithNull 16 | implicit def jsonFormat: Format[RecursiveAdt] = Jsonx.formatSealed[RecursiveAdt] 17 | implicit def jsonFormat2: Format[RecursiveChild] = Jsonx.formatCaseClass[RecursiveChild] 18 | } 19 | object Adt{ 20 | sealed trait SomeAdt 21 | case object ChoiceA extends SomeAdt 22 | case object ChoiceB extends SomeAdt 23 | case object `Choice.C` extends SomeAdt 24 | case class X(i: Int, s: String) extends SomeAdt 25 | object X{ 26 | implicit def jsonFormat = Jsonx.formatCaseClass[X] 27 | } 28 | case class Y(i: Int, s: String) extends SomeAdt 29 | object Y{ 30 | implicit def jsonFormat = Jsonx.formatCaseClass[Y] 31 | def apply = "making sure overloaded apply doesn't break" 32 | } 33 | } 34 | object AdtWithEmptyLeafs{ 35 | sealed trait SomeAdt 36 | case class A() extends SomeAdt 37 | object A{ 38 | implicit def jsonFormat = Jsonx.formatCaseClass[A] 39 | } 40 | case class B() extends SomeAdt 41 | object B{ 42 | implicit def jsonFormat = Jsonx.formatCaseClass[B] 43 | } 44 | } 45 | 46 | sealed trait SealedTrait 47 | case class CaseClassChild(i: Int) extends SealedTrait 48 | 49 | object FailureTest{ 50 | import implicits.optionWithNull 51 | import org.scalatest.Assertions._ 52 | type AbstractType 53 | implicit val childFormat = Jsonx.formatCaseClass[CaseClassChild] 54 | Jsonx.formatSealed[SealedTrait] 55 | assertTypeError("Jsonx.formatSealed[Foo#X]") 56 | } 57 | 58 | sealed trait OP 59 | case class Ua(i: Int) extends OP 60 | case class Unknown(json: JsValue) extends OP 61 | case class Uzzzzzzz(s: String) extends OP 62 | 63 | class PlayJsonExtensionsTest extends FunSuite{ 64 | import implicits.optionWithNull 65 | test("de/serialize case class > 22"){ 66 | case class Bar(a: Int, b:Float) 67 | case class Foo(_1:Bar,_2:String,_3:Int,_4:Int,_5:Int,_21:Int,_22:Int,_23:Int,_24:Int,_25:Int,_31:Int,_32:Int,_33:Int,_34:Int,_35:Int,_41:Int,_42:Int,_43:Int,_44:Int,_45:Int,_51:Int,_52:Int,_53:Int,_54:Int,_55:Int) 68 | val foo = Foo(Bar(5,1.0f),"sdf",3,4,5,1,2,3,4,5,1,2,3,4,5,1,2,3,4,5,1,2,3,4,5) 69 | implicit def fmt1 = Jsonx.formatCaseClass[Bar] 70 | implicit def fmt2 = Jsonx.formatCaseClass[Foo] 71 | val json = Json.toJson( foo ) 72 | assert(foo === json.as[Foo]) 73 | } 74 | test("de/serialize empty case class"){ 75 | case class Bar() 76 | implicit def fmt1 = Jsonx.formatCaseClass[Bar] 77 | val bar = Bar() 78 | val json = Json.toJson( bar ) 79 | assert(bar === json.as[Bar]) 80 | } 81 | case class BarWithDefault(s: String, i: Int = 6) 82 | test("de/serialize case class default value"){ 83 | implicit def fmt1 = Jsonx.formatCaseClassUseDefaults[BarWithDefault] 84 | assert(BarWithDefault("asd",6) === Json.parse("""{"s":"asd"}""").validate[BarWithDefault].get) 85 | } 86 | test("don't de/serialize case 
class default value by default"){ 87 | implicit def fmt1 = Jsonx.formatCaseClass[BarWithDefault] 88 | assert(Json.parse("""{"s":"asd"}""").validate[BarWithDefault].isInstanceOf[JsError]) 89 | } 90 | test("formatCaseClass with explicit return type"){ 91 | case class Bar() 92 | implicit def fmt1: Format[Bar] = Jsonx.formatCaseClass[Bar] 93 | val bar = Bar() 94 | val json = Json.toJson( bar ) 95 | assert(bar === json.as[Bar]) 96 | } 97 | test("serializing None skips fields"){ 98 | // note, using null for a Scala String doesn't work with play Json 99 | case class Bar(a: Option[String], b: String, d: Option[String]) 100 | val bar = Bar(None,"foo",Some("foo")) 101 | implicit def fmt1 = Jsonx.formatCaseClass[Bar] 102 | val json = Json.parse(Json.stringify( // <- otherwise c = JsString(null), not JsNull 103 | Json.toJson(bar) 104 | )) 105 | assert(bar === json.validate[Bar].get) 106 | assert( 107 | Set("b"->JsString("foo"), "d"->JsString("foo")) 108 | === json.as[JsObject].fields.toSet 109 | ) 110 | } 111 | test("require to JsError"){ 112 | // note, using null for a Scala String doesn't work with play Json 113 | case class Bar(a: Int){ 114 | require(a > 5, "a needs to be larger than 5") 115 | } 116 | case class Baz(bar: Bar) 117 | implicit def fmt1 = Jsonx.formatCaseClass[Bar] 118 | implicit def fmt2 = Jsonx.formatCaseClass[Baz] 119 | assert(Baz(Bar(6)) === Json.parse("""{"bar":{"a":6}}""").validate[Baz].get) 120 | val capturedFailedRequire = Json.parse("""{"bar":{"a":5}}""").validate[Baz] 121 | assert( 122 | capturedFailedRequire.asInstanceOf[JsError].errors.head._2.head.message contains "requirement failed: a needs to be larger than 5" 123 | ) 124 | assert( 125 | capturedFailedRequire.asInstanceOf[JsError].errors.head._1.toString === "/bar" 126 | ) 127 | } 128 | test("serialize Adt"){ 129 | import Adt._ 130 | implicit def simpleName = SingletonEncoder.simpleName 131 | import implicits.formatSingleton 132 | implicit val jsonFormat = Jsonx.formatSealed[SomeAdt] 133 | val a: SomeAdt = ChoiceA 134 | val b: SomeAdt = ChoiceB 135 | val c: SomeAdt = `Choice.C` 136 | val x = X(99,"Chris") 137 | val y = Y(99,"Chris") 138 | assert("ChoiceA" === Json.toJson(ChoiceA).as[JsString].value) 139 | assert("ChoiceB" === Json.toJson(ChoiceB).as[JsString].value) 140 | assert("Choice.C" === Json.toJson(`Choice.C`).as[JsString].value) 141 | assert("ChoiceA" === Json.toJson(a).as[JsString].value) 142 | assert("ChoiceB" === Json.toJson(b).as[JsString].value) 143 | assert("Choice.C" === Json.toJson(c).as[JsString].value) 144 | 145 | assert(x !== y) 146 | assert(ChoiceA === Json.fromJson[SomeAdt](Json.toJson(ChoiceA)).get) 147 | assert(ChoiceB === Json.fromJson[SomeAdt](Json.toJson(ChoiceB)).get) 148 | assert(`Choice.C` === Json.fromJson[SomeAdt](Json.toJson(`Choice.C`)).get) 149 | 150 | /* disabling tests for ambiguity, not supported at the moment 151 | assert(x === Json.fromJson[SomeAdt](Json.toJson[SomeAdt](x)).get) 152 | assert(y === Json.fromJson[SomeAdt](Json.toJson[SomeAdt](y)).get) 153 | assert(x === Json.fromJson[SomeAdt](Json.toJson(x)).get) 154 | assert(y === Json.fromJson[SomeAdt](Json.toJson(y)).get) 155 | */ 156 | } 157 | test("serialize Adt with empty leafs"){ 158 | import AdtWithEmptyLeafs._ 159 | implicit val jsonFormat = Jsonx.formatSealed[SomeAdt] 160 | val x = A() 161 | val y = B() 162 | /* disabling tests for ambiguity, not supported at the moment 163 | assert(x === Json.fromJson[SomeAdt](Json.toJson[SomeAdt](x)).get) 164 | assert(y === Json.fromJson[SomeAdt](Json.toJson[SomeAdt](y)).get) 165 | 
assert(x === Json.fromJson[SomeAdt](Json.toJson(x)).get) 166 | assert(y === Json.fromJson[SomeAdt](Json.toJson(y)).get) 167 | */ 168 | } 169 | test("serialize Adt with fallback"){ 170 | implicit val OPFormat: Format[OP] = { 171 | implicit val UaFormat: Format[Ua] = Jsonx.formatCaseClass[Ua] 172 | implicit val UnknownFormat: Format[Unknown] = Jsonx.formatInline[Unknown] 173 | implicit val UzzzzzzzFormat: Format[Uzzzzzzz] = Jsonx.formatCaseClass[Uzzzzzzz] 174 | Jsonx.formatSealedWithFallback[OP,Unknown] 175 | } 176 | assert(JsSuccess(Ua(5)) === Json.fromJson[OP](Json.parse(""" {"i":5} """))) 177 | assert(JsSuccess(Uzzzzzzz("x")) === Json.fromJson[OP](Json.parse(""" {"s":"x"} """))) 178 | val json = """{"foo": "asdf"}""" 179 | assert(JsSuccess(Unknown(Json.parse(json))) === Json.fromJson[OP](Json.parse(json))) 180 | } 181 | test("serialize recursive class"){ 182 | val x = RecursiveClass(Some(RecursiveClass(Some(RecursiveClass(None,"c")),"b")),"a") 183 | val json = Json.toJson[RecursiveClass](x)(implicitly[Format[RecursiveClass]]) 184 | val res = Json.fromJson[RecursiveClass](json)(implicitly[Format[RecursiveClass]]) 185 | assert(x === res.get) 186 | } 187 | test("serialize recursive child"){ 188 | import RecursiveFormat._ 189 | val x = RecursiveChild(Some(RecursiveChild(Some(RecursiveChild(None,"c")),"b")),"a") 190 | val json = Json.toJson[RecursiveChild](x)(implicitly[Format[RecursiveChild]]) 191 | val res = Json.fromJson[RecursiveChild](json)(implicitly[Format[RecursiveChild]]) 192 | assert(x === res.get) 193 | } 194 | test("serialize recursive Adt"){ 195 | import RecursiveFormat._ 196 | val x = RecursiveChild(Some(RecursiveChild(Some(RecursiveChild(None,"c")),"b")),"a") 197 | val json = Json.toJson[RecursiveAdt](x)(implicitly[Format[RecursiveAdt]]) 198 | val res = Json.fromJson[RecursiveAdt](json)(implicitly[Format[RecursiveAdt]]) 199 | assert(x === res.get) 200 | } 201 | test("deserialize case class error messages"){ 202 | val json = Json.parse("""{"i":"test"}""") 203 | val res = Json.fromJson[Adt.X](json) 204 | res match { 205 | case JsError(_errors) => 206 | val errors = _errors.map{case (k,v) => (k.toString,v)}.toMap 207 | assert( 208 | 2 === _errors.size 209 | ) 210 | assert( 211 | "error.expected.jsnumber" === errors("/i").head.message 212 | ) 213 | assert( 214 | "error.path.missing" === errors("/s").head.message 215 | ) 216 | case _ => assert(false) 217 | } 218 | } 219 | test("deserialize tuple"){ 220 | val json = Json.parse("""[1,1.0,"Test"]""") 221 | val res = Json.fromJson[(Int,Double,String)](json) 222 | assert((1,1.0,"Test") === res.get) 223 | assert((1,1.0,"Test") === Json.toJson(res.get).validate[(Int,Double,String)].get) 224 | } 225 | test("deserialize tuple wrong size"){ 226 | case class Foo(bar: (Int,Double,String)) 227 | implicit def jsonFoo = Jsonx.formatCaseClass[Foo] 228 | val json = Json.parse("""{"bar": [1,1.1]}""") 229 | val res = Json.fromJson[Foo](json) 230 | res match { 231 | case JsError(_errors) => 232 | val errors = _errors.map{case (k,v) => (k.toString,v)}.toMap 233 | assert( 234 | "Expected array of 3 elements" === errors("/bar").head.message 235 | ) 236 | case _ => assert(false) 237 | } 238 | } 239 | } 240 | 241 | abstract class JsonTestClasses{ 242 | implicit def option[A](implicit reads: Reads[A]): Reads[Option[A]] 243 | case class A(s: String) 244 | object A{ implicit def jsonFormat = Jsonx.formatCaseClass[A] } 245 | case class B(s: Option[String]) 246 | object B{ implicit def jsonFormat = Jsonx.formatCaseClass[B] } 247 | case class C(i: Int, b: 
Option[B]) 248 | object C{ implicit def jsonFormat = Jsonx.formatCaseClass[C] } 249 | case class A2(s: String) 250 | object A2{ implicit def jsonFormat = Json.format[A2] } 251 | case class B2(s: Option[String]) 252 | object B2{ implicit def jsonFormat = Json.format[B2] } 253 | case class C2(i: Int, b: Option[B2]) 254 | object C2{ implicit def jsonFormat = Json.format[C2] } 255 | 256 | case class Mandatory(s: List[String]) 257 | object Mandatory{ implicit def jsonFormat = Jsonx.formatCaseClass[Mandatory] } 258 | case class Optional(o: Option[Mandatory]) 259 | object Optional{ implicit def jsonFormat = Jsonx.formatCaseClass[Optional] } 260 | 261 | case class Mandatory2(s: List[String]) 262 | object Mandatory2{ implicit def jsonFormat = Jsonx.formatCaseClass[Mandatory2] } 263 | case class Optional2(o: Option[Mandatory2]) 264 | object Optional2{ implicit def jsonFormat = Jsonx.formatCaseClass[Optional2] } 265 | 266 | case class ListInner(string: String) 267 | object ListInner{ implicit def jsonFormat = Jsonx.formatCaseClass[ListInner] } 268 | case class ListOuter(inner: List[ListInner]) 269 | object ListOuter{ implicit def jsonFormat = Jsonx.formatCaseClass[ListOuter] } 270 | case class ClassOuter(outer: List[ListOuter]) 271 | object ClassOuter{ implicit def jsonFormat = Jsonx.formatCaseClass[ClassOuter] } 272 | 273 | case class ListInner2(string: String) 274 | object ListInner2{ implicit def jsonFormat = Jsonx.formatCaseClass[ListInner2] } 275 | case class ListOuter2(inner: List[ListInner2]) 276 | object ListOuter2{ implicit def jsonFormat = Jsonx.formatCaseClass[ListOuter2] } 277 | case class ClassOuter2(outer: List[ListOuter2]) 278 | object ClassOuter2{ implicit def jsonFormat = Jsonx.formatCaseClass[ClassOuter2] } 279 | } 280 | 281 | class Bar(val i: Int) extends AnyVal 282 | -------------------------------------------------------------------------------- /play-json-extra/jvm/src/test/scala/play/json/extra/VariantsSpec.scala: -------------------------------------------------------------------------------- 1 | package play.json.extra 2 | 3 | import org.specs2.mutable.Specification 4 | import play.api.libs.json.{Reads, Json, Format, __} 5 | 6 | object VariantsSpec extends Specification { 7 | 8 | sealed trait Foo 9 | final case class Bar(x: Int) extends Foo 10 | final case class Baz(s: String) extends Foo 11 | case object Bah extends Foo 12 | 13 | sealed trait Attachment 14 | final case class PhotoAttachment(photo: String) extends Attachment 15 | 16 | sealed trait Status 17 | case object ToDo extends Status 18 | case object Done extends Status 19 | 20 | val bar = Bar(42) 21 | val baz = Baz("bah") 22 | 23 | implicit val fooFormat: Format[Foo] = Variants.format[Foo] 24 | implicit val statusFormat: Format[Status] = Variants.format[Status] 25 | 26 | sealed trait A 27 | final case class B(x: Int) extends A 28 | final case class C(x: Int) extends A 29 | 30 | "Variants" should { 31 | 32 | "Generate an additional JSON field containing the variant name" in { 33 | (Json.toJson(bar) \ "$variant").as[String] must equalTo ("Bar") 34 | (Json.toJson(baz) \ "$variant").as[String] must equalTo ("Baz") 35 | (Json.toJson(Bah) \ "$variant").as[String] must equalTo ("Bah") 36 | } 37 | 38 | "Build the right variant from JSON data" in { 39 | Json.obj("$variant" -> "Bar", "x" -> 0).as[Foo] must equalTo (Bar(0)) 40 | Json.obj("$variant" -> "Baz", "s" -> "hello").as[Foo] must equalTo (Baz("hello")) 41 | Json.obj("$variant" -> "Bah").as[Foo] must equalTo (Bah) 42 | } 43 | 44 | "Serialize and deserialize any variant of 
a sum type" in { 45 | Json.toJson(bar).as[Foo] must equalTo (bar) 46 | Json.toJson(baz).as[Foo] must equalTo (baz) 47 | Json.toJson(Bah).as[Foo] must equalTo (Bah) 48 | } 49 | 50 | "Support variants with the same types" in { 51 | implicit val format = Variants.format[A] 52 | Json.toJson(B(42)).as[A] must equalTo (B(42)) 53 | Json.toJson(C(0)).as[A] must equalTo (C(0)) 54 | } 55 | 56 | "Support case object style enumerations" in { 57 | Json.toJson(ToDo).as[Status] must equalTo (ToDo) 58 | Json.toJson(Done).as[Status] must equalTo (Done) 59 | } 60 | 61 | "Support customization of discriminator field name" in { 62 | implicit val format = Variants.format[A]((__ \ "type").format[String]) 63 | (Json.toJson(B(42)) \ "type").as[String] must equalTo ("B") 64 | (Json.toJson(C(0)) \ "type").as[String] must equalTo ("C") 65 | Json.obj("type" -> "B", "x" -> 0).as[A] must equalTo (B(0)) 66 | Json.obj("type" -> "C", "x" -> 0).as[A] must equalTo (C(0)) 67 | 68 | } 69 | 70 | "Generate just a Reads" in { 71 | implicit val reads = Variants.reads[A] 72 | Json.obj("x" -> 42, "$variant" -> "B").as[A] must equalTo (B(42)) 73 | Json.obj("x" -> 0, "$variant" -> "C").as[A] must equalTo (C(0)) 74 | } 75 | 76 | "Generate just a Writes" in { 77 | implicit val writes = Variants.writes[A] 78 | Json.toJson(B(42)) must equalTo (Json.obj("x" -> 42, "$variant" -> "B")) 79 | Json.toJson(C(0)) must equalTo (Json.obj("x" -> 0, "$variant" -> "C")) 80 | } 81 | 82 | "deserialize json with custom discriminator" in { 83 | implicit val attachmentReads: Reads[Attachment] = 84 | Variants.reads[Attachment]((__ \ "type").read[String].map(s => s"${s.capitalize}Attachment")) 85 | 86 | val photoJson = Json.obj("type" -> "photo", "photo" -> "bar") 87 | photoJson.as[Attachment] must beAnInstanceOf[PhotoAttachment] 88 | } 89 | } 90 | 91 | } 92 | -------------------------------------------------------------------------------- /play-json-extra/shared/src/main/scala/play/json/extra/JsonFormat.scala: -------------------------------------------------------------------------------- 1 | package play.json.extra 2 | 3 | import scala.annotation.StaticAnnotation 4 | import scala.collection.mutable.ListBuffer 5 | import scala.language.experimental.macros 6 | import scala.reflect.macros.blackbox.Context 7 | 8 | class JsonFormat() extends StaticAnnotation { 9 | def macroTransform(annottees: Any*): Any = macro JsonFormatMacro.jsonFormat 10 | } 11 | 12 | object JsonFormatMacro { 13 | 14 | import MacroCommons._ 15 | 16 | def jsonFormat(c: Context)(annottees: c.Expr[Any]*): c.Expr[Any] = { 17 | import c.universe._ 18 | import Flag._ 19 | 20 | class ModDesc(var mods: Modifiers, 21 | name: TermName, 22 | var parents: List[Tree], 23 | self: ValDef, 24 | body: List[Tree], 25 | var esArguments: Map[String, Any]) { 26 | val extraMethods: ListBuffer[Tree] = ListBuffer() 27 | 28 | def getTree: Tree = 29 | ModuleDef( 30 | mods, 31 | name, 32 | Template( 33 | parents, 34 | self, 35 | q"""import play.json.extra.implicits.optionWithNull""" 36 | .asInstanceOf[Tree] :: 37 | q"""import play.api.libs.json._ ;// JSON library""" 38 | .asInstanceOf[Tree] :: 39 | q"""import play.api.libs.json.Reads._ ; // Custom validation helpers""" 40 | .asInstanceOf[Tree] :: 41 | q"""import play.api.libs.functional.syntax._ ; // Combinator syntax""" 42 | .asInstanceOf[Tree] :: 43 | body ++ extraMethods)) 44 | 45 | def addMethod(tree: Tree) = { 46 | extraMethods += tree 47 | } 48 | 49 | def existsImplicit(name: String): Boolean = 50 | body.exists { 51 | case ValDef(_, nameDef, _, _) 52 | if 
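// existsImplicit scans the companion object's body for a val with the given name;
// inject (further below) calls existsImplicit("jsonFmt") so that a format the user
// already wrote is not duplicated by the macro.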
nameDef.decodedName.toString == name => 53 | true 54 | case _ => false 55 | } 56 | 57 | } 58 | 59 | object ModDesc { 60 | def apply(tree: Tree, esArguments: Map[String, Any]): ModDesc = { 61 | tree match { 62 | case ModuleDef(mods, name, Template(parents, self, body)) => 63 | new ModDesc(mods, 64 | name, 65 | parents, 66 | self, 67 | body, 68 | esArguments = esArguments) 69 | } 70 | } 71 | } 72 | 73 | def jsonNamespace(name: String): Tree = { 74 | Select(Ident(TermName("play.api.libs.json")), TypeName(name)) 75 | } 76 | 77 | def getFields(body: List[Tree]): List[FldDesc] = 78 | body.flatMap { 79 | case DefDef(mods, name, tparams, vparamss, tpt, rhs) 80 | if name.decodedName.toString == "" => 81 | vparamss.head.map(f => FldDesc(f)).toList 82 | case _ => None 83 | } 84 | class FldDesc(val name: String, 85 | val fullTypeName: String, 86 | val typeName: String, 87 | val typeTree: Tree, 88 | val cls: Option[ClsDesc], 89 | val tree: Tree, 90 | val default: Tree, 91 | val annotations: List[Tree], 92 | val foreignType: Option[String] = None, 93 | val key: Option[String] = None) { 94 | var isOption = false 95 | var isMultiple = false 96 | var multipleType = "" 97 | 98 | internalType 99 | 100 | lazy val internalType: Tree = typeTree match { 101 | case AppliedTypeTree(Ident(option), tpe :: Nil) 102 | if option.decodedName.toString == "Option" => 103 | isOption = true 104 | tpe 105 | case AppliedTypeTree(Ident(list), tpe :: Nil) 106 | if multipleClasses.contains(list.decodedName.toString) => 107 | isMultiple = true 108 | multipleType = list.decodedName.toString 109 | tpe 110 | 111 | case _ => typeTree 112 | } 113 | 114 | val fieldName = Literal(Constant(key.getOrElse(name))) 115 | 116 | def getReads(recursiveName: String, recursiveBuilder: String): Tree = { 117 | // println(s"$isMultiple $multipleType $recursiveName $recursiveBuilder $typeName") 118 | 119 | if (isOption) { 120 | return q"""(__ \ $fieldName).readNullable[$internalType]""" 121 | } 122 | if (recursiveName == internalType.toString()) { 123 | val recursiveBuilderTerm = TermName(recursiveBuilder) 124 | 125 | if (isMultiple) { 126 | if (multipleType.toLowerCase == "list") 127 | return q"""(__ \ $fieldName).lazyRead(Reads.list[$internalType]($recursiveBuilderTerm))""" 128 | if (multipleType.toLowerCase == "seq") 129 | return q"""(__ \ $fieldName).lazyRead(Reads.seq[$internalType]($recursiveBuilderTerm))""" 130 | if (multipleType.toLowerCase == "set") 131 | return q"""(__ \ $fieldName).lazyRead(Reads.set[$internalType]($recursiveBuilderTerm))""" 132 | 133 | } else { 134 | return q"""(__ \ $fieldName).lazyRead[$internalType]($recursiveBuilderTerm)""" 135 | } 136 | } 137 | 138 | q"""(__ \ $fieldName).read[$typeTree]""" 139 | } 140 | 141 | def getWrites(recursiveName: String, recursiveBuilder: String): Tree = { 142 | if (isOption) { 143 | return q"""(__ \ $fieldName).writeNullable[$internalType]""" 144 | } 145 | if (recursiveName == internalType.toString()) { 146 | val recursiveBuilderTerm = TermName(recursiveBuilder) 147 | 148 | if (isMultiple) { 149 | val typeLow = multipleType.toLowerCase 150 | 151 | typeLow match { 152 | case "list" => 153 | return q"""(__ \ $fieldName).lazyWrite(Writes.list[$internalType]($recursiveBuilderTerm))""" 154 | case "seq" => 155 | return q"""(__ \ $fieldName).lazyWrite(Writes.seq[$internalType]($recursiveBuilderTerm))""" 156 | case "set" => 157 | return q"""(__ \ $fieldName).lazyWrite(Writes.set[$internalType]($recursiveBuilderTerm))""" 158 | 159 | } 160 | } else { 161 | return q"""(__ \ 
$fieldName).lazyWrite[$internalType]($recursiveBuilderTerm)""" 162 | } 163 | } 164 | 165 | q"""(__ \ $fieldName).write[$typeTree]""" 166 | } 167 | 168 | } 169 | 170 | object FldDesc { 171 | def apply(fieldTree: Tree) = { 172 | val ValDef(mod, name, tpt, rhs) = fieldTree 173 | val defaultValue: Tree = q"""None""" 174 | def buildTypeName(tree: Tree): String = { 175 | tree match { 176 | case Select(subtree, name) => 177 | buildTypeName(subtree) + "." + name.decodedName.toString 178 | case AppliedTypeTree(subtree, args) => 179 | buildTypeName(subtree) + "[" + args 180 | .map(it => buildTypeName(it)) 181 | .mkString(",") + "]" 182 | case Ident(x) => 183 | x.decodedName.toString 184 | case other => other.toString 185 | } 186 | } 187 | val fullTypeName: String = buildTypeName(tpt) 188 | val typeName = fullTypeName 189 | val clsDesc: Option[ClsDesc] = None 190 | 191 | var key: Option[String] = None 192 | mod.annotations.foreach { 193 | case Apply(Select(New(Ident(index)), _), 194 | List(Literal(Constant(unique)))) => 195 | //if (index.toString == "Parent") parent = Some(ParentMeta(unique.toString, name.decodedName.toString)) 196 | // println(s"$index $unique") 197 | 198 | if (index.toString == "key") key = Some(unique.toString) 199 | case extra => 200 | //println(s"annotation extra: ${showRaw(extra)}") 201 | } 202 | 203 | new FldDesc(name.decodedName.toString, 204 | fullTypeName, 205 | typeName, 206 | tpt, 207 | clsDesc, 208 | fieldTree, 209 | defaultValue, 210 | mod.annotations, 211 | key = key) 212 | } 213 | } 214 | 215 | class ClsDesc(val mods: Modifiers, 216 | val name: TypeName, 217 | types: List[TypeDef], 218 | var parents: List[Tree], 219 | self: ValDef, 220 | var body: List[Tree], 221 | esArguments: Map[String, Any]) { 222 | val objectName = TermName(name.decodedName.toString) 223 | val extraMethods: ListBuffer[Tree] = ListBuffer() 224 | val fields = getFields(body) 225 | 226 | def getTree: Tree = 227 | ClassDef(mods, 228 | name, 229 | types, 230 | Template(parents, self, body ++ extraMethods)) 231 | 232 | def addMethod(tree: Tree) = { 233 | extraMethods += tree 234 | } 235 | } 236 | 237 | object ClsDesc { 238 | def apply(tree: Tree, esArguments: Map[String, Any]) = { 239 | val ClassDef(mods, name, types, Template(parents, self, body)) = tree 240 | if (!mods.hasFlag(CASE)) 241 | c.abort( 242 | c.enclosingPosition, 243 | s"Only case classes allowed here ${name.decodedName.toString}") 244 | val annotations = mods.annotations.map(_.children.head.toString) 245 | val result = new ClsDesc(mods, 246 | name, 247 | types, 248 | parents, 249 | self, 250 | body, 251 | esArguments = esArguments) 252 | result 253 | } 254 | } 255 | 256 | def typeToType(myType: String): Tree = { 257 | if (myType.contains("[")) { 258 | val operator = myType.split("\\[")(0) 259 | val remainer = myType.split("\\[")(1).stripSuffix("]") 260 | return AppliedTypeTree( 261 | Ident(TypeName(operator)), 262 | remainer.split(",").map(a => typeToType(a.trim)).toList) 263 | } 264 | val tokens = myType.split("\\.") 265 | if (tokens.length == 1) 266 | return Ident(TypeName(tokens.head)) 267 | 268 | var tree: Tree = Ident(TermName(tokens.head)) 269 | tokens.tail.foreach { name => 270 | if (name == tokens.last) 271 | tree = Select(tree, TypeName(name)) 272 | else 273 | tree = Select(tree, TermName(name)) 274 | } 275 | tree 276 | } 277 | 278 | class ClsModClass(cls: ClsDesc, mod: ModDesc, arguments: Map[String, Any]) { 279 | def getTrees: List[Tree] = List(cls.getTree, mod.getTree) 280 | 281 | def inject = { 282 | 283 | val typeName = 
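// inject adds, unless the companion object already declares a jsonFmt, an
// `implicit def jsonFmt: Format[...] = Jsonx.formatCaseClassUseDefaults[...]`
// plus an import of play.json.extra._ to the companion object's body.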
cls.name 284 | val varTypeName = cls.name.decodedName.toString 285 | .take(1) 286 | .toLowerCase + cls.name.decodedName.toString.drop(1) 287 | 288 | val formatName = varTypeName + "Format" 289 | 290 | if (!mod.existsImplicit("jsonFmt")) { 291 | 292 | mod.addMethod(q"""import play.json.extra._;""") 293 | // mod.addMethod(q"""import play.json.extra.tuples._;""") 294 | mod.addMethod( 295 | q"""implicit def jsonFmt: play.api.libs.json.Format[${typeToType( 296 | typeName.toString)}] = Jsonx.formatCaseClassUseDefaults[${typeToType( 297 | typeName.toString)}];""") 298 | } 299 | 300 | } 301 | } 302 | 303 | object ClsModClass { 304 | def apply(cls: Option[ClsDesc] = None, 305 | mod: Option[ModDesc] = None, 306 | arguments: Map[String, Any]): ClsModClass = { 307 | var module = mod 308 | if (module.isEmpty) { 309 | val className = TermName(cls.get.name.decodedName.toString) 310 | 311 | module = Some(q"""object $className {}""" match { 312 | case ModuleDef(mods, name, Template(parents, self, body)) => 313 | new ModDesc(mods, name, Nil, self, body, esArguments = arguments) 314 | }) 315 | } 316 | 317 | new ClsModClass(cls.get, module.get, arguments) 318 | } 319 | } 320 | 321 | var myClass: Option[ClsDesc] = None 322 | var myModule: Option[ModDesc] = None 323 | val arguments = extractArguments(c) 324 | annottees.map(_.tree).toList.foreach { 325 | case it @ ModuleDef(mods, name, Template(parents, self, body)) => 326 | myModule = Some(ModDesc(it, arguments)) 327 | case it @ ClassDef(mods, name, types, Template(parents, self, body)) => 328 | myClass = Some(ClsDesc(it, arguments)) 329 | } 330 | 331 | val esObject = ClsModClass(myClass, myModule, arguments = arguments) 332 | esObject.inject 333 | // esObject.getTrees.foreach(println) 334 | c.Expr[Any](Block(esObject.getTrees, Literal(Constant(())))) 335 | } 336 | 337 | private def extractArguments(c: Context): Map[String, Any] = { 338 | import c.universe._ 339 | var result: Map[String, Any] = Map.empty 340 | 341 | c.prefix.tree.foreach { 342 | case Apply(_, values) => 343 | values.foreach { 344 | case AssignOrNamedArg(Ident(ident), Literal(Constant(v))) => 345 | result += (ident.decodedName.toString -> v) 346 | } 347 | case _ => 348 | } 349 | result 350 | } 351 | 352 | } 353 | -------------------------------------------------------------------------------- /play-json-extra/shared/src/main/scala/play/json/extra/MacroCommons.scala: -------------------------------------------------------------------------------- 1 | package play.json.extra 2 | 3 | /** 4 | * Common values to be used in macros 5 | */ 6 | object MacroCommons { 7 | val standardEmbeddableObjects = Set("String", 8 | "Int", 9 | "Long", 10 | "Double", 11 | "Boolean", 12 | "Short", 13 | "Float", 14 | "DateTime", 15 | "Integer", 16 | "BigInt", 17 | "Byte") 18 | val multipleClasses = Set("List", "Set", "Seq", "Vector") 19 | 20 | } 21 | -------------------------------------------------------------------------------- /play-json-extra/shared/src/main/scala/play/json/extra/Picklers.scala: -------------------------------------------------------------------------------- 1 | package play.json.extra 2 | 3 | import play.api.libs.json._ 4 | 5 | object Picklers { 6 | 7 | 8 | trait EnumStringJSON[T] { 9 | def asString(t: T): String 10 | 11 | def fromString(s: String): Option[T] 12 | 13 | implicit val jsonFmt = new Format[T] { 14 | override def writes(o: T): JsValue = JsString(asString(o)) 15 | 16 | override def reads(json: JsValue): JsResult[T] = json match { 17 | case JsString(v) => 18 | fromString(v) match { 19 | 
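// Illustrative sketch of how EnumStringJSON is meant to be used (Color, Red and Blue are
// made-up names, not part of this library):
//   sealed trait Color
//   case object Red extends Color
//   case object Blue extends Color
//   object Color extends Picklers.EnumStringJSON[Color] {
//     def asString(c: Color): String = c.toString
//     def fromString(s: String): Option[Color] = Seq(Red, Blue).find(_.toString == s)
//   }
// The jsonFmt defined in this trait then becomes the implicit Format[Color] found via
// Color's companion object.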
case Some(result) => JsSuccess(result.asInstanceOf[T]) 20 | case _ => JsError(s"Invalid value $v") 21 | } 22 | case _ => JsError(s"Invalid json element $json") 23 | 24 | } 25 | } 26 | 27 | } 28 | 29 | 30 | trait EnumIntJSON[T] { 31 | def asInt(t: T): Int 32 | 33 | def fromInt(s: Int): Option[T] 34 | 35 | implicit val jsonFmt = new Format[T] { 36 | override def writes(o: T): JsValue = JsNumber(asInt(o)) 37 | 38 | override def reads(json: JsValue): JsResult[T] = json match { 39 | case JsNumber(v) => 40 | fromInt(v.toInt) match { 41 | case Some(result) => JsSuccess(result.asInstanceOf[T]) 42 | case _ => JsError(s"Invalid value $v") 43 | } 44 | case _ => JsError(s"Invalid json element $json") 45 | 46 | } 47 | } 48 | 49 | } 50 | 51 | } 52 | -------------------------------------------------------------------------------- /play-json-extra/shared/src/main/scala/play/json/extra/Variants.scala: -------------------------------------------------------------------------------- 1 | package play.json.extra 2 | 3 | import scala.language.experimental.macros 4 | 5 | import play.api.libs.json.{Writes, Reads, Format, __} 6 | import scala.reflect.macros.blackbox.Context 7 | 8 | object Variants { 9 | 10 | /** 11 | * @tparam A The base type of a final case class hierarchy. 12 | * @return A [[play.api.libs.json.Format]] for the type hierarchy of `A`. It uses an additional field named `$variant` 13 | * to discriminate between the possible subtypes of `A`. 14 | */ 15 | def format[A]: Format[A] = macro Impl.format[A] 16 | 17 | /** 18 | * @param discriminator Format of the type discriminator field. 19 | * @tparam A Base type of final case class hierarchy. 20 | * @return A [[play.api.libs.json.Format]] for the type hierarchy of `A`. 21 | */ 22 | def format[A](discriminator: Format[String]): Format[A] = macro Impl 23 | .formatDiscriminator[A] 24 | 25 | /** 26 | * @tparam A The base type of a final case class hierarchy. 27 | * @return A [[play.api.libs.json.Reads]] for the type hierarchy of `A`. It relies on an additional field named `$variant` 28 | * to discriminate between the possible subtypes of `A`. 29 | */ 30 | def reads[A]: Reads[A] = macro Impl.reads[A] 31 | 32 | /** 33 | * @param discriminator Decoder of the type discriminator field. 34 | * @tparam A Base type of final case class hierarchy. 35 | * @return A [[play.api.libs.json.Reads]] for the type hierarchy of `A`. 36 | */ 37 | def reads[A](discriminator: Reads[String]): Reads[A] = macro Impl 38 | .readsDiscriminator[A] 39 | 40 | /** 41 | * @tparam A The base type of a final case class hierarchy. 42 | * @return A [[play.api.libs.json.Writes]] for the type hierarchy of `A`. It uses an additional field named `$variant` 43 | * to discriminate between the possible subtypes of `A`. 44 | */ 45 | def writes[A]: Writes[A] = macro Impl.writes[A] 46 | 47 | /** 48 | * @param discriminator Encoder of the type discriminator field. 49 | * @tparam A Base type of final case class hierarchy. 50 | * @return A [[play.api.libs.json.Writes]] for the type hierarchy of `A`. 
51 | */ 52 | def writes[A](discriminator: Writes[String]): Writes[A] = macro Impl 53 | .writesDiscriminator[A] 54 | 55 | private object Impl { 56 | 57 | val defaultDiscriminator = (__ \ "$variant").format[String] 58 | 59 | /** 60 | * Given the following definition of class hierarchy `Foo`: 61 | * 62 | * {{{ 63 | * sealed trait Foo 64 | * final case class Bar(x: Int) extends Foo 65 | * final case class Baz(s: String) extends Foo 66 | * case object Bah extends Foo 67 | * }}} 68 | * 69 | * `Variants.format[Foo]` expands to the following: 70 | * 71 | * {{{ 72 | * { 73 | * import play.api.libs.json.{Writes, Reads} 74 | * 75 | * val writes = Writes[Foo] { 76 | * case bar: Bar => Json.toJson(bar)(Json.writes[Bar]).as[JsObject] + ("$variant" -> JsString("Bar")) 77 | * case baz: Baz => Json.toJson(baz)(Json.writes[Baz]).as[JsObject] + ("$variant" -> JsString("Baz")) 78 | * case _: Bah => JsObject(Seq("$variant" -> JsString("Bah"))) 79 | * } 80 | * 81 | * val reads = Reads[Foo] { json => 82 | * (json \ "$variant").validate[String].flatMap { 83 | * case "Bar" => Json.fromJson(json)(Json.reads[Bar]) 84 | * case "Baz" => Json.fromJson(json)(Json.reads[Baz]) 85 | * case "Bah" => JsSuccess(Bah) 86 | * } 87 | * } 88 | * 89 | * Format(reads, writes) 90 | * } 91 | * 92 | * }}} 93 | * 94 | */ 95 | def format[A: c.WeakTypeTag](c: Context): c.Expr[Format[A]] = { 96 | import c.universe._ 97 | formatDiscriminator[A](c)(reify(defaultDiscriminator)) 98 | } 99 | 100 | def formatDiscriminator[A: c.WeakTypeTag](c: Context)( 101 | discriminator: c.Expr[Format[String]]): c.Expr[Format[A]] = { 102 | import c.universe._ 103 | val (baseClass, variants) = baseAndVariants[A](c) 104 | val writes = writesTree(c)(baseClass, variants, discriminator) 105 | val reads = readsTree(c)(baseClass, variants, discriminator) 106 | c.Expr[Format[A]]( 107 | q"play.api.libs.json.Format[$baseClass]($reads, $writes)") 108 | } 109 | 110 | def reads[A: c.WeakTypeTag](c: Context): c.Expr[Reads[A]] = { 111 | import c.universe._ 112 | readsDiscriminator[A](c)(reify(defaultDiscriminator)) 113 | } 114 | 115 | def readsDiscriminator[A: c.WeakTypeTag](c: Context)( 116 | discriminator: c.Expr[Reads[String]]): c.Expr[Reads[A]] = { 117 | import c.universe._ 118 | val (baseClass, variants) = baseAndVariants[A](c) 119 | c.Expr[Reads[A]](readsTree(c)(baseClass, variants, discriminator)) 120 | } 121 | 122 | def writes[A: c.WeakTypeTag](c: Context): c.Expr[Writes[A]] = { 123 | import c.universe._ 124 | writesDiscriminator[A](c)(reify(defaultDiscriminator)) 125 | } 126 | 127 | def writesDiscriminator[A: c.WeakTypeTag](c: Context)( 128 | discriminator: c.Expr[Writes[String]]): c.Expr[Writes[A]] = { 129 | val (baseClass, variants) = baseAndVariants[A](c) 130 | c.Expr[Writes[A]](writesTree(c)(baseClass, variants, discriminator)) 131 | } 132 | 133 | /* 134 | * Get the class hierarchy and checks that the hierarchy is closed 135 | */ 136 | def baseAndVariants[A: c.WeakTypeTag]( 137 | c: Context): (c.universe.ClassSymbol, Set[c.universe.ClassSymbol]) = { 138 | import c.universe._ 139 | val baseClass = weakTypeOf[A].typeSymbol.asClass 140 | baseClass.typeSignature // SI-7046 141 | if (!baseClass.isSealed) { 142 | c.abort(c.enclosingPosition, s"$baseClass is not sealed") 143 | } 144 | // Get all the possible variants of this type 145 | val variants = baseClass.knownDirectSubclasses.map(_.asClass) 146 | for (variant <- variants 147 | if !(variant.isCaseClass || variant.isModuleClass)) { 148 | c.abort(c.enclosingPosition, 149 | s"$variant is not a final case class nor a 
case object") 150 | } 151 | baseClass -> variants 152 | } 153 | 154 | def writesTree(c: Context)( 155 | baseClass: c.universe.ClassSymbol, 156 | variants: Set[c.universe.ClassSymbol], 157 | discriminator: c.Expr[Writes[String]]): c.Tree = { 158 | import c.universe._ 159 | val writesCases = for (variant <- variants) yield { 160 | if (!variant.isModuleClass) { 161 | val term = TermName(c.freshName()) 162 | cq"""$term: $variant => play.api.libs.json.Json.toJson($term)(play.api.libs.json.Json.writes[$variant]).as[play.api.libs.json.JsObject] ++ $discriminator.writes(${variant.name.decodedName.toString})""" 163 | } else { 164 | cq"""_: $variant => $discriminator.writes(${variant.name.decodedName.toString})""" 165 | } 166 | } 167 | q"play.api.libs.json.Writes[$baseClass] { case ..$writesCases }" 168 | } 169 | 170 | def readsTree(c: Context)(baseClass: c.universe.ClassSymbol, 171 | variants: Set[c.universe.ClassSymbol], 172 | discriminator: c.Expr[Reads[String]]): c.Tree = { 173 | import c.universe._ 174 | val readsCases = for (variant <- variants) yield { 175 | if (!variant.isModuleClass) { 176 | cq"""${variant.name.decodedName.toString} => play.api.libs.json.Json.fromJson(json)(play.api.libs.json.Json.reads[$variant])""" 177 | } else { 178 | cq"""${variant.name.decodedName.toString} => play.api.libs.json.JsSuccess(${TermName( 179 | variant.name.decodedName.toString)})""" 180 | } 181 | } 182 | q""" 183 | play.api.libs.json.Reads[$baseClass](json => 184 | $discriminator.reads(json).flatMap { case ..$readsCases } 185 | ) 186 | """ 187 | } 188 | } 189 | } 190 | -------------------------------------------------------------------------------- /play-json-extra/shared/src/main/scala/play/json/extra/extra.scala: -------------------------------------------------------------------------------- 1 | package play.json.extra 2 | 3 | import scala.reflect.macros.blackbox 4 | import play.api.libs.json._ 5 | import collection.immutable.ListMap 6 | import scala.annotation.implicitNotFound 7 | import scala.language.experimental.macros 8 | 9 | package object internals { 10 | /* 11 | // this would allow implicitlyOption for primitives. 
move to scala-extensions 12 | final case class FetchedFormat[T](format: Option[Format[T]]) 13 | object FetchedFormat{ 14 | implicit def fetch[T](implicit format: Format[T] = null): FetchedFormat[T] = FetchedFormat(Option(format)) 15 | } 16 | def implicitlyOption[T](implicit ev: FetchedFormat[T]) = ev.format 17 | */ 18 | /** does not work for primitive types */ 19 | def implicitlyOption[T](implicit ev: T = null): Option[T] = Option(ev) 20 | 21 | /** 22 | Type class for case classes 23 | */ 24 | final class CaseClass[T] 25 | object CaseClass { 26 | def checkCaseClassMacro[T: c.WeakTypeTag](c: blackbox.Context) = { 27 | import c.universe._ 28 | val T = c.weakTypeOf[T] 29 | if (!T.typeSymbol.isClass || !T.typeSymbol.asClass.isCaseClass) 30 | c.error(c.enclosingPosition, "") 31 | q"new _root_.play.json.extra.internals.CaseClass[$T]" 32 | } 33 | 34 | /** 35 | fails compilation if T is not a final case class 36 | meaning this can be used as an implicit to check 37 | */ 38 | implicit def checkCaseClass[T]: CaseClass[T] = macro checkCaseClassMacro[T] 39 | } 40 | 41 | final class SingletonObject[T] 42 | object SingletonObject { 43 | def checkSingletonObjectMacro[T: c.WeakTypeTag](c: blackbox.Context) = { 44 | import c.universe._ 45 | val T = c.weakTypeOf[T] 46 | if (!T.typeSymbol.isClass || !T.typeSymbol.asClass.isModuleClass) 47 | c.error(c.enclosingPosition, "") 48 | q"new _root_.play.json.extra.internals.SingletonObject[$T]" 49 | } 50 | 51 | /** 52 | fails compilation if T is not a singleton object class 53 | meaning this can be used as an implicit to check 54 | */ 55 | implicit def checkSingletonObject[T]: SingletonObject[T] = macro checkSingletonObjectMacro[ 56 | T] 57 | } 58 | 59 | import scala.collection._ 60 | import scala.collection.generic.CanBuildFrom 61 | private[json] implicit class TraversableLikeExtensions[A, Repr]( 62 | val coll: TraversableLike[A, Repr]) 63 | extends AnyVal { 64 | 65 | /** Eliminates duplicates based on the given equivalence function. 66 | There is no guarantee which elements stay in case element two elements are considered equivalent. 67 | this has runtime O(n^2) 68 | @param symmetric comparison function which tests whether the two arguments are considered equivalent. */ 69 | def distinctWith[That](equivalent: (A, A) => Boolean)( 70 | implicit bf: CanBuildFrom[Repr, A, That]): That = { 71 | var l = List[A]() 72 | val b = bf(coll.repr) 73 | for (elem <- coll) { 74 | l.find { 75 | case first => equivalent(elem, first) 76 | }.getOrElse { 77 | l = elem +: l 78 | b += elem 79 | } 80 | } 81 | b.result 82 | } 83 | } 84 | } 85 | 86 | import internals._ 87 | 88 | @implicitNotFound( 89 | """could not find implicit value for parameter helper: play.api.libs.json.Reads[${T}] 90 | TRIGGERED BY: could not find implicit value for parameter helper: play.json.extra.OptionValidationDispatcher[${T}] 91 | TO SOLVE THIS 92 | 1. Make sure there is a Reads[${T}] or Format[${T}] in the implicit scope 93 | 2. In case of Reads[Option[...]] you need to either 94 | import play.json.extra.implicits.optionWithNull // suggested 95 | or 96 | import play.json.extra.implicits.optionNoError // buggy play-json 2.3 behavior 97 | 3. In case of Reads[... 
.type] 98 | import play.json.extra.SingletonEncoder.simpleName 99 | import play.json.extra.implicits.formatSingleton 100 | """) 101 | final class OptionValidationDispatcher[T] private[json] ( 102 | val validate: JsLookupResult => JsResult[T]) 103 | extends AnyVal 104 | 105 | object OptionValidationDispatcher { 106 | // these methods allow to dispatch via overloading 107 | // this is required to dispatch when not usign implicit search such as in the implementation of formatAuto 108 | def dispatch[T](reads: Reads[T])( 109 | disambiguate: AnyRef = null): OptionValidationDispatcher[T] = { 110 | new OptionValidationDispatcher[T](_.validate[T](reads)) 111 | } 112 | def dispatch[T](reads: Reads[T])(): OptionValidationDispatcher[Option[T]] = { 113 | new OptionValidationDispatcher[Option[T]](_.validateOpt[T](reads)) 114 | } 115 | 116 | // these methods allow dispatch via implicit search 117 | implicit def dispatchNonOption[T: Reads]: OptionValidationDispatcher[T] = { 118 | new OptionValidationDispatcher[T](_.validate[T]) 119 | } 120 | implicit def dispatchOption[T: Reads] 121 | : OptionValidationDispatcher[Option[T]] = { 122 | new OptionValidationDispatcher[Option[T]](_.validateOpt[T]) 123 | } 124 | } 125 | 126 | object debugMacro { 127 | def apply[T](tree: T): T = macro Macros.debugMacro 128 | } 129 | 130 | object `package` { 131 | implicit class JsLookupResultExtensions(res: JsLookupResult) { 132 | 133 | /** properly validate Option and non-Option fields alike */ 134 | def validateAuto[T]( 135 | implicit helper: OptionValidationDispatcher[T]): JsResult[T] = 136 | helper.validate(res) 137 | } 138 | implicit class JsValueExtensions(res: JsValue) { 139 | 140 | /** properly validate Option and non-Option fields alike */ 141 | def validateAuto[T]( 142 | implicit helper: OptionValidationDispatcher[T]): JsResult[T] = 143 | JsDefined(res).validateAuto[T] 144 | } 145 | } 146 | 147 | private[extra] class Macros(val c: blackbox.Context) { 148 | import c.universe._ 149 | val pkg = q"_root_.play.json.extra" 150 | val pjson = q"_root_.play.api.libs.json" 151 | 152 | /** like identity but prints desugared code and tree */ 153 | def debugMacro(tree: Tree): Tree = { 154 | println("code:\n " + tree) 155 | println("Tree:\n " + showRaw(tree)) 156 | tree 157 | } 158 | 159 | /** 160 | Generates a list of all known classes and traits in an inheritance tree. 161 | Includes the given class itself. 162 | Does not include subclasses of non-sealed classes and traits. 163 | TODO: move this to scala-extensions 164 | */ 165 | private def knownTransitiveSubclasses(sym: ClassSymbol): Seq[ClassSymbol] = { 166 | sym +: (if (sym.isModuleClass) { 167 | Seq() 168 | } else { 169 | sym.knownDirectSubclasses.flatMap(s => 170 | knownTransitiveSubclasses(s.asClass)) 171 | }).toSeq 172 | } 173 | 174 | private def primaryConstructor(tpe: Type): MethodSymbol = { 175 | tpe.decls.collectFirst { 176 | case m: MethodSymbol if m.isPrimaryConstructor => 177 | if (!m.isPublic) 178 | c.error( 179 | c.enclosingPosition, 180 | s"Only classes with public primary constructor are supported. Found: $tpe") 181 | m 182 | }.get 183 | } 184 | private def caseClassFieldsTypes( 185 | tpe: Type): ListMap[String, (String, Type)] = { 186 | val paramLists = primaryConstructor(tpe).paramLists 187 | val params = paramLists.head 188 | 189 | if (paramLists.size > 1) 190 | c.error(c.enclosingPosition, 191 | s"Only one parameter list classes are supported. 
Found: $tpe") 192 | 193 | params.foreach { p => 194 | if (!p.isPublic) 195 | c.error( 196 | c.enclosingPosition, 197 | s"Only classes with all public constructor arguments are supported. Found: $tpe") 198 | } 199 | 200 | ListMap(params.map { field => 201 | var attrName = field.name.toTermName.decodedName.toString 202 | if (field.annotations.nonEmpty) { 203 | field.annotations.map { annotation => 204 | annotation.tree match { 205 | case Apply(Select(New(tpe), _), List(Literal(Constant(unique)))) => 206 | if (tpe.toString().toLowerCase.endsWith(".key")) 207 | attrName = unique.toString 208 | case extra => 209 | } 210 | } 211 | } 212 | (field.name.toTermName.decodedName.toString, 213 | (attrName -> field.infoIn(tpe))) 214 | }: _*) 215 | } 216 | private def caseClassFieldsDefaults( 217 | tpe: Type): ListMap[String, Option[Tree]] = { 218 | if (tpe.companion == NoType) { 219 | ListMap() 220 | } else { 221 | ListMap( 222 | tpe.companion 223 | .member(TermName("apply")) 224 | .asTerm 225 | .alternatives 226 | .find(_.isSynthetic) 227 | .get 228 | .asMethod 229 | .paramLists 230 | .flatten 231 | .zipWithIndex 232 | .map { 233 | case (field, i) => 234 | ( 235 | field.name.toTermName.decodedName.toString, { 236 | val method = TermName(s"apply$$default$$${i + 1}") 237 | tpe.companion.member(method) match { 238 | case NoSymbol => None 239 | case _ => Some(q"${tpe.typeSymbol.companion}.$method") 240 | } 241 | } 242 | ) 243 | }: _*) 244 | } 245 | } 246 | 247 | def formatAuto[T: c.WeakTypeTag]: Tree = formatAutoInternal(c.weakTypeOf[T]) 248 | def formatAutoInternal(T: Type): Tree = { 249 | import internals.TraversableLikeExtensions 250 | def defaultFormatter = 251 | if (T <:< typeOf[Option[_]]) { 252 | val s = T.typeArgs.head 253 | q""" 254 | Format.optionWithNull(${formatAutoInternal(s)}) 255 | """ 256 | } else if (isModuleClass(T)) { 257 | q""" 258 | implicit def simpleName = SingletonEncoder.simpleName 259 | implicits.formatSingleton 260 | """ 261 | } else if (isCaseClass(T) && caseClassFieldsTypes(T).size == 1) { 262 | val ArgType = caseClassFieldsTypes(T).head._2._2 263 | val name = TermName(c.freshName) 264 | q""" 265 | implicit def $name = Jsonx.formatAuto[$ArgType] 266 | Jsonx.formatInline[$T] 267 | """ 268 | } else if (isCaseClass(T)) { 269 | val fieldFormatters = caseClassFieldsTypes(T).map { 270 | case (_, (_, t)) => t 271 | }.toVector.distinctWith(_ =:= _).map { t => 272 | val name = TermName(c.freshName) 273 | q"implicit def $name = Jsonx.formatAuto[$t]" 274 | } 275 | val t = q""" 276 | ..$fieldFormatters 277 | Jsonx.formatCaseClass[$T] 278 | """ 279 | t 280 | } else if (T.typeSymbol.isClass && T.typeSymbol.asClass.isSealed && T.typeSymbol.asClass.isAbstract) { 281 | val fieldFormatters = T.typeSymbol.asClass.knownDirectSubclasses.map { 282 | t => 283 | val name = TermName(c.freshName) 284 | q"implicit def $name = Jsonx.formatAuto[$t]" 285 | } 286 | q""" 287 | ..$fieldFormatters 288 | Jsonx.formatSealed[$T] 289 | """ 290 | } else { 291 | q"implicitly[Format[$T]]" // produces error message if no formatter defined 292 | } 293 | 294 | val t = q""" 295 | { 296 | import $pjson._ 297 | import $pkg._ 298 | internals.implicitlyOption[Format[$T]].getOrElse{ 299 | $defaultFormatter 300 | } 301 | } 302 | """ 303 | //println(t) 304 | t 305 | } 306 | 307 | def formatInline[T: c.WeakTypeTag]: Tree = { 308 | val T = c.weakTypeOf[T] 309 | val fields = caseClassFieldsTypes(T) 310 | if (fields.size != 1) 311 | c.error(c.enclosingPosition, 312 | s"class with exactly one argument required, but found: $T") 313 | 
val (field, (attrName, tpe)) = fields.head 314 | //TODO fix attrname in inline 315 | q""" 316 | { 317 | import $pjson._ 318 | import $pkg._ 319 | new Format[$T]{ 320 | def reads(json: JsValue) = json.validate[$tpe].map(new $T(_)) 321 | def writes(obj: $T) = Json.toJson(obj.${TermName(field)}) 322 | } 323 | } 324 | """ 325 | } 326 | 327 | def formatCaseClassUseDefaults[T: c.WeakTypeTag](ev: Tree): Tree = 328 | formatCaseClassInternal[T](ev, true) 329 | 330 | def formatCaseClass[T: c.WeakTypeTag](ev: Tree): Tree = 331 | formatCaseClassInternal[T](ev, false) 332 | 333 | private def formatCaseClassInternal[T: c.WeakTypeTag]( 334 | ev: Tree, 335 | useDefaults: Boolean): Tree = { 336 | val T = c.weakTypeOf[T] 337 | if (!isCaseClass(T)) 338 | c.error(c.enclosingPosition, s"not a final case class: $T") 339 | val defaults = caseClassFieldsDefaults(T) 340 | def orDefault(t: Tree, name: String) = { 341 | val default = defaults.get(name).flatten 342 | default 343 | .filter(_ => useDefaults) 344 | .map(d => q"$t orElse JsSuccess($d)") 345 | .getOrElse(t) 346 | } 347 | val (results, mkResults) = caseClassFieldsTypes(T).map { 348 | case (k, (a, t)) => 349 | val name = TermName(c.freshName) 350 | val path = q"(json \ $a)" 351 | val result = q"bpath.validateAuto[$t].repath(path)" 352 | // FIXME: the below needs cleanup 353 | (name, 354 | q"""val $name: JsResult[$t] = { 355 | val bpath = $path 356 | val path = (JsPath() \ $a) 357 | val resolved = path.asSingleJsResult(json) 358 | val result = if(bpath.isInstanceOf[JsDefined]) ${result} else ${orDefault( 359 | result, 360 | k)} 361 | (resolved,result) match { 362 | case (_,result:JsSuccess[_]) => result 363 | case _ => resolved.flatMap(_ => result) 364 | } 365 | } 366 | """) 367 | }.unzip 368 | val jsonFields = caseClassFieldsTypes(T).map { 369 | case (k, (a, t)) => 370 | q"""${Constant(a)} -> Json.toJson[$t](obj.${TermName(k)})(implicitly[Writes[$t]])""" 371 | } 372 | 373 | q""" 374 | { 375 | import $pjson._ 376 | import $pkg._ 377 | new Format[$T]{ 378 | def reads(json: JsValue) = { 379 | ..$mkResults 380 | val errors = Seq[JsResult[_]](..$results).collect{ 381 | case JsError(values) => values 382 | }.flatten 383 | if(errors.isEmpty){ 384 | try{ 385 | JsSuccess(new $T(..${results.map(r => q"$r.get")})) 386 | } catch { 387 | case e: _root_.java.lang.IllegalArgumentException => 388 | val sw = new _root_.java.io.StringWriter() 389 | val pw = new _root_.java.io.PrintWriter(sw) 390 | e.printStackTrace(pw) 391 | JsError(Seq(JsPath() -> Seq(JsonValidationError(sw.toString,e)))) 392 | } 393 | } else JsError(errors) 394 | } 395 | def writes(obj: $T) = JsObject(Seq[(String,JsValue)](..$jsonFields).filterNot(_._2 == JsNull)) 396 | } 397 | } 398 | """ 399 | } 400 | 401 | private def verifyKnownDirectSubclassesPostTyper(_T: Type, 402 | macroCall: String): Tree = { 403 | val T = _T.typeSymbol.asClass 404 | 405 | val subs = T.knownDirectSubclasses 406 | 407 | // hack to detect breakage of knownDirectSubclasses as suggested in 408 | // https://gitter.im/scala/scala/archives/2015/05/05 and 409 | // https://gist.github.com/retronym/639080041e3fecf58ba9 410 | val global = c.universe.asInstanceOf[scala.tools.nsc.Global] 411 | def checkSubsPostTyper = 412 | if (subs != T.knownDirectSubclasses) 413 | c.error( 414 | c.macroApplication.pos, 415 | s"""macro call $macroCall happend in a place, where typechecking of $T hasn't been completed yet. 416 | Completion is required in order to find all direct subclasses. 
417 | Try moving the call lower in the file, into a separate file, a sibling package, a separate sbt sub project or elsewhere. 418 | This is caused by https://issues.scala-lang.org/browse/SI-7046 and can only be avoided by manually moving the call. 419 | """) 420 | 421 | val checkSubsPostTyperTypTree = 422 | new global.TypeTreeWithDeferredRefCheck()(() => { 423 | checkSubsPostTyper; global.TypeTree(global.NoType) 424 | }).asInstanceOf[TypTree] 425 | q"type VerifyKnownDirectSubclassesPostTyper = $checkSubsPostTyperTypTree" 426 | } 427 | 428 | private def assertClass[T: c.WeakTypeTag]( 429 | msg: String = s"required class or trait") { 430 | val T = c.weakTypeOf[T].typeSymbol 431 | if (!T.isClass) { 432 | c.error(c.enclosingPosition, msg + ", found " + T) 433 | } 434 | } 435 | 436 | private def assertSealedAbstract[T: c.WeakTypeTag] { 437 | assertClass[T]() 438 | val T = c.weakTypeOf[T].typeSymbol.asClass 439 | if (!T.isSealed || !T.isAbstract) { 440 | lazy val modifiers = T.toString.split(" ").dropRight(1).mkString 441 | c.error( 442 | c.enclosingPosition, 443 | s"required sealed trait or sealed abstract class, found $modifiers ${T.fullName}") 444 | } 445 | } 446 | 447 | def formatSingletonImplicit[T: c.WeakTypeTag](encodeSingleton: Tree, 448 | ev: Tree): Tree = 449 | formatSingleton[T](encodeSingleton) 450 | 451 | def formatSingleton[T: c.WeakTypeTag](encodeSingleton: Tree): Tree = { 452 | SingletonObject.checkSingletonObjectMacro[T](c) 453 | val T = c.weakTypeOf[T].typeSymbol.asClass 454 | val t = q""" 455 | { 456 | import $pjson._ 457 | import $pkg._ 458 | val encoded = $encodeSingleton.apply(classOf[$T]) 459 | new Format[$T]{ 460 | def reads(json: JsValue) = { 461 | if(json == encoded) 462 | JsSuccess(${T.module}) 463 | else JsError(s"not " + ${T.fullName}) 464 | } 465 | def writes(obj: $T) = encoded 466 | } 467 | } 468 | """ 469 | //println(t) 470 | t 471 | } 472 | 473 | def formatSealed[T: c.WeakTypeTag]: Tree = formatSealedInternal[T](None) 474 | def formatSealedWithFallback[T: c.WeakTypeTag, Fallback <: T: c.WeakTypeTag] 475 | : Tree = 476 | formatSealedInternal[T](Some(c.weakTypeOf[Fallback].typeSymbol.asType)) 477 | def formatSealedInternal[T: c.WeakTypeTag]( 478 | fallback: Option[TypeSymbol]): Tree = { 479 | assertSealedAbstract[T] 480 | 481 | val T = c.weakTypeOf[T] 482 | val subs = T.typeSymbol.asClass.knownDirectSubclasses.toVector 483 | 484 | if (subs.isEmpty) 485 | c.error(c.enclosingPosition, 486 | s""" 487 | No child classes found for $T. If there clearly are child classes, 488 | try moving the call into a separate file, a sibling package, a separate sbt sub project or elsewhere. 489 | This can be caused by https://issues.scala-lang.org/browse/SI-7046 which can only be avoided by manually moving the call. 
490 | """) 491 | 492 | val writes = subs.map { sym => 493 | cq"""obj: $sym => Json.toJson[$sym](obj)(implicitly[Format[$sym]])""" 494 | } 495 | 496 | val reads = subs 497 | // don't include fallback 498 | .filterNot(t => 499 | fallback.map(_.toType =:= t.asType.toType).getOrElse(false)) 500 | .map { sym => 501 | q"""json.validateAuto[$sym]""" 502 | } 503 | .reduce((l, r) => q"$l orElse $r") 504 | 505 | // add fallback last 506 | val readsWithFallback = fallback.map(f => 507 | q"$reads orElse json.validateAuto[$f]") getOrElse reads 508 | 509 | val rootName = Literal(Constant(T.toString)) 510 | val subNames = Literal(Constant(subs.map(_.fullName).mkString(", "))) 511 | 512 | val t = q""" 513 | { 514 | import $pjson._ 515 | import $pkg._ 516 | new Format[$T]{ 517 | ${verifyKnownDirectSubclassesPostTyper(T: Type, s"formatSealed[$T]")} 518 | def reads(json: JsValue) = $readsWithFallback orElse JsError("Could not deserialize to any of the subtypes of "+ $rootName +". Tried: "+ $subNames) 519 | def writes(obj: $T) = { 520 | obj match { 521 | case ..$writes 522 | case _ => throw new Exception("formatSealed found unexpected object of type "+${Literal( 523 | Constant(T.toString))}+s": $${obj.getClass}$$obj") 524 | } 525 | } 526 | } 527 | } 528 | """ 529 | //println(t) 530 | t 531 | } 532 | 533 | protected def isCaseClass(tpe: Type) = 534 | tpe.typeSymbol.isClass && tpe.typeSymbol.asClass.isCaseClass 535 | 536 | protected def isModuleClass(tpe: Type) = 537 | tpe.typeSymbol.isClass && tpe.typeSymbol.asClass.isModuleClass 538 | } 539 | 540 | object implicits { 541 | 542 | /** very simple optional field Reads that maps "null" to None */ 543 | implicit def optionWithNull[T](implicit rds: Reads[T]): Reads[Option[T]] = 544 | Reads.optionWithNull[T] 545 | 546 | /** Stupidly reads a field as an Option mapping any error (format or missing field) to None */ 547 | implicit def optionNoError[A](implicit reads: Reads[A]): Reads[Option[A]] = 548 | Reads.optionNoError[A] 549 | 550 | /** Stupidly reads a field as an Option mapping any error (format or missing field) to None */ 551 | implicit def formatSingleton[T]( 552 | implicit encodeSingleton: SingletonEncoder, 553 | ev: SingletonObject[T] 554 | ): Format[T] = macro Macros.formatSingletonImplicit[T] 555 | } 556 | 557 | import scala.reflect.ClassTag 558 | final case class SingletonEncoder(apply: java.lang.Class[_] => JsValue) 559 | object SingletonEncoder { 560 | import java.lang.Class 561 | import scala.reflect.NameTransformer 562 | def camel2underscore(str: String) = ( 563 | str.take(1) 564 | ++ 565 | "[0-9A-Z]".r.replaceAllIn( 566 | str.drop(1), 567 | "_" + _.group(0).toLowerCase 568 | ) 569 | ) 570 | def decodeName(name: String) = NameTransformer.decode(name.dropRight(1)) 571 | implicit def simpleName = 572 | SingletonEncoder(cls => JsString(decodeName(cls.getSimpleName))) 573 | implicit def simpleNameLowerCase = 574 | SingletonEncoder( 575 | cls => JsString(camel2underscore(decodeName(cls.getSimpleName)))) 576 | implicit def simpleNameUpperCase = 577 | SingletonEncoder( 578 | cls => 579 | JsString( 580 | camel2underscore(decodeName(cls.getSimpleName)).toUpperCase)) 581 | } 582 | 583 | object Jsonx { 584 | 585 | /** 586 | Generates a PlayJson Format[T] for a final case class T with any number of fields (>22 included) 587 | */ 588 | def formatCaseClass[T](implicit ev: CaseClass[T]): Format[T] = macro Macros 589 | .formatCaseClass[T] 590 | 591 | /** 592 | Generates a PlayJson Format[T] for a final case class T with any number of fields (>22 included) 593 | Uses 
594 | */
595 | def formatCaseClassUseDefaults[T](implicit ev: CaseClass[T]): Format[T] = macro Macros
596 | .formatCaseClassUseDefaults[T]
597 | 
598 | /**
599 | Serializes single-member classes, such as value classes, as their single contained value instead of a wrapping JSON object.
600 | */
601 | def formatInline[T]: Format[T] = macro Macros.formatInline[T]
602 | 
603 | /**
604 | Generates a PlayJson Format[T] for a sealed trait that dispatches to the Writes of its concrete subclasses.
605 | CAREFUL: It uses orElse for Reads in an unspecified order, which can produce wrong results
606 | in case of ambiguities.
607 | */
608 | def formatSealed[T]: Format[T] = macro Macros.formatSealed[T]
609 | 
610 | /**
611 | Generates a PlayJson Format[T] for a sealed trait that dispatches to the Writes of its concrete subclasses.
612 | Uses the provided type Fallback as the last resort. Fallback needs to be a subtype of T,
613 | ideally: final case class Fallback(json: JsValue) extends T,
614 | with formatInline[Fallback] as its serializer.
615 | CAREFUL: It uses orElse for Reads in an unspecified order, which can produce wrong results
616 | in case of ambiguities.
617 | */
618 | def formatSealedWithFallback[T, Fallback <: T]: Format[T] = macro Macros
619 | .formatSealedWithFallback[T, Fallback]
620 | 
621 | /** serializes a singleton object of given type with the given encoder */
622 | def formatSingleton[T](
623 | implicit encodeSingleton: SingletonEncoder
624 | ): Format[T] = macro Macros.formatSingleton[T]
625 | 
626 | /**
627 | Fully automatic, recursive formatter generator.
628 | Recognizes overridden formatters from companion objects or implicit scope.
629 | Currently only works for case classes, sealed traits, objects and manually defined formatters.
630 | Automatically and recursively delegates to formatCaseClass, formatSealed, formatInline, formatSingleton or implicitly[Format[...]].
631 | Note: defaults to inlining single-value case classes. Override if required.
632 | Currently not supported: classes with type arguments, including tuples.
633 | */
634 | def formatAuto[T]: Format[T] = macro Macros.formatAuto[T]
635 | }
636 | 
--------------------------------------------------------------------------------
/play-json-extra/shared/src/main/scala/play/json/extra/key.scala:
--------------------------------------------------------------------------------
1 | package play.json.extra
2 | 
3 | import scala.annotation.StaticAnnotation
4 | 
5 | // Name to be used to serialize this field in JSON
6 | final case class key(name: String) extends StaticAnnotation
7 | 
--------------------------------------------------------------------------------
/play-json-extra/shared/src/test/scala/play/json/extra/JsonFormatterTests.scala:
--------------------------------------------------------------------------------
1 | package play.json.extra
2 | 
3 | import java.time.LocalDateTime
4 | 
5 | import org.scalatest.FunSuite
6 | import play.api.libs.json.{JsObject, Json}
7 | 
8 | class JsonFormatterTests extends FunSuite {
9 | 
10 | @JsonFormat
11 | final case class TestDataTime(dt: LocalDateTime, children: List[TestDataTime] = Nil)
12 | 
13 | @JsonFormat
14 | final case class DefaultTest(@key("ok1") a1: Int = 1, @key("ok2") a2: Int = 2, @key("ok3") a3: Int = 3, @key("ok4") a4: Int = 4,
15 | @key("ok5") a5: Int = 5, @key("ok6") a6: Int = 6, @key("ok7") a7: Int = 7, @key("ok8") a8: Int = 8,
16 | @key("ok9") a9: Int = 9, @key("ok10") a10: Int = 10,
17 | @key("ok11") a11: Int = 11, @key("ok12") a12: Int = 12, @key("ok13") a13: Int = 13, @key("ok14") a14: Int = 14,
18 | @key("ok15") a15: Int = 15, @key("ok16") a16: Int = 16, @key("ok17") a17: Int = 17, @key("ok18") a18: Int = 18,
19 | @key("ok19") a19: Int = 19, @key("ok20") a20: Int = 20,
20 | @key("ok21") a21: Int = 21, @key("ok22") a22: Int = 22, @key("ok23") a23: Int = 23, @key("ok24") a24: Int = 24,
21 | @key("ok25") a25: Int = 25, @key("ok26") a26: Int = 26, @key("ok27") a27: Int = 27, @key("ok28") a28: Int = 28,
22 | @key("ok29") a29: Int = 29
23 | )
24 | 
25 | test("basic datetime test") {
26 | val dt = LocalDateTime.of(2015, 8, 11, 12, 1, 2, 3)
27 | val ts = TestDataTime(dt)
28 | val json = Json.toJson(ts)
29 | // println(Json.stringify(json))
30 | assert(Json.stringify(json) === """{"dt":"2015-08-11T12:01:02.000000003","children":[]}""")
31 | assert(json.as[TestDataTime].dt === dt)
32 | }
33 | 
34 | test("default test key + >21 fields + des/ser") {
35 | val ts = DefaultTest()
36 | val json = Json.toJson(ts).as[JsObject]
37 | // println(Json.stringify(json))
38 | assert(json.fields.length === 29)
39 | val fieldNames = List(
40 | "ok1", "ok2", "ok3", "ok4", "ok5", "ok6", "ok7", "ok8", "ok9", "ok10",
41 | "ok11", "ok12", "ok13", "ok14", "ok15", "ok16", "ok17", "ok18", "ok19",
42 | "ok20", "ok21", "ok22", "ok23", "ok24", "ok25", "ok26", "ok27", "ok28", "ok29"
43 | )
44 | 
45 | fieldNames.foreach(value => assert(json.fields.map(_._1).contains(value) === true))
46 | val json2 = Json.parse("""{"ok27":27,"ok16":16,"ok4":4,"ok25":25,"ok14":14,"ok10":10,"ok17":17,"ok21":21,"ok24":24,"ok28":28,"ok3":3,"ok20":20,"ok15":15,"ok7":7,"ok11":11,"ok1":1,"ok23":23,"ok12":12,"ok8":8,"ok29":29,"ok19":19,"ok2":2,"ok5":5,"ok26":26,"ok13":13,"ok18":18,"ok9":9,"ok6":6,"ok22":22}""")
47 | val ts2 = json2.as[DefaultTest]
48 | assert(ts2.a29 === 29)
49 | }
50 | 
51 | }
52 | 
53 | 
54 | 
--------------------------------------------------------------------------------
/project/Common.scala:
--------------------------------------------------------------------------------
1 | import sbt._
2 | import Keys._
3 | import com.scalapenos.sbt.prompt._
4 | import SbtPrompt.autoImport._
5 | 
6 | object Common {
7 | 
8 | val settings: Seq[Setting[_]] = Seq(
9 | organizationName := "Paro Alberto Maria Angelo",
10 | organization := "io.megl",
11 | version := Versions.app,
12 | scalaVersion := Versions.scala,
13 | autoScalaLibrary := false,
14 | parallelExecution := false,
15 | scalacOptions ++= Seq(
16 | "-encoding", "UTF-8", "-target:jvm-1.8",
17 | "-unchecked", // Enable additional warnings where generated code depends on assumptions.
18 | "-deprecation", // Emit warning and location for usages of deprecated APIs.
19 | "-feature", // Emit warning and location for usages of features that should be imported explicitly.
20 | //"-Xfatal-warnings", // Fail the compilation if there are any warnings.
21 | //"-Xlint", // Enable recommended additional warnings.
22 | //"-Ywarn-adapted-args", // Warn if an argument list is modified to match the receiver.
23 | //"-Ywarn-dead-code", // Warn when dead code is identified.
24 | //"-Ywarn-inaccessible", // Warn about inaccessible types in method signatures.
25 | //"-Ywarn-nullary-override", // Warn when non-nullary overrides nullary, e.g. def foo() over def foo.
26 | //"-Ywarn-numeric-widen", // Warn when numerics are widened.
27 | "-language:postfixOps",
28 | "-language:implicitConversions",
29 | "-language:higherKinds",
30 | "-language:existentials",
31 | "-language:reflectiveCalls"
32 | /*, "-Ylog-classpath"*/),
33 | scalacOptions in Test ++= Seq("-Yrangepos"),
34 | javacOptions ++= Seq("-encoding", "UTF-8", "-source", "1.8", "-target", "1.8"),
35 | //updateOptions := updateOptions.value.withCachedResolution(true),
36 | //shellPrompt in ThisBuild := { state => Project.extract(state).currentRef.project + "> " },
37 | promptTheme := ScalapenosTheme,
38 | // additionalLibs in Dist := file("sigar").listFiles.filter(f => !f.isDirectory),
39 | logBuffered := false,
40 | publishTo := {
41 | val nexus = "https://oss.sonatype.org/"
42 | if (isSnapshot.value)
43 | Some("snapshots" at nexus + "content/repositories/snapshots")
44 | else
45 | Some("releases" at nexus + "service/local/staging/deploy/maven2")
46 | },
47 | publishMavenStyle := true,
48 | publishArtifact in Test := false,
49 | pomIncludeRepository := {
50 | _ => false
51 | },
52 | homepage := Some(url("http://www.megl.io")),
53 | startYear := Some(2015),
54 | licenses +=("Apache License 2.0", url("http://www.apache.org/licenses/")),
55 | resolvers ++= Resolvers.repositories,
56 | pomExtra :=
57 | <developers>
58 | <developer>
59 | <id>aparo</id>
60 | <name>Alberto Paro</name>
61 | <timezone>+1</timezone>
62 | <url>http://www.megl.io</url>
63 | </developer>
64 | </developers>
65 | <scm>
66 | <url>https://github.com/aparo/play-json-extra</url>
67 | <connection>scm:git:https://github.com/aparo/play-json-extra.git</connection>
68 | </scm>,
69 | addCompilerPlugin("org.scalamacros" % "paradise" % "2.1.1" cross CrossVersion.full)//,
70 | //sources in doc in Compile := List() //disable doc generation
71 | )
72 | 
73 | }
--------------------------------------------------------------------------------
/project/Dependencies.scala:
--------------------------------------------------------------------------------
1 | /* Copyright 2009-2017 - Paro Alberto Maria Angelo All Rights Reserved. */
2 | 
3 | import org.scalajs.sbtplugin.ScalaJSPlugin.autoImport._
4 | import sbt.Keys.libraryDependencies
5 | import sbt._
6 | 
7 | object Versions {
8 | 
9 | lazy val app = "2.6.8"
10 | 
11 | //scalajs
12 | lazy val upickle = "0.5.1"
13 | lazy val boopickle = "1.2.5"
14 | lazy val log4js = "1.4.10"
15 | lazy val uTest = "0.3.1"
16 | 
17 | lazy val scala = "2.12.4"
18 | 
19 | lazy val scalaTest = "3.0.5"
20 | lazy val scalaTestJS = "3.0.5"
21 | 
22 | lazy val playJson = "2.7.0-SNAPSHOT"
23 | lazy val specs2 = "4.0.2"
24 | 
25 | }
26 | 
27 | 
28 | object Library {
29 | 
30 | lazy val scalaCompiler = "org.scala-lang" % "scala-compiler" % Versions.scala
31 | 
32 | lazy val scalaReflect = "org.scala-lang" % "scala-reflect" % Versions.scala
33 | 
34 | lazy val scalastm = "org.scala-stm" %% "scala-stm" % "0.8"
35 | 
36 | lazy val specs2 = "org.specs2" %% "specs2-core" % Versions.specs2
37 | // "test"
38 | lazy val specs2Junit = "org.specs2" %% "specs2-junit" % Versions.specs2
39 | // "test"
40 | lazy val specs2ScalaCheck = "org.specs2" %% "specs2-scalacheck" % Versions.specs2 // "test"
41 | 
42 | lazy val scalatest = "org.scalatest" %% "scalatest" % Versions.scalaTest
43 | 
44 | }
45 | 
46 | 
47 | object DependencyHelpers {
48 | def compile(deps: ModuleID*): Seq[ModuleID] = deps map (_ % "compile")
49 | 
50 | def provided(deps: ModuleID*): Seq[ModuleID] = deps map (_ % "provided")
51 | 
52 | def test(deps: ModuleID*): Seq[ModuleID] = deps map (_ % "test")
53 | 
54 | def runtime(deps: ModuleID*): Seq[ModuleID] = deps map (_ % "runtime")
55 | 
56 | def container(deps: ModuleID*): Seq[ModuleID] = deps map (_ % "container")
57 | }
58 | 
--------------------------------------------------------------------------------
/project/Resolvers.scala:
--------------------------------------------------------------------------------
1 | import sbt._
2 | import Keys._
3 | 
4 | object Resolvers {
5 | lazy val repositories = Seq(
6 | // Sonatype repo
7 | Resolver.sonatypeRepo("snapshots"),
8 | // "staging" at "http://oss.sonatype.org/content/repositories/staging",
9 | Resolver.sonatypeRepo("releases"),
10 | "Typesafe ivy-snapshots" at "http://repo.typesafe.com/typesafe/ivy-snapshots/",
11 | "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"
12 | )
13 | }
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=1.1.1
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | // Comment to get more information during initialization
2 | logLevel := Level.Warn
3 | 
4 | //logLevel := Level.Debug
5 | 
6 | // Resolvers
7 | resolvers += "sonatype-releases" at "http://oss.sonatype.org/content/repositories/releases"
8 | resolvers += "Typesafe Repository" at "https://repo.typesafe.com/typesafe/releases/"
9 | resolvers += "Sonatype snapshots" at "https://oss.sonatype.org/content/repositories/snapshots/"
10 | resolvers += Resolver.sonatypeRepo("public")
11 | 
12 | addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.1")
13 | addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.6")
14 | addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.22")
15 | addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.1")
16 | //addSbtPlugin("com.codacy" % "sbt-codacy-coverage" % "1.0.3")
17 | //addSbtPlugin("org.scoverage" % "sbt-coveralls" % "1.0.0")
18 | //https://scalacenter.github.io/scalafix/
19 | addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.5.7")
20 | addSbtPlugin("org.scalastyle" % "scalastyle-sbt-plugin" % "1.0.0")
21 | // https://github.com/rtimush/sbt-updates
22 | addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.3.1")
23 | addSbtPlugin("com.scalapenos" % "sbt-prompt" % "1.0.2")
24 | 
25 | addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.0")
26 | addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.0")
27 | 
28 | 
29 | 
30 | 
31 | 
--------------------------------------------------------------------------------
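
A short usage sketch of the annotations shown above, mirroring the pattern exercised in JsonFormatterTests.scala: @JsonFormat generates the play-json Format for a case class (including classes with more than 22 fields) and @key overrides individual JSON field names. It assumes the macro-paradise compiler plugin is enabled, as configured in project/Common.scala; the Person/Address names are hypothetical and not part of the repository.

import play.api.libs.json.Json
import play.json.extra.{JsonFormat, key}

// Hypothetical domain classes used only for this sketch.
@JsonFormat
final case class Address(@key("street_name") street: String, city: String)

@JsonFormat
final case class Person(name: String, addresses: List[Address] = Nil)

object PersonExample extends App {
  val person = Person("Alice", List(Address("Main Street", "Springfield")))

  // Serialize with the macro-generated Format; @key renames street -> "street_name".
  val json = Json.toJson(person)
  // e.g. {"name":"Alice","addresses":[{"street_name":"Main Street","city":"Springfield"}]}

  // Read it back through the same Format and check the round trip.
  assert(json.as[Person] == person)
}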
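
When the annotation is not desired, the Jsonx macros can be wired up explicitly. The sketch below is based on the Scaladoc of formatCaseClass and formatSealed above: Format instances for the concrete subclasses are put in scope before formatSealed[Event] is expanded (its generated Writes looks them up with implicitly[Format[...]]), and the two subtypes are given non-overlapping required fields because, as the CAREFUL note warns, the generated Reads tries the subtypes with orElse in an unspecified order. The Event/Created/Deleted names are illustrative only.

import play.api.libs.json.{Format, Json}
import play.json.extra.Jsonx

// Hypothetical sealed hierarchy used only for this sketch.
sealed trait Event
final case class Created(id: Int, name: String) extends Event
final case class Deleted(deletedId: Int) extends Event

object Event {
  // Subclass Formats first, so the sealed Format can delegate to them.
  implicit val createdFormat: Format[Created] = Jsonx.formatCaseClass[Created]
  implicit val deletedFormat: Format[Deleted] = Jsonx.formatCaseClass[Deleted]

  // Dispatches writes by runtime type and tries each subtype's Reads via orElse.
  implicit val eventFormat: Format[Event] = Jsonx.formatSealed[Event]
}

object EventExample extends App {
  val events: List[Event] = List(Created(1, "first"), Deleted(1))

  val json = Json.toJson(events)
  // The distinct field names keep the orElse-based Reads unambiguous.
  assert(json.as[List[Event]] == events)
}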