├── .gitignore ├── .travis.yml ├── LICENSE ├── README.markdown ├── build.sbt ├── project ├── Dependencies.scala ├── bintray.sbt ├── build.properties ├── houserules.sbt └── nightlies.sbt └── serialization └── src ├── main └── scala │ └── sbt │ └── serialization │ ├── CanToString.scala │ ├── CustomPicklerUnpickler.scala │ ├── DebugPickleFormat.scala │ ├── ManifestUtil.scala │ ├── RichTypes.scala │ ├── ScalaShim.scala │ ├── SerializationFunctions.scala │ ├── SerializedValue.scala │ ├── TypeExpression.scala │ ├── UTF8FileOutput.scala │ ├── Using.scala │ ├── json │ ├── JSONPickleFormat.scala │ └── JsonMethods.scala │ ├── package.scala │ └── pickler │ ├── JavaExtraPicklers.scala │ ├── Option.scala │ ├── SerializedValue.scala │ ├── StringMap.scala │ ├── Throwable.scala │ ├── Traversable.scala │ ├── Tuple2.scala │ ├── TypeExpression.scala │ └── package.scala └── test └── scala └── sbt └── serialization ├── ArrayPicklerSpec.scala ├── BasicPicklerSpec.scala ├── JUnitUtil.scala ├── PicklerGrowableSpec.scala ├── PicklerTypeSpec.scala ├── Pointed.scala └── SerializedValueSpec.scala /.gitignore: -------------------------------------------------------------------------------- 1 | *.iml 2 | *.lock 3 | *.komodoproject 4 | .DS_Store 5 | .history 6 | .idea 7 | .classpath 8 | .cache 9 | .project/ 10 | .project 11 | .idea/ 12 | .idea_modules/ 13 | .settings/ 14 | .target/ 15 | project/boot/ 16 | workspace/ 17 | repository/ 18 | target/ 19 | logs/ 20 | .settings 21 | .classpath 22 | .project 23 | .cache 24 | bin/ 25 | .sbtserver 26 | project/.sbtserver 27 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # Use Docker-based container (instead of OpenVZ) 2 | sudo: false 3 | 4 | cache: 5 | directories: 6 | - $HOME/.ivy2/cache 7 | 8 | # At the moment, sbt 0.13.5 is preinstalled in Travis VM image, 9 | # which fortunately corresponds to current scalaz settings. 10 | # The line below can be used to cache a given sbt version. 11 | # - $HOME/.sbt/launchers/0.13.x 12 | 13 | # The line below is used to cache the scala version used by the build 14 | # job, as these versions might be replaced after a Travis CI build 15 | # environment upgrade (e.g. scala 2.11.2 could be replaced by scala 2.11.4). 16 | - $HOME/.sbt/boot/scala-$TRAVIS_SCALA_VERSION 17 | 18 | language: scala 19 | 20 | scala: 21 | - 2.10.3 22 | jdk: 23 | - openjdk6 24 | - openjdk7 25 | - oraclejdk7 26 | notifications: 27 | email: 28 | - qbranch@typesafe.com 29 | 30 | script: 31 | - sbt clean test 32 | 33 | # Tricks to avoid unnecessary cache updates 34 | - find $HOME/.sbt -name "*.lock" | xargs rm 35 | - find $HOME/.ivy2 -name "ivydata-*.properties" | xargs rm 36 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | This software is licensed under the Apache 2 license, quoted below. 2 | 3 | Copyright 2012-2013 Typesafe Inc. [http://www.typesafe.com] 4 | 5 | Licensed under the Apache License, Version 2.0 (the "License"); you may not 6 | use this file except in compliance with the License. You may obtain a copy of 7 | the License at 8 | 9 | [http://www.apache.org/licenses/LICENSE-2.0] 10 | 11 | Unless required by applicable law or agreed to in writing, software 12 | distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 13 | WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the 14 | License for the specific language governing permissions and limitations under 15 | the License. 16 | -------------------------------------------------------------------------------- /README.markdown: -------------------------------------------------------------------------------- 1 | sbt/serialization is no longer maintained as sbt 1 uses [sjson-new](https://github.com/eed3si9n/sjson-new). 2 | 3 | sbt/serialization 4 | ================= 5 | 6 | sbt serialization is an opinionated wrapper around [Scala pickling][pickling] focused on sbt's usage. 7 | In particular it provides: 8 | 9 | - JSON format that's nice 10 | - static-only core picklers 11 | 12 | [pickling]: https://github.com/scala/pickling 13 | -------------------------------------------------------------------------------- /build.sbt: -------------------------------------------------------------------------------- 1 | import Dependencies._ 2 | import com.typesafe.tools.mima.core._, ProblemFilters._ 3 | 4 | lazy val commonSettings = Seq( 5 | git.baseVersion := "0.1.3", 6 | scalaVersion := scala210Version, 7 | crossScalaVersions := Seq(scala210Version, scala211Version), 8 | libraryDependencies ++= Seq(junitInterface % Test, scalaCheck % Test), 9 | bintrayOrganization := Some("sbt"), 10 | bintrayPackage := "serialization", 11 | bintrayRepository := "maven-releases", 12 | scalacOptions <<= (scalaVersion) map { sv => 13 | Seq("-unchecked", "-deprecation", "-Xmax-classfile-name", "72") ++ 14 | { if (sv.startsWith("2.9")) Seq.empty else Seq("-feature") } 15 | }, 16 | javacOptions in Compile := Seq("-target", "1.6", "-source", "1.6"), 17 | javacOptions in (Compile, doc) := Seq("-source", "1.6"), 18 | previousArtifact := None // Some(organization.value %% moduleName.value % "1.0.0") 19 | ) 20 | 21 | lazy val root = (project in file(".")). 22 | aggregate(serialization). 23 | settings( 24 | inThisBuild(Seq( 25 | organization := "org.scala-sbt", 26 | homepage := Some(url("https://github.com/sbt/serialization")), 27 | description := "serialization facility for sbt", 28 | licenses := List("Apache-2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0")), 29 | scmInfo := Some(ScmInfo(url("https://github.com/sbt/serialization"), "git@github.com:sbt/serialization.git")), 30 | developers := List( 31 | Developer("havocp", "Havoc Pennington", "@havocp", url("https://github.com/havocp")), 32 | Developer("eed3si9n", "Eugene Yokota", "@eed3si9n", url("https://github.com/eed3si9n")), 33 | Developer("jsuereth", "Josh Suereth", "@jsuereth", url("https://github.com/jsuereth")) 34 | ), 35 | bintrayReleaseOnPublish := false 36 | )), 37 | commonSettings, 38 | publishArtifact := false, 39 | publish := {}, 40 | publishLocal := {} 41 | ) 42 | 43 | lazy val serialization = (project in file("serialization")). 44 | settings(commonSettings: _*). 
45 | settings( 46 | parallelExecution in Test := false, 47 | libraryDependencies ++= Seq( 48 | pickling, 49 | junitInterface % Test 50 | ) ++ jsonDependencies, 51 | binaryIssueFilters ++= Seq( 52 | ) 53 | ) 54 | -------------------------------------------------------------------------------- /project/Dependencies.scala: -------------------------------------------------------------------------------- 1 | import sbt._ 2 | import Keys._ 3 | import com.typesafe.sbt.SbtScalariform.ScalariformKeys 4 | 5 | object Dependencies { 6 | // Here are the versions used for the core project 7 | val scala210Version = "2.10.4" 8 | val scala211Version = "2.11.5" 9 | 10 | val picklingVersion = "0.10.1" 11 | val pickling210 = "org.scala-lang.modules" % "scala-pickling_2.10" % picklingVersion 12 | val pickling211 = "org.scala-lang.modules" % "scala-pickling_2.11" % picklingVersion 13 | val pickling = "org.scala-lang.modules" %% "scala-pickling" % picklingVersion 14 | 15 | private val jsonTuples = Seq( 16 | ("org.spire-math", "jawn-parser", "0.6.0"), 17 | ("org.spire-math", "json4s-support", "0.6.0") 18 | ) 19 | 20 | val jsonDependencies = jsonTuples map { 21 | case (group, mod, version) => (group %% mod % version).exclude("org.scala-lang", "scalap") 22 | } 23 | val jsonDependencies210 = jsonTuples map { 24 | case (group, mod, version) => group % s"${mod}_2.10" % version 25 | } 26 | val jsonDependencies211 = jsonTuples map { 27 | case (group, mod, version) => group % s"${mod}_2.11" % version 28 | } 29 | 30 | val mimeUtil = "eu.medsea.mimeutil" % "mime-util" % "2.1.1" 31 | // need to manually set this to override an incompatible old version 32 | val slf4jLog4j = "org.slf4j" % "slf4j-log4j12" % "1.6.6" 33 | 34 | val scalaCheckVersion = "1.11.5" 35 | val junitInterface = "com.novocode" % "junit-interface" % "0.11" 36 | val scalaCheck = "org.scalacheck" %% "scalacheck" % scalaCheckVersion 37 | val specs2 = "org.specs2" %% "specs2" % "2.3.11" 38 | } 39 | -------------------------------------------------------------------------------- /project/bintray.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("me.lessis" % "bintray-sbt" % "0.3.0") 2 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=0.13.9 2 | -------------------------------------------------------------------------------- /project/houserules.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("org.scala-sbt" % "sbt-houserules" % "0.3.1") 2 | -------------------------------------------------------------------------------- /project/nightlies.sbt: -------------------------------------------------------------------------------- 1 | resolvers += Resolver.url("bintray-sbt-ivy-releases", url("https://dl.bintray.com/sbt/ivy-snapshots"))(Resolver.ivyStylePatterns) 2 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/CanToString.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | 3 | import java.io.File 4 | import java.net.URI 5 | // import xsbti.Severity 6 | // import xsbti.Severity.{ Info, Warn, Error } 7 | 8 | /** 9 | * This trait represents a type that can be converted to/from a human readable string. The conversion MUST be 10 | * bi-directional. 
11 | * 12 | * Note: There are Pickler helper methods to construct picklers which use this human-readable string when pickling 13 | * rather than a more-binary-encoding of these types. Ideal for messages which are sent/used in JSON. 14 | * 15 | * @tparam A The type that can be human readable. 16 | */ 17 | trait CanToString[A] { 18 | def toString(a: A): String 19 | def fromString(s: String): A 20 | } 21 | 22 | /** 23 | * This contains default mechanisms to create CanToString and implciits for supported types: File, URI, TypeExpression. 24 | */ 25 | object CanToString { 26 | /** 27 | * Construct a new CanToString instance using the given conversion operations. 28 | * 29 | * NOTE: The following must hold: fs(ts(x)) == x 30 | * 31 | * @param ts A function which can turn the type into a human readable string. 32 | * @param fs A function which can take the human readable string and turn it back into an instance of the same type. 33 | * @tparam A The type we can conver. 34 | * @return A new CanToString bidriectional conversion. 35 | */ 36 | def apply[A](ts: A => String, fs: String => A): CanToString[A] = new CanToString[A] { 37 | def toString(a: A): String = ts(a) 38 | def fromString(s: String): A = fs(s) 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/CustomPicklerUnpickler.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | 3 | // TODO - Why are type aliases not workign? 4 | import scala.pickling.pickler.{ 5 | PrimitivePicklers, 6 | PrimitiveArrayPicklers, 7 | RefPicklers, 8 | DatePicklers 9 | } 10 | 11 | import sbt.serialization.pickler.{ 12 | OptionPicklers, 13 | ThrowablePicklers, 14 | VectorPicklers, 15 | ListPicklers, 16 | ArrayPicklers, 17 | SeqPicklers, 18 | MapPicklers, 19 | StringMapPicklers, 20 | Tuple2Picklers, 21 | JavaExtraPicklers, 22 | TypeExpressionPicklers, 23 | SerializationPicklers 24 | } 25 | 26 | trait CustomPicklers extends PrimitivePicklers 27 | with DatePicklers 28 | with PrimitiveArrayPicklers 29 | with OptionPicklers 30 | with ThrowablePicklers 31 | with JavaExtraPicklers 32 | with TypeExpressionPicklers 33 | with Tuple2Picklers 34 | with RefPicklers 35 | with LowPriorityCustomPicklers 36 | with SerializationPicklers {} 37 | 38 | trait LowPriorityCustomPicklers extends VectorPicklers 39 | with ListPicklers 40 | with ArrayPicklers 41 | with SeqPicklers 42 | with MapPicklers 43 | with StringMapPicklers {} 44 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/DebugPickleFormat.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | 3 | import scala.pickling.{ 4 | Pickle, 5 | PickleFormat, 6 | FastTypeTag, 7 | PBuilder, 8 | PReader, 9 | PicklingException, 10 | Output 11 | } 12 | import scala.reflect.runtime.universe.Mirror 13 | import scala.util.{ Failure, Success } 14 | 15 | // Note: This debug format should move into scala pickling. 
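A quick aside on the CanToString factory shown above: the companion's scaladoc mentions instances for File, URI and TypeExpression, and a minimal sketch (not repository code; the object name is hypothetical) of defining such an instance looks like this:

    import java.net.URI
    import sbt.serialization.CanToString

    object UriCanToStringExample {
      // Render a URI as its ASCII form and parse it back; the required law
      // fromString(toString(x)) == x holds for this pair of functions.
      val uriCanToString: CanToString[URI] =
        CanToString(_.toASCIIString, new URI(_))

      def roundTrips(uri: URI): Boolean =
        uriCanToString.fromString(uriCanToString.toString(uri)) == uri
    }

Picklers built from such an instance write the value as its human-readable string rather than as a structured object, which is what the CanToString scaladoc above is getting at.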
16 | 17 | private[serialization] object EmptyPickle extends Pickle { 18 | type ValueType = Unit 19 | val value: ValueType = () 20 | } 21 | 22 | private[serialization] class DebugPickleFormat extends PickleFormat { 23 | type PickleType = EmptyPickle.type 24 | type OutputType = Output[String] 25 | def createBuilder() = new DebugPickleBuilder() 26 | def createBuilder(out: Output[String]): PBuilder = new DebugPickleBuilder() 27 | override def createReader(pickle: PickleType) = ??? 28 | } 29 | 30 | private[serialization] class DebugPickleBuilder(indent: Int = 0) extends PBuilder { 31 | private val indentString = (0 to indent) map (_ => " ") mkString "" 32 | private def println(s: String): Unit = System.err.println(s"$indentString$s") 33 | private def nextLevelBuilder = new DebugPickleBuilder(indent + 1) 34 | 35 | override def beginEntry(picklee: Any): PBuilder = { 36 | println(s"beginEntry($picklee)") 37 | this 38 | } 39 | 40 | override def result(): Pickle = { 41 | println("result()") 42 | EmptyPickle 43 | } 44 | 45 | override def putElement(pickler: (PBuilder) => Unit): PBuilder = { 46 | println(s"putElement($pickler)") 47 | pickler(nextLevelBuilder) 48 | this 49 | } 50 | 51 | override def beginCollection(length: Int): PBuilder = { 52 | println(s"beginCollection($length)") 53 | this 54 | } 55 | 56 | override def endEntry(): Unit = { 57 | println("endEntry()") 58 | } 59 | 60 | override def endCollection(): Unit = { 61 | println(s"endCollection()") 62 | } 63 | 64 | override def putField(name: String, pickler: (PBuilder) => Unit): PBuilder = { 65 | println(s"putField($name, $pickler)") 66 | pickler(nextLevelBuilder) 67 | this 68 | } 69 | 70 | override def hintKnownSize(knownSize: Int): this.type = { 71 | println(s"hintKnownSize($knownSize") 72 | this 73 | } 74 | 75 | override def popHints(): this.type = { 76 | println(s"popHints()") 77 | this 78 | } 79 | 80 | override def pushHints(): this.type = { 81 | println(s"pushHints()") 82 | this 83 | } 84 | override def hintStaticallyElidedType(): this.type = { 85 | println(s"hintStaticallyElidedType()") 86 | this 87 | } 88 | 89 | override def hintOid(id: Int): this.type = { 90 | println(s"hintOid($id)") 91 | this 92 | } 93 | 94 | override def pinHints(): this.type = { 95 | println(s"pinHints()") 96 | this 97 | } 98 | 99 | override def hintTag(tag: FastTypeTag[_]): this.type = { 100 | println(s"hintTag($tag)") 101 | this 102 | } 103 | 104 | override def hintDynamicallyElidedType(): this.type = { 105 | System.err.println(s"hintDynamicallyElidedType()") 106 | this 107 | } 108 | 109 | override def unpinHints(): this.type = { 110 | System.err.println(s"unpinHints()") 111 | this 112 | } 113 | } -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/ManifestUtil.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | 3 | import sbt.serialization.ScalaShims.ManifestFactory 4 | import scala.pickling.FastTypeTag 5 | 6 | /** 7 | * Utilities used in PrimitivePicklers/SeqPicklers. 8 | * 9 | * TODO - Remove this once we clean up scala/pickling default picklers (we hope). 10 | * Additionally, check to see if BuildValue is still using this. 
11 | */ 12 | private[serialization] object ManifestUtil { 13 | def isApproxIterable(tag: FastTypeTag[_], cl: ClassLoader = ManifestUtil.getClass.getClassLoader): Boolean = 14 | tag match { 15 | case x if x.key startsWith "scala.Array[" => true 16 | case x if x.key startsWith "scala.Option[" => true 17 | case x if x.key startsWith "scala.collection.immutable.Nil.type" => true 18 | case x if x.key startsWith "scala.collection.immutable.Vector[" => true 19 | case x if x.key startsWith "scala.collection.immutable.$colon$colon[" => true 20 | case x if x.key startsWith "scala.collection.immutable.List[" => true 21 | case x if x.key startsWith "scala.collection.Seq[" => true 22 | case x if x.key startsWith "scala.collection.immutable.Seq[" => true 23 | case x if x.key startsWith "scala.collection.mutable.ArrayBuffer[" => true 24 | case x => 25 | val mitr = implicitly[Manifest[Iterable[Any]]] 26 | toManifest(tag, cl) map { 27 | _ <:< mitr 28 | } getOrElse false 29 | } 30 | def isApproxSubType(lhs: FastTypeTag[_], rhs: FastTypeTag[_], cl: ClassLoader = ManifestUtil.getClass.getClassLoader): Boolean = 31 | (lhs, rhs) match { 32 | case (_, x) if x.key == "scala.Any" => true 33 | case _ => 34 | (toManifest(lhs, cl), toManifest(rhs, cl)) match { 35 | case (Some(lhsm), Some(rhsm)) => lhsm <:< rhsm 36 | case _ => false 37 | } 38 | } 39 | def toManifest(tag: FastTypeTag[_], cl: ClassLoader = ManifestUtil.getClass.getClassLoader): Option[Manifest[_]] = 40 | toManifest(tag.key, cl) 41 | 42 | def toManifest(key: String, cl: ClassLoader): Option[Manifest[_]] = { 43 | val typeExpression = TypeExpression.parse(key)._1 44 | toManifest(typeExpression, cl) 45 | } 46 | 47 | def toManifest(typeExpression: TypeExpression, cl: ClassLoader): Option[Manifest[_]] = { 48 | val args = typeExpression.typeArgs map { toManifest(_, cl) } 49 | if (args forall { _.isDefined }) { 50 | val realArgs = args.flatten 51 | typeExpression.typeName match { 52 | case "scala.Unit" => Some(ManifestFactory.Unit) 53 | case default => 54 | try { 55 | val ourClass = cl.loadClass(default) 56 | val mf = 57 | if (realArgs.isEmpty) ManifestFactory.classType(ourClass) 58 | else ManifestFactory.classType(ourClass, realArgs.head, realArgs.tail: _*) 59 | Some(mf.asInstanceOf[Manifest[_]]) 60 | } catch { 61 | case _: ClassNotFoundException => None 62 | } 63 | } 64 | } else None 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/RichTypes.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | 3 | /** Utilities used in PrimitivePicklers. Note: We can remove this once we clean up scala/pickling. 
*/ 4 | private[serialization] trait RichTypes { 5 | import scala.reflect.runtime.universe._ 6 | 7 | implicit class RichType(tpe: scala.reflect.api.Universe#Type) { 8 | import definitions._ 9 | def isEffectivelyPrimitive: Boolean = tpe match { 10 | case TypeRef(_, sym: ClassSymbol, _) if sym.isPrimitive => true 11 | case TypeRef(_, sym, eltpe :: Nil) if sym == ArrayClass && eltpe.typeSymbol.isClass && eltpe.typeSymbol.asClass.isPrimitive => true 12 | case _ => false 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/ScalaShim.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | 3 | // Note this is only used by ManifestUtil.scala, and can be removed if that is removed. 4 | private[serialization] object ScalaShims { 5 | val ManifestFactory = scala.reflect.ManifestFactory 6 | } 7 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/SerializationFunctions.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | 3 | import scala.pickling.Generated 4 | import scala.util.Try 5 | import java.io.File 6 | 7 | /** A layer of serialization cake which provides the gen* macros for auto-constructing new picklers. */ 8 | trait SerializationFunctions { 9 | import scala.language.experimental.macros 10 | 11 | // non-implicit aliases of pickling's gen macros 12 | def genPickler[T]: Pickler[T] = macro scala.pickling.Compat.PicklerMacros_impl[T] 13 | def genUnpickler[T]: Unpickler[T] with scala.pickling.Generated = macro scala.pickling.Compat.UnpicklerMacros_impl[T] 14 | 15 | def toJsonString[A: Pickler](a: A): String = SerializedValue(a).toJsonString 16 | def toJsonFile[A: Pickler](a: A, file: File): Unit = SerializedValue(a).toJsonFile(file) 17 | def fromJsonString[A: Unpickler](json: String): Try[A] = SerializedValue.fromJsonString(json).parse[A] 18 | def fromJsonFile[A: Unpickler](file: File): Try[A] = SerializedValue.fromJsonFile(file).parse[A] 19 | } 20 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/SerializedValue.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | 3 | import java.io.File 4 | import org.json4s.JsonAST._ 5 | import scala.pickling.PicklingException 6 | import scala.util.control.NonFatal 7 | import scala.util.{ Try, Success } 8 | import scala.pickling.functions._ 9 | import sbt.serialization.json.{ 10 | JSONPickle 11 | } 12 | import sbt.serialization.json.JsonMethods._ 13 | import sbt.serialization.json.JSONPickleFormat 14 | 15 | /** 16 | * We serialize to and from this opaque type. The idea is to 17 | * hide exactly what we can serialize from/to and hide 18 | * which library we use to do it. 19 | * 20 | * What this will expose is the mechanism of using Pickler/Unpickler to 21 | * handle unknown serialized values. 22 | */ 23 | sealed trait SerializedValue { 24 | def parse[T](implicit unpickler: Unpickler[T]): Try[T] 25 | 26 | /** 27 | * Returns true if the supplied unpickler matches the 28 | * type tag in the SerializedValue. This can return 29 | * false in some cases where parse[T] could succeed (parse 30 | * is allowed to use duck typing). 
So this should only 31 | * be used when the type to be unpickled is not known 32 | * for sure and a discriminator is required. The advantages 33 | * of hasTag over simply attempting to parse are that it 34 | * doesn't throw an expensive exception on match failure, 35 | * and it can't accidentally match a structurally-compatible 36 | * but distinct type. 37 | */ 38 | def hasTag[T](implicit unpickler: Unpickler[T]): Boolean 39 | 40 | def toJsonString: String 41 | def toJsonFile(file: File): Unit 42 | 43 | // TODO toBinary if/when we add binary pickling 44 | 45 | // private because we don't have our chosen 46 | // json AST library in the public API 47 | protected def toJValue: JValue 48 | 49 | final override def equals(other: Any): Boolean = 50 | other match { 51 | case null => false 52 | case sv: SerializedValue => json.JsonMethods.jvalueEquals(toJValue, sv.toJValue) 53 | case _ => false 54 | } 55 | 56 | final override def hashCode(): Int = 57 | json.JsonMethods.jvalueHashCode(toJValue) 58 | } 59 | 60 | object SerializedValue { 61 | def apply[V](value: V)(implicit pickler: Pickler[V]): SerializedValue = 62 | LazyValue[V](value, pickler) 63 | 64 | /** Reconstitutes a SerializedValue from a json string. */ 65 | def fromJsonString(value: String): SerializedValue = 66 | JsonValue(JSONPickle(value)) 67 | /** Reconstitutes a SerializedValue from a json string. */ 68 | def fromJsonFile(file: File): SerializedValue = 69 | JsonValue(JSONPickle.fromFile(file)) 70 | 71 | // TODO fromBinary if/when we add binary pickling 72 | 73 | // this is in this file so it can use private JsonValue, 74 | // but the public implicit version is in a trait elsewhere. 75 | // NOTE: this pickler ONLY works with our JSONPickleFormat because 76 | // it assumes JValue is a "primitive" known to the format. 77 | // we can adjust this if we add a binary format. 78 | private[sbt] object pickler extends Pickler[SerializedValue] with Unpickler[SerializedValue] { 79 | val cheaterTag = implicitly[FastTypeTag[JValue]] 80 | // TODO - This is super hacky mechanism to avoid issues w/ pinned types. 
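// Reusing the JValue tag means JSONPickleFormat treats the payload as a raw JValue
// "primitive": pickle() below hands the parsed JValue straight to the builder via
// beginEntry, and unpickle() reads it back out with readPrimitive().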
81 | override val tag = cheaterTag.asInstanceOf[FastTypeTag[SerializedValue]] 82 | def pickle(a: SerializedValue, builder: PBuilder): Unit = { 83 | val json = a.toJValue 84 | builder.hintTag(cheaterTag) 85 | builder.hintStaticallyElidedType() 86 | builder.beginEntry(json) 87 | builder.endEntry() 88 | //jsonPickler.pickle(spsv.toJson, builder) 89 | } 90 | def unpickle(tag: String, preader: PReader): Any = { 91 | preader.hintTag(cheaterTag) 92 | preader.hintStaticallyElidedType() 93 | preader.beginEntry() 94 | // TODO - Check beginEntry returns cheaterTag 95 | val value = preader.readPrimitive().asInstanceOf[JValue] 96 | preader.endEntry() 97 | JsonValue(JSONPickle.fromJValue(value)) 98 | } 99 | } 100 | } 101 | /** A value we have serialized as JSON */ 102 | private final case class JsonValue(pickledValue: JSONPickle) extends SerializedValue { 103 | require(pickledValue ne null) 104 | 105 | import sbt.serialization.json.pickleFormat 106 | override def parse[T](implicit unpicklerForT: Unpickler[T]): Try[T] = 107 | Try { unpickle[T](pickledValue) } 108 | 109 | def hasTag[T](implicit unpickler: Unpickler[T]): Boolean = 110 | pickledValue.readTypeTag.map(tag => tag == unpickler.tag.key).getOrElse(false) 111 | 112 | override def toJsonString: String = pickledValue.value 113 | override def toJsonFile(file: File): Unit = 114 | Using.fileWriter(file) { _.write(toJsonString) } 115 | 116 | // this deliberately doesn't simply toJsonString because it would 117 | // be broken to use toString to get parseable json (since the SerializedValue 118 | // may not be a JsonValue) 119 | override def toString = s"JsonValue($toJsonString)" 120 | 121 | override def toJValue: JValue = pickledValue.parsedValue 122 | } 123 | 124 | /** 125 | * A value we have the info available to serialize, but we haven't 126 | * picked a format yet. Allows us to defer format selection, or 127 | * for in-process uses we can even try to skip serialization. 128 | */ 129 | private final case class LazyValue[V](value: V, pickler: Pickler[V]) extends SerializedValue { 130 | // we use this to optimize a common case 131 | private def unpicklerMatchesExactly[T](unpickler: Unpickler[T]): Boolean = { 132 | // We compare tag.key because FastTypeTag.equals uses Mirror 133 | // and Type and we don't want to use the reflection API. 134 | pickler.tag.key == unpickler.tag.key 135 | } 136 | 137 | // this could theoretically avoid the round-trip through JSON 138 | // in some cases, but pretty annoying to figure out what those 139 | // cases are so forget it. 140 | // Not expecting to actually call this really anyway because 141 | // we use LazyValue on the "send" side. 142 | override def parse[T](implicit unpickler: Unpickler[T]): Try[T] = 143 | if (unpicklerMatchesExactly(unpickler)) Success(value.asInstanceOf[T]) 144 | // this allows duck typing to succeed and also handles 145 | // V=Fruit, T=Apple case. 
146 | else toJson.parse[T] 147 | 148 | def hasTag[T](implicit unpickler: Unpickler[T]): Boolean = 149 | // the toJson is needed if you have a Fruit pickler 150 | // and an Apple unpickler, so $type is Apple but pickler.tag 151 | // is Fruit 152 | unpicklerMatchesExactly(unpickler) || toJson.hasTag[T] 153 | 154 | override def toJsonString = toJson.toJsonString 155 | override def toJsonFile(file: File): Unit = 156 | { 157 | val output = new UFT8FileOutput(file) 158 | var success = false 159 | try { 160 | val builder = json.pickleFormat.createBuilder(output) 161 | pickleInto(value, builder)(pickler) 162 | success = true 163 | } finally { 164 | // don't forget to close 165 | output.close() 166 | // UTF8FileOutput truncates the file if it exists, so there's no reason why we wouldn't delete it in case an error occured 167 | // Otherwise we end up with partially serialized data that will break when trying to deserialize it 168 | if (!success) { 169 | file.delete() 170 | } 171 | } 172 | } 173 | 174 | override def toJValue = toJson.toJValue 175 | 176 | private lazy val jsonValue = 177 | JsonValue(pickle(value)(sbt.serialization.json.pickleFormat, pickler)) 178 | 179 | def toJson: JsonValue = jsonValue 180 | } 181 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/TypeExpression.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | 3 | // Copied from scala/pickling 4 | // https://github.com/scala/pickling/blob/c0fc5df7784188cf470debb3f9d41adaf37df5a6/core/src/main/scala/pickling/internal/AppliedType.scala 5 | 6 | object TypeExpression { 7 | // the delimiters in an applied type 8 | private val delims = List(',', '[', ']') 9 | 10 | /* Parse an applied type. 11 | * 12 | * @param s the string that is parsed 13 | * @return a pair with the parsed applied type and the remaining string. 14 | */ 15 | def parse(s: String): (TypeExpression, String) = { 16 | // shape of `s`: fqn[at_1, ..., at_n] 17 | val (typeName, rem) = s.span(!delims.contains(_)) 18 | 19 | if (rem.isEmpty || rem.startsWith(",") || rem.startsWith("]")) { 20 | (TypeExpression(typeName, List()), rem) 21 | } else { // parse type arguments 22 | var typeArgs = List[TypeExpression]() 23 | var remaining = rem 24 | 25 | while (remaining.startsWith("[") || remaining.startsWith(",")) { 26 | remaining = remaining.substring(1) 27 | val (next, rem) = parse(remaining) 28 | typeArgs = typeArgs :+ next 29 | remaining = rem 30 | } 31 | 32 | (TypeExpression(typeName, typeArgs), if (remaining.startsWith("]")) remaining.substring(1) else remaining) 33 | } 34 | } 35 | 36 | } 37 | 38 | /** 39 | * Simple representation of an applied type. Used for reading pickled types. 40 | * 41 | * Example, ``List[String]`` would be represented as: 42 | * 43 | * {{{ 44 | * TypeExpression("scala.collection.immutable.List", 45 | * Seq(TypeExpression("java.lang.String", Nil) 46 | * ) 47 | * }}} 48 | * 49 | * As you can see, simple types like "String" are represented as applied types with no arguments. 
50 | */ 51 | case class TypeExpression(typeName: String, typeArgs: List[TypeExpression]) { 52 | override def toString = 53 | typeName + (if (typeArgs.isEmpty) "" else typeArgs.mkString("[", ",", "]")) 54 | } 55 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/UTF8FileOutput.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | 3 | import java.io.{ File, FileOutputStream, BufferedWriter, OutputStreamWriter } 4 | import scala.pickling.Output 5 | import java.nio.charset.Charset 6 | 7 | private[serialization] class UFT8FileOutput(file: File) extends Output[String] { 8 | private[this] val writer = { 9 | if (!file.getParentFile.exists) { 10 | file.getParentFile.mkdirs() 11 | } 12 | new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file, false), UFT8FileOutput.utf8)) 13 | } 14 | def result(): String = throw new UnsupportedOperationException() 15 | 16 | def put(obj: String): this.type = { 17 | writer.write(obj) 18 | this 19 | } 20 | 21 | def close(): Unit = writer.close() 22 | } 23 | 24 | private[serialization] object UFT8FileOutput { 25 | val utf8 = Charset.forName("UTF-8") 26 | } 27 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/Using.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | 3 | import java.io.{ File, FileOutputStream, BufferedWriter, OutputStreamWriter } 4 | import java.nio.charset.Charset 5 | 6 | private[serialization] object Using { 7 | def fileWriter(file: File, charset: Charset = Charset.forName("UTF-8"), append: Boolean = false)(f: BufferedWriter => Unit): Unit = 8 | { 9 | if (!file.getParentFile.exists) { 10 | file.getParentFile.mkdirs() 11 | } 12 | val resource = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file, append), charset)) 13 | try { 14 | f(resource) 15 | } finally { 16 | resource.close() 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/json/JSONPickleFormat.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | 3 | import java.io.File 4 | import scala.pickling.{ 5 | FastTypeTag, 6 | Output, 7 | Pickle, 8 | PickleFormat, 9 | PickleTools, 10 | PicklingException, 11 | StringOutput, 12 | UnpickleOps 13 | } 14 | import scala.pickling.internal.lookupUnpicklee 15 | // FIXME this isn't threadsafe right? we need to get rid of its use. 
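Stepping back briefly to TypeExpression.parse defined above, here is a short sketch (not repository code; the object name is hypothetical) of what it returns for the List[String] example from its scaladoc:

    import sbt.serialization.TypeExpression

    object TypeExpressionParseExample {
      // parse returns the applied type together with the unconsumed remainder of the input.
      val (expr, rest) = TypeExpression.parse("scala.collection.immutable.List[java.lang.String]")
      // expr == TypeExpression("scala.collection.immutable.List",
      //                        List(TypeExpression("java.lang.String", Nil)))
      // rest == ""
      // expr.toString renders back to the original tag key.
    }

ManifestUtil above uses exactly this parsed form to rebuild a Manifest from a pickled FastTypeTag key.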
16 | import org.json4s.JsonAST._ 17 | import scala.util.parsing.json.JSONFormat.quoteString 18 | import scala.collection.mutable.{ StringBuilder, Stack } 19 | import scala.util.{ Success, Failure } 20 | import jawn.support.json4s.{ Parser => JawnParser } 21 | 22 | package json { 23 | 24 | import scala.pickling.Hints 25 | 26 | private[serialization] object `package` { 27 | implicit val pickleFormat: JSONPickleFormat = new JSONPickleFormat 28 | def findByName(obj: JObject, name: String): JValue = 29 | (obj.obj find { case (n, v) => n == name }) match { 30 | case Some((n, v)) => v 31 | case _ => JNothing 32 | } 33 | } 34 | 35 | private[serialization] sealed abstract class JSONPickle extends Pickle { 36 | type ValueType = String 37 | type PickleFormatType = JSONPickleFormat 38 | //abstract val value: String 39 | /** The value in the pickled parsed into a JValue AST. note this may throw. */ 40 | def parsedValue: JValue 41 | 42 | private[serialization] def readTypeTag: Option[String] = parsedValue match { 43 | case obj: JObject => 44 | findByName(obj, JSONPickleFormat.TYPE_TAG_FIELD) match { 45 | case JString(s) => Some(s) 46 | case _ => None 47 | } 48 | case _ => None 49 | } 50 | 51 | override final def equals(other: Any): Boolean = other match { 52 | case null => false 53 | case o: JSONPickle => JsonMethods.jvalueEquals(parsedValue, o.parsedValue) 54 | case _ => false 55 | } 56 | override def hashCode = parsedValue.hashCode 57 | } 58 | private[json] class RawStringPickle(override val value: String) extends JSONPickle { 59 | require(value ne null) 60 | 61 | def parsedValue: JValue = 62 | JawnParser.parseFromString(value) match { 63 | case Success(json: JValue) => json 64 | case Failure(e) => throw new PicklingException(s"""failed to parse "${value}" as JSON: ${e.getMessage}""") 65 | } 66 | } 67 | private[json] class JValuePickle(override val parsedValue: JValue) extends JSONPickle { 68 | require(parsedValue ne null) 69 | 70 | // This HAS to be val based on the pickling API. However, we may never call it for a given pickle, 71 | // so we'd like to not pay the string rendering tax unless we must. 72 | override lazy val value: String = JsonMethods.compact(parsedValue) 73 | } 74 | private[serialization] object JSONPickle { 75 | def apply(in: String): JSONPickle = new RawStringPickle(in) 76 | 77 | def fromFile(file: File): JSONPickle = 78 | fromJValue(JawnParser.parseFromFile(file) match { 79 | case Success(json: JValue) => json 80 | case Failure(e) => throw new PicklingException(s"""failed to parse "${file}" as JSON: ${e.getMessage}""") 81 | }) 82 | 83 | def fromJValue(in: JValue): JSONPickle = 84 | in match { 85 | // this null check is because when we read primitive (I think with no type tag), 86 | // we get null instead of JNull. 
87 | case null => new JValuePickle(JNull) 88 | case other => new JValuePickle(other) 89 | } 90 | } 91 | 92 | private[serialization] class JSONPickleFormat extends PickleFormat { 93 | type PickleType = JSONPickle 94 | type OutputType = Output[String] 95 | def createBuilder() = new VerifyingJSONPickleBuilder(this, new StringOutput) 96 | def createBuilder(out: Output[String]): PBuilder = new VerifyingJSONPickleBuilder(this, out) 97 | def createReader(pickle: JSONPickle) = 98 | new VerifyingJSONPickleReader(this, IniitalReaderState(pickle.parsedValue)) 99 | } 100 | private[serialization] object JSONPickleFormat { 101 | private[json] val TYPE_TAG_FIELD = "$type" 102 | private[json] val DYNAMIC_KEY_FIELD = "$keys" 103 | private[json] val REF_ID_FIELD = "$ref" 104 | 105 | private[json] def isSpecialField(name: String): Boolean = 106 | (TYPE_TAG_FIELD == name) || (DYNAMIC_KEY_FIELD == name) || (REF_ID_FIELD == name) 107 | private[json] def isElidedField(name: String): Boolean = 108 | (DYNAMIC_KEY_FIELD == name) 109 | } 110 | 111 | private[json] sealed trait BuilderState { 112 | def previous: BuilderState 113 | } 114 | private[json] case class CollectionState(val previous: BuilderState, numElements: Int, hasInput: Boolean) extends BuilderState 115 | private[json] case class RawEntryState(previous: BuilderState, picklee: Any, hints: Hints, var wasCollectionOrMap: Boolean = false) extends BuilderState 116 | private[json] case class MapEntryState(val previous: BuilderState, picklee: Any, hints: Hints) extends BuilderState 117 | private[json] case class RefEntryState(val previous: BuilderState) extends BuilderState 118 | private[json] case class WriteOptionState(val previous: BuilderState) extends BuilderState 119 | private[json] object EmptyState extends BuilderState { 120 | def previous = this 121 | } 122 | 123 | // A slow implementation of of a pickle builder 124 | // This uses a TON of branch statements to ensure the builder is in the correct state for any call 125 | // and to programatically enforce constraints of Pickler implementations. 126 | // We use this just to verify our own picklers. 127 | private[json] class VerifyingJSONPickleBuilder(format: JSONPickleFormat, buf: Output[String]) extends PBuilder with PickleTools { 128 | import JSONPickleFormat._ 129 | var state: BuilderState = EmptyState 130 | //(tag.key startsWith "scala.Option[") 131 | private def isJValue(tag: FastTypeTag[_]): Boolean = 132 | (tag.key startsWith "org.json4s.JsonAST.") 133 | // Hackery so we elide option collection types. 134 | private def isOption(tag: FastTypeTag[_]): Boolean = 135 | (tag.key startsWith "scala.Option") 136 | 137 | // Here we get notified of object/value-like things. 138 | override def beginEntry(picklee: Any): PBuilder = withHints { hints => 139 | // Here we check to see if we need to serialize a reference. These are used to avoid circular object 140 | // dependencies for picklers which have circluarly-references objects. 141 | if (hints.oid != -1) { 142 | buf.put("{\"" + REF_ID_FIELD + "\":" + hints.oid + "}") 143 | state = RefEntryState(state) 144 | } else if (isOption(hints.tag)) { 145 | // We expect to be writing a collection, we just ignore the collection aspect. 
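// A Some therefore comes out as the bare element value (the WriteOptionState case of
// putElement below simply delegates to the element's pickler) and a None as a JSON null
// (beginCollection with length 0 emits "null"), so options never appear as wrapped
// arrays in the output.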
146 | state = WriteOptionState(RawEntryState(state, picklee, hints, true)) 147 | } else { 148 | state = new RawEntryState(state, picklee, hints) 149 | } 150 | this 151 | } 152 | override def putField(name: String, pickler: (PBuilder) => Unit): PBuilder = 153 | if (!isElidedField(name)) { 154 | state match { 155 | case x: RawEntryState => 156 | x.wasCollectionOrMap = true 157 | // Now we know we're in a map state, so we swap into map state. 158 | state = MapEntryState(x.previous, x.picklee, x.hints) 159 | buf.put("{") 160 | case _: MapEntryState => 161 | // here we just need another , 162 | buf.put(",") 163 | case _ => sys.error("Cannot put a field when not in entry state!") 164 | } 165 | // Here we must append all the stringy things around the field. 166 | buf.put('"' + name + "\":") 167 | pickler(this) 168 | this 169 | } else this 170 | override def endEntry(): Unit = { 171 | state match { 172 | case RawEntryState(prev, _, _, true) => 173 | // Here we do nothing because it was a collection. 174 | state = prev 175 | case RawEntryState(prev, picklee, hints, false) => 176 | // Here we have to actually serialize the thing, as we're not a collection or a map. 177 | if (primitives.contains(hints.tag.key)) 178 | primitives(hints.tag.key)(picklee) 179 | else if (primitiveArrays.contains(hints.tag.key)) { 180 | primitiveArrays(hints.tag.key)(picklee) 181 | } else if (isJValue(hints.tag)) { 182 | import JsonMethods._ 183 | buf.put(compact(render(picklee.asInstanceOf[JValue]))) 184 | } else { 185 | // Note: It's possible the object is empty, so we just put an empty object here, 186 | // as the type we're serializing may not have any contents. 187 | // we also serialize the "$type" here if needed. 188 | buf.put("{") 189 | if (!hints.isStaticallyElidedType) appendTagString(picklee, hints) 190 | buf.put("}") 191 | } 192 | state = prev 193 | case MapEntryState(prev, picklee, hints) => 194 | // Add the type tag if we don't know it statically. 195 | if (!hints.isStaticallyElidedType) { 196 | buf.put(",") 197 | appendTagString(picklee, hints) 198 | } 199 | buf.put("}") 200 | state = prev 201 | case RefEntryState(prev) => 202 | state = prev 203 | case _ => sys.error("Unable to endEntry() when not in entry state!") 204 | } 205 | } 206 | private def appendTagString(picklee: Any, hints: Hints): Unit = 207 | buf.put("\"" + TYPE_TAG_FIELD + "\":\"" + makeTagString(picklee, hints) + "\"") 208 | private def makeTagString(picklee: Any, hints: Hints): String = 209 | if (hints.tag.key.contains("anonfun$")) picklee.getClass.getName 210 | else hints.tag.key 211 | 212 | // We cover ararys of primitives separately here. 
213 | // NOTE: these are special cased in the core pickler design (probably for binary encoding efficiency) 214 | private val primitiveArrays = Map[String, Any => Unit]( 215 | FastTypeTag.ArrayByte.key -> ((picklee: Any) => pickleArray(picklee.asInstanceOf[Array[Byte]], FastTypeTag.Byte)), 216 | FastTypeTag.ArrayShort.key -> ((picklee: Any) => pickleArray(picklee.asInstanceOf[Array[Short]], FastTypeTag.Short)), 217 | FastTypeTag.ArrayChar.key -> ((picklee: Any) => pickleArray(picklee.asInstanceOf[Array[Char]], FastTypeTag.Char)), 218 | FastTypeTag.ArrayInt.key -> ((picklee: Any) => pickleArray(picklee.asInstanceOf[Array[Int]], FastTypeTag.Int)), 219 | FastTypeTag.ArrayLong.key -> ((picklee: Any) => pickleArray(picklee.asInstanceOf[Array[Long]], FastTypeTag.Long)), 220 | FastTypeTag.ArrayBoolean.key -> ((picklee: Any) => pickleArray(picklee.asInstanceOf[Array[Boolean]], FastTypeTag.Boolean)), 221 | FastTypeTag.ArrayFloat.key -> ((picklee: Any) => pickleArray(picklee.asInstanceOf[Array[Float]], FastTypeTag.Float)), 222 | FastTypeTag.ArrayDouble.key -> ((picklee: Any) => pickleArray(picklee.asInstanceOf[Array[Double]], FastTypeTag.Double))) 223 | private def pickleArray(arr: Array[_], tag: FastTypeTag[_]) = { 224 | beginCollection(arr.length) 225 | pushHints() 226 | hintStaticallyElidedType() 227 | hintTag(tag) 228 | pinHints() 229 | var i = 0 230 | while (i < arr.length) { 231 | putElement(b => b.beginEntry(arr(i)).endEntry()) 232 | i += 1 233 | } 234 | popHints() 235 | endCollection() 236 | } 237 | 238 | private val primitives = Map[String, Any => Unit]( 239 | FastTypeTag.Unit.key -> ((picklee: Any) => buf.put("\"()\"")), 240 | FastTypeTag.Null.key -> ((picklee: Any) => buf.put("null")), 241 | FastTypeTag.Ref.key -> ((picklee: Any) => throw new Error("fatal error: shouldn't be invoked explicitly")), 242 | FastTypeTag.Int.key -> ((picklee: Any) => buf.put(picklee.toString)), 243 | FastTypeTag.Long.key -> ((picklee: Any) => buf.put(picklee.toString)), 244 | FastTypeTag.Short.key -> ((picklee: Any) => buf.put(picklee.toString)), 245 | FastTypeTag.Double.key -> ((picklee: Any) => buf.put(picklee.toString)), 246 | FastTypeTag.Float.key -> ((picklee: Any) => buf.put(picklee.toString)), 247 | FastTypeTag.Boolean.key -> ((picklee: Any) => buf.put(picklee.toString)), 248 | FastTypeTag.Byte.key -> ((picklee: Any) => buf.put(picklee.toString)), 249 | FastTypeTag.Char.key -> ((picklee: Any) => buf.put("\"" + quoteString(picklee.toString) + "\"")), 250 | FastTypeTag.String.key -> ((picklee: Any) => buf.put("\"" + quoteString(picklee.toString) + "\"")) // Note we've removed all Array knowledge in favor of traeting this NOT as primitive types, but instead 251 | // provide a collection pickler for them. 
252 | ) 253 | 254 | override def beginCollection(length: Int): PBuilder = { 255 | state match { 256 | case x: RawEntryState => 257 | x.wasCollectionOrMap = true 258 | state = CollectionState(x, length, false) 259 | buf.put("[") 260 | this 261 | case x: WriteOptionState => 262 | // We need to serialize None 263 | if (length == 0) buf.put("null") 264 | this 265 | case _ => sys.error(s"Unable to begin collection when in unknown state: $state") 266 | } 267 | } 268 | override def putElement(pickler: (PBuilder) => Unit): PBuilder = 269 | state match { 270 | case s: CollectionState => 271 | // TODO - Verify 272 | if (s.hasInput) { buf.put(",") } else { 273 | state = s.copy(hasInput = true) 274 | } 275 | pickler(this) 276 | this 277 | case s: WriteOptionState => 278 | // Cheater methods to serialize options as raw values. 279 | pickler(this) 280 | this 281 | case _ => sys.error("Cannot put an element without first specifying a collection.") 282 | } 283 | override def endCollection(): Unit = 284 | state match { 285 | case s: CollectionState => 286 | buf.put("]") 287 | state = s.previous 288 | case s: WriteOptionState => 289 | state = s.previous 290 | case _ => sys.error("cannot end a collection when not in collection state!") 291 | } 292 | 293 | override def result(): JSONPickle = { 294 | // TODO - verify everything is done, and we have no state stack... 295 | if (state != EmptyState) sys.error("Failed to close/end all entries and collections!") 296 | JSONPickle(buf.toString) 297 | } 298 | } 299 | 300 | private[json] sealed trait ReaderState { 301 | def previous: ReaderState 302 | def current: JValue 303 | } 304 | // The state where we're looking at a value, but the reader hasn't told us to do anything yet. 305 | private[json] case class RawJsValue(current: JValue, previous: ReaderState) extends ReaderState 306 | // The state in which we've attempted to read a type tag. 307 | // i.e. this means beginEntry has been called. 308 | private[json] case class JsValueWithTag(current: JValue, tagKey: String, previous: ReaderState) extends ReaderState 309 | // The initial state where we pass parsed JSON and begin parsing. 310 | private[json] case class IniitalReaderState(current: JValue) extends ReaderState { 311 | def previous: ReaderState = this 312 | } 313 | // The state where we are reading elements from a collection. 314 | private[json] case class CollectionReadingState(current: JValue, idx: Int, previous: ReaderState) extends ReaderState 315 | 316 | private[json] class VerifyingJSONPickleReader(format: JSONPickleFormat, var state: ReaderState) extends PReader with PickleTools { 317 | import JSONPickleFormat._ 318 | 319 | // Debugging hints 320 | override def hintTag(tag: FastTypeTag[_]): this.type = { 321 | //System.err.println(s"hintTag($tag)") 322 | super.hintTag(tag) 323 | } 324 | override def hintStaticallyElidedType(): this.type = { 325 | //System.err.println(s"hintStaticallyElidedType()") 326 | super.hintStaticallyElidedType() 327 | } 328 | override def pinHints(): this.type = { 329 | //System.err.println(s"pinHints()") 330 | super.pinHints() 331 | } 332 | override def unpinHints(): this.type = { 333 | //System.err.println(s"unpinHints()") 334 | super.pinHints() 335 | } 336 | 337 | override def beginEntry(): String = withHints { hints => 338 | // This should be the default for static picklers. We don't need runtime reflection, 339 | // so we just grab tag strings and use that to match known/sealed class hierarchies. 
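// currentTag (defined at the bottom of this file) resolves that string as follows:
// JSON null/nothing map to the Null/Nothing tags, an object carrying "$ref" maps to
// the Ref tag, elided types fall back to the hinted tag, and any other object has its
// "$type" field read, falling back to the hint again when that field is absent.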
340 | val tag = currentTag(state.current, hints) 341 | state = JsValueWithTag(state.current, tag, state.previous) 342 | tag 343 | } 344 | override def endEntry(): Unit = { 345 | //System.err.println(s"endEntry()") 346 | // TODO - validate state is correct before we pop the stack. 347 | state = state.previous 348 | } 349 | 350 | // Check for primitive at current state. 351 | override def atPrimitive: Boolean = state match { 352 | case JsValueWithTag(_, tag, _) => 353 | primitives.contains(tag) 354 | case _ => false 355 | } 356 | 357 | // Check if the user is aksing for a raw "JValue" so we don't deserialize it. 358 | private def atJValue: Boolean = 359 | state match { 360 | case JsValueWithTag(_, tag, _) => (tag startsWith "org.json4s.JsonAST.") 361 | case _ => false 362 | } 363 | 364 | override def readPrimitive(): Any = { 365 | //System.err.println(s"readPrimitive()") 366 | def unpickleHelper(value: JValue, tag: String): Any = { 367 | if (tag startsWith "org.json4s.JsonAST.") value 368 | else if (primitives.contains(tag)) primitives(tag)(value) 369 | // NOTE - This is a dirty, rotten hack when the tag.key does not lineup with the data. 370 | // We need to figure out hwat's wrong with our SPickles that would case this. 371 | else value match { 372 | case x: JString => x.values 373 | //case x: JDouble => x.values 374 | case x: JBool => x.value 375 | // TODO - We need to understand why the tag doesn't say JsonAST here... 376 | case x: JObject => x 377 | case JNull => null 378 | case _ => 379 | // TODO - check to see if we need the old primitiveSeqKeys handling 380 | // to read a primtiive out of a JArray 381 | val e = new PicklingException(s"Not a primitive: $tag, found $value") 382 | e.printStackTrace() 383 | throw e 384 | } 385 | } 386 | state match { 387 | case JsValueWithTag(value, tag, _) => unpickleHelper(value, tag) 388 | // Here we've gotten to a readPrimtive without a beginEntry which reads the tag. We'll 389 | // assume the statically hinted type is the right one 390 | case _: IniitalReaderState | _: RawJsValue => 391 | withHints { hints => 392 | unpickleHelper(state.current, hints.tag.key) 393 | } 394 | // TODO - Do we need a state where we can read a value if we're in a collection reading state? 395 | case state => 396 | throw new PicklingException(s"Cannot deserialize primitive in state: $state") 397 | } 398 | } 399 | 400 | // Check for object at current state, and read fields. 401 | override def atObject: Boolean = 402 | // TODO - Check for legit state 403 | state.current.isInstanceOf[JObject] 404 | override def readField(name: String): PReader = { 405 | //System.err.println(s"readField($name)") 406 | // TODO - assert(atObject) && we're in legit state to read fields... 407 | val nextState = if (name == DYNAMIC_KEY_FIELD) { 408 | // TODO - Should we sort here? 409 | val keys = 410 | state.current.asInstanceOf[JObject].values.keys.toList.sorted.map(k => JString(k)) 411 | RawJsValue(JArray(keys), state) 412 | // TODO - what do we do if we're at a JNothing here... 413 | } else RawJsValue(findByName(state.current.asInstanceOf[JObject], name), state) 414 | val nested = new VerifyingJSONPickleReader(format, nextState) 415 | if (this.areHintsPinned) { 416 | nested.pinHints() 417 | nested.hints = hints 418 | // TODO - maybe we modify the state to remember the type tag... 419 | } else { 420 | nested.hints = hints 421 | } 422 | nested 423 | } 424 | 425 | // Methods around reading collections. 
426 | override def beginCollection(): PReader = { 427 | //System.err.println(s"beginCollection()") 428 | // For now we just migrate into collection reading state. 429 | state = CollectionReadingState(state.current, 0, state) 430 | this 431 | } 432 | override def readLength(): Int = state match { 433 | case CollectionReadingState(value, 0, _) => 434 | //System.err.println(s"readLength()") 435 | value match { 436 | case JNothing => 0 437 | case JNull => 0 // Hackery for Option handling 438 | case x: JArray => x.arr.size 439 | case x => 1 // Hackery for Option handling 440 | } 441 | case x => throw new PicklingException(s"Cannot read length when not in collection reading state.") 442 | } 443 | override def readElement(): PReader = state match { 444 | case cs @ CollectionReadingState(value, idx, _) => 445 | //System.err.println(s"readElement()") 446 | // First advance internal state. 447 | state = cs.copy(idx = idx + 1) 448 | val subState = value match { 449 | case x: JArray => 450 | RawJsValue(x.apply(idx), state) 451 | case _ if idx == 0 => 452 | RawJsValue(value, state) 453 | } 454 | val tmp = new VerifyingJSONPickleReader(format, subState) 455 | tmp.hints = this.hints // TODO - is this correct? 456 | tmp 457 | case x => throw new PicklingException(s"Cannot read an element when not in collection reading state.") 458 | } 459 | override def endCollection(): Unit = state match { 460 | case CollectionReadingState(value, idx, prev) => 461 | //System.err.println(s"endCollection()") 462 | // TODO - Warn if we haven't read all value, maybe 463 | state = prev 464 | case _ => throw new PicklingException(s"Cannot end reading a collection when we never started, state: $state") 465 | } 466 | 467 | // IMPLEMENTATION DETAILS 468 | // NOTE - most of this can be moved into static helper method 469 | // especially the state so we don't create it over and over on every pickle call. 
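As a sketch of how these reader methods are driven in practice (not repository code; the object, method and value names are hypothetical), an unpickler for a list of strings would walk the protocol implemented above roughly like this:

    import scala.pickling.{ FastTypeTag, PReader }

    object CollectionReadingSketch {
      // beginEntry -> beginCollection -> readLength ->
      //   (readElement -> beginEntry -> readPrimitive -> endEntry)* ->
      //     endCollection -> endEntry
      def readStrings(reader: PReader): List[String] = {
        reader.hintTag(implicitly[FastTypeTag[List[String]]])
        reader.hintStaticallyElidedType()
        reader.beginEntry()
        val collection = reader.beginCollection()
        val length = collection.readLength()
        val result = (0 until length).toList.map { _ =>
          val elem = collection.readElement()
          elem.hintTag(FastTypeTag.String) // the element tag is statically known here
          elem.hintStaticallyElidedType()
          elem.beginEntry()
          val s = elem.readPrimitive().asInstanceOf[String]
          elem.endEntry()
          s
        }
        collection.endCollection()
        reader.endEntry()
        result
      }
    }

The verifying builder and reader in this file are written to enforce exactly this kind of call discipline on the generated picklers.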
470 | 471 | private val primitives = Map[String, JValue => Any]( 472 | FastTypeTag.Unit.key -> (datum => ()), 473 | FastTypeTag.Null.key -> (datum => null), 474 | FastTypeTag.Ref.key -> (datum => lookupUnpicklee(datum match { 475 | case obj: JObject => 476 | findByName(obj, REF_ID_FIELD) match { 477 | case JDouble(num) => num.toInt 478 | case x => unexpectedValue(x, FastTypeTag.Ref) 479 | } 480 | case x => unexpectedValue(x, FastTypeTag.Ref) 481 | })), 482 | FastTypeTag.Int.key -> (datum => datum match { 483 | case JDouble(num) => num.toInt 484 | case x => unexpectedValue(x, FastTypeTag.Int) 485 | }), 486 | FastTypeTag.Short.key -> (datum => datum match { 487 | case JDouble(num) => num.toShort 488 | case x => unexpectedValue(x, FastTypeTag.Short) 489 | }), 490 | FastTypeTag.Double.key -> (datum => datum match { 491 | case JDouble(num) => num 492 | case x => unexpectedValue(x, FastTypeTag.Double) 493 | }), 494 | FastTypeTag.Float.key -> (datum => datum match { 495 | case JDouble(num) => num.toFloat 496 | case x => unexpectedValue(x, FastTypeTag.Float) 497 | }), 498 | FastTypeTag.Long.key -> (datum => datum match { 499 | case JDouble(num) => num.toLong 500 | case JString(s) => s.toLong 501 | case x => unexpectedValue(x, FastTypeTag.Long) 502 | }), 503 | FastTypeTag.Byte.key -> (datum => datum match { 504 | case JDouble(num) => num.toByte 505 | case x => unexpectedValue(x, FastTypeTag.Byte) 506 | }), 507 | FastTypeTag.Boolean.key -> (datum => datum match { 508 | case JBool(b) => b 509 | case x => unexpectedValue(x, FastTypeTag.Boolean) 510 | }), 511 | FastTypeTag.Char.key -> (datum => datum match { 512 | case JString(s) => s.head 513 | case x => unexpectedValue(x, FastTypeTag.Char) 514 | }), 515 | FastTypeTag.String.key -> (datum => datum match { 516 | // TODO - where is this coming from... appears to be `Option[String]`, when option is `None` 517 | // More importantly, why is Jawn returning null instead of JNull? 
518 | case null => null 519 | case JString(s) => s 520 | case x => unexpectedValue(x, FastTypeTag.String) 521 | }), 522 | FastTypeTag.ArrayByte.key -> (datum => (datum match { 523 | case JArray(arr) => 524 | arr map { 525 | case JDouble(num) => num.toByte 526 | case x => unexpectedValue(x, FastTypeTag.Byte) 527 | } 528 | case x => unexpectedValue(x, FastTypeTag.ArrayByte) 529 | }).toArray), 530 | FastTypeTag.ArrayShort.key -> (datum => (datum match { 531 | case JArray(arr) => 532 | arr map { 533 | case JDouble(num) => num.toShort 534 | case x => unexpectedValue(x, FastTypeTag.Short) 535 | } 536 | case x => unexpectedValue(x, FastTypeTag.ArrayShort) 537 | }).toArray), 538 | FastTypeTag.ArrayChar.key -> (datum => (datum match { 539 | case JArray(arr) => 540 | arr map { 541 | case JString(s) => s.head 542 | case x: JValue => unexpectedValue(x, FastTypeTag.Char) 543 | } 544 | case x => unexpectedValue(x, FastTypeTag.ArrayChar) 545 | }).toArray), 546 | FastTypeTag.ArrayInt.key -> { datum => 547 | (datum match { 548 | case JArray(arr) => 549 | arr map { 550 | case JDouble(num) => num.toInt 551 | case x => unexpectedValue(x, FastTypeTag.Int) 552 | } 553 | case x => unexpectedValue(x, FastTypeTag.ArrayInt) 554 | }).toArray 555 | }, 556 | FastTypeTag.ArrayLong.key -> (datum => (datum match { 557 | case JArray(arr) => 558 | arr map { 559 | case JDouble(num) => num.toLong 560 | case JString(s) => s.toLong 561 | case x => unexpectedValue(x, FastTypeTag.Long) 562 | } 563 | case x => unexpectedValue(x, FastTypeTag.ArrayLong) 564 | }).toArray), 565 | FastTypeTag.ArrayBoolean.key -> (datum => (datum match { 566 | case JArray(arr) => 567 | arr map { 568 | case JBool(b) => b 569 | case x => unexpectedValue(x, FastTypeTag.Boolean) 570 | } 571 | case x => unexpectedValue(x, FastTypeTag.ArrayBoolean) 572 | }).toArray), 573 | FastTypeTag.ArrayFloat.key -> (datum => (datum match { 574 | case JArray(arr) => 575 | arr map { 576 | case JDouble(num) => num.toFloat 577 | case x => unexpectedValue(x, FastTypeTag.Float) 578 | } 579 | case x => unexpectedValue(x, FastTypeTag.ArrayFloat) 580 | }).toArray), 581 | FastTypeTag.ArrayDouble.key -> (datum => (datum match { 582 | case JArray(arr) => 583 | arr map { 584 | case JDouble(num) => num 585 | case x => unexpectedValue(x, FastTypeTag.Double) 586 | } 587 | case x => unexpectedValue(x, FastTypeTag.ArrayDouble) 588 | }).toArray)) 589 | 590 | private def unexpectedValue(value: JValue, tag: FastTypeTag[_]): Nothing = 591 | throw new PicklingException("unexpected value: " + value.toString + ", looking for: " + tag) 592 | 593 | /** 594 | * Reads the pickled "$type" fields from a JObject. 595 | * Throws an exception if the "$type" fired doesn't exist. 596 | * 597 | * Note: This will use some runtime reflection to check if the pickled type still exists. If it does not, 598 | * this will use the type hint provided if we're deserializing a known subclass (not an abstract/trait) 599 | */ 600 | private def readTypeTagKey(obj: JObject, hints: Hints): String = { 601 | findByName(obj, TYPE_TAG_FIELD) match { 602 | case JString(s) => s 603 | case found => hints.tag.key 604 | } 605 | } 606 | /** Helper to read (or return elided) type tag for the given entry. */ 607 | private def currentTag(current: JValue, hints: Hints): String = { 608 | current match { 609 | case JNull => FastTypeTag.Null.key 610 | case JNothing => FastTypeTag.Nothing.key 611 | case obj: JObject => 612 | findByName(obj, REF_ID_FIELD) match { 613 | case JDouble(num) => FastTypeTag.Ref.key 614 | // Not a reference type. 
615 | case _ => 616 | if (hints.isElidedType || hints.isStaticallyElidedType || hints.isDynamicallyElidedType) hints.tag.key 617 | else readTypeTagKey(obj, hints) 618 | } 619 | case _ if (hints.tag != null) => hints.tag.key 620 | case _ => 621 | // TODO - This should be an error. We need a tag and we have NO IDEA what we are. 622 | throw new PicklingException(s"Attempting to find tag in $current, but hints has ${hints.tag}") 623 | } 624 | } 625 | } 626 | } 627 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/json/JsonMethods.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization.json 2 | 3 | import org.json4s.JsonAST._ 4 | import java.io.File 5 | import scala.pickling.PicklingException 6 | import scala.util.Try 7 | import java.nio.charset.Charset 8 | import java.io.{ Reader => JReader, File, InputStream } 9 | 10 | private[serialization] sealed abstract class JsonInput extends Product with Serializable 11 | private[serialization] case class StringInput(string: String) extends JsonInput 12 | private[serialization] case class ReaderInput(reader: JReader) extends JsonInput 13 | private[serialization] case class StreamInput(stream: InputStream) extends JsonInput 14 | private[serialization] case class FileInput(file: File) extends JsonInput 15 | 16 | private[serialization] trait BaseJsonMethods[T] { 17 | def parse(in: JsonInput, useBigDecimalForDouble: Boolean = false): JValue 18 | def parseOpt(in: JsonInput, useBigDecimalForDouble: Boolean = false): Option[JValue] 19 | 20 | def render(value: JValue) /*(implicit formats: Formats = DefaultFormats)*/ : T 21 | def compact(d: T): String 22 | def pretty(d: T): String 23 | } 24 | 25 | /** An implementation of JsonMethods for json4s that uses Jawn and our own toStrings. */ 26 | private[serialization] object JsonMethods extends BaseJsonMethods[JValue] { 27 | // Redner doesn't do anything, as we aren't translating to an intermediate format before rendering. 28 | override def render(value: JValue): JValue = value 29 | // TODO - Write this. 30 | override def pretty(d: JValue): String = compact(d) 31 | // Compact rendering. 32 | override def compact(d: JValue): String = { 33 | val buf = new StringBuilder("") 34 | import org.json4s._ 35 | def trimArr(xs: List[JValue]) = xs.filter(_ != JNothing) 36 | def trimObj(xs: List[JField]) = xs.filter(_._2 != JNothing) 37 | def append(d: JValue): Unit = { 38 | d match { 39 | case null => buf.append("null") 40 | case JBool(true) => buf.append("true") 41 | case JBool(false) => buf.append("false") 42 | case JDouble(n) => buf.append(n.toString) 43 | case JDecimal(n) => buf.append(n.toString) 44 | case JInt(n) => buf.append(n.toString) 45 | case JNull => buf.append("null") 46 | // TODO - better error message 47 | case JNothing => sys.error("can't render 'nothing'") 48 | // TODO - does this even make sense? 
49 | case JString(null) => buf.append("null") 50 | case JString(s) => 51 | buf.append("\"") 52 | buf.append(ParserUtil.quote(s)) 53 | buf.append("\"") 54 | case JArray(arr) => 55 | buf.append("[") 56 | val trimmed = trimArr(arr) 57 | var l = trimmed 58 | while (!l.isEmpty) { 59 | val el = l.head 60 | if (l ne trimmed) buf.append(",") 61 | append(el) 62 | l = l.tail 63 | } 64 | buf.append("]") 65 | case JObject(obj) => 66 | buf.append("{") 67 | val trimmed = trimObj(obj) 68 | var l = trimmed 69 | while (!l.isEmpty) { 70 | val (k, v) = l.head 71 | if (l ne trimmed) buf.append(",") 72 | buf.append("\"").append(ParserUtil.quote(k)).append("\":") 73 | append(v) 74 | l = l.tail 75 | } 76 | buf.append("}") 77 | } 78 | } 79 | append(d) 80 | buf.toString 81 | } 82 | 83 | override def parse(in: JsonInput, useBigDecimalForDouble: Boolean): JValue = 84 | parseTry(in, useBigDecimalForDouble).get 85 | override def parseOpt(in: JsonInput, useBigDecimalForDouble: Boolean): Option[JValue] = 86 | parseTry(in, useBigDecimalForDouble).toOption 87 | def parseTry(in: JsonInput, useBigDecimalForDouble: Boolean): Try[JValue] = { 88 | val result: Try[JValue] = in match { 89 | case StringInput(string) => jawn.support.json4s.Parser.parseFromString(string) 90 | // TODO - We should support the reader case too. 91 | case ReaderInput(reader) => util.Try(???) 92 | case StreamInput(stream) => 93 | val in = java.nio.channels.Channels.newChannel(stream) 94 | try jawn.support.json4s.Parser.parseFromChannel(in) 95 | finally in.close() 96 | case FileInput(file: File) => 97 | val in = (new java.io.FileInputStream(file)).getChannel 98 | try jawn.support.json4s.Parser.parseFromChannel(in) 99 | finally in.close() 100 | } 101 | result recover { 102 | case e @ jawn.ParseException(msg, _, line, col) => 103 | throw PicklingException(s"Parse error line $line column $col '$msg' in $in", Some(e)) 104 | case e @ jawn.IncompleteParseException(msg) => 105 | throw PicklingException(s"Incomplete json '$msg' in $in", Some(e)) 106 | } 107 | } 108 | 109 | private final def jvalueSorted(jvalue: JValue): JValue = jvalue match { 110 | case null => null 111 | case JObject(el) => JObject(el.sortBy(_._1).map(kv => kv._1 -> jvalueSorted(kv._2))) 112 | case JArray(el) => JArray(el.map(jvalueSorted(_))) 113 | case other => other 114 | } 115 | 116 | def jvalueEquals(jvalue: JValue, jvalue2: JValue): Boolean = 117 | (jvalue, jvalue2) match { 118 | // deal with null 119 | case (null, null) => true 120 | case (JNull, JNull) => true 121 | case (JNull, null) | (null, JNull) => false 122 | // optimize by avoiding the jvalueSorted if sizes don't match anyhow 123 | case (JArray(el), JArray(el2)) if (el.size != el2.size) => false 124 | case (JObject(el), JObject(el2)) if (el.size != el2.size) => false 125 | case (left, right) => 126 | // use the order-sensitive json4s implementation after sorting object fields 127 | jvalueSorted(left).equals(jvalueSorted(right)) 128 | } 129 | 130 | def jvalueHashCode(jvalue: JValue): Int = 131 | jvalueSorted(jvalue).hashCode 132 | } 133 | 134 | private[serialization] object ParserUtil { 135 | private val AsciiEncoder = Charset.forName("US-ASCII").newEncoder(); 136 | 137 | private[this] sealed abstract class StringAppender[T] { 138 | def append(s: String): T 139 | def subj: T 140 | } 141 | private[this] class StringWriterAppender(val subj: java.io.Writer) extends StringAppender[java.io.Writer] { 142 | def append(s: String): java.io.Writer = subj.append(s) 143 | } 144 | private[this] class StringBuilderAppender(val subj: 
StringBuilder) extends StringAppender[StringBuilder] { 145 | def append(s: String): StringBuilder = subj.append(s) 146 | } 147 | 148 | def quote(s: String): String = quote(s, new StringBuilderAppender(new StringBuilder)).toString 149 | private[serialization] def quote(s: String, writer: java.io.Writer): java.io.Writer = quote(s, new StringWriterAppender(writer)) 150 | private[this] def quote[T](s: String, appender: StringAppender[T]): T = { // hot path 151 | var i = 0 152 | val l = s.length 153 | while (i < l) { 154 | (s(i): @annotation.switch) match { 155 | case '"' => appender.append("\\\"") 156 | case '\\' => appender.append("\\\\") 157 | case '\b' => appender.append("\\b") 158 | case '\f' => appender.append("\\f") 159 | case '\n' => appender.append("\\n") 160 | case '\r' => appender.append("\\r") 161 | case '\t' => appender.append("\\t") 162 | case c => 163 | if (!AsciiEncoder.canEncode(c)) 164 | appender.append("\\u%04x".format(c: Int)) 165 | else appender.append(c.toString) 166 | } 167 | i += 1 168 | } 169 | appender.subj 170 | } 171 | } 172 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/package.scala: -------------------------------------------------------------------------------- 1 | package sbt 2 | 3 | /** 4 | * A package object which can be used to create new serializers. 5 | * 6 | * This package supports creating Pickler/Unpickler functions which can serialize arbitrary types. See the 7 | * SerializedValue type for what formats this library supports serializing into. 8 | */ 9 | package object serialization extends SerializationFunctions with CustomPicklers { 10 | type Pickler[A] = scala.pickling.Pickler[A] 11 | val Pickler = scala.pickling.Pickler 12 | type Unpickler[A] = scala.pickling.Unpickler[A] 13 | val Unpickler = scala.pickling.Unpickler 14 | val PicklerUnpickler = scala.pickling.PicklerUnpickler 15 | // These are exposed for custom implementations of picklers. 16 | type FastTypeTag[A] = scala.pickling.FastTypeTag[A] 17 | type PReader = scala.pickling.PReader 18 | type PBuilder = scala.pickling.PBuilder 19 | 20 | // pickling macros need FastTypeTag$ to have been initialized; 21 | // if things ever compile with this removed, it can be removed. 22 | private val __forceInitializeFastTypeTagCompanion = scala.pickling.FastTypeTag 23 | 24 | // All generated picklers are required to be static-only in this library. 25 | implicit val StaticOnly = scala.pickling.static.StaticOnly 26 | 27 | implicit val ShareNothing = scala.pickling.shareNothing.ShareNothing 28 | type directSubclasses = _root_.scala.pickling.directSubclasses 29 | } 30 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/pickler/JavaExtraPicklers.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | package pickler 3 | 4 | import java.io.File 5 | import java.net.URI 6 | import scala.pickling.{ FastTypeTag, PBuilder, PReader, PicklingException } 7 | // TODO - Why is alias not working. 8 | import scala.pickling.pickler.{ PrimitivePicklers, RefPicklers } 9 | 10 | /** Contains implementation-details of "can to strings" for Java/sbt 'raw' types. 
*/ 11 | object JavaExtraPicklers { 12 | private val fileCanToString: CanToString[File] = CanToString( 13 | _.toURI.toASCIIString, { 14 | s: String => new File(new URI(s)) 15 | }) 16 | private val uriCanToString: CanToString[URI] = CanToString( 17 | _.toASCIIString, { 18 | s: String => new URI(s) 19 | }) 20 | 21 | } 22 | 23 | /** 24 | * Picklers relating to additional Java types we'd like to support. 25 | * 26 | * THis includes java.io.File, java.net.URI and the sbt "TypeExpression". 27 | */ 28 | trait JavaExtraPicklers extends PrimitivePicklers { 29 | // TODO - Maybe this shouldn't be implicitly available. 30 | implicit def canToStringPickler[A: FastTypeTag](implicit canToString: CanToString[A]): Pickler[A] with Unpickler[A] = new Pickler[A] with Unpickler[A] { 31 | val tag = implicitly[FastTypeTag[A]] 32 | def pickle(a: A, builder: PBuilder): Unit = { 33 | builder.pushHints() 34 | builder.hintTag(FastTypeTag.String) 35 | builder.hintStaticallyElidedType() 36 | stringPickler.pickle(canToString.toString(a), builder) 37 | builder.popHints() 38 | } 39 | def unpickle(tag: String, preader: PReader): Any = { 40 | preader.pushHints() 41 | preader.hintTag(FastTypeTag.String) 42 | preader.hintStaticallyElidedType() 43 | preader.pinHints() 44 | val s = stringPickler.unpickle(FastTypeTag.String.key, preader).asInstanceOf[String] 45 | preader.unpinHints() 46 | preader.popHints() 47 | try { 48 | val result = canToString.fromString(s) 49 | result 50 | } catch { 51 | case e: PicklingException => throw e 52 | case e: Throwable => throw PicklingException(s""""$s" is not valid ${tag}""", Some(e)) 53 | } 54 | } 55 | } 56 | 57 | implicit val filePickler: Pickler[File] with Unpickler[File] = 58 | canToStringPickler[File](FastTypeTag[File], JavaExtraPicklers.fileCanToString) 59 | implicit val uriPickler: Pickler[URI] with Unpickler[URI] = 60 | canToStringPickler[URI](FastTypeTag[URI], JavaExtraPicklers.uriCanToString) 61 | } -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/pickler/Option.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | package pickler 3 | 4 | import scala.pickling.{ FastTypeTag, PBuilder, PReader, PicklingException } 5 | // TODO - Why is alias not working. 6 | import scala.pickling.pickler.PrimitivePicklers 7 | 8 | trait OptionPicklers extends PrimitivePicklers with RichTypes { 9 | implicit def optionPickler[A: FastTypeTag](implicit elemPickler: Pickler[A], elemUnpickler: Unpickler[A], collTag: FastTypeTag[Option[A]]): Pickler[Option[A]] with Unpickler[Option[A]] = 10 | new Pickler[Option[A]] with Unpickler[Option[A]] { 11 | private implicit val elemTag = implicitly[FastTypeTag[A]] 12 | val tag = implicitly[FastTypeTag[Option[A]]] 13 | private val isPrimitive = elemTag.tpe.isEffectivelyPrimitive 14 | private val nullTag = implicitly[FastTypeTag[Null]] 15 | def pickle(coll: Option[A], builder: PBuilder): Unit = { 16 | // Here we cheat the "entry" so that the notion of option 17 | // is erased for "null" 18 | coll match { 19 | case Some(elem) => 20 | builder.hintTag(tag) 21 | builder.beginEntry(coll) 22 | builder.beginCollection(1) 23 | builder.putElement { b => 24 | b.hintTag(elemTag) 25 | b.hintStaticallyElidedType() 26 | elemPickler.pickle(elem, b) 27 | } 28 | builder.endCollection() 29 | builder.endEntry() 30 | case None => 31 | // TODO - Json Format shoudl special case this. 
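            // Hedged note: the JSON side does end up collapsing Option on the wire, as asserted in
            // ArrayPicklerSpec.testOptions: SerializedValue(Some(1): Option[Int]).toJsonString is
            // "1" and SerializedValue(None: Option[Int]).toJsonString is "null", so the empty
            // collection written below is rendered as JSON null rather than as [].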
32 | builder.hintTag(tag) 33 | builder.beginEntry(None) 34 | builder.beginCollection(0) 35 | builder.endCollection() 36 | builder.endEntry() 37 | } 38 | } 39 | def unpickle(tag: String, preader: PReader): Any = { 40 | // Note - if we call beginEntry we should see JNothing or JNull show up if the option is empty. 41 | val reader = preader.beginCollection() 42 | preader.pushHints() 43 | // TODO - we may be ALWAYS eliding the type, so we shouldn't use an isPrimitive hack here. 44 | if (isPrimitive) { 45 | reader.hintStaticallyElidedType() 46 | reader.hintTag(elemTag) 47 | reader.pinHints() 48 | } else reader.hintTag(elemTag) 49 | val length = reader.readLength 50 | val result: Option[A] = 51 | if (length == 0) None 52 | else { 53 | val elem = elemUnpickler.unpickleEntry(reader.readElement()) 54 | Some(elem.asInstanceOf[A]) 55 | } 56 | if (isPrimitive) preader.unpinHints() 57 | preader.popHints() 58 | reader.endCollection() 59 | result 60 | } 61 | } 62 | } -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/pickler/SerializedValue.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | package pickler 3 | 4 | trait SerializationPicklers { 5 | implicit val serializedValuePickler: Pickler[SerializedValue] with Unpickler[SerializedValue] = SerializedValue.pickler 6 | } 7 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/pickler/StringMap.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | package pickler 3 | 4 | import scala.collection.generic.CanBuildFrom 5 | import scala.pickling.{ FastTypeTag, PBuilder, PReader, PicklingException } 6 | 7 | trait StringMapPicklers { 8 | // FIXME this could theoretically work for M<:Map[String,A] and use a CanBuildFrom for M? 9 | implicit def stringMapPickler[A](implicit valuePickler: Pickler[A], valueUnpickler: Unpickler[A], valueTag: FastTypeTag[A], 10 | mapTag: FastTypeTag[Map[String, A]], 11 | keysPickler: Pickler[List[String]], keysUnpickler: Unpickler[List[String]]): Pickler[Map[String, A]] with Unpickler[Map[String, A]] = new Pickler[Map[String, A]] with Unpickler[Map[String, A]] { 12 | override val tag = mapTag 13 | 14 | def pickle(m: Map[String, A], builder: PBuilder): Unit = { 15 | builder.pushHints() 16 | builder.hintTag(mapTag) 17 | builder.hintStaticallyElidedType() 18 | builder.beginEntry(m) 19 | // This is a pseudo-field that the JSON format will ignore reading, but 20 | // the binary format WILL write. 21 | // TODO - We should have this be a "hintDynamicKeys" instead. 
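        // Rough sketch of the resulting JSON, hedged (the exact layout, including any "$type"
        // fields, is decided by JSONPickleFormat): Map("a" -> 1, "b" -> 2) becomes one object
        // carrying both the sorted key list and the entries themselves, something like
        // {"$keys":["a","b"],"a":1,"b":2}; unpickle below reads "$keys" first and then looks up
        // each listed key as a field.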
22 | builder.putField("$keys", { b => 23 | keysPickler.pickle(m.keys.toList.sorted, b) 24 | }) 25 | m foreach { kv => 26 | builder.putField(kv._1, { b => 27 | b.hintTag(valueTag) 28 | valuePickler.pickle(kv._2, b) 29 | }) 30 | } 31 | builder.endEntry() 32 | builder.popHints() 33 | } 34 | 35 | def unpickle(tpe: String, reader: PReader): Any = { 36 | reader.pushHints() 37 | reader.hintStaticallyElidedType() 38 | reader.hintTag(mapTag) 39 | reader.hintStaticallyElidedType() 40 | reader.beginEntry() 41 | val keys = keysUnpickler.unpickleEntry(reader.readField("$keys")).asInstanceOf[List[String]] 42 | val results = for (key <- keys) yield { 43 | val value = valueUnpickler.unpickleEntry(reader.readField(key)) 44 | key -> value.asInstanceOf[A] 45 | } 46 | reader.endEntry() 47 | reader.popHints() 48 | results.toMap 49 | } 50 | override def toString = "StringMapPicklerUnpickler" 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/pickler/Throwable.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | package pickler 3 | 4 | // TODO - Why is alias not working. 5 | import scala.pickling.pickler.{ PrimitivePicklers, RefPicklers } 6 | import scala.pickling.PicklingException 7 | 8 | trait ThrowablePicklers extends PrimitivePicklers with OptionPicklers with VectorPicklers with RefPicklers { 9 | 10 | private implicit object stackTracePickler extends Pickler[StackTraceElement] with Unpickler[StackTraceElement] { 11 | override val tag: FastTypeTag[StackTraceElement] = implicitly[FastTypeTag[StackTraceElement]] 12 | private val intTag = implicitly[FastTypeTag[Int]] 13 | private val stringOptTag = implicitly[FastTypeTag[Option[String]]] 14 | private val stringOptPickler = implicitly[Pickler[Option[String]]] 15 | private val stringOptUnpickler = implicitly[Unpickler[Option[String]]] 16 | 17 | override def pickle(a: StackTraceElement, builder: PBuilder): Unit = { 18 | builder.beginEntry(a) 19 | def pickleString(field: String, value: String): Unit = { 20 | builder.putField(field, { b => 21 | b.hintTag(stringOptTag) 22 | stringOptPickler.pickle(Option(value), b) 23 | }) 24 | } 25 | pickleString("className", a.getClassName) 26 | pickleString("methodName", a.getMethodName) 27 | pickleString("fileName", a.getFileName) 28 | builder.putField("lineNumber", { b => 29 | b.hintTag(intTag) 30 | intPickler.pickle(a.getLineNumber, b) 31 | }) 32 | builder.endEntry() 33 | } 34 | override def unpickle(tag: String, preader: PReader): StackTraceElement = { 35 | def unpickleString(field: String): Option[String] = { 36 | stringOptUnpickler.unpickleEntry(preader.readField(field)).asInstanceOf[Option[String]] 37 | } 38 | val className = unpickleString("className") 39 | val methodName = unpickleString("methodName") 40 | val fileName = unpickleString("fileName") 41 | val lineNumber = intPickler.unpickleEntry(preader.readField("lineNumber")).asInstanceOf[Int] 42 | new StackTraceElement(className.orNull, methodName.orNull, fileName.orNull, lineNumber) 43 | } 44 | } 45 | 46 | // TODO why isn't this in LowPriority / what goes in Low and what goes here? 
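  // Hedged behavioural note: the pickler below writes a Throwable structurally as its message,
  // cause and stack trace, and unpickle rebuilds a plain java.lang.Exception, so the concrete
  // subclass is not preserved across a round trip. A minimal sketch, assuming a
  // Pickler[Throwable]/Unpickler[Throwable] from this trait is in implicit scope as in
  // BasicPicklerSpec:
  //
  //   import sbt.serialization._
  //   val nested: Throwable = new IllegalStateException("boom", new Exception("cause"))
  //   val back = fromJsonString[Throwable](toJsonString(nested)).get
  //   // back.getMessage == "boom", back.getCause.getMessage == "cause", and the stack trace is
  //   // restored, but back is an Exception rather than an IllegalStateException.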
47 | implicit object throwablePicklerUnpickler extends Pickler[Throwable] with Unpickler[Throwable] { 48 | val tag: FastTypeTag[Throwable] = implicitly[FastTypeTag[Throwable]] 49 | private val stringTag = implicitly[FastTypeTag[String]] 50 | private val stringOptTag = implicitly[FastTypeTag[Option[String]]] 51 | private val throwableOptTag = implicitly[FastTypeTag[Option[Throwable]]] 52 | private val stringOptPickler = implicitly[Pickler[Option[String]]] 53 | private val stringOptUnpickler = implicitly[Unpickler[Option[String]]] 54 | private val throwableOptPicklerUnpickler = optionPickler[Throwable](tag, this, this, throwableOptTag) 55 | private val vsteTag = implicitly[FastTypeTag[Vector[StackTraceElement]]] 56 | private val vstePickler = vectorPickler[StackTraceElement] 57 | private val vsteUnpickler = vstePickler 58 | 59 | def pickle(a: Throwable, builder: PBuilder): Unit = { 60 | builder.beginEntry(a) 61 | builder.putField("message", { b => 62 | b.hintTag(stringOptTag) 63 | stringOptPickler.pickle(Option(a.getMessage), b) 64 | }) 65 | builder.putField("cause", { b => 66 | b.hintTag(throwableOptTag) 67 | throwableOptPicklerUnpickler.pickle(Option(a.getCause), b) 68 | }) 69 | builder.putField("stackTrace", { b => 70 | b.hintTag(vsteTag) 71 | vstePickler.pickle(a.getStackTrace.toVector, b) 72 | }) 73 | builder.endEntry() 74 | } 75 | def unpickle(tag: String, preader: PReader): Any = { 76 | val message = stringOptUnpickler.unpickleEntry(preader.readField("message")).asInstanceOf[Option[String]] 77 | val cause = throwableOptPicklerUnpickler.unpickleEntry(preader.readField("cause")).asInstanceOf[Option[Throwable]] 78 | preader.hintStaticallyElidedType() 79 | val stackTrace = vsteUnpickler.unpickleEntry(preader.readField("stackTrace")).asInstanceOf[Vector[StackTraceElement]] 80 | val result = new Exception(message.orNull, cause.orNull) 81 | result.setStackTrace(stackTrace.toArray) 82 | result 83 | } 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/pickler/Traversable.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | package pickler 3 | 4 | import scala.collection.generic.CanBuildFrom 5 | import scala.pickling.{ FastTypeTag, PBuilder, PReader, PicklingException } 6 | 7 | trait VectorPicklers { 8 | implicit def vectorPickler[T: FastTypeTag](implicit elemPickler: Pickler[T], elemUnpickler: Unpickler[T], collTag: FastTypeTag[Vector[T]], cbf: CanBuildFrom[Vector[T], T, Vector[T]]): Pickler[Vector[T]] with Unpickler[Vector[T]] = 9 | TravPickler[T, Vector[T]] 10 | } 11 | 12 | trait ArrayPicklers { 13 | implicit def arrayPickler[A >: Null: FastTypeTag](implicit elemPickler: Pickler[A], elemUnpickler: Unpickler[A], collTag: FastTypeTag[Array[A]], cbf: CanBuildFrom[Array[A], A, Array[A]]): Pickler[Array[A]] with Unpickler[Array[A]] = 14 | TravPickler[A, Array[A]] 15 | } 16 | 17 | trait ListPicklers { 18 | implicit def listPickler[A: FastTypeTag](implicit elemPickler: Pickler[A], elemUnpickler: Unpickler[A], 19 | collTag: FastTypeTag[List[A]]): Pickler[List[A]] with Unpickler[List[A]] = 20 | TravPickler[A, List[A]] 21 | } 22 | 23 | trait SeqPicklers { 24 | // Ideally we wouldn't have this one, but it some sbt tasks return Seq 25 | implicit def seqPickler[A: FastTypeTag](implicit elemPickler: Pickler[A], elemUnpickler: Unpickler[A], collTag: FastTypeTag[Seq[A]], cbf: CanBuildFrom[Seq[A], A, Seq[A]]): Pickler[Seq[A]] with 
Unpickler[Seq[A]] = 26 | TravPickler[A, Seq[A]] 27 | } 28 | 29 | trait MapPicklers { 30 | implicit def mapPickler[A: FastTypeTag, B: FastTypeTag, C >: (A, B)](implicit keyPickler: Pickler[A], 31 | keyUnpickler: Unpickler[A], 32 | valuePickler: Pickler[B], 33 | valueUnpickler: Unpickler[B], 34 | collTag: FastTypeTag[Map[A, B]], 35 | cbf: CanBuildFrom[Map[A, B], C, Map[A, B]]): Pickler[Map[A, B]] with Unpickler[Map[A, B]] = 36 | TravPickler[(A, B), Map[A, B]] 37 | } 38 | 39 | // Custom pickler for Traversable is needed to emit $type hints for each element. 40 | object TravPickler { 41 | def apply[A: FastTypeTag, C <% Traversable[_]](implicit elemPickler: Pickler[A], elemUnpickler: Unpickler[A], 42 | cbf: CanBuildFrom[C, A, C], collTag: FastTypeTag[C]): Pickler[C] with Unpickler[C] = 43 | new Pickler[C] with Unpickler[C] with RichTypes { 44 | private implicit val elemTag = implicitly[FastTypeTag[A]] 45 | private val isPrimitive = elemTag.isEffectivelyPrimitive 46 | val tag = collTag 47 | 48 | def pickle(coll: C, builder: PBuilder): Unit = { 49 | if (elemTag == FastTypeTag.Int) builder.hintKnownSize(coll.size * 4 + 100) 50 | builder.beginEntry(coll) 51 | builder.beginCollection(coll.size) 52 | 53 | builder.pushHints() 54 | if (isPrimitive) { 55 | builder.hintStaticallyElidedType() 56 | builder.hintTag(elemTag) 57 | builder.pinHints() 58 | } 59 | 60 | (coll: Traversable[_]).asInstanceOf[Traversable[A]].foreach { (elem: A) => 61 | builder putElement { b => 62 | if (!isPrimitive) b.hintTag(elemTag) 63 | elemPickler.pickle(elem, b) 64 | } 65 | } 66 | if (isPrimitive) builder.unpinHints() 67 | builder.popHints() 68 | builder.endCollection() 69 | builder.endEntry() 70 | } 71 | 72 | def unpickle(tpe: String, preader: PReader): Any = { 73 | val reader = preader.beginCollection() 74 | 75 | preader.pushHints() 76 | if (isPrimitive) { 77 | reader.hintStaticallyElidedType() 78 | reader.hintTag(elemTag) 79 | reader.pinHints() 80 | } else { 81 | reader.hintTag(elemTag) // custom code here 82 | reader.pinHints() // custom code here 83 | } 84 | 85 | val length = reader.readLength() 86 | val builder = cbf.apply() 87 | var i = 0 88 | while (i < length) { 89 | val elem = elemUnpickler.unpickleEntry(reader.readElement()) 90 | builder += elem.asInstanceOf[A] 91 | i = i + 1 92 | } 93 | reader.unpinHints() 94 | preader.popHints() 95 | preader.endCollection() 96 | builder.result 97 | } 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/pickler/Tuple2.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | package pickler 3 | 4 | import scala.pickling.{ FastTypeTag, PBuilder, PReader } 5 | import scala.pickling.pickler.PrimitivePicklers 6 | 7 | trait Tuple2Picklers extends PrimitivePicklers with RichTypes { 8 | implicit def tuple2Pickler[T1: FastTypeTag, T2: FastTypeTag](implicit elem1Pickler: Pickler[T1], 9 | elem1Unpickler: Unpickler[T1], 10 | elem2Pickler: Pickler[T2], 11 | elem2Unpickler: Unpickler[T2], 12 | collTag: FastTypeTag[(T1, T2)]): Pickler[(T1, T2)] with Unpickler[(T1, T2)] = 13 | new Pickler[(T1, T2)] with Unpickler[(T1, T2)] { 14 | 15 | val tag = collTag 16 | private implicit val elem1Tag = implicitly[FastTypeTag[T1]] 17 | private implicit val elem2Tag = implicitly[FastTypeTag[T2]] 18 | 19 | def pickle(coll: (T1, T2), builder: PBuilder): Unit = { 20 | // Our type should already be hinted before this method, however we additionally mark our 
type as 21 | // statically elided. 22 | builder.hintStaticallyElidedType() 23 | builder.beginEntry(coll) 24 | builder.beginCollection(2) 25 | builder.putElement { b => 26 | b.hintTag(elem1Tag) 27 | elem1Pickler.pickle(coll._1, b) 28 | } 29 | builder.putElement { b => 30 | 31 | b.hintTag(elem2Tag) 32 | elem2Pickler.pickle(coll._2, b) 33 | } 34 | builder.endCollection() 35 | builder.endEntry() 36 | } 37 | 38 | def unpickle(tag: String, preader: PReader): Any = { 39 | preader.beginCollection() 40 | // TODO - better warning here. 41 | assert(preader.readLength() == 2) 42 | val fst = elem1Unpickler.unpickleEntry(preader.readElement()) 43 | val snd = elem2Unpickler.unpickleEntry(preader.readElement()) 44 | preader.endCollection() 45 | (fst, snd) 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/pickler/TypeExpression.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | package pickler 3 | 4 | import scala.pickling.FastTypeTag 5 | 6 | object TypeExpressionPicklers { 7 | private val typeExpressionCanToString: CanToString[TypeExpression] = CanToString( 8 | _.toString, { 9 | s: String => TypeExpression.parse(s)._1 10 | }) 11 | } 12 | /** Provides a layer of pickler cake for type expressoins. */ 13 | trait TypeExpressionPicklers extends JavaExtraPicklers { 14 | implicit val typeExpressionPickler: Pickler[TypeExpression] with Unpickler[TypeExpression] = 15 | canToStringPickler[TypeExpression](FastTypeTag[TypeExpression], TypeExpressionPicklers.typeExpressionCanToString) 16 | } -------------------------------------------------------------------------------- /serialization/src/main/scala/sbt/serialization/pickler/package.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization 2 | 3 | package object pickler { 4 | type PrimitivePicklers = scala.pickling.pickler.PrimitivePicklers 5 | type PrimitiveArrayPicklers = scala.pickling.pickler.PrimitiveArrayPicklers 6 | type RefPicklers = scala.pickling.pickler.RefPicklers 7 | } 8 | -------------------------------------------------------------------------------- /serialization/src/test/scala/sbt/serialization/ArrayPicklerSpec.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization.spec 2 | 3 | import org.junit.Assert._ 4 | import org.junit._ 5 | import java.io.File 6 | import java.net.URI 7 | import JUnitUtil._ 8 | import sbt.serialization._ 9 | import scala.language.higherKinds 10 | 11 | class ArrayPicklerTest { 12 | 13 | @Test 14 | def testArrays: Unit = { 15 | // Array(1: Byte) should pickle as [1] 16 | pointed1[Array, Byte] 17 | // pointed2[Array, Byte] 18 | 19 | // Array(1: Short) should pickle as [1] 20 | pointed1[Array, Short] 21 | // pointed2[Array, Short] 22 | 23 | // Array('a') should pickle as ["a"]. 
24 | pointed1[Array, Char] 25 | // pointed2[Array, Char] 26 | 27 | // Array("a") should pickle as ["a"] 28 | pointed1[Array, String] 29 | // pointed2[Array, String] 30 | 31 | // Array(1) should pickle as [1] 32 | pointed1[Array, Int] 33 | // pointed2[Array, Int] 34 | 35 | // Array(1L) should pickle as [1] 36 | pointed1[Array, Long] 37 | // pointed2[Array, Long] 38 | 39 | // Array(false) should pickle as [false] 40 | pointed1[Array, Boolean] 41 | // pointed2[Array, Boolean] 42 | 43 | // Array(1.0F) should pickle as [1.0] 44 | pointed1[Array, Float] 45 | // pointed2[Array, Float] 46 | 47 | // Array(1.0) should pickle as [1.0] 48 | pointed1[Array, Double] 49 | // pointed2[Array, Double] 50 | } 51 | 52 | @Test 53 | def testLists: Unit = { 54 | // List(1: Byte) should pickle as [1] 55 | pointed1[List, Byte] 56 | pointed2[List, Byte] 57 | // List(1: Short) should pickle as [1] 58 | pointed1[List, Short] 59 | pointed2[List, Short] 60 | // List('a') should pickle as ["a"] 61 | pointed1[List, Char] 62 | pointed2[List, Char] 63 | // List("a") should pickle as ["a"] 64 | pointed1[List, String] 65 | pointed2[List, String] 66 | // List(1) should pickle as [1] 67 | pointed1[List, Int] 68 | pointed2[List, Int] 69 | // List(1L) should pickle as [1] 70 | pointed1[List, Long] 71 | pointed2[List, Long] 72 | // List(false) should pickle as [false] 73 | pointed1[List, Boolean] 74 | pointed2[List, Boolean] 75 | // List(1.0F) should pickle as [1.0] 76 | pointed1[List, Float] 77 | pointed2[List, Float] 78 | // List(1.0) should pickle as [1.0] 79 | pointed1[List, Double] 80 | pointed2[List, Double] 81 | } 82 | 83 | @Test 84 | def testVectors: Unit = { 85 | // Vector(1: Byte) should pickle as [1] 86 | pointed1[Vector, Byte] 87 | pointed2[Vector, Byte] 88 | // Vector(1: Short) should pickle as [1] 89 | pointed1[Vector, Short] 90 | pointed2[Vector, Short] 91 | // Vector('a') should pickle as ["a"] 92 | pointed1[Vector, Char] 93 | pointed2[Vector, Char] 94 | // Vector("a") should pickle as ["a"] 95 | pointed1[Vector, String] 96 | pointed2[Vector, String] 97 | // Vector(1) should pickle as [1] 98 | pointed1[Vector, Int] 99 | pointed2[Vector, Int] 100 | // Vector(1L) should pickle as [1] 101 | pointed1[Vector, Long] 102 | pointed2[Vector, Long] 103 | // Vector(false) should pickle as [false] 104 | pointed1[Vector, Boolean] 105 | pointed2[Vector, Boolean] 106 | // Vector(1.0F) should pickle as [1.0] 107 | pointed1[Vector, Float] 108 | pointed2[Vector, Float] 109 | // Vector(1.0) should pickle as [1.0] 110 | pointed1[Vector, Double] 111 | pointed2[Vector, Double] 112 | } 113 | 114 | @Test 115 | def testOptions: Unit = { 116 | SerializedValue(Some(1): Option[Int]).toJsonString must_== "1" 117 | SerializedValue.fromJsonString("1").parse[Option[Int]].get must_== Some(1) 118 | 119 | SerializedValue(Some("a"): Option[String]).toJsonString must_== "\"a\"" 120 | SerializedValue.fromJsonString("\"a\"").parse[Option[String]].get must_== Some("a") 121 | 122 | SerializedValue(None: Option[Int]).toJsonString must_== "null" 123 | SerializedValue.fromJsonString("null").parse[Option[Int]].get must_== None 124 | 125 | SerializedValue(None: Option[String]).toJsonString must_== "null" 126 | SerializedValue.fromJsonString("null").parse[Option[String]].get must_== None 127 | } 128 | 129 | @Test 130 | def testRoundtrip: Unit = { 131 | // TODO it would be nice to pickle Nil.type so this works 132 | //roundTrip(Nil) // custom format to support both Nil and List[A] 133 | roundTrip(Nil: List[String]) 134 | roundTrip(Vector(): Vector[String]) 135 
| roundTripArray(Array(1, 2, 3)) 136 | roundTripArray(Array("Bar", "Baz")) 137 | roundTrip(Vector("Bar", "Baz")) 138 | roundTrip(List("Bar", "Baz")) 139 | roundTrip(Vector(1, 2, 3)) 140 | } 141 | 142 | def trimLine(s: String): String = 143 | (s.lines map { _.trim }).mkString("\n") 144 | def pointed1[F[_], A: ClassManifest](implicit m: Pointed[F], ae: ArrayExample[A], ev0: Pickler[F[A]], ev1: FastTypeTag[F[A]]) = 145 | assertEquals(s"With type $ev1", ae.arrayJson, (trimLine(SerializedValue(m.pointed(ae.one)).toJsonString))) 146 | def pointed2[F[_], A: ClassManifest](implicit m: Pointed[F], ae: ArrayExample[A], ev0: Unpickler[F[A]], ev1: FastTypeTag[F[A]]) = 147 | SerializedValue.fromJsonString(ae.arrayJson).parse[F[A]].get must_== m.pointed(ae.one) 148 | } 149 | 150 | trait ArrayExample[A] { 151 | def one: A 152 | def arrayJson: String 153 | } 154 | object ArrayExample { 155 | def apply[A](one0: A, arrayJson0: String) = new ArrayExample[A] { 156 | def one = one0 157 | def arrayJson: String = arrayJson0 158 | } 159 | val arrayIntExample = """[1]""" 160 | val arrayDoubleExample = """[1.0]""" 161 | val arrayStringExample = """["a"]""" 162 | implicit val byteArrayExample: ArrayExample[Byte] = ArrayExample(1: Byte, arrayIntExample) 163 | implicit val shortArrayExample: ArrayExample[Short] = ArrayExample(1: Short, arrayIntExample) 164 | implicit val intArrayExample: ArrayExample[Int] = ArrayExample(1, arrayIntExample) 165 | implicit val charArrayExample: ArrayExample[Char] = ArrayExample('a', arrayStringExample) 166 | implicit val stringArrayExample: ArrayExample[String] = ArrayExample("a", arrayStringExample) 167 | implicit val longArrayExample: ArrayExample[Long] = ArrayExample(1L, arrayIntExample) 168 | implicit val booleanArrayExample: ArrayExample[Boolean] = ArrayExample(false, """[false]""".stripMargin) 169 | implicit val floatArrayExample: ArrayExample[Float] = ArrayExample(1.0F, arrayDoubleExample) 170 | implicit val doubleArrayExample: ArrayExample[Double] = ArrayExample(1.0, arrayDoubleExample) 171 | } 172 | -------------------------------------------------------------------------------- /serialization/src/test/scala/sbt/serialization/BasicPicklerSpec.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization.spec 2 | 3 | import org.junit.Assert._ 4 | import org.junit._ 5 | import java.io.File 6 | import java.net.URI 7 | import scala.pickling.{ PickleOps, UnpickleOps } 8 | import sbt.serialization._, sbt.serialization.json._ 9 | import JUnitUtil._ 10 | import sbt.serialization.pickler.{ 11 | PrimitivePicklers, 12 | PrimitiveArrayPicklers, 13 | JavaExtraPicklers, 14 | OptionPicklers, 15 | ThrowablePicklers 16 | } 17 | 18 | import scala.pickling.Defaults.pickleOps 19 | import scala.pickling.static._ 20 | 21 | class BasicPicklerTest { 22 | val basicProtocol = new PrimitivePicklers with PrimitiveArrayPicklers with JavaExtraPicklers with OptionPicklers with ThrowablePicklers { 23 | implicit val staticOnly = scala.pickling.static.StaticOnly 24 | } 25 | import basicProtocol._ 26 | 27 | @Test 28 | def testInt: Unit = { 29 | 1.pickle.value must_== "1" 30 | "1".unpickle[Int] must_== 1 31 | } 32 | 33 | @Test 34 | def testLong: Unit = { 35 | 1L.pickle.value must_== "1" 36 | "1".unpickle[Long] must_== 1L 37 | } 38 | 39 | @Test 40 | def testString: Unit = { 41 | "a".pickle.value must_== "\"a\"" 42 | "\"a\"".unpickle[String] must_== "a" 43 | } 44 | 45 | @Test 46 | def testBoolean: Unit = { 47 | false.pickle.value must_== "false" 48 | 
"false".unpickle[Boolean] must_== false 49 | } 50 | 51 | @Test 52 | def testDouble: Unit = { 53 | 1.0.pickle.value must_== "1.0" 54 | "1.0".unpickle[Double] must_== 1.0 55 | } 56 | 57 | @Test 58 | def testRoundtrip: Unit = { 59 | roundTrip("Foo") 60 | roundTrip(new File("/tmp")) 61 | roundTrip(new URI("/tmp")) 62 | roundTrip(true) 63 | roundTrip(false) 64 | roundTrip(10: Short) 65 | roundTrip(11) 66 | roundTrip(12L) 67 | roundTrip(13.0f) 68 | roundTrip(14.0) 69 | roundTrip(None: Option[String]) // roundTrip(None) must fail to compile 70 | roundTrip(Some("Foo"): Option[String]) // roundTrip(Some("Foo")) must fail to compile 71 | roundTrip(Some(true): Option[Boolean]) // roundTrip(Some(true)) must fail to compile 72 | roundTrip(Some(10): Option[Int]) // roundTrip(Some(10)) must fail to compile 73 | roundTrip((10, "Foo")) 74 | roundTrip(((10, false), None: Option[String])) 75 | roundTrip(Map("a" -> 10, "b" -> 20)) 76 | roundTrip(Map(10 -> "a", 20 -> "b")) 77 | roundTrip(Map.empty[Int, Long]) 78 | } 79 | 80 | @Test 81 | def testThrowable: Unit = { 82 | roundTrip(new Exception(): Throwable) 83 | roundTrip(new Exception("foo"): Throwable) 84 | val nested: Throwable = new Exception("foo", new Exception("bar")) 85 | val recovered = nested.pickle.value.unpickle[Throwable] 86 | recovered.getCause.getMessage must_== "bar" 87 | roundTrip(nested) 88 | recovered.getStackTrace()(0).getFileName must_== "BasicPicklerSpec.scala" 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /serialization/src/test/scala/sbt/serialization/JUnitUtil.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization.spec 2 | 3 | import org.junit.Assert._ 4 | import org.junit._ 5 | import sbt.serialization._ 6 | 7 | object JUnitUtil { 8 | private def addWhatWeWerePickling[T, U](t: T)(body: => U): U = try body 9 | catch { 10 | case e: Throwable => 11 | e.printStackTrace() 12 | throw new AssertionError(s"Crash round-tripping ${t.getClass.getName}: value was: ${t}", e) 13 | } 14 | 15 | def roundTripArray[A](x: Array[A])(implicit ev0: Pickler[Array[A]], ev1: Unpickler[Array[A]]): Unit = 16 | roundTripBase[Array[A]](x)((a, b) => 17 | assertEquals(a.toList, b.toList)) { (a, b) => 18 | assertEquals(s"Failed to round trip $x via ${implicitly[Pickler[Array[A]]]} and ${implicitly[Unpickler[Array[A]]]}", a.getMessage, b.getMessage) 19 | } 20 | def roundTrip[A: Pickler: Unpickler](x: A): Unit = 21 | roundTripBase[A](x)((a, b) => 22 | assertEquals(a, b)) { (a, b) => 23 | assertEquals(s"Failed to round trip $x via ${implicitly[Pickler[A]]} and ${implicitly[Unpickler[A]]}", a.getMessage, b.getMessage) 24 | } 25 | def roundTripBase[A: Pickler: Unpickler](a: A)(f: (A, A) => Unit)(e: (Throwable, Throwable) => Unit): Unit = addWhatWeWerePickling(a) { 26 | val json = toJsonString(a) 27 | //System.err.println(s"json: $json") 28 | val parsed = fromJsonString[A](json).get 29 | (a, parsed) match { 30 | case (a: Throwable, parsed: Throwable) => e(a, parsed) 31 | case _ => f(a, parsed) 32 | } 33 | } 34 | implicit class AnyOp[A](a: A) { 35 | def must_==(b: A): Unit = assertEquals(b, a) 36 | } 37 | 38 | import scala.language.implicitConversions 39 | import sbt.serialization.json.JSONPickle 40 | import scala.pickling.UnpickleOps 41 | implicit def toJSONPickle(value: String): JSONPickle = JSONPickle(value) 42 | implicit def toUnpickleOps(value: String): UnpickleOps = new UnpickleOps(JSONPickle(value)) 43 | } 44 | 
-------------------------------------------------------------------------------- /serialization/src/test/scala/sbt/serialization/PicklerGrowableSpec.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization.spec 2 | 3 | import org.junit.Assert._ 4 | import org.junit._ 5 | import sbt.serialization._ 6 | import JUnitUtil._ 7 | 8 | case class Foo(x: Int, y: Option[Int]) 9 | object Foo { 10 | implicit val pickler = genPickler[Foo] 11 | implicit val unpickler = genUnpickler[Foo] 12 | } 13 | 14 | class PicklerGrowableTest { 15 | @Test 16 | def testUnpickleWithExtra: Unit = { 17 | SerializedValue.fromJsonString(extraFieldExample).parse[Foo].get must_== Foo(1, Some(1)) 18 | } 19 | 20 | @Test 21 | def testUnpickleWithMissing: Unit = { 22 | SerializedValue.fromJsonString(missingFieldExample).parse[Foo].get must_== Foo(1, None) 23 | } 24 | 25 | lazy val extraFieldExample = """{ 26 | | "$type": "sbt.serialization.spec.Foo", 27 | | "x": 1, 28 | | "y": 1, 29 | | "z": 1 30 | |}""".stripMargin 31 | lazy val missingFieldExample = """{ 32 | | "$type": "sbt.serialization.spec.Foo", 33 | | "x": 1 34 | |}""".stripMargin 35 | } 36 | -------------------------------------------------------------------------------- /serialization/src/test/scala/sbt/serialization/PicklerTypeSpec.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization.spec 2 | 3 | import org.junit.Assert._ 4 | import org.junit._ 5 | import scala.pickling.{ PicklingException } 6 | import sbt.serialization._ 7 | import JUnitUtil._ 8 | 9 | object Fruits { 10 | sealed trait Fruit 11 | case class Apple(x: Int) extends Fruit 12 | object Apple { 13 | implicit val pickler = genPickler[Apple] 14 | implicit val unpickler = genUnpickler[Apple] 15 | } 16 | case class Orange(x: Int) extends Fruit 17 | object Orange { 18 | implicit val pickler = genPickler[Orange] 19 | implicit val unpickler = genUnpickler[Orange] 20 | } 21 | 22 | object Fruit { 23 | implicit val pickler = genPickler[Fruit] 24 | implicit val unpickler = genUnpickler[Fruit] 25 | } 26 | } 27 | 28 | class PicklerTypeTest { 29 | import Fruits._ 30 | 31 | @Test 32 | def testPickleApple: Unit = { 33 | assertEquals("Apple(1)", appleExample, SerializedValue(Apple(1)).toJsonString) 34 | } 35 | 36 | @Test 37 | def testUnpickleApple: Unit = { 38 | SerializedValue.fromJsonString(appleExample).parse[Apple].get must_== Apple(1) 39 | } 40 | 41 | @Test 42 | def testUnpickleFruit: Unit = { 43 | SerializedValue.fromJsonString(appleExample).parse[Fruit].get must_== Apple(1) 44 | } 45 | 46 | @Test 47 | def testUnpickleOrange: Unit = { 48 | SerializedValue.fromJsonString(appleExample).parse[Orange].get must_== Orange(1) 49 | } 50 | 51 | @Test 52 | def testUnpickleOrangeFromUnknown: Unit = { 53 | SerializedValue.fromJsonString(unknownTypeExample).parse[Orange].get must_== Orange(1) 54 | } 55 | 56 | @Test 57 | def testUnpickleFruitFromUnknown: Unit = { 58 | try { 59 | SerializedValue(unknownTypeExample).parse[Fruit].get 60 | sys.error("didn't fail") 61 | } catch { 62 | case _: PicklingException => () 63 | } 64 | } 65 | 66 | lazy val appleExample = """{"x":1,"$type":"sbt.serialization.spec.Fruits.Apple"}""".stripMargin 67 | lazy val unknownTypeExample = """{ 68 | | "$type": "something_unknown", 69 | | "x": 1 70 | |}""".stripMargin 71 | } 72 | -------------------------------------------------------------------------------- /serialization/src/test/scala/sbt/serialization/Pointed.scala: 
-------------------------------------------------------------------------------- 1 | package sbt.serialization.spec 2 | 3 | trait Pointed[F[_]] { 4 | def pointed[A: ClassManifest](a: A): F[A] 5 | } 6 | 7 | object Pointed { 8 | implicit def arrayPointed: Pointed[Array] = new Pointed[Array] { 9 | def pointed[A: ClassManifest](a: A): Array[A] = Array(a) 10 | } 11 | implicit def listPointed: Pointed[List] = new Pointed[List] { 12 | def pointed[A: ClassManifest](a: A): List[A] = List(a) 13 | } 14 | implicit def vectorPointed: Pointed[Vector] = new Pointed[Vector] { 15 | def pointed[A: ClassManifest](a: A): Vector[A] = Vector(a) 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /serialization/src/test/scala/sbt/serialization/SerializedValueSpec.scala: -------------------------------------------------------------------------------- 1 | package sbt.serialization.spec 2 | 3 | import org.junit.Assert._ 4 | import org.junit._ 5 | import sbt.serialization._ 6 | import JUnitUtil._ 7 | 8 | private final case class Apple(foo: Int) extends Fruit 9 | private object Apple { 10 | implicit val pickler = Pickler.generate[Apple] 11 | implicit val unpickler = Unpickler.generate[Apple] 12 | } 13 | private final case class Orange(bar: String) extends Fruit 14 | private object Orange { 15 | implicit val pickler = Pickler.generate[Orange] 16 | implicit val unpickler = Unpickler.generate[Orange] 17 | } 18 | private sealed trait Fruit 19 | private object Fruit { 20 | implicit val pickler = Pickler.generate[Fruit] 21 | implicit val unpickler = Unpickler.generate[Fruit] 22 | } 23 | 24 | class SerializedValueTest { 25 | @Test 26 | def serializedValueParses(): Unit = { 27 | assertEquals(Apple(42), SerializedValue(Apple(42)).parse[Apple].get) 28 | } 29 | 30 | @Test 31 | def serializedValueHasTag(): Unit = { 32 | val serialized = SerializedValue(Apple(42)) 33 | assertTrue("apple has the apple tag", serialized.hasTag[Apple]) 34 | assertFalse("apple does not have the orange tag", serialized.hasTag[Orange]) 35 | val serializedFruit = SerializedValue[Fruit](Apple(42)) 36 | assertTrue("as-fruit apple has the apple tag", serializedFruit.hasTag[Apple]) 37 | val serializedOrange = SerializedValue(Orange("hello")) 38 | assertFalse("orange is not tagged as apple", serializedOrange.hasTag[Apple]) 39 | } 40 | } 41 | --------------------------------------------------------------------------------
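Read together, PicklerTypeSpec and SerializedValueSpec pin down how a sealed hierarchy travels: the JSON object carries a "$type" field holding the fully-qualified name of the concrete subtype, and SerializedValue.hasTag reports whether a serialized value carries a given type's tag. A short hedged sketch reusing the Fruit/Apple definitions from SerializedValueSpec above; the JSON shown is illustrative, and the example object itself is not part of the repository:

package sbt.serialization.spec

import sbt.serialization._

object FruitTagExample {
  def main(args: Array[String]): Unit = {
    val asFruit = SerializedValue[Fruit](Apple(42))
    // Wire form is roughly {"foo":42,"$type":"sbt.serialization.spec.Apple"}; compare
    // appleExample in PicklerTypeSpec, which spells out the same "$type" convention.
    println(asFruit.toJsonString)
    assert(asFruit.hasTag[Apple])                 // the concrete subtype's tag survives
    assert(asFruit.parse[Apple].get == Apple(42)) // and the value parses back via that tag
  }
}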