├── version.sbt
├── project
│   ├── build.properties
│   ├── plugins.sbt
│   ├── dependencies.scala
│   └── project
│       └── plugins.sbt
├── .gitignore
├── core
│   └── src
│       └── main
│           └── scala
│               └── quasar
│                   └── quark
│                       ├── sort.scala
│                       ├── data.scala
│                       ├── join.scala
│                       ├── package.scala
│                       ├── features.scala
│                       ├── setops.scala
│                       ├── reducefunc.scala
│                       ├── types.scala
│                       ├── dataset.scala
│                       └── mapping.scala
├── README.md
├── LICENSE
└── sbt

--------------------------------------------------------------------------------
/version.sbt:
--------------------------------------------------------------------------------
1 | version in ThisBuild := "0.1.0"
2 |
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=0.13.13-RC2
2 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | target
2 | .DS_Store
3 | project/target
4 | core/target
--------------------------------------------------------------------------------
/project/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.5.0")
2 |
--------------------------------------------------------------------------------
/core/src/main/scala/quasar/quark/sort.scala:
--------------------------------------------------------------------------------
1 | package quasar.quark
2 |
3 | sealed trait Sort[A <: Type] {
4 |   def value: MappingFunc[A, A] => MappingFunc[A, _]
5 | }
6 | final case class ascending[A <: Type](value: MappingFunc[A, A] => MappingFunc[A, _]) extends Sort[A]
7 | final case class descending[A <: Type](value: MappingFunc[A, A] => MappingFunc[A, _]) extends Sort[A]
8 |
--------------------------------------------------------------------------------
/core/src/main/scala/quasar/quark/data.scala:
--------------------------------------------------------------------------------
1 | package quasar.quark
2 |
3 | trait DataConstruct[F[_]] {
4 |   def str(v: scala.Predef.String): F[Type.Str]
5 |
6 |   def int(v: scala.math.BigInt): F[Type.Int]
7 |
8 |   def dec(v: scala.math.BigDecimal): F[Type.Dec]
9 |
10 |   def bool(v: scala.Boolean): F[Type.Bool]
11 |
12 |   def null0: F[Type.Null]
13 | }
14 |
15 | trait DataDeconstruct[A] {
16 |   def deconstruct[F[_]: DataConstruct]: F[A]
17 | }
18 |
--------------------------------------------------------------------------------
/core/src/main/scala/quasar/quark/join.scala:
--------------------------------------------------------------------------------
1 | package quasar.quark
2 |
3 | sealed trait Join[A <: Type, B <: Type] {
4 |   def value: Join.ValueType[A, B]
5 | }
6 | object Join {
7 |   type ValueType[A <: Type, B <: Type] = MappingFunc[Type.Tuple2[A, B], Type.Tuple2[A, B]] => MappingFunc[Type.Tuple2[A, B], Type.Bool]
8 | }
9 | final case class inner[A <: Type, B <: Type](value: Join.ValueType[A, B]) extends Join[A, B]
10 | final case class leftOuter[A <: Type, B <: Type](value: Join.ValueType[A, B]) extends Join[A, B]
11 | final case class rightOuter[A <: Type, B <: Type](value: Join.ValueType[A, B]) extends Join[A, B]
12 | final case class fullOuter[A <: Type, B <: Type](value: Join.ValueType[A, B]) extends Join[A, B]
--------------------------------------------------------------------------------
/core/src/main/scala/quasar/quark/package.scala:
--------------------------------------------------------------------------------
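1 | // The implicits below supply the tuple-, map-, and array-specific syntax on
2 | // MappingFunc and Dataset. A sketch of what they enable, assuming two
3 | // hypothetical functions fa: MappingFunc[A, B] and fb: MappingFunc[A, C]:
4 | //
5 | //   val pair = fa ~ fb   // MappingFunc[A, Type.Tuple2[B, C]]
6 | //   pair._1              // MappingFunc[A, B], via MakeMappingFuncTuple
7 | //   pair._2              // MappingFunc[A, C]
8 |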
9 | package quasar
10 |
11 | package object quark {
12 |   implicit def MakeMappingFuncTuple[A <: Type, B <: Type, C <: Type](v: MappingFunc[A, Type.Tuple2[B, C]]): MappingFuncTuple[A, B, C] =
13 |     MappingFuncTuple(v)
14 |
15 |   implicit def MakeMappingFuncMap[A <: Type, K <: Type, V <: Type](v: MappingFunc[A, Type.Map[K, V]]): MappingFuncMap[A, K, V] =
16 |     MappingFuncMap(v)
17 |
18 |   implicit def MakeMappingFuncArray[A <: Type, E <: Type](v: MappingFunc[A, Type.Arr[E]]): MappingFuncArray[A, E] =
19 |     MappingFuncArray(v)
20 |
21 |   implicit def MakeDatasetTuple2[K <: Type, V <: Type](v: Dataset[Type.Tuple2[K, V]]): DatasetTuple2[K, V] =
22 |     DatasetTuple2(v)
23 |
24 |   implicit def TupleToMappingFuncTuple[A <: Type, B <: Type, C <: Type](t: (MappingFunc[A, B], MappingFunc[A, C])): MappingFunc[A, Type.Tuple2[B, C]] =
25 |     t._1 ~ t._2
26 |
27 | }
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("org.wartremover" % "sbt-wartremover" % "1.1.1")
2 | addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.3")
3 | addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.3.5")
4 | addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.6.1")
5 |
6 | val commonScalacOptions = Seq(
7 |   "-deprecation",
8 |   "-encoding", "UTF-8",
9 |   "-feature",
10 |   "-language:existentials",
11 |   "-language:higherKinds",
12 |   "-language:implicitConversions",
13 |   "-unchecked",
14 |   "-Xfatal-warnings",
15 |   "-Xfuture",
16 |   "-Xlint",
17 |   "-Yno-adapted-args",
18 |   "-Ywarn-dead-code",
19 |   "-Ywarn-numeric-widen",
20 |   "-Ywarn-value-discard")
21 |
22 | scalacOptions ++= commonScalacOptions
23 |
24 | buildInfoKeys := Seq[BuildInfoKey](
25 |   "commonScalacOptions" -> (commonScalacOptions :+ "-Yno-imports"))
26 |
27 | buildInfoPackage := "quark.project.build"
28 |
29 | lazy val meta = project.in(file(".")).enablePlugins(BuildInfoPlugin)
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Quark Analytics DSL for Quasar
2 |
3 | Quark is an embedded DSL for the Quasar Analytics compiler.
4 |
5 | With a syntax reminiscent of Spark, Quark allows developers to easily create complex workflows on semi-structured data that can be compiled and deployed to any target infrastructure supported by the Quasar Analytics compiler.
6 |
7 | In contrast to Spark, Quark is specialized for semi-structured data, rather than just structured data. In addition, Quark does not serialize any code and has a purely-functional, declarative interface.
8 |
9 | On the flip side, Quark does not permit arbitrary user-defined code, but rather carefully limits the space of operations to those supported by the Quasar Analytics compiler.
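10 |
11 | Because every `Dataset` is polymorphic in its interpreter, a Quark query is an ordinary immutable value: nothing runs until a concrete `SetOps` interpreter is supplied. A minimal sketch (the path is hypothetical):
12 |
13 | ```scala
14 | import quasar.quark._
15 |
16 | // Pure description: no I/O happens while the query is being built.
17 | val sample: Dataset[Type.Unknown] =
18 |   Dataset.load("/prod/profiles").distinct.take(100)
19 |
20 | // Interpreting it against any F[_] with a SetOps[F] instance (for example,
21 | // a query-plan representation) is what produces the executable artifact:
22 | //   sample.apply[F]
23 | ```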
24 |
25 | # Build
26 |
27 | The repository bundles its own `sbt` launcher script, so `./sbt compile` from the project root should be all that is needed.
28 |
29 | # Usage
30 |
31 | ```scala
32 | import quasar.quark._
33 |
34 | object Example {
35 |   def main(args: Array[String]): Unit = {
36 |     val dataset = Dataset.load("/prod/profiles")
37 |
38 |     val averageAge =
39 |       dataset
40 |         .groupBy(_.country[Type.Str])
41 |         .map(_.age[Type.Int])
42 |         .average
43 |
44 |     // averageAge is a pure Dataset[Type.Dec] value; handing it to a SetOps
45 |     // interpreter is what compiles and runs it against a target backend.
46 |   }
47 | }
48 | ```
49 |
--------------------------------------------------------------------------------
/core/src/main/scala/quasar/quark/features.scala:
--------------------------------------------------------------------------------
1 | package quasar.quark
2 |
3 | sealed trait BoolLike[A]
4 | object BoolLike {
5 |   implicit val BoolBoolLike = new BoolLike[Type.Bool] { }
6 | }
7 |
8 | sealed trait DateLike[A]
9 | object DateLike {
10 |   implicit val DateDateLike = new DateLike[Type.Date] { }
11 |   implicit val TimestampDateLike = new DateLike[Type.Timestamp] { }
12 | }
13 |
14 | sealed trait TimeLike[A]
15 | object TimeLike {
16 |   implicit val TimeTimeLike = new TimeLike[Type.Time] { }
17 |   implicit val TimestampTimeLike = new TimeLike[Type.Timestamp] { }
18 | }
19 |
20 | sealed trait TimestampLike[A]
21 | object TimestampLike {
22 |   implicit val TimestampTimestampLike = new TimestampLike[Type.Timestamp] { }
23 | }
24 |
25 | sealed trait NumberLike[A]
26 | object NumberLike {
27 |   implicit val IntNumberLike = new NumberLike[Type.Int] { }
28 |   implicit val DecNumberLike = new NumberLike[Type.Dec] { }
29 | }
30 |
31 | sealed trait StringLike[A]
32 | object StringLike {
33 |   implicit val StringStringLike = new StringLike[Type.Str] { }
34 | }
35 |
36 | sealed trait IntLike[A]
37 | object IntLike {
38 |   implicit val IntIntLike = new IntLike[Type.Int] { }
39 | }
40 |
41 | sealed trait DecLike[A]
42 | object DecLike {
43 |   implicit val DecDecLike = new DecLike[Type.Dec] { }
44 | }
45 |
--------------------------------------------------------------------------------
/core/src/main/scala/quasar/quark/setops.scala:
--------------------------------------------------------------------------------
1 | package quasar.quark
2 |
3 | import quasar.ejson._
4 | import quasar.common.SortDir
5 |
6 | import scala.Predef.String
7 | import matryoshka.RecursiveT
8 | import scalaz.NonEmptyList
9 |
10 | trait SetOps[F[_]] extends DataConstruct[F] {
11 |   def constant[T[_[_]]: RecursiveT](e: T[EJson]): F[Type.Unknown]
12 |
13 |   def empty[A <: Type]: F[A]
14 |
15 |   def root: F[Type.Unknown]
16 |
17 |   def read(path: String): F[Type.Unknown]
18 |
19 |   def autojoin[A <: Type, B <: Type](l: F[A], r: F[B]): F[Type.Tuple2[A, B]]
20 |
21 |   def let[A <: Type, B <: Type](v: F[A], binding: F[A] => F[B]): F[B]
22 |
23 |   def sort[A <: Type](v: F[A], on: NonEmptyList[(MappingFunc[A, _], SortDir)]): F[A]
24 |
25 |   def union[A <: Type](l: F[A], r: F[A]): F[A]
26 |
27 |   def intersect[A <: Type](l: F[A], r: F[A]): F[A]
28 |
29 |   def except[A <: Type](l: F[A], r: F[A]): F[A]
30 |
31 |   def groupBy[A <: Type, B <: Type](v: F[A], f: MappingFunc[A, B]): F[A]
32 |
33 |   def distinct[A <: Type](v: F[A]): F[A]
34 |
35 |   def distinctBy[A <: Type, B <: Type](v: F[A], f: MappingFunc[A, B]): F[A]
36 |
37 |   def filter[A <: Type, B <: Type: BoolLike](v: F[A], f: MappingFunc[A, B]): F[A]
38 |
39 |   def take[A <: Type, B <: Type: IntLike](v: F[A], count: F[B]): F[A]
40 |
41 |   def drop[A <: Type, B <: Type: IntLike](v: F[A], count: F[B]): F[A]
42 |
43 |   def sample[A <: Type, B <: Type: IntLike](v: F[A], count: F[B]): F[A]
44 |
45 |   def map[A <: Type, B <: Type](v: F[A], f: MappingFunc[A, B]): F[B]
46 |
47 |   def reduce[A <: Type, B <: Type](v: F[A], f: ReduceFunc[A, B]): F[B]
48 |
49 |   def flattenArrayElements[A <: Type](v: F[Type.Arr[A]]): F[A]
50 |
51 |   def flattenArrayIndices[A <: Type](v: F[Type.Arr[A]]): F[Type.Int]
52 |
53 |   def flattenMapKeys[K <: Type, V <: Type](v: F[Type.Map[K, V]]): F[K]
54 |
55 |   def flattenMapValues[K <: Type, V <: Type](v: F[Type.Map[K, V]]): F[V]
56 |
57 |   def zoomArrayElements[A <: Type](v: F[Type.Arr[A]]): F[A]
58 |
59 |   def zoomArrayIndices[A <: Type](v: F[Type.Arr[A]]): F[Type.Int]
60 |
61 |   def zoomMapKeys[K <: Type, V <: Type](v: F[Type.Map[K, V]]): F[K]
62 |
63 |   def zoomMapValues[K <: Type, V <: Type](v: F[Type.Map[K, V]]): F[V]
64 |
65 |   // def join[A <: Type, B <: Type](l: F[A], r: F[B]): Joined[F, A, B]
66 | }
67 |
68 | object SetOps {
69 |   def apply[F[_]](implicit F: SetOps[F]): SetOps[F] = F
70 | }
71 |
--------------------------------------------------------------------------------
/core/src/main/scala/quasar/quark/reducefunc.scala:
--------------------------------------------------------------------------------
1 | package quasar.quark
2 |
3 | trait ReduceFunc[A <: Type, B <: Type] {
4 |   def apply[F[_]: ReduceOps](v: F[A]): F[B]
5 | }
6 | object ReduceFunc {
7 |   def count[A <: Type]: ReduceFunc[A, Type.Int] = new ReduceFunc[A, Type.Int] {
8 |     def apply[G[_]: ReduceOps](v: G[A]): G[Type.Int] = ReduceOps[G].count(v)
9 |   }
10 |
11 |   def sum[A <: Type: NumberLike]: ReduceFunc[A, A] = new ReduceFunc[A, A] {
12 |     def apply[G[_]: ReduceOps](v: G[A]): G[A] = ReduceOps[G].sum(v)
13 |   }
14 |
15 |   def average[A <: Type: NumberLike]: ReduceFunc[A, Type.Dec] = new ReduceFunc[A, Type.Dec] {
16 |     def apply[G[_]: ReduceOps](v: G[A]): G[Type.Dec] = ReduceOps[G].average(v)
17 |   }
18 |
19 |   def min[A <: Type]: ReduceFunc[A, A] = new ReduceFunc[A, A] {
20 |     def apply[G[_]: ReduceOps](v: G[A]): G[A] = ReduceOps[G].min(v)
21 |   }
22 |
23 |   def max[A <: Type]: ReduceFunc[A, A] = new ReduceFunc[A, A] {
24 |     def apply[G[_]: ReduceOps](v: G[A]): G[A] = ReduceOps[G].max(v)
25 |   }
26 |
27 |   def arbitrary[A <: Type]: ReduceFunc[A, A] = new ReduceFunc[A, A] {
28 |     def apply[G[_]: ReduceOps](v: G[A]): G[A] = ReduceOps[G].arbitrary(v)
29 |   }
30 |
31 |   def first[A <: Type]: ReduceFunc[A, A] = new ReduceFunc[A, A] {
32 |     def apply[G[_]: ReduceOps](v: G[A]): G[A] = ReduceOps[G].first(v)
33 |   }
34 |
35 |   def last[A <: Type]: ReduceFunc[A, A] = new ReduceFunc[A, A] {
36 |     def apply[G[_]: ReduceOps](v: G[A]): G[A] = ReduceOps[G].last(v)
37 |   }
38 |
39 |   def makeArray[A <: Type]: ReduceFunc[A, Type.Arr[A]] = new ReduceFunc[A, Type.Arr[A]] {
40 |     def apply[G[_]: ReduceOps](v: G[A]): G[Type.Arr[A]] = ReduceOps[G].makeArray(v)
41 |   }
42 |
43 |   def makeMap[K <: Type, V <: Type]: ReduceFunc[Type.Tuple2[K, V], Type.Map[K, V]] = new ReduceFunc[Type.Tuple2[K, V], Type.Map[K, V]] {
44 |     def apply[G[_]: ReduceOps](v: G[Type.Tuple2[K, V]]): G[Type.Map[K, V]] = ReduceOps[G].makeMap(v)
45 |   }
46 | }
47 |
48 | trait ReduceOps[F[_]] {
49 |   def count[A <: Type](v: F[A]): F[Type.Int]
50 |
51 |   def sum[A <: Type: NumberLike](v: F[A]): F[A]
52 |
53 |   def average[A <: Type: NumberLike](v: F[A]): F[Type.Dec]
54 |
55 |   def min[A <: Type](v: F[A]): F[A]
56 |
57 |   def max[A <: Type](v: F[A]): F[A]
58 |
59 |   def arbitrary[A <: Type](v: F[A]): F[A]
60 |
61 |   def first[A <: Type](v: F[A]): F[A]
62 |
63 |   def last[A <: Type](v: F[A]): F[A]
64 |
65 |   def makeArray[A <: Type](v: F[A]): F[Type.Arr[A]]
66 |
67 |   def makeMap[A <: Type, B <: Type](kv: F[Type.Tuple2[A, B]]): F[Type.Map[A, B]]
68 | }
69 | object ReduceOps {
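  // A ReduceFunc is interpreter-polymorphic: handed any G[_] with a ReduceOps
  // instance, it rewrites G[A] into the reduced G[B]. A usage sketch, assuming
  // a hypothetical interpreter F that has both SetOps[F] and ReduceOps[F]
  // instances, and a hypothetical ages: F[Type.Int]:
  //
  //   SetOps[F].reduce(ages, ReduceFunc.average)   // : F[Type.Dec]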
70 | def apply[F[_]](implicit W: ReduceOps[F]): ReduceOps[F] = W 71 | } 72 | -------------------------------------------------------------------------------- /project/dependencies.scala: -------------------------------------------------------------------------------- 1 | package quark.project 2 | 3 | import scala.collection.Seq 4 | 5 | import sbt._, Keys._ 6 | 7 | object dependencies { 8 | private val argonautVersion = "6.2-M3" 9 | private val monocleVersion = "1.3.2" 10 | private val quasarVersion = "14.5.7" 11 | private val pathyVersion = "0.2.2" 12 | private val refinedVersion = "0.5.0" 13 | private val scalacheckVersion = "1.12.5" 14 | private val scalazVersion = "7.2.8" 15 | private val specs2Version = "3.8.4-scalacheck-1.12.5" 16 | 17 | val core = Seq( 18 | "commons-codec" % "commons-codec" % "1.10", 19 | "org.scalaz" %% "scalaz-core" % scalazVersion, 20 | "org.quasar-analytics" %% "quasar-foundation-internal" % quasarVersion, 21 | "org.quasar-analytics" %% "quasar-foundation-internal" % quasarVersion % "test" classifier "tests", 22 | "org.quasar-analytics" %% "quasar-connector-internal" % quasarVersion, 23 | "org.quasar-analytics" %% "quasar-connector-internal" % quasarVersion % "test" classifier "tests", 24 | "org.quasar-analytics" %% "quasar-core-internal" % quasarVersion, 25 | "org.quasar-analytics" %% "quasar-core-internal" % quasarVersion % "test" classifier "tests", 26 | "org.quasar-analytics" %% "quasar-frontend-internal" % quasarVersion, 27 | "org.quasar-analytics" %% "quasar-frontend-internal" % quasarVersion % "test" classifier "tests", 28 | "com.github.julien-truffaut" %% "monocle-core" % monocleVersion, 29 | "com.nimbusds" % "oauth2-oidc-sdk" % "5.13", 30 | "com.slamdata" %% "pathy-core" % pathyVersion, 31 | "com.slamdata" %% "pathy-argonaut" % pathyVersion, 32 | "com.github.scopt" %% "scopt" % "3.5.0", 33 | "eu.timepit" %% "refined" % refinedVersion, 34 | "eu.timepit" %% "refined-scalacheck" % refinedVersion % "test", 35 | "io.argonaut" %% "argonaut" % argonautVersion, 36 | "io.argonaut" %% "argonaut-monocle" % argonautVersion, 37 | "io.argonaut" %% "argonaut-scalaz" % argonautVersion, 38 | "org.scalacheck" %% "scalacheck" % scalacheckVersion % "test", 39 | "org.specs2" %% "specs2-core" % specs2Version % "test", 40 | "org.specs2" %% "specs2-scalacheck" % specs2Version % "test", 41 | "org.scalaz" %% "scalaz-scalacheck-binding" % scalazVersion % "test", 42 | "org.typelevel" %% "scalaz-specs2" % "0.4.0" % "test" 43 | ) 44 | } 45 | -------------------------------------------------------------------------------- /core/src/main/scala/quasar/quark/types.scala: -------------------------------------------------------------------------------- 1 | package quasar.quark 2 | 3 | import scala.math.{BigInt, BigDecimal} 4 | import java.time.{Instant, Period, Duration, LocalDate, LocalTime} 5 | 6 | import scalaz.\/ 7 | 8 | sealed trait Type { 9 | type ScalaType 10 | } 11 | object Type { 12 | final case class Unknown() extends Type { 13 | type ScalaType = scala.Any 14 | } 15 | final case class Timestamp() extends Type { 16 | type ScalaType = Instant 17 | } 18 | final case class Date() extends Type { 19 | type ScalaType = LocalDate 20 | } 21 | final case class Time() extends Type { 22 | type ScalaType = LocalTime 23 | } 24 | final case class Interval() extends Type { 25 | type ScalaType = Duration \/ Period 26 | } 27 | final case class Int() extends Type { 28 | type ScalaType = BigInt 29 | } 30 | final case class Dec() extends Type { 31 | type ScalaType = BigDecimal 32 | } 33 | final case 
class Str() extends Type {
34 |     type ScalaType = scala.Predef.String
35 |   }
36 |   final case class Map[A <: Type, B <: Type](key: A, value: B) extends Type {
37 |     type ScalaType = scala.Predef.Map[A#ScalaType, B#ScalaType]
38 |   }
39 |   final case class Arr[A <: Type](element: A) extends Type {
40 |     type ScalaType = scala.Array[A#ScalaType]
41 |   }
42 |   final case class Tuple2[A <: Type, B <: Type](_1: A, _2: B) extends Type {
43 |     type ScalaType = (A#ScalaType, B#ScalaType)
44 |   }
45 |   final case class Bool() extends Type {
46 |     type ScalaType = scala.Boolean
47 |   }
48 |   final case class Null() extends Type {
49 |     type ScalaType = scala.Null
50 |   }
51 |   type UnknownMap = Map[Unknown, Unknown]
52 |   val UnknownMap : UnknownMap = Map(Unknown(), Unknown())
53 |
54 |   type UnknownArr = Arr[Unknown]
55 |   val UnknownArr : UnknownArr = Arr(Unknown())
56 |
57 |   type Record[A <: Type] = Map[Str, A]
58 |   type UnknownRecord = Record[Unknown]
59 | }
60 |
61 | sealed trait HasType[A <: Type] {
62 |   def typeOf: A
63 | }
64 | object HasType {
65 |   import Type._
66 |
67 |   def apply[A <: Type](implicit W: HasType[A]): HasType[A] = W
68 |
69 |   implicit val UnknownHasType: HasType[Unknown] = new HasType[Unknown] {
70 |     def typeOf = Unknown()
71 |   }
72 |   implicit val IntHasType: HasType[Int] = new HasType[Int] {
73 |     def typeOf = Int()
74 |   }
75 |   implicit val DecHasType: HasType[Dec] = new HasType[Dec] {
76 |     def typeOf = Dec()
77 |   }
78 |   implicit val StrHasType: HasType[Str] = new HasType[Str] {
79 |     def typeOf = Str()
80 |   }
81 |   implicit val BoolHasType: HasType[Bool] = new HasType[Bool] {
82 |     def typeOf = Bool()
83 |   }
84 |   implicit def MapHasType[A <: Type: HasType, B <: Type: HasType]: HasType[Map[A, B]] = new HasType[Map[A, B]] {
85 |     def typeOf = Map(HasType[A].typeOf, HasType[B].typeOf)
86 |   }
87 |   implicit def ArrHasType[A <: Type: HasType]: HasType[Arr[A]] = new HasType[Arr[A]] {
88 |     def typeOf = Arr(HasType[A].typeOf)
89 |   }
90 |   implicit def Tuple2HasType[A <: Type: HasType, B <: Type: HasType]: HasType[Tuple2[A, B]] = new HasType[Tuple2[A, B]] {
91 |     def typeOf = Tuple2(HasType[A].typeOf, HasType[B].typeOf)
92 |   }
93 | }
94 |
95 |
96 | // def convert[A <: Type](v: DataDeconstruct[A]): A#ScalaType = v.deconstruct(new DataConstruct[String \/ ?] {
97 | //   def str(v: scala.Predef.String): String \/
98 | //
99 | //   def int(v: scala.math.BigInt): FInt
100 | //
101 | //   def dec(v: scala.math.BigDecimal): FDec
102 | //
103 | //   def bool(v: scala.Boolean): FBool
104 | //
105 | //   def null0: FNull
106 | // })
--------------------------------------------------------------------------------
/core/src/main/scala/quasar/quark/dataset.scala:
--------------------------------------------------------------------------------
1 | package quasar.quark
2 |
3 | import scala.Predef.String
4 | import scala.Int
5 | import scala.Predef._
6 |
7 | /**
8 |  * A dataset is a set of values loaded from the file system. Datasets may be
9 |  * mapped, filtered, sorted, joined, and otherwise transformed.
10 |  */
11 | sealed trait Dataset[A <: Type] { self =>
12 |   def apply[F[_]: SetOps]: F[A]
13 |
14 |   /**
15 |    * Performs a runtime type filter to select for the subset of values in the
16 |    * dataset that conform to the specified type.
17 |    */
18 |   def typed[B <: Type: HasType]: Dataset[B] = map(_.typed[B])
19 |
20 |   /**
21 |    * Distincts the dataset by the values inside the dataset.
22 |    */
23 |   def distinct: Dataset[A] = new Dataset[A] {
24 |     def apply[F[_]: SetOps]: F[A] = SetOps[F].distinct(self.apply)
25 |   }
26 |
27 |   /**
28 |    * Distincts the dataset by the specified projection on values of the dataset.
29 |    */
30 |   def distinctBy[B <: Type](f: MappingFunc[A, A] => MappingFunc[A, B]): Dataset[A] = new Dataset[A] {
31 |     def apply[F[_]: SetOps]: F[A] = SetOps[F].distinctBy(self.apply, f(MappingFunc.id[A]))
32 |   }
33 |
34 |   /**
35 |    * Groups this dataset by the specified projection on the values of the
36 |    * dataset. Set-level operations applied to a grouped dataset are applied
37 |    * within each group.
38 |    */
39 |   def groupBy[B <: Type](f: MappingFunc[A, A] => MappingFunc[A, B]): Dataset[A] = new Dataset[A] {
40 |     def apply[F[_]: SetOps]: F[A] = SetOps[F].groupBy(self.apply, f(MappingFunc.id[A]))
41 |   }
42 |
43 |   /**
44 |    * Joins this dataset with the specified dataset, according to the specified
45 |    * join type and predicate.
46 |    */
47 |   def join[B <: Type](that: Dataset[B])(j: Join[A, B]): Dataset[Type.Tuple2[A, B]] = new Dataset[Type.Tuple2[A, B]] {
48 |     def apply[F[_]: SetOps]: F[Type.Tuple2[A, B]] = ???
49 |   }
50 |
51 |   /**
52 |    * Zips this dataset and the specified dataset together according to the
53 |    * natural join of the two datasets.
54 |    */
55 |   def zip[B <: Type](that: Dataset[B]): Dataset[Type.Tuple2[A, B]] = new Dataset[Type.Tuple2[A, B]] {
56 |     def apply[F[_]: SetOps]: F[Type.Tuple2[A, B]] = SetOps[F].autojoin(self.apply, that.apply[F])
57 |   }
58 |
59 |   /**
60 |    * Maps over every value in this dataset, applying the specified
61 |    * transformation to each element.
62 |    */
63 |   def map[B <: Type](f: MappingFunc[A, A] => MappingFunc[A, B]): Dataset[B] = new Dataset[B] {
64 |     def apply[F[_]: SetOps]: F[B] = SetOps[F].map(self.apply[F], f(MappingFunc.id[A]))
65 |   }
66 |
67 |   /**
68 |    * Maps over every value in the dataset, applying the specified array-
69 |    * generating transformation to each element, and flattening the array.
70 |    */
71 |   def flatMap[B <: Type](f: MappingFunc[A, A] => MappingFunc[A, Type.Arr[B]]): Dataset[B] = new Dataset[B] {
72 |     def apply[F[_]: SetOps]: F[B] = SetOps[F].flattenArrayElements(SetOps[F].map(self.apply[F], f(MappingFunc.id[A])))
73 |   }
74 |
75 |   /**
76 |    * Zips this dataset together with the specified dataset according to their
77 |    * natural join, then maps the resulting pairs with the specified function.
78 |    */
79 |   def zipWith[B <: Type, C <: Type](that: Dataset[B])(f: MappingFunc[Type.Tuple2[A, B], Type.Tuple2[A, B]] => MappingFunc[Type.Tuple2[A, B], C]): Dataset[C] = zip(that).map(f)
80 |
81 |   /**
82 |    * Unions this dataset together with the specified dataset, retaining all
83 |    * duplicates.
84 |    */
85 |   def union(that: Dataset[A]): Dataset[A] = new Dataset[A] {
86 |     def apply[F[_]: SetOps]: F[A] = SetOps[F].union(self.apply, that.apply)
87 |   }
88 |
89 |   /**
90 |    * Intersects this dataset together with the specified dataset, retaining only
91 |    * values that are a member of both datasets.
92 |    */
93 |   def intersect(that: Dataset[A]): Dataset[A] = new Dataset[A] {
94 |     def apply[F[_]: SetOps]: F[A] = SetOps[F].intersect(self.apply, that.apply)
95 |   }
96 |
97 |   /**
98 |    * Removes all values from this dataset that are values of the specified
99 |    * dataset.
100 |    */
101 |   def except(that: Dataset[A]): Dataset[A] = new Dataset[A] {
102 |     def apply[F[_]: SetOps]: F[A] = SetOps[F].except(self.apply, that.apply)
103 |   }
104 |
105 |   /**
106 |    * Filters the dataset by the specified predicate, removing values that do
107 |    * not satisfy the predicate.
108 |    */
109 |   def filter(f: MappingFunc[A, A] => MappingFunc[A, Type.Bool]): Dataset[A] = new Dataset[A] {
110 |     def apply[F[_]: SetOps]: F[A] = SetOps[F].filter[A, Type.Bool](self.apply, f(MappingFunc.id[A]))
111 |   }
112 |
113 |   /**
114 |    * Takes the first `n` values from the dataset.
115 |    */
116 |   def take(n: Int): Dataset[A] = new Dataset[A] {
117 |     def apply[F[_]: SetOps]: F[A] = SetOps[F].take(self.apply, SetOps[F].int(n))
118 |   }
119 |
120 |   /**
121 |    * Drops the first `n` values from the dataset.
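122 |    * For example, `ds.drop(20).take(10)` (a hypothetical paging pipeline)
123 |    * keeps values 21 through 30.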
124 |    */
125 |   def drop(n: Int): Dataset[A] = new Dataset[A] {
126 |     def apply[F[_]: SetOps]: F[A] = SetOps[F].drop(self.apply, SetOps[F].int(n))
127 |   }
128 |
129 |   /**
130 |    * Randomly samples `n` values from the dataset.
131 |    */
132 |   def sample(n: Int): Dataset[A] = new Dataset[A] {
133 |     def apply[F[_]: SetOps]: F[A] = SetOps[F].sample(self.apply, SetOps[F].int(n))
134 |   }
135 |
136 |   /**
137 |    * Sorts the dataset by the specified sort keys, in priority order, e.g.
138 |    * {{{
139 |    * sortBy(ascending(_.length), descending(_.age))
140 |    * }}}
141 |    */
142 |   def sortBy(v1: Sort[A], vs: Sort[A]*): Dataset[A] = new Dataset[A] {
143 |     def apply[F[_]: SetOps]: F[A] = ???
144 |   }
145 |
146 |   /**
147 |    * Counts the values in the dataset.
148 |    */
149 |   def count: Dataset[Type.Int] = new Dataset[Type.Int] {
150 |     def apply[F[_]: SetOps]: F[Type.Int] = SetOps[F].reduce(self.apply, ReduceFunc.count)
151 |   }
152 |
153 |   /**
154 |    * Sums all the values in the dataset.
155 |    */
156 |   def sum(implicit W: NumberLike[A]): Dataset[A] = new Dataset[A] {
157 |     def apply[F[_]: SetOps]: F[A] = SetOps[F].reduce(self.apply, ReduceFunc.sum)
158 |   }
159 |
160 |   /**
161 |    * Computes the average of all the values in the dataset.
162 |    */
163 |   def average(implicit W: NumberLike[A]): Dataset[Type.Dec] = new Dataset[Type.Dec] {
164 |     def apply[F[_]: SetOps]: F[Type.Dec] = SetOps[F].reduce(self.apply, ReduceFunc.average)
165 |   }
166 |
167 |   /**
168 |    * Selects the minimum value in the dataset.
169 |    */
170 |   def min: Dataset[A] = new Dataset[A] {
171 |     def apply[F[_]: SetOps]: F[A] = SetOps[F].reduce(self.apply, ReduceFunc.min)
172 |   }
173 |
174 |   /**
175 |    * Selects the maximum value in the dataset.
176 |    */
177 |   def max: Dataset[A] = new Dataset[A] {
178 |     def apply[F[_]: SetOps]: F[A] = SetOps[F].reduce(self.apply, ReduceFunc.max)
179 |   }
180 |
181 |   /**
182 |    * Selects the first value in the dataset.
183 |    */
184 |   def first: Dataset[A] = new Dataset[A] {
185 |     def apply[F[_]: SetOps]: F[A] = SetOps[F].reduce(self.apply, ReduceFunc.first)
186 |   }
187 |
188 |   /**
189 |    * Selects the last value in the dataset.
190 |    */
191 |   def last: Dataset[A] = new Dataset[A] {
192 |     def apply[F[_]: SetOps]: F[A] = SetOps[F].reduce(self.apply, ReduceFunc.last)
193 |   }
194 |
195 |   /**
196 |    * Selects an arbitrary value in the dataset.
197 |    */
198 |   def arbitrary: Dataset[A] = new Dataset[A] {
199 |     def apply[F[_]: SetOps]: F[A] = SetOps[F].reduce(self.apply, ReduceFunc.arbitrary)
200 |   }
201 |
202 |   /**
203 |    * Packs the dataset into an array.
204 |    */
205 |   def makeArray: Dataset[Type.Arr[A]] = new Dataset[Type.Arr[A]] {
206 |     def apply[F[_]: SetOps]: F[Type.Arr[A]] = SetOps[F].reduce(self.apply, ReduceFunc.makeArray)
207 |   }
208 |
209 |   /**
210 |    * Combines this dataset (the keys) with the specified dataset (the values)
211 |    * to form a map, using the natural join of the two datasets.
212 |    */
213 |   def makeMap[B <: Type](that: Dataset[B]): Dataset[Type.Map[A, B]] = DatasetTuple2(zip(that)).makeMap
214 | }
215 |
216 | object Dataset {
217 |   /**
218 |    * Creates an empty dataset of any type that contains no values.
219 |    */
220 |   def empty[A <: Type]: Dataset[A] = new Dataset[A] {
221 |     def apply[F[_]](implicit F: SetOps[F]): F[A] = F.empty[A]
222 |   }
223 |
224 |   /**
225 |    * Creates a dataset containing the root of the Quasar file system.
226 |    */
227 |   def root: Dataset[Type.Unknown] = new Dataset[Type.Unknown] {
228 |     def apply[F[_]](implicit F: SetOps[F]) = F.root
229 |   }
230 |
231 |   /**
232 |    * Loads the data at a specified path in the Quasar file system.
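233 |    * The result is a Dataset[Type.Unknown]; it can be narrowed at runtime with
234 |    * `typed`, e.g. `Dataset.load("/data/xs").typed[Type.Arr[Type.Int]]` (hypothetical path).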
235 |    */
236 |   def load(path: String): Dataset[Type.Unknown] = new Dataset[Type.Unknown] {
237 |     def apply[F[_]](implicit F: SetOps[F]) = F.read(path)
238 |   }
239 | }
240 |
241 | final case class DatasetTuple2[K <: Type, V <: Type](self: Dataset[Type.Tuple2[K, V]]) {
242 |   /**
243 |    * Packs the dataset into a map.
244 |    */
245 |   def makeMap: Dataset[Type.Map[K, V]] = new Dataset[Type.Map[K, V]] {
246 |     def apply[F[_]: SetOps]: F[Type.Map[K, V]] = SetOps[F].reduce(self.apply, ReduceFunc.makeMap)
247 |   }
248 | }
249 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |                                  Apache License
2 |                            Version 2.0, January 2004
3 |                         http://www.apache.org/licenses/
4 |
5 |    TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 |    1. Definitions.
8 |
9 |       "License" shall mean the terms and conditions for use, reproduction,
10 |       and distribution as defined by Sections 1 through 9 of this document.
11 |
12 |       "Licensor" shall mean the copyright owner or entity authorized by
13 |       the copyright owner that is granting the License.
14 |
15 |       "Legal Entity" shall mean the union of the acting entity and all
16 |       other entities that control, are controlled by, or are under common
17 |       control with that entity. For the purposes of this definition,
18 |       "control" means (i) the power, direct or indirect, to cause the
19 |       direction or management of such entity, whether by contract or
20 |       otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 |       outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 |       "You" (or "Your") shall mean an individual or Legal Entity
24 |       exercising permissions granted by this License.
25 |
26 |       "Source" form shall mean the preferred form for making modifications,
27 |       including but not limited to software source code, documentation
28 |       source, and configuration files.
29 |
30 |       "Object" form shall mean any form resulting from mechanical
31 |       transformation or translation of a Source form, including but
32 |       not limited to compiled object code, generated documentation,
33 |       and conversions to other media types.
34 |
35 |       "Work" shall mean the work of authorship, whether in Source or
36 |       Object form, made available under the License, as indicated by a
37 |       copyright notice that is included in or attached to the work
38 |       (an example is provided in the Appendix below).
39 |
40 |       "Derivative Works" shall mean any work, whether in Source or Object
41 |       form, that is based on (or derived from) the Work and for which the
42 |       editorial revisions, annotations, elaborations, or other modifications
43 |       represent, as a whole, an original work of authorship. For the purposes
44 |       of this License, Derivative Works shall not include works that remain
45 |       separable from, or merely link (or bind by name) to the interfaces of,
46 |       the Work and Derivative Works thereof.
47 |
48 |       "Contribution" shall mean any work of authorship, including
49 |       the original version of the Work and any modifications or additions
50 |       to that Work or Derivative Works thereof, that is intentionally
51 |       submitted to Licensor for inclusion in the Work by the copyright owner
52 |       or by an individual or Legal Entity authorized to submit on behalf of
53 |       the copyright owner.
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 
188 | -------------------------------------------------------------------------------- /core/src/main/scala/quasar/quark/mapping.scala: -------------------------------------------------------------------------------- 1 | package quasar.quark 2 | 3 | import quasar.ejson.EJson 4 | import quasar.std.DateLib.TemporalPart 5 | import scala.Predef._ 6 | import scala.language.dynamics 7 | import scala.Dynamic 8 | 9 | import matryoshka.RecursiveT 10 | 11 | trait MappingFunc[A <: Type, B <: Type] extends Dynamic { self => 12 | import MappingFunc.Case 13 | 14 | def apply[F[_]: MappingOps](v: F[A]): F[B] 15 | 16 | def >>> [C <: Type](that: MappingFunc[B, C]): MappingFunc[A, C] = new MappingFunc[A, C] { 17 | def apply[F[_]: MappingOps](v: F[A]): F[C] = that.apply[F](self.apply[F](v)) 18 | } 19 | 20 | def typed[C <: Type: HasType]: MappingFunc[A, C] = new MappingFunc[A, C] { 21 | def apply[F[_]: MappingOps](v: F[A]): F[C] = MappingOps[F].typed(self(v), HasType[C].typeOf) 22 | } 23 | 24 | def := [T[_[_]]: RecursiveT](e: T[EJson]): MappingFunc[A, Type.Unknown] = new MappingFunc[A, Type.Unknown] { 25 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Unknown] = MappingOps[F].set[T, B](self(v), e) 26 | } 27 | 28 | def extractCentury(implicit W: DateLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 29 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractCentury(self.apply[F](v)) 30 | } 31 | 32 | def extractDayOfMonth(implicit W: DateLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 33 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractDayOfMonth(self.apply[F](v)) 34 | } 35 | 36 | def extractDecade(implicit W: DateLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 37 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractDecade(self.apply[F](v)) 38 | } 39 | 40 | def extractDayOfWeek(implicit W: DateLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 41 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractDayOfWeek(self.apply[F](v)) 42 | } 43 | 44 | def extractDayOfYear(implicit W: DateLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 45 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractDayOfYear(self.apply[F](v)) 46 | } 47 | 48 | def extractEpoch(implicit W: DateLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 49 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractEpoch(self.apply[F](v)) 50 | } 51 | 52 | def extractMillennium(implicit W: DateLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 53 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractMillennium(self.apply[F](v)) 54 | } 55 | 56 | def extractMonth(implicit W: DateLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 57 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractMonth(self.apply[F](v)) 58 | } 59 | 60 | def extractQuarter(implicit W: DateLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 61 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractQuarter(self.apply[F](v)) 62 | } 63 | 64 | def extractTimezone(implicit W: DateLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 65 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractTimezone(self.apply[F](v)) 66 | } 67 | 68 | def extractTimezoneHour(implicit W: DateLike[B]): MappingFunc[A, Type.Int] = new 
MappingFunc[A, Type.Int] { 69 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractTimezoneHour(self.apply[F](v)) 70 | } 71 | 72 | def extractTimezoneMinute(implicit W: DateLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 73 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractTimezoneMinute(self.apply[F](v)) 74 | } 75 | 76 | def extractWeek(implicit W: DateLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 77 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractWeek(self.apply[F](v)) 78 | } 79 | 80 | def extractYear(implicit W: DateLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 81 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractYear(self.apply[F](v)) 82 | } 83 | 84 | def extractHour(implicit W: TimeLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 85 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractHour(self.apply[F](v)) 86 | } 87 | 88 | def extractIsoDayOfWeek(implicit W: DateLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 89 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractIsoDayOfWeek(self.apply[F](v)) 90 | } 91 | 92 | def extractIsoYear(implicit W: DateLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 93 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractIsoYear(self.apply[F](v)) 94 | } 95 | 96 | def extractMicroseconds(implicit W: TimeLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 97 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractMicroseconds(self.apply[F](v)) 98 | } 99 | 100 | def extractMilliseconds(implicit W: TimeLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 101 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractMilliseconds(self.apply[F](v)) 102 | } 103 | 104 | def extractMinute(implicit W: TimeLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 105 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractMinute(self.apply[F](v)) 106 | } 107 | 108 | def extractSecond(implicit W: TimeLike[B]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 109 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].extractSecond(self.apply[F](v)) 110 | } 111 | 112 | def date(implicit W: StringLike[B]): MappingFunc[A, Type.Date] = new MappingFunc[A, Type.Date] { 113 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Date] = MappingOps[F].date(self.apply[F](v)) 114 | } 115 | 116 | def time(implicit W: StringLike[B]): MappingFunc[A, Type.Time] = new MappingFunc[A, Type.Time] { 117 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Time] = MappingOps[F].time(self.apply[F](v)) 118 | } 119 | 120 | def timestamp(implicit W: IntLike[B]): MappingFunc[A, Type.Timestamp] = new MappingFunc[A, Type.Timestamp] { 121 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Timestamp] = MappingOps[F].timestamp(self.apply[F](v)) 122 | } 123 | 124 | def interval(implicit W: StringLike[B]): MappingFunc[A, Type.Interval] = new MappingFunc[A, Type.Interval] { 125 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Interval] = MappingOps[F].interval(self.apply[F](v)) 126 | } 127 | 128 | def startOfDay(implicit W: DateLike[B]): MappingFunc[A, B] = new MappingFunc[A, B] { 129 | def apply[F[_]: MappingOps](v: F[A]): F[B] = MappingOps[F].startOfDay(self.apply[F](v)) 130 | } 131 | 132 | def temporalTrunc(part: 
TemporalPart)(implicit W: TimestampLike[B]): MappingFunc[A, Type.Timestamp] = new MappingFunc[A, Type.Timestamp] { 133 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Timestamp] = MappingOps[F].temporalTrunc(part, self.apply[F](v)) 134 | } 135 | 136 | def timeOfDay(implicit W: TimestampLike[B]): MappingFunc[A, Type.Time] = new MappingFunc[A, Type.Time] { 137 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Time] = MappingOps[F].timeOfDay(self.apply[F](v)) 138 | } 139 | 140 | def toTimestamp(implicit W: IntLike[B]): MappingFunc[A, Type.Timestamp] = new MappingFunc[A, Type.Timestamp] { 141 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Timestamp] = MappingOps[F].toTimestamp(self.apply[F](v)) 142 | } 143 | 144 | def typeOf: MappingFunc[A, Type.Str] = new MappingFunc[A, Type.Str] { 145 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Str] = MappingOps[F].typeOf(self.apply[F](v)) 146 | } 147 | 148 | def unary_- (implicit W: NumberLike[B]): MappingFunc[A, B] = new MappingFunc[A, B] { 149 | def apply[F[_]: MappingOps](v: F[A]): F[B] = MappingOps[F].negate(self(v)) 150 | } 151 | 152 | // Binary operators: 153 | def + (that: MappingFunc[A, B])(implicit W: NumberLike[B]): MappingFunc[A, B] = new MappingFunc[A, B] { 154 | def apply[F[_]: MappingOps](v: F[A]): F[B] = MappingOps[F].add(self(v), that(v)) 155 | } 156 | 157 | def - (that: MappingFunc[A, B])(implicit W: NumberLike[B]): MappingFunc[A, B] = new MappingFunc[A, B] { 158 | def apply[F[_]: MappingOps](v: F[A]): F[B] = MappingOps[F].subtract(self(v), that(v)) 159 | } 160 | 161 | def * (that: MappingFunc[A, B])(implicit W: NumberLike[B]): MappingFunc[A, B] = new MappingFunc[A, B] { 162 | def apply[F[_]: MappingOps](v: F[A]): F[B] = MappingOps[F].multiply(self(v), that(v)) 163 | } 164 | 165 | def / (that: MappingFunc[A, B])(implicit W: NumberLike[B]): MappingFunc[A, B] = new MappingFunc[A, B] { 166 | def apply[F[_]: MappingOps](v: F[A]): F[B] = MappingOps[F].divide(self(v), that(v)) 167 | } 168 | 169 | def % (that: MappingFunc[A, B])(implicit W: NumberLike[B]): MappingFunc[A, B] = new MappingFunc[A, B] { 170 | def apply[F[_]: MappingOps](v: F[A]): F[B] = MappingOps[F].modulo(self(v), that(v)) 171 | } 172 | 173 | def ^ (that: MappingFunc[A, B])(implicit W: NumberLike[B]): MappingFunc[A, B] = new MappingFunc[A, B] { 174 | def apply[F[_]: MappingOps](v: F[A]): F[B] = MappingOps[F].power(self(v), that(v)) 175 | } 176 | 177 | def === [C <: Type](that: MappingFunc[A, C]): MappingFunc[A, Type.Bool] = new MappingFunc[A, Type.Bool] { 178 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Bool] = MappingOps[F].eq(self(v), that(v)) 179 | } 180 | 181 | def !== [C <: Type](that: MappingFunc[A, C]): MappingFunc[A, Type.Bool] = new MappingFunc[A, Type.Bool] { 182 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Bool] = MappingOps[F].neq(self(v), that(v)) 183 | } 184 | 185 | def < [C <: Type](that: MappingFunc[A, C]): MappingFunc[A, Type.Bool] = new MappingFunc[A, Type.Bool] { 186 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Bool] = MappingOps[F].lt(self(v), that(v)) 187 | } 188 | 189 | def <= [C <: Type](that: MappingFunc[A, C]): MappingFunc[A, Type.Bool] = new MappingFunc[A, Type.Bool] { 190 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Bool] = MappingOps[F].lte(self(v), that(v)) 191 | } 192 | 193 | def > [C <: Type](that: MappingFunc[A, C]): MappingFunc[A, Type.Bool] = new MappingFunc[A, Type.Bool] { 194 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Bool] = MappingOps[F].gt(self(v), that(v)) 195 | } 196 | 197 | def >= [C <: Type](that: MappingFunc[A, C]): 
MappingFunc[A, Type.Bool] = new MappingFunc[A, Type.Bool] {
198 |     def apply[F[_]: MappingOps](v: F[A]): F[Type.Bool] = MappingOps[F].gte(self(v), that(v))
199 |   }
200 |
201 |   def ?? (that: MappingFunc[A, B]): MappingFunc[A, B] = new MappingFunc[A, B] {
202 |     def apply[F[_]: MappingOps](v: F[A]): F[B] = MappingOps[F].ifUndefined(self(v), that(v))
203 |   }
204 |
205 |   def && (that: MappingFunc[A, B])(implicit W: BoolLike[B]): MappingFunc[A, Type.Bool] = new MappingFunc[A, Type.Bool] {
206 |     def apply[F[_]: MappingOps](v: F[A]): F[Type.Bool] = MappingOps[F].and(self(v), that(v))
207 |   }
208 |
209 |   def || (that: MappingFunc[A, B])(implicit W: BoolLike[B]): MappingFunc[A, Type.Bool] = new MappingFunc[A, Type.Bool] {
210 |     def apply[F[_]: MappingOps](v: F[A]): F[Type.Bool] = MappingOps[F].or(self(v), that(v))
211 |   }
212 |
213 |   def within(that: MappingFunc[A, Type.Arr[B]]): MappingFunc[A, Type.Bool] = new MappingFunc[A, Type.Bool] {
214 |     def apply[F[_]: MappingOps](v: F[A]): F[Type.Bool] = MappingOps[F].within(self(v), that(v))
215 |   }
216 |
217 |   def makeMap[C <: Type](that: MappingFunc[A, C]): MappingFunc[A, Type.Map[B, C]] = new MappingFunc[A, Type.Map[B, C]] {
218 |     def apply[F[_]: MappingOps](v: F[A]): F[Type.Map[B, C]] = MappingOps[F].makeMap(self(v), that(v))
219 |   }
220 |
221 |   def to[C <: Type](that: MappingFunc[A, C])(implicit W1: IntLike[B], W2: IntLike[C]): MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] {
222 |     def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].range(self(v), that(v))
223 |   }
224 |
225 |   /**
226 |    * Switches based on a series of cases, which are pairs of predicates and
227 |    * values to return if the predicates evaluate to true.
228 |    * {{{
229 |    * v.switch(
230 |    *   (_ > bar) -> (_ + baz)
231 |    * )
232 |    * }}}
233 |    */
234 |   def switch[C <: Type](f1: Case[A, B, C], fs: Case[A, B, C]*): MappingFunc[A, C] = new MappingFunc[A, C] {
235 |     def apply[F[_]: MappingOps](v: F[A]): F[C] =
236 |       (f1 :: fs.toList).foldRight[F[C]](MappingOps[F].undefined[C]) { (f, acc) =>
237 |         MappingOps[F].cond(f._1(self)(v), f._2(self)(v), acc)
238 |       }
239 |   }
240 |
241 |   def ~ [C <: Type](that: MappingFunc[A, C]): MappingFunc[A, Type.Tuple2[B, C]] = new MappingFunc[A, Type.Tuple2[B, C]] {
242 |     def apply[F[_]: MappingOps](v: F[A]): F[Type.Tuple2[B, C]] = MappingOps[F].entuple(self.apply(v), that.apply(v))
243 |   }
244 |
245 |   def selectDynamic[C <: Type: HasType](name: String): MappingFunc[A, C] =
246 |     ???
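  // selectDynamic is what lets field access like `_.age[Type.Int]` typecheck;
  // a full implementation would plausibly lower the field name to projectKey
  // on a Type.Record (i.e. Type.Map[Type.Str, _]). Left unimplemented above.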
247 | } 248 | 249 | object MappingFunc { 250 | type Case[A <: Type, B <: Type, C <: Type] = (MappingFunc[A, B] => MappingFunc[A, Type.Bool], MappingFunc[A, B] => MappingFunc[A, C]) 251 | 252 | def now[A <: Type]: MappingFunc[A, Type.Timestamp] = new MappingFunc[A, Type.Timestamp] { 253 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Timestamp] = MappingOps[F].now 254 | } 255 | 256 | def left[A <: Type, B <: Type]: MappingFunc[Type.Tuple2[A, B], A] = new MappingFunc[Type.Tuple2[A, B], A] { 257 | def apply[F[_]: MappingOps](v: F[Type.Tuple2[A, B]]): F[A] = MappingOps[F].left(v) 258 | } 259 | 260 | def right[A <: Type, B <: Type]: MappingFunc[Type.Tuple2[A, B], B] = new MappingFunc[Type.Tuple2[A, B], B] { 261 | def apply[F[_]: MappingOps](v: F[Type.Tuple2[A, B]]): F[B] = MappingOps[F].right(v) 262 | } 263 | 264 | def constant[T[_[_]]: RecursiveT, A <: Type](e: T[EJson]): MappingFunc[A, Type.Unknown] = new MappingFunc[A, Type.Unknown] { 265 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Unknown] = MappingOps[F].constant(e) 266 | } 267 | 268 | def id[A <: Type]: MappingFunc[A, A] = new MappingFunc[A, A] { 269 | def apply[F[_]: MappingOps](v: F[A]): F[A] = v 270 | } 271 | } 272 | 273 | final case class MappingFuncTuple[A <: Type, B <: Type, C <: Type](self: MappingFunc[A, Type.Tuple2[B, C]]) { 274 | def _1 : MappingFunc[A, B] = left 275 | def _2 : MappingFunc[A, C] = right 276 | 277 | def left: MappingFunc[A, B] = new MappingFunc[A, B] { 278 | def apply[F[_]: MappingOps](v: F[A]): F[B] = MappingOps[F].left(self(v)) 279 | } 280 | 281 | def right: MappingFunc[A, C] = new MappingFunc[A, C] { 282 | def apply[F[_]: MappingOps](v: F[A]): F[C] = MappingOps[F].right(self(v)) 283 | } 284 | } 285 | 286 | final case class MappingFuncMap[A <: Type, K <: Type, V <: Type](self: MappingFunc[A, Type.Map[K, V]]) { 287 | def merge(that: MappingFunc[A, Type.Map[K, V]]): MappingFunc[A, Type.Map[K, V]] = new MappingFunc[A, Type.Map[K, V]] { 288 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Map[K, V]] = MappingOps[F].concatMaps(self(v), that(v)) 289 | } 290 | 291 | def projectKey(that: MappingFunc[A, K]): MappingFunc[A, V] = new MappingFunc[A, V] { 292 | def apply[F[_]: MappingOps](v: F[A]): F[V] = MappingOps[F].projectKey(self(v), that(v)) 293 | } 294 | 295 | def deleteKey(that: MappingFunc[A, K]): MappingFunc[A, Type.Map[K, V]] = new MappingFunc[A, Type.Map[K, V]] { 296 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Map[K, V]] = MappingOps[F].deleteKey(self(v), that(v)) 297 | } 298 | } 299 | 300 | final case class MappingFuncArray[A <: Type, E <: Type](self: MappingFunc[A, Type.Arr[E]]) { 301 | def length: MappingFunc[A, Type.Int] = new MappingFunc[A, Type.Int] { 302 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Int] = MappingOps[F].length(self(v)) 303 | } 304 | 305 | def ++ (that: MappingFunc[A, Type.Arr[E]]): MappingFunc[A, Type.Arr[E]] = new MappingFunc[A, Type.Arr[E]] { 306 | def apply[F[_]: MappingOps](v: F[A]): F[Type.Arr[E]] = MappingOps[F].concatArrays(self(v), that(v)) 307 | } 308 | 309 | def projectIndex[C <: Type](that: MappingFunc[A, C])(implicit W2: IntLike[C]): MappingFunc[A, E] = new MappingFunc[A, E] { 310 | def apply[F[_]: MappingOps](v: F[A]): F[E] = MappingOps[F].projectIndex(self(v), that(v)) 311 | } 312 | } 313 | 314 | trait MappingOps[F[_]] extends DataConstruct[F] { 315 | def now: F[Type.Timestamp] 316 | 317 | def undefined[A <: Type]: F[A] 318 | 319 | def constant[T[_[_]]: RecursiveT](e: T[EJson]): F[Type.Unknown] 320 | 321 | def set[T[_[_]]: RecursiveT, A](v: F[A], e: T[EJson]): 
F[Type.Unknown] 322 | 323 | def entuple[A <: Type, B <: Type](l: F[A], r: F[B]): F[Type.Tuple2[A, B]] 324 | 325 | def left[A <: Type, B <: Type](v: F[Type.Tuple2[A, B]]): F[A] 326 | 327 | def right[A <: Type, B <: Type](v: F[Type.Tuple2[A, B]]): F[B] 328 | 329 | def typed[A <: Type, B <: Type: HasType](v: F[A], t: B): F[B] 330 | 331 | // Unary mapping ops: 332 | def length[A <: Type](v: F[Type.Arr[A]]): F[Type.Int] 333 | 334 | def extractCentury[A <: Type: DateLike](v: F[A]): F[Type.Int] 335 | 336 | def extractDayOfMonth[A <: Type: DateLike](v: F[A]): F[Type.Int] 337 | 338 | def extractDecade[A <: Type: DateLike](v: F[A]): F[Type.Int] 339 | 340 | def extractDayOfWeek[A <: Type: DateLike](v: F[A]): F[Type.Int] 341 | 342 | def extractDayOfYear[A <: Type: DateLike](v: F[A]): F[Type.Int] 343 | 344 | def extractEpoch[A <: Type: DateLike](v: F[A]): F[Type.Int] 345 | 346 | def extractMillennium[A <: Type: DateLike](v: F[A]): F[Type.Int] 347 | 348 | def extractMonth[A <: Type: DateLike](v: F[A]): F[Type.Int] 349 | 350 | def extractQuarter[A <: Type: DateLike](v: F[A]): F[Type.Int] 351 | 352 | def extractTimezone[A <: Type: DateLike](v: F[A]): F[Type.Int] 353 | 354 | def extractTimezoneHour[A <: Type: DateLike](v: F[A]): F[Type.Int] 355 | 356 | def extractTimezoneMinute[A <: Type: DateLike](v: F[A]): F[Type.Int] 357 | 358 | def extractWeek[A <: Type: DateLike](v: F[A]): F[Type.Int] 359 | 360 | def extractYear[A <: Type: DateLike](v: F[A]): F[Type.Int] 361 | 362 | def extractHour[A <: Type: TimeLike](v: F[A]): F[Type.Int] 363 | 364 | def extractIsoDayOfWeek[A <: Type: DateLike](v: F[A]): F[Type.Int] 365 | 366 | def extractIsoYear[A <: Type: DateLike](v: F[A]): F[Type.Int] 367 | 368 | def extractMicroseconds[A <: Type: TimeLike](v: F[A]): F[Type.Int] 369 | 370 | def extractMilliseconds[A <: Type: TimeLike](v: F[A]): F[Type.Int] 371 | 372 | def extractMinute[A <: Type: TimeLike](v: F[A]): F[Type.Int] 373 | 374 | def extractSecond[A <: Type: TimeLike](v: F[A]): F[Type.Int] 375 | 376 | def date[A <: Type: StringLike](v: F[A]): F[Type.Date] 377 | 378 | def time[A <: Type: StringLike](v: F[A]): F[Type.Time] 379 | 380 | def timestamp[A <: Type: IntLike](v: F[A]): F[Type.Timestamp] 381 | 382 | def interval[A <: Type: StringLike](v: F[A]): F[Type.Interval] 383 | 384 | def startOfDay[A <: Type: DateLike](v: F[A]): F[A] 385 | 386 | def temporalTrunc[A <: Type: TimestampLike](part: TemporalPart, v: F[A]): F[Type.Timestamp] 387 | 388 | def timeOfDay[A <: Type: TimestampLike](v: F[A]): F[Type.Time] 389 | 390 | def toTimestamp[A <: Type: IntLike](v: F[A]): F[Type.Timestamp] 391 | 392 | def typeOf[A <: Type](v: F[A]): F[Type.Str] 393 | 394 | def negate[A <: Type: NumberLike](v: F[A]): F[A] 395 | 396 | // Binary mapping ops: 397 | def add[A <: Type: NumberLike](l: F[A], r: F[A]): F[A] 398 | 399 | def multiply[A <: Type: NumberLike](l: F[A], r: F[A]): F[A] 400 | 401 | def subtract[A <: Type: NumberLike](l: F[A], r: F[A]): F[A] 402 | 403 | def divide[A <: Type: NumberLike](l: F[A], r: F[A]): F[A] 404 | 405 | def modulo[A <: Type: NumberLike](l: F[A], r: F[A]): F[A] 406 | 407 | def power[A <: Type: NumberLike](l: F[A], r: F[A]): F[A] 408 | 409 | def eq[A <: Type, B <: Type](l: F[A], r: F[B]): F[Type.Bool] 410 | 411 | def neq[A <: Type, B <: Type](l: F[A], r: F[B]): F[Type.Bool] 412 | 413 | def lt[A <: Type, B <: Type](l: F[A], r: F[B]): F[Type.Bool] 414 | 415 | def lte[A <: Type, B <: Type](l: F[A], r: F[B]): F[Type.Bool] 416 | 417 | def gt[A <: Type, B <: Type](l: F[A], r: F[B]): F[Type.Bool] 418 | 419 | def gte[A <: 
458 | 
--------------------------------------------------------------------------------
/sbt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #
3 | # A more capable sbt runner, coincidentally also called sbt.
4 | # Author: Paul Phillips
5 | 
6 | set -o pipefail
7 | 
8 | declare -r sbt_release_version="0.13.13"
9 | declare -r sbt_unreleased_version="0.13.13"
10 | 
11 | declare -r latest_212="2.12.1"
12 | declare -r latest_211="2.11.8"
13 | declare -r latest_210="2.10.6"
14 | declare -r latest_29="2.9.3"
15 | declare -r latest_28="2.8.2"
16 | 
17 | declare -r buildProps="project/build.properties"
18 | 
19 | declare -r sbt_launch_ivy_release_repo="http://repo.typesafe.com/typesafe/ivy-releases"
20 | declare -r sbt_launch_ivy_snapshot_repo="https://repo.scala-sbt.org/scalasbt/ivy-snapshots"
21 | declare -r sbt_launch_mvn_release_repo="http://repo.scala-sbt.org/scalasbt/maven-releases"
22 | declare -r sbt_launch_mvn_snapshot_repo="http://repo.scala-sbt.org/scalasbt/maven-snapshots"
23 | 
24 | declare -r default_jvm_opts_common="-Xms512m -Xmx1536m -Xss2m"
25 | declare -r noshare_opts="-Dsbt.global.base=project/.sbtboot -Dsbt.boot.directory=project/.boot -Dsbt.ivy.home=project/.ivy"
26 | 
27 | declare sbt_jar sbt_dir sbt_create sbt_version sbt_script sbt_new
28 | declare sbt_explicit_version
29 | declare verbose noshare batch trace_level
30 | declare sbt_saved_stty debugUs
31 | 
32 | declare java_cmd="java"
33 | declare sbt_launch_dir="$HOME/.sbt/launchers"
34 | declare sbt_launch_repo
35 | 
36 | # pull -J and -D options to give to java.
37 | declare -a java_args scalac_args sbt_commands residual_args
38 | 
39 | # args to jvm/sbt via files or environment variables
40 | declare -a extra_jvm_opts extra_sbt_opts
41 | 
42 | echoerr () { echo >&2 "$@"; }
43 | vlog ()    { [[ -n "$verbose" ]] && echoerr "$@"; }
44 | die ()     { echo "Aborting: $@" ; exit 1; }
45 | 
46 | # restore stty settings (echo in particular)
47 | onSbtRunnerExit() {
48 |   [[ -n "$sbt_saved_stty" ]] || return
49 |   vlog ""
50 |   vlog "restoring stty: $sbt_saved_stty"
51 |   stty "$sbt_saved_stty"
52 |   unset sbt_saved_stty
53 | }
54 | 
55 | # save stty and trap exit, to ensure echo is re-enabled if we are interrupted.
56 | trap onSbtRunnerExit EXIT
57 | sbt_saved_stty="$(stty -g 2>/dev/null)"
58 | vlog "Saved stty: $sbt_saved_stty"
59 | 
60 | # this seems to cover the bases on OSX, and someone will
61 | # have to tell me about the others.
62 | get_script_path () {
63 |   local path="$1"
64 |   [[ -L "$path" ]] || { echo "$path" ; return; }
65 | 
66 |   local target="$(readlink "$path")"
67 |   if [[ "${target:0:1}" == "/" ]]; then
68 |     echo "$target"
69 |   else
70 |     echo "${path%/*}/$target"
71 |   fi
72 | }
73 | 
74 | declare -r script_path="$(get_script_path "$BASH_SOURCE")"
75 | declare -r script_name="${script_path##*/}"
76 | 
77 | init_default_option_file () {
78 |   local overriding_var="${!1}"
79 |   local default_file="$2"
80 |   if [[ ! -r "$default_file" && "$overriding_var" =~ ^@(.*)$ ]]; then
81 |     local envvar_file="${BASH_REMATCH[1]}"
82 |     if [[ -r "$envvar_file" ]]; then
83 |       default_file="$envvar_file"
84 |     fi
85 |   fi
86 |   echo "$default_file"
87 | }
88 | 
89 | declare sbt_opts_file="$(init_default_option_file SBT_OPTS .sbtopts)"
90 | declare jvm_opts_file="$(init_default_option_file JVM_OPTS .jvmopts)"
91 | 
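init_default_option_file implements the '@'-file convention described in the usage text further down: SBT_OPTS and JVM_OPTS may hold options directly, or point at an options file when prefixed with '@'. A hypothetical session (file path invented):

  # No readable ./.sbtopts here, and SBT_OPTS matches ^@(.*)$,
  # so sbt_opts_file resolves to /tmp/quark.sbtopts instead.
  $ echo "-no-colors" > /tmp/quark.sbtopts
  $ SBT_OPTS="@/tmp/quark.sbtopts" ./sbt -v
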
92 | build_props_sbt () {
93 |   [[ -r "$buildProps" ]] && \
94 |     grep '^sbt\.version' "$buildProps" | tr '=\r' ' ' | awk '{ print $2; }'
95 | }
96 | 
97 | update_build_props_sbt () {
98 |   local ver="$1"
99 |   local old="$(build_props_sbt)"
100 | 
101 |   [[ -r "$buildProps" ]] && [[ "$ver" != "$old" ]] && {
102 |     perl -pi -e "s/^sbt\.version\b.*\$/sbt.version=${ver}/" "$buildProps"
103 |     grep -q '^sbt.version[ =]' "$buildProps" || printf "\nsbt.version=%s\n" "$ver" >> "$buildProps"
104 | 
105 |     vlog "!!!"
106 |     vlog "!!! Updated file $buildProps setting sbt.version to: $ver"
107 |     vlog "!!! Previous value was: $old"
108 |     vlog "!!!"
109 |   }
110 | }
111 | 
112 | set_sbt_version () {
113 |   sbt_version="${sbt_explicit_version:-$(build_props_sbt)}"
114 |   [[ -n "$sbt_version" ]] || sbt_version=$sbt_release_version
115 |   export sbt_version
116 | }
117 | 
118 | url_base () {
119 |   local version="$1"
120 | 
121 |   case "$version" in
122 |     0.7.*)     echo "http://simple-build-tool.googlecode.com" ;;
123 |     0.10.* )   echo "$sbt_launch_ivy_release_repo" ;;
124 |     0.11.[12]) echo "$sbt_launch_ivy_release_repo" ;;
125 |     0.*-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]) # ie "*-yyyymmdd-hhMMss"
126 |                echo "$sbt_launch_ivy_snapshot_repo" ;;
127 |     0.*)       echo "$sbt_launch_ivy_release_repo" ;;
128 |     *-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]) # ie "*-yyyymmdd-hhMMss"
129 |                echo "$sbt_launch_mvn_snapshot_repo" ;;
130 |     *)         echo "$sbt_launch_mvn_release_repo" ;;
131 |   esac
132 | }
133 | 
134 | make_url () {
135 |   local version="$1"
136 | 
137 |   local base="${sbt_launch_repo:-$(url_base "$version")}"
138 | 
139 |   case "$version" in
140 |     0.7.*)     echo "$base/files/sbt-launch-0.7.7.jar" ;;
141 |     0.10.* )   echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;;
142 |     0.11.[12]) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;;
143 |     0.*)       echo "$base/org.scala-sbt/sbt-launch/$version/sbt-launch.jar" ;;
144 |     *)         echo "$base/org/scala-sbt/sbt-launch/$version/sbt-launch.jar" ;;
145 |   esac
146 | }
147 | 
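The two case expressions above make launcher-URL resolution mechanical. For illustration, the values these functions produce for the pinned release and for a hypothetical timestamped snapshot:

  $ make_url "0.13.13"
  http://repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/sbt-launch/0.13.13/sbt-launch.jar
  $ make_url "0.13.13-20161201-000000"    # "*-yyyymmdd-hhMMss" selects the snapshot repo
  https://repo.scala-sbt.org/scalasbt/ivy-snapshots/org.scala-sbt/sbt-launch/0.13.13-20161201-000000/sbt-launch.jar
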
148 | addJava ()     { vlog "[addJava] arg = '$1'" ; java_args+=("$1"); }
149 | addSbt ()      { vlog "[addSbt] arg = '$1'" ; sbt_commands+=("$1"); }
150 | addScalac ()   { vlog "[addScalac] arg = '$1'" ; scalac_args+=("$1"); }
151 | addResidual () { vlog "[residual] arg = '$1'" ; residual_args+=("$1"); }
152 | 
153 | addResolver () { addSbt "set resolvers += $1"; }
154 | addDebugger () { addJava "-Xdebug" ; addJava "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=$1"; }
155 | setThisBuild () {
156 |   vlog "[addBuild] args = '$@'"
157 |   local key="$1" && shift
158 |   addSbt "set $key in ThisBuild := $@"
159 | }
160 | setScalaVersion () {
161 |   [[ "$1" == *"-SNAPSHOT" ]] && addResolver 'Resolver.sonatypeRepo("snapshots")'
162 |   addSbt "++ $1"
163 | }
164 | setJavaHome () {
165 |   java_cmd="$1/bin/java"
166 |   setThisBuild javaHome "_root_.scala.Some(file(\"$1\"))"
167 |   export JAVA_HOME="$1"
168 |   export JDK_HOME="$1"
169 |   export PATH="$JAVA_HOME/bin:$PATH"
170 | }
171 | 
172 | getJavaVersion() { "$1" -version 2>&1 | grep -E -e '(java|openjdk) version' | awk '{ print $3 }' | tr -d \"; }
173 | 
174 | checkJava() {
175 |   # Warn if there is a Java version mismatch between PATH and JAVA_HOME/JDK_HOME
176 | 
177 |   [[ -n "$JAVA_HOME" && -e "$JAVA_HOME/bin/java" ]] && java="$JAVA_HOME/bin/java"
178 |   [[ -n "$JDK_HOME" && -e "$JDK_HOME/lib/tools.jar" ]] && java="$JDK_HOME/bin/java"
179 | 
180 |   if [[ -n "$java" ]]; then
181 |     pathJavaVersion=$(getJavaVersion java)
182 |     homeJavaVersion=$(getJavaVersion "$java")
183 |     if [[ "$pathJavaVersion" != "$homeJavaVersion" ]]; then
184 |       echoerr "Warning: Java version mismatch between PATH and JAVA_HOME/JDK_HOME, sbt will use the one in PATH"
185 |       echoerr "  Either: fix your PATH, remove JAVA_HOME/JDK_HOME or use -java-home"
186 |       echoerr "  java version from PATH:               $pathJavaVersion"
187 |       echoerr "  java version from JAVA_HOME/JDK_HOME: $homeJavaVersion"
188 |     fi
189 |   fi
190 | }
191 | 
192 | java_version () {
193 |   local version=$(getJavaVersion "$java_cmd")
194 |   vlog "Detected Java version: $version"
195 |   echo "${version:2:1}"
196 | }
197 | 
198 | # MaxPermSize critical on pre-8 JVMs but incurs noisy warning on 8+
199 | default_jvm_opts () {
200 |   local v="$(java_version)"
201 |   if [[ $v -ge 8 ]]; then
202 |     echo "$default_jvm_opts_common"
203 |   else
204 |     echo "-XX:MaxPermSize=384m $default_jvm_opts_common"
205 |   fi
206 | }
207 | 
208 | build_props_scala () {
209 |   if [[ -r "$buildProps" ]]; then
210 |     versionLine="$(grep '^build.scala.versions' "$buildProps")"
211 |     versionString="${versionLine##build.scala.versions=}"
212 |     echo "${versionString%% .*}"
213 |   fi
214 | }
215 | 
216 | execRunner () {
217 |   # print the arguments one to a line, quoting any containing spaces
218 |   vlog "# Executing command line:" && {
219 |     for arg; do
220 |       if [[ -n "$arg" ]]; then
221 |         if printf "%s\n" "$arg" | grep -q ' '; then
222 |           printf >&2 "\"%s\"\n" "$arg"
223 |         else
224 |           printf >&2 "%s\n" "$arg"
225 |         fi
226 |       fi
227 |     done
228 |     vlog ""
229 |   }
230 | 
231 |   [[ -n "$batch" ]] && exec </dev/null
232 |   exec "$@"
233 | }
234 | 
235 | jar_url () { make_url "$1"; }
236 | 
237 | is_cygwin () [[ "$(uname -a)" == CYGWIN* ]]
238 | 
239 | jar_file () {
240 |   is_cygwin \
241 |     && cygpath -w "$sbt_launch_dir/$1/sbt-launch.jar" \
242 |     || echo "$sbt_launch_dir/$1/sbt-launch.jar"
243 | }
244 | 
245 | download_url () {
246 |   local url="$1"
247 |   local jar="$2"
248 | 
249 |   echoerr "Downloading sbt launcher for $sbt_version:"
250 |   echoerr "  From  $url"
251 |   echoerr "    To  $jar"
252 | 
253 |   mkdir -p "${jar%/*}" && {
254 |     if which curl >/dev/null; then
255 |       curl --fail --silent --location "$url" --output "$jar"
256 |     elif which wget >/dev/null; then
257 |       wget -q -O "$jar" "$url"
258 |     fi
259 |   } && [[ -r "$jar" ]]
260 | }
261 | 
262 | acquire_sbt_jar () {
263 |   {
264 |     sbt_jar="$(jar_file "$sbt_version")"
265 |     [[ -r "$sbt_jar" ]]
266 |   } || {
267 |     sbt_jar="$HOME/.ivy2/local/org.scala-sbt/sbt-launch/$sbt_version/jars/sbt-launch.jar"
268 |     [[ -r "$sbt_jar" ]]
269 |   } || {
270 |     sbt_jar="$(jar_file "$sbt_version")"
271 |     download_url "$(make_url "$sbt_version")" "$sbt_jar"
272 |   }
273 | }
274 | 
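Together, jar_file and acquire_sbt_jar cache one launcher per sbt version under $HOME/.sbt/launchers, falling back to a download only on a miss. An illustrative first run (home directory hypothetical):

  $ ./sbt compile
  Downloading sbt launcher for 0.13.13:
    From  http://repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/sbt-launch/0.13.13/sbt-launch.jar
      To  /home/user/.sbt/launchers/0.13.13/sbt-launch.jar
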
275 | usage () {
276 |   set_sbt_version
277 |   cat <<EOM
278 | Usage: $script_name [options]
279 | 
280 | Note that options which are passed along to sbt begin with -- whereas
281 | options to this runner use a single dash. Any sbt command can be scheduled
282 | to run first by prefixing the command with --, so --warn, --error and so on
283 | are not special.
284 | 
285 | Arguments that this runner does not recognize are passed through to sbt
286 | unchanged, so project tasks such as compile and test work as usual.
287 | 
288 | # runner verbosity and debugging:
289 |   -h | -help         print this message
290 |   -v                 verbose operation (this runner is chattier)
291 |   -d                 alias for --debug
292 |   -w                 alias for --warn
293 |   -q                 alias for --error
294 |   -x                 debug this script
295 |   -trace <level>     display stack traces with a max of <level> frames (default: -1, traces suppressed)
296 |   -debug-inc         enable debugging log for the incremental compiler
297 |   -no-colors         disable ANSI color codes
298 |   -sbt-create        start sbt even if current directory contains no sbt project
299 |   -sbt-dir   <path>  path to global settings/plugins directory (default: ~/.sbt/<version>)
300 |   -sbt-boot  <path>  path to shared boot directory (default: ~/.sbt/boot in 0.11+)
301 |   -ivy       <path>  path to local Ivy repository (default: ~/.ivy2)
302 |   -no-share          use all local caches; no sharing
303 |   -offline           put sbt in offline mode
304 |   -jvm-debug <port>  Turn on JVM debugging, open at the given port.
305 |   -batch             Disable interactive mode
306 |   -prompt <expr>     Set the sbt prompt; in expr, 's' is the State and 'e' is Extracted
307 |   -script <file>     Run the specified file as a scala script
308 | 
309 | # sbt version (default: sbt.version from $buildProps if present, otherwise $sbt_release_version)
310 |   -sbt-force-latest         force the use of the latest release of sbt: $sbt_release_version
311 |   -sbt-version <version>    use the specified version of sbt (default: $sbt_release_version)
312 |   -sbt-dev                  use the latest pre-release version of sbt: $sbt_unreleased_version
313 |   -sbt-jar <path>           use the specified jar as the sbt launcher
314 |   -sbt-launch-dir <path>    directory to hold sbt launchers (default: $sbt_launch_dir)
315 |   -sbt-launch-repo <url>    repo url for downloading sbt launcher jar (default: $(url_base "$sbt_version"))
316 | 
317 | # scala version (default: as chosen by sbt)
318 |   -28                       use $latest_28
319 |   -29                       use $latest_29
320 |   -210                      use $latest_210
321 |   -211                      use $latest_211
322 |   -212                      use $latest_212
323 |   -scala-home <path>        use the scala build at the specified directory
324 |   -scala-version <version>  use the specified version of scala
325 |   -binary-version <version> use the specified scala version when searching for dependencies
326 | 
327 | # java version (default: java from PATH, currently $(java -version 2>&1 | grep version))
328 |   -java-home <path>         alternate JAVA_HOME
329 | 
330 | # passing options to the jvm - note it does NOT use JAVA_OPTS due to pollution
331 | # The default set is used if JVM_OPTS is unset and no -jvm-opts file is found
332 |   <default>        $(default_jvm_opts)
333 |   JVM_OPTS         environment variable holding either the jvm args directly, or
334 |                    the reference to a file containing jvm args if given path is prepended by '@' (e.g. '@/etc/jvmopts')
335 |                    Note: "@"-file is overridden by local '.jvmopts' or '-jvm-opts' argument.
336 |   -jvm-opts <path> file containing jvm args (if not given, .jvmopts in project root is used if present)
337 |   -Dkey=val        pass -Dkey=val directly to the jvm
338 |   -J-X             pass option -X directly to the jvm (-J is stripped)
339 | 
340 | # passing options to sbt, OR to this runner
341 |   SBT_OPTS         environment variable holding either the sbt args directly, or
342 |                    the reference to a file containing sbt args if given path is prepended by '@' (e.g. '@/etc/sbtopts')
343 |                    Note: "@"-file is overridden by local '.sbtopts' or '-sbt-opts' argument.
344 |   -sbt-opts <path> file containing sbt args (if not given, .sbtopts in project root is used if present)
345 |   -S-X             add -X to sbt's scalacOptions (-S is stripped)
346 | EOM
347 | }
348 | 
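A few hypothetical invocations showing how the conventions documented above combine:

  $ ./sbt -211 clean compile                      # schedules "++ 2.11.8" before the build
  $ ./sbt -J-Xmx2g -Dfile.encoding=UTF-8 test     # -J and -D args go straight to the jvm
  $ ./sbt -S-Xlog-implicits compile               # -S appends to scalacOptions in ThisBuild
  $ ./sbt -sbt-version 0.13.12 about              # pins sbt and rewrites $buildProps
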
349 | process_args () {
350 |   require_arg () {
351 |     local type="$1"
352 |     local opt="$2"
353 |     local arg="$3"
354 | 
355 |     if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then
356 |       die "$opt requires <$type> argument"
357 |     fi
358 |   }
359 |   while [[ $# -gt 0 ]]; do
360 |     case "$1" in
361 |       -h|-help) usage; exit 1 ;;
362 |       -v) verbose=true && shift ;;
363 |       -d) addSbt "--debug" && shift ;;
364 |       -w) addSbt "--warn" && shift ;;
365 |       -q) addSbt "--error" && shift ;;
366 |       -x) debugUs=true && shift ;;
367 |       -trace) require_arg integer "$1" "$2" && trace_level="$2" && shift 2 ;;
368 |       -ivy) require_arg path "$1" "$2" && addJava "-Dsbt.ivy.home=$2" && shift 2 ;;
369 |       -no-colors) addJava "-Dsbt.log.noformat=true" && shift ;;
370 |       -no-share) noshare=true && shift ;;
371 |       -sbt-boot) require_arg path "$1" "$2" && addJava "-Dsbt.boot.directory=$2" && shift 2 ;;
372 |       -sbt-dir) require_arg path "$1" "$2" && sbt_dir="$2" && shift 2 ;;
373 |       -debug-inc) addJava "-Dxsbt.inc.debug=true" && shift ;;
374 |       -offline) addSbt "set offline in Global := true" && shift ;;
375 |       -jvm-debug) require_arg port "$1" "$2" && addDebugger "$2" && shift 2 ;;
376 |       -batch) batch=true && shift ;;
377 |       -prompt) require_arg "expr" "$1" "$2" && setThisBuild shellPrompt "(s => { val e = Project.extract(s) ; $2 })" && shift 2 ;;
378 |       -script) require_arg file "$1" "$2" && sbt_script="$2" && addJava "-Dsbt.main.class=sbt.ScriptMain" && shift 2 ;;
379 | 
380 |       -sbt-create) sbt_create=true && shift ;;
381 |       -sbt-jar) require_arg path "$1" "$2" && sbt_jar="$2" && shift 2 ;;
382 |       -sbt-version) require_arg version "$1" "$2" && sbt_explicit_version="$2" && shift 2 ;;
383 |       -sbt-force-latest) sbt_explicit_version="$sbt_release_version" && shift ;;
384 |       -sbt-dev) sbt_explicit_version="$sbt_unreleased_version" && shift ;;
385 |       -sbt-launch-dir) require_arg path "$1" "$2" && sbt_launch_dir="$2" && shift 2 ;;
386 |       -sbt-launch-repo) require_arg path "$1" "$2" && sbt_launch_repo="$2" && shift 2 ;;
387 |       -scala-version) require_arg version "$1" "$2" && setScalaVersion "$2" && shift 2 ;;
388 |       -binary-version) require_arg version "$1" "$2" && setThisBuild scalaBinaryVersion "\"$2\"" && shift 2 ;;
389 |       -scala-home) require_arg path "$1" "$2" && setThisBuild scalaHome "_root_.scala.Some(file(\"$2\"))" && shift 2 ;;
390 |       -java-home) require_arg path "$1" "$2" && setJavaHome "$2" && shift 2 ;;
391 |       -sbt-opts) require_arg path "$1" "$2" && sbt_opts_file="$2" && shift 2 ;;
392 |       -jvm-opts) require_arg path "$1" "$2" && jvm_opts_file="$2" && shift 2 ;;
393 | 
394 |       -D*) addJava "$1" && shift ;;
395 |       -J*) addJava "${1:2}" && shift ;;
396 |       -S*) addScalac "${1:2}" && shift ;;
397 |       -28) setScalaVersion "$latest_28" && shift ;;
398 |       -29) setScalaVersion "$latest_29" && shift ;;
399 |       -210) setScalaVersion "$latest_210" && shift ;;
400 |       -211) setScalaVersion "$latest_211" && shift ;;
401 |       -212) setScalaVersion "$latest_212" && shift ;;
402 |       new) sbt_new=true && sbt_explicit_version="$sbt_release_version" && addResidual "$1" && shift ;;
403 |       *) addResidual "$1" && shift ;;
404 |     esac
405 |   done
406 | }
407 | 
408 | # process the direct command line arguments
409 | process_args "$@"
410 | 
411 | # skip #-styled comments and blank lines
412 | readConfigFile() {
413 |   local end=false
414 |   until $end; do
415 |     read || end=true
416 |     [[ $REPLY =~ ^# ]] || [[ -z $REPLY ]] || echo "$REPLY"
417 |   done < "$1"
418 | }
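readConfigFile is what strips the #-comments and blank lines when the .sbtopts/.jvmopts files are read below. Given a hypothetical file (the function is only callable inside the script; shown here as if interactive for illustration):

  $ printf '# defaults for quark\n-batch\n\n-no-colors\n' > .sbtopts
  $ readConfigFile .sbtopts
  -batch
  -no-colors
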
419 | 
420 | # if there are file/environment sbt_opts, process again so we
421 | # can supply args to this runner
422 | if [[ -r "$sbt_opts_file" ]]; then
423 |   vlog "Using sbt options defined in file $sbt_opts_file"
424 |   while read opt; do extra_sbt_opts+=("$opt"); done < <(readConfigFile "$sbt_opts_file")
425 | elif [[ -n "$SBT_OPTS" && ! ("$SBT_OPTS" =~ ^@.*) ]]; then
426 |   vlog "Using sbt options defined in variable \$SBT_OPTS"
427 |   extra_sbt_opts=( $SBT_OPTS )
428 | else
429 |   vlog "No extra sbt options have been defined"
430 | fi
431 | 
432 | [[ -n "${extra_sbt_opts[*]}" ]] && process_args "${extra_sbt_opts[@]}"
433 | 
434 | # reset "$@" to the residual args
435 | set -- "${residual_args[@]}"
436 | argumentCount=$#
437 | 
438 | # set sbt version
439 | set_sbt_version
440 | 
441 | checkJava
442 | 
443 | # only exists in 0.12+
444 | setTraceLevel() {
445 |   case "$sbt_version" in
446 |     "0.7."* | "0.10."* | "0.11."* ) echoerr "Cannot set trace level in sbt version $sbt_version" ;;
447 |     *) setThisBuild traceLevel $trace_level ;;
448 |   esac
449 | }
450 | 
451 | # set scalacOptions if we were given any -S opts
452 | [[ ${#scalac_args[@]} -eq 0 ]] || addSbt "set scalacOptions in ThisBuild += \"${scalac_args[@]}\""
453 | 
454 | # Update build.properties on disk to set explicit version - sbt gives us no choice
455 | [[ -n "$sbt_explicit_version" && -z "$sbt_new" ]] && update_build_props_sbt "$sbt_explicit_version"
456 | vlog "Detected sbt version $sbt_version"
457 | 
458 | if [[ -n "$sbt_script" ]]; then
459 |   residual_args=( $sbt_script ${residual_args[@]} )
460 | else
461 |   # no args - alert them there's stuff in here
462 |   (( argumentCount > 0 )) || {
463 |     vlog "Starting $script_name: invoke with -help for other options"
464 |     residual_args=( shell )
465 |   }
466 | fi
467 | 
468 | # verify this is an sbt dir, -create was given or user attempts to run a scala script
469 | [[ -r ./build.sbt || -d ./project || -n "$sbt_create" || -n "$sbt_script" || -n "$sbt_new" ]] || {
470 |   cat <<EOM
471 | $(pwd) doesn't appear to be an sbt project.
472 | If you want to start sbt anyway, run:
473 |   $0 -sbt-create
474 | EOM
475 |   exit 1
476 | }
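
When none of those escape hatches apply, the guard aborts before any launcher work happens. An illustrative transcript (directory and script path hypothetical):

  $ cd /tmp/scratch && ~/quark/sbt
  /tmp/scratch doesn't appear to be an sbt project.
  If you want to start sbt anyway, run:
    ~/quark/sbt -sbt-create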