├── .dockerignore
├── project
├── build.properties
├── Dependencies.scala
└── plugins.sbt
├── quill-spark
└── src
│ ├── test
│ ├── resources
│ │ └── .placeholder
│ └── scala
│ │ └── io
│ │ └── getquill
│ │ └── context
│ │ └── spark
│ │ └── package.scala
│ └── main
│ └── scala
│ └── io
│ └── getquill
│ └── context
│ └── spark
│ ├── Binding.scala
│ ├── norm
│ ├── QuestionMarkEscaper.scala
│ └── EscapeQuestionMarks.scala
│ └── Decoders.scala
├── quill-sql-test
└── src
│ └── test
│ ├── resources
│ └── placeholder
│ ├── scala
│ └── io
│ │ └── getquill
│ │ └── context
│ │ └── sql
│ │ ├── idiom
│ │ ├── test
│ │ ├── MySQL5DialectSpec.scala
│ │ ├── MySQLDialectSpec.scala
│ │ ├── H2DialectSpec.scala
│ │ └── OffsetWithoutLimitWorkaroundSpec.scala
│ │ ├── util
│ │ └── StringOps.scala
│ │ ├── TestEncoders.scala
│ │ ├── TestDecoders.scala
│ │ ├── PrettyPrintingSpec.scala
│ │ ├── norm
│ │ └── ExpandMappedInfixSpec.scala
│ │ ├── BindVariablesSpec.scala
│ │ ├── ProductSpec.scala
│ │ └── base
│ │ └── ArrayOpsSpec.scala
│ ├── scala-2.12
│ └── io
│ │ └── getquill
│ │ └── Versions.scala
│ └── scala-2.13
│ └── io
│ └── getquill
│ └── Versions.scala
├── quill.png
├── example.gif
├── codecov.yml
├── quill-core
└── src
│ ├── main
│ └── scala
│ │ └── io
│ │ └── getquill
│ │ ├── Udt.scala
│ │ ├── ast
│ │ └── AstOps.scala
│ │ ├── util
│ │ ├── NullCheck.scala
│ │ ├── PrintMac.scala
│ │ ├── EnableReflectiveCalls.scala
│ │ ├── OptionalTypecheck.scala
│ │ ├── PrintMacMacro.scala
│ │ ├── LoadObject.scala
│ │ ├── ExceptionOps.scala
│ │ ├── OrderedGroupByExt.scala
│ │ └── QueryLogger.scala
│ │ ├── dsl
│ │ ├── CoreDsl.scala
│ │ └── QueryDslMacro.scala
│ │ ├── log
│ │ └── ContextLog.scala
│ │ ├── quotation
│ │ ├── IsDynamic.scala
│ │ └── Bindings.scala
│ │ ├── quat
│ │ ├── package.scala
│ │ └── QuatMacro.scala
│ │ ├── context
│ │ ├── ContextVerbPrepareLambda.scala
│ │ ├── VerifyFreeVariables.scala
│ │ ├── ContextVerbStream.scala
│ │ ├── ContextVerbPrepare.scala
│ │ └── mirror
│ │ │ └── Row.scala
│ │ ├── ModelMacro.scala
│ │ ├── idiom
│ │ └── LoadNaming.scala
│ │ ├── monad
│ │ └── IOMonadMacro.scala
│ │ └── Quoted.scala
│ └── test
│ ├── scala
│ └── io
│ │ └── getquill
│ │ ├── MirrorContextSpec.scala
│ │ ├── util
│ │ ├── ShowSpec.scala
│ │ ├── InterleaveSpec.scala
│ │ └── LogToFileSpec.scala
│ │ ├── MoreAstOps.scala
│ │ ├── context
│ │ └── mirror
│ │ │ └── RowSpec.scala
│ │ ├── VIdent.scala
│ │ ├── norm
│ │ ├── NormalizeAggregationIdentSpec.scala
│ │ ├── NormalizeCachingSpec.scala
│ │ ├── capture
│ │ │ └── AvoidCaptureSpec.scala
│ │ └── NormalizeSpec.scala
│ │ ├── UpperCaseNonDefault.scala
│ │ ├── MirrorContexts.scala
│ │ ├── QueryProbingSpec.scala
│ │ ├── quotation
│ │ └── IsDynamicSpec.scala
│ │ ├── MirrorIdiomExt.scala
│ │ └── AsyncMirrorContextSpec.scala
│ └── resources
│ └── logback.xml
├── quill-engine
└── src
│ └── main
│ └── scala
│ └── io
│ └── getquill
│ ├── Embedded.scala
│ ├── QueryProbing.scala
│ ├── MappedEncoding.scala
│ ├── dsl
│ ├── OrdDsl.scala
│ ├── UnlimitedTuple.scala
│ └── InfixDsl.scala
│ ├── context
│ ├── mirror
│ │ └── MirrorSession.scala
│ ├── cassandra
│ │ └── ExpandMappedInfixCassandra.scala
│ └── RowContext.scala
│ ├── sql
│ ├── idiom
│ │ ├── QuestionMarkBindVariables.scala
│ │ ├── PositionalBindVariables.scala
│ │ └── ConcatSupport.scala
│ ├── norm
│ │ └── ExpandMappedInfix.scala
│ └── Common.scala
│ ├── ast
│ ├── JoinType.scala
│ ├── Transform.scala
│ └── CollectAst.scala
│ ├── norm
│ ├── capture
│ │ ├── TemporaryIdent.scala
│ │ └── AvoidCapture.scala
│ ├── NormalizeStringConcat.scala
│ ├── DialectBehaviors.scala
│ ├── NormalizeCaching.scala
│ ├── OrderTerms.scala
│ └── DisablePhase.scala
│ ├── util
│ ├── CollectTry.scala
│ ├── LoadConfig.scala
│ ├── IndentUtil.scala
│ ├── Interleave.scala
│ ├── Show.scala
│ └── Cache.scala
│ ├── quotation
│ ├── NonQuotedException.scala
│ └── QuatException.scala
│ ├── ReturnAction.scala
│ ├── quat
│ └── QuatNestingHelper.scala
│ └── idiom
│ ├── Statement.scala
│ └── Idiom.scala
├── quill-codegen-jdbc
└── src
│ ├── test
│ ├── resources
│ │ ├── application.conf
│ │ ├── h2_schema_precursor.sql
│ │ ├── logback.xml
│ │ ├── schema_simple.sql
│ │ ├── schema_snakecase.sql
│ │ ├── schema_casesensitive.sql
│ │ ├── application-codegen.conf
│ │ ├── schema_twotable.sql
│ │ ├── schema_snakecase_twotable.sql
│ │ └── schema_snakecase_twotable_differentcolumns.sql
│ └── scala
│ │ └── io
│ │ └── getquill
│ │ └── codegen
│ │ └── util
│ │ ├── TryOps.scala
│ │ ├── OptionOps.scala
│ │ └── SchemaMaker.scala
│ └── main
│ └── scala
│ └── io
│ └── getquill
│ └── codegen
│ └── jdbc
│ ├── model
│ ├── JdbcTypeInfo.scala
│ └── JdbcTypes.scala
│ └── util
│ └── DiscoverDatabaseType.scala
├── docs
├── etc
│ ├── IntelliJ-SBT-Settings.png
│ ├── IntelliJ-Run-Debug-Config.png
│ ├── IntelliJ-Debug-App-Launcher.png
│ └── IntelliJ-SBT-Settings-Additional.png
├── package.json
└── sidebars.js
├── .github
├── dependabot.yml
├── workflows
│ ├── release-drafter.yml
│ └── scala-steward.yml
├── release-drafter.yml
├── ISSUE_TEMPLATE.md
└── PULL_REQUEST_TEMPLATE.md
├── quill-orientdb
└── src
│ ├── test
│ ├── resources
│ │ └── application.conf
│ └── scala
│ │ └── io
│ │ └── getquill
│ │ └── context
│ │ └── orientdb
│ │ ├── BindVariablesSpec.scala
│ │ ├── DecodeNullSpec.scala
│ │ └── PeopleOrientDBSpec.scala
│ └── main
│ └── scala
│ └── io
│ └── getquill
│ ├── OrientDBContextConfig.scala
│ ├── context
│ └── orientdb
│ │ ├── dsl
│ │ └── OrientDBDsl.scala
│ │ └── encoding
│ │ ├── CollectionEncoders.scala
│ │ └── CollectionDecoders.scala
│ └── OrientDBMirrorContext.scala
├── quill-jdbc-zio
└── src
│ ├── main
│ └── scala
│ │ └── io
│ │ └── getquill
│ │ └── JsonValue.scala
│ └── test
│ ├── scala
│ └── io
│ │ └── getquill
│ │ ├── misc
│ │ ├── package.scala
│ │ ├── PeopleZioOuterJdbcSpec.scala
│ │ └── PrepareJdbcSpec.scala
│ │ ├── h2
│ │ ├── h2.scala
│ │ └── PrepareJdbcSpec.scala
│ │ ├── sqlite
│ │ ├── sqlite.scala
│ │ └── PrepareJdbcSpec.scala
│ │ ├── mysql
│ │ ├── mysql.scala
│ │ └── PrepareJdbcSpec.scala
│ │ ├── oracle
│ │ ├── oracle.scala
│ │ └── PrepareJdbcSpec.scala
│ │ ├── postgres
│ │ └── package.scala
│ │ ├── sqlserver
│ │ └── sqlserver.scala
│ │ ├── TypeParamExtensionTest.scala
│ │ ├── PeopleZioSpec.scala
│ │ ├── examples
│ │ └── other
│ │ │ ├── ZioApp.scala
│ │ │ ├── ZioAppDataSource.scala
│ │ │ ├── ZioAppManual.scala
│ │ │ ├── PlainApp.scala
│ │ │ ├── ZioAppImplicitEnv.scala
│ │ │ ├── PlainAppDataSource.scala
│ │ │ ├── PlainAppDataSource2.scala
│ │ │ └── ZioAppExample.scala
│ │ └── mock
│ │ └── Introspection.scala
│ └── resources
│ ├── logback.xml
│ └── logback-test.xml
├── debug_sbt.sh
├── quill-codegen
└── src
│ └── main
│ └── scala
│ └── io
│ └── getquill
│ └── codegen
│ ├── util
│ ├── StringSeqUtil.scala
│ ├── ScalaLangUtil.scala
│ └── MapExtensions.scala
│ ├── gen
│ └── QuerySchemaNaming.scala
│ └── model
│ ├── Typing.scala
│ ├── Stereotyper.scala
│ └── StereotypedModel.scala
├── .git-blame-ignore-revs
├── quill-cassandra
└── src
│ ├── main
│ └── scala
│ │ └── io
│ │ └── getquill
│ │ ├── context
│ │ ├── cassandra
│ │ │ ├── util
│ │ │ │ ├── ClassTagConversions.scala
│ │ │ │ ├── UdtMetaUtils.scala
│ │ │ │ └── FutureConversions.scala
│ │ │ ├── encoding
│ │ │ │ ├── CassandraType.scala
│ │ │ │ ├── CassandraMapper.scala
│ │ │ │ ├── Encodings.scala
│ │ │ │ ├── UdtEncoding.scala
│ │ │ │ └── UdtOps.scala
│ │ │ ├── UdtMetaDsl.scala
│ │ │ ├── UdtMetaDslMacro.scala
│ │ │ └── PrepareStatementCache.scala
│ │ └── Caches.scala
│ │ ├── CassandraCqlSessionContext.scala
│ │ └── CassandraContextConfig.scala
│ └── test
│ ├── scala
│ └── io
│ │ └── getquill
│ │ └── context
│ │ └── cassandra
│ │ ├── CassandraContextConfigSpec.scala
│ │ ├── udt
│ │ ├── UdtSpec.scala
│ │ ├── UdtMetaDslSpec.scala
│ │ └── UdtEncodingMirrorContextSpec.scala
│ │ ├── CassandraTestEntities.scala
│ │ ├── BindVariablesSpec.scala
│ │ ├── CollectionsSpec.scala
│ │ ├── package.scala
│ │ ├── ExpandMappedInfixSpec.scala
│ │ └── PeopleCassandraSpec.scala
│ └── resources
│ ├── logback-test.xml
│ └── application.conf
├── quill-jdbc-test-h2
└── src
│ └── test
│ └── scala
│ └── io
│ └── getquill
│ └── context
│ └── jdbc
│ └── h2
│ ├── package.scala
│ ├── JdbcEncodingSpec.scala
│ ├── DepartmentsJdbcSpec.scala
│ ├── BatchValuesJdbcSpec.scala
│ └── PrepareJdbcSpec.scala
├── quill-jdbc
└── src
│ ├── main
│ └── scala
│ │ └── io
│ │ └── getquill
│ │ ├── context
│ │ └── jdbc
│ │ │ ├── BooleanObjectEncoding.scala
│ │ │ ├── BooleanIntEncoding.scala
│ │ │ ├── UUIDStringEncoding.scala
│ │ │ ├── UUIDObjectEncoding.scala
│ │ │ ├── ResultSetExtractor.scala
│ │ │ ├── BaseContexts.scala
│ │ │ └── JdbcContextBase.scala
│ │ ├── JdbcContextConfig.scala
│ │ ├── H2JdbcContext.scala
│ │ ├── MysqlJdbcContext.scala
│ │ ├── OracleJdbcContext.scala
│ │ ├── SqliteJdbcContext.scala
│ │ ├── PostgresJdbcContext.scala
│ │ └── SqlServerJdbcContext.scala
│ └── test
│ ├── scala
│ └── io
│ │ └── getquill
│ │ ├── TypeParamExtensionTest.scala
│ │ └── context
│ │ └── jdbc
│ │ └── JdbcContextConfigSpec.scala
│ └── resources
│ └── logback.xml
├── quill-jdbc-test-mysql
└── src
│ └── test
│ └── scala
│ └── io
│ └── getquill
│ └── context
│ └── jdbc
│ └── mysql
│ ├── package.scala
│ ├── OnConflictJdbcSpec.scala
│ ├── DepartmentsJdbcSpec.scala
│ ├── BatchValuesJdbcSpec.scala
│ ├── JdbcEncodingSpec.scala
│ └── PrepareJdbcSpec.scala
├── quill-jdbc-test-oracle
└── src
│ └── test
│ └── scala
│ └── io
│ └── getquill
│ └── context
│ └── jdbc
│ └── oracle
│ ├── oracle.scala
│ ├── ScalarValueSpec.scala
│ └── PrepareJdbcSpec.scala
├── quill-test-kit
└── src
│ └── test
│ └── scala
│ └── io
│ └── getquill
│ └── context
│ └── sql
│ ├── TestEncoders.scala
│ ├── TestDecoders.scala
│ ├── ProductSpec.scala
│ └── base
│ └── ArrayOpsSpec.scala
├── quill-jdbc-test-postgres
└── src
│ └── test
│ └── scala
│ └── io
│ └── getquill
│ └── context
│ └── jdbc
│ └── postgres
│ ├── package.scala
│ ├── JdbcArrayOpsSpec.scala
│ ├── PrettyPrintingSpec.scala
│ ├── BatchValuesJdbcSpec.scala
│ ├── DepartmentsJdbcSpec.scala
│ └── PrepareJdbcSpec.scala
├── quill-jdbc-test-sqlserver
└── src
│ └── test
│ └── scala
│ └── io
│ └── getquill
│ └── context
│ └── jdbc
│ └── sqlserver
│ ├── package.scala
│ └── DepartmentsJdbcSpec.scala
├── quill-cassandra-zio
└── src
│ ├── test
│ ├── resources
│ │ └── application.conf
│ └── scala
│ │ └── io
│ │ └── getquill
│ │ └── context
│ │ └── cassandra
│ │ └── zio
│ │ ├── package.scala
│ │ ├── examples
│ │ └── other
│ │ │ ├── ExampleApp.scala
│ │ │ ├── PlainApp.scala
│ │ │ └── ExampleAppImplicitEnv.scala
│ │ └── QueryResultTypeCassandraZioSpec.scala
│ └── main
│ └── scala
│ └── io
│ └── getquill
│ └── cassandrazio
│ └── Probing.scala
├── quill-jdbc-test-sqlite
└── src
│ └── test
│ └── scala
│ └── io
│ └── getquill
│ └── context
│ └── jdbc
│ └── sqlite
│ ├── package.scala
│ ├── JdbcEncodingSpec.scala
│ ├── OnConflictJdbcSpec.scala
│ ├── DepartmentsJdbcSpec.scala
│ └── PrepareJdbcSpec.scala
├── quill-sql
└── src
│ └── main
│ └── scala
│ └── io
│ └── getquill
│ ├── SqlMirrorContext.scala
│ └── context
│ └── sql
│ └── dsl
│ └── SqlDsl.scala
├── scripts
└── start_containers.sh
├── quill-util
└── src
│ └── main
│ └── scala
│ └── io
│ └── getquill
│ └── util
│ ├── ThrowableOps.scala
│ └── ScalafmtFormat.scala
├── env.sh
├── quill-cassandra-pekko
└── src
│ └── test
│ ├── resources
│ └── application.conf
│ └── scala
│ └── io
│ └── getquill
│ └── context
│ └── cassandra
│ └── pekko
│ └── DecodeNullSpec.scala
├── quill-zio
└── src
│ └── main
│ └── scala
│ └── io
│ └── getquill
│ └── context
│ └── qzio
│ └── ZioTranslateContext.scala
├── .gitignore
├── .scalafmt.conf
└── quill-doobie
└── src
└── test
└── scala
└── io
└── getquill
└── doobie
└── issue
└── Issue1067.scala
/.dockerignore:
--------------------------------------------------------------------------------
1 | *
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=1.11.4
2 |
--------------------------------------------------------------------------------
/quill-spark/src/test/resources/.placeholder:
--------------------------------------------------------------------------------
1 | .
2 |
--------------------------------------------------------------------------------
/quill-sql-test/src/test/resources/placeholder:
--------------------------------------------------------------------------------
1 | placeholder
2 |
--------------------------------------------------------------------------------
/quill-sql-test/src/test/scala/io/getquill/context/sql/idiom/test:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/quill.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zio/zio-quill/HEAD/quill.png
--------------------------------------------------------------------------------
/example.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zio/zio-quill/HEAD/example.gif
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
1 | coverage:
2 | status:
3 | patch:
4 | default: {}
5 | comment: off
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/Udt.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | trait Udt
4 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/Embedded.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | trait Embedded
4 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/ast/AstOps.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.ast
2 |
3 | object AstOps {}
4 |
--------------------------------------------------------------------------------
/quill-codegen-jdbc/src/test/resources/application.conf:
--------------------------------------------------------------------------------
1 | ../../../../quill-jdbc/src/test/resources/application.conf
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/QueryProbing.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | trait QueryProbing
4 |
--------------------------------------------------------------------------------
/docs/etc/IntelliJ-SBT-Settings.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zio/zio-quill/HEAD/docs/etc/IntelliJ-SBT-Settings.png
--------------------------------------------------------------------------------
/docs/etc/IntelliJ-Run-Debug-Config.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zio/zio-quill/HEAD/docs/etc/IntelliJ-Run-Debug-Config.png
--------------------------------------------------------------------------------
/docs/etc/IntelliJ-Debug-App-Launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zio/zio-quill/HEAD/docs/etc/IntelliJ-Debug-App-Launcher.png
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/MappedEncoding.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | case class MappedEncoding[I, O](f: I => O)
4 |
--------------------------------------------------------------------------------
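As a usage sketch: `MappedEncoding` is applied once per direction to map a custom type to one the database understands. The `UserId` wrapper and the `UserIdMapping` object below are hypothetical, introduced only for illustration.

    import io.getquill.MappedEncoding

    // Hypothetical wrapper type, for illustration only
    final case class UserId(value: Long)

    object UserIdMapping {
      // One MappedEncoding per direction: encode UserId -> Long, decode Long -> UserId
      implicit val encodeUserId: MappedEncoding[UserId, Long] = MappedEncoding(_.value)
      implicit val decodeUserId: MappedEncoding[Long, UserId] = MappedEncoding(UserId(_))
    }
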
/docs/etc/IntelliJ-SBT-Settings-Additional.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zio/zio-quill/HEAD/docs/etc/IntelliJ-SBT-Settings-Additional.png
--------------------------------------------------------------------------------
/docs/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@zio.dev/zio-quill",
3 | "description": "ZIO Quill Documentation",
4 | "license": "Apache-2.0"
5 | }
6 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: "github-actions"
4 | directory: "/"
5 | schedule:
6 | interval: "weekly"
--------------------------------------------------------------------------------
/quill-codegen-jdbc/src/test/resources/h2_schema_precursor.sql:
--------------------------------------------------------------------------------
1 | DROP ALL OBJECTS;
2 |
3 | CREATE SCHEMA IF NOT EXISTS Alpha;
4 | CREATE SCHEMA IF NOT EXISTS Bravo;
--------------------------------------------------------------------------------
/quill-orientdb/src/test/resources/application.conf:
--------------------------------------------------------------------------------
1 | ctx.dbUrl="remote:"${?ORIENTDB_HOST}":"${?ORIENTDB_PORT}"/GratefulDeadConcerts"
2 | ctx.username=root
3 | ctx.password=root
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/util/NullCheck.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | object NullCheck {
4 | def product(v: AnyRef) = v == null || v == None
5 | }
6 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/main/scala/io/getquill/JsonValue.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | final case class JsonValue[T](value: T) extends AnyVal
4 | final case class JsonbValue[T](value: T) extends AnyVal
5 |
--------------------------------------------------------------------------------
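A minimal sketch of how these wrappers are meant to be used: marking a field as a json/jsonb column in a row class. `Payload` and `Event` are hypothetical types, and the actual JSON encoders are assumed to come from a Postgres context in this module.

    import io.getquill.{JsonValue, JsonbValue}

    // Hypothetical payload and row types, for illustration only
    final case class Payload(tags: List[String])
    final case class Event(id: Int, data: JsonValue[Payload], meta: JsonbValue[Payload])
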
/quill-core/src/main/scala/io/getquill/util/PrintMac.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import scala.language.experimental.macros
4 |
5 | object PrintMac {
6 | def apply(value: Any): Unit = macro PrintMacMacro.apply
7 | }
8 |
--------------------------------------------------------------------------------
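A quick usage sketch: wrapping any expression in `PrintMac` prints its typed tree during compilation (via `PrintMacMacro` below) and leaves a unit value at runtime.

    import io.getquill.util.PrintMac

    object PrintMacDemo {
      // At compile time this prints the tree of the expression; at runtime it is just ()
      PrintMac(List(1, 2, 3).map(_ + 1))
    }
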
/quill-engine/src/main/scala/io/getquill/dsl/OrdDsl.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.dsl
2 |
3 | import io.getquill.Ord
4 |
5 | private[getquill] trait OrdDsl {
6 |
7 | implicit def implicitOrd[T]: Ord[T] = Ord.ascNullsFirst
8 | }
9 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/context/mirror/MirrorSession.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.mirror
2 |
3 | case class MirrorSession(name: String)
4 | object MirrorSession {
5 | def default = MirrorSession("DefaultMirrorSession")
6 | }
7 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/sql/idiom/QuestionMarkBindVariables.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql.idiom
2 |
3 | trait QuestionMarkBindVariables { self: SqlIdiom =>
4 |
5 | override def liftingPlaceholder(index: Int): String = s"?"
6 | }
7 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/sql/idiom/PositionalBindVariables.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql.idiom
2 |
3 | trait PositionalBindVariables { self: SqlIdiom =>
4 |
5 | override def liftingPlaceholder(index: Int): String = s"$$${index + 1}"
6 | }
7 |
--------------------------------------------------------------------------------
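For contrast with `QuestionMarkBindVariables` above, a standalone sketch of the two placeholder schemes; the helpers below merely restate the one-line implementations and are not part of `SqlIdiom`.

    object PlaceholderDemo {
      // Question-mark style: every lift renders as "?"
      def questionMark(index: Int): String = "?"
      // Positional style: lifts render as 1-based positional parameters
      def positional(index: Int): String = s"$$${index + 1}"

      assert(questionMark(0) == "?")
      assert(positional(0) == "$1")
      assert(positional(2) == "$3")
    }
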
/quill-sql-test/src/test/scala-2.12/io/getquill/Versions.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | /**
4 | * Comes from
5 | * https://discord.com/channels/632150470000902164/632150470000902166/1154004784806891571
6 | */
7 | object Versions {
8 | val scala = "2.12"
9 | }
10 |
--------------------------------------------------------------------------------
/quill-sql-test/src/test/scala-2.13/io/getquill/Versions.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | /**
4 | * Comes from
5 | * https://discord.com/channels/632150470000902164/632150470000902166/1154004784806891571
6 | */
7 | object Versions {
8 | val scala = "2.13"
9 | }
10 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/ast/JoinType.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.ast
2 |
3 | sealed trait JoinType
4 |
5 | case object InnerJoin extends JoinType
6 | case object LeftJoin extends JoinType
7 | case object RightJoin extends JoinType
8 | case object FullJoin extends JoinType
9 |
--------------------------------------------------------------------------------
/debug_sbt.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | JAVA_OPTS='-Xmx8G -DdebugMacro=true -DexcludeTests=false -Dquill.macro.log.pretty=true -Dquill.macro.log=true -Dquill.trace.enabled=true -Dquill.trace.color=true -Dquill.trace.opinion=false -Dquill.trace.ast.simple=false -Dquill.trace.types=' sbt -jvm-debug 5005
4 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/dsl/CoreDsl.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.dsl
2 |
3 | private[getquill] trait CoreDsl
4 | extends InfixDsl
5 | with OrdDsl
6 | with QueryDsl
7 | with QuotationDsl
8 | with EncodingDsl
9 | with MetaDsl
10 | with DynamicQueryDsl
11 |
--------------------------------------------------------------------------------
/quill-codegen/src/main/scala/io/getquill/codegen/util/StringSeqUtil.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.codegen.util
2 |
3 | object StringSeqUtil {
4 | implicit final class StringSeqExt(private val seq: Seq[String]) extends AnyVal {
5 | def pruneEmpty: Seq[String] = seq.filterNot(_.trim == "")
6 | }
7 | }
8 |
--------------------------------------------------------------------------------
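A small usage sketch of the `pruneEmpty` extension:

    import io.getquill.codegen.util.StringSeqUtil._

    object PruneEmptyDemo {
      // Empty and whitespace-only entries are dropped
      val cleaned: Seq[String] = Seq("a", "", "   ", "b").pruneEmpty
      assert(cleaned == Seq("a", "b"))
    }
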
/.git-blame-ignore-revs:
--------------------------------------------------------------------------------
1 | # Scala Steward: Reformat with scalafmt 3.7.5
2 | d8a77dfb017feae820e38eea0d7b5128b13a737d
3 |
4 | # Scala Steward: Reformat with scalafmt 3.8.3
5 | 1b7c658737faa53ec506fb403892320773282488
6 |
7 | # Scala Steward: Reformat with scalafmt 3.8.6
8 | c9094fe152787d45d372562fe6743e4deaaea82d
9 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/log/ContextLog.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.log
2 |
3 | import io.getquill.util.ContextLogger
4 |
5 | object ContextLog {
6 | private val logger = ContextLogger(this.getClass)
7 |
8 | def apply(str: String): Unit =
9 | logger.underlying.error(str)
10 | }
11 |
--------------------------------------------------------------------------------
/quill-cassandra/src/main/scala/io/getquill/context/cassandra/util/ClassTagConversions.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra.util
2 |
3 | import scala.reflect.ClassTag
4 |
5 | object ClassTagConversions {
6 | def asClassOf[T](implicit tag: ClassTag[T]): Class[T] = tag.runtimeClass.asInstanceOf[Class[T]]
7 | }
8 |
--------------------------------------------------------------------------------
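A usage sketch of `asClassOf`, which recovers the runtime `Class` from an implicit `ClassTag`:

    import io.getquill.context.cassandra.util.ClassTagConversions.asClassOf

    object AsClassOfDemo {
      // Resolves the runtime Class for T from its ClassTag
      val stringClass: Class[String] = asClassOf[String]
      assert(stringClass == classOf[String])
    }
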
/quill-spark/src/main/scala/io/getquill/context/spark/Binding.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.spark
2 |
3 | import org.apache.spark.sql.Dataset
4 |
5 | sealed trait Binding
6 |
7 | case class DatasetBinding[T](ds: Dataset[T]) extends Binding
8 |
9 | case class ValueBinding(str: String) extends Binding
10 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/quotation/IsDynamic.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.quotation
2 |
3 | import io.getquill.ast.Ast
4 | import io.getquill.ast.CollectAst
5 | import io.getquill.ast.Dynamic
6 |
7 | object IsDynamic {
8 | def apply(a: Ast) =
9 | CollectAst(a) { case d: Dynamic => d }.nonEmpty
10 | }
11 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/norm/capture/TemporaryIdent.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.norm.capture
2 |
3 | import io.getquill.ast.Ident
4 |
5 | object TemporaryIdent {
6 | def unapply(id: Ident) =
7 | if (id.name.matches("\\[tmp_[0-9A-Za-z]+\\]"))
8 | Some(id)
9 | else
10 | None
11 | }
12 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/misc/package.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.context.qzio.ImplicitSyntax.Implicit
4 |
5 | package object misc {
6 | implicit val pool = Implicit(io.getquill.postgres.pool)
7 | object testContext extends PostgresZioJdbcContext(Literal) with TestEntities
8 | }
9 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/quat/package.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import scala.language.experimental.macros
4 |
5 | /**
6 | * Convenience API that allows construction of a Quat using `Quat.from[T]`
7 | */
8 | package object quat {
9 |
10 | def quatOf[T]: Quat = macro QuatMacro.makeQuat[T]
11 | }
12 |
--------------------------------------------------------------------------------
/.github/workflows/release-drafter.yml:
--------------------------------------------------------------------------------
1 | name: Release Drafter
2 |
3 | on:
4 | push:
5 | branches: ['master']
6 |
7 | jobs:
8 | update_release_draft:
9 | runs-on: ubuntu-20.04
10 | steps:
11 | - uses: release-drafter/release-drafter@v6
12 | env:
13 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
14 |
--------------------------------------------------------------------------------
/quill-sql-test/src/test/scala/io/getquill/context/sql/util/StringOps.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql.util
2 |
3 | object StringOps {
4 |
5 | implicit final class StringOpsExt(private val str: String) extends AnyVal {
6 | def collapseSpace: String =
7 | str.stripMargin.replaceAll("\\s+", " ").trim
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/dsl/UnlimitedTuple.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.dsl
2 |
3 | import io.getquill.quotation.NonQuotedException
4 | import scala.annotation.compileTimeOnly
5 |
6 | object UnlimitedTuple {
7 | @compileTimeOnly(NonQuotedException.message)
8 | def apply(values: Any*): Nothing = NonQuotedException()
9 | }
10 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/h2/h2.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.ZioSpec.runLayerUnsafe
4 | import io.getquill.jdbczio.Quill
5 |
6 | package object h2 {
7 | val pool = runLayerUnsafe(Quill.DataSource.fromPrefix("testH2DB"))
8 | object testContext extends Quill.H2(Literal, pool) with TestEntities
9 | }
10 |
--------------------------------------------------------------------------------
/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraType.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra.encoding
2 |
3 | /**
4 | * Marker which signals that type `T` is already supported by Cassandra
5 | */
6 | trait CassandraType[T]
7 | object CassandraType {
8 | def of[T]: CassandraType[T] = new CassandraType[T] {}
9 | }
10 |
--------------------------------------------------------------------------------
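A sketch of declaring such a marker; `java.time.LocalDate` is used here only as a plausible example of a driver-native type, and the real instances live in the Cassandra encoding traits.

    import io.getquill.context.cassandra.encoding.CassandraType

    object NativeTypeEvidence {
      // Illustrative only: evidence that LocalDate needs no extra mapping
      implicit val localDateIsNative: CassandraType[java.time.LocalDate] =
        CassandraType.of[java.time.LocalDate]
    }
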
/quill-engine/src/main/scala/io/getquill/util/CollectTry.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import scala.util.Try
4 |
5 | object CollectTry {
6 | def apply[T](list: List[Try[T]]): Try[List[T]] =
7 | list.foldLeft(Try(List.empty[T])) { case (list, t) =>
8 | list.flatMap { l =>
9 | t.map(l :+ _)
10 | }
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
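A usage sketch of the `Try`-sequencing helper:

    import io.getquill.util.CollectTry
    import scala.util.{Failure, Try}

    object CollectTryDemo {
      // All successes are collected in order
      val ok: Try[List[Int]] = CollectTry(List(Try(1), Try(2), Try(3))) // Success(List(1, 2, 3))

      // A failure anywhere makes the whole result a Failure (the first one encountered wins)
      val bad: Try[List[Int]] = CollectTry(List(Try(1), Failure(new RuntimeException("boom"))))
    }
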
/quill-engine/src/main/scala/io/getquill/norm/capture/AvoidCapture.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.norm.capture
2 |
3 | import io.getquill.ast.Query
4 | import io.getquill.util.TraceConfig
5 |
6 | object AvoidCapture {
7 |
8 | def apply(q: Query, traceConfig: TraceConfig): Query =
9 | Dealias(AvoidAliasConflict(q, false, traceConfig))(traceConfig)
10 | }
11 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/quotation/NonQuotedException.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.quotation
2 |
3 | class NonQuotedException extends Exception(NonQuotedException.message)
4 |
5 | object NonQuotedException {
6 | final val message = "The query definition must happen within a `quote` block."
7 | def apply() = throw new NonQuotedException
8 | }
9 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/ReturnAction.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | sealed trait ReturnAction
4 | object ReturnAction {
5 | case object ReturnNothing extends ReturnAction
6 | case class ReturnColumns(columns: List[String]) extends ReturnAction
7 | case object ReturnRecord extends ReturnAction
8 | }
9 |
--------------------------------------------------------------------------------
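A sketch of consuming the ADT; the `describe` helper is hypothetical and only shows an exhaustive match over the three cases.

    import io.getquill.ReturnAction
    import io.getquill.ReturnAction.{ReturnColumns, ReturnNothing, ReturnRecord}

    object ReturnActionDemo {
      // Hypothetical helper: exhaustive match over the ADT
      def describe(action: ReturnAction): String = action match {
        case ReturnNothing          => "no generated values requested"
        case ReturnRecord           => "return the whole inserted record"
        case ReturnColumns(columns) => s"return columns: ${columns.mkString(", ")}"
      }
    }
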
/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/sqlite.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.ZioSpec.runLayerUnsafe
4 | import io.getquill.jdbczio.Quill
5 |
6 | package object sqlite {
7 | val pool = runLayerUnsafe(Quill.DataSource.fromPrefix("testSqliteDB"))
8 | object testContext extends Quill.Sqlite(Literal, pool) with TestEntities
9 | }
10 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/mysql/mysql.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.ZioSpec.runLayerUnsafe
4 | import io.getquill.jdbczio.Quill
5 |
6 | package object mysql {
7 | implicit val pool = runLayerUnsafe(Quill.DataSource.fromPrefix("testMysqlDB"))
8 | object testContext extends Quill.Mysql(Literal, pool) with TestEntities
9 | }
10 |
--------------------------------------------------------------------------------
/quill-orientdb/src/main/scala/io/getquill/OrientDBContextConfig.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import com.typesafe.config.Config
4 |
5 | case class OrientDBContextConfig(config: Config) {
6 | def dbUrl: String = config.getString("dbUrl")
7 | def username: String = config.getString("username")
8 | def password: String = config.getString("password")
9 | }
10 |
--------------------------------------------------------------------------------
/quill-jdbc-test-h2/src/test/scala/io/getquill/context/jdbc/h2/package.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc
2 |
3 | import io.getquill._
4 | import io.getquill.context.sql.{TestDecoders, TestEncoders}
5 |
6 | package object h2 {
7 |
8 | object testContext extends H2JdbcContext(Literal, "testH2DB") with TestEntities with TestEncoders with TestDecoders
9 |
10 | }
11 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/oracle/oracle.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.ZioSpec.runLayerUnsafe
4 | import io.getquill.jdbczio.Quill
5 |
6 | package object oracle {
7 | implicit val pool = runLayerUnsafe(Quill.DataSource.fromPrefix("testOracleDB"))
8 | object testContext extends Quill.Oracle(Literal, pool) with TestEntities
9 | }
10 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/postgres/package.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.ZioSpec.runLayerUnsafe
4 | import io.getquill.jdbczio.Quill
5 |
6 | package object postgres {
7 | val pool = runLayerUnsafe(Quill.DataSource.fromPrefix("testPostgresDB"))
8 | object testContext extends Quill.Postgres(Literal, pool) with TestEntities
9 | }
10 |
--------------------------------------------------------------------------------
/project/Dependencies.scala:
--------------------------------------------------------------------------------
1 | import sbt.*
2 | import sbt.Keys.*
3 |
4 | object Version {
5 | val zio = "2.1.20"
6 | }
7 |
8 | sealed trait ExcludeTests
9 | object ExcludeTests {
10 | case object Exclude extends ExcludeTests
11 | case object Include extends ExcludeTests
12 | case class KeepSome(regex: String) extends ExcludeTests
13 | }
14 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/context/ContextVerbPrepareLambda.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context
2 |
3 | trait ContextVerbPrepareLambda extends ContextVerbPrepare {
4 | type PrepareQueryResult = Session => Result[PrepareRow]
5 | type PrepareActionResult = Session => Result[PrepareRow]
6 | type PrepareBatchActionResult = Session => Result[List[PrepareRow]]
7 | }
8 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/sqlserver.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.ZioSpec.runLayerUnsafe
4 | import io.getquill.jdbczio.Quill
5 |
6 | package object sqlserver {
7 | val pool = runLayerUnsafe(Quill.DataSource.fromPrefix("testSqlServerDB"))
8 | object testContext extends Quill.SqlServer(Literal, pool) with TestEntities
9 | }
10 |
--------------------------------------------------------------------------------
/quill-orientdb/src/main/scala/io/getquill/context/orientdb/dsl/OrientDBDsl.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.orientdb.dsl
2 |
3 | import io.getquill.context.orientdb.OrientDBContext
4 |
5 | trait OrientDBDsl {
6 | this: OrientDBContext[_] =>
7 |
8 | implicit final class Like(s1: String) {
9 | def like(s2: String) = quote(sql"$s1 like $s2".as[Boolean])
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/model/JdbcTypeInfo.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.codegen.jdbc.model
2 |
3 | import io.getquill.codegen.model.JdbcColumnMeta
4 |
5 | case class JdbcTypeInfo(jdbcType: Int, size: Int, typeName: Option[String])
6 | object JdbcTypeInfo {
7 | def apply(cs: JdbcColumnMeta): JdbcTypeInfo = JdbcTypeInfo(cs.dataType, cs.size, Some(cs.typeName))
8 | }
9 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/util/EnableReflectiveCalls.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import scala.reflect.macros.blackbox.Context
4 |
5 | object EnableReflectiveCalls {
6 |
7 | def apply(c: Context) = {
8 | import c.universe._
9 | q"import _root_.scala.language.reflectiveCalls" ::
10 | q"Nil.asInstanceOf[{ def size }].size" ::
11 | Nil
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/sql/norm/ExpandMappedInfix.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql.norm
2 |
3 | import io.getquill.ast._
4 |
5 | object ExpandMappedInfix {
6 | def apply(q: Ast): Ast =
7 | Transform(q) { case Map(Infix("" :: parts, (q: Query) :: params, pure, tr, quat), x, p) =>
8 | Infix("" :: parts, Map(q, x, p) :: params, pure, tr, quat)
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/quill-jdbc/src/main/scala/io/getquill/context/jdbc/BooleanObjectEncoding.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc
2 |
3 | import java.sql.Types
4 |
5 | trait BooleanObjectEncoding {
6 | this: JdbcContextTypes[_, _] =>
7 |
8 | implicit val booleanEncoder: Encoder[Boolean] = encoder(Types.BOOLEAN, _.setBoolean)
9 | implicit val booleanDecoder: Decoder[Boolean] = decoder(_.getBoolean)
10 | }
11 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/sql/Common.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.sql
2 |
3 | import io.getquill.ast.{Aggregation, Ast, CollectAst, Infix}
4 |
5 | object Common {
6 | object ContainsImpurities {
7 | def unapply(ast: Ast) =
8 | CollectAst(ast) {
9 | case agg: Aggregation => agg
10 | case inf: Infix if (!inf.pure) => inf
11 | }.nonEmpty
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/util/LoadConfig.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import com.typesafe.config.ConfigFactory
4 |
5 | object LoadConfig {
6 |
7 | def apply(configPrefix: String) = {
8 | val factory = ConfigFactory.load(getClass.getClassLoader)
9 | if (factory.hasPath(configPrefix))
10 | factory.getConfig(configPrefix)
11 | else
12 | ConfigFactory.empty
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
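A usage sketch, assuming an `application.conf` entry under a `ctx` prefix such as the OrientDB one shown earlier; the `dbUrl` key is taken from that example.

    import com.typesafe.config.Config
    import io.getquill.util.LoadConfig

    object LoadConfigDemo {
      // Returns the sub-config at the prefix, or an empty config if the path is absent
      val ctxConfig: Config = LoadConfig("ctx")
      val dbUrl: Option[String] =
        if (ctxConfig.hasPath("dbUrl")) Some(ctxConfig.getString("dbUrl")) else None
    }
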
/quill-jdbc-test-mysql/src/test/scala/io/getquill/context/jdbc/mysql/package.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc
2 |
3 | import io.getquill._
4 | import io.getquill.context.sql.{TestDecoders, TestEncoders}
5 |
6 | package object mysql {
7 |
8 | object testContext
9 | extends MysqlJdbcContext(Literal, "testMysqlDB")
10 | with TestEntities
11 | with TestEncoders
12 | with TestDecoders
13 |
14 | }
15 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/util/OptionalTypecheck.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import scala.reflect.macros.blackbox.{Context => MacroContext}
4 |
5 | object OptionalTypecheck {
6 |
7 | def apply(c: MacroContext)(tree: c.Tree): Option[c.Tree] =
8 | c.typecheck(tree, silent = true) match {
9 | case c.universe.EmptyTree => None
10 | case tree => Some(tree)
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/MirrorContextSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.base.Spec
4 |
5 | class MirrorContextSpec extends Spec {
6 | val ctx = new MirrorContext(MirrorIdiom, Literal) with TestEntities
7 |
8 | "probe" in {
9 | ctx.probe("Ok").toOption mustBe defined
10 | ctx.probe("Fail").toOption mustBe empty
11 | }
12 |
13 | "close" in {
14 | ctx.close()
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/quill-jdbc-test-oracle/src/test/scala/io/getquill/context/jdbc/oracle/oracle.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc
2 |
3 | import io.getquill._
4 | import io.getquill.context.sql.{TestDecoders, TestEncoders}
5 |
6 | package object oracle {
7 |
8 | object testContext
9 | extends OracleJdbcContext(Literal, "testOracleDB")
10 | with TestEntities
11 | with TestEncoders
12 | with TestDecoders
13 |
14 | }
15 |
--------------------------------------------------------------------------------
/quill-sql-test/src/test/scala/io/getquill/context/sql/TestEncoders.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql
2 |
3 | import io.getquill.MappedEncoding
4 |
5 | trait TestEncoders {
6 | implicit val encodingTestTypeEncoder: MappedEncoding[EncodingTestType, String] =
7 | MappedEncoding[EncodingTestType, String](_.value)
8 | implicit val nameEncoder: MappedEncoding[Number, String] = MappedEncoding[Number, String](_.value)
9 | }
10 |
--------------------------------------------------------------------------------
/quill-test-kit/src/test/scala/io/getquill/context/sql/TestEncoders.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql
2 |
3 | import io.getquill.MappedEncoding
4 |
5 | trait TestEncoders {
6 | implicit val encodingTestTypeEncoder: MappedEncoding[EncodingTestType, String] =
7 | MappedEncoding[EncodingTestType, String](_.value)
8 | implicit val nameEncoder: MappedEncoding[Number, String] = MappedEncoding[Number, String](_.value)
9 | }
10 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/util/PrintMacMacro.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import scala.reflect.macros.whitebox.{Context => MacroContext}
4 |
5 | class PrintMacMacro(val c: MacroContext) {
6 | import c.universe._
7 |
8 | def apply(value: Tree): Tree = {
9 | println(
10 | "================= Printing Tree =================\n" +
11 | show(value)
12 | )
13 | q"()"
14 | }
15 |
16 | }
17 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/norm/NormalizeStringConcat.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.norm
2 |
3 | import io.getquill.ast._
4 |
5 | object NormalizeStringConcat extends StatelessTransformer {
6 | override def apply(ast: Ast): Ast = ast match {
7 | case BinaryOperation(Constant("", _), StringOperator.`+`, b) => apply(b)
8 | case _ => super.apply(ast)
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/util/LoadObject.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import scala.reflect.macros.whitebox.Context
4 | import scala.util.Try
5 |
6 | object LoadObject {
7 |
8 | def apply[T](c: Context)(tpe: c.Type): Try[T] =
9 | Try {
10 | val cls = Class.forName(tpe.typeSymbol.fullName + "$")
11 | val field = cls.getField("MODULE$")
12 | field.get(cls).asInstanceOf[T]
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/quill-jdbc-test-postgres/src/test/scala/io/getquill/context/jdbc/postgres/package.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc
2 |
3 | import io.getquill._
4 | import io.getquill.context.sql.{TestDecoders, TestEncoders}
5 |
6 | package object postgres {
7 |
8 | object testContext
9 | extends PostgresJdbcContext(Literal, "testPostgresDB")
10 | with TestEntities
11 | with TestEncoders
12 | with TestDecoders
13 |
14 | }
15 |
--------------------------------------------------------------------------------
/quill-jdbc-test-sqlserver/src/test/scala/io/getquill/context/jdbc/sqlserver/package.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc
2 |
3 | import io.getquill._
4 | import io.getquill.context.sql.{TestDecoders, TestEncoders}
5 |
6 | package object sqlserver {
7 |
8 | object testContext
9 | extends SqlServerJdbcContext(Literal, "testSqlServerDB")
10 | with TestEntities
11 | with TestEncoders
12 | with TestDecoders
13 | }
14 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/sql/idiom/ConcatSupport.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql.idiom
2 |
3 | import io.getquill.util.Messages
4 |
5 | trait ConcatSupport {
6 | this: SqlIdiom =>
7 |
8 | override def concatFunction = "UNNEST"
9 | }
10 |
11 | trait NoConcatSupport {
12 | this: SqlIdiom =>
13 |
14 | override def concatFunction = Messages.fail(s"`concatMap` not supported by ${this.getClass.getSimpleName}")
15 | }
16 |
--------------------------------------------------------------------------------
/quill-jdbc/src/test/scala/io/getquill/TypeParamExtensionTest.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.context.Context
4 |
5 | // Testing we are passing type params explicitly into JdbcContextBase, otherwise
6 | // this file will fail to compile
7 |
8 | trait BaseExtensions {
9 | val context: Context[PostgresDialect, _]
10 | }
11 |
12 | trait JDBCExtensions extends BaseExtensions {
13 | override val context: PostgresJdbcContext[_]
14 | }
15 |
--------------------------------------------------------------------------------
/quill-jdbc/src/test/scala/io/getquill/context/jdbc/JdbcContextConfigSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc
2 |
3 | import com.typesafe.config.ConfigFactory
4 | import io.getquill.JdbcContextConfig
5 | import io.getquill.base.Spec
6 |
7 | class JdbcContextConfigSpec extends Spec {
8 | "fail if cannot load dataSource" in {
9 | intercept[IllegalStateException] {
10 | JdbcContextConfig(ConfigFactory.empty()).dataSource
11 | }
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/TypeParamExtensionTest.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.context.Context
4 |
5 | // Testing we are passing type params explicitly into JdbcContextBase, otherwise
6 | // this file will fail to compile
7 |
8 | trait BaseExtensions {
9 | val context: Context[PostgresDialect, _]
10 | }
11 |
12 | trait JDBCExtensions extends BaseExtensions {
13 | override val context: PostgresZioJdbcContext[_]
14 | }
15 |
--------------------------------------------------------------------------------
/quill-spark/src/main/scala/io/getquill/context/spark/norm/QuestionMarkEscaper.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.spark.norm
2 |
3 | import java.util.regex.Matcher
4 |
5 | object QuestionMarkEscaper {
6 | def escape(str: String) = str.replace("?", "\\?")
7 | def unescape(str: String) = str.replace("\\?", "?")
8 |
9 | def pluginValueSafe(str: String, value: String) =
10 | str.replaceFirst("(? BaseIdiom}
4 | import io.getquill.context.sql.SqlContext
5 | import io.getquill.context.sql.encoding.mirror.ArrayMirrorEncoding
6 |
7 | class SqlMirrorContext[+Idiom <: BaseIdiom, +Naming <: NamingStrategy](idiom: Idiom, naming: Naming)
8 | extends MirrorContext(idiom, naming)
9 | with SqlContext[Idiom, Naming]
10 | with ArrayMirrorEncoding
11 |
--------------------------------------------------------------------------------
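A usage sketch of `SqlMirrorContext`: it is instantiated with a concrete SQL idiom and naming strategy so that generated SQL can be inspected without a database. The `Person` case class is hypothetical.

    import io.getquill.{Literal, PostgresDialect, SqlMirrorContext}

    object SqlMirrorDemo {
      val ctx = new SqlMirrorContext(PostgresDialect, Literal)
      import ctx._

      // Hypothetical table mapping, for illustration only
      final case class Person(name: String, age: Int)

      // On a mirror context, `run` returns a query mirror instead of hitting a database,
      // so the generated SQL can be inspected via its `string` field
      val adults        = quote(query[Person].filter(_.age > 18))
      val mirror        = run(adults)
      val sql: String   = mirror.string
    }
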
/scripts/start_containers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # From 'All In One' of Quill CONTRIBUTING.md
4 | docker compose down && docker compose build && docker compose run --rm --service-ports setup
5 |
6 | # echo "Adding 50ms latency to protoquill_postgres_1"
7 | # docker exec protoquill_postgres_1 tc qdisc add dev eth0 root netem delay 50ms
8 |
9 | # echo "Adding 50ms latency to protoquill_mysql_1"
10 | # docker exec protoquill_mysql_1 tc qdisc add dev eth0 root netem delay 50ms
11 |
--------------------------------------------------------------------------------
/quill-jdbc/src/main/scala/io/getquill/context/jdbc/BooleanIntEncoding.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc
2 |
3 | import java.sql.Types
4 |
5 | trait BooleanIntEncoding {
6 | this: JdbcContextTypes[_, _] =>
7 |
8 | implicit val booleanEncoder: Encoder[Boolean] =
9 | encoder(Types.TINYINT, (index, value, row) => row.setInt(index, if (value) 1 else 0))
10 | implicit val booleanDecoder: Decoder[Boolean] = decoder((index, row, session) => row.getInt(index) == 1)
11 | }
12 |
--------------------------------------------------------------------------------
/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/package.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra
2 | import io.getquill.Literal
3 | import io.getquill.cassandrazio.Quill
4 | import io.getquill.context.cassandra.zio.ZioCassandraSpec.runLayerUnsafe
5 |
6 | package object zio {
7 | val pool = runLayerUnsafe(Quill.CassandraZioSession.fromPrefix("testStreamDB"))
8 | lazy val testZioDB = new Quill.Cassandra(Literal, pool) with CassandraTestEntities
9 | }
10 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/ast/Transform.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.ast
2 |
3 | class Transform[T](p: PartialFunction[Ast, Ast]) extends StatelessTransformer {
4 |
5 | override def apply(a: Ast) =
6 | a match {
7 | case a if (p.isDefinedAt(a)) => p(a)
8 | case other => super.apply(other)
9 | }
10 | }
11 |
12 | object Transform {
13 | def apply[T](a: Ast)(p: PartialFunction[Ast, Ast]): Ast =
14 | new Transform(p).apply(a)
15 | }
16 |
--------------------------------------------------------------------------------
/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CassandraContextConfigSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra
2 |
3 | import com.typesafe.config.ConfigFactory
4 | import io.getquill.CassandraContextConfig
5 | import io.getquill.base.Spec
6 |
7 | class CassandraContextConfigSpec extends Spec {
8 | "load default preparedStatementCacheSize if not found in configs" in {
9 | CassandraContextConfig(ConfigFactory.empty()).preparedStatementCacheSize mustBe 1000
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/quill-jdbc/src/main/scala/io/getquill/context/jdbc/UUIDStringEncoding.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc
2 |
3 | import java.sql.Types
4 | import java.util.UUID
5 |
6 | trait UUIDStringEncoding {
7 | this: JdbcContextTypes[_, _] =>
8 | implicit val uuidEncoder: Encoder[UUID] =
9 | encoder(Types.VARCHAR, (index, value, row) => row.setString(index, value.toString))
10 | implicit val uuidDecoder: Decoder[UUID] = decoder((index, row, conn) => UUID.fromString(row.getString(index)))
11 | }
12 |
--------------------------------------------------------------------------------
/quill-cassandra-zio/src/main/scala/io/getquill/cassandrazio/Probing.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.cassandrazio
2 |
3 | import io.getquill.CassandraZioSession
4 |
5 | import scala.util.Try
6 |
7 | trait Probing {
8 | def probingSession: Option[CassandraZioSession] = None
9 |
10 | def probe(statement: String): scala.util.Try[_] =
11 | probingSession match {
12 | case Some(csession) =>
13 | Try(csession.prepare(statement))
14 | case None =>
15 | Try(())
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/quill-codegen/src/main/scala/io/getquill/codegen/gen/QuerySchemaNaming.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.codegen.gen
2 |
3 | trait QuerySchemaNaming {
4 | this: CodeGeneratorComponents =>
5 |
6 | object `"query"` extends QuerySchemaNaming {
7 | override def apply(tm: TableMeta): String = "query"
8 | }
9 |
10 | object `[namespace][Table]` extends QuerySchemaNaming {
11 | override def apply(tm: TableMeta): String =
12 | namespacer(tm) + tm.tableName.toLowerCase.capitalize
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/.github/release-drafter.yml:
--------------------------------------------------------------------------------
1 | name-template: 'v$NEXT_PATCH_VERSION'
2 | tag-template: 'v$NEXT_PATCH_VERSION'
3 | categories:
4 | - title: '🚀 Features'
5 | labels:
6 | - 'feature'
7 | - title: '🐛 Bug Fixes'
8 | labels:
9 | - 'bug'
10 | - title: '🧰 Maintenance'
11 | labels:
12 | - 'build'
13 | - title: '🌱 Dependency Updates'
14 | labels:
15 | - 'dependency-update'
16 | change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
17 | template: |
18 | ## Changes
19 | $CHANGES
20 |
--------------------------------------------------------------------------------
/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraMapper.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra.encoding
2 |
3 | import io.getquill.context.UdtValueLookup
4 |
5 | /**
6 | * Developers API.
7 | *
8 | * End-users should rely on MappedEncoding since it's more general.
9 | */
10 | case class CassandraMapper[I, O](f: (I, UdtValueLookup) => O)
11 | object CassandraMapper {
12 | def simple[I, O](f: I => O): CassandraMapper[I, O] = CassandraMapper[I, O]((iOrig, _) => f(iOrig))
13 | }
14 |
--------------------------------------------------------------------------------
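A sketch of lifting a plain function with `CassandraMapper.simple` when the UDT lookup is not needed; the `UserId` wrapper is hypothetical.

    import io.getquill.context.cassandra.encoding.CassandraMapper

    object CassandraMapperDemo {
      // Hypothetical wrapper type, for illustration only
      final case class UserId(value: Long)

      // `simple` ignores the UdtValueLookup argument
      val toCassandra: CassandraMapper[UserId, java.lang.Long] =
        CassandraMapper.simple(id => java.lang.Long.valueOf(id.value))
    }
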
/quill-jdbc/src/main/scala/io/getquill/context/jdbc/UUIDObjectEncoding.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc
2 |
3 | import java.sql.Types
4 | import java.util.UUID
5 |
6 | trait UUIDObjectEncoding {
7 | this: JdbcContextTypes[_, _] =>
8 | implicit val uuidEncoder: Encoder[UUID] =
9 | encoder(Types.OTHER, (index, value, row) => row.setObject(index, value, Types.OTHER))
10 | implicit val uuidDecoder: Decoder[UUID] =
11 | decoder((index, row, conn) => UUID.fromString(row.getObject(index).toString))
12 | }
13 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/context/VerifyFreeVariables.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context
2 |
3 | import scala.reflect.macros.whitebox.{Context => MacroContext}
4 | import io.getquill.quotation.FreeVariables
5 | import io.getquill.ast.Ast
6 | import io.getquill.util.MacroContextExt._
7 |
8 | object VerifyFreeVariables {
9 |
10 | def apply(c: MacroContext)(ast: Ast): Ast =
11 | FreeVariables.verify(ast) match {
12 | case Right(ast) => ast
13 | case Left(msg) => c.fail(msg)
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/quill-core/src/test/resources/logback.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |     <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
4 |         <encoder>
5 |             <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n%ex</pattern>
6 |         </encoder>
7 |     </appender>
8 |
9 |     <root level="WARN">
10 |         <appender-ref ref="STDOUT" />
11 |     </root>
12 | </configuration>
13 |
--------------------------------------------------------------------------------
/quill-codegen-jdbc/src/test/resources/logback.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |     <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
4 |         <encoder>
5 |             <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n%ex</pattern>
6 |         </encoder>
7 |     </appender>
8 |
9 |     <root level="WARN">
10 |         <appender-ref ref="STDOUT" />
11 |     </root>
12 | </configuration>
13 |
--------------------------------------------------------------------------------
/quill-cassandra/src/test/scala/io/getquill/context/cassandra/udt/UdtSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra.udt
2 |
3 | import io.getquill.Udt
4 | import io.getquill.base.Spec
5 |
6 | trait UdtSpec extends Spec {
7 | case class Name(firstName: String, lastName: Option[String]) extends Udt
8 | case class Personal(
9 | number: Int,
10 | street: String,
11 | name: Name,
12 | optName: Option[Name],
13 | list: List[String],
14 | sets: Set[Int],
15 | map: Map[Int, String]
16 | ) extends Udt
17 | }
18 |
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/util/ShowSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import io.getquill.base.Spec
4 |
5 | class ShowSpec extends Spec {
6 |
7 | import Show._
8 |
9 | "given a Show implicit, provides an implicit class with the show method" in {
10 | implicit val show = Show[Int] {
11 | _.toString
12 | }
13 | 1.show mustEqual "1"
14 | }
15 |
16 | "provides a factory method that receives a function" in {
17 | implicit val show = Show[Int](_.toString)
18 | 1.show mustEqual "1"
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/quill-sql/src/main/scala/io/getquill/context/sql/dsl/SqlDsl.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql.dsl
2 |
3 | import io.getquill.Query
4 | import io.getquill.context.sql.SqlContext
5 |
6 | //noinspection TypeAnnotation
7 | trait SqlDsl {
8 | this: SqlContext[_, _] =>
9 |
10 | implicit final class Like(s1: String) {
11 | def like(s2: String) = quote(sql"$s1 like $s2".pure.asCondition)
12 | }
13 |
14 | implicit final class ForUpdate[T](q: Query[T]) {
15 | def forUpdate() = quote(sql"$q FOR UPDATE".as[Query[T]])
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CassandraTestEntities.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra
2 |
3 | import io.getquill.TestEntities
4 |
5 | trait CassandraTestEntities extends TestEntities {
6 | this: CassandraContext[_] =>
7 |
8 | case class MapFrozen(id: Map[Int, Boolean])
9 | val mapFroz = quote(query[MapFrozen])
10 |
11 | case class SetFrozen(id: Set[Int])
12 | val setFroz = quote(query[SetFrozen])
13 |
14 | case class ListFrozen(id: List[Int])
15 | val listFroz = quote(query[ListFrozen])
16 | }
17 |
--------------------------------------------------------------------------------
/quill-sql-test/src/test/scala/io/getquill/context/sql/TestDecoders.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql
2 |
3 | import io.getquill.MappedEncoding
4 |
5 | trait TestDecoders {
6 | implicit val encodingTestTypeDecoder: MappedEncoding[String, EncodingTestType] =
7 | MappedEncoding[String, EncodingTestType](EncodingTestType)
8 | implicit val nameDecoder: MappedEncoding[String, Number] = MappedEncoding[String, Number](s =>
9 | Number
10 | .withValidation(s)
11 | .getOrElse(throw new Exception(s"Illegal number $s"))
12 | )
13 | }
14 |
--------------------------------------------------------------------------------
/quill-test-kit/src/test/scala/io/getquill/context/sql/TestDecoders.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql
2 |
3 | import io.getquill.MappedEncoding
4 |
5 | trait TestDecoders {
6 | implicit val encodingTestTypeDecoder: MappedEncoding[String, EncodingTestType] =
7 | MappedEncoding[String, EncodingTestType](EncodingTestType)
8 | implicit val nameDecoder: MappedEncoding[String, Number] = MappedEncoding[String, Number](s =>
9 | Number
10 | .withValidation(s)
11 | .getOrElse(throw new Exception(s"Illegal number $s"))
12 | )
13 | }
14 |
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/MoreAstOps.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 | import io.getquill.ast._
3 |
4 | object MoreAstOps {
5 | implicit final class AstOpsExt2(private val body: Ast) extends AnyVal {
6 | def +++(other: Constant): BinaryOperation =
7 | if (other.v.isInstanceOf[String])
8 | BinaryOperation(body, StringOperator.`+`, other)
9 | else
10 | BinaryOperation(body, NumericOperator.`+`, other)
11 |
12 | def +>+(other: Ast): BinaryOperation = BinaryOperation(body, NumericOperator.`>`, other)
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/util/ExceptionOps.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import java.io.ByteArrayOutputStream
4 |
5 | object ExceptionOps {
6 | implicit final class ThrowableOpsMethods(private val t: Throwable) extends AnyVal {
7 | def stackTraceToString: String = {
8 | val stream = new ByteArrayOutputStream()
9 | val writer = new java.io.BufferedWriter(new java.io.OutputStreamWriter(stream))
10 | t.printStackTrace(new java.io.PrintWriter(writer))
11 | writer.flush()
12 | stream.toString
13 | }
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/quill-util/src/main/scala/io/getquill/util/ThrowableOps.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import java.io.ByteArrayOutputStream
4 |
5 | object ThrowableOps {
6 | implicit final class ThrowableOpsMethods(private val t: Throwable) extends AnyVal {
7 | def stackTraceToString: String = {
8 | val stream = new ByteArrayOutputStream()
9 | val writer = new java.io.BufferedWriter(new java.io.OutputStreamWriter(stream))
10 | t.printStackTrace(new java.io.PrintWriter(writer))
11 | writer.flush()
12 | stream.toString
13 | }
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/context/mirror/RowSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.mirror
2 |
3 | import io.getquill.base.Spec
4 |
5 | class RowSpec extends Spec {
6 |
7 | "adds value" in {
8 | val r = Row(1, 2)
9 | r.add(3) mustEqual Row(1, 2, 3)
10 | }
11 |
12 | "gets value by index" in {
13 | val r = Row(1, 2)
14 | r[Int](0) mustEqual 1
15 | }
16 |
17 | "fails if the value doesn't match the expected type" in {
18 | val r = Row(1, 2)
19 | intercept[IllegalStateException] {
20 | r[String](0)
21 | }
22 | ()
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/quill-cassandra/src/main/scala/io/getquill/CassandraCqlSessionContext.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import com.datastax.oss.driver.api.core.CqlSession
4 | import io.getquill.context.{AsyncFutureCache, CassandraSession, SyncCache}
5 | import io.getquill.context.cassandra.CassandraSessionContext
6 |
7 | abstract class CassandraCqlSessionContext[+N <: NamingStrategy](
8 | val naming: N,
9 | val session: CqlSession,
10 | val preparedStatementCacheSize: Long
11 | ) extends CassandraSessionContext[N]
12 | with CassandraSession
13 | with SyncCache
14 | with AsyncFutureCache {}
15 |
--------------------------------------------------------------------------------
/quill-codegen-jdbc/src/test/resources/schema_simple.sql:
--------------------------------------------------------------------------------
1 | create table Person (
2 | id int not null,
3 | firstName varchar(255),
4 | lastName varchar(255),
5 | age int not null
6 | );
7 |
8 | create table Address (
9 | personFk int not null,
10 | street varchar(255),
11 | zip int
12 | );
13 |
14 | insert into Person values (1, 'Joe', 'Bloggs', 22);
15 | insert into Person values (2, 'Jack', 'Ripper', 33);
16 | insert into Address values (1, '123 Someplace', 1001);
17 | insert into Address values (1, '678 Blah', 2002);
18 | insert into Address values (2, '111234 Some Other Place', 3333);
--------------------------------------------------------------------------------
/quill-sql-test/src/test/scala/io/getquill/context/sql/idiom/MySQL5DialectSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql.idiom
2 |
3 | import io.getquill._
4 | import io.getquill.idiom.StringToken
5 |
6 | class MySQL5DialectSpec extends AbstractMySQLDialectSpec {
7 | lazy val ctx = new SqlMirrorContext(MySQL5Dialect, Literal) with TestEntities
8 |
9 | import ctx._
10 |
11 | "delete is without table alias" in {
12 | val q = quote {
13 | qr1.filter(t => t.i == 999).delete
14 | }
15 | ctx.run(q).string mustEqual
16 | "DELETE FROM TestEntity WHERE i = 999"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/quill-sql-test/src/test/scala/io/getquill/context/sql/idiom/MySQLDialectSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql.idiom
2 |
3 | import io.getquill._
4 | import io.getquill.idiom.StringToken
5 |
6 | class MySQLDialectSpec extends AbstractMySQLDialectSpec {
7 | lazy val ctx = new SqlMirrorContext(MySQLDialect, Literal) with TestEntities
8 |
9 | import ctx._
10 |
11 | "delete is with table alias" in {
12 | val q = quote {
13 | qr1.filter(t => t.i == 999).delete
14 | }
15 | ctx.run(q).string mustEqual
16 | "DELETE FROM TestEntity t WHERE t.i = 999"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/model/JdbcTypes.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.codegen.jdbc.model
2 |
3 | import java.sql.Connection
4 |
5 | import io.getquill.codegen.model.{JdbcColumnMeta, JdbcTableMeta, RawSchema}
6 |
7 | import scala.reflect.ClassTag
8 |
9 | object JdbcTypes {
10 | type JdbcConnectionMaker = () => Connection
11 | type JdbcSchemaReader = (JdbcConnectionMaker) => Seq[RawSchema[JdbcTableMeta, JdbcColumnMeta]]
12 | type JdbcTyper = JdbcTypeInfo => Option[ClassTag[_]]
13 | type JdbcQuerySchemaNaming = JdbcTableMeta => String
14 | }
15 |
--------------------------------------------------------------------------------
/quill-jdbc-test-oracle/src/test/scala/io/getquill/context/jdbc/oracle/ScalarValueSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.oracle
2 |
3 | import io.getquill.base.Spec
4 |
5 | class ScalarValueSpec extends Spec {
6 |
7 | val context = testContext
8 | import testContext._
9 |
10 | "Simple Scalar Select" in {
11 | context.run(1) mustEqual 1
12 | }
13 |
14 | "Multi Scalar Select" in {
15 | context.run(quote(1 + quote(1))) mustEqual 2
16 | }
17 |
18 | "Multi Scalar Select with Infix" in {
19 | context.run("foo" + sql"""'bar'""".as[String]) mustEqual "foobar"
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/env.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | export CASSANDRA_HOST=127.0.0.1
3 | export CASSANDRA_PORT=19042
4 | export MYSQL_HOST=127.0.0.1
5 | export MYSQL_PORT=13306
6 | export POSTGRES_HOST=127.0.0.1
7 | export POSTGRES_PORT=15432
8 | export SQL_SERVER_HOST=127.0.0.1
9 | export SQL_SERVER_PORT=11433
10 | export ORIENTDB_HOST=127.0.0.1
11 | export ORIENTDB_PORT=12424
12 | export ORACLE_HOST=127.0.0.1
13 | export ORACLE_PORT=11521
14 |
15 | export JAVA_OPTS="-Dquill.macro.log=false -Xms1024m -Xmx3g -Xss5m -XX:ReservedCodeCacheSize=256m -XX:+TieredCompilation -XX:+CMSClassUnloadingEnabled -XX:+UseConcMarkSweepGC"
16 |
17 |
--------------------------------------------------------------------------------
/quill-codegen-jdbc/src/test/resources/schema_snakecase.sql:
--------------------------------------------------------------------------------
1 | create table Person (
2 | id int primary key,
3 | first_name varchar(255),
4 | last_name varchar(255),
5 | age int not null
6 | );
7 |
8 | create table Address (
9 | person_fk int not null,
10 | street varchar(255),
11 | zip int
12 | );
13 |
14 | insert into Person values (1, 'Joe', 'Bloggs', 22);
15 | insert into Person values (2, 'Jack', 'Ripper', 33);
16 | insert into Address values (1, '123 Someplace', 1001);
17 | insert into Address values (1, '678 Blah', 2002);
18 | insert into Address values (2, '111234 Some Other Place', 3333);
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/VIdent.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.ast.{Ast, Ident}
4 | import io.getquill.quat.Quat
5 |
6 | /**
7 | * For unit tests that do not care about the `Quat` of an ident, use this class
8 | * to construct an Ident with Quat.Value and ignore the Quat when
9 | * deconstructing.
10 | */
11 | object VIdent {
12 | def unapply(id: Ast): Option[String] =
13 | id match {
14 | case Ident(name, _) => Some(name)
15 | case _ => None
16 | }
17 |
18 | def apply(value: String) =
19 | Ident(value, Quat.Value)
20 | }
21 |
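A small sketch of the extractor described in the scaladoc. VIdent lives in quill-core test sources, so this only compiles there; the ident name is made up:

    import io.getquill.VIdent
    import io.getquill.ast.{Ast, Ident}
    import io.getquill.quat.Quat

    object VIdentSketch extends App {
      val ast: Ast = Ident("x", Quat.Value) // any Quat would match below
      ast match {
        case VIdent(name) => println(s"ident named $name") // prints: ident named x
        case _            => println("not an ident")
      }
    }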
--------------------------------------------------------------------------------
/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/util/TryOps.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.codegen.util
2 | import scala.util.{Failure, Try}
3 |
4 | object TryOps {
5 | implicit final class TryThrowExt[T](private val t: Try[T]) extends AnyVal {
6 | def orThrow: T =
7 | t match {
8 | case scala.util.Success(v) => v
9 | case Failure(e) => throw e
10 | }
11 |
12 | def orThrow(wrapper: Throwable => Throwable): T =
13 | t match {
14 | case scala.util.Success(v) => v
15 | case Failure(e) => throw wrapper(e)
16 | }
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/quat/QuatMacro.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.quat
2 |
3 | import io.getquill.quotation.LiftUnlift
4 |
5 | import scala.reflect.macros.whitebox.{Context => MacroContext}
6 |
7 | class QuatMacro(val c: MacroContext) extends QuatMaking {
8 | import c.universe._
9 |
10 | def makeQuat[T: c.WeakTypeTag]: c.Tree = {
11 | val quat = inferQuat(implicitly[c.WeakTypeTag[T]].tpe)
12 | val liftUnlift = new { override val mctx: c.type = c } with LiftUnlift(quat.countFields)
13 | val quatExpr: c.Tree = liftUnlift.quatLiftable(quat)
14 | q"${quatExpr}"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/quill-codegen-jdbc/src/test/resources/schema_casesensitive.sql:
--------------------------------------------------------------------------------
1 | create table "Person" (
2 | "id" int primary key,
3 | "firstName" varchar(255),
4 | "lastName" varchar(255),
5 | "age" int not null
6 | );
7 |
8 | create table "Address" (
9 | "personFk" int not null,
10 | "street" varchar(255),
11 | "zip" int
12 | );
13 |
14 | insert into "Person" values (1, 'Joe', 'Bloggs', 22);
15 | insert into "Person" values (2, 'Jack', 'Ripper', 33);
16 | insert into "Address" values (1, '123 Someplace', 1001);
17 | insert into "Address" values (1, '678 Blah', 2002);
18 | insert into "Address" values (2, '111234 Some Other Place', 3333);
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | resolvers += Classpaths.sbtPluginReleases
2 |
3 | resolvers += "Typesafe repository" at "https://repo.typesafe.com/typesafe/releases/"
4 |
5 | addDependencyTreePlugin
6 |
7 | addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.5")
8 | addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.3.1")
9 | addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.4")
10 | addSbtPlugin("com.etsy" % "sbt-compile-quick-plugin" % "1.4.0")
11 | addSbtPlugin("dev.zio" % "zio-sbt-website" % "0.3.10")
12 | addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.11.1")
13 |
--------------------------------------------------------------------------------
/quill-cassandra/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |     <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
4 |         <encoder>
5 |             <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n%ex</pattern>
6 |         </encoder>
7 |     </appender>
8 |
9 |     <root level="WARN">
10 |         <appender-ref ref="STDOUT" />
11 |     </root>
12 | </configuration>
13 |
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/norm/NormalizeAggregationIdentSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.norm
2 |
3 | import io.getquill.base.Spec
4 | import io.getquill.MirrorContexts.testContext._
5 |
6 | class NormalizeAggregationIdentSpec extends Spec {
7 | "multiple select" in {
8 | val q = quote {
9 | qr1.groupBy(p => p.i).map { case (i, qrs) =>
10 | i -> qrs.map(_.l).sum
11 | }
12 | }
13 | val n = quote {
14 | qr1.groupBy(p => p.i).map { p =>
15 | p._1 -> p._2.map(x1 => x1.l).sum
16 | }
17 | }
18 | new Normalize(TranspileConfig.Empty)(q.ast) mustEqual (n.ast)
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/util/IndentUtil.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | object IndentUtil {
4 | implicit final class StringOpsExt(private val str: String) extends AnyVal {
5 | def fitsOnOneLine: Boolean = !str.contains("\n")
6 | def multiline(indent: Int, prefix: String): String =
7 | str.split("\n").map(elem => indent.prefix + prefix + elem).mkString("\n")
8 | }
9 |
10 | implicit final class IndentOps(private val i: Int) extends AnyVal {
11 | def prefix: String = indentOf(i)
12 | }
13 |
14 | private def indentOf(num: Int): String =
15 | (0 to num).map(_ => "").mkString(" ")
16 | }
17 |
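A quick, assumed illustration of what `multiline` produces (the input string and the "| " marker are made up):

    import io.getquill.util.IndentUtil._

    object IndentUtilSketch extends App {
      // Each line is prefixed with two spaces of indent plus the "| " marker.
      println("SELECT x\nFROM t".multiline(2, "| "))
      //   | SELECT x
      //   | FROM t
    }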
--------------------------------------------------------------------------------
/quill-cassandra/src/test/scala/io/getquill/context/cassandra/BindVariablesSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra
2 |
3 | import io.getquill.base.Spec
4 | import io.getquill.context.mirror.Row
5 | import io.getquill.context.cassandra.mirrorContext._
6 |
7 | class BindVariablesSpec extends Spec {
8 |
9 | "binds lifted values" in {
10 | def q(i: Int) =
11 | quote {
12 | query[TestEntity].filter(e => e.i == lift(i))
13 | }
14 | val mirror = mirrorContext.run(q(2))
15 | mirror.string mustEqual "SELECT s, i, l, o, b FROM TestEntity WHERE i = ?"
16 | mirror.prepareRow mustEqual Row(2)
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/quill-codegen/src/main/scala/io/getquill/codegen/model/Typing.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.codegen.model
2 |
3 | sealed trait UnrecognizedTypeStrategy
4 | case object AssumeString extends UnrecognizedTypeStrategy
5 | case object SkipColumn extends UnrecognizedTypeStrategy
6 | case object ThrowTypingError extends UnrecognizedTypeStrategy
7 |
8 | // TODO Need to document
9 | sealed trait NumericPreference
10 | case object PreferPrimitivesWhenPossible extends NumericPreference
11 | case object UseDefaults extends NumericPreference
12 |
13 | class TypingError(private val message: String) extends RuntimeException(message) {}
14 |
--------------------------------------------------------------------------------
/quill-cassandra/src/main/scala/io/getquill/context/cassandra/util/UdtMetaUtils.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra.util
2 |
3 | import io.getquill.util.Messages
4 |
5 | object UdtMetaUtils {
6 |
7 | /**
8 | * Extracts the UDT name and keyspace from the given path
9 | *
10 | * @param path
11 | * udt path
12 | * @return
13 | * (name, keyspace)
14 | */
15 | def parse(path: String): (String, Option[String]) = {
16 | val arr = path.split('.')
17 | if (arr.length == 1) arr(0) -> None
18 | else if (arr.length == 2) arr(1) -> Some(arr(0))
19 | else Messages.fail(s"Cannot parse udt path `$path`")
20 | }
21 | }
22 |
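The parsing rules from the scaladoc, spelled out with made-up paths:

    import io.getquill.context.cassandra.util.UdtMetaUtils

    object UdtPathSketch extends App {
      println(UdtMetaUtils.parse("person_name"))           // (person_name,None)
      println(UdtMetaUtils.parse("people_ks.person_name")) // (person_name,Some(people_ks))
      // Paths with more than two segments fail via Messages.fail.
    }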
--------------------------------------------------------------------------------
/quill-spark/src/main/scala/io/getquill/context/spark/Decoders.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.spark
2 |
3 | import io.getquill.util.Messages
4 | import io.getquill.QuillSparkContext
5 |
6 | trait Decoders {
7 | this: QuillSparkContext =>
8 |
9 | type Decoder[T] = BaseDecoder[T]
10 | type ResultRow = Unit
11 |
12 | implicit def dummyDecoder[T]: (Index, ResultRow, ResultRow) => Nothing =
13 | (_: Int, _: ResultRow, _: Session) => Messages.fail("quill decoders are not used for spark")
14 |
15 | implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], decoder: Decoder[I]): Decoder[O] =
16 | dummyDecoder[O]
17 | }
18 |
--------------------------------------------------------------------------------
/quill-spark/src/main/scala/io/getquill/context/spark/norm/EscapeQuestionMarks.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.spark.norm
2 |
3 | import io.getquill.ast._
4 | import QuestionMarkEscaper._
5 |
6 | object EscapeQuestionMarks extends StatelessTransformer {
7 |
8 | override def apply(ast: Ast): Ast =
9 | ast match {
10 | case Constant(value, _) =>
11 | Constant.auto(if (value.isInstanceOf[String]) escape(value.asInstanceOf[String]) else value)
12 | case Infix(parts, params, pure, tr, quat) =>
13 | Infix(parts.map(escape(_)), params, pure, tr, quat)
14 | case other =>
15 | super.apply(other)
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/docs/sidebars.js:
--------------------------------------------------------------------------------
1 | const sidebars = {
2 | sidebar: [
3 | {
4 | type: "category",
5 | label: "ZIO Quill",
6 | collapsed: false,
7 | link: { type: "doc", id: "index" },
8 | items: [
9 | "getting-started",
10 | "writing-queries",
11 | "extending-quill",
12 | "contexts",
13 | "code-generation",
14 | "logging",
15 | "compiler-performance",
16 | "additional-resources",
17 | "quill-vs-cassandra",
18 | "quill-vs-slick",
19 | "changelog",
20 | "how-to-contribute",
21 | ]
22 | }
23 | ]
24 | };
25 |
26 | module.exports = sidebars;
27 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/util/Interleave.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import scala.annotation.tailrec
4 | import scala.collection.mutable.ListBuffer
5 |
6 | object Interleave {
7 |
8 | def apply[T](l1: List[T], l2: List[T]): List[T] =
9 | interleave(l1, l2, ListBuffer.empty)
10 |
11 | @tailrec
12 | private[this] def interleave[T](l1: List[T], l2: List[T], acc: ListBuffer[T]): List[T] =
13 | (l1, l2) match {
14 | case (Nil, l2) => (acc ++ l2).toList
15 | case (l1, Nil) => (acc ++ l1).toList
16 | case (h1 :: t1, h2 :: t2) => interleave(t1, t2, { acc += h1; acc += h2 })
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/quill-jdbc/src/test/resources/logback.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |     <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
4 |         <encoder>
5 |             <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n%ex</pattern>
6 |         </encoder>
7 |     </appender>
8 |
9 |     <root level="WARN">
10 |         <appender-ref ref="STDOUT" />
11 |     </root>
12 | </configuration>
13 |
--------------------------------------------------------------------------------
/quill-orientdb/src/test/scala/io/getquill/context/orientdb/BindVariablesSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.orientdb
2 |
3 | import io.getquill.base.Spec
4 | import io.getquill.context.mirror.Row
5 |
6 | class BindVariablesSpec extends Spec {
7 |
8 | "binds lifted values" in {
9 | val mirrorContext = orientdb.mirrorContext
10 | import mirrorContext._
11 | def q(i: Int) =
12 | quote {
13 | query[TestEntity].filter(e => e.i == lift(i))
14 | }
15 | val mirror = mirrorContext.run(q(2))
16 | mirror.string mustEqual "SELECT s, i, l, o, b FROM TestEntity WHERE i = ?"
17 | mirror.prepareRow mustEqual Row(2)
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/util/InterleaveSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import io.getquill.base.Spec
4 |
5 | class InterleaveSpec extends Spec {
6 |
7 | "same-size lists" in {
8 | val l1 = List(1, 3, 5)
9 | val l2 = List(2, 4, 6)
10 | Interleave(l1, l2) mustEqual List(1, 2, 3, 4, 5, 6)
11 | }
12 |
13 | "first list with more elements" in {
14 | val l1 = List(1, 3)
15 | val l2 = List(2)
16 | Interleave(l1, l2) mustEqual List(1, 2, 3)
17 | }
18 |
19 | "second list with more elements" in {
20 | val l1 = List(1)
21 | val l2 = List(2, 3)
22 | Interleave(l1, l2) mustEqual List(1, 2, 3)
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/resources/logback.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |     <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
4 |         <encoder>
5 |             <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n%ex</pattern>
6 |         </encoder>
7 |     </appender>
8 |
9 |     <root level="WARN">
10 |         <appender-ref ref="STDOUT" />
11 |     </root>
12 | </configuration>
13 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/norm/DialectBehaviors.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.norm
2 |
3 | trait EqualityBehavior
4 | object EqualityBehavior {
5 | case object AnsiEquality extends EqualityBehavior
6 | case object NonAnsiEquality extends EqualityBehavior
7 | }
8 |
9 | trait ConcatBehavior
10 | object ConcatBehavior {
11 | case object AnsiConcat extends ConcatBehavior
12 | case object NonAnsiConcat extends ConcatBehavior
13 | }
14 |
15 | sealed trait ProductAggregationToken
16 | object ProductAggregationToken {
17 | case object Star extends ProductAggregationToken
18 | case object VariableDotStar extends ProductAggregationToken
19 | }
20 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |     <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
4 |         <encoder>
5 |             <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n%ex</pattern>
6 |         </encoder>
7 |     </appender>
8 |
9 |     <root level="WARN">
10 |         <appender-ref ref="STDOUT" />
11 |     </root>
12 | </configuration>
13 |
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/UpperCaseNonDefault.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | trait UpperCaseNonDefault extends NamingStrategy {
4 | override def column(s: String): String = s.toUpperCase
5 | override def table(s: String): String = s.toUpperCase
6 | override def default(s: String) = s
7 | }
8 | object UpperCaseNonDefault extends UpperCaseNonDefault
9 |
10 | trait UpperCaseEscapeColumn extends NamingStrategy {
11 | override def column(s: String): String = s""""${s.toUpperCase}""""
12 | override def table(s: String): String = s
13 | override def default(s: String) = s
14 | }
15 | object UpperCaseEscapeColumn extends UpperCaseEscapeColumn
16 |
--------------------------------------------------------------------------------
/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CollectionsSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra
2 |
3 | import io.getquill.MappedEncoding
4 | import io.getquill.base.Spec
5 | import org.scalatest.BeforeAndAfterEach
6 |
7 | trait CollectionsSpec extends Spec with BeforeAndAfterEach {
8 | case class StrWrap(x: String)
9 | implicit val encodeStrWrap = MappedEncoding[StrWrap, String](_.x)
10 | implicit val decodeStrWrap = MappedEncoding[String, StrWrap](StrWrap.apply)
11 |
12 | case class IntWrap(x: Int)
13 | implicit val encodeIntWrap = MappedEncoding[IntWrap, Int](_.x)
14 | implicit val decodeIntWrap = MappedEncoding[Int, IntWrap](IntWrap.apply)
15 | }
16 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/norm/NormalizeCaching.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.norm
2 |
3 | import com.github.benmanes.caffeine.cache.{Cache, Caffeine}
4 | import io.getquill.ast.Ast
5 | import io.getquill.util.Messages
6 |
7 | object NormalizeCaching {
8 |
9 | private val cache: Cache[Ast, Ast] = Caffeine
10 | .newBuilder()
11 | .maximumSize(Messages.cacheDynamicMaxSize)
12 | .recordStats()
13 | .build()
14 |
15 | def apply(f: Ast => Ast): Ast => Ast = { ori =>
16 | val (stabilized, state) = StabilizeLifts.stabilize(ori)
17 | val normalized = cache.get(stabilized, ast => f(ast))
18 | StabilizeLifts.revert(normalized, state)
19 | }
20 |
21 | }
22 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/util/Show.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | object Show {
4 | trait Show[T] {
5 | def show(v: T): String
6 | }
7 |
8 | object Show {
9 | // noinspection ConvertExpressionToSAM
10 | def apply[T](f: T => String): Show[T] =
11 | new Show[T] {
12 | def show(v: T): String = f(v)
13 | }
14 | }
15 |
16 | implicit final class Shower[T](private val v: T) extends AnyVal {
17 | def show(implicit shower: Show[T]): String = shower.show(v)
18 | }
19 |
20 | implicit def listShow[T](implicit shower: Show[T]): Show[List[T]] =
21 | Show[List[T]] { list =>
22 | list.map(_.show).mkString(", ")
23 | }
24 | }
25 |
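A short sketch of the typeclass plus the derived List instance; the Show[Int] instance is assumed for the example:

    import io.getquill.util.Show._

    object ShowSketch extends App {
      implicit val intShow: Show[Int] = Show[Int](_.toString)
      println(1.show)             // 1
      println(List(1, 2, 3).show) // 1, 2, 3
    }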
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/PeopleZioSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.context.qzio.ZioJdbcContext
4 | import io.getquill.context.sql.base.PeopleSpec
5 | import io.getquill.jdbczio.Quill
6 |
7 | trait PeopleZioSpec extends PeopleSpec with ZioSpec {
8 |
9 | val context: Quill[_, _]
10 | import context._
11 |
12 | val `Ex 11 query` = quote(query[Person])
13 | val `Ex 11 expected` = peopleEntries
14 | }
15 |
16 | trait PeopleZioProxySpec extends PeopleSpec with ZioProxySpec {
17 |
18 | val context: ZioJdbcContext[_, _]
19 | import context._
20 |
21 | val `Ex 11 query` = quote(query[Person])
22 | val `Ex 11 expected` = peopleEntries
23 | }
24 |
--------------------------------------------------------------------------------
/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/Encodings.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra.encoding
2 |
3 | import java.time.{Instant, ZonedDateTime, ZoneId}
4 |
5 | import io.getquill.context.cassandra.CassandraContext
6 |
7 | trait Encodings extends CassandraMapperConversions with CassandraTypes {
8 | this: CassandraContext[_] =>
9 |
10 | protected val zoneId = ZoneId.systemDefault
11 |
12 | implicit val encodeJava8ZonedDateTime: MappedEncoding[ZonedDateTime, Instant] = MappedEncoding(zdt => zdt.toInstant)
13 | implicit val decodeJava8ZonedDateTime: MappedEncoding[Instant, ZonedDateTime] =
14 | MappedEncoding(d => ZonedDateTime.ofInstant(d, zoneId))
15 | }
16 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/quotation/Bindings.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.quotation
2 |
3 | import scala.reflect.macros.whitebox.Context
4 |
5 | object Bindings {
6 | def apply(c: Context)(quoted: c.Tree, tpe: c.Type): Map[c.Symbol, c.Tree] = {
7 | import c.universe._
8 | tpe
9 | .member(TermName("bindings"))
10 | .typeSignature
11 | .decls
12 | .collect {
13 | case m: MethodSymbol if (m.isGetter) =>
14 | m ->
15 | q"""
16 | {
17 | import _root_.scala.language.reflectiveCalls
18 | $quoted.bindings.$m
19 | }
20 | """
21 | }
22 | .toMap
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/util/OptionOps.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.codegen.util
2 | import scala.util.{Failure, Success, Try}
3 |
4 | object OptionOps {
5 | implicit final class OptExt[T](private val o: Option[T]) extends AnyVal {
6 | def mapIfThen(`if`: T, `then`: T): Option[T] = o.map(v => if (v == `if`) `then` else v)
7 | def toTry(e: Throwable): Try[T] = o match {
8 | case Some(value) => Success(value)
9 | case None => Failure(e)
10 | }
11 | }
12 | implicit final class StringOptionExt(private val opt: Option[String]) extends AnyVal {
13 | def andNotEmpty: Option[String] = opt.flatMap(s => if (s.trim.isEmpty) None else Some(s))
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/norm/NormalizeCachingSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.norm
2 |
3 | import io.getquill.base.Spec
4 |
5 | class NormalizeCachingSpec extends Spec {
6 |
7 | val normalize = new Normalize(TranspileConfig.Empty)
8 | val cached = NormalizeCaching(normalize.apply)
9 | val gen = new QueryGenerator(1)
10 |
11 | "Cached normalization" - {
12 | "consists with non-cached `Normalize`" in {
13 | for (i <- (3 to 15)) {
14 | for (j <- (0 until 30)) {
15 | val query = gen(i)
16 | val r = normalize(query)
17 | val cr = cached.apply(query)
18 | r mustEqual (cr)
19 | }
20 | }
21 | }
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | This template isn't a strict requirement to open issues, but please try to provide as much information as possible.
2 |
3 | **Version**: (e.g. `0.4.1-SNAPSHOT`)
4 | **Module**: (e.g. `quill-jdbc`)
5 | **Database**: (e.g. `mysql`)
6 |
7 | ### Expected behavior
8 |
9 | ### Actual behavior
10 |
11 | ### Steps to reproduce the behavior
12 |
13 | If the issue can be reproduced using a [mirror context](https://zio.dev/zio-quill/contexts#mirror-context), please provide a scastie snippet that reproduces it. See https://scastie.scala-lang.org/fwbrasil/Z2CeR2qHQJK6EyQWUBhANA as an example. Remember to select the correct Quill version in the left menu.
14 |
15 | ### Workaround
16 |
17 | @getquill/maintainers
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/quotation/QuatException.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.quotation
2 |
3 | import io.getquill.quat.Quat
4 |
5 | class QuatException(message: String) extends IllegalArgumentException(message)
6 |
7 | object QuatException {
8 | def apply(message: String) = throw new QuatException(message)
9 | }
10 |
11 | object QuatExceptionOps {
12 | implicit final class QuatExceptionOpsExt(quat: => Quat) {
13 | def suppress(additionalMessage: String = ""): String =
14 | try { quat.shortString }
15 | catch {
16 | case e: QuatException =>
17 | s"QuatException(${e.getMessage + (if (additionalMessage != "") ", " + additionalMessage else "")})"
18 | }
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/context/ContextVerbStream.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context
2 |
3 | import io.getquill.{NamingStrategy, Query, Quoted}
4 |
5 | import scala.language.higherKinds
6 | import scala.language.experimental.macros
7 |
8 | trait ContextVerbStream[+Idiom <: io.getquill.idiom.Idiom, +Naming <: NamingStrategy] {
9 | this: Context[Idiom, Naming] =>
10 |
11 | type StreamResult[T]
12 |
13 | def stream[T](quoted: Quoted[Query[T]]): StreamResult[T] = macro QueryMacro.streamQuery[T]
14 |
15 | // Macro methods do not support default arguments, so two overloads are needed
16 | def stream[T](quoted: Quoted[Query[T]], fetchSize: Int): StreamResult[T] = macro QueryMacro.streamQueryFetch[T]
17 | }
18 |
--------------------------------------------------------------------------------
/quill-cassandra-pekko/src/test/resources/application.conf:
--------------------------------------------------------------------------------
1 |
2 | testDB.keyspace=quill_test
3 | testDB.preparedStatementCacheSize=1000
4 | testDB.session.contactPoint=127.0.0.1
5 | testDB.session.contactPoint=${?CASSANDRA_HOST}
6 | testDB.session.port=9042
7 | testDB.session.port=${?CASSANDRA_PORT}
8 |
9 | pekko {
10 | loglevel = "INFO"
11 | coordinated-shutdown.exit-jvm = off
12 |
13 | actor {
14 | allow-java-serialization = off
15 | }
16 |
17 | discovery.method = "pekko-dns"
18 | }
19 |
20 | datastax-java-driver.basic {
21 | contact-points = [ ${testDB.session.contactPoint}":"${testDB.session.port} ]
22 | load-balancing-policy.local-datacenter = "datacenter1"
23 | session-keyspace = ${testDB.keyspace}
24 | }
25 |
--------------------------------------------------------------------------------
/.github/workflows/scala-steward.yml:
--------------------------------------------------------------------------------
1 | name: Scala Steward
2 |
3 | # This workflow runs every day at 00:00
4 | on:
5 | schedule:
6 | - cron: '0 0 * * *'
7 | workflow_dispatch: {}
8 |
9 | jobs:
10 | scala-steward:
11 | timeout-minutes: 45
12 | runs-on: ubuntu-latest
13 | name: Scala Steward
14 | steps:
15 | - name: Scala Steward
16 | uses: scala-steward-org/scala-steward-action@v2.76.0
17 | with:
18 | github-app-id: ${{ secrets.SCALA_STEWARD_GITHUB_APP_ID }}
19 | github-app-installation-id: ${{ secrets.SCALA_STEWARD_GITHUB_APP_INSTALLATION_ID }}
20 | github-app-key: ${{ secrets.SCALA_STEWARD_GITHUB_APP_PRIVATE_KEY }}
21 | github-app-auth-only: true
22 |
--------------------------------------------------------------------------------
/quill-orientdb/src/main/scala/io/getquill/context/orientdb/encoding/CollectionEncoders.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.orientdb.encoding
2 |
3 | import io.getquill.context.orientdb.OrientDBSessionContext
4 | import scala.jdk.CollectionConverters._
5 |
6 | trait CollectionEncoders {
7 | this: OrientDBSessionContext[_] =>
8 |
9 | implicit def listEncoder[T]: Encoder[List[T]] = encoder { (index, value, row) =>
10 | row.insert(index, value.asJava); row
11 | }
12 | implicit def setEncoder[T]: Encoder[Set[T]] = encoder { (index, value, row) =>
13 | row.insert(index, value.asJava); row
14 | }
15 | implicit def mapEncoder[K, V]: Encoder[Map[K, V]] = encoder { (index, value, row) =>
16 | row.insert(index, value.asJava); row
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | Fixes #issue_number
2 |
3 | ### Problem
4 |
5 | Explain here the context, and why you're making that change.
6 | What is the problem you're trying to solve?
7 |
8 | ### Solution
9 |
10 | Describe the modifications you've done.
11 |
12 | ### Notes
13 |
14 | Additional notes.
15 |
16 | ### Checklist
17 |
18 | - [ ] Unit test all changes
19 | - [ ] Update `README.md` if applicable
20 | - [ ] Add `[WIP]` to the pull request title if it's work in progress
21 | - [ ] [Squash commits](https://ariejan.net/2011/07/05/git-squash-your-latests-commits-into-one) that aren't meaningful changes
22 | - [ ] Run `sbt scalafmtAll` to make sure that the source files are formatted
23 |
24 | @getquill/maintainers
25 |
--------------------------------------------------------------------------------
/quill-spark/src/test/scala/io/getquill/context/spark/package.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context
2 |
3 | import org.apache.spark.sql.SparkSession
4 | import io.getquill.QuillSparkContext
5 |
6 | package object spark {
7 |
8 | val sparkSession =
9 | SparkSession
10 | .builder()
11 | .config("spark.sql.shuffle.partitions", 2) // Default shuffle partitions is 200, too much for tests
12 | .config("spark.ui.enabled", "false")
13 | .config("spark.driver.bindAddress", "127.0.0.1")
14 | .master("local[*]")
15 | .appName("spark test")
16 | .getOrCreate()
17 |
18 | sparkSession.sparkContext.setLogLevel("WARN")
19 |
20 | implicit val sqlContext = sparkSession.sqlContext
21 |
22 | val testContext = QuillSparkContext
23 | }
24 |
--------------------------------------------------------------------------------
/quill-sql-test/src/test/scala/io/getquill/context/sql/PrettyPrintingSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql
2 |
3 | import io.getquill.base.Spec
4 |
5 | class PrettyPrintingSpec extends Spec {
6 |
7 | import testContext._
8 |
9 | case class Person(name: String, age: Int)
10 |
11 | "pretty print query when enabled" in {
12 | val prettyString = testContext.run(query[Person]).string(true)
13 | prettyString mustEqual
14 | """SELECT
15 | | x.name,
16 | | x.age
17 | |FROM
18 | | Person x""".stripMargin
19 | }
20 |
21 | "regular print query when not enabled" in {
22 | val prettyString = testContext.run(query[Person]).string(false)
23 | prettyString mustEqual "SELECT x.name, x.age FROM Person x"
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/quill-jdbc-test-postgres/src/test/scala/io/getquill/context/jdbc/postgres/JdbcArrayOpsSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.postgres
2 |
3 | import io.getquill.context.sql.base.ArrayOpsSpec
4 |
5 | class JdbcArrayOpsSpec extends ArrayOpsSpec {
6 | val ctx = testContext
7 | import ctx._
8 |
9 | "contains" in {
10 | ctx.run(`contains`.`Ex 1 return all`) mustBe `contains`.`Ex 1 expected`
11 | ctx.run(`contains`.`Ex 2 return 1`) mustBe `contains`.`Ex 2 expected`
12 | ctx.run(`contains`.`Ex 3 return 2,3`) mustBe `contains`.`Ex 3 expected`
13 | ctx.run(`contains`.`Ex 4 return empty`) mustBe `contains`.`Ex 4 expected`
14 | }
15 |
16 | override protected def beforeAll(): Unit = {
17 | ctx.run(entity.delete)
18 | ctx.run(insertEntries)
19 | ()
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/quill-jdbc/src/main/scala/io/getquill/context/jdbc/ResultSetExtractor.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc
2 | import java.sql.{Connection, ResultSet}
3 | import scala.annotation.tailrec
4 |
5 | object ResultSetExtractor {
6 |
7 | private[getquill] final def apply[T](
8 | rs: ResultSet,
9 | conn: Connection,
10 | extractor: (ResultSet, Connection) => T
11 | ): List[T] =
12 | extractResult(rs, conn, extractor, List.empty)
13 |
14 | @tailrec
15 | private[getquill] final def extractResult[T](
16 | rs: ResultSet,
17 | conn: Connection,
18 | extractor: (ResultSet, Connection) => T,
19 | acc: List[T]
20 | ): List[T] =
21 | if (rs.next)
22 | extractResult(rs, conn, extractor, extractor(rs, conn) :: acc)
23 | else
24 | acc.reverse
25 | }
26 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioApp.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.examples.other
2 |
3 | import io.getquill._
4 | import io.getquill.jdbczio.Quill
5 | import zio.Console.printLine
6 | import zio.ZIOAppDefault
7 |
8 | object ZioApp extends ZIOAppDefault {
9 |
10 | object MyPostgresContext extends PostgresZioJdbcContext(Literal)
11 | import MyPostgresContext._
12 |
13 | case class Person(name: String, age: Int)
14 |
15 | val zioDS = Quill.DataSource.fromPrefix("testPostgresDB")
16 |
17 | override def run = {
18 | val people = quote {
19 | query[Person].filter(p => p.name == "Alex")
20 | }
21 | MyPostgresContext
22 | .run(people)
23 | .tap(result => printLine(result.toString))
24 | .provide(zioDS)
25 | .exitCode
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/quill-cassandra/src/main/scala/io/getquill/context/cassandra/UdtMetaDsl.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra
2 |
3 | import scala.language.experimental.macros
4 | import io.getquill.Udt
5 |
6 | trait UdtMetaDsl {
7 | this: CassandraContext[_] =>
8 |
9 | /**
10 | * Creates a udt meta that overrides the UDT name / keyspace and renames columns
11 | *
12 | * @param path
13 | * \- either `udt_name` or `keyspace.udt_name`
14 | * @param columns
15 | * \- columns to rename
16 | * @return
17 | * udt meta
18 | */
19 | def udtMeta[T <: Udt](path: String, columns: (T => (Any, String))*): UdtMeta[T] = macro UdtMetaDslMacro.udtMeta[T]
20 |
21 | trait UdtMeta[T <: Udt] {
22 | def keyspace: Option[String]
23 | def name: String
24 | def alias(col: String): Option[String]
25 | }
26 | }
27 |
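A hedged sketch of the `udtMeta` call described in the scaladoc. The mirror context, UDT class, keyspace, and column names are all assumptions chosen for illustration:

    import io.getquill._

    object UdtMetaSketch {
      lazy val ctx = new CassandraMirrorContext(Literal)
      import ctx._

      case class PersonName(firstName: String, lastName: String) extends Udt

      // Override the UDT name and keyspace, and rename one column.
      implicit val personNameMeta: UdtMeta[PersonName] =
        udtMeta[PersonName]("people_ks.person_name", _.firstName -> "first_name")
    }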
--------------------------------------------------------------------------------
/quill-cassandra/src/test/scala/io/getquill/context/cassandra/package.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context
2 |
3 | import scala.concurrent.Await
4 | import scala.concurrent.Future
5 | import scala.concurrent.duration.Duration
6 |
7 | import io.getquill._
8 | import io.getquill.Literal
9 |
10 | package object cassandra {
11 |
12 | lazy val mirrorContext = new CassandraMirrorContext(Literal) with CassandraTestEntities
13 | lazy val capsMirrorContext = new CassandraMirrorContext(UpperCaseNonDefault) with CassandraTestEntities
14 |
15 | lazy val testSyncDB = new CassandraSyncContext(Literal, "testSyncDB") with CassandraTestEntities
16 |
17 | lazy val testAsyncDB = new CassandraAsyncContext(Literal, "testAsyncDB") with CassandraTestEntities
18 |
19 | def await[T](f: Future[T]): T = Await.result(f, Duration.Inf)
20 | }
21 |
--------------------------------------------------------------------------------
/quill-codegen/src/main/scala/io/getquill/codegen/model/Stereotyper.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.codegen.model
2 |
3 | import io.getquill.codegen.gen.HasBasicMeta
4 | import io.getquill.codegen.model.Stereotyper.Namespacer
5 |
6 | object Stereotyper {
7 | type Namespacer[TableMeta] = TableMeta => String
8 | type Expresser[TableMeta, ColumnMeta] = (RawSchema[TableMeta, ColumnMeta]) => TableStereotype[TableMeta, ColumnMeta]
9 | type Fuser[TableMeta, ColumnMeta] =
10 | (Seq[TableStereotype[TableMeta, ColumnMeta]]) => TableStereotype[TableMeta, ColumnMeta]
11 | }
12 |
13 | trait Stereotyper extends HasBasicMeta {
14 | def namespacer: Namespacer[JdbcTableMeta]
15 | def nameParser: NameParser
16 | def stereotype(schemas: Seq[RawSchema[TableMeta, ColumnMeta]]): Seq[TableStereotype[TableMeta, ColumnMeta]]
17 | }
18 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/mock/Introspection.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.mock
2 |
3 | import scala.reflect.ClassTag
4 | import scala.reflect.runtime.{universe => ru}
5 |
6 | class Introspection[T](t: T)(implicit tt: ru.TypeTag[T], ct: ClassTag[T]) {
7 | import ru._
8 |
9 | val rm = runtimeMirror(getClass.getClassLoader)
10 | val instanceMirror = rm.reflect(t)
11 |
12 | val fieldsAndValues =
13 | typeOf[T].members.collect {
14 | case m: MethodSymbol if m.isCaseAccessor => m
15 | }.map(sym => (sym.name.toString, instanceMirror.reflectField(sym.asTerm).get)).toList.reverse
16 |
17 | val map = fieldsAndValues.toMap
18 |
19 | def getIndex(i: Int) = fieldsAndValues(i - 1)._2 // Subtract 1 because DB result sets are 1-indexed
20 | def getField(name: String) = map(name)
21 | }
22 |
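A sketch of how this test-only reflective helper behaves; Person and its values are assumptions. Note that getIndex is 1-based to mirror JDBC ResultSet indexing, per the comment in the code:

    import io.getquill.mock.Introspection

    case class Person(name: String, age: Int)

    object IntrospectionSketch extends App {
      val intro = new Introspection(Person("Joe", 22))
      println(intro.getIndex(1))     // Joe  (first declared field, 1-based)
      println(intro.getField("age")) // 22
    }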
--------------------------------------------------------------------------------
/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtEncoding.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra.encoding
2 |
3 | import com.datastax.oss.driver.api.core.data.UdtValue
4 | import io.getquill.Udt
5 | import io.getquill.context.cassandra.CassandraRowContext
6 |
7 | import scala.language.experimental.macros
8 |
9 | trait UdtEncoding {
10 | this: CassandraRowContext[_] =>
11 |
12 | implicit def udtDecoder[T <: Udt]: Decoder[T] = macro UdtEncodingMacro.udtDecoder[T]
13 | implicit def udtEncoder[T <: Udt]: Encoder[T] = macro UdtEncodingMacro.udtEncoder[T]
14 |
15 | implicit def udtDecodeMapper[T <: Udt]: CassandraMapper[UdtValue, T] = macro UdtEncodingMacro.udtDecodeMapper[T]
16 | implicit def udtEncodeMapper[T <: Udt]: CassandraMapper[T, UdtValue] = macro UdtEncodingMacro.udtEncodeMapper[T]
17 |
18 | }
19 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/context/cassandra/ExpandMappedInfixCassandra.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra
2 |
3 | import io.getquill.ast.StatelessTransformer
4 | import io.getquill.ast._
5 |
6 | object ExpandMappedInfixCassandra extends StatelessTransformer {
7 |
8 | override def apply(q: Ast) =
9 | q match {
10 | case Map(q: Infix, x, p) if (x == p) =>
11 | q
12 | case q @ Map(Infix(parts, params, pure, tr, quat), x, p) =>
13 | params.zipWithIndex.collect { case (q: Query, i) =>
14 | (q, i)
15 | } match {
16 | case List((q, i)) =>
17 | Infix(parts, params.updated(i, Map(q, x, p)), pure, tr, quat)
18 | case other =>
19 | super.apply(q)
20 | }
21 | case other =>
22 | super.apply(q)
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/quill-orientdb/src/main/scala/io/getquill/OrientDBMirrorContext.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.context.orientdb.{OrientDBContext, OrientDBIdiom}
4 |
5 | class OrientDBMirrorContext[+Naming <: NamingStrategy](naming: Naming)
6 | extends MirrorContext[OrientDBIdiom, Naming](OrientDBIdiom, naming)
7 | with OrientDBContext[Naming] {
8 |
9 | implicit def listDecoder[T]: Decoder[List[T]] = decoderUnsafe[List[T]]
10 | implicit def setDecoder[T]: Decoder[Set[T]] = decoderUnsafe[Set[T]]
11 | implicit def mapDecoder[K, V]: Decoder[Map[K, V]] = decoderUnsafe[Map[K, V]]
12 |
13 | implicit def listEncoder[T]: Encoder[List[T]] = encoder[List[T]]
14 | implicit def setEncoder[T]: Encoder[Set[T]] = encoder[Set[T]]
15 | implicit def mapEncoder[K, V]: Encoder[Map[K, V]] = encoder[Map[K, V]]
16 | }
17 |
--------------------------------------------------------------------------------
/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/ExampleApp.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra.zio.examples.other
2 |
3 | import io.getquill.{CassandraZioContext, _}
4 | import zio.ZIOAppDefault
5 | import zio.Console.printLine
6 |
7 | object ExampleApp extends ZIOAppDefault {
8 |
9 | object MyZioCassandraContext extends CassandraZioContext(Literal)
10 | import MyZioCassandraContext._
11 |
12 | case class Person(name: String, age: Int)
13 |
14 | val zioSessionLayer =
15 | CassandraZioSession.fromPrefix("testStreamDB")
16 |
17 | override def run = {
18 | val people = quote {
19 | query[Person]
20 | }
21 | MyZioCassandraContext
22 | .run(people)
23 | .tap(result => printLine(result.toString))
24 | .provide(zioSessionLayer)
25 | .exitCode
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/quill-jdbc-test-postgres/src/test/scala/io/getquill/context/jdbc/postgres/PrettyPrintingSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.postgres
2 |
3 | import io.getquill.base.Spec
4 | import io.getquill.context.TranslateOptions
5 |
6 | class PrettyPrintingSpec extends Spec {
7 |
8 | val context = testContext
9 | import testContext._
10 |
11 | case class Person(name: String, age: Int)
12 |
13 | "pretty prints query when enabled" in {
14 | val q = quote(query[Person])
15 | translate(q, TranslateOptions(prettyPrint = true)) mustEqual
16 | """SELECT
17 | | x.name,
18 | | x.age
19 | |FROM
20 | | Person x""".stripMargin
21 | }
22 |
23 | "regular print when not enabled" in {
24 | val q = quote(query[Person])
25 | translate(q) mustEqual "SELECT x.name, x.age FROM Person x"
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/quill-jdbc/src/main/scala/io/getquill/JdbcContextConfig.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import com.typesafe.config.Config
4 | import com.zaxxer.hikari.HikariConfig
5 | import com.zaxxer.hikari.HikariDataSource
6 |
7 | import java.util.Properties
8 | import scala.util.control.NonFatal
9 |
10 | case class JdbcContextConfig(config: Config) {
11 |
12 | def configProperties = {
13 | import scala.jdk.CollectionConverters._
14 | val p = new Properties
15 | for (entry <- config.entrySet.asScala)
16 | p.setProperty(entry.getKey, entry.getValue.unwrapped.toString)
17 | p
18 | }
19 |
20 | def dataSource =
21 | try
22 | new HikariDataSource(new HikariConfig(configProperties))
23 | catch {
24 | case NonFatal(ex) =>
25 | throw new IllegalStateException("Failed to load data source", ex)
26 | }
27 | }
28 |
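A minimal sketch of how JdbcContextConfig is typically driven from HOCON; the `myDB` prefix and its keys below are assumptions for illustration only, while the test configs used elsewhere in this repository (e.g. `testPostgresDB`) follow the same shape.

    // Hypothetical config block, parsed inline purely for illustration.
    import com.typesafe.config.ConfigFactory
    import io.getquill.JdbcContextConfig

    val cfg = ConfigFactory.parseString(
      """
        |myDB.dataSourceClassName=org.postgresql.ds.PGSimpleDataSource
        |myDB.dataSource.user=postgres
        |myDB.dataSource.databaseName=quill_test
        |""".stripMargin
    )

    // configProperties copies every key under `myDB` verbatim into the Properties
    // handed to HikariConfig, so Hikari itself resolves dataSourceClassName and the
    // dataSource.* entries; dataSource then builds the pooled HikariDataSource.
    val ds = JdbcContextConfig(cfg.getConfig("myDB")).dataSource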
--------------------------------------------------------------------------------
/quill-zio/src/main/scala/io/getquill/context/qzio/ZioTranslateContext.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.qzio
2 |
3 | import io.getquill.NamingStrategy
4 | import io.getquill.context.{Context, ContextTranslateMacro}
5 | import io.getquill.idiom.Idiom
6 | import zio.ZIO
7 |
8 | trait ZioTranslateContext extends ContextTranslateMacro {
9 | this: Context[_ <: Idiom, _ <: NamingStrategy] =>
10 |
11 | type Error
12 | type Environment
13 |
14 | override type TranslateResult[T] = ZIO[Environment, Error, T]
15 | override def wrap[T](t: => T): TranslateResult[T] = ZIO.environment[Environment].as(t)
16 | override def push[A, B](result: TranslateResult[A])(f: A => B): TranslateResult[B] = result.map(f)
17 | override def seq[A](list: List[TranslateResult[A]]): TranslateResult[List[A]] = ZIO.collectAll(list)
18 | }
19 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | target
3 | null
4 | .classpath
5 | null/
6 | .project
7 | .settings
8 | .bsp
9 | .history
10 | .cache*
11 | .jvmopts
12 | queries.sql
13 | LogToFileSpecQuery.sql
14 | **/bin
15 | **/.tmpBin
16 | local.*
17 | quill_test.db
18 | codegen_test.db
19 | codegen_test.h2.mv.db
20 | codegen_test.h2.trace.db
21 | Bug.scala
22 | *.gz
23 | quill-jdbc/io/
24 | quill-sql/io/
25 | MyTest.scala
26 | MyTest*.scala
27 | MySparkTest.scala
28 | MyTestJdbc.scala
29 | MyJdbcTest.scala
30 | MySqlTest*.scala
31 | MyCassandraTest*.scala
32 | MyCqlTest.scala
33 | MySparkExample.scala
34 | quill-core/src/main/resources/logback.xml
35 | quill-jdbc/src/main/resources/logback.xml
36 | log.txt*
37 | tmp
38 | nohup.out
39 | .bloop/
40 | .metals/
41 | project/.bloop/
42 | /io/
43 | /.metadata/
44 | .bsp/
45 | .jvmopts
46 | .DS_Store
47 | .vscode
48 | .history
49 | metals.sbt
50 |
--------------------------------------------------------------------------------
/quill-sql-test/src/test/scala/io/getquill/context/sql/idiom/H2DialectSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql.idiom
2 |
3 | import io.getquill.{H2Dialect, Literal, SqlMirrorContext, TestEntities}
4 |
5 | class H2DialectSpec extends OnConflictSpec {
6 | val ctx = new SqlMirrorContext(H2Dialect, Literal) with TestEntities
7 | import ctx._
8 | "OnConflict" - `onConflict with all` { i =>
9 | "no target - ignore" in {
10 | ctx.run(`no target - ignore`(i)).string mustEqual
11 | "INSERT INTO TestEntity (s,i,l,o,b) VALUES ($1, $2, $3, $4, $5) ON CONFLICT DO NOTHING"
12 | }
13 | "no target - ignore batch" in {
14 | ctx.run(`no target - ignore batch`).groups.foreach {
15 | _._1 mustEqual
16 | "INSERT INTO TestEntity (s,i,l,o,b) VALUES ($1, $2, $3, $4, $5) ON CONFLICT DO NOTHING"
17 | }
18 | }
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/quill-codegen-jdbc/src/test/resources/application-codegen.conf:
--------------------------------------------------------------------------------
 1 | # Configuration for the code generators, recycling as much as possible
 2 | # from application.conf.
3 | include required(classpath("application.conf"))
4 |
5 | testMysqlDB.dataSource.url="jdbc:mysql://"${?MYSQL_HOST}":"${?MYSQL_PORT}"/codegen_test"
6 |
7 | testPostgresDB.dataSource.databaseName=codegen_test
 8 | # Pools are created frequently in tests, so limit the connection count here;
 9 | # otherwise we can hit "PSQLException: FATAL: sorry, too many clients already".
10 | testPostgresDB.maximumPoolSize=1
11 |
12 | testH2DB.dataSource.url="jdbc:h2:file:./codegen_test.h2;DB_CLOSE_ON_EXIT=TRUE"
13 |
14 | testSqliteDB.jdbcUrl="jdbc:sqlite:codegen_test.db"
15 |
16 | testSqlServerDB.dataSource.databaseName=codegen_test
17 |
18 | testOracleDB.dataSource.user=codegen_test
19 | testOracleDB.maximumPoolSize=1
20 |
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/MirrorContexts.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import scala.concurrent.{ExecutionContext, Future}
4 | import scala.util.{Failure, Try}
5 |
6 | object MirrorContexts {
7 |
8 | object testContext extends TestMirrorContextTemplate(MirrorIdiom, Literal) with TestEntities
9 | object testAsyncContext extends AsyncMirrorContext(MirrorIdiom, Literal) with TestEntities {
10 |
11 | // hack to avoid Await.result since scala.js doesn't support it
12 | implicit val immediateEC: ExecutionContext = new ExecutionContext {
13 | def execute(runnable: Runnable) = runnable.run()
14 | def reportFailure(cause: Throwable) = ()
15 | }
16 |
17 | def eval[T](f: Future[T]): T = {
18 | var res: Try[T] = Failure(new IllegalStateException())
19 | f.onComplete(res = _)
20 | res.get
21 | }
22 | }
23 |
24 | }
25 |
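A small sketch of why the eval helper above works without Await: `Future.successful` is already completed, so with `immediateEC` the `onComplete` callback runs synchronously on the calling thread and `res` is populated before `eval` reads it.

    import io.getquill.MirrorContexts.testAsyncContext
    import scala.concurrent.Future

    // The callback fires synchronously on the immediate ExecutionContext,
    // so the result is available right after onComplete returns.
    val answer: Int = testAsyncContext.eval(Future.successful(41 + 1)) // 42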
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/QueryProbingSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.base.Spec
4 |
5 | class QueryProbingSpec extends Spec {
6 |
7 | "fails if the context can't be resolved at compile time" in {
8 | object c extends MirrorContext(MirrorIdiom, Literal) with TestEntities with QueryProbing
9 | import c._
10 | "c.run(qr1)" mustNot compile
11 | }
12 |
13 | "doesn't warn if query probing is disabled and the context can't be resolved at compile time" in {
14 | object c extends MirrorContext(MirrorIdiom, Literal) with TestEntities
15 | import c._
16 | c.run(qr1.delete)
17 | ()
18 | }
19 |
20 | "fails compilation if the query probing fails" in {
21 | case class Fail()
22 | import mirrorContextWithQueryProbing._
23 | "mirrorContextWithQueryProbing.run(query[Fail].delete)" mustNot compile
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppDataSource.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.examples.other
2 |
3 | import io.getquill._
4 | import io.getquill.util.LoadConfig
5 | import zio.Console.printLine
6 | import zio.{ZEnvironment, ZIOAppDefault}
7 |
8 | object ZioAppDataSource extends ZIOAppDefault {
9 |
10 | object MyPostgresContext extends PostgresZioJdbcContext(Literal)
11 | import MyPostgresContext._
12 |
13 | case class Person(name: String, age: Int)
14 |
15 | def dataSource = JdbcContextConfig(LoadConfig("testPostgresDB")).dataSource
16 |
17 | override def run = {
18 | val people = quote {
19 | query[Person].filter(p => p.name == "Alex")
20 | }
21 | MyPostgresContext
22 | .run(people)
23 | .provideEnvironment(ZEnvironment(dataSource))
24 | .tap(result => printLine(result.toString))
25 | .exitCode
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/quill-cassandra/src/main/scala/io/getquill/CassandraContextConfig.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import com.datastax.oss.driver.api.core.{CqlSession, CqlSessionBuilder}
4 | import com.typesafe.config.Config
5 | import io.getquill.context.cassandra.cluster.SessionBuilder
6 |
7 | case class CassandraContextConfig(config: Config) {
8 | def preparedStatementCacheSize: Long =
9 | if (config.hasPath("preparedStatementCacheSize"))
10 | config.getLong("preparedStatementCacheSize")
11 | else
12 | 1000
13 | def builder: CqlSessionBuilder = SessionBuilder(config.getConfig("session"))
14 | lazy val session: CqlSession = builder.withKeyspace(keyspace).build()
15 |
16 |   /**
17 |    * The keyspace as declared in the config file. To get the actually active
18 |    * keyspace, use `session.getKeyspace`.
19 |    * @return the configured keyspace name
20 |    */
21 | def keyspace: String = config.getString("keyspace")
22 | }
23 |
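A minimal usage sketch, assuming a config block shaped like the `testSyncDB` entry in quill-cassandra's test application.conf is available on the classpath:

    import io.getquill.CassandraContextConfig
    import io.getquill.util.LoadConfig

    val cassandraCfg = CassandraContextConfig(LoadConfig("testSyncDB"))
    val cacheSize    = cassandraCfg.preparedStatementCacheSize // falls back to 1000 if the key is absent
    val ks           = cassandraCfg.keyspace                   // "quill_test", read straight from the config
    // cassandraCfg.session lazily builds a CqlSession bound to that keyspace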
--------------------------------------------------------------------------------
/quill-orientdb/src/test/scala/io/getquill/context/orientdb/DecodeNullSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.orientdb
2 |
3 | import io.getquill.base.Spec
4 |
5 | class DecodeNullSpec extends Spec {
6 |
7 | "no default values when reading null" - {
8 |
9 | "sync" in {
10 | val ctx = orientdb.testSyncDB
11 | import ctx._
12 | val writeEntities = quote(querySchema[DecodeNullTestWriteEntity]("DecodeNullTestEntity"))
13 |
14 | ctx.run(writeEntities.delete)
15 | ctx.run(writeEntities.insertValue(lift(insertValue)))
16 |
17 | intercept[IllegalStateException] {
18 | ctx.run(query[DecodeNullTestEntity])
19 | }
20 | }
21 | }
22 |
23 | case class DecodeNullTestEntity(id: Int, value: Int)
24 |
25 | case class DecodeNullTestWriteEntity(id: Int, value: Option[Int])
26 |
27 | val insertValue = DecodeNullTestWriteEntity(0, None)
28 | }
29 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppManual.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.examples.other
2 |
3 | import io.getquill._
4 | import io.getquill.util.LoadConfig
5 | import zio.{ZIOAppDefault, ZLayer}
6 | import zio.Console.printLine
7 |
8 | import javax.sql.DataSource
9 |
10 | object ZioAppManual extends ZIOAppDefault {
11 |
12 | object MyPostgresContext extends PostgresZioJdbcContext(Literal)
13 | import MyPostgresContext._
14 |
15 | case class Person(name: String, age: Int)
16 | lazy val ds: DataSource = JdbcContextConfig(LoadConfig("testPostgresDB")).dataSource
17 |
18 | override def run = {
19 | val people = quote {
20 | query[Person].filter(p => p.name == "Alex")
21 | }
22 | MyPostgresContext
23 | .run(people)
24 | .tap(result => printLine(result.toString))
25 | .provide(ZLayer.succeed(ds))
26 | .exitCode
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/quill-orientdb/src/main/scala/io/getquill/context/orientdb/encoding/CollectionDecoders.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.orientdb.encoding
2 |
3 | import io.getquill.context.orientdb.OrientDBSessionContext
4 | import scala.jdk.CollectionConverters._
5 | import com.orientechnologies.orient.core.db.record.OTrackedSet
6 |
7 | trait CollectionDecoders {
8 | this: OrientDBSessionContext[_] =>
9 |
10 | implicit def listDecoder[T]: Decoder[List[T]] =
11 | decoder((index, row, session) => row.field[java.util.List[T]](row.fieldNames()(index)).asScala.toList)
12 | implicit def setDecoder[T]: Decoder[Set[T]] =
13 | decoder((index, row, session) => row.field[OTrackedSet[T]](row.fieldNames()(index)).asScala.toSet)
14 | implicit def mapDecoder[K, V]: Decoder[Map[K, V]] =
15 | decoder((index, row, session) => row.field[java.util.Map[K, V]](row.fieldNames()(index)).asScala.toMap[K, V])
16 | }
17 |
--------------------------------------------------------------------------------
/quill-sql-test/src/test/scala/io/getquill/context/sql/idiom/OffsetWithoutLimitWorkaroundSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql.idiom
2 |
3 | import io.getquill.Literal
4 | import io.getquill.SqlMirrorContext
5 | import io.getquill.TestEntities
6 | import io.getquill.MySQLDialect
7 | import io.getquill.base.Spec
8 | import scala.util.Try
9 |
10 | class OffsetWithoutLimitWorkaroundSpec extends Spec {
11 |
12 | val ctx = new SqlMirrorContext(MySQLDialect, Literal) with TestEntities {
13 | override def probe(statement: String) =
14 | Try {
15 | statement mustEqual
16 | "PREPARE p603247403 FROM 'SELECT x.s, x.i, x.l, x.o FROM TestEntity x LIMIT 18446744073709551610 OFFSET 1'"
17 | }
18 | }
19 | import ctx._
20 |
21 | "creates a synthetic limit" in {
22 | val q = quote {
23 | qr1.drop(1)
24 | }
25 | ctx.run(q)
26 | ()
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/ModelMacro.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import scala.language.experimental.macros
4 | import io.getquill.dsl.QueryDslMacro
5 | import io.getquill.quotation.NonQuotedException
6 |
7 | sealed trait EntityQuery[T] extends EntityQueryModel[T] {
8 |
9 | override def withFilter(f: T => Boolean): EntityQuery[T] = NonQuotedException()
10 | override def filter(f: T => Boolean): EntityQuery[T] = NonQuotedException()
11 | override def map[R](f: T => R): EntityQuery[R] = NonQuotedException()
12 |
13 | def insertValue(value: T): Insert[T] = macro QueryDslMacro.expandInsert[T]
14 | def insert(f: (T => (Any, Any)), f2: (T => (Any, Any))*): Insert[T]
15 |
16 | def updateValue(value: T): Update[T] = macro QueryDslMacro.expandUpdate[T]
17 | def update(f: (T => (Any, Any)), f2: (T => (Any, Any))*): Update[T]
18 |
19 | def delete: Delete[T]
20 | }
21 |
--------------------------------------------------------------------------------
/quill-cassandra/src/test/scala/io/getquill/context/cassandra/ExpandMappedInfixSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra
2 |
3 | import io.getquill._
4 | import io.getquill.ast.Ast
5 | import io.getquill.base.Spec
6 |
7 | class ExpandMappedInfixSpec extends Spec {
8 |
9 | import mirrorContext._
10 |
11 | "removes identity map" in {
12 | val i = quote {
13 | sql"test".as[Query[Int]]
14 | }
15 | val q = quote {
16 | i.map(x => x)
17 | }
18 | ExpandMappedInfixCassandra(q.ast: Ast) mustEqual i.ast
19 | }
20 |
21 | "expands mapped infix wrapping single query" in {
22 | val q = quote {
23 | sql"$qr1 ALLOW FILTERING".as[Query[TestEntity]].map(t => t.i)
24 | }
25 | val n = quote {
26 | sql"${qr1.map(t => t.i)} ALLOW FILTERING".as[Query[TestEntity]]
27 | }
28 | ExpandMappedInfixCassandra(q.ast: Ast) mustEqual n.ast
29 | }
30 |
31 | }
32 |
--------------------------------------------------------------------------------
/quill-codegen-jdbc/src/test/resources/schema_twotable.sql:
--------------------------------------------------------------------------------
1 | create table Alpha_Person (
2 | id int primary key,
3 | firstName varchar(255),
4 | lastName varchar(255),
5 | age int not null
6 | );
7 |
8 | create table Bravo_Person (
9 | id int primary key,
10 | firstName varchar(255),
11 | lastName varchar(255),
12 | age int not null
13 | );
14 |
15 | create table Address (
16 | personFk int not null,
17 | street varchar(255),
18 | zip int
19 | );
20 |
21 | insert into Alpha_Person values (1, 'Joe', 'Bloggs', 22);
22 | insert into Alpha_Person values (2, 'Jack', 'Ripper', 33);
23 |
24 | insert into Bravo_Person values (1, 'George', 'Oleaf', 22);
25 | insert into Bravo_Person values (2, 'Greg', 'Raynor', 33);
26 |
27 | insert into Address values (1, '123 Someplace', 1001);
28 | insert into Address values (1, '678 Blah', 2002);
29 | insert into Address values (2, '111234 Some Other Place', 3333);
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/idiom/LoadNaming.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.idiom
2 |
3 | import scala.reflect.macros.whitebox.Context
4 | import scala.util.Try
5 |
6 | import io.getquill.NamingStrategy
7 | import io.getquill.util.CollectTry
8 | import io.getquill.util.LoadObject
9 | import io.getquill.CompositeNamingStrategy
10 |
11 | object LoadNaming {
12 |
13 | def static(c: Context)(tpe: c.Type): Try[NamingStrategy] =
14 | CollectTry {
15 | strategies(c)(tpe).map(LoadObject[NamingStrategy](c)(_))
16 | }.map(NamingStrategy(_))
17 |
18 | private def strategies(c: Context)(tpe: c.Type) =
19 | tpe <:< c.typeOf[CompositeNamingStrategy] match {
20 | case true =>
21 | tpe.typeArgs
22 | .filterNot(_ =:= c.weakTypeOf[NamingStrategy])
23 | .filterNot(_ =:= c.weakTypeOf[scala.Nothing])
24 | case false =>
25 | List(tpe)
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/quill-jdbc-test-h2/src/test/scala/io/getquill/context/jdbc/h2/JdbcEncodingSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.h2
2 |
3 | import io.getquill.context.sql.EncodingSpec
4 |
5 | import java.time.ZoneId
6 |
7 | class JdbcEncodingSpec extends EncodingSpec {
8 |
9 | val context = testContext
10 | import testContext._
11 |
12 | "encodes and decodes types" in {
13 | testContext.run(delete)
14 | testContext.run(liftQuery(insertValues).foreach(p => insert(p)))
15 | verify(testContext.run(query[EncodingTestEntity]))
16 | }
17 |
18 | "Encode/Decode Other Time Types" in {
19 | context.run(query[TimeEntity].delete)
20 | val zid = ZoneId.systemDefault()
21 | val timeEntity = TimeEntity.make(zid)
22 | context.run(query[TimeEntity].insertValue(lift(timeEntity)))
23 | val actual = context.run(query[TimeEntity]).head
24 | timeEntity mustEqual actual
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/quill-cassandra/src/test/resources/application.conf:
--------------------------------------------------------------------------------
1 | testSyncDB {
2 | preparedStatementCacheSize=1000
3 | keyspace=quill_test
4 | session.queryOptions.fetchSize=1
5 |
6 | session {
7 | basic.contact-points = [ ${?CASSANDRA_CONTACT_POINT_0}, ${?CASSANDRA_CONTACT_POINT_1} ]
8 | basic.load-balancing-policy.local-datacenter = ${?CASSANDRA_DC}
9 | basic.request.consistency = LOCAL_QUORUM
10 | }
11 |
12 | }
13 |
14 | testAsyncDB {
15 | preparedStatementCacheSize=1000
16 | keyspace=quill_test
17 | session.queryOptions.fetchSize=1
18 |
19 |
20 | session {
21 | basic.contact-points = [ ${?CASSANDRA_CONTACT_POINT_0}, ${?CASSANDRA_CONTACT_POINT_1} ]
22 | basic.load-balancing-policy.local-datacenter = ${?CASSANDRA_DC}
23 | basic.request.consistency = LOCAL_QUORUM
24 | }
25 |
26 | }
--------------------------------------------------------------------------------
/quill-codegen/src/main/scala/io/getquill/codegen/model/StereotypedModel.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.codegen.model
2 |
3 | import scala.reflect.ClassTag
4 |
 5 | /**
 6 |  * Represents a top-level entity to be processed by the code generator. A table
 7 |  * is considered properly 'stereotyped' if it is either the only table with a
 8 |  * given name, or it has been combined with all other identically named tables
 9 |  * (in the same schema) that we wish to combine it with.
10 |  */
11 | case class TableStereotype[TableMeta, ColumnMeta](
12 | table: TableFusion[TableMeta],
13 | columns: Seq[ColumnFusion[ColumnMeta]]
14 | )
15 |
16 | case class TableFusion[TableMeta](
17 | namespace: String,
18 | name: String,
19 | meta: Seq[TableMeta]
20 | )
21 |
22 | case class ColumnFusion[ColumnMeta](
23 | name: String,
24 | dataType: ClassTag[_],
25 | nullable: Boolean,
26 | meta: Seq[ColumnMeta]
27 | )
28 |
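A hypothetical sketch of fusing two identically named tables into one stereotype; `PersonMeta` and `ColMeta` are made-up stand-ins for the real JDBC metadata types the generator works with.

    import scala.reflect.classTag
    import io.getquill.codegen.model.{ColumnFusion, TableFusion, TableStereotype}

    case class PersonMeta(schema: String)
    case class ColMeta(sqlType: Int)

    // Two schemas both define a "Person" table; their metadata is carried side by side.
    val fused =
      TableStereotype(
        TableFusion("public", "Person", Seq(PersonMeta("alpha"), PersonMeta("bravo"))),
        Seq(
          ColumnFusion("id",        classTag[Int],    nullable = false, Seq(ColMeta(4))),
          ColumnFusion("firstName", classTag[String], nullable = true,  Seq(ColMeta(12)))
        )
      )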
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/monad/IOMonadMacro.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.monad
2 |
3 | import scala.reflect.macros.blackbox.{Context => MacroContext}
4 | import scala.concurrent.ExecutionContext
5 |
6 | class IOMonadMacro(val c: MacroContext) {
7 | import c.universe._
8 |
9 | def runIO(quoted: Tree): Tree =
10 | q"${c.prefix}.Run(() => ${c.prefix}.run($quoted))"
11 |
12 | def runIOEC(quoted: Tree): Tree = {
13 | // make sure we're shadowing the current ec implicit
14 | val ecName =
15 | c.inferImplicitValue(c.weakTypeOf[ExecutionContext]) match {
16 | case Select(_, name) =>
17 | name.decodedName.toString
18 | case tree =>
19 | tree.symbol.name.decodedName.toString
20 | }
21 |
22 | val v = q"implicit val ${TermName(ecName)}: scala.concurrent.ExecutionContext"
23 | q"${c.prefix}.Run($v => ${c.prefix}.run($quoted))"
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/quat/QuatNestingHelper.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.quat
2 |
3 | import io.getquill.ast.{Ast, Ident, Property}
4 |
5 | object QuatNestingHelper {
6 | def valueQuat(quat: Quat): Quat =
7 | quat match {
8 | case Quat.BooleanExpression => Quat.BooleanValue
9 | case p @ Quat.Product(fields) => Quat.Product(p.name, fields.toList.map { case (k, v) => (k, valueQuat(v)) })
10 | case other => other
11 | }
12 |
13 | def valuefyQuatInProperty(ast: Ast): Ast =
14 | ast match {
15 | case Property(id: Ident, name) =>
16 | val newQuat = valueQuat(id.quat) // Force quat value recomputation for better performance
17 | Property(id.copy(quat = newQuat), name)
18 | case Property(prop: Property, name) =>
19 | Property(valuefyQuatInProperty(prop), name)
20 | case other =>
21 | other
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/quill-jdbc/src/main/scala/io/getquill/H2JdbcContext.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import java.io.Closeable
4 |
5 | import javax.sql.DataSource
6 | import com.typesafe.config.Config
7 | import io.getquill.context.jdbc.{JdbcContext, H2JdbcContextBase}
8 | import io.getquill.util.LoadConfig
9 |
10 | class H2JdbcContext[+N <: NamingStrategy](val naming: N, dataSourceInput: => DataSource with Closeable)
11 | extends JdbcContext[H2Dialect, N]
12 | with H2JdbcContextBase[H2Dialect, N] {
13 | override val idiom: H2Dialect = H2Dialect
14 | override lazy val dataSource: DataSource with Closeable = dataSourceInput
15 |
16 | def this(naming: N, config: JdbcContextConfig) = this(naming, config.dataSource)
17 | def this(naming: N, config: Config) = this(naming, JdbcContextConfig(config))
18 | def this(naming: N, configPrefix: String) = this(naming, LoadConfig(configPrefix))
19 | }
20 |
--------------------------------------------------------------------------------
/quill-codegen-jdbc/src/test/resources/schema_snakecase_twotable.sql:
--------------------------------------------------------------------------------
1 | create table Alpha_Person (
2 | id int primary key,
3 | first_name varchar(255),
4 | last_name varchar(255),
5 | age int not null
6 | );
7 |
8 | create table Bravo_Person (
9 | id int primary key,
10 | first_name varchar(255),
11 | last_name varchar(255),
12 | age int not null
13 | );
14 |
15 | create table Address (
16 | person_fk int not null,
17 | street varchar(255),
18 | zip int
19 | );
20 |
21 | insert into Alpha_Person values (1, 'Joe', 'Bloggs', 22);
22 | insert into Alpha_Person values (2, 'Jack', 'Ripper', 33);
23 |
24 | insert into Bravo_Person values (1, 'George', 'Oleaf', 22);
25 | insert into Bravo_Person values (2, 'Greg', 'Raynor', 33);
26 |
27 | insert into Address values (1, '123 Someplace', 1001);
28 | insert into Address values (1, '678 Blah', 2002);
29 | insert into Address values (2, '111234 Some Other Place', 3333);
--------------------------------------------------------------------------------
/quill-cassandra/src/test/scala/io/getquill/context/cassandra/udt/UdtMetaDslSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra.udt
2 |
3 | import io.getquill.context.cassandra.mirrorContext._
4 |
5 | class UdtMetaDslSpec extends UdtSpec {
6 | "name" in {
7 | udtMeta[Name]("my_name").name mustBe "my_name"
8 |
9 | // allows dynamic renaming
10 | val x: String = 123.toString
11 | udtMeta[Name](x).name mustBe x
12 | }
13 |
14 | "keyspace" in {
15 | udtMeta[Name]("ks.name").keyspace mustBe Some("ks")
16 | udtMeta[Name]("name").keyspace mustBe None
17 | intercept[IllegalStateException] {
18 | udtMeta[Name]("ks.name.name")
19 | }.getMessage mustBe "Cannot parse udt path `ks.name.name`"
20 | }
21 |
22 | "alias" in {
23 | val meta = udtMeta[Name]("name", _.lastName -> "last")
24 | meta.alias("firstName") mustBe None
25 | meta.alias("lastName") mustBe Some("last")
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/quill-util/src/main/scala/io/getquill/util/ScalafmtFormat.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import io.getquill.util.ThrowableOps._
4 | import org.scalafmt.config.ScalafmtConfig
5 | import org.scalafmt.{Formatted, Scalafmt}
6 |
7 | /**
8 | * Based on ScalaFmt210 from scalafmt cli
9 | */
10 | object ScalafmtFormat {
11 | def apply(code: String, showErrorTrace: Boolean = false): String = {
12 | val style = ScalafmtConfig.default
13 | Scalafmt.format(code, style, Set.empty, "") match {
14 | case Formatted.Success(formattedCode) =>
15 | formattedCode
16 | case Formatted.Failure(e) =>
17 | if (showErrorTrace)
18 | println(
19 | s"""===== Failed to format the code ====
20 | |$code
21 | |---
22 | |${e.stackTraceToString}.
23 | |""".stripMargin
24 | )
25 | code
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/quill-jdbc-test-sqlite/src/test/scala/io/getquill/context/jdbc/sqlite/JdbcEncodingSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.sqlite
2 |
3 | import io.getquill.context.sql.EncodingSpec
4 |
5 | import java.time.ZoneId
6 |
7 | class JdbcEncodingSpec extends EncodingSpec {
8 |
9 | val context = testContext
10 | import testContext._
11 |
12 | "encodes and decodes types" in {
13 | testContext.run(delete)
14 | testContext.run(liftQuery(insertValues).foreach(e => insert(e)))
15 | verify(testContext.run(query[EncodingTestEntity]))
16 | ()
17 | }
18 |
19 | "Encode/Decode Other Time Types" in {
20 | context.run(query[TimeEntity].delete)
21 | val zid = ZoneId.systemDefault()
22 | val timeEntity = TimeEntity.make(zid)
23 | context.run(query[TimeEntity].insertValue(lift(timeEntity)))
24 | val actual = context.run(query[TimeEntity]).head
25 | timeEntity mustEqual actual
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/quill-jdbc/src/main/scala/io/getquill/MysqlJdbcContext.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import java.io.Closeable
4 |
5 | import javax.sql.DataSource
6 | import com.typesafe.config.Config
7 | import io.getquill.context.jdbc.{JdbcContext, MysqlJdbcContextBase}
8 | import io.getquill.util.LoadConfig
9 |
10 | class MysqlJdbcContext[+N <: NamingStrategy](val naming: N, dataSourceInput: => DataSource with Closeable)
11 | extends JdbcContext[MySQLDialect, N]
12 | with MysqlJdbcContextBase[MySQLDialect, N] {
13 | override val idiom: MySQLDialect = MySQLDialect
14 | override lazy val dataSource: DataSource with Closeable = dataSourceInput
15 |
16 | def this(naming: N, config: JdbcContextConfig) = this(naming, config.dataSource)
17 | def this(naming: N, config: Config) = this(naming, JdbcContextConfig(config))
18 | def this(naming: N, configPrefix: String) = this(naming, LoadConfig(configPrefix))
19 | }
20 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/context/RowContext.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context
2 |
3 | import io.getquill.ReturnAction
4 | import io.getquill.ast.ScalarLift
5 |
6 | trait RowContext {
7 | type PrepareRow
8 | type ResultRow
9 |
10 | protected val identityPrepare: Prepare = (p: PrepareRow, _: Session) => (Nil, p)
11 | private val _identityExtractor: Extractor[Any] = (rr: ResultRow, _: Session) => rr
12 | protected def identityExtractor[T]: Extractor[T] = _identityExtractor.asInstanceOf[Extractor[T]]
13 |
14 | case class BatchGroup(string: String, prepare: List[Prepare], liftings: List[List[ScalarLift]])
15 | case class BatchGroupReturning(string: String, returningBehavior: ReturnAction, prepare: List[Prepare], liftings: List[List[ScalarLift]])
16 |
17 | type Prepare = (PrepareRow, Session) => (List[Any], PrepareRow)
18 | type Extractor[T] = (ResultRow, Session) => T
19 | type Session
20 | }
21 |
--------------------------------------------------------------------------------
/quill-jdbc/src/main/scala/io/getquill/OracleJdbcContext.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import java.io.Closeable
4 |
5 | import com.typesafe.config.Config
6 | import io.getquill.context.jdbc.{JdbcContext, OracleJdbcContextBase}
7 | import io.getquill.util.LoadConfig
8 | import javax.sql.DataSource
9 |
10 | class OracleJdbcContext[+N <: NamingStrategy](val naming: N, dataSourceInput: => DataSource with Closeable)
11 | extends JdbcContext[OracleDialect, N]
12 | with OracleJdbcContextBase[OracleDialect, N] {
13 | override val idiom: OracleDialect = OracleDialect
14 | override lazy val dataSource: DataSource with Closeable = dataSourceInput
15 |
16 | def this(naming: N, config: JdbcContextConfig) = this(naming, config.dataSource)
17 | def this(naming: N, config: Config) = this(naming, JdbcContextConfig(config))
18 | def this(naming: N, configPrefix: String) = this(naming, LoadConfig(configPrefix))
19 | }
20 |
--------------------------------------------------------------------------------
/quill-jdbc/src/main/scala/io/getquill/SqliteJdbcContext.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import java.io.Closeable
4 |
5 | import javax.sql.DataSource
6 | import com.typesafe.config.Config
7 | import io.getquill.context.jdbc.{JdbcContext, SqliteJdbcContextBase}
8 | import io.getquill.util.LoadConfig
9 |
10 | class SqliteJdbcContext[+N <: NamingStrategy](val naming: N, dataSourceInput: => DataSource with Closeable)
11 | extends JdbcContext[SqliteDialect, N]
12 | with SqliteJdbcContextBase[SqliteDialect, N] {
13 | override val idiom: SqliteDialect = SqliteDialect
14 | override lazy val dataSource: DataSource with Closeable = dataSourceInput
15 |
16 | def this(naming: N, config: JdbcContextConfig) = this(naming, config.dataSource)
17 | def this(naming: N, config: Config) = this(naming, JdbcContextConfig(config))
18 | def this(naming: N, configPrefix: String) = this(naming, LoadConfig(configPrefix))
19 | }
20 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/context/ContextVerbPrepare.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context
2 |
3 | import io.getquill.{Action, BatchAction, Query, Quoted}
4 | import io.getquill.dsl.CoreDsl
5 | import scala.language.experimental.macros
6 | import scala.language.higherKinds
7 |
8 | trait ContextVerbPrepare extends CoreDsl {
9 | type Result[T]
10 | type Session
11 |
12 | type PrepareQueryResult // Usually: Session => Result[PrepareRow]
13 | type PrepareActionResult // Usually: Session => Result[PrepareRow]
14 | type PrepareBatchActionResult // Usually: Session => Result[List[PrepareRow]]
15 |
16 | def prepare[T](quoted: Quoted[Query[T]]): PrepareQueryResult = macro QueryMacro.prepareQuery[T]
17 | def prepare(quoted: Quoted[Action[_]]): PrepareActionResult = macro ActionMacro.prepareAction
18 | def prepare(quoted: Quoted[BatchAction[Action[_]]]): PrepareBatchActionResult = macro ActionMacro.prepareBatchAction
19 | }
20 |
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/norm/capture/AvoidCaptureSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.norm.capture
2 |
3 | import io.getquill.base.Spec
4 | import io.getquill.MirrorContexts.testContext.qr1
5 | import io.getquill.MirrorContexts.testContext.qr2
6 | import io.getquill.MirrorContexts.testContext.qr3
7 | import io.getquill.MirrorContexts.testContext.quote
8 | import io.getquill.MirrorContexts.testContext.unquote
9 | import io.getquill.util.TraceConfig
10 |
11 | class AvoidCaptureSpec extends Spec {
12 |
13 | "avoids capture of entities for normalization" in {
14 | val q = quote {
15 | qr1.filter(u => u.s == "s1").flatMap(b => qr2.filter(u => u.s == "s1")).flatMap(c => qr3.map(u => u.s))
16 | }
17 | val n = quote {
18 | qr1.filter(u => u.s == "s1").flatMap(u => qr2.filter(u1 => u1.s == "s1")).flatMap(u1 => qr3.map(u2 => u2.s))
19 | }
20 | AvoidCapture(q.ast, TraceConfig(List.empty)) mustEqual n.ast
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/PlainApp.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.examples.other
2 |
3 | import io.getquill.jdbczio.Quill
4 | import io.getquill.{Literal, PostgresZioJdbcContext}
5 | import zio.{Runtime, Unsafe}
6 |
7 | object PlainApp {
8 |
9 | object MyPostgresContext extends PostgresZioJdbcContext(Literal)
10 | import MyPostgresContext._
11 |
12 | case class Person(name: String, age: Int)
13 |
14 | val zioDS = Quill.DataSource.fromPrefix("testPostgresDB")
15 |
16 | def main(args: Array[String]): Unit = {
17 | val people = quote {
18 | query[Person].filter(p => p.name == "Alex")
19 | }
20 | val qzio =
21 | MyPostgresContext
22 | .run(people)
23 | .tap(result => zio.ZIO.attempt(println(result.toString)))
24 | .provideLayer(zioDS)
25 |
26 | Unsafe.unsafe { implicit u =>
27 | Runtime.default.unsafe.run(qzio).getOrThrow()
28 | }
29 | ()
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/quill-jdbc/src/main/scala/io/getquill/PostgresJdbcContext.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import java.io.Closeable
4 |
5 | import javax.sql.DataSource
6 | import com.typesafe.config.Config
7 | import io.getquill.context.jdbc.{JdbcContext, PostgresJdbcContextBase}
8 | import io.getquill.util.LoadConfig
9 |
10 | class PostgresJdbcContext[+N <: NamingStrategy](val naming: N, dataSourceInput: => DataSource with Closeable)
11 | extends JdbcContext[PostgresDialect, N]
12 | with PostgresJdbcContextBase[PostgresDialect, N] {
13 | override val idiom: PostgresDialect = PostgresDialect
14 | override lazy val dataSource: DataSource with Closeable = dataSourceInput
15 |
16 | def this(naming: N, config: JdbcContextConfig) = this(naming, config.dataSource)
17 | def this(naming: N, config: Config) = this(naming, JdbcContextConfig(config))
18 | def this(naming: N, configPrefix: String) = this(naming, LoadConfig(configPrefix))
19 | }
20 |
--------------------------------------------------------------------------------
/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/PlainApp.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra.zio.examples.other
2 |
3 | import io.getquill.{CassandraZioContext, _}
4 | import zio.{Runtime, Unsafe}
5 | import zio.Console.printLine
6 |
7 | object PlainApp {
8 |
 9 |   object MyZioCassandraContext extends CassandraZioContext(Literal)
10 |   import MyZioCassandraContext._
11 |
12 | case class Person(name: String, age: Int)
13 |
14 | val zioSession =
15 | CassandraZioSession.fromPrefix("testStreamDB")
16 |
17 | def main(args: Array[String]): Unit = {
18 | val people = quote {
19 | query[Person]
20 | }
21 | val czio =
22 |       MyZioCassandraContext
23 | .run(people)
24 | .tap(result => printLine(result.toString))
25 | .provide(zioSession)
26 |
27 | Unsafe.unsafe { implicit u =>
28 | Runtime.default.unsafe.run(czio).getOrThrow()
29 | }
30 | ()
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/quill-codegen-jdbc/src/main/scala/io/getquill/codegen/jdbc/util/DiscoverDatabaseType.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.codegen.jdbc.util
2 |
3 | import io.getquill.codegen.jdbc.DatabaseTypes.DatabaseType
4 | import io.getquill.codegen.jdbc.model.JdbcTypes.JdbcConnectionMaker
5 | import io.getquill.util.Using.Manager
6 |
7 | import scala.util.{Failure, Success}
8 |
9 | object DiscoverDatabaseType {
10 | def apply(
11 | connectionMaker: JdbcConnectionMaker
12 | ): DatabaseType = {
13 | val tryProductName = Manager { use =>
14 | val conn = use(connectionMaker())
15 | val meta = conn.getMetaData
16 | meta.getDatabaseProductName
17 | }
18 |
19 | tryProductName.flatMap { productName =>
20 | DatabaseType.fromProductName(productName)
21 | } match {
22 | case Success(value) => value
23 | case Failure(e) => throw new IllegalArgumentException("Could not parse database product name.", e)
24 | }
25 | }
26 |
27 | }
28 |
--------------------------------------------------------------------------------
/quill-jdbc/src/main/scala/io/getquill/SqlServerJdbcContext.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import java.io.Closeable
4 |
5 | import javax.sql.DataSource
6 | import com.typesafe.config.Config
7 | import io.getquill.context.jdbc.{JdbcContext, SqlServerJdbcContextBase}
8 | import io.getquill.util.LoadConfig
9 |
10 | class SqlServerJdbcContext[+N <: NamingStrategy](val naming: N, dataSourceInput: => DataSource with Closeable)
11 | extends JdbcContext[SQLServerDialect, N]
12 | with SqlServerJdbcContextBase[SQLServerDialect, N] {
13 | override val idiom: SQLServerDialect = SQLServerDialect
14 | override lazy val dataSource: DataSource with Closeable = dataSourceInput
15 |
16 | def this(naming: N, config: JdbcContextConfig) = this(naming, config.dataSource)
17 | def this(naming: N, config: Config) = this(naming, JdbcContextConfig(config))
18 | def this(naming: N, configPrefix: String) = this(naming, LoadConfig(configPrefix))
19 | }
20 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/misc/PeopleZioOuterJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.misc
2 |
3 | import io.getquill.base.Spec
4 | import io.getquill.{Literal, PostgresZioJdbcContext}
5 | import zio.{Runtime, Unsafe, ZEnvironment}
6 |
7 | class PeopleZioOuterJdbcSpec extends Spec {
8 | val testContext = new PostgresZioJdbcContext(Literal)
9 | import testContext._
10 | case class Person(name: String, age: Int)
11 |
12 | def ds = io.getquill.postgres.pool
13 |
14 | "test query" in {
15 | val q = quote {
16 | query[Person].filter(p => p.name == "Bert")
17 | }
18 | val exec = testContext.run(q).provideEnvironment(ZEnvironment(ds))
19 | println(Unsafe.unsafe { implicit u =>
20 | Runtime.default.unsafe.run(exec).getOrThrow()
21 | })
22 | }
23 |
24 | "test translate" in {
25 | val q = quote {
26 | query[Person].filter(p => p.name == "Bert")
27 | }
28 | println(testContext.translate(q))
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/util/LogToFileSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import io.getquill.base.Spec
4 | import io.getquill.util.Messages.LogToFile
5 | import scala.io.Source
6 |
7 | class LogToFileSpec extends Spec {
8 |
9 | // TODO temporarily ignore this test, will release ZIO2 RC1 without query logging support
10 | "logs a query to file when enabled" ignore {
11 | val queryLogName = "./LogToFileSpecQuery.sql"
12 | val mockLogger = new QueryLogger(LogToFile(queryLogName))
13 |
14 | val mockQuery = "SELECT * from foo_bar where id = ?"
15 |
16 | mockLogger(mockQuery, "io.getquill.util.LogToFileSpec", 15, 5)
17 |
18 | Thread.sleep(1000) // Give the async log a chance to finish up
19 |
20 | val queryFile = Source.fromFile(queryLogName)
21 | val contents = queryFile.mkString.trim
22 | queryFile.close()
23 |
24 | contents must not be empty
25 | contents must endWith(s"""${mockQuery};""")
26 |
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/quill-cassandra/src/main/scala/io/getquill/context/cassandra/UdtMetaDslMacro.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra
2 |
3 | import scala.reflect.macros.blackbox.{Context => MacroContext}
4 |
5 | class UdtMetaDslMacro(val c: MacroContext) {
6 |
7 | import c.universe._
8 |
9 | def udtMeta[T](path: Tree, columns: Tree*)(implicit t: WeakTypeTag[T]): Tree = {
10 | val pairs = columns.map { case q"(($x1) => $pack.Predef.ArrowAssoc[$t]($prop).$arrow[$v](${alias: String}))" =>
11 | q"(${prop.symbol.name.decodedName.toString}, $alias)"
12 | }
13 | c.untypecheck {
14 | q"""
15 | new ${c.prefix}.UdtMeta[$t] {
16 | private[this] val (nm, ks) = io.getquill.context.cassandra.util.UdtMetaUtils.parse($path)
17 | private[this] val map = Map[String, String](..$pairs)
18 | def name = nm
19 | def keyspace = ks
20 | def alias(col: String) = map.get(col)
21 | }
22 | """
23 | }
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/quill-cassandra-pekko/src/test/scala/io/getquill/context/cassandra/pekko/DecodeNullSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra.pekko
2 |
3 | class DecodeNullSpec extends CassandraPekkoSpec {
4 |
5 | "no default values when reading null" in {
6 |
7 | import testDB._
8 | val writeEntities = quote(querySchema[DecodeNullTestWriteEntity]("DecodeNullTestEntity"))
9 |
10 | val result =
11 | for {
12 | _ <- testDB.run(writeEntities.delete)
13 | _ <- testDB.run(writeEntities.insertValue(lift(insertValue)))
14 | result <- testDB.run(query[DecodeNullTestEntity])
15 | } yield {
16 | result
17 | }
18 | intercept[IllegalStateException] {
19 | await {
20 | result
21 | }
22 | }
23 | }
24 |
25 | case class DecodeNullTestEntity(id: Int, value: Int)
26 |
27 | case class DecodeNullTestWriteEntity(id: Int, value: Option[Int])
28 |
29 | val insertValue = DecodeNullTestWriteEntity(0, None)
30 |
31 | }
32 |
--------------------------------------------------------------------------------
/quill-cassandra/src/test/scala/io/getquill/context/cassandra/PeopleCassandraSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra
2 |
3 | import io.getquill._
4 | import io.getquill.base.Spec
5 |
6 | class PeopleCassandraSpec extends Spec {
7 |
8 | import testSyncDB._
9 |
10 | case class Person(id: Int, name: String, age: Int)
11 |
12 | override def beforeAll = {
13 | val entries = List(
14 | Person(1, "Bob", 30),
15 | Person(2, "Gus", 40),
16 | Person(3, "Pet", 20),
17 | Person(4, "Don", 50),
18 | Person(5, "Dre", 60)
19 | )
20 | testSyncDB.run(query[Person].delete)
21 | testSyncDB.run(liftQuery(entries).foreach(e => query[Person].insertValue(e)))
22 | ()
23 | }
24 |
25 | val q = quote { (ids: Query[Int]) =>
26 | query[Person].filter(p => ids.contains(p.id))
27 | }
28 |
29 | "Contains id" - {
30 | "empty" in {
31 | testSyncDB.run(q(liftQuery(Set.empty[Int]))) mustEqual List.empty[Person]
32 | }
33 |
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/norm/OrderTerms.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.norm
2 |
3 | import io.getquill.ast._
4 | import io.getquill.util.TraceConfig
5 |
6 | class OrderTerms(traceConfig: TraceConfig) {
7 |
8 | def unapply(q: Query) =
9 | q match {
10 |
11 | case Take(Map(a: GroupBy, b, c), d) => None
12 |
13 | // a.sortBy(b => c).filter(d => e) =>
14 | // a.filter(d => e).sortBy(b => c)
15 | case Filter(SortBy(a, b, c, d), e, f) =>
16 | Some(SortBy(Filter(a, e, f), b, c, d))
17 |
18 | // a.flatMap(b => c).take(n).map(d => e) =>
19 | // a.flatMap(b => c).map(d => e).take(n)
20 | case Map(Take(fm: FlatMap, n), ma, mb) =>
21 | Some(Take(Map(fm, ma, mb), n))
22 |
23 | // a.flatMap(b => c).drop(n).map(d => e) =>
24 | // a.flatMap(b => c).map(d => e).drop(n)
25 | case Map(Drop(fm: FlatMap, n), ma, mb) =>
26 | Some(Drop(Map(fm, ma, mb), n))
27 |
28 | case other => None
29 | }
30 | }
31 |
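A small collections analogy (not the Quill AST itself) for the first rewrite above: pushing a filter beneath a sortBy leaves the result unchanged, which is why OrderTerms is free to commute them during normalization.

    val people = List(("Dre", 60), ("Bob", 30), ("Gus", 40))

    val sortedThenFiltered = people.sortBy(_._2).filter(_._2 > 35)
    val filteredThenSorted = people.filter(_._2 > 35).sortBy(_._2)

    // Both yield List((Gus,40), (Dre,60))
    assert(sortedThenFiltered == filteredThenSorted)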
--------------------------------------------------------------------------------
/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/QueryResultTypeCassandraZioSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra.zio
2 |
3 | import io.getquill.context.cassandra.QueryResultTypeCassandraSpec
4 |
5 | class QueryResultTypeCassandraZioSpec extends ZioCassandraSpec with QueryResultTypeCassandraSpec {
6 |
7 | val context = testZioDB
8 |
9 | import context._
10 |
11 | override def beforeAll = {
12 | super.beforeAll()
13 | result(context.run(deleteAll))
14 | result(context.run(liftQuery(entries).foreach(e => insert(e))))
15 | ()
16 | }
17 |
18 | "query" in {
19 | result(context.run(selectAll)) mustEqual entries
20 | }
21 |
22 | "stream" in {
23 | result(context.stream(selectAll)) mustEqual entries
24 | }
25 |
26 | "querySingle" - {
27 | "size" in {
28 | result(context.run(entitySize)) mustEqual 3
29 | }
30 | "parametrized size" in {
31 | result(context.run(parametrizedSize(lift(10000)))) mustEqual 0
32 | }
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/quill-codegen-jdbc/src/test/resources/schema_snakecase_twotable_differentcolumns.sql:
--------------------------------------------------------------------------------
1 | create table Alpha_Person (
2 | id int primary key,
3 | first_name varchar(255),
4 | last_name varchar(255),
5 | age int not null,
6 | foo varchar(255)
7 | );
8 |
9 | create table Bravo_Person (
10 | id int primary key,
11 | first_name varchar(255),
12 | bar varchar(255),
13 | last_name varchar(255),
14 | age int not null
15 | );
16 |
17 | create table Address (
18 | person_fk int not null,
19 | street varchar(255),
20 | zip int
21 | );
22 |
23 | insert into Alpha_Person values (1, 'Joe', 'Bloggs', 22, 'blah');
24 | insert into Alpha_Person values (2, 'Jack', 'Ripper', 33, 'blah');
25 |
26 | insert into Bravo_Person values (1, 'George', 'blah', 'Oleaf', 22);
27 | insert into Bravo_Person values (2, 'Greg', 'blah', 'Raynor', 33);
28 |
29 | insert into Address values (1, '123 Someplace', 1001);
30 | insert into Address values (1, '678 Blah', 2002);
31 | insert into Address values (2, '111234 Some Other Place', 3333);
--------------------------------------------------------------------------------
/quill-codegen/src/main/scala/io/getquill/codegen/util/ScalaLangUtil.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.codegen.util
2 |
3 | object ScalaLangUtil {
4 | def escape(str: String) =
5 | if (isKeyword(str)) s"`${str}`" else str
6 |
7 | def isKeyword(word: String) = keywords.contains(word.trim)
8 | private val keywords = Set(
9 | "abstract",
10 | "case",
11 | "catch",
12 | "class",
13 | "def",
14 | "do",
15 | "else",
16 | "extends",
17 | "false",
18 | "final",
19 | "finally",
20 | "for",
21 | "forSome",
22 | "if",
23 | "implicit",
24 | "import",
25 | "lazy",
26 | "match",
27 | "new",
28 | "null",
29 | "object",
30 | "override",
31 | "package",
32 | "private",
33 | "protected",
34 | "return",
35 | "sealed",
36 | "super",
37 | "this",
38 | "throw",
39 | "trait",
40 | "try",
41 | "true",
42 | "type",
43 | "val",
44 | "var",
45 | "while",
46 | "with",
47 | "yield"
48 | )
49 | }
50 |
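A quick illustration of the escaping rule above, using nothing beyond the object itself:

    import io.getquill.codegen.util.ScalaLangUtil.escape

    escape("type") // "`type`" -- a Scala keyword, so it is backtick-quoted
    escape("name") // "name"   -- not a keyword, returned unchanged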
--------------------------------------------------------------------------------
/quill-codegen/src/main/scala/io/getquill/codegen/util/MapExtensions.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.codegen.util
2 |
3 | import scala.collection.immutable.{ListMap, ListSet}
4 |
5 | object MapExtensions {
6 |
7 | implicit final class MapOps[K, V](private val m: Map[K, V]) extends AnyVal {
8 | def zipOnKeys(o: Map[K, V]): Map[K, (Option[V], Option[V])] = zipMapsOnKeys(m, o)
9 | def zipOnKeysOrdered(o: Map[K, V]): ListMap[K, (Option[V], Option[V])] = zipMapsOnKeysOrdered(m, o)
10 | }
11 |
12 | def zipMapsOnKeys[K, V](one: Map[K, V], two: Map[K, V]): Map[K, (Option[V], Option[V])] =
13 | (for (key <- one.keys ++ two.keys)
14 | yield (key, (one.get(key), two.get(key)))).toMap
15 |
16 | def zipMapsOnKeysOrdered[K, V](one: Map[K, V], two: Map[K, V]): ListMap[K, (Option[V], Option[V])] = {
17 | val outList =
18 | (for (key <- (ListSet() ++ one.keys.toSeq.reverse) ++ (ListSet() ++ two.keys.toSeq.reverse))
19 | yield (key, (one.get(key), two.get(key))))
20 | (new ListMap() ++ outList.toSeq.reverse)
21 | }
22 | }
23 |
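A short sketch of what zipOnKeys produces for two maps with partially overlapping keys:

    import io.getquill.codegen.util.MapExtensions._

    val left  = Map("id" -> "Int", "name" -> "String")
    val right = Map("name" -> "Varchar", "age" -> "Int")

    left.zipOnKeys(right)
    // Map(
    //   "id"   -> (Some("Int"),    None),
    //   "name" -> (Some("String"), Some("Varchar")),
    //   "age"  -> (None,           Some("Int"))
    // )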
--------------------------------------------------------------------------------
/quill-cassandra/src/main/scala/io/getquill/context/cassandra/PrepareStatementCache.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra
2 |
3 | import com.datastax.oss.driver.shaded.guava.common.base.Charsets
4 | import com.datastax.oss.driver.shaded.guava.common.cache.CacheBuilder
5 | import com.datastax.oss.driver.shaded.guava.common.hash.Hashing
6 |
7 | import java.util.concurrent.Callable
8 |
9 | class PrepareStatementCache[V <: AnyRef](size: Long) {
10 |
11 | private val cache =
12 | CacheBuilder.newBuilder
13 | .maximumSize(size)
14 | .build[java.lang.Long, V]()
15 |
16 | private val hasher = Hashing.goodFastHash(128)
17 |
18 | def apply(stmt: String)(prepare: String => V): V =
19 | cache.get(
20 | hash(stmt),
21 | new Callable[V] {
22 | override def call: V = prepare(stmt)
23 | }
24 | )
25 |
26 | def invalidate(stmt: String): Unit = cache.invalidate(hash(stmt))
27 |
28 | private def hash(string: String): java.lang.Long =
29 | hasher
30 | .hashString(string, Charsets.UTF_8)
31 | .asLong()
32 |
33 | }
34 |
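A minimal sketch of the cache's call pattern; the CQL string and the `prepare` function here are placeholders rather than a real driver call.

    import io.getquill.context.cassandra.PrepareStatementCache

    val cache = new PrepareStatementCache[String](100L)

    // The statement text is hashed to a Long key; `prepare` only runs on a cache miss.
    val prepared = cache("SELECT * FROM person WHERE id = ?")(cql => s"prepared($cql)")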
--------------------------------------------------------------------------------
/quill-orientdb/src/test/scala/io/getquill/context/orientdb/PeopleOrientDBSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.orientdb
2 |
3 | import io.getquill.Query
4 | import io.getquill.base.Spec
5 |
6 | class PeopleOrientDBSpec extends Spec {
7 |
8 | case class Person(id: Int, name: String, age: Int)
9 |
10 | override protected def beforeAll(): Unit = {
11 | val ctx = orientdb.testSyncDB
12 | import ctx._
13 | val entries = List(
14 | Person(1, "Bob", 30),
15 | Person(2, "Gus", 40),
16 | Person(3, "Pet", 20),
17 | Person(4, "Don", 50),
18 | Person(5, "Dre", 60)
19 | )
20 | ctx.run(query[Person].delete)
21 | ctx.run(liftQuery(entries).foreach(e => query[Person].insertValue(e)))
22 | ()
23 | }
24 |
25 | "Contains id" - {
26 | "empty" in {
27 | val ctx = orientdb.testSyncDB
28 | import ctx._
29 | val q = quote { (ids: Query[Int]) =>
30 | query[Person].filter(p => ids.contains(p.id))
31 | }
32 | ctx.run(q(liftQuery(Set.empty[Int]))) mustEqual List.empty[Person]
33 | }
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtOps.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra.encoding
2 |
3 | import com.datastax.oss.driver.api.core.data.UdtValue
4 |
5 | import scala.jdk.CollectionConverters._
6 |
7 | class UdtValueOps(val udt: UdtValue) extends AnyVal {
8 | def getScalaList[A](name: String, cls: Class[A]) =
9 | udt.getList(name, cls).asScala
10 |
11 | def getScalaSet[A](name: String, cls: Class[A]) =
12 | udt.getSet(name, cls).asScala
13 |
14 | def getScalaMap[K, V](name: String, kcls: Class[K], vcls: Class[V]) =
15 | udt.getMap(name, kcls, vcls).asScala
16 |
17 | def setScalaList[A](name: String, v: Seq[A], cls: Class[A]) =
18 | udt.setList(name, v.asJava, cls)
19 |
20 | def setScalaSet[A](name: String, v: Set[A], cls: Class[A]) =
21 | udt.setSet(name, v.asJava, cls)
22 |
23 | def setScalaMap[K, V](name: String, v: Map[K, V], kcls: Class[K], vcls: Class[V]) =
24 | udt.setMap(name, v.asJava, kcls, vcls)
25 | }
26 |
27 | object UdtValueOps {
28 | def apply(udt: UdtValue) = new UdtValueOps(udt)
29 | }
30 |
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/quotation/IsDynamicSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.quotation
2 |
3 | import io.getquill.ast.Dynamic
4 | import io.getquill.ast.Property
5 | import io.getquill.ast.Renameable.Fixed
6 | import io.getquill.ast.Visibility.Visible
7 | import io.getquill.base.Spec
8 | import io.getquill.MirrorContexts.testContext.qr1
9 | import io.getquill.MirrorContexts.testContext.qrRegular
10 |
11 | class IsDynamicSpec extends Spec {
12 |
13 | "detects if the quotation has dynamic parts" - {
14 | "true" - {
15 | "fully dynamic" in {
16 | IsDynamic(Dynamic(1)) mustEqual true
17 | }
18 | "partially dynamic" in {
19 | IsDynamic(Property(Dynamic(1), "a")) mustEqual true
20 | }
21 | "partially dynamic - fixed" in {
22 | IsDynamic(Property.Opinionated(Dynamic(1), "a", Fixed, Visible)) mustEqual true
23 | }
24 | }
25 | "false" in {
26 | IsDynamic(qr1.ast) mustEqual false
27 | }
28 | "false when using CaseClass" in {
29 | IsDynamic(qrRegular.ast) mustEqual false
30 | }
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/ast/CollectAst.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.ast
2 |
3 | import scala.collection.immutable.Queue
4 | import scala.reflect.ClassTag
5 |
 6 | /**
 7 |  * The collection is treated as immutable internally, but an append-optimized
 8 |  * structure (here a Queue) is more efficient than List at appending, which is
 9 |  * mostly what this collection does.
10 |  */
11 | class CollectAst[T](p: PartialFunction[Ast, T], val state: Queue[T]) extends StatefulTransformer[Queue[T]] {
12 |
13 | override def apply(a: Ast): (Ast, StatefulTransformer[Queue[T]]) =
14 | a match {
15 | case d if (p.isDefinedAt(d)) => (d, new CollectAst(p, state :+ p(d)))
16 | case other => super.apply(other)
17 | }
18 | }
19 |
20 | object CollectAst {
21 |
22 | def byType[T: ClassTag](a: Ast): Queue[T] =
23 | apply[T](a) { case t: T =>
24 | t
25 | }
26 |
27 | def apply[T](a: Ast)(p: PartialFunction[Ast, T]): Queue[T] =
28 | new CollectAst(p, Queue.empty[T]).apply(a) match {
29 | case (_, transformer) =>
30 | transformer.state
31 | }
32 | }
33 |
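A minimal usage sketch, assuming the test-scope MirrorContexts from quill-core:

    import io.getquill.MirrorContexts.testContext._
    import io.getquill.ast.{CollectAst, Ident}

    val q = quote {
      qr1.filter(t => t.i > 10).map(t => t.s)
    }

    // Collects every Ident node of the quoted AST, in traversal order.
    val idents = CollectAst.byType[Ident](q.ast)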
--------------------------------------------------------------------------------
/quill-sql-test/src/test/scala/io/getquill/context/sql/norm/ExpandMappedInfixSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql.norm
2 |
3 | import io.getquill.context.sql.testContext._
4 | import io.getquill.Query
5 | import io.getquill.base.Spec
6 |
7 | class ExpandMappedInfixSpec extends Spec {
8 | "expand infix out of map body if first part is empty" in {
9 | val forUpdate = quote { q: Query[TestEntity] =>
10 | sql"$q FOR UPDATE".as[Query[TestEntity]]
11 | }
12 | val q = quote {
13 | forUpdate(qr1).map(x => x)
14 | }
15 | q.ast.toString mustEqual
16 | s"""sql"$${querySchema("TestEntity")} FOR UPDATE".map(x => x)"""
17 |
18 | ExpandMappedInfix(q.ast).toString mustEqual
19 | s"""sql"$${querySchema("TestEntity").map(x => x)} FOR UPDATE""""
20 | }
21 |
22 | "do not expand other cases" in {
23 | val forUpdate = quote { q: Query[TestEntity] =>
24 | sql"SELECT $q FOR UPDATE".as[Query[TestEntity]]
25 | }
26 | val q = quote {
27 | forUpdate(qr1).map(x => x)
28 | }
29 | ExpandMappedInfix(q.ast) mustEqual q.ast
30 | }
31 |
32 | }
33 |
--------------------------------------------------------------------------------
/quill-sql-test/src/test/scala/io/getquill/context/sql/BindVariablesSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql
2 |
3 | import io.getquill.base.Spec
4 | import io.getquill.context.mirror.Row
5 | import io.getquill.context.sql.testContext._
6 |
7 | class BindVariablesSpec extends Spec {
8 |
9 | "binds values according to the sql terms order" - {
10 | "drop.take" in {
11 | val q =
12 | quote {
13 | query[TestEntity].drop(lift(1)).take(lift(2))
14 | }
15 | val mirror = testContext.run(q)
16 | mirror.string mustEqual "SELECT x.s, x.i, x.l, x.o, x.b FROM TestEntity x LIMIT ? OFFSET ?"
17 | mirror.prepareRow mustEqual Row(2, 1)
18 | }
19 | "drop.take with extra param" in {
20 | val q =
21 | quote {
22 | query[TestEntity].filter(_.i == lift(3)).drop(lift(1)).take(lift(2))
23 | }
24 | val mirror = testContext.run(q)
25 | mirror.string mustEqual "SELECT x1.s, x1.i, x1.l, x1.o, x1.b FROM TestEntity x1 WHERE x1.i = ? LIMIT ? OFFSET ?"
26 | mirror.prepareRow mustEqual Row(3, 2, 1)
27 | }
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/quill-jdbc/src/main/scala/io/getquill/context/jdbc/BaseContexts.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc
2 |
3 | import io.getquill._
4 |
5 | trait PostgresJdbcContextBase[+D <: PostgresDialect, +N <: NamingStrategy]
6 | extends PostgresJdbcTypes[D, N]
7 | with JdbcContextBase[D, N]
8 |
9 | trait H2JdbcContextBase[+D <: H2Dialect, +N <: NamingStrategy] extends H2JdbcTypes[D, N] with JdbcContextBase[D, N]
10 |
11 | trait MysqlJdbcContextBase[+D <: MySQLDialect, +N <: NamingStrategy]
12 | extends MysqlJdbcTypes[D, N]
13 | with JdbcContextBase[D, N]
14 |
15 | trait SqliteJdbcContextBase[+D <: SqliteDialect, +N <: NamingStrategy]
16 | extends SqliteJdbcTypes[D, N]
17 | with SqliteExecuteOverride[D, N]
18 | with JdbcContextBase[D, N]
19 |
20 | trait SqlServerJdbcContextBase[+D <: SQLServerDialect, +N <: NamingStrategy]
21 | extends SqlServerJdbcTypes[D, N]
22 | with SqlServerExecuteOverride[N]
23 | with JdbcContextBase[D, N]
24 |
25 | trait OracleJdbcContextBase[+D <: OracleDialect, +N <: NamingStrategy]
26 | extends OracleJdbcTypes[D, N]
27 | with JdbcContextBase[D, N]
28 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/util/Cache.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import java.io.Closeable
4 | import scala.collection.mutable
5 | import scala.concurrent.duration.Duration
6 |
7 | final class Cache[K, V <: Closeable] {
8 |
9 | private case class Entry(value: Option[V], expiration: Long)
10 |
11 | private val cache = mutable.Map.empty[K, Entry]
12 |
13 | def getOrElseUpdate(key: K, value: => Option[V], ttl: Duration): Option[V] =
14 | synchronized {
15 | val now = System.currentTimeMillis
16 | evict(now)
17 | val expiration = now + ttl.toMillis
18 | cache.get(key) match {
19 | case Some(entry) =>
20 | cache += key -> entry.copy(expiration = expiration)
21 | entry.value
22 | case None =>
23 | val v = value
24 | cache += key -> Entry(v, expiration)
25 | v
26 | }
27 | }
28 |
29 | private def evict(now: Long): Unit =
30 | for ((key, Entry(value, expiration)) <- cache)
31 | if (now > expiration) {
32 | value.foreach(_.close)
33 | cache -= key
34 | }
35 | }
36 |
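A minimal usage sketch under stated assumptions (the Handle class below is hypothetical): entries re-read within the TTL are reused and have their expiration refreshed, while expired entries are closed and evicted on the next access.

import java.io.Closeable
import scala.concurrent.duration._
import io.getquill.util.Cache

object CacheUsageSketch {
  // Hypothetical Closeable resource standing in for e.g. a prepared statement handle.
  final class Handle(val id: Int) extends Closeable {
    def close(): Unit = println(s"closing handle $id")
  }

  def main(args: Array[String]): Unit = {
    val cache = new Cache[String, Handle]
    // First access evaluates the by-name value and stores it.
    val first = cache.getOrElseUpdate("q1", Some(new Handle(1)), 5.minutes)
    // Second access within the TTL returns the cached handle; Handle(2) is never built.
    val second = cache.getOrElseUpdate("q1", Some(new Handle(2)), 5.minutes)
    println(first.map(_.id) == second.map(_.id)) // true
  }
}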
--------------------------------------------------------------------------------
/.scalafmt.conf:
--------------------------------------------------------------------------------
1 | version = "3.9.4"
2 | maxColumn = 240
3 | align.preset = most
4 | align.multiline = false
5 | continuationIndent.defnSite = 2
6 | assumeStandardLibraryStripMargin = true
7 | docstrings.style = Asterisk
8 | docstrings.wrapMaxColumn = 80
9 | lineEndings = preserve
10 | danglingParentheses.preset = true
11 | optIn.annotationNewlines = true
12 | newlines.alwaysBeforeMultilineDef = false
13 | runner.dialect = scala213
14 | rewrite.rules = [RedundantBraces]
15 |
16 | # If I've inserted extra newlines I know what I'm doing, don't wrap them back.
17 | newlines.source = keep
18 |
19 | # Don't change braces in one-liners to parens e.g. don't change this: `test("foo") { assertEquals(x,y) }`
20 | # to this `test("foo")(assertEquals(x,y))`. The `rewrite.rules = [RedundantBraces]` will introduce this behavior
21 | # unless you add the below option.
22 | rewrite.redundantBraces.parensForOneLineApply = false
23 |
24 | project.excludePaths = ["glob:**/scalafix/input/**", "glob:**/scalafix/output/**"]
25 |
26 | rewrite.redundantBraces.generalExpressions = false
27 | rewriteTokens = {
28 | "⇒": "=>"
29 | "→": "->"
30 | "←": "<-"
31 | }
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppImplicitEnv.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.examples.other
2 |
3 | import io.getquill._
4 | import io.getquill.context.qzio.ImplicitSyntax._
5 | import io.getquill.util.LoadConfig
6 | import zio.Console.printLine
7 | import zio.ZIOAppDefault
8 |
9 | import javax.sql.DataSource
10 |
11 | object ZioAppImplicitEnv extends ZIOAppDefault {
12 |
13 | object Ctx extends PostgresZioJdbcContext(Literal)
14 |
15 | case class Person(name: String, age: Int)
16 |
17 | def dataSource = JdbcContextConfig(LoadConfig("testPostgresDB")).dataSource
18 |
19 | case class MyQueryService(ds: DataSource) {
20 | import Ctx._
21 | implicit val env = Implicit(ds)
22 |
23 | val joes = Ctx.run(query[Person].filter(p => p.name == "Joe")).implicitly
24 | val jills = Ctx.run(query[Person].filter(p => p.name == "Jill")).implicitly
25 | val alexes = Ctx.run(query[Person].filter(p => p.name == "Alex")).implicitly
26 | }
27 |
28 | override def run =
29 | MyQueryService(dataSource).joes
30 | .tap(result => printLine(result.toString))
31 | .exitCode
32 | }
33 |
--------------------------------------------------------------------------------
/quill-jdbc-test-mysql/src/test/scala/io/getquill/context/jdbc/mysql/OnConflictJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.mysql
2 |
3 | import io.getquill.context.sql.base.OnConflictSpec
4 |
5 | class OnConflictJdbcSpec extends OnConflictSpec {
6 | val ctx = testContext
7 | import ctx._
8 |
9 | override protected def beforeAll(): Unit = {
10 | ctx.run(qr1.delete)
11 | ()
12 | }
13 |
14 | "INSERT IGNORE" in {
15 | import `onConflictIgnore`._
16 | ctx.run(testQuery1) mustEqual res1
17 | ctx.run(testQuery2) mustEqual res2
18 | ctx.run(testQuery3) mustEqual res3
19 | }
20 |
21 | "ON DUPLICATE KEY UPDATE i=i " in {
22 | import `onConflictIgnore(_.i)`._
23 | ctx.run(testQuery1) mustEqual res1
24 | ctx.run(testQuery2) mustEqual res2 + 1
25 | ctx.run(testQuery3) mustEqual res3
26 | }
27 |
28 | "ON DUPLICATE KEY UPDATE ..." in {
29 | import `onConflictUpdate((t, e) => ...)`._
30 | ctx.run(testQuery(e1)) mustEqual res1
31 | ctx.run(testQuery(e2)) mustEqual res2 + 1
32 | ctx.run(testQuery(e3)) mustEqual res3 + 1
33 | ctx.run(testQuery4) mustEqual res4
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/quill-jdbc-test-postgres/src/test/scala/io/getquill/context/jdbc/postgres/BatchValuesJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.postgres
2 |
3 | import io.getquill.context.sql.base.BatchValuesSpec
4 | import io.getquill._
5 |
6 | class BatchValuesJdbcSpec extends BatchValuesSpec {
7 |
8 | val context = testContext
9 | import testContext._
10 |
11 | override def beforeEach(): Unit = {
12 | testContext.run(sql"TRUNCATE TABLE Product RESTART IDENTITY CASCADE".as[Delete[Product]])
13 | super.beforeEach()
14 | }
15 |
16 | "Ex 1 - Batch Insert Normal" in {
17 | import `Ex 1 - Batch Insert Normal`._
18 | testContext.run(op, batchSize)
19 | testContext.run(get) mustEqual result
20 | }
21 |
22 | "Ex 2 - Batch Insert Returning" in {
23 | import `Ex 2 - Batch Insert Returning`._
24 | val ids = testContext.run(op, batchSize)
25 | ids mustEqual expectedIds
26 | testContext.run(get) mustEqual result
27 | }
28 |
29 | "Ex 3 - Batch Insert Mixed" in {
30 | import `Ex 3 - Batch Insert Mixed`._
31 | testContext.run(op, batchSize)
32 | testContext.run(get) mustEqual result
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/quill-sql-test/src/test/scala/io/getquill/context/sql/ProductSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql
2 |
3 | import io.getquill.Query
4 | import io.getquill.base.Spec
5 |
6 | case class Id(value: Long) extends AnyVal
7 |
8 | trait ProductSpec extends Spec {
9 |
10 | val context: SqlContext[_, _]
11 |
12 | import context._
13 |
14 | case class Product(id: Long, description: String, sku: Long)
15 |
16 | val product = quote {
17 | query[Product]
18 | }
19 |
20 | val productInsert = quote { (p: Product) =>
21 | query[Product].insertValue(p).returningGenerated(_.id)
22 | }
23 |
24 | val productInsertBatch = quote { (b: Query[Product]) =>
25 | b.foreach(p => productInsert.apply(p))
26 | }
27 |
28 | def productById = quote { (id: Long) =>
29 | product.filter(_.id == id)
30 | }
31 |
32 | val productEntries = List(
33 | Product(0L, "Notebook", 1001L),
34 | Product(0L, "Soap", 1002L),
35 | Product(0L, "Pencil", 1003L)
36 | )
37 |
38 | val productSingleInsert = quote {
39 | product.insert(_.id -> 0, _.description -> "Window", _.sku -> 1004L).returningGenerated(_.id)
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/quill-test-kit/src/test/scala/io/getquill/context/sql/ProductSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql
2 |
3 | import io.getquill.Query
4 | import io.getquill.base.Spec
5 |
6 | case class Id(value: Long) extends AnyVal
7 |
8 | trait ProductSpec extends Spec {
9 |
10 | val context: SqlContext[_, _]
11 |
12 | import context._
13 |
14 | case class Product(id: Long, description: String, sku: Long)
15 |
16 | val product = quote {
17 | query[Product]
18 | }
19 |
20 | val productInsert = quote { (p: Product) =>
21 | query[Product].insertValue(p).returningGenerated(_.id)
22 | }
23 |
24 | val productInsertBatch = quote { (b: Query[Product]) =>
25 | b.foreach(p => productInsert.apply(p))
26 | }
27 |
28 | def productById = quote { (id: Long) =>
29 | product.filter(_.id == id)
30 | }
31 |
32 | val productEntries = List(
33 | Product(0L, "Notebook", 1001L),
34 | Product(0L, "Soap", 1002L),
35 | Product(0L, "Pencil", 1003L)
36 | )
37 |
38 | val productSingleInsert = quote {
39 | product.insert(_.id -> 0, _.description -> "Window", _.sku -> 1004L).returningGenerated(_.id)
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/context/mirror/Row.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.mirror
2 |
3 | import scala.reflect.ClassTag
4 |
5 | /**
6 |  * Defines an artificial 'Row' used for the mirror context. Mostly used for
7 | * testing. (Note that this must not be in quill-engine or it will conflict with
8 | * the io.getquill.context.mirror.Row class in ProtoQuill.)
9 | */
10 | case class Row private (data: List[Any]) {
11 |   // Nulls are stored as-is so that they can later be detected via `nullAt`.
12 | def add(value: Any): Row =
13 | value match {
14 | case null => new Row((data :+ null))
15 | case _ => new Row((data :+ value))
16 | }
17 |
18 | def nullAt(index: Int): Boolean = data.apply(index) == null
19 | def apply[T](index: Int)(implicit t: ClassTag[T]): T =
20 | data(index) match {
21 | case v: T => v
22 | case other =>
23 | throw new IllegalStateException(s"Invalid column type. Expected '${t.runtimeClass}', but got '$other'")
24 | }
25 | }
26 |
27 | object Row {
28 | def apply(data: Any*): Row =
29 | data.foldLeft(new Row(List.empty))((r, value) => r.add(value))
30 | }
31 |
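A small usage sketch (editorial): the specs above compare prepareRow against rows built this way, and values are read back by index with a ClassTag check.

import io.getquill.context.mirror.Row

object RowUsageSketch {
  def main(args: Array[String]): Unit = {
    val row = Row("Joe", 123, null) // built through Row.apply, as in BindVariablesSpec
    println(row[String](0)) // Joe
    println(row[Int](1))    // 123
    println(row.nullAt(2))  // true
  }
}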
--------------------------------------------------------------------------------
/quill-jdbc-test-h2/src/test/scala/io/getquill/context/jdbc/h2/DepartmentsJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.h2
2 |
3 | import io.getquill.context.sql.base.DepartmentsSpec
4 |
5 | class DepartmentsJdbcSpec extends DepartmentsSpec {
6 |
7 | val context = testContext
8 | import testContext._
9 |
10 | override def beforeAll = {
11 | testContext.transaction {
12 | testContext.run(query[Department].delete)
13 | testContext.run(query[Employee].delete)
14 | testContext.run(query[Task].delete)
15 |
16 | testContext.run(liftQuery(departmentEntries).foreach(p => departmentInsert(p)))
17 | testContext.run(liftQuery(employeeEntries).foreach(p => employeeInsert(p)))
18 | testContext.run(liftQuery(taskEntries).foreach(p => taskInsert(p)))
19 | }
20 | ()
21 | }
22 |
23 | "Example 8 - nested naive" in {
24 | testContext.run(`Example 8 expertise naive`(lift(`Example 8 param`))) mustEqual `Example 8 expected result`
25 | }
26 |
27 | "Example 9 - nested db" in {
28 | testContext.run(`Example 9 expertise`(lift(`Example 9 param`))) mustEqual `Example 9 expected result`
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/quill-jdbc-test-sqlite/src/test/scala/io/getquill/context/jdbc/sqlite/OnConflictJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.sqlite
2 |
3 | import io.getquill.context.sql.base.OnConflictSpec
4 |
5 | class OnConflictJdbcSpec extends OnConflictSpec {
6 | val ctx = testContext
7 | import ctx._
8 |
9 | override protected def beforeAll(): Unit = {
10 | ctx.run(qr1.delete)
11 | ()
12 | }
13 |
14 | "ON CONFLICT DO NOTHING" in {
15 | import `onConflictIgnore`._
16 | ctx.run(testQuery1) mustEqual res1
17 | ctx.run(testQuery2) mustEqual res2
18 | ctx.run(testQuery3) mustEqual res3
19 | }
20 |
21 | "ON CONFLICT (i) DO NOTHING" in {
22 | import `onConflictIgnore(_.i)`._
23 | ctx.run(testQuery1) mustEqual res1
24 | ctx.run(testQuery2) mustEqual res2
25 | ctx.run(testQuery3) mustEqual res3
26 | }
27 |
28 | "ON CONFLICT (i) DO UPDATE ..." in {
29 | import `onConflictUpdate(_.i)((t, e) => ...)`._
30 | ctx.run(testQuery(e1)) mustEqual res1
31 | ctx.run(testQuery(e2)) mustEqual res2
32 | ctx.run(testQuery(e3)) mustEqual res3
33 | ctx.run(testQuery4) mustEqual res4
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/quill-jdbc-test-mysql/src/test/scala/io/getquill/context/jdbc/mysql/DepartmentsJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.mysql
2 |
3 | import io.getquill.context.sql.base.DepartmentsSpec
4 |
5 | class DepartmentsJdbcSpec extends DepartmentsSpec {
6 |
7 | val context = testContext
8 | import testContext._
9 |
10 | override def beforeAll = {
11 | testContext.transaction {
12 | testContext.run(query[Department].delete)
13 | testContext.run(query[Employee].delete)
14 | testContext.run(query[Task].delete)
15 |
16 | testContext.run(liftQuery(departmentEntries).foreach(p => departmentInsert(p)))
17 | testContext.run(liftQuery(employeeEntries).foreach(p => employeeInsert(p)))
18 | testContext.run(liftQuery(taskEntries).foreach(p => taskInsert(p)))
19 | }
20 | ()
21 | }
22 |
23 | "Example 8 - nested naive" in {
24 | testContext.run(`Example 8 expertise naive`(lift(`Example 8 param`))) mustEqual `Example 8 expected result`
25 | }
26 |
27 | "Example 9 - nested db" in {
28 | testContext.run(`Example 9 expertise`(lift(`Example 9 param`))) mustEqual `Example 9 expected result`
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/quill-cassandra/src/main/scala/io/getquill/context/cassandra/util/FutureConversions.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra.util
2 |
3 | import com.datastax.oss.driver.shaded.guava.common.util.concurrent.ListenableFuture
4 |
5 | import java.util.concurrent.Executor
6 |
7 | import scala.concurrent.{ExecutionContext, Future, Promise}
8 | import scala.util.Try
9 |
10 | object FutureConversions {
11 |
12 | implicit final class ListenableFutureConverter[A](private val lf: ListenableFuture[A]) extends AnyVal {
13 | def asScala(implicit ec: ExecutionContext): Future[A] = {
14 | val promise = Promise[A]()
15 | lf.addListener(
16 | new Runnable {
17 | def run(): Unit = {
18 | promise.complete(Try(lf.get()))
19 | ()
20 | }
21 | },
22 | new Executor {
23 | override def execute(command: Runnable): Unit = ec.execute(command)
24 | }
25 | )
26 | promise.future
27 | }
28 |
29 | def asScalaWithDefaultGlobal: Future[A] = {
30 | import scala.concurrent.ExecutionContext.Implicits.global
31 | asScala(global)
32 | }
33 | }
34 |
35 | }
36 |
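A usage sketch under an explicit assumption: it presumes the driver's shaded guava also exposes SettableFuture next to ListenableFuture, which may not hold for every driver distribution. The point is only that asScala registers a listener that completes a Scala Promise on the given ExecutionContext.

import com.datastax.oss.driver.shaded.guava.common.util.concurrent.SettableFuture
import io.getquill.context.cassandra.util.FutureConversions._
import scala.concurrent.{Await, ExecutionContext}
import scala.concurrent.duration._

object FutureConversionsSketch {
  implicit val ec: ExecutionContext = ExecutionContext.global

  def main(args: Array[String]): Unit = {
    val lf = SettableFuture.create[String]() // assumed available in the shaded package
    val f  = lf.asScala                      // the listener completes the Promise when lf finishes
    lf.set("done")
    println(Await.result(f, 1.second)) // done
  }
}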
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/util/OrderedGroupByExt.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import scala.collection.immutable.ListSet
4 | import scala.collection.mutable
5 | import scala.collection.mutable.{LinkedHashMap => MMap}
6 |
7 | object OrderedGroupByExt {
8 | implicit final class GroupByOrderedImplicitImpl[A](private val t: Iterable[A]) extends AnyVal {
9 | def groupByOrderedUnique[K](f: A => K): Map[K, ListSet[A]] =
10 | groupByGen(ListSet.newBuilder[A])(f)
11 |
12 | def groupByOrdered[K](f: A => K): Map[K, List[A]] =
13 | groupByGen(List.newBuilder[A])(f)
14 |
15 | def groupByGen[K, C[_]](makeBuilder: => mutable.Builder[A, C[A]])(f: A => K): Map[K, C[A]] = {
16 | val map = MMap[K, mutable.Builder[A, C[A]]]()
17 | for (i <- t) {
18 | val key = f(i)
19 | val builder = map.get(key) match {
20 | case Some(existing) => existing
21 | case None =>
22 | val newBuilder = makeBuilder
23 | map(key) = newBuilder
24 | newBuilder
25 | }
26 | builder += i
27 | }
28 | map.mapValues(_.result).toMap
29 | }
30 | }
31 | }
32 |
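A small usage sketch (editorial): within each group the elements keep their encounter order, and the *Unique variant additionally drops duplicates via an insertion-ordered ListSet.

import io.getquill.util.OrderedGroupByExt._

object OrderedGroupBySketch {
  def main(args: Array[String]): Unit = {
    val words = List("banana", "apple", "blueberry", "apple")
    println(words.groupByOrdered(_.head)('a'))       // List(apple, apple)
    println(words.groupByOrderedUnique(_.head)('a')) // ListSet(apple)
  }
}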
--------------------------------------------------------------------------------
/quill-jdbc-test-sqlite/src/test/scala/io/getquill/context/jdbc/sqlite/DepartmentsJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.sqlite
2 |
3 | import io.getquill.context.sql.base.DepartmentsSpec
4 |
5 | class DepartmentsJdbcSpec extends DepartmentsSpec {
6 |
7 | val context = testContext
8 | import testContext._
9 |
10 | override def beforeAll = {
11 | testContext.transaction {
12 | testContext.run(query[Department].delete)
13 | testContext.run(query[Employee].delete)
14 | testContext.run(query[Task].delete)
15 |
16 | testContext.run(liftQuery(departmentEntries).foreach(p => departmentInsert(p)))
17 | testContext.run(liftQuery(employeeEntries).foreach(p => employeeInsert(p)))
18 | testContext.run(liftQuery(taskEntries).foreach(p => taskInsert(p)))
19 | }
20 | ()
21 | }
22 |
23 | "Example 8 - nested naive" in {
24 | testContext.run(`Example 8 expertise naive`(lift(`Example 8 param`))) mustEqual `Example 8 expected result`
25 | }
26 |
27 | "Example 9 - nested db" in {
28 | testContext.run(`Example 9 expertise`(lift(`Example 9 param`))) mustEqual `Example 9 expected result`
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/quill-jdbc-test-postgres/src/test/scala/io/getquill/context/jdbc/postgres/DepartmentsJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.postgres
2 |
3 | import io.getquill.context.sql.base.DepartmentsSpec
4 |
5 | class DepartmentsJdbcSpec extends DepartmentsSpec {
6 |
7 | val context = testContext
8 | import testContext._
9 |
10 | override def beforeAll = {
11 | testContext.transaction {
12 | testContext.run(query[Department].delete)
13 | testContext.run(query[Employee].delete)
14 | testContext.run(query[Task].delete)
15 |
16 | testContext.run(liftQuery(departmentEntries).foreach(p => departmentInsert(p)))
17 | testContext.run(liftQuery(employeeEntries).foreach(p => employeeInsert(p)))
18 | testContext.run(liftQuery(taskEntries).foreach(p => taskInsert(p)))
19 | }
20 | ()
21 | }
22 |
23 | "Example 8 - nested naive" in {
24 | testContext.run(`Example 8 expertise naive`(lift(`Example 8 param`))) mustEqual `Example 8 expected result`
25 | }
26 |
27 | "Example 9 - nested db" in {
28 | testContext.run(`Example 9 expertise`(lift(`Example 9 param`))) mustEqual `Example 9 expected result`
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/quill-jdbc-test-sqlserver/src/test/scala/io/getquill/context/jdbc/sqlserver/DepartmentsJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.sqlserver
2 |
3 | import io.getquill.context.sql.base.DepartmentsSpec
4 |
5 | class DepartmentsJdbcSpec extends DepartmentsSpec {
6 |
7 | val context = testContext
8 | import testContext._
9 |
10 | override def beforeAll = {
11 | testContext.transaction {
12 | testContext.run(query[Department].delete)
13 | testContext.run(query[Employee].delete)
14 | testContext.run(query[Task].delete)
15 |
16 | testContext.run(liftQuery(departmentEntries).foreach(p => departmentInsert(p)))
17 | testContext.run(liftQuery(employeeEntries).foreach(p => employeeInsert(p)))
18 | testContext.run(liftQuery(taskEntries).foreach(p => taskInsert(p)))
19 | }
20 | ()
21 | }
22 |
23 | "Example 8 - nested naive" in {
24 | testContext.run(`Example 8 expertise naive`(lift(`Example 8 param`))) mustEqual `Example 8 expected result`
25 | }
26 |
27 | "Example 9 - nested db" in {
28 | testContext.run(`Example 9 expertise`(lift(`Example 9 param`))) mustEqual `Example 9 expected result`
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/PlainAppDataSource.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.examples.other
2 |
3 | import com.zaxxer.hikari.HikariDataSource
4 | import io.getquill.util.LoadConfig
5 | import io.getquill.jdbczio.Quill
6 | import io.getquill.{JdbcContextConfig, Literal, PostgresZioJdbcContext}
7 | import zio.Console.printLine
8 | import zio.{Runtime, Unsafe}
9 |
10 | object PlainAppDataSource {
11 |
12 | object MyPostgresContext extends PostgresZioJdbcContext(Literal)
13 | import MyPostgresContext._
14 |
15 | case class Person(name: String, age: Int)
16 |
17 | def config = JdbcContextConfig(LoadConfig("testPostgresDB")).dataSource
18 |
19 | val zioDS = Quill.DataSource.fromDataSource(new HikariDataSource(config))
20 |
21 | def main(args: Array[String]): Unit = {
22 | val people = quote {
23 | query[Person].filter(p => p.name == "Alex")
24 | }
25 | val qzio =
26 | MyPostgresContext
27 | .run(people)
28 | .tap(result => printLine(result.toString))
29 | .provide(zioDS)
30 |
31 | Unsafe.unsafe { implicit u =>
32 | Runtime.default.unsafe.run(qzio).getOrThrow()
33 | }
34 | ()
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/dsl/QueryDslMacro.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.dsl
2 |
3 | import io.getquill.util.MacroContextExt._
4 | import scala.reflect.macros.blackbox.{Context => MacroContext}
5 |
6 | class QueryDslMacro(val c: MacroContext) {
7 |
8 | import c.universe._
9 |
10 | def expandEntity[T](implicit t: WeakTypeTag[T]): Tree =
11 | q"${meta[T]("Schema")}.entity"
12 |
13 | def expandInsert[T](value: Tree)(implicit t: WeakTypeTag[T]): Tree =
14 | expandAction(value, "Insert")
15 |
16 | def expandUpdate[T](value: Tree)(implicit t: WeakTypeTag[T]): Tree =
17 | expandAction(value, "Update")
18 |
19 | private def expandAction[T](value: Tree, prefix: String)(implicit t: WeakTypeTag[T]) =
20 | q"${meta(prefix)}.expand(${c.prefix}, $value)"
21 |
22 | private def meta[T](prefix: String)(implicit t: WeakTypeTag[T]): Tree = {
23 | val expanderTpe = c.typecheck(tq"io.getquill.dsl.MetaDsl#${TypeName(s"${prefix}Meta")}[$t]", c.TYPEmode)
24 | c.inferImplicitValue(expanderTpe.tpe, silent = true) match {
25 | case EmptyTree => c.fail(s"Can't find an implicit `${prefix}Meta` for type `${t.tpe}`")
26 | case tree => tree
27 | }
28 | }
29 | }
30 |
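For orientation, a sketch of the implicit metas this macro resolves, using a mirror context like the specs above; the names and the exact SQL string are indicative only. schemaMeta feeds expandEntity, while insertMeta/updateMeta feed expandInsert/expandUpdate.

import io.getquill.{Literal, MirrorSqlDialect, SqlMirrorContext}

object MetaResolutionSketch {
  val ctx = new SqlMirrorContext(MirrorSqlDialect, Literal)
  import ctx._

  case class Person(id: Long, name: String)

  // Resolved by expandEntity when query[Person] is expanded.
  implicit val personSchemaMeta = schemaMeta[Person]("people", _.name -> "full_name")
  // Resolved by expandInsert; excludes the generated id column.
  implicit val personInsertMeta = insertMeta[Person](_.id)

  def main(args: Array[String]): Unit =
    // Roughly: INSERT INTO people (full_name) VALUES (?)
    println(run(query[Person].insertValue(lift(Person(0L, "Joe")))).string)
}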
--------------------------------------------------------------------------------
/quill-jdbc-test-h2/src/test/scala/io/getquill/context/jdbc/h2/BatchValuesJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.h2
2 |
3 | import io.getquill.context.sql.base.BatchValuesSpec
4 | import io.getquill._
5 |
6 | class BatchValuesJdbcSpec extends BatchValuesSpec {
7 |
8 | val context = testContext
9 | import testContext._
10 |
11 | override def beforeEach(): Unit = {
12 | testContext.run(sql"TRUNCATE TABLE Product; ALTER TABLE Product ALTER COLUMN id RESTART WITH 1".as[Delete[Product]])
13 | super.beforeEach()
14 | }
15 |
16 | "Ex 1 - Batch Insert Normal" in {
17 | import `Ex 1 - Batch Insert Normal`._
18 | testContext.run(op, batchSize)
19 | testContext.run(get).toSet mustEqual result.toSet
20 | }
21 |
22 | "Ex 2 - Batch Insert Returning" in {
23 | import `Ex 2 - Batch Insert Returning`._
24 | val ids = testContext.run(op, batchSize)
25 | ids mustEqual productsOriginal.map(_.id)
26 | testContext.run(get) mustEqual productsOriginal
27 | }
28 |
29 | "Ex 3 - Batch Insert Mixed" in {
30 | import `Ex 3 - Batch Insert Mixed`._
31 | testContext.run(op, batchSize)
32 | testContext.run(get).toSet mustEqual result.toSet
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/Quoted.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.ast.Ast
4 |
5 | /**
6 |  * Defines the primary interface by which information in Quill is composed. This
7 |  * includes not only queries but all code fragments. A quotation can be a simple
8 |  * value: {{{ val pi = quote(3.14159) }}} and can be used within another quotation:
9 |  * {{{
10 |  * case class Circle(radius: Float)
11 |  * val areas = quote { query[Circle].map(c => pi * c.radius * c.radius) }
12 |  * }}}
13 |  * Quotations can also contain higher-order functions and inline values:
14 |  * {{{
15 |  * val area  = quote { (c: Circle) => { val r2 = c.radius * c.radius; pi * r2 } }
16 |  * val areas = quote { query[Circle].map(c => area(c)) }
17 |  * }}}
18 |  * Note that this class must not be in quill-engine since it cannot be shared
19 |  * with ProtoQuill, which has a different implementation of Quoted.
20 |  * @see
21 |  *   the "Scala 3.1.1 fails to compile / no method options" issue for more info.
22 |  */
23 | trait Quoted[+T] {
24 | def ast: Ast
25 | override def toString: String = ast.toString
26 | }
27 |
--------------------------------------------------------------------------------
/quill-cassandra/src/test/scala/io/getquill/context/cassandra/udt/UdtEncodingMirrorContextSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra.udt
2 |
3 | import io.getquill.context.cassandra.mirrorContext
4 |
5 | class UdtEncodingMirrorContextSpec extends UdtSpec {
6 |
7 | import mirrorContext._
8 |
9 | "Provide implicit decoders/encoders" - {
10 |
11 | "UDT raw columns" in {
12 | implicitly[Decoder[Name]]
13 | implicitly[Encoder[Name]]
14 | }
15 |
16 | "UDT collections columns" in {
17 | implicitly[Decoder[List[Name]]]
18 | implicitly[Encoder[List[Name]]]
19 |
20 | implicitly[Decoder[Set[Name]]]
21 | implicitly[Encoder[Set[Name]]]
22 |
23 | implicitly[Decoder[Map[String, Name]]]
24 | implicitly[Encoder[Map[Name, String]]]
25 | }
26 | }
27 |
28 | "Encode/decode UDT within entity" in {
29 | case class User(id: Int, name: Name, names: List[Name])
30 | mirrorContext.run(query[User]).string mustBe "SELECT id, name, names FROM User"
31 | mirrorContext
32 | .run(
33 | query[User]
34 | .insertValue(lift(User(1, Name("1", None), Nil)))
35 | )
36 | .string mustBe "INSERT INTO User (id,name,names) VALUES (?, ?, ?)"
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/quill-core/src/main/scala/io/getquill/util/QueryLogger.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.util
2 |
3 | import io.getquill.util.Messages.LogToFile
4 | import zio._
5 | import zio.logging._
6 | import zio.logging.extensions.executeWithLogger
7 |
8 | import java.nio.file.Paths
9 | import java.time.ZonedDateTime
10 | import java.time.format.DateTimeFormatter
11 |
12 | class QueryLogger(logToFile: LogToFile) {
13 |
14 | def apply(queryString: String, sourcePath: String, line: Int, column: Int): Unit =
15 | logToFile match {
16 | case LogToFile.Enabled(logFile) =>
17 | val config =
18 | FileLoggerConfig(
19 | destination = Paths.get(logFile),
20 | format = LogFormat.line,
21 | filter = LogFilter.LogLevelByNameConfig.default
22 | )
23 |
24 | executeWithLogger(config) {
25 | ZIO
26 | .logInfo(
27 | s"""
28 | |-- file: $sourcePath:$line:$column
29 | |-- time: ${ZonedDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME)}
30 | |$queryString;
31 | |""".stripMargin
32 | )
33 | }
34 | case LogToFile.Disabled => // do nothing
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/idiom/Statement.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.idiom
2 |
3 | import io.getquill.ast._
4 |
5 | sealed trait Token extends Product with Serializable
6 | sealed trait TagToken extends Token
7 |
8 | final case class StringToken(string: String) extends Token {
9 | override def toString: String = string
10 | }
11 |
12 | final case class ScalarTagToken(tag: ScalarTag) extends TagToken {
13 | override def toString: String = s"lift(${tag.uid})"
14 | }
15 |
16 | final case class QuotationTagToken(tag: QuotationTag) extends TagToken {
17 | override def toString: String = s"quoted(${tag.uid})"
18 | }
19 |
20 | final case class ScalarLiftToken(lift: ScalarLift) extends Token {
21 | override def toString: String = s"lift(${lift.name})"
22 | }
23 |
24 | final case class ValuesClauseToken(statement: Statement) extends Token {
25 | override def toString: String = statement.toString
26 | }
27 |
28 | final case class Statement(tokens: List[Token]) extends Token {
29 | override def toString: String = tokens.mkString
30 | }
31 |
32 | final case class SetContainsToken(a: Token, op: Token, b: Token) extends Token {
33 | override def toString: String = s"${a.toString} ${op.toString} (${b.toString})"
34 | }
35 |
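Each token renders itself through toString, so a statement is just the concatenation of its parts; a tiny sketch (editorial, the SQL fragments are illustrative):

import io.getquill.idiom.{SetContainsToken, Statement, StringToken}

object TokenRenderingSketch {
  def main(args: Array[String]): Unit = {
    val stmt = Statement(
      List(
        StringToken("SELECT p.name FROM Person p WHERE "),
        SetContainsToken(StringToken("p.id"), StringToken("IN"), StringToken("?, ?"))
      )
    )
    println(stmt) // SELECT p.name FROM Person p WHERE p.id IN (?, ?)
  }
}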
--------------------------------------------------------------------------------
/quill-jdbc-test-mysql/src/test/scala/io/getquill/context/jdbc/mysql/BatchValuesJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.mysql
2 |
3 | import io.getquill.context.sql.base.BatchValuesSpec
4 | import io.getquill._
5 |
6 | class BatchValuesJdbcSpec extends BatchValuesSpec {
7 |
8 | val context = testContext
9 | import testContext._
10 |
11 | override def beforeEach(): Unit = {
12 | testContext.run(query[Product].delete)
13 | testContext.run(sql"ALTER TABLE Product AUTO_INCREMENT = 1".as[Delete[Product]])
14 | super.beforeEach()
15 | }
16 |
17 | "Ex 1 - Batch Insert Normal" in {
18 | import `Ex 1 - Batch Insert Normal`._
19 | testContext.run(op, batchSize)
20 | testContext.run(get).toSet mustEqual result.toSet
21 | }
22 |
23 | "Ex 2 - Batch Insert Returning" in {
24 | import `Ex 2 - Batch Insert Returning`._
25 | val ids = testContext.run(op, batchSize)
26 | ids.toSet mustEqual productsOriginal.map(_.id).toSet
27 | testContext.run(get).toSet mustEqual productsOriginal.toSet
28 | }
29 |
30 | "Ex 3 - Batch Insert Mixed" in {
31 | import `Ex 3 - Batch Insert Mixed`._
32 | testContext.run(op, batchSize)
33 | testContext.run(get).toSet mustEqual result.toSet
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/misc/PrepareJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.misc
2 |
3 | import io.getquill.{PrepareZioJdbcSpecBase, ZioProxySpec}
4 | import org.scalatest.BeforeAndAfter
5 |
6 | import java.sql.{Connection, ResultSet}
7 |
8 | class PrepareJdbcSpec extends PrepareZioJdbcSpecBase with ZioProxySpec with BeforeAndAfter {
9 |
10 | val context = testContext
11 | import context._
12 |
13 | before {
14 | testContext.run(query[Product].delete).runSyncUnsafe()
15 | }
16 |
17 | def productExtractor = (rs: ResultSet, conn: Connection) => materializeQueryMeta[Product].extract(rs, conn)
18 | val prepareQuery = prepare(query[Product])
19 |
20 | "single" in {
21 | val prepareInsert = prepare(query[Product].insertValue(lift(productEntries.head)))
22 | singleInsert(prepareInsert) mustEqual false
23 | extractProducts(prepareQuery) === List(productEntries.head)
24 | }
25 |
26 | "batch" in {
27 | val prepareBatchInsert = prepare(
28 | liftQuery(withOrderedIds(productEntries)).foreach(p => query[Product].insertValue(p))
29 | )
30 |
31 | batchInsert(prepareBatchInsert).distinct mustEqual List(false)
32 | extractProducts(prepareQuery) === withOrderedIds(productEntries)
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/idiom/Idiom.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.idiom
2 |
3 | import io.getquill.ast._
4 | import io.getquill.context.{ExecutionType, IdiomReturningCapability}
5 | import io.getquill.quat.Quat
6 | import io.getquill.{IdiomContext, NamingStrategy}
7 |
8 | trait Idiom extends IdiomReturningCapability {
9 | private val _emptySetContainsToken: StringToken = StringToken("FALSE")
10 | private val _defaultAutoGeneratedToken: StringToken = StringToken("DEFAULT VALUES")
11 |
12 | def emptySetContainsToken(field: Token): Token = _emptySetContainsToken
13 |
14 | def defaultAutoGeneratedToken(field: Token): Token = _defaultAutoGeneratedToken
15 |
16 | def liftingPlaceholder(index: Int): String
17 |
18 | def translate(ast: Ast, topLevelQuat: Quat, executionType: ExecutionType, transpileConfig: IdiomContext)(implicit
19 | naming: NamingStrategy
20 | ): (Ast, Statement, ExecutionType)
21 |
22 | def translateCached(ast: Ast, topLevelQuat: Quat, executionType: ExecutionType, transpileConfig: IdiomContext)(
23 | implicit naming: NamingStrategy
24 | ): (Ast, Statement, ExecutionType)
25 |
26 | def format(queryString: String): String = queryString
27 |
28 | def prepareForProbing(string: String): String
29 | }
30 |
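liftingPlaceholder is the hook where a dialect chooses how a lifted value is rendered into the final string; as a rough sketch of the two common conventions (not taken from any particular dialect in this repository):

object LiftingPlaceholderSketch {
  // JDBC-style dialects ignore the index and always emit '?'.
  def questionMark(index: Int): String = "?"

  // Positional dialects render the 0-based index as a 1-based '$n' marker.
  def positional(index: Int): String = s"$$${index + 1}"

  def main(args: Array[String]): Unit =
    println((0 until 3).map(positional).mkString(", ")) // $1, $2, $3
}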
--------------------------------------------------------------------------------
/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextBase.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc
2 |
3 | import io.getquill._
4 | import io.getquill.context.ContextVerbPrepareLambda
5 | import io.getquill.context.sql.idiom.SqlIdiom
6 |
7 | import java.sql._
8 |
9 | trait JdbcContextBase[+Dialect <: SqlIdiom, +Naming <: NamingStrategy]
10 | extends JdbcContextVerbExecute[Dialect, Naming]
11 | with JdbcContextVerbPrepare[Dialect, Naming]
12 | with ContextVerbPrepareLambda {
13 |
14 | // Need to re-define these here or they conflict with staged-prepare imported types
15 | override type PrepareQueryResult = Connection => Result[PreparedStatement]
16 | override type PrepareActionResult = Connection => Result[PreparedStatement]
17 | override type PrepareBatchActionResult = Connection => Result[List[PreparedStatement]]
18 |
19 | def constructPrepareQuery(f: Connection => Result[PreparedStatement]): Connection => Result[PreparedStatement] = f
20 | def constructPrepareAction(f: Connection => Result[PreparedStatement]): Connection => Result[PreparedStatement] = f
21 | def constructPrepareBatchAction(
22 | f: Connection => Result[List[PreparedStatement]]
23 | ): Connection => Result[List[PreparedStatement]] = f
24 | }
25 |
--------------------------------------------------------------------------------
/quill-doobie/src/test/scala/io/getquill/doobie/issue/Issue1067.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.doobie.issue
2 |
3 | import cats.effect._
4 | import doobie._
5 | import doobie.implicits._
6 | import io.getquill._
7 | import org.scalatest.freespec.AnyFreeSpec
8 | import org.scalatest.matchers.must.Matchers
9 | import io.getquill.doobie.DoobieContext
10 |
11 | // https://github.com/tpolecat/doobie/issues/1067
12 | class Issue1067 extends AnyFreeSpec with Matchers {
13 |
14 | import cats.effect.unsafe.implicits.global
15 |
16 | lazy val xa = Transactor.fromDriverManager[IO](
17 | "org.postgresql.Driver",
18 | s"jdbc:postgresql://${System.getenv("POSTGRES_HOST")}:${System.getenv("POSTGRES_PORT")}/doobie_test",
19 | "postgres",
20 | System.getenv("POSTGRES_PASSWORD"),
21 | None
22 | )
23 |
24 | val dc = new DoobieContext.Postgres(Literal)
25 | import dc._
26 |
27 | case class Country(name: String, indepYear: Option[Short])
28 |
29 | "Issue1067 - correctly select many countries, with a null in last position" in {
30 | val stmt = quote(query[Country])
31 | val actual = dc.run(stmt).transact(xa).unsafeRunSync()
32 | actual.count(_.indepYear.isDefined) mustEqual 3
33 | actual.count(_.indepYear.isEmpty) mustEqual 1
34 | }
35 |
36 | }
37 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/norm/DisablePhase.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.norm
2 |
3 | import io.getquill.norm.ConfigList._
4 | import io.getquill.util.Messages.TraceType
5 | import io.getquill.util.TraceConfig
6 |
7 | final case class TranspileConfig(disablePhases: List[OptionalPhase], traceConfig: TraceConfig)
8 | object TranspileConfig {
9 | val Empty = TranspileConfig(List.empty, TraceConfig(List.empty))
10 | }
11 |
12 | sealed trait OptionalPhase
13 | object OptionalPhase {
14 | sealed trait ApplyMap extends OptionalPhase
15 | case object ApplyMap extends ApplyMap
16 |
17 | val all: List[OptionalPhase] = List(ApplyMap)
18 | }
19 |
20 | trait DisablePhase {
21 | type Phase <: HList[OptionalPhase]
22 | }
23 |
24 | object DisablePhaseNone extends DisablePhase {
25 | type Phase = HNil
26 | }
27 |
28 | trait EnableTrace {
29 | type Trace <: HList[TraceType]
30 | }
31 |
32 | object EnableTraceNone extends EnableTrace {
33 | type Trace = HNil
34 | }
35 |
36 | object ConfigList {
37 | sealed trait HList[+H]
38 | final case class ::[+H, +T <: HList[_]](head: H, tail: T) extends HList[H]
39 | sealed trait HNil extends HList[Nothing] {
40 | def ::[H](h: H): HList[H] = ConfigList.::[H, HNil](h, this)
41 | }
42 | case object HNil extends HNil
43 | }
44 |
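A short sketch (editorial) of how the type-level list is meant to be populated: a DisablePhase implementation names the optional phases to skip, with DisablePhaseNone above being the empty case; EnableTrace works the same way over TraceType.

import io.getquill.norm.ConfigList._
import io.getquill.norm.{DisablePhase, OptionalPhase}

// Disable only the ApplyMap optimization phase.
object DisableApplyMap extends DisablePhase {
  type Phase = OptionalPhase.ApplyMap :: HNil
}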
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/MirrorIdiomExt.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.context.{CanReturnField, CanReturnMultiField, CannotReturn}
4 |
5 | class TestMirrorContextTemplate[Dialect <: MirrorIdiomBase, +Naming <: NamingStrategy](dialect: Dialect, naming: Naming)
6 | extends MirrorContext[Dialect, Naming](dialect, naming)
7 | with TestEntities {
8 |
9 | def withDialect[I <: MirrorIdiomBase](dialect: I)(f: TestMirrorContextTemplate[I, Naming] => Any): Unit = {
10 | val ctx = new TestMirrorContextTemplate[I, Naming](dialect, naming)
11 | f(ctx)
12 | ctx.close
13 | }
14 | }
15 |
16 | // Mirror idiom supporting only single-field returning clauses
17 | trait MirrorIdiomReturningSingle extends MirrorIdiomBase with CanReturnField
18 | object MirrorIdiomReturningSingle extends MirrorIdiomReturningSingle
19 |
20 | // Mirror idiom supporting only multi-field returning clauses
21 | trait MirrorIdiomReturningMulti extends MirrorIdiomBase with CanReturnMultiField
22 | object MirrorIdiomReturningMulti extends MirrorIdiomReturningMulti
23 |
24 | // Mirror idiom not supporting any returns
25 | trait MirrorIdiomReturningUnsupported extends MirrorIdiomBase with CannotReturn
26 | object MirrorIdiomReturningUnsupported extends MirrorIdiomReturningUnsupported
27 |
--------------------------------------------------------------------------------
/quill-engine/src/main/scala/io/getquill/dsl/InfixDsl.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.dsl
2 |
3 | import io.getquill.quotation.NonQuotedException
4 |
5 | import scala.annotation.compileTimeOnly
6 |
7 | private[getquill] trait InfixDsl {
8 |
9 | private[getquill] trait InfixValue {
10 | def as[T]: T
11 | def asCondition: Boolean
12 | def pure: InfixValue
13 | private[getquill] def generic: InfixValue
14 | private[getquill] def transparent: InfixValue
15 | }
16 |
17 | implicit final class InfixInterpolator(val sc: StringContext) {
18 |
19 | @compileTimeOnly(NonQuotedException.message)
20 | @deprecated("""Use sql"${content}" instead""", "3.3.0")
21 | def infix(args: Any*): InfixValue = NonQuotedException()
22 | }
23 |
24 | implicit final class SqlInfixInterpolator(val sc: StringContext) {
25 |
26 | @compileTimeOnly(NonQuotedException.message)
27 | def sql(args: Any*): InfixValue = NonQuotedException()
28 | }
29 |
30 | object compat {
31 | // For compatibility with Slick/Doobie/etc... that already have an SQL interpolator
32 | implicit final class QsqlInfixInterpolator(val sc: StringContext) {
33 |
34 | @compileTimeOnly(NonQuotedException.message)
35 | def qsql(args: Any*): InfixValue = NonQuotedException()
36 | }
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/quill-sql-test/src/test/scala/io/getquill/context/sql/base/ArrayOpsSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql.base
2 |
3 | import io.getquill.base.Spec
4 | import io.getquill.context.sql.SqlContext
5 | import io.getquill.context.sql.encoding.ArrayEncoding
6 |
7 | trait ArrayOpsSpec extends Spec {
8 |
9 | val ctx: SqlContext[_, _] with ArrayEncoding
10 |
11 | import ctx._
12 |
13 | case class ArrayOps(id: Int, numbers: Seq[Int])
14 |
15 | val entriesList = List(
16 | ArrayOps(1, List(1, 2, 3)),
17 | ArrayOps(2, List(1, 4, 5)),
18 | ArrayOps(3, List(1, 4, 6))
19 | )
20 |
21 | val entity = quote(query[ArrayOps])
22 |
23 | val insertEntries = quote {
24 | liftQuery(entriesList).foreach(e => entity.insertValue(e))
25 | }
26 |
27 | object `contains` {
28 | def idByContains(x: Int) = quote(entity.filter(_.numbers.contains(lift(x))).map(_.id))
29 |
30 | val `Ex 1 return all` = quote(idByContains(1))
31 | val `Ex 1 expected` = List(1, 2, 3)
32 |
33 | val `Ex 2 return 1` = quote(idByContains(3))
34 | val `Ex 2 expected` = List(1)
35 |
36 | val `Ex 3 return 2,3` = quote(idByContains(4))
37 | val `Ex 3 expected` = List(2, 3)
38 |
39 | val `Ex 4 return empty` = quote(idByContains(10))
40 | val `Ex 4 expected` = Nil
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/quill-test-kit/src/test/scala/io/getquill/context/sql/base/ArrayOpsSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.sql.base
2 |
3 | import io.getquill.base.Spec
4 | import io.getquill.context.sql.SqlContext
5 | import io.getquill.context.sql.encoding.ArrayEncoding
6 |
7 | trait ArrayOpsSpec extends Spec {
8 |
9 | val ctx: SqlContext[_, _] with ArrayEncoding
10 |
11 | import ctx._
12 |
13 | case class ArrayOps(id: Int, numbers: Seq[Int])
14 |
15 | val entriesList = List(
16 | ArrayOps(1, List(1, 2, 3)),
17 | ArrayOps(2, List(1, 4, 5)),
18 | ArrayOps(3, List(1, 4, 6))
19 | )
20 |
21 | val entity = quote(query[ArrayOps])
22 |
23 | val insertEntries = quote {
24 | liftQuery(entriesList).foreach(e => entity.insertValue(e))
25 | }
26 |
27 | object `contains` {
28 | def idByContains(x: Int) = quote(entity.filter(_.numbers.contains(lift(x))).map(_.id))
29 |
30 | val `Ex 1 return all` = quote(idByContains(1))
31 | val `Ex 1 expected` = List(1, 2, 3)
32 |
33 | val `Ex 2 return 1` = quote(idByContains(3))
34 | val `Ex 2 expected` = List(1)
35 |
36 | val `Ex 3 return 2,3` = quote(idByContains(4))
37 | val `Ex 3 expected` = List(2, 3)
38 |
39 | val `Ex 4 return empty` = quote(idByContains(10))
40 | val `Ex 4 expected` = Nil
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/quill-codegen-jdbc/src/test/scala/io/getquill/codegen/util/SchemaMaker.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.codegen.util
2 |
3 | import java.io.Closeable
4 |
5 | import io.getquill._
6 | import io.getquill.codegen.integration.DbHelper
7 | import javax.sql.DataSource
8 | import org.scalatest.freespec.AnyFreeSpec
9 |
10 | import scala.language.implicitConversions
11 |
12 | abstract class CodegenSpec extends AnyFreeSpec with SchemaMaker {
13 | type Prefix <: ConfigPrefix
14 | val prefix: Prefix
15 |
16 | implicit def regToOption[T](t: T): Option[T] = Some(t)
17 | }
18 |
19 | object SchemaMaker extends SchemaMaker
20 |
21 | case class SchemaMakerCoordinates(dbPrefix: ConfigPrefix, naming: NamingStrategy, schemaConfig: SchemaConfig)
22 |
23 | trait SchemaMaker {
24 |
25 | private[getquill] def withDatasource[T](schemaConfig: SchemaConfig, dbPrefix: ConfigPrefix)(
26 | testCode: DataSource with Closeable => T
27 | ): T = {
28 | val ds = dbPrefix.makeDatasource
29 | val helper = new DbHelper(schemaConfig, dbPrefix, ds)
30 | DbHelper.dropTables(ds)
31 | helper.setup()
32 | testCode(ds)
33 | }
34 |
35 | def withContext[T](coords: SchemaMakerCoordinates)(testCode: => T): T = {
36 | import coords._
37 | withDatasource(schemaConfig, dbPrefix)(ds => testCode)
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/AsyncMirrorContextSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill
2 |
3 | import io.getquill.base.Spec
4 | import io.getquill.MirrorContexts._
5 |
6 | class AsyncMirrorContextSpec extends Spec {
7 | val ctx = testAsyncContext
8 | import ctx._
9 |
10 | "executeQuery" in {
11 | eval(ctx.run(qr1))
12 | }
13 |
14 | "executeQuerySingle" in {
15 | eval(ctx.run(qr1.map(_.i).max))
16 | }
17 |
18 | "executeAction" in {
19 | eval(ctx.run(qr4.insertValue(lift(TestEntity4(1)))))
20 | }
21 |
22 | "executeActionReturning" in {
23 | eval(ctx.run(qr4.insertValue(lift(TestEntity4(0))).returning(_.i)))
24 | }
25 |
26 | "executeBatchAction" in {
27 | eval(ctx.run {
28 | liftQuery(List(TestEntity4(1))).foreach(e => qr4.insertValue(e))
29 | })
30 | }
31 |
32 | "executeBatchActionReturning" in {
33 | eval(ctx.run {
34 | liftQuery(List(TestEntity4(0))).foreach(e => qr4.insertValue(e).returning(_.i))
35 | })
36 | }
37 |
38 | "prepare" in {
39 | ctx.prepareParams("", (ps, session) => (Nil, ps.add("Sarah").add(127))) mustEqual List("'Sarah'", "127")
40 | }
41 |
42 | "probe" in {
43 | ctx.probe("Ok").toOption mustBe defined
44 | ctx.probe("Fail").toOption mustBe empty
45 | }
46 |
47 | "close" in {
48 | ctx.close()
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/PlainAppDataSource2.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.examples.other
2 |
3 | import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
4 | import io.getquill.util.LoadConfig
5 | import io.getquill.{JdbcContextConfig, Literal, PostgresZioJdbcContext}
6 | import zio.Console.printLine
7 | import zio.{Runtime, Unsafe, ZIO, ZLayer}
8 |
9 | import javax.sql.DataSource
10 |
11 | object PlainAppDataSource2 {
12 |
13 | object MyPostgresContext extends PostgresZioJdbcContext(Literal)
14 | import MyPostgresContext._
15 |
16 | case class Person(name: String, age: Int)
17 |
18 | def hikariConfig = new HikariConfig(JdbcContextConfig(LoadConfig("testPostgresDB")).configProperties)
19 | def hikariDataSource = new HikariDataSource(hikariConfig)
20 |
21 | val zioDS: ZLayer[Any, Throwable, DataSource] =
22 | ZLayer(ZIO.attempt(hikariDataSource))
23 |
24 | def main(args: Array[String]): Unit = {
25 | val people = quote {
26 | query[Person].filter(p => p.name == "Alex")
27 | }
28 | val qzio =
29 | MyPostgresContext
30 | .run(people)
31 | .tap(result => printLine(result.toString))
32 | .provide(zioDS)
33 |
34 | Unsafe.unsafe { implicit u =>
35 | Runtime.default.unsafe.run(qzio)
36 | }
37 | ()
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/h2/PrepareJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.h2
2 |
3 | import java.sql.{Connection, ResultSet}
4 | import io.getquill.PrepareZioJdbcSpecBase
5 | import io.getquill.context.qzio.ImplicitSyntax.Implicit
6 | import org.scalatest.BeforeAndAfter
7 |
8 | class PrepareJdbcSpec extends PrepareZioJdbcSpecBase with BeforeAndAfter {
9 |
10 | val context = testContext.underlying
11 | import context._
12 | implicit val implicitPool = Implicit(pool)
13 |
14 | before {
15 | testContext.run(query[Product].delete).runSyncUnsafe()
16 | }
17 |
18 | def productExtractor = (rs: ResultSet, conn: Connection) => materializeQueryMeta[Product].extract(rs, conn)
19 | val prepareQuery = prepare(query[Product])
20 |
21 | "single" in {
22 | val prepareInsert = prepare(query[Product].insertValue(lift(productEntries.head)))
23 | singleInsert(prepareInsert) mustEqual false
24 | extractProducts(prepareQuery) === List(productEntries.head)
25 | }
26 |
27 | "batch" in {
28 | val prepareBatchInsert = prepare(
29 | liftQuery(withOrderedIds(productEntries)).foreach(p => query[Product].insertValue(p))
30 | )
31 |
32 | batchInsert(prepareBatchInsert).distinct mustEqual List(false)
33 | extractProducts(prepareQuery) === withOrderedIds(productEntries)
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/quill-cassandra/src/main/scala/io/getquill/context/Caches.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context
2 |
3 | import com.datastax.oss.driver.api.core.cql.{BoundStatement, PreparedStatement}
4 | import io.getquill.context.cassandra.PrepareStatementCache
5 | import io.getquill.context.cassandra.util.FutureConversions._
6 |
7 | import java.util.concurrent.CompletionStage
8 | import scala.concurrent.{ExecutionContext, Future}
9 | import scala.util.Failure
10 | import scala.compat.java8.FutureConverters._
11 |
12 | trait SyncCache { this: CassandraSession =>
13 | lazy val syncCache = new PrepareStatementCache[PreparedStatement](preparedStatementCacheSize)
14 | def prepare(cql: String): BoundStatement =
15 | syncCache(cql)(stmt => session.prepare(stmt)).bind()
16 | }
17 |
18 | trait AsyncFutureCache { this: CassandraSession =>
19 | lazy val asyncCache = new PrepareStatementCache[CompletionStage[PreparedStatement]](preparedStatementCacheSize)
20 |
21 | def prepareAsync(cql: String)(implicit executionContext: ExecutionContext): Future[BoundStatement] = {
22 | val output = asyncCache(cql) { stmt =>
23 | session.prepareAsync(stmt)
24 | }.toScala
25 |
26 | output.onComplete {
27 | case Failure(_) => asyncCache.invalidate(cql)
28 | case _ => ()
29 | }
30 | output.map(_.bind())
31 |
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/quill-jdbc-test-h2/src/test/scala/io/getquill/context/jdbc/h2/PrepareJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.h2
2 |
3 | import java.sql.{Connection, ResultSet}
4 | import io.getquill.context.jdbc.PrepareJdbcSpecBase
5 | import org.scalatest.BeforeAndAfter
6 |
7 | class PrepareJdbcSpec extends PrepareJdbcSpecBase with BeforeAndAfter {
8 |
9 | val context = testContext
10 | import testContext._
11 |
12 | before {
13 | testContext.run(query[Product].delete)
14 | }
15 |
16 | def productExtractor = (rs: ResultSet, conn: Connection) => materializeQueryMeta[Product].extract(rs, conn)
17 | val prepareQuery = prepare(query[Product])
18 |
19 | "single" in {
20 | val prepareInsert = prepare(query[Product].insertValue(lift(productEntries.head)))
21 | singleInsert(dataSource.getConnection)(prepareInsert) mustEqual false
22 | extractProducts(dataSource.getConnection)(prepareQuery) === List(productEntries.head)
23 | }
24 |
25 | "batch" in {
26 | val prepareBatchInsert = prepare(
27 | liftQuery(withOrderedIds(productEntries)).foreach(p => query[Product].insertValue(p))
28 | )
29 |
30 | batchInsert(dataSource.getConnection)(prepareBatchInsert).distinct mustEqual List(false)
31 | extractProducts(dataSource.getConnection)(prepareQuery) === withOrderedIds(productEntries)
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppExample.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.examples.other
2 |
3 | import io.getquill._
4 | import io.getquill.jdbczio.Quill
5 | import zio._
6 |
7 | import javax.sql.DataSource
8 |
9 | case class Person(name: String, age: Int)
10 |
11 | object QuillContext extends PostgresZioJdbcContext(SnakeCase) {
12 | val dataSourceLayer = Quill.DataSource.fromPrefix("testPostgresDB").orDie
13 | }
14 |
15 | object DataService {
16 | def getPeople =
17 | ZIO.serviceWith[DataServiceLive](_.getPeople)
18 | def getPeopleOlderThan(age: Int) =
19 | ZIO.serviceWith[DataServiceLive](_.getPeopleOlderThan(age))
20 | }
21 |
22 | object DataServiceLive {
23 | val layer = ZLayer.fromFunction(DataServiceLive.apply _)
24 | }
25 |
26 | final case class DataServiceLive(dataSource: DataSource) {
27 | import QuillContext._
28 | def getPeople = run(query[Person]).provideEnvironment(ZEnvironment(dataSource))
29 | def getPeopleOlderThan(age: Int) =
30 | run(query[Person].filter(p => p.age > lift(age))).provideEnvironment(ZEnvironment(dataSource))
31 | }
32 |
33 | object ZioAppExample extends ZIOAppDefault {
34 | override def run =
35 | DataService.getPeople
36 | .provide(QuillContext.dataSourceLayer, DataServiceLive.layer)
37 | .debug("Results")
38 | .exitCode
39 | }
40 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/mysql/PrepareJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.mysql
2 |
3 | import java.sql.{Connection, ResultSet}
4 | import io.getquill.PrepareZioJdbcSpecBase
5 | import io.getquill.context.qzio.ImplicitSyntax.Implicit
6 | import org.scalatest.BeforeAndAfter
7 |
8 | class PrepareJdbcSpec extends PrepareZioJdbcSpecBase with BeforeAndAfter {
9 |
10 | val context = testContext.underlying
11 | import context._
12 | implicit val implicitPool = Implicit(pool)
13 |
14 | before {
15 | testContext.run(query[Product].delete).runSyncUnsafe()
16 | }
17 |
18 | def productExtractor = (rs: ResultSet, conn: Connection) => materializeQueryMeta[Product].extract(rs, conn)
19 | val prepareQuery = prepare(query[Product])
20 |
21 | "single" in {
22 | val prepareInsert = prepare(query[Product].insertValue(lift(productEntries.head)))
23 | singleInsert(prepareInsert) mustEqual false
24 | extractProducts(prepareQuery) === List(productEntries.head)
25 | }
26 |
27 | "batch" in {
28 | val prepareBatchInsert = prepare(
29 | liftQuery(withOrderedIds(productEntries)).foreach(p => query[Product].insertValue(p))
30 | )
31 |
32 | batchInsert(prepareBatchInsert).distinct mustEqual List(false)
33 | extractProducts(prepareQuery) === withOrderedIds(productEntries)
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/oracle/PrepareJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.oracle
2 |
3 | import java.sql.{Connection, ResultSet}
4 | import io.getquill.PrepareZioJdbcSpecBase
5 | import io.getquill.context.qzio.ImplicitSyntax.Implicit
6 | import org.scalatest.BeforeAndAfter
7 |
8 | class PrepareJdbcSpec extends PrepareZioJdbcSpecBase with BeforeAndAfter {
9 |
10 | val context = testContext.underlying
11 | import context._
12 | implicit val implicitPool = Implicit(pool)
13 |
14 | before {
15 | testContext.run(query[Product].delete).runSyncUnsafe()
16 | }
17 |
18 | def productExtractor = (rs: ResultSet, conn: Connection) => materializeQueryMeta[Product].extract(rs, conn)
19 | val prepareQuery = prepare(query[Product])
20 |
21 | "single" in {
22 | val prepareInsert = prepare(query[Product].insertValue(lift(productEntries.head)))
23 | singleInsert(prepareInsert) mustEqual false
24 | extractProducts(prepareQuery) === List(productEntries.head)
25 | }
26 |
27 | "batch" in {
28 | val prepareBatchInsert = prepare(
29 | liftQuery(withOrderedIds(productEntries)).foreach(p => query[Product].insertValue(p))
30 | )
31 |
32 | batchInsert(prepareBatchInsert).distinct mustEqual List(false)
33 | extractProducts(prepareQuery) === withOrderedIds(productEntries)
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/PrepareJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.sqlite
2 |
3 | import java.sql.{Connection, ResultSet}
4 | import io.getquill.PrepareZioJdbcSpecBase
5 | import io.getquill.context.qzio.ImplicitSyntax.Implicit
6 | import org.scalatest.BeforeAndAfter
7 |
8 | class PrepareJdbcSpec extends PrepareZioJdbcSpecBase with BeforeAndAfter {
9 |
10 | val context = testContext.underlying
11 | import context._
12 | implicit val implicitPool = Implicit(pool)
13 |
14 | before {
15 | testContext.run(query[Product].delete).runSyncUnsafe()
16 | }
17 |
18 | def productExtractor = (rs: ResultSet, conn: Connection) => materializeQueryMeta[Product].extract(rs, conn)
19 | val prepareQuery = prepare(query[Product])
20 |
21 | "single" in {
22 | val prepareInsert = prepare(query[Product].insertValue(lift(productEntries.head)))
23 | singleInsert(prepareInsert) mustEqual false
24 | extractProducts(prepareQuery) === List(productEntries.head)
25 | }
26 |
27 | "batch" in {
28 | val prepareBatchInsert = prepare(
29 | liftQuery(withOrderedIds(productEntries)).foreach(p => query[Product].insertValue(p))
30 | )
31 |
32 | batchInsert(prepareBatchInsert).distinct mustEqual List(false)
33 | extractProducts(prepareQuery) === withOrderedIds(productEntries)
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/quill-core/src/test/scala/io/getquill/norm/NormalizeSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.norm
2 |
3 | import io.getquill.base.Spec
4 | import io.getquill.MirrorContexts.testContext.implicitOrd
5 | import io.getquill.MirrorContexts.testContext.qr1
6 | import io.getquill.MirrorContexts.testContext.qr2
7 | import io.getquill.MirrorContexts.testContext.quote
8 | import io.getquill.MirrorContexts.testContext.unquote
9 |
10 | class NormalizeSpec extends Spec {
11 |
12 | val normalize = new Normalize(TranspileConfig.Empty)
13 |
14 | "normalizes random-generated queries" - {
15 | val gen = new QueryGenerator(1)
16 | for (i <- (3 to 15)) {
17 | for (j <- (0 until 30)) {
18 | val query = gen(i)
19 | s"$i levels ($j) - $query" in {
20 | // println("=================== Normalizing Query ==================\n" + query + "\n" + "=== Full ===" + "\n" + Messages.qprint(query).render)
21 | normalize(query)
22 | ()
23 | }
24 | }
25 | }
26 | }
27 |
28 | "doesn't apply the avoid capture normalization to branches in isolation" in {
29 | val q = quote {
30 | qr1.sortBy(t => t.i).flatMap(f => qr2.map(t => 1))
31 | }
32 | val n = quote {
33 | qr1.sortBy(t => t.i).flatMap(t => qr2.map(t1 => 1))
34 | }
35 | normalize(q.ast) mustEqual n.ast
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/quill-jdbc-test-mysql/src/test/scala/io/getquill/context/jdbc/mysql/JdbcEncodingSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.mysql
2 |
3 | import io.getquill.context.sql.EncodingSpec
4 | import io.getquill.Query
5 |
6 | import java.time.ZoneId
7 |
8 | class JdbcEncodingSpec extends EncodingSpec {
9 |
10 | val context = testContext
11 | import testContext._
12 |
13 | "encodes and decodes types" in {
14 | testContext.run(delete)
15 | testContext.run(liftQuery(insertValues).foreach(p => insert(p)))
16 | verify(testContext.run(query[EncodingTestEntity]))
17 | }
18 |
19 | "encodes sets" in {
20 | testContext.run(query[EncodingTestEntity].delete)
21 | testContext.run(liftQuery(insertValues).foreach(p => query[EncodingTestEntity].insertValue(p)))
22 | val q = quote { (set: Query[Int]) =>
23 | query[EncodingTestEntity].filter(t => set.contains(t.v6))
24 | }
25 | verify(testContext.run(q(liftQuery(insertValues.map(_.v6).toSet))))
26 | }
27 |
28 | "Encode/Decode Other Time Types" in {
29 | context.run(query[TimeEntity].delete)
30 | val zid = ZoneId.systemDefault()
31 | val timeEntity = TimeEntity.make(zid)
32 | context.run(query[TimeEntity].insertValue(lift(timeEntity)))
33 | val actual = context.run(query[TimeEntity]).head
34 | timeEntity mustEqual actual
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/quill-jdbc-test-mysql/src/test/scala/io/getquill/context/jdbc/mysql/PrepareJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.mysql
2 |
3 | import java.sql.{Connection, ResultSet}
4 | import io.getquill.context.jdbc.PrepareJdbcSpecBase
5 | import org.scalatest.BeforeAndAfter
6 |
7 | class PrepareJdbcSpec extends PrepareJdbcSpecBase with BeforeAndAfter {
8 |
9 | val context = testContext
10 | import testContext._
11 |
12 | before {
13 | testContext.run(query[Product].delete)
14 | }
15 |
16 | def productExtractor = (rs: ResultSet, conn: Connection) => materializeQueryMeta[Product].extract(rs, conn)
17 | val prepareQuery = prepare(query[Product])
18 |
19 | "single" in {
20 | val prepareInsert = prepare(query[Product].insertValue(lift(productEntries.head)))
21 | singleInsert(dataSource.getConnection)(prepareInsert) mustEqual false
22 | extractProducts(dataSource.getConnection)(prepareQuery) === List(productEntries.head)
23 | }
24 |
25 | "batch" in {
26 | val prepareBatchInsert = prepare(
27 | liftQuery(withOrderedIds(productEntries)).foreach(p => query[Product].insertValue(p))
28 | )
29 |
30 | batchInsert(dataSource.getConnection)(prepareBatchInsert).distinct mustEqual List(false)
31 | extractProducts(dataSource.getConnection)(prepareQuery) === withOrderedIds(productEntries)
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/ExampleAppImplicitEnv.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.cassandra.zio.examples.other
2 |
3 | import io.getquill._
4 | import io.getquill.context.qzio.ImplicitSyntax.Implicit
5 | import zio.{ZIO, ZIOAppDefault}
6 | import zio.Console.printLine
7 | import io.getquill.context.qzio.ImplicitSyntax._
8 |
9 | object ExampleAppImplicitEnv extends ZIOAppDefault {
10 |
11 | object Ctx extends CassandraZioContext(Literal)
12 |
13 | case class Person(name: String, age: Int)
14 |
15 | val zioSessionLayer =
16 | CassandraZioSession.fromPrefix("testStreamDB")
17 |
18 | case class MyQueryService(cs: CassandraZioSession) {
19 | import Ctx._
20 | implicit val env = Implicit(cs)
21 |
22 | def joes = Ctx.run(query[Person].filter(p => p.name == "Joe")).implicitly
23 | def jills = Ctx.run(query[Person].filter(p => p.name == "Jill")).implicitly
24 | def alexes = Ctx.run(query[Person].filter(p => p.name == "Alex")).implicitly
25 | }
26 |
27 | override def run = {
28 | val result =
29 | for {
30 | csession <- ZIO.scoped(zioSessionLayer.build)
31 | joes <- MyQueryService(csession.get).joes
32 | } yield joes
33 |
34 | result
35 | .tap(result => printLine(result.toString))
36 | .exitCode
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/quill-jdbc-test-oracle/src/test/scala/io/getquill/context/jdbc/oracle/PrepareJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.oracle
2 |
3 | import java.sql.{Connection, ResultSet}
4 | import io.getquill.context.jdbc.PrepareJdbcSpecBase
5 | import org.scalatest.BeforeAndAfter
6 |
7 | class PrepareJdbcSpec extends PrepareJdbcSpecBase with BeforeAndAfter {
8 |
9 | val context = testContext
10 | import testContext._
11 |
12 | before {
13 | testContext.run(query[Product].delete)
14 | }
15 |
16 | def productExtractor = (rs: ResultSet, conn: Connection) => materializeQueryMeta[Product].extract(rs, conn)
17 | val prepareQuery = prepare(query[Product])
18 |
19 | "single" in {
20 | val prepareInsert = prepare(query[Product].insertValue(lift(productEntries.head)))
21 | singleInsert(dataSource.getConnection)(prepareInsert) mustEqual false
22 | extractProducts(dataSource.getConnection)(prepareQuery) === List(productEntries.head)
23 | }
24 |
25 | "batch" in {
26 | val prepareBatchInsert = prepare(
27 | liftQuery(withOrderedIds(productEntries)).foreach(p => query[Product].insertValue(p))
28 | )
29 |
30 | batchInsert(dataSource.getConnection)(prepareBatchInsert).distinct mustEqual List(false)
31 | extractProducts(dataSource.getConnection)(prepareQuery) === withOrderedIds(productEntries)
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/quill-jdbc-test-sqlite/src/test/scala/io/getquill/context/jdbc/sqlite/PrepareJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.sqlite
2 |
3 | import java.sql.{Connection, ResultSet}
4 | import io.getquill.context.jdbc.PrepareJdbcSpecBase
5 | import org.scalatest.BeforeAndAfter
6 |
7 | class PrepareJdbcSpec extends PrepareJdbcSpecBase with BeforeAndAfter {
8 |
9 | val context = testContext
10 | import testContext._
11 |
12 | before {
13 | testContext.run(query[Product].delete)
14 | }
15 |
16 | def productExtractor = (rs: ResultSet, conn: Connection) => materializeQueryMeta[Product].extract(rs, conn)
17 | val prepareQuery = prepare(query[Product])
18 |
19 | "single" in {
20 | val prepareInsert = prepare(query[Product].insertValue(lift(productEntries.head)))
21 | singleInsert(dataSource.getConnection)(prepareInsert) mustEqual false
22 | extractProducts(dataSource.getConnection)(prepareQuery) === List(productEntries.head)
23 | }
24 |
25 | "batch" in {
26 | val prepareBatchInsert = prepare(
27 | liftQuery(withOrderedIds(productEntries)).foreach(p => query[Product].insertValue(p))
28 | )
29 |
30 | batchInsert(dataSource.getConnection)(prepareBatchInsert).distinct mustEqual List(false)
31 | extractProducts(dataSource.getConnection)(prepareQuery) === withOrderedIds(productEntries)
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/quill-jdbc-test-postgres/src/test/scala/io/getquill/context/jdbc/postgres/PrepareJdbcSpec.scala:
--------------------------------------------------------------------------------
1 | package io.getquill.context.jdbc.postgres
2 |
3 | import java.sql.{Connection, ResultSet}
4 | import io.getquill.context.jdbc.PrepareJdbcSpecBase
5 | import org.scalatest.BeforeAndAfter
6 |
7 | class PrepareJdbcSpec extends PrepareJdbcSpecBase with BeforeAndAfter {
8 |
9 | val context = testContext
10 | import testContext._
11 |
12 | before {
13 | testContext.run(query[Product].delete)
14 | }
15 |
16 | def productExtractor = (rs: ResultSet, conn: Connection) => materializeQueryMeta[Product].extract(rs, conn)
17 | val prepareQuery = prepare(query[Product])
18 |
19 | "single" in {
20 | val prepareInsert = prepare(query[Product].insertValue(lift(productEntries.head)))
21 | singleInsert(dataSource.getConnection)(prepareInsert) mustEqual false
22 | extractProducts(dataSource.getConnection)(prepareQuery) === List(productEntries.head)
23 | }
24 |
25 | "batch" in {
26 | val prepareBatchInsert = prepare(
27 | liftQuery(withOrderedIds(productEntries)).foreach(p => query[Product].insertValue(p))
28 | )
29 |
30 | batchInsert(dataSource.getConnection)(prepareBatchInsert).distinct mustEqual List(false)
31 | extractProducts(dataSource.getConnection)(prepareQuery) === withOrderedIds(productEntries)
32 | }
33 | }
34 |
--------------------------------------------------------------------------------