├── .git-blame-ignore-revs
├── .gitattributes
├── .github
├── CODEOWNERS
├── release-drafter.yml
└── workflows
│ ├── auto-approve.yml
│ ├── ci.yml
│ ├── release-drafter.yml
│ └── site.yml
├── .gitignore
├── .nvmrc
├── .scalafmt.conf
├── LICENSE
├── README.md
├── build.sbt
├── core
├── jvm
│ └── src
│ │ ├── main
│ │ ├── scala-2.12
│ │ │ └── zio
│ │ │ │ └── sql
│ │ │ │ └── expr
│ │ │ │ └── Set.scala
│ │ ├── scala-2.13
│ │ │ └── zio
│ │ │ │ └── sql
│ │ │ │ └── expr
│ │ │ │ └── Set.scala
│ │ └── scala
│ │ │ └── zio
│ │ │ └── sql
│ │ │ ├── Sql.scala
│ │ │ ├── delete
│ │ │ └── Delete.scala
│ │ │ ├── expr
│ │ │ ├── AggregationDef.scala
│ │ │ ├── ComparableTypes.scala
│ │ │ ├── Expr.scala
│ │ │ ├── FunctionDef.scala
│ │ │ └── FunctionName.scala
│ │ │ ├── insert
│ │ │ ├── Insert.scala
│ │ │ ├── InsertBuilder.scala
│ │ │ └── InsertByCommaBuilder.scala
│ │ │ ├── ops
│ │ │ └── Operator.scala
│ │ │ ├── package.scala
│ │ │ ├── select
│ │ │ ├── ColumnSelection.scala
│ │ │ ├── DecodingError.scala
│ │ │ ├── Ordering.scala
│ │ │ ├── Read.scala
│ │ │ ├── SelectAll.scala
│ │ │ ├── SelectBuilder.scala
│ │ │ ├── SelectByCommaBuilder.scala
│ │ │ ├── Selection.scala
│ │ │ ├── SelectionMacro.scala
│ │ │ ├── SelectionSet.scala
│ │ │ ├── SubselectBuilder.scala
│ │ │ └── SubselectByCommaBuilder.scala
│ │ │ ├── table
│ │ │ ├── Column.scala
│ │ │ ├── ExprAccesorBuilder.scala
│ │ │ ├── JoinType.scala
│ │ │ ├── Table.scala
│ │ │ └── TableNameAnnotation.scala
│ │ │ ├── typetag
│ │ │ ├── Decodable.scala
│ │ │ ├── IsDate.scala
│ │ │ ├── IsIntegral.scala
│ │ │ ├── IsNumeric.scala
│ │ │ └── TypeTag.scala
│ │ │ ├── update
│ │ │ ├── Update.scala
│ │ │ └── UpdateBuilder.scala
│ │ │ └── utils
│ │ │ ├── Pluralize.scala
│ │ │ └── TrailingUnitNormalizer.scala
│ │ └── test
│ │ └── scala
│ │ └── zio
│ │ └── sql
│ │ ├── ArithmeticOpsSpec.scala
│ │ ├── BitwiseOpSpec.scala
│ │ ├── GroupByHavingSpec.scala
│ │ ├── LogicalOpsSpec.scala
│ │ ├── PluralizeSpec.scala
│ │ ├── PredicateOpSpec.scala
│ │ ├── ProductSchema.scala
│ │ └── TestBasicSelectSpec.scala
└── sql
│ └── shop.sql
├── docs
├── deep-dive.md
├── index.md
├── package.json
└── sidebars.js
├── driver
└── src
│ └── main
│ └── scala
│ └── zio
│ └── sql
│ └── driver
│ └── Renderer.scala
├── examples
└── src
│ └── main
│ └── scala
│ └── zio
│ └── sql
│ ├── Examples.scala
│ ├── GroupByExamples.scala
│ ├── LiveExample.scala
│ └── OracleExamples.scala
├── jdbc-hikaricp
└── src
│ ├── main
│ └── scala
│ │ └── zio
│ │ └── sql
│ │ ├── HikariConnectionPool.scala
│ │ └── HikariConnectionPoolConfig.scala
│ └── test
│ └── scala
│ └── zio
│ └── sql
│ ├── HikariConnectionPoolSpec.scala
│ └── MySqlTestContainer.scala
├── jdbc
└── src
│ ├── main
│ └── scala
│ │ └── zio
│ │ └── sql
│ │ ├── ConnectionPool.scala
│ │ ├── ConnectionPoolConfig.scala
│ │ ├── JdbcInternalModule.scala
│ │ ├── SqlDriverLiveModule.scala
│ │ ├── TransactionSyntaxModule.scala
│ │ └── jdbc.scala
│ └── test
│ └── scala
│ └── zio
│ └── sql
│ ├── ConnectionPoolSpec.scala
│ ├── JdbcRunnableSpec.scala
│ └── TestContainer.scala
├── macros
└── src
│ └── main
│ ├── scala-2
│ └── zio
│ │ └── sql
│ │ └── macros
│ │ ├── Normalizer.scala
│ │ ├── groupbylike.scala
│ │ ├── havinglike.scala
│ │ ├── insertlike.scala
│ │ ├── notliteral.scala
│ │ ├── tablelike.scala
│ │ └── wherelike.scala
│ ├── scala-3
│ └── zio
│ │ └── sql
│ │ └── macros
│ │ ├── groupbylike.scala
│ │ ├── havinglike.scala
│ │ ├── insertlike.scala
│ │ ├── normalizer.scala
│ │ ├── notliteral.scala
│ │ ├── tablelike.scala
│ │ └── wherelike.scala
│ └── scala
│ └── zio
│ └── sql
│ └── Features.scala
├── mysql
├── project
│ └── build.properties
└── src
│ ├── main
│ └── scala
│ │ └── zio
│ │ └── sql
│ │ └── mysql
│ │ ├── MysqlJdbcModule.scala
│ │ ├── MysqlRenderModule.scala
│ │ └── MysqlSqlModule.scala
│ └── test
│ ├── resources
│ ├── logback.xml
│ └── shop_schema.sql
│ └── scala
│ └── zio
│ └── sql
│ └── mysql
│ ├── CommonFunctionDefSpec.scala
│ ├── CustomFunctionDefSpec.scala
│ ├── DeleteSpec.scala
│ ├── FunctionDefSpec.scala
│ ├── MysqlModuleSpec.scala
│ ├── MysqlRunnableSpec.scala
│ └── TransactionSpec.scala
├── oracle
└── src
│ ├── main
│ └── scala
│ │ └── zio
│ │ └── sql
│ │ └── oracle
│ │ ├── OracleJdbcModule.scala
│ │ ├── OracleRenderModule.scala
│ │ └── OracleSqlModule.scala
│ └── test
│ ├── resources
│ ├── logback.xml
│ └── shop_schema.sql
│ └── scala
│ └── zio
│ └── sql
│ └── oracle
│ ├── CommonFunctionDefSpec.scala
│ ├── CustomFunctionDefSpec.scala
│ ├── DualSchema.scala
│ ├── OracleRunnableSpec.scala
│ ├── OracleSqlModuleSpec.scala
│ └── ShopSchema.scala
├── postgres
└── src
│ ├── main
│ └── scala
│ │ └── zio
│ │ └── sql
│ │ └── postgresql
│ │ ├── PostgresJdbcModule.scala
│ │ ├── PostgresRenderModule.scala
│ │ └── PostgresSqlModule.scala
│ └── test
│ ├── resources
│ ├── db_schema.sql
│ └── logback.xml
│ └── scala
│ └── zio
│ └── sql
│ └── postgresql
│ ├── AgregationSpec.scala
│ ├── CommonFunctionDefSpec.scala
│ ├── CustomFunctionDefSpec.scala
│ ├── DbSchema.scala
│ ├── DeleteBatchSpec.scala
│ ├── DeleteSpec.scala
│ ├── InsertBatchSpec.scala
│ ├── PostgresRunnableSpec.scala
│ ├── PostgresSqlModuleSpec.scala
│ ├── TransactionSpec.scala
│ └── UpdateBatchSpec.scala
├── project
├── BuildHelper.scala
├── build.properties
└── plugins.sbt
├── sbt
└── sqlserver
└── src
├── main
└── scala
│ └── zio
│ └── sql
│ └── sqlserver
│ ├── SqlServerJdbcModule.scala
│ ├── SqlServerRenderModule.scala
│ └── SqlServerSqlModule.scala
└── test
├── resources
├── container-license-acceptance.txt
├── db_schema.sql
└── logback.xml
└── scala
└── zio
└── sql
└── sqlserver
├── CommonFunctionDefSpec.scala
├── DbSchema.scala
├── SqlServerModuleSpec.scala
└── SqlServerRunnableSpec.scala
/.git-blame-ignore-revs:
--------------------------------------------------------------------------------
1 | # Scala Steward: Reformat with scalafmt 3.7.10
2 | 9bcedec20a4fc7adedce8aea4921c05b0c7acfa4
3 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | sbt linguist-vendored
2 | website/* linguist-vendored
3 | docs/* linguist-vendored
4 |
5 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @zio/zio-sql
2 |
3 |
--------------------------------------------------------------------------------
/.github/release-drafter.yml:
--------------------------------------------------------------------------------
1 | name-template: 'v$NEXT_PATCH_VERSION'
2 | tag-template: 'v$NEXT_PATCH_VERSION'
3 | categories:
4 | - title: '🚀 Features'
5 | labels:
6 | - 'feature'
7 | - title: '🐛 Bug Fixes'
8 | labels:
9 | - 'bug'
10 | - title: '🧰 Maintenance'
11 | labels:
12 | - 'build'
13 | - title: '🌱 Dependency Updates'
14 | labels:
15 | - 'dependency-update'
16 | change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
17 | template: |
18 | ## Changes
19 | $CHANGES
20 |
--------------------------------------------------------------------------------
/.github/workflows/auto-approve.yml:
--------------------------------------------------------------------------------
1 | name: Auto approve
2 |
3 | on:
4 | pull_request_target
5 |
6 | jobs:
7 | auto-approve:
8 | runs-on: ubuntu-20.04
9 | steps:
10 | - uses: hmarr/auto-approve-action@v2.0.0
11 | if: github.actor == 'scala-steward'
12 | with:
13 | github-token: "${{ secrets.GITHUB_TOKEN }}"
14 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | env:
4 | JDK_JAVA_OPTIONS: -XX:+PrintCommandLineFlags -XX:+UseG1GC -Xmx4g -Xms4g -Xss4m
5 | JVM_OPTS: -XX:+PrintCommandLineFlags -XX:+UseG1GC -Xmx4g -Xms4g -Xss4m
6 |
7 | on:
8 | pull_request:
9 | push:
10 | branches: ['master']
11 | release:
12 | types:
13 | - published
14 |
15 | jobs:
16 | lint:
17 | runs-on: ubuntu-20.04
18 | strategy:
19 | fail-fast: false
20 | steps:
21 | - name: Checkout current branch
22 | uses: actions/checkout@v2.3.4
23 | with:
24 | fetch-depth: 0
25 | - name: Setup Scala and Java
26 | uses: olafurpg/setup-scala@v14
27 | - name: Cache scala dependencies
28 | uses: coursier/cache-action@v5
29 | - name: Lint code
30 | run: sbt check
31 |
32 | test:
33 | runs-on: ubuntu-20.04
34 | strategy:
35 | fail-fast: false
36 | matrix:
37 | java: ['openjdk@1.11.0', 'openjdk@1.17.0']
38 | scala: ['2.12.19', '2.13.13']
39 | steps:
40 | - name: Checkout current branch
41 | uses: actions/checkout@v2.3.4
42 | with:
43 | fetch-depth: 0
44 | - name: Setup Scala and Java
45 | uses: olafurpg/setup-scala@v14
46 | with:
47 | java-version: ${{ matrix.java }}
48 | - name: Cache scala dependencies
49 | uses: coursier/cache-action@v5
50 | - name: Run tests
51 | run: sbt ++${{ matrix.scala }} test
52 |
53 | website:
54 | runs-on: ubuntu-20.04
55 | timeout-minutes: 60
56 | steps:
57 | - name: Checkout current branch
58 | uses: actions/checkout@v3.1.0
59 | - name: Setup Scala and Java
60 | uses: olafurpg/setup-scala@v14
61 | - name: Cache scala dependencies
62 | uses: coursier/cache-action@v6
63 | - name: Check Document Generation
64 | run: sbt docs/compileDocs
65 |
66 | publish:
67 | runs-on: ubuntu-20.04
68 | needs: [lint, test, website]
69 | if: github.event_name != 'pull_request'
70 | steps:
71 | - name: Checkout current branch
72 | uses: actions/checkout@v2.3.4
73 | with:
74 | fetch-depth: 0
75 | - name: Setup Scala and Java
76 | uses: olafurpg/setup-scala@v14
77 | - name: Cache scala dependencies
78 | uses: coursier/cache-action@v5
79 | - name: Release artifacts
80 | run: sbt ci-release
81 | env:
82 | PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }}
83 | PGP_SECRET: ${{ secrets.PGP_SECRET }}
84 | SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
85 | SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }}
86 |
--------------------------------------------------------------------------------
/.github/workflows/release-drafter.yml:
--------------------------------------------------------------------------------
1 | name: Release Drafter
2 |
3 | on:
4 | push:
5 | branches: ['master']
6 |
7 | jobs:
8 | update_release_draft:
9 | runs-on: ubuntu-20.04
10 | steps:
11 | - uses: release-drafter/release-drafter@v5
12 | env:
13 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
14 |
--------------------------------------------------------------------------------
/.github/workflows/site.yml:
--------------------------------------------------------------------------------
1 | # This file was autogenerated using `zio-sbt-website` via `sbt generateGithubWorkflow`
2 | # task and should be included in the git repository. Please do not edit it manually.
3 |
4 | name: Website
5 | 'on':
6 | workflow_dispatch: {}
7 | release:
8 | types:
9 | - published
10 | push:
11 | branches:
12 | - master
13 | pull_request: {}
14 | jobs:
15 | build:
16 | name: Build and Test
17 | runs-on: ubuntu-latest
18 | if: ${{ github.event_name == 'pull_request' }}
19 | steps:
20 | - name: Git Checkout
21 | uses: actions/checkout@v3.3.0
22 | with:
23 | fetch-depth: '0'
24 | - name: Setup Scala
25 | uses: actions/setup-java@v3.9.0
26 | with:
27 | distribution: temurin
28 | java-version: 17
29 | check-latest: true
30 | - name: Check if the README file is up to date
31 | run: sbt docs/checkReadme
32 | - name: Check if the site workflow is up to date
33 | run: sbt docs/checkGithubWorkflow
34 | - name: Check artifacts build process
35 | run: sbt +publishLocal
36 | - name: Check website build process
37 | run: sbt docs/clean; sbt docs/buildWebsite
38 | publish-docs:
39 | name: Publish Docs
40 | runs-on: ubuntu-latest
41 | if: ${{ ((github.event_name == 'release') && (github.event.action == 'published')) || (github.event_name == 'workflow_dispatch') }}
42 | steps:
43 | - name: Git Checkout
44 | uses: actions/checkout@v3.3.0
45 | with:
46 | fetch-depth: '0'
47 | - name: Setup Scala
48 | uses: actions/setup-java@v3.9.0
49 | with:
50 | distribution: temurin
51 | java-version: 17
52 | check-latest: true
53 | - name: Setup NodeJs
54 | uses: actions/setup-node@v3
55 | with:
56 | node-version: 16.x
57 | registry-url: https://registry.npmjs.org
58 | - name: Publish Docs to NPM Registry
59 | run: sbt docs/publishToNpm
60 | env:
61 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
62 | generate-readme:
63 | name: Generate README
64 | runs-on: ubuntu-latest
65 | if: ${{ (github.event_name == 'push') || ((github.event_name == 'release') && (github.event.action == 'published')) }}
66 | steps:
67 | - name: Git Checkout
68 | uses: actions/checkout@v3.3.0
69 | with:
70 | ref: ${{ github.head_ref }}
71 | fetch-depth: '0'
72 | - name: Setup Scala
73 | uses: actions/setup-java@v3.9.0
74 | with:
75 | distribution: temurin
76 | java-version: 17
77 | check-latest: true
78 | - name: Generate Readme
79 | run: sbt docs/generateReadme
80 | - name: Commit Changes
81 | run: |
82 | git config --local user.email "github-actions[bot]@users.noreply.github.com"
83 | git config --local user.name "github-actions[bot]"
84 | git add README.md
85 | git commit -m "Update README.md" || echo "No changes to commit"
86 | - name: Create Pull Request
87 | uses: peter-evans/create-pull-request@v4.2.3
88 | with:
89 | body: |-
90 | Autogenerated changes after running the `sbt docs/generateReadme` command of the [zio-sbt-website](https://zio.dev/zio-sbt) plugin.
91 |
92 | I will automatically update the README.md file whenever there is new change for README.md, e.g.
93 | - After each release, I will update the version in the installation section.
94 | - After any changes to the "docs/index.md" file, I will update the README.md file accordingly.
95 | branch: zio-sbt-website/update-readme
96 | commit-message: Update README.md
97 | delete-branch: true
98 | title: Update README.md
99 |
--------------------------------------------------------------------------------
/.nvmrc:
--------------------------------------------------------------------------------
1 | v10.8.0
2 |
--------------------------------------------------------------------------------
/.scalafmt.conf:
--------------------------------------------------------------------------------
1 | version = "3.8.1"
2 | maxColumn = 120
3 | align.preset = most
4 | continuationIndent.defnSite = 2
5 | assumeStandardLibraryStripMargin = true
6 | docstrings.style = keep
7 | lineEndings = preserve
8 | includeCurlyBraceInSelectChains = false
9 | danglingParentheses.preset = true
10 | spaces {
11 | inImportCurlyBraces = true
12 | }
13 | optIn.annotationNewlines = true
14 |
15 | runner.dialect = scala213
16 |
17 | rewrite.rules = [SortImports, RedundantBraces]
18 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala-2.12/zio/sql/expr/Set.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.expr
2 |
3 | import zio.sql.typetag.TypeTag
4 | import zio.sql.Features
5 |
6 | sealed trait Set[F, -A] {
7 | type Value
8 |
9 | def lhs: Expr[F, A, Value]
10 | def rhs: Expr[_, A, Value]
11 |
12 | def typeTag: TypeTag[Value]
13 |
14 | }
15 |
16 | object Set {
17 | type Aux[F, -A, Value0] = Set[F, A] { type Value = Value0 }
18 |
19 | def apply[F: Features.IsSource, A, Value0: TypeTag](
20 | lhs0: Expr[F, A, Value0],
21 | rhs0: Expr[_, A, Value0]
22 | ): Set.Aux[F, A, Value0] =
23 | new Set[F, A] {
24 | type Value = Value0
25 |
26 | def lhs = lhs0
27 | def rhs = rhs0
28 |
29 | def typeTag = implicitly[TypeTag[Value]]
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala-2.13/zio/sql/expr/Set.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.expr
2 |
3 | import zio.sql.typetag.TypeTag
4 | import zio.sql.Features
5 |
6 | import scala.annotation.nowarn
7 |
8 | sealed trait Set[F, -A] {
9 | type Value
10 |
11 | def lhs: Expr[F, A, Value]
12 | def rhs: Expr[_, A, Value]
13 |
14 | def typeTag: TypeTag[Value]
15 |
16 | }
17 |
18 | object Set {
19 | type Aux[F, -A, Value0] = Set[F, A] { type Value = Value0 }
20 |
21 | @nowarn
22 | def apply[F: Features.IsSource, A, Value0: TypeTag](
23 | lhs0: Expr[F, A, Value0],
24 | rhs0: Expr[_, A, Value0]
25 | ): Set.Aux[F, A, Value0] =
26 | new Set[F, A] {
27 | type Value = Value0
28 |
29 | def lhs = lhs0
30 | def rhs = rhs0
31 |
32 | def typeTag = implicitly[TypeTag[Value]]
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/Sql.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import zio.schema.{ Schema, StandardType }
4 | import zio.sql.table._
5 | import zio.sql.update._
6 | import zio.sql.select._
7 | import zio.sql.insert._
8 | import zio.sql.delete._
9 |
10 | case class SqlRow(params: List[SqlParameter])
11 | case class SqlParameter(_type: StandardType[_], value: Any)
12 | case class SqlStatement(query: String, rows: List[SqlRow])
13 |
14 | trait Sql {
15 |
16 | /*
17 | * (SELECT *, "foo", table.a + table.b AS sum... FROM table WHERE cond) UNION (SELECT ... FROM table)
18 | * UNION ('1', '2', '3')
19 | * ORDER BY table.a ASC, foo, sum DESC
20 | * LIMIT 200
21 | * OFFSET 100
22 | * UPDATE table SET ...
23 | * INSERT ... INTO table
24 | * DELETE ... FROM table
25 | *
26 | * SELECT ARBITRARY(age), COUNT(*) FROM person GROUP BY age
27 | */
28 | val select: SelectByCommaBuilder = SelectByCommaBuilder()
29 |
30 | sealed trait Star
31 | val * : Star = new Star {}
32 |
33 | def select(star: Star): SelectAll = {
34 | val _ = star
35 | new SelectAll()
36 | }
37 |
38 | def select[F, A, B <: SelectionSet[A]](selection: Selection[F, A, B]): SelectBuilder[F, A, B] =
39 | SelectBuilder[F, A, B](selection)
40 |
41 | def subselect[ParentTable]: SubselectByCommaBuilder[ParentTable] = new SubselectByCommaBuilder[ParentTable]
42 |
43 | def deleteFrom[T <: Table](table: T): Delete[table.TableType] = Delete(table, true)
44 |
45 | def update[A](table: Table.Aux[A]): UpdateBuilder[A] = UpdateBuilder(table)
46 |
47 | val insertInto: InsertByCommaBuilder = InsertByCommaBuilder()
48 |
49 | def renderDelete(delete: Delete[_]): String
50 |
51 | def renderRead(read: Read[_]): String
52 |
53 | def renderUpdate(update: Update[_]): String
54 |
55 | def renderInsert[A: Schema](insert: Insert[_, A]): SqlStatement
56 |
57 | // TODO don't know where to put it now
58 | implicit def convertOptionToSome[A](implicit op: Schema[Option[A]]): Schema[Some[A]] =
59 | op.transformOrFail[Some[A]](
60 | {
61 | case Some(a) => Right(Some(a))
62 |       case None    => Left("cannot encode None as Some")
63 | },
64 | someA => Right(someA)
65 | )
66 | implicit val none: Schema[None.type] = Schema.singleton(None)
67 | }
68 |
--------------------------------------------------------------------------------
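
Note: the Sql trait above is the dialect-agnostic entry point; a concrete dialect (such as the modules under /postgres, /mysql, /oracle and /sqlserver) provides the four render* methods. A minimal sketch of such a dialect, with rendering left unimplemented (ToySqlDialect is hypothetical, not part of the library):

    import zio.schema.Schema
    import zio.sql.{ Sql, SqlStatement }
    import zio.sql.delete.Delete
    import zio.sql.insert.Insert
    import zio.sql.select.Read
    import zio.sql.update.Update

    // Toy dialect showing only which abstract members a concrete module must supply.
    object ToySqlDialect extends Sql {
      def renderDelete(delete: Delete[_]): String                     = ???
      def renderRead(read: Read[_]): String                           = ???
      def renderUpdate(update: Update[_]): String                     = ???
      def renderInsert[A: Schema](insert: Insert[_, A]): SqlStatement = ???
    }
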
/core/jvm/src/main/scala/zio/sql/delete/Delete.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.delete
2 |
3 | import zio.sql.table._
4 | import zio.sql.expr.Expr
5 |
6 | final case class Delete[A](table: Table.Aux[A], whereExpr: Expr[_, A, Boolean]) {
7 | def where[F](expr: Expr[F, A, Boolean]): Delete[A] = Delete(table, expr)
8 | }
9 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/expr/AggregationDef.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.expr
2 |
3 | import zio.sql.typetag.TypeTag
4 | import zio.sql.Features
5 | import java.math.BigDecimal
6 |
7 | final case class AggregationDef[-A, +B](name: FunctionName) { self =>
8 |
9 | def apply[F, Source, B1 >: B](expr: Expr[F, Source, A])(implicit
10 | typeTag: TypeTag[B1]
11 | ): Expr[Features.Aggregated[F], Source, B1] =
12 | Expr.AggregationCall[F, Source, A, B1](expr, self)
13 | }
14 |
15 | object AggregationDef {
16 | val Count = AggregationDef[Any, Long](FunctionName("count"))
17 | val Sum = AggregationDef[Double, Double](FunctionName("sum"))
18 | val SumInt = AggregationDef[Int, Int](FunctionName("sum"))
19 | val SumDec = AggregationDef[BigDecimal, BigDecimal](FunctionName("sum"))
20 | val Avg = AggregationDef[Double, Double](FunctionName("avg"))
21 | val AvgDec = AggregationDef[BigDecimal, BigDecimal](FunctionName("avg"))
22 | def Min[F, A, B: TypeTag](expr: Expr[F, A, B]) = AggregationDef[B, B](FunctionName("min"))(expr)
23 | def Max[F, A, B: TypeTag](expr: Expr[F, A, B]) = AggregationDef[B, B](FunctionName("max"))(expr)
24 | }
25 |
--------------------------------------------------------------------------------
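
Note: a new aggregation is just another AggregationDef value; applying it to a column expression wraps the feature type in Features.Aggregated, which the group-by machinery keys on. A small sketch (StdDev is a hypothetical aggregate, not shipped with the library):

    import zio.sql.expr.{ AggregationDef, FunctionName }

    object AggregationDefSketch {
      // Hypothetical aggregate following the pattern of Count/Sum/Avg above.
      val StdDev = AggregationDef[Double, Double](FunctionName("stddev"))
    }
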
/core/jvm/src/main/scala/zio/sql/expr/ComparableTypes.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.expr
2 |
3 | import scala.annotation.implicitNotFound
4 | import zio.sql.typetag._
5 |
6 | @implicitNotFound(
7 | "You cannot compare values of different types ${A} and ${B}. " +
8 | "As those are unrelated types, this query would fail at database level."
9 | )
10 | sealed trait ComparableTypes[A, B]
11 |
12 | object ComparableTypes extends ComparableTypesLowPriority {
13 | implicit final def comparableSubtype1[A <: B, B]: ComparableTypes[A, B] = new ComparableTypes[A, B] {}
14 |
15 | implicit final def AWithOptionIsComparable[A]: ComparableTypes[A, Option[A]] = new ComparableTypes[A, Option[A]] {}
16 | implicit final def optionWithAIsComparable[A]: ComparableTypes[Option[A], A] = new ComparableTypes[Option[A], A] {}
17 |
18 | implicit final def optionAndNone[A]: ComparableTypes[Option[A], None.type] =
19 | new ComparableTypes[Option[A], None.type] {}
20 | implicit final def noneAndOption[A]: ComparableTypes[None.type, Option[A]] =
21 | new ComparableTypes[None.type, Option[A]] {}
22 |
23 | implicit final def optionAndSome[A]: ComparableTypes[Option[A], Expr.Literal[Some[A]]] =
24 | new ComparableTypes[Option[A], Expr.Literal[Some[A]]] {}
25 | implicit final def someAndOption[A]: ComparableTypes[Expr.Literal[Some[A]], Option[A]] =
26 | new ComparableTypes[Expr.Literal[Some[A]], Option[A]] {}
27 |
28 | implicit final def dateIsComprable[A, B](implicit ev1: IsDate[A], ev2: IsDate[B]): ComparableTypes[A, B] =
29 | new ComparableTypes[A, B] {}
30 |
31 | implicit final def numericIsComparable[A, B](implicit
32 | ev1: IsNumeric[A],
33 | ev2: IsNumeric[B]
34 | ): ComparableTypes[A, B] = new ComparableTypes[A, B] {}
35 | }
36 |
37 | sealed trait ComparableTypesLowPriority {
38 | implicit final def comparableSubtype2[A, B <: A]: ComparableTypes[A, B] = new ComparableTypes[A, B] {}
39 | }
40 |
--------------------------------------------------------------------------------
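
Note: the @implicitNotFound annotation above turns an unsatisfiable comparison into a readable compile error. A minimal sketch of how the instances resolve, assuming the IsNumeric instances for Int and Double defined in zio.sql.typetag (their source is not shown in this dump):

    import zio.sql.expr.ComparableTypes

    object ComparableTypesCheck {
      val intVsOption = implicitly[ComparableTypes[Int, Option[Int]]] // A is comparable with Option[A]
      val intVsDouble = implicitly[ComparableTypes[Int, Double]]      // both sides numeric
      // implicitly[ComparableTypes[String, Int]] // does not compile; reports the message above
    }
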
/core/jvm/src/main/scala/zio/sql/expr/FunctionDef.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.expr
2 |
3 | import java.time._
4 | import zio.sql.typetag.TypeTag
5 | import zio.sql.Features
6 |
7 | final case class FunctionDef[-A, +B](name: FunctionName) { self =>
8 |
9 | def apply[B1 >: B]()(implicit ev: Any <:< A, typeTag: TypeTag[B1]): Expr[Features.Function0, Any, B1] =
10 | Expr.FunctionCall0(self.asInstanceOf[FunctionDef[Any, B1]])
11 |
12 | def apply[F, Source, B1 >: B](param1: Expr[F, Source, A])(implicit typeTag: TypeTag[B1]): Expr[F, Source, B1] =
13 | Expr.FunctionCall1(param1, self: FunctionDef[A, B1])
14 |
15 | def apply[F1, F2, Source, P1, P2, B1 >: B](param1: Expr[F1, Source, P1], param2: Expr[F2, Source, P2])(implicit
16 | ev: (P1, P2) <:< A,
17 | typeTag: TypeTag[B1]
18 | ): Expr[F1 with F2, Source, B1] =
19 | Expr.FunctionCall2(param1, param2, self.narrow[(P1, P2)]: FunctionDef[(P1, P2), B1])
20 |
21 | def apply[F1, F2, F3, Source, P1, P2, P3, B1 >: B](
22 | param1: Expr[F1, Source, P1],
23 | param2: Expr[F2, Source, P2],
24 | param3: Expr[F3, Source, P3]
25 | )(implicit ev: (P1, P2, P3) <:< A, typeTag: TypeTag[B1]): Expr[F1 with F2 with F3, Source, B1] =
26 | Expr.FunctionCall3(param1, param2, param3, self.narrow[(P1, P2, P3)]: FunctionDef[(P1, P2, P3), B1])
27 |
28 | def apply[F1, F2, F3, F4, Source, P1, P2, P3, P4, B1 >: B](
29 | param1: Expr[F1, Source, P1],
30 | param2: Expr[F2, Source, P2],
31 | param3: Expr[F3, Source, P3],
32 | param4: Expr[F4, Source, P4]
33 | )(implicit ev: (P1, P2, P3, P4) <:< A, typeTag: TypeTag[B1]): Expr[F1 with F2 with F3 with F4, Source, B1] =
34 | Expr.FunctionCall4(
35 | param1,
36 | param2,
37 | param3,
38 | param4,
39 | self.narrow[(P1, P2, P3, P4)]: FunctionDef[(P1, P2, P3, P4), B1]
40 | )
41 |
42 | def apply[F1, F2, F3, F4, F5, Source, P1, P2, P3, P4, P5, B1 >: B](
43 | param1: Expr[F1, Source, P1],
44 | param2: Expr[F2, Source, P2],
45 | param3: Expr[F3, Source, P3],
46 | param4: Expr[F4, Source, P4],
47 | param5: Expr[F5, Source, P5]
48 | )(implicit
49 | ev: (P1, P2, P3, P4, P5) <:< A,
50 | typeTag: TypeTag[B1]
51 | ): Expr[F1 with F2 with F3 with F4 with F5, Source, B1] =
52 | Expr.FunctionCall5(
53 | param1,
54 | param2,
55 | param3,
56 | param4,
57 | param5,
58 | self.narrow[(P1, P2, P3, P4, P5)]: FunctionDef[(P1, P2, P3, P4, P5), B1]
59 | )
60 |
61 | def apply[F1, F2, F3, F4, F5, F6, Source, P1, P2, P3, P4, P5, P6, B1 >: B](
62 | param1: Expr[F1, Source, P1],
63 | param2: Expr[F2, Source, P2],
64 | param3: Expr[F3, Source, P3],
65 | param4: Expr[F4, Source, P4],
66 | param5: Expr[F5, Source, P5],
67 | param6: Expr[F6, Source, P6]
68 | )(implicit
69 | ev: (P1, P2, P3, P4, P5, P6) <:< A,
70 | typeTag: TypeTag[B1]
71 | ): Expr[F1 with F2 with F3 with F4 with F5 with F6, Source, B1] =
72 | Expr.FunctionCall6(
73 | param1,
74 | param2,
75 | param3,
76 | param4,
77 | param5,
78 | param6,
79 | self.narrow[(P1, P2, P3, P4, P5, P6)]: FunctionDef[(P1, P2, P3, P4, P5, P6), B1]
80 | )
81 |
82 | def apply[F1, F2, F3, F4, F5, F6, F7, Source, P1, P2, P3, P4, P5, P6, P7, B1 >: B](
83 | param1: Expr[F1, Source, P1],
84 | param2: Expr[F2, Source, P2],
85 | param3: Expr[F3, Source, P3],
86 | param4: Expr[F4, Source, P4],
87 | param5: Expr[F5, Source, P5],
88 | param6: Expr[F6, Source, P6],
89 | param7: Expr[F7, Source, P7]
90 | )(implicit
91 | ev: (P1, P2, P3, P4, P5, P6, P7) <:< A,
92 | typeTag: TypeTag[B1]
93 | ): Expr[F1 with F2 with F3 with F4 with F5 with F6 with F7, Source, B1] =
94 | Expr.FunctionCall7(
95 | param1,
96 | param2,
97 | param3,
98 | param4,
99 | param5,
100 | param6,
101 | param7,
102 | self.narrow[(P1, P2, P3, P4, P5, P6, P7)]: FunctionDef[(P1, P2, P3, P4, P5, P6, P7), B1]
103 | )
104 |
105 | def narrow[C](implicit ev: C <:< A): FunctionDef[C, B] = {
106 | val _ = ev
107 | self.asInstanceOf[FunctionDef[C, B]]
108 | }
109 | }
110 |
111 | object FunctionDef {
112 |
113 | // math functions
114 | val Abs = FunctionDef[Double, Double](FunctionName("abs"))
115 | val Acos = FunctionDef[Double, Double](FunctionName("acos"))
116 | val Asin = FunctionDef[Double, Double](FunctionName("asin"))
117 | val Atan = FunctionDef[Double, Double](FunctionName("atan"))
118 | val Ceil = FunctionDef[Double, Double](FunctionName("ceil"))
119 | val Cos = FunctionDef[Double, Double](FunctionName("cos"))
120 | val Exp = FunctionDef[Double, Double](FunctionName("exp"))
121 | val Floor = FunctionDef[Double, Double](FunctionName("floor"))
122 | val Ln = FunctionDef[Double, Double](FunctionName("ln"))
123 | val Log = FunctionDef[(Double, Double), Double](FunctionName("log"))
124 | val Mod = FunctionDef[(Double, Double), Double](FunctionName("mod"))
125 | val Power = FunctionDef[(Double, Double), Double](FunctionName("power"))
126 | val Round = FunctionDef[(Double, Int), Double](FunctionName("round"))
127 | val Sign = FunctionDef[Double, Int](FunctionName("sign"))
128 | val Sin = FunctionDef[Double, Double](FunctionName("sin"))
129 | val Sqrt = FunctionDef[Double, Double](FunctionName("sqrt"))
130 | val Tan = FunctionDef[Double, Double](FunctionName("tan"))
131 | val WidthBucket = FunctionDef[(Double, Double, Double, Int), Int](FunctionName("width_bucket"))
132 |
133 | // string functions
134 | val Ascii = FunctionDef[String, Int](FunctionName("ascii"))
135 | val CharLength = FunctionDef[String, Int](FunctionName("character_length"))
136 | val Concat = FunctionDef[(String, String), String](FunctionName("concat")) // todo varargs
137 | val ConcatWs3 = FunctionDef[(String, String, String), String](FunctionName("concat_ws"))
138 | val ConcatWs4 = FunctionDef[(String, String, String, String), String](FunctionName("concat_ws"))
139 | val Lower = FunctionDef[String, String](FunctionName("lower"))
140 | val Ltrim = FunctionDef[String, String](FunctionName("ltrim"))
141 | val OctetLength = FunctionDef[String, Int](FunctionName("octet_length"))
142 | val Overlay = FunctionDef[(String, String, Int, Option[Int]), String](FunctionName("overlay"))
143 | val Position = FunctionDef[(String, String), Int](FunctionName("position"))
144 | val Replace = FunctionDef[(String, String, String), String](FunctionName("replace"))
145 | val Rtrim = FunctionDef[String, String](FunctionName("rtrim"))
146 | val Substring = FunctionDef[(String, Int, Option[Int]), String](FunctionName("substring"))
147 | // TODO substring regex
148 | val Trim = FunctionDef[String, String](FunctionName("trim"))
149 | val Upper = FunctionDef[String, String](FunctionName("upper"))
150 |
151 | // date functions
152 | val CurrentTimestamp = FunctionDef[Nothing, Instant](FunctionName("current_timestamp"))
153 | }
154 |
--------------------------------------------------------------------------------
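
Note: dialect-specific functions follow the same pattern as the built-ins above: a FunctionDef value carrying the SQL name plus the parameter and return types. The values below are hypothetical examples, not part of the library; applying them to column expressions goes through the arity-specific apply overloads, each of which also requires a TypeTag for the result type.

    import zio.sql.expr.{ FunctionDef, FunctionName }

    object FunctionDefSketch {
      // Hypothetical single- and two-argument string functions, mirroring the built-ins above.
      val Reverse = FunctionDef[String, String](FunctionName("reverse"))
      val Repeat  = FunctionDef[(String, Int), String](FunctionName("repeat"))
    }
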
/core/jvm/src/main/scala/zio/sql/expr/FunctionName.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.expr
2 |
3 | final case class FunctionName(name: String)
4 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/insert/Insert.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.insert
2 |
3 | import zio.schema.Schema
4 | import zio.sql.table._
5 | import zio.sql.select._
6 |
7 | final case class Insert[A, Z](table: Table.Source.Aux[A], sources: SelectionSet[A], values: Seq[Z])(implicit
8 | schemaN: Schema[Z]
9 | )
10 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/insert/InsertBuilder.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.insert
2 |
3 | import zio.schema.Schema
4 | import zio.sql.macros._
5 | import zio.sql.table._
6 | import zio.sql.select._
7 |
8 | final case class InsertBuilder[F, Source, AllColumnIdentities, B <: SelectionSet[Source]](
9 | table: Table.Source.Aux_[Source, AllColumnIdentities],
10 | sources: Selection[F, Source, B]
11 | ) {
12 |
13 | def values[Z](values: Seq[Z])(implicit
14 | schemaCC: Schema[Z],
15 | schemaValidity: InsertLike[F, sources.ColsRepr, AllColumnIdentities, Z]
16 | ): Insert[Source, Z] = Insert(table, sources.value, values)
17 |
18 | def values[Z](value: Z)(implicit
19 | schemaCC: Schema[Z],
20 | schemaValidity: InsertLike[F, sources.ColsRepr, AllColumnIdentities, Z]
21 | ): Insert[Source, Z] = Insert(table, sources.value, Seq(value))
22 | }
23 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/insert/InsertByCommaBuilder.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.insert
2 |
3 | import zio.sql.table._
4 | import zio.sql.select._
5 | import scala.language.experimental.macros
6 |
7 | final case class InsertByCommaBuilder() {
8 | def apply[F, Source, Set <: SelectionSet[Source], AllColumnIdentities](
9 | table: Table.Source.Aux_[Source, AllColumnIdentities]
10 | )(selections: Selection[F, Source, _ <: SelectionSet[Source]]*): InsertBuilder[F, Source, AllColumnIdentities, Set] =
11 | macro SelectionMacro.insertApplyMacro[F, Source, AllColumnIdentities]
12 | }
13 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/ops/Operator.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.ops
2 |
3 | import zio.sql.typetag._
4 |
5 | trait Operator {
6 | val symbol: String
7 | }
8 |
9 | object Operator {
10 |
11 | sealed trait BinaryOp[A] extends Operator {
12 | val symbol: String
13 | }
14 |
15 | object BinaryOp {
16 |
17 | final case class Add[A: IsNumeric]() extends BinaryOp[A] {
18 | def isNumeric: IsNumeric[A] = implicitly[IsNumeric[A]]
19 |
20 | override val symbol: String = "+"
21 | }
22 |
23 | final case class Sub[A: IsNumeric]() extends BinaryOp[A] {
24 | def isNumeric: IsNumeric[A] = implicitly[IsNumeric[A]]
25 |
26 | override val symbol: String = "-"
27 | }
28 |
29 | final case class Mul[A: IsNumeric]() extends BinaryOp[A] {
30 | def isNumeric: IsNumeric[A] = implicitly[IsNumeric[A]]
31 |
32 | override val symbol: String = "*"
33 | }
34 |
35 | final case class Div[A: IsNumeric]() extends BinaryOp[A] {
36 | def isNumeric: IsNumeric[A] = implicitly[IsNumeric[A]]
37 |
38 | override val symbol: String = "/"
39 | }
40 | case object AndBool extends BinaryOp[Boolean] {
41 | override val symbol: String = "and"
42 | }
43 |
44 | case object OrBool extends BinaryOp[Boolean] {
45 | override val symbol: String = "or"
46 | }
47 |
48 | final case class AndBit[A: IsIntegral]() extends BinaryOp[A] {
49 | def isIntegral: IsIntegral[A] = implicitly[IsIntegral[A]]
50 | override val symbol: String = "&"
51 | }
52 | final case class OrBit[A: IsIntegral]() extends BinaryOp[A] {
53 | def isIntegral: IsIntegral[A] = implicitly[IsIntegral[A]]
54 | override val symbol: String = "|"
55 |
56 | }
57 | }
58 |
59 | sealed trait PropertyOp extends Operator
60 |
61 | object PropertyOp {
62 | case object IsNull extends PropertyOp {
63 | override val symbol: String = "is null"
64 | }
65 | case object IsNotNull extends PropertyOp {
66 | override val symbol: String = "is not null"
67 | }
68 | case object IsTrue extends PropertyOp {
69 | override val symbol: String = "= true"
70 | }
71 | case object IsNotTrue extends PropertyOp {
72 | override val symbol: String = "= false"
73 | }
74 | }
75 |
76 | sealed trait RelationalOp extends Operator
77 |
78 | object RelationalOp {
79 | case object Equals extends RelationalOp {
80 | override val symbol: String = "="
81 | }
82 | case object LessThan extends RelationalOp {
83 | override val symbol: String = "<"
84 | }
85 | case object GreaterThan extends RelationalOp {
86 | override val symbol: String = ">"
87 | }
88 | case object LessThanEqual extends RelationalOp {
89 | override val symbol: String = "<="
90 | }
91 | case object GreaterThanEqual extends RelationalOp {
92 | override val symbol: String = ">="
93 | }
94 | case object NotEqual extends RelationalOp {
95 | override val symbol: String = "<>"
96 | }
97 | case object Like extends RelationalOp {
98 | override val symbol: String = "like"
99 | }
100 | }
101 |
102 | sealed trait UnaryOp[A] extends Operator
103 |
104 | object UnaryOp {
105 | final case class Negate[A: IsNumeric]() extends UnaryOp[A] {
106 | def isNumeric: IsNumeric[A] = implicitly[IsNumeric[A]]
107 | val symbol = "-"
108 | }
109 |
110 | final case class NotBit[A: IsIntegral]() extends UnaryOp[A] {
111 | def isIntegral: IsIntegral[A] = implicitly[IsIntegral[A]]
112 | val symbol = "~"
113 | }
114 |
115 | case object NotBool extends UnaryOp[Boolean] {
116 | val symbol = "not"
117 | }
118 | }
119 |
120 | }
121 |
--------------------------------------------------------------------------------
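
Note: every operator only carries its SQL symbol, so rendering one can stay a plain string interpolation. A minimal sketch (the real per-database render modules are far more involved):

    import zio.sql.ops.Operator

    object OperatorSketch {
      def renderBinary(lhs: String, op: Operator.BinaryOp[_], rhs: String): String =
        s"$lhs ${op.symbol} $rhs"

      val rendered = renderBinary("active", Operator.BinaryOp.AndBool, "verified") // "active and verified"
    }
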
/core/jvm/src/main/scala/zio/sql/package.scala:
--------------------------------------------------------------------------------
1 | package zio
2 |
3 | import zio.sql.expr.Expr
4 |
5 | package object sql {
6 |
7 | type Lens[F, S, A] = Expr[Features.Source[F, S], S, A]
8 |
9 | type Prism[F, S, A] = Unit
10 |
11 | type Traversal[S, A] = Unit
12 | }
13 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/select/ColumnSelection.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.select
2 |
3 | import zio.sql.table.Column
4 | import zio.sql.typetag.TypeTag
5 | import zio.sql.expr.Expr
6 |
7 | sealed trait ColumnSelection[-Source, +ColumnType] {
8 | type ColumnType0 <: ColumnType
9 |
10 | def name: Option[String]
11 |
12 | val toColumn: Column[ColumnType]
13 | }
14 |
15 | object ColumnSelection {
16 | final case class Constant[ColumnType: TypeTag](value: ColumnType, name: Option[String])
17 | extends ColumnSelection[Any, ColumnType] {
18 | def typeTag: TypeTag[ColumnType] = implicitly[TypeTag[ColumnType]]
19 |
20 | val toColumn: Column[ColumnType] = name match {
21 | case Some(value) => Column.Named(value)
22 | case None => Column.Indexed()
23 | }
24 | }
25 |
26 | final case class Computed[F, Source, ColumnType](expr: Expr[F, Source, ColumnType], name: Option[String])
27 | extends ColumnSelection[Source, ColumnType] {
28 | implicit def typeTag: TypeTag[ColumnType] = Expr.typeTagOf(expr)
29 |
30 | val toColumn: Column[ColumnType] = name match {
31 | case Some(value) => Column.Named(value)
32 | case None => Column.Indexed()
33 | }
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/select/DecodingError.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.select
2 |
3 | import zio.sql.typetag._
4 |
5 | sealed trait DecodingError extends Exception {
6 | def message: String
7 | }
8 |
9 | object DecodingError {
10 | final case class UnexpectedNull(column: Int) extends DecodingError {
11 | def message = s"Expected column with index ${column} to be non-null"
12 | }
13 | final case class UnexpectedType(expected: TypeTag[_], actual: Int) extends DecodingError {
14 | def message = s"Expected type ${expected} but found ${actual}"
15 | }
16 | final case class MissingColumn(column: Int) extends DecodingError {
17 | def message = s"The column with index ${column} does not exist"
18 | }
19 | case object Closed extends DecodingError {
20 | def message = s"The ResultSet has been closed, so decoding is impossible"
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/select/Ordering.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.select
2 |
3 | import zio.sql.expr.Expr
4 | import scala.language.implicitConversions
5 |
6 | sealed trait Ordering[+A] {
7 | val value: A
8 | }
9 |
10 | object Ordering {
11 | final case class Asc[A](value: A) extends Ordering[A]
12 | final case class Desc[A](value: A) extends Ordering[A]
13 |
14 | implicit def exprToOrdering[F, A, B](expr: Expr[F, A, B]): Ordering[Expr[F, A, B]] =
15 | Asc(expr)
16 | }
17 |
--------------------------------------------------------------------------------
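
Note: the implicit conversion above is what lets a bare column expression appear in an ORDER BY position and default to ascending; descending order is requested explicitly. A small sketch (the String values stand in for what would normally be Expr column references):

    import zio.sql.select.Ordering
    import zio.sql.select.Ordering.{ Asc, Desc }

    object OrderingSketch {
      val byPriceAsc: Ordering[String]  = Asc("products.price")
      val byPriceDesc: Ordering[String] = Desc("products.price")
    }
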
/core/jvm/src/main/scala/zio/sql/select/SelectAll.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.select
2 |
3 | import zio.sql.table._
4 | import zio.sql.expr._
5 | import scala.language.experimental.macros
6 | import scala.language.implicitConversions
7 |
8 | final case class SelectAll() {
9 | def from[AllColumnsIdentities, TableType, ColumnsOut, F, Repr, Head, Tail <: SelectionSet[TableType]](
10 | wrapper: SelectAllWrapper[AllColumnsIdentities, TableType, ColumnsOut]
11 | ): Read.Subselect[F, Repr, TableType, TableType, Head, Tail] =
12 | macro SelectionMacro.buildSelectAll[AllColumnsIdentities, ColumnsOut, TableType]
13 | }
14 |
15 | case class SelectAllWrapper[AllColumnsIdentities, TableType, ColumnsOut](
16 | table: Table.Source.WithTableDetails[AllColumnsIdentities, TableType, ColumnsOut],
17 | exprs: List[zio.sql.expr.Expr[_, TableType, _]]
18 | )
19 |
20 | object SelectAllWrapper {
21 | implicit def tableToExprs[AllColumnsIdentities, TableType, ColumnsOut](
22 | table: Table.Source.WithTableDetails[AllColumnsIdentities, TableType, ColumnsOut]
23 | ): SelectAllWrapper[AllColumnsIdentities, TableType, ColumnsOut] =
24 | SelectAllWrapper(
25 | table,
26 | table.columns.asInstanceOf[Product].productIterator.toList.map(_.asInstanceOf[Expr[_, TableType, _]])
27 | )
28 | }
29 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/select/SelectBuilder.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.select
2 |
3 | import zio.sql.table._
4 | import zio.sql.macros._
5 | import scala.language.implicitConversions
6 |
7 | final case class SelectBuilder[F0, Source, B <: SelectionSet[Source]](selection: Selection[F0, Source, B]) {
8 |
9 | def from[Source0 <: Source](table: Table.Aux[Source0])(implicit
10 | ev: B <:< SelectionSet.Cons[Source0, selection.value.ColumnHead, selection.value.SelectionTail],
11 | normalizer: Normalizer[selection.value.ResultTypeRepr]
12 | ): Read.Select[
13 | F0,
14 | normalizer.Out,
15 | Source0,
16 | selection.value.ColumnHead,
17 | selection.value.SelectionTail
18 | ] = {
19 | type B0 = SelectionSet.ConsAux[
20 | selection.value.ResultTypeRepr,
21 | Source0,
22 | selection.value.ColumnHead,
23 | selection.value.SelectionTail
24 | ]
25 | val b: B0 = selection.value.asInstanceOf[B0]
26 |
27 | Read.Subselect(Selection[F0, Source0, B0](b), Some(table), true).normalize
28 | }
29 | }
30 |
31 | object SelectBuilder {
32 |
33 | implicit def noTable[F, Source >: Any, B <: SelectionSet[Source]](
34 | builder: SelectBuilder[F, Source, B]
35 | )(implicit
36 | ev: B <:< SelectionSet.Cons[
37 | Source,
38 | builder.selection.value.ColumnHead,
39 | builder.selection.value.SelectionTail
40 | ],
41 | normalizer: Normalizer[builder.selection.value.ResultTypeRepr]
42 | ): Read.Select[
43 | F,
44 | normalizer.Out,
45 | Source,
46 | builder.selection.value.ColumnHead,
47 | builder.selection.value.SelectionTail
48 | ] = {
49 | type B0 = SelectionSet.ConsAux[
50 | builder.selection.value.ResultTypeRepr,
51 | Source,
52 | builder.selection.value.ColumnHead,
53 | builder.selection.value.SelectionTail
54 | ]
55 | val b: B0 = builder.selection.value.asInstanceOf[B0]
56 |
57 | Read.Subselect(Selection[F, Source, B0](b), None, true).normalize
58 | }
59 | }
60 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/select/SelectByCommaBuilder.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.select
2 |
3 | import zio.sql.expr.Expr
4 | import scala.language.experimental.macros
5 |
6 | final case class SelectByCommaBuilder() {
7 | def apply[F, Source, Set <: SelectionSet[Source]](exprs: Expr[F, Source, _]*): SelectBuilder[F, Source, Set] =
8 | macro SelectionMacro.selectApplyMacro[F, Source]
9 | }
10 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/select/Selection.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.select
2 |
3 | import zio.sql.typetag.TypeTag
4 | import zio.sql.expr.Expr
5 |
6 | /**
7 | * A columnar selection of `B` from a source `A`, modeled as `A => B`.
8 | */
9 | final case class Selection[-F, -A, +B <: SelectionSet[A]](value: B) { self =>
10 |
11 | type ColsRepr = value.ResultTypeRepr
12 |
13 | type ColumnsOut[S] = value.ColumnsOut[S]
14 |
15 | def columns[S](name: String): ColumnsOut[S] = value.columns[S](name)
16 |
17 | def ++[F2, A1 <: A, C <: SelectionSet[A1]](
18 | that: Selection[F2, A1, C]
19 | ): Selection[F with F2, A1, self.value.Append[A1, C]] =
20 | Selection(self.value ++ that.value)
21 | }
22 |
23 | object Selection {
24 | import ColumnSelection._
25 | import SelectionSet.{ Cons, Empty }
26 |
27 | type Aux[F, -A, +B <: SelectionSet[A], ColsRepr0] = Selection[F, A, B] {
28 | type ColsRepr = ColsRepr0
29 | }
30 |
31 | def constantOption[A: TypeTag](value: A, option: Option[String]): Selection[Any, Any, Cons[Any, A, Empty]] =
32 | Selection(Cons(Constant(value, option), Empty))
33 |
34 | def constant[A: TypeTag](value: A): Selection[Any, Any, Cons[Any, A, Empty]] = constantOption(value, None)
35 |
36 | def constantAs[A: TypeTag](value: A, name: String): Selection[Any, Any, Cons[Any, A, Empty]] =
37 | constantOption(value, Some(name))
38 |
39 | def computedOption[F, A, B](expr: Expr[F, A, B], name: Option[String]): Selection[F, A, Cons[A, B, Empty]] =
40 | Selection(Cons(Computed(expr, name), Empty))
41 |
42 | def computed[F, A, B](expr: Expr[F, A, B]): Selection[F, A, Cons[A, B, Empty]] =
43 | computedOption(expr, None)
44 |
45 | def computedAs[F, A, B](expr: Expr[F, A, B], name: String): Selection[F, A, Cons[A, B, Empty]] =
46 | computedOption(expr, Some(name))
47 | }
48 |
--------------------------------------------------------------------------------
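
Note: the constructors above are the low-level building blocks behind the select(...) syntax: each produces a single-column Selection, and ++ concatenates selections at both the value and the type level. A small sketch using the constant variants, assuming the TypeTag[String] and TypeTag[Int] instances from zio.sql.typetag (not shown in this dump):

    import zio.sql.select.Selection

    object SelectionSketch {
      // Two single-column constant selections combined into one.
      val sel = Selection.constantAs("zio-sql", "library") ++ Selection.constant(1)
    }
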
/core/jvm/src/main/scala/zio/sql/select/SelectionMacro.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.select
2 |
3 | import scala.reflect.macros.whitebox
4 | import zio.sql.expr.{ Expr => ZExpr }
5 | import scala.collection.immutable
6 |
7 | private[sql] class SelectionMacro(val c: whitebox.Context) {
8 | import c.universe._
9 |
10 | def selectApplyMacro[F, Source](
11 | exprs: c.Expr[ZExpr[F, Source, _]]*
12 | )(implicit i1: c.WeakTypeTag[F], i2: c.WeakTypeTag[Source]): c.Tree = {
13 |
14 | val selection = buildSelectionFromExpr[F, Source](exprs.toList)
15 |
16 | q"""zio.sql.select.SelectBuilder(
17 | ${selection.tree}
18 | )"""
19 | }
20 |
21 | def subselectApplyMacro[F, Source, ParentTable](
22 | exprs: c.Expr[ZExpr[F, Source, _]]*
23 | )(implicit i1: c.WeakTypeTag[F], i2: c.WeakTypeTag[Source], i3: c.WeakTypeTag[ParentTable]): c.Tree = {
24 |
25 | val fType = weakTypeOf[F]
26 | val sourceType = weakTypeOf[Source]
27 | val parentTable = weakTypeOf[ParentTable]
28 |
29 | val selection = buildSelectionFromExpr[F, Source](exprs.toList)
30 |
31 | val selectionSetType = buildSelectionSetType(exprs.toList.map(e => e.actualType), sourceType)
32 |
33 | q"""zio.sql.select.SubselectBuilder[${q"$fType"}, ${q"$sourceType"}, ${q"${selectionSetType}"} , ${q"$parentTable"}](
34 | ${selection.tree}
35 | )"""
36 | }
37 |
38 | def insertApplyMacro[F, Source, AllColumnIdentities](
39 | table: c.Expr[zio.sql.table.Table.Source.Aux_[Source, AllColumnIdentities]]
40 | )(selections: c.Expr[Selection[F, Source, _]]*): c.Tree = {
41 |
42 | val selection = buildSelection[F, Source](selections.toList)
43 |
44 | q"""
45 | zio.sql.insert.InsertBuilder(
46 | ${table.tree},
47 | ${selection.tree}
48 | )
49 | """
50 | }
51 |
52 | // TODO clean up :) - extract common functionality etc.
53 | def buildSelectAll[AllColumnsIdentities, ColumnsOut: WeakTypeTag, TableType: WeakTypeTag](
54 | wrapper: c.Expr[SelectAllWrapper[AllColumnsIdentities, TableType, ColumnsOut]]
55 | ): c.Tree = {
56 |
57 | val tableType = weakTypeOf[TableType]
58 | val columnsOutType = weakTypeOf[ColumnsOut]
59 |
60 | val exprs = splitTuple(columnsOutType)
61 |
62 | val expList: List[(Type, Type, Type)] = exprs.map(e => splitExpr(e))
63 |
64 |     val intF = buildIntersectionF(expList.map(_._1))
65 |
66 | val selSetTail = buildSelectionSetType(exprs.tail, tableType)
67 |
68 | val tupleOfTypes = expList.map(_._3)
69 | val repr = tq"(..$tupleOfTypes)"
70 |
71 | val headType = expList.head._3
72 |
73 | val table = reify(wrapper.splice.table)
74 |
75 | val selection = reify {
76 | wrapper.splice.exprs
77 | .map(e => zio.sql.expr.Expr.expToSelection(e))
78 | .reduce[zio.sql.select.Selection[_, TableType, _ <: SelectionSet[TableType]]] { case (ex1, ex2) =>
79 | ex1 ++ ex2
80 | }
81 | }
82 |
83 | val selSet = q"${buildSelectionSetType(
84 | exprs,
85 | tableType
86 | )}.asInstanceOf[zio.sql.select.SelectionSet.ConsAux[${repr},${q"${tableType}"},${q"${headType}"}, ${q"${selSetTail}"}]]"
87 |
88 | q"""
89 | zio.sql.select.Read.Subselect[${q"${intF}"}, ${repr}, ${q"${tableType}"}, ${q"${tableType}"}, ${q"${headType}"}, ${q"${selSetTail}"}](
90 | ${selection.tree}.asInstanceOf[zio.sql.select.Selection[${q"${intF}"}, ${tableType}, ${q"${selSet}"}]],
91 | Some(${table}),
92 | true
93 | )
94 | """
95 | }
96 |
97 |   private def buildIntersectionF(l: List[Type]): Tree =
98 | l match {
99 | case immutable.Nil => tq""
100 | case head :: Nil => tq"${q"$head"} "
101 |       case head :: tail => tq"${q"$head"} with ${buildIntersectionF(tail)}"
102 | }
103 |
104 | private def splitTuple(tuple: Type): List[Type] =
105 | tuple.dealias match {
106 | case TypeRef(_, _, args) => args.map(_.dealias)
107 | case e => c.abort(c.enclosingPosition, s"Error extracting table and expr type: ${e}")
108 | }
109 |
110 | private def buildSelection[F, Source](sels: List[c.Expr[Selection[F, Source, _]]]) =
111 | sels
112 | .map(_.asInstanceOf[c.Expr[Selection[F, Source, _ <: SelectionSet[Source]]]])
113 | .reduce[c.Expr[zio.sql.select.Selection[F, Source, _ <: SelectionSet[Source]]]] { case (ex1, ex2) =>
114 | reify {
115 | ex1.splice ++ ex2.splice
116 | }
117 | }
118 |
119 | private def buildSelectionFromExpr[F, Source](exprs: List[c.Expr[ZExpr[F, Source, _]]]) =
120 | exprs
121 | .map(e =>
122 | reify {
123 | zio.sql.expr.Expr.expToSelection(e.splice)
124 | }
125 | )
126 | .reduce[c.Expr[zio.sql.select.Selection[F, Source, _ <: SelectionSet[Source]]]] { case (ex1, ex2) =>
127 | reify {
128 | ex1.splice ++ ex2.splice
129 | }
130 | }
131 |
132 | private def splitExpr(f: Type): (Type, Type, Type) =
133 | f.dealias match {
134 | case TypeRef(_, _, args) if args.size == 3 => (args(0), args(1), args(2))
135 | case e => c.abort(c.enclosingPosition, s"Error extracting table and expr type: ${e}")
136 | }
137 |
138 | // Table type in subselect is the intersection type of parent table and sub table (not only subtable taken from the expr)
139 | private def buildSelectionSetType(types: List[Type], parentTableType: Type): Tree =
140 | types match {
141 | case Nil =>
142 | tq"zio.sql.select.SelectionSet.Empty"
143 | case head :: tail =>
144 | val (_, _, a) = splitExpr(head)
145 | tq"zio.sql.select.SelectionSet.Cons[${q"${parentTableType.dealias}"}, ${q"$a"}, ${buildSelectionSetType(tail, parentTableType)}]"
146 | }
147 | }
148 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/select/SelectionSet.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.select
2 |
3 | import zio.sql.expr.Expr
4 | import zio.sql.Features
5 | import zio.sql.table.Column
6 |
7 | sealed trait SelectionSet[-Source] {
8 | type SelectionsRepr[Source1, T]
9 |
10 | type ResultTypeRepr
11 |
12 | type Append[Source1, That <: SelectionSet[Source1]] <: SelectionSet[Source1]
13 |
14 | type ColumnHead
15 |
16 | type SelectionTail <: SelectionSet[Source]
17 | type HeadIdentity
18 |
19 | type ColumnsOut[S]
20 |
21 | def columns[S](name: String): ColumnsOut[S]
22 |
23 | def ++[Source1 <: Source, That <: SelectionSet[Source1]](that: That): Append[Source1, That]
24 |
25 | def selectionsUntyped: List[ColumnSelection[Source, _]]
26 |
27 | def selections[Source1 <: Source, T]: SelectionsRepr[Source1, T]
28 | }
29 |
30 | object SelectionSet {
31 |
32 | type Aux[-Source, ResultTypeRepr0] =
33 | SelectionSet[Source] {
34 | type ResultTypeRepr = ResultTypeRepr0
35 | }
36 |
37 | type ConsAux[ResultTypeRepr0, -Source, A, B <: SelectionSet[Source]] =
38 | SelectionSet.Cons[Source, A, B] {
39 | type ResultTypeRepr = ResultTypeRepr0
40 | }
41 |
42 | type Empty = Empty.type
43 |
44 | case object Empty extends SelectionSet[Any] {
45 |
46 | override type ColumnHead = Unit
47 | override type SelectionTail = SelectionSet.Empty
48 |
49 | override type HeadIdentity = Any
50 |
51 | override type SelectionsRepr[Source1, T] = Unit
52 |
53 | override type ResultTypeRepr = Unit
54 |
55 | override type Append[Source1, That <: SelectionSet[Source1]] = That
56 |
57 | override type ColumnsOut[S] = Unit
58 |
59 | override def columns[S](name: String): ColumnsOut[S] = ()
60 |
61 | override def ++[Source1 <: Any, That <: SelectionSet[Source1]](that: That): Append[Source1, That] =
62 | that
63 |
64 | override def selectionsUntyped: List[ColumnSelection[Any, _]] = Nil
65 |
66 | override def selections[Source1 <: Any, T]: SelectionsRepr[Source1, T] = ()
67 | }
68 |
69 | final case class Cons[-Source, A, B <: SelectionSet[Source]](head: ColumnSelection[Source, A], tail: B)
70 | extends SelectionSet[Source] { self =>
71 |
72 | override type ColumnHead = A
73 | override type SelectionTail = B
74 |
75 | override type HeadIdentity = head.toColumn.Identity
76 |
77 | override type SelectionsRepr[Source1, T] = (ColumnSelection[Source1, A], tail.SelectionsRepr[Source1, T])
78 |
79 | override type ResultTypeRepr = (A, tail.ResultTypeRepr)
80 |
81 | override type Append[Source1, That <: SelectionSet[Source1]] =
82 | Cons[Source1, A, tail.Append[Source1, That]]
83 |
84 | override type ColumnsOut[S] = (Expr[Features.Source[HeadIdentity, S], S, A], tail.ColumnsOut[S])
85 |
86 | override def columns[S](name: String): ColumnsOut[S] = {
87 | val column: Column.Aux[A, HeadIdentity] = head.toColumn
88 |
89 | (Expr.Source(name, column), tail.columns[S](name))
90 | }
91 |
92 | override def ++[Source1 <: Source, That <: SelectionSet[Source1]](that: That): Append[Source1, That] =
93 | Cons[Source1, A, tail.Append[Source1, That]](head, tail ++ that)
94 |
95 | override def selectionsUntyped: List[ColumnSelection[Source, _]] = head :: tail.selectionsUntyped
96 |
97 | override def selections[Source1 <: Source, T]: SelectionsRepr[Source1, T] = (head, tail.selections[Source1, T])
98 | }
99 | }
100 |
--------------------------------------------------------------------------------
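
Note: SelectionSet is the HList-like spine of a query's projection: Cons pairs one ColumnSelection with the rest and Empty terminates it, so the ResultTypeRepr of the value below is (String, (Int, Unit)). A small sketch, again assuming the TypeTag instances for String and Int from zio.sql.typetag:

    import zio.sql.select.ColumnSelection
    import zio.sql.select.SelectionSet.{ Cons, Empty }

    object SelectionSetSketch {
      val set: Cons[Any, String, Cons[Any, Int, Empty]] =
        Cons(ColumnSelection.Constant("foo", Some("label")), Cons(ColumnSelection.Constant(1, None), Empty))
    }
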
/core/jvm/src/main/scala/zio/sql/select/SubselectBuilder.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.select
2 |
3 | import zio.sql.table._
4 | import zio.sql.macros._
5 |
6 | final case class SubselectBuilder[F, Source, B <: SelectionSet[Source], ParentTable](
7 | selection: Selection[F, Source, B]
8 | ) {
9 | def from[Source0](table: Table.Aux[Source0])(implicit
10 | ev1: Source0 with ParentTable <:< Source,
11 | ev2: B <:< SelectionSet.Cons[Source, selection.value.ColumnHead, selection.value.SelectionTail],
12 | normalizer: Normalizer[selection.value.ResultTypeRepr]
13 | ): Read.Subselect[
14 | F,
15 | normalizer.Out,
16 | Source with ParentTable,
17 | Source0,
18 | selection.value.ColumnHead,
19 | selection.value.SelectionTail
20 | ] = {
21 | type B0 = SelectionSet.ConsAux[
22 | selection.value.ResultTypeRepr,
23 | Source with ParentTable,
24 | selection.value.ColumnHead,
25 | selection.value.SelectionTail
26 | ]
27 | val b: B0 = selection.value.asInstanceOf[B0]
28 |
29 | Read.Subselect(Selection[F, Source with ParentTable, B0](b), Some(table), true).normalize
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/select/SubselectByCommaBuilder.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.select
2 |
3 | import zio.sql.expr.Expr
4 | import zio.sql.select._
5 | import scala.language.experimental.macros
6 |
7 | final case class SubselectByCommaBuilder[ParentTable]() {
8 | def apply[F, A, B <: SelectionSet[A]](selection: Selection[F, A, B]) =
9 | SubselectBuilder[F, A, B, ParentTable](selection)
10 |
11 | def apply[F, Source, Set <: SelectionSet[Source]](
12 | exprs: Expr[F, Source, _]*
13 | ): SubselectBuilder[F, Source, Set, ParentTable] =
14 | macro SelectionMacro.subselectApplyMacro[F, Source, ParentTable]
15 | }
16 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/table/Column.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.table
2 |
3 | import zio.sql.typetag.TypeTag
4 |
5 | sealed trait Column[+A] {
6 | type Identity
7 | def typeTag: TypeTag[A]
8 |
9 | def name: Option[String]
10 |
11 | def nullable[A1 >: A](implicit ev: TypeTag.NotNull[A1]): Column.Aux[Option[A1], Identity]
12 | }
13 |
14 | object Column {
15 |
16 | type Aux[+A0, Identity0] = Column[A0] {
17 | type Identity = Identity0
18 | }
19 |
20 | final case class Named[A: TypeTag, ColumnIdentity](columnName: String) extends Column[A] {
21 | override type Identity = ColumnIdentity
22 |
23 | override def typeTag: TypeTag[A] = implicitly[TypeTag[A]]
24 |
25 | override def name = Some(columnName)
26 |
27 | override def nullable[A1 >: A](implicit ev: TypeTag.NotNull[A1]): Column.Aux[Option[A1], Identity] =
28 | Column.Named[Option[A1], ColumnIdentity](columnName)
29 | }
30 |
31 | final case class Indexed[A: TypeTag, ColumnIdentity]() extends Column[A] {
32 |
33 | override type Identity = ColumnIdentity
34 |
35 | override def typeTag: TypeTag[A] = implicitly[TypeTag[A]]
36 |
37 | override def name = None
38 |
39 | override def nullable[A1 >: A](implicit ev: TypeTag.NotNull[A1]): Column.Aux[Option[A1], Identity] =
40 | Column.Indexed[Option[A1], ColumnIdentity]()
41 | }
42 |
43 | type Untyped = Column[_]
44 | }
45 |
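46 | // Usage sketch (illustrative only, not part of the library API): a named column with a
47 | // phantom identity type, lifted to a nullable column. In real code the identity types
48 | // are derived from schema field names by the table definition macros.
49 | private[table] object ColumnUsageSketch {
50 |   trait AgeId
51 | 
52 |   val age: Column.Aux[Int, AgeId] = Column.Named[Int, AgeId]("age")
53 | 
54 |   // `nullable` keeps the identity while wrapping the Scala type in Option.
55 |   val optionalAge: Column.Aux[Option[Int], AgeId] = age.nullable
56 | }
57 | 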
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/table/ExprAccesorBuilder.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.table
2 |
3 | import zio.sql._
4 | import zio.schema._
5 | import zio.sql.typetag._
6 |
7 | import zio.sql.expr.Expr
8 |
9 | class ExprAccessorBuilder(name: String) extends AccessorBuilder {
10 |
11 | override type Lens[F, S, A] = Expr[Features.Source[F, S], S, A]
12 |
13 | override type Prism[F, S, A] = Unit
14 |
15 | override type Traversal[S, A] = Unit
16 |
17 | def makeLens[F, S, A](product: Schema.Record[S], term: Schema.Field[S, A]): Expr[Features.Source[F, S], S, A] = {
18 | implicit val typeTag = TypeTag.deriveTypeTag(term.schema).get
19 |
20 | val column: Column.Aux[A, F] = Column.Named[A, F](Table.convertToSnakeCase(term.name.toString()))
21 |
22 | Expr.Source(name, column)
23 | }
24 |
25 | def makePrism[F, S, A](sum: Schema.Enum[S], term: Schema.Case[S, A]): Unit = ()
26 |
27 | def makeTraversal[S, A](collection: Schema.Collection[S, A], element: Schema[A]): Traversal[S, A] = ()
28 |
29 | }
30 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/table/JoinType.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.table
2 |
3 | sealed trait JoinType
4 |
5 | object JoinType {
6 | case object Inner extends JoinType
7 | case object LeftOuter extends JoinType
8 | case object RightOuter extends JoinType
9 | case object FullOuter extends JoinType
10 | }
11 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/table/Table.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.table
2 |
3 | import zio.schema._
4 | import zio.sql.macros.TableSchema
5 | import scala.collection.immutable
6 | import zio.sql.macros.IsNotLiteral
7 | import zio.sql._
8 | import zio.sql.utils.TrailingUnitNormalizer
9 |
10 | import zio.sql.expr.Expr
11 | import zio.sql.select._
12 |
13 | import zio.sql.utils.Pluralize
14 |
15 | sealed trait Table { self =>
16 | protected[sql] type TableType
17 |
18 | final def fullOuter[That](that: Table.Aux[That]): Table.JoinBuilder[self.TableType, That] =
19 | new Table.JoinBuilder[self.TableType, That](JoinType.FullOuter, self, that)
20 |
21 | final def join[That](that: Table.Aux[That]): Table.JoinBuilder[self.TableType, That] =
22 | new Table.JoinBuilder[self.TableType, That](JoinType.Inner, self, that)
23 |
24 | final def leftOuter[That](that: Table.Aux[That]): Table.JoinBuilder[self.TableType, That] =
25 | new Table.JoinBuilder[self.TableType, That](JoinType.LeftOuter, self, that)
26 |
27 | final def rightOuter[That](that: Table.Aux[That]): Table.JoinBuilder[self.TableType, That] =
28 | new Table.JoinBuilder[self.TableType, That](JoinType.RightOuter, self, that)
29 |
30 | final val subselect: SubselectByCommaBuilder[TableType] = new SubselectByCommaBuilder[TableType]
31 | }
32 |
33 | object Table {
34 |
35 | /**
36 |    * Creates a table description from the Schema of T. (See the usage sketch at the end of this file.)
37 |    * The table name is taken from the @name annotation, or derived from the schema id and pluralized.
38 | */
39 | // TODO do not allow CaseClass0 with macro
40 | def defineTableSmart[T](implicit
41 | schema: Schema.Record[T],
42 | tableLike: TableSchema[T]
43 | ): Table.Source.WithTableDetails[schema.Terms, T, schema.Accessors[Lens, Prism, Traversal]] = {
44 | val tableName = extractAnnotationName(schema) match {
45 | case Some(name) => name
46 | case None =>
47 | pluralize(
48 | convertToSnakeCase(schema.id.name)
49 | .split("_")
50 | .toList
51 | )
52 | }
53 |
54 | defineTable(tableName)
55 | }
56 |
57 | /**
58 |    * Creates a table description from the Schema of T.
59 |    * The table name is taken from the @name annotation, or derived from the schema id.
60 | */
61 | def defineTable[T](implicit
62 | schema: Schema.Record[T],
63 | tableLike: TableSchema[T]
64 | ): Table.Source.WithTableDetails[schema.Terms, T, schema.Accessors[Lens, Prism, Traversal]] = {
65 | val tableName = extractAnnotationName(schema) match {
66 | case Some(name) => name
67 | case None => convertToSnakeCase(schema.id.name)
68 | }
69 |
70 | defineTable(tableName)
71 | }
72 |
73 | /**
74 |    * Creates a table description from the Schema of T.
75 |    * The table name is provided explicitly.
76 | */
77 | def defineTable[T](
78 | tableName: String
79 | )(implicit
80 | schema: Schema.Record[T],
81 | tableLike: TableSchema[T]
82 | ): Table.Source.WithTableDetails[schema.Terms, T, schema.Accessors[Lens, Prism, Traversal]] =
83 | new Table.Source { self =>
84 | protected[sql] val exprAccessorBuilder = new ExprAccessorBuilder(tableName)
85 |
86 | override protected[sql] type AllColumnIdentities = schema.Terms
87 |
88 | override protected[sql] type TableType = T
89 |
90 | override protected[sql] type ColumnsOut =
91 | schema.Accessors[exprAccessorBuilder.Lens, exprAccessorBuilder.Prism, exprAccessorBuilder.Traversal]
92 |
93 | override val columns: ColumnsOut = schema.makeAccessors(exprAccessorBuilder)
94 |
95 | override val name: String = tableName.toLowerCase()
96 | }
97 |
98 | def convertToSnakeCase(name: String): String = {
99 | val temp = (name.head.toLower.toString + name.tail)
100 | temp.indexWhere(_.isUpper) match {
101 | case -1 => temp
102 | case i =>
103 | val (prefix, suffix) = temp.splitAt(i)
104 | prefix + "_" + convertToSnakeCase(suffix)
105 | }
106 | }
107 |
108 | private def pluralize(names: List[String]): String =
109 | names match {
110 | case Nil => ""
111 | case head :: immutable.Nil => Pluralize.pluralize(head)
112 | case head :: next => head + "_" + pluralize(next)
113 | }
114 |
115 | private def extractAnnotationName[T](schema: Schema.Record[T]): Option[String] =
116 | schema.annotations.collectFirst { case TableNameAnnotation.name(name) => name } match {
117 | case Some(name) if raw"[A-Za-z_][A-Za-z0-9_]*".r.pattern.matcher(name).matches() => Some(name)
118 | case _ => None
119 | }
120 |
121 | class JoinBuilder[A, B](joinType: JoinType, left: Table.Aux[A], right: Table.Aux[B]) {
122 | def on[F](expr: Expr[F, A with B, Boolean])(implicit ev: IsNotLiteral[F]): Table.Aux[A with B] =
123 | Joined(joinType, left, right, expr)
124 | }
125 |
126 | type Aux[A] = Table { type TableType = A }
127 |
128 | type WithColumnsOut[A, ColumnsOut0] = Table {
129 | type TableType = A
130 | type ColumnsOut = ColumnsOut0
131 | }
132 |
133 | sealed trait Source extends Table {
134 | protected[sql] type AllColumnIdentities
135 |
136 | val name: String
137 |
138 | protected[sql] type ColumnsOut
139 |
140 | val columns: ColumnsOut
141 | }
142 |
143 | object Source {
144 | type Aux[A] = Table.Source {
145 | type TableType = A
146 | }
147 |
148 | type Aux_[A, AllColumnIdentities0] = Table.Source {
149 | type TableType = A
150 | type AllColumnIdentities = AllColumnIdentities0
151 | }
152 |
153 | type WithTableDetails[AllColumnIdentities0, T, ColumnsOut0] = Table.Source {
154 | type AllColumnIdentities = AllColumnIdentities0
155 | type TableType = T
156 | type ColumnsOut = ColumnsOut0
157 | }
158 | }
159 |
160 | final case class Joined[FF, A, B](
161 | joinType: JoinType,
162 | left: Table.Aux[A],
163 | right: Table.Aux[B],
164 | on: Expr[FF, A with B, Boolean]
165 | ) extends Table {
166 |
167 | override type TableType = left.TableType with right.TableType
168 | }
169 |
170 | final case class DerivedTable[CO, +Out, +R <: Read.WithReprs[Out, CO], Source](read: R, name: String) extends Table {
171 | self =>
172 | type ColumnsOut = CO
173 |
174 | override type TableType = Source
175 |
176 | def columns(implicit normalizer: TrailingUnitNormalizer[CO]): normalizer.Out =
177 | normalizer.apply(read.columns(name))
178 | }
179 |
180 | final case class DialectSpecificTable[A](tableExtension: TableExtension[A]) extends Table {
181 |
182 | override type TableType = A
183 | }
184 |
185 | trait TableExtension[A]
186 | }
187 |
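188 | // Usage sketch (illustrative only; `Customer`, `Order` and their derived zio-schema
189 | // schemas are assumptions, not part of this file):
190 | //
191 | //   case class Customer(id: UUID, firstName: String)
192 | //   implicit val customerSchema = DeriveSchema.gen[Customer]
193 | //   val customers               = Table.defineTableSmart[Customer]   // e.g. table name "customers"
194 | //   val (customerId, firstName) = customers.columns
195 | //
196 | //   case class Order(id: UUID, customerId: UUID)
197 | //   implicit val orderSchema    = DeriveSchema.gen[Order]
198 | //   val orders                  = Table.defineTable[Order]("orders")
199 | //   val (orderId, fkCustomerId) = orders.columns
200 | //
201 | //   // Joins are built through JoinBuilder#on, which rejects literal-only predicates:
202 | //   val customersWithOrders = customers.join(orders).on(customerId === fkCustomerId)
203 | 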
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/table/TableNameAnnotation.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.table
2 |
3 | import scala.annotation.StaticAnnotation
4 |
5 | // TODO add ColumnNameAnnotation
6 | object TableNameAnnotation {
7 | final case class name(name: String) extends StaticAnnotation
8 | }
9 |
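10 | // Usage sketch (illustrative only): annotating the case class overrides the table name
11 | // that Table.defineTable / defineTableSmart would otherwise derive from the schema id.
12 | // The annotation is carried by the zio-schema derived Schema.Record and read back in
13 | // Table.extractAnnotationName.
14 | //
15 | //   @TableNameAnnotation.name("users")
16 | //   final case class User(id: java.util.UUID, firstName: String)
17 | 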
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/typetag/Decodable.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.typetag
2 |
3 | import java.sql.ResultSet
4 | import zio.sql.select.DecodingError
5 |
6 | trait Decodable[+A] {
7 | def decode(column: Int, resultSet: ResultSet): Either[DecodingError, A]
8 | }
9 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/typetag/IsDate.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.typetag
2 |
3 | import java.time._
4 |
5 | sealed trait IsDate[A] {
6 | def typeTag: TypeTag[A]
7 | }
8 |
9 | object IsDate {
10 | abstract class AbstractIsDate[A: TypeTag] extends IsDate[A] {
11 | def typeTag = implicitly[TypeTag[A]]
12 | }
13 | implicit case object InstantIsDate extends AbstractIsDate[Instant]
14 | implicit case object LocalDateIsDate extends AbstractIsDate[LocalDate]
15 | implicit case object LocalDateTimeIsDate extends AbstractIsDate[LocalDateTime]
16 | implicit case object LocalTimeIsDate extends AbstractIsDate[LocalTime]
17 | implicit case object OffsetDateTimeIsDate extends AbstractIsDate[OffsetDateTime]
18 | implicit case object OffsetTimeIsDate extends AbstractIsDate[OffsetTime]
19 | implicit case object ZonedDateTimeIsDate extends AbstractIsDate[ZonedDateTime]
20 | }
21 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/typetag/IsIntegral.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.typetag
2 |
3 | sealed trait IsIntegral[A] {
4 | def typeTag: TypeTag[A]
5 | }
6 |
7 | object IsIntegral {
8 |
9 | abstract class AbstractIsIntegral[A: TypeTag] extends IsIntegral[A] {
10 | def typeTag = implicitly[TypeTag[A]]
11 | }
12 | implicit case object TByteIsIntegral extends AbstractIsIntegral[Byte]
13 | implicit case object TShortIsIntegral extends AbstractIsIntegral[Short]
14 | implicit case object TIntIsIntegral extends AbstractIsIntegral[Int]
15 | implicit case object TLongIsIntegral extends AbstractIsIntegral[Long]
16 | }
17 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/typetag/IsNumeric.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.typetag
2 |
3 | sealed trait IsNumeric[A] {
4 | def typeTag: TypeTag[A]
5 | }
6 |
7 | object IsNumeric {
8 |
9 | abstract class AbstractIsNumeric[A: TypeTag] extends IsNumeric[A] {
10 | def typeTag = implicitly[TypeTag[A]]
11 | }
12 | implicit case object TShortIsNumeric extends AbstractIsNumeric[Short]
13 | implicit case object TIntIsNumeric extends AbstractIsNumeric[Int]
14 | implicit case object TLongIsNumeric extends AbstractIsNumeric[Long]
15 | implicit case object TFloatIsNumeric extends AbstractIsNumeric[Float]
16 | implicit case object TDoubleIsNumeric extends AbstractIsNumeric[Double]
17 | // TODO IS BigDecimal numeric? can I work in sql with -, + on `money` type?
18 | implicit case object TBigDecimalIsNumeric extends AbstractIsNumeric[java.math.BigDecimal]
19 | }
20 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/typetag/TypeTag.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.typetag
2 |
3 | import java.time._
4 | import java.util.UUID
5 | import zio.Chunk
6 | import zio.schema._
7 |
8 | trait Tag[+A] {
9 | private[zio] def cast(a: Any): A = a.asInstanceOf[A]
10 | }
11 |
12 | sealed trait TypeTag[+A] extends Tag[A]
13 |
14 | object TypeTag {
15 |
16 | trait TypeTagExtension[+A] extends Tag[A] with Decodable[A]
17 |
18 | sealed trait NotNull[+A] extends TypeTag[A]
19 | implicit case object TBigDecimal extends NotNull[java.math.BigDecimal]
20 | implicit case object TBoolean extends NotNull[Boolean]
21 | implicit case object TByte extends NotNull[Byte]
22 | implicit case object TByteArray extends NotNull[Chunk[Byte]]
23 | implicit case object TChar extends NotNull[Char]
24 | implicit case object TDouble extends NotNull[Double]
25 | implicit case object TFloat extends NotNull[Float]
26 | implicit case object TInstant extends NotNull[Instant]
27 | implicit case object TInt extends NotNull[Int]
28 | implicit case object TLocalDate extends NotNull[LocalDate]
29 | implicit case object TLocalDateTime extends NotNull[LocalDateTime]
30 | implicit case object TLocalTime extends NotNull[LocalTime]
31 | implicit case object TLong extends NotNull[Long]
32 | implicit case object TOffsetDateTime extends NotNull[OffsetDateTime]
33 | implicit case object TOffsetTime extends NotNull[OffsetTime]
34 | implicit case object TShort extends NotNull[Short]
35 | implicit case object TString extends NotNull[String]
36 | implicit case object TUUID extends NotNull[UUID]
37 | implicit case object TZonedDateTime extends NotNull[ZonedDateTime]
38 |
39 | // TODO how to handle dialect specific in tablelike macro ?
40 | final case class TDialectSpecific[+A](typeTagExtension: TypeTagExtension[A]) extends NotNull[A]
41 | final case class Nullable[A: NotNull]() extends TypeTag[Option[A]] {
42 | def typeTag: TypeTag[A] = implicitly[TypeTag[A]]
43 | }
44 | implicit case object TNone extends TypeTag[None.type]
45 |
46 | implicit def option[A: NotNull]: TypeTag[Option[A]] = Nullable[A]()
47 |
48 | implicit def dialectSpecific[A](implicit typeTagExtension: TypeTagExtension[A]): TypeTag[A] =
49 | TDialectSpecific(typeTagExtension)
50 |
51 | def deriveTypeTag[A](standardType: StandardType[A]): Option[TypeTag.NotNull[A]] =
52 | standardType match {
53 | case StandardType.BigDecimalType => Some(TypeTag.TBigDecimal)
54 | case StandardType.BoolType => Some(TypeTag.TBoolean)
55 | case StandardType.ByteType => Some(TypeTag.TByte)
56 | case StandardType.BinaryType => Some(TypeTag.TByteArray)
57 | case StandardType.CharType => Some(TypeTag.TChar)
58 | case StandardType.DoubleType => Some(TypeTag.TDouble)
59 | case StandardType.FloatType => Some(TypeTag.TFloat)
60 | case StandardType.InstantType => Some(TypeTag.TInstant)
61 | case StandardType.IntType => Some(TypeTag.TInt)
62 | case StandardType.LocalDateType => Some(TypeTag.TLocalDate)
63 | case StandardType.LocalDateTimeType => Some(TypeTag.TLocalDateTime)
64 | case StandardType.OffsetTimeType => Some(TypeTag.TOffsetTime)
65 | case StandardType.LocalTimeType => Some(TypeTag.TLocalTime)
66 | case StandardType.LongType => Some(TypeTag.TLong)
67 | case StandardType.OffsetDateTimeType => Some(TypeTag.TOffsetDateTime)
68 | case StandardType.ShortType => Some(TypeTag.TShort)
69 | case StandardType.StringType => Some(TypeTag.TString)
70 | case StandardType.UUIDType => Some(TypeTag.TUUID)
71 | case StandardType.ZonedDateTimeType => Some(TypeTag.TZonedDateTime)
72 | // TODO What other types to support ?
73 | case StandardType.BigIntegerType => None
74 | case StandardType.ZoneOffsetType => None
75 | case StandardType.DurationType => None
76 | case StandardType.YearType => None
77 | case StandardType.MonthType => None
78 | case StandardType.MonthDayType => None
79 | case StandardType.ZoneIdType => None
80 | case StandardType.PeriodType => None
81 | case StandardType.YearMonthType => None
82 | case StandardType.DayOfWeekType => None
83 | case StandardType.UnitType => None
84 | }
85 |
86 | def deriveTypeTag[A](opSchema: Schema.Optional[A]): Option[TypeTag[Option[A]]] =
87 | opSchema.schema match {
88 | case Schema.Primitive(standardType, _) =>
89 | implicit val notNullTypeTag = deriveTypeTag(standardType).get
90 |
91 | Some(TypeTag.option[A])
92 | case _ => None
93 | }
94 |
95 | def deriveTypeTag[A](fieldSchema: Schema[A]): Option[TypeTag[A]] =
96 | fieldSchema match {
97 | case s: Schema.Optional[_] => deriveTypeTag(s)
98 | case s: Schema.Lazy[A] => deriveTypeTag(s.schema)
99 | case Schema.Primitive(standardType, _) => deriveTypeTag(standardType)
100 | case Schema.Sequence(elementSchema, _, _, _, _) =>
101 | elementSchema match {
102 | case Schema.Primitive(standardType, _) if (standardType == StandardType.ByteType) =>
103 | Some(TypeTag.TByteArray.asInstanceOf[TypeTag[A]])
104 | case _ => None
105 | }
106 |
107 | // TODO get TypeTag of A available out of Schema[A] and derive typetag from Schema.Transform
108 | case _: Schema.Transform[_, _, _] => None
109 | case _ => None
110 | }
111 | }
112 |
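113 | // Usage sketch (illustrative only): deriving type tags from zio-schema descriptions.
114 | // `deriveTypeTag` returns None for types that have no SQL mapping (e.g. BigInteger).
115 | private[typetag] object TypeTagDerivationSketch {
116 |   val intTag: Option[TypeTag.NotNull[Int]] = TypeTag.deriveTypeTag(StandardType.IntType)
117 | 
118 |   val optionalStringTag: Option[TypeTag[Option[String]]] =
119 |     TypeTag.deriveTypeTag(Schema.Optional(Schema[String]))
120 | }
121 | 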
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/update/Update.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.update
2 |
3 | import zio.sql.table._
4 | import zio.sql.typetag._
5 | import zio.sql.Features
6 |
7 | import zio.sql.expr.{ Expr, Set }
8 |
9 | // UPDATE table
10 | // SET foo = bar
11 | // WHERE baz > buzz
12 | // todo `set` must be non-empty
13 | final case class Update[A](table: Table.Aux[A], set: List[Set[_, A]], whereExpr: Expr[_, A, Boolean]) {
14 |
15 | def set[F: Features.IsSource, Value: TypeTag](lhs: Expr[F, A, Value], rhs: Expr[_, A, Value]): Update[A] =
16 | copy(set = set :+ Set(lhs, rhs))
17 |
18 | def where(whereExpr2: Expr[_, A, Boolean]): Update[A] =
19 | copy(whereExpr = whereExpr && whereExpr2)
20 | }
21 |
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/zio/sql/update/UpdateBuilder.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.update
2 |
3 | import zio.sql.table._
4 | import zio.sql.typetag._
5 | import zio.sql.Features
6 | import zio.sql.expr._
7 |
8 | final case class UpdateBuilder[A](table: Table.Aux[A]) {
9 | def set[F: Features.IsSource, Value: TypeTag](lhs: Expr[F, A, Value], rhs: Expr[_, A, Value]): Update[A] =
10 | Update(table, Set(lhs, rhs) :: Nil, true)
11 | }
12 |
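13 | // Usage sketch (illustrative only; `productTable`, `name` and `price` are assumed to come
14 | // from Table.defineTable and productTable.columns, and the builder is normally obtained
15 | // through the `update(...)` entry point in Sql):
16 | //
17 | //   val query =
18 | //     UpdateBuilder(productTable)
19 | //       .set(name, "new name")
20 | //       .where(price > 100.0)
21 | //
22 | // Each additional `set` appends to the SET clause; `where` conjoins predicates with AND.
23 | 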
--------------------------------------------------------------------------------
/core/jvm/src/test/scala/zio/sql/ArithmeticOpsSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import zio.test.Assertion.anything
4 | import zio.test.assert
5 | import zio.test.ZIOSpecDefault
6 |
7 | object ArithmeticOpsSpec extends ZIOSpecDefault {
8 | import ProductSchema._
9 |
10 | def spec = suite("Arithmetic operators")(
11 | test("+ works on integer columns") {
12 | val query = selectAll.where(baseAmount + finalAmount > 42)
13 | assert(query)(anything)
14 | },
15 | test("- works on integer columns") {
16 | val query = selectAll.where(baseAmount - finalAmount > 42)
17 | assert(query)(anything)
18 | },
19 | test("* works on integer columns") {
20 | val query = selectAll.where(baseAmount * finalAmount > 42)
21 | assert(query)(anything)
22 | },
23 | test("/ works on integer columns") {
24 | val query = selectAll.where(baseAmount / finalAmount > 42)
25 | assert(query)(anything)
26 | },
27 | test("- works on integer column") {
28 | val selectNotDeleted = selectAll.where(-baseAmount < 0)
29 | assert(selectNotDeleted)(anything)
30 | }
31 | )
32 | }
33 |
--------------------------------------------------------------------------------
/core/jvm/src/test/scala/zio/sql/BitwiseOpSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import zio.test.Assertion.anything
4 | import zio.test.assert
5 | import zio.test.ZIOSpecDefault
6 |
7 | object BitwiseOpSpec extends ZIOSpecDefault {
8 | import ProductSchema._
9 |
10 | def spec = suite("Bitwise operators")(
11 | test("~ works on integer columns") {
12 | val query = selectAll.where((~baseAmount) > 0)
13 | assert(query)(anything)
14 | },
15 | test("& works on integer columns") {
16 | val query = selectAll.where((baseAmount & finalAmount) > 0)
17 | assert(query)(anything)
18 | },
19 | test("| works on integer columns") {
20 | val query = selectAll.where((baseAmount | finalAmount) > 0)
21 | assert(query)(anything)
22 | }
23 | )
24 | }
25 |
--------------------------------------------------------------------------------
/core/jvm/src/test/scala/zio/sql/GroupByHavingSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import zio.test.Assertion.anything
4 | import zio.test.assert
5 | import zio.test.ZIOSpecDefault
6 |
7 | object GroupByHavingSpec extends ZIOSpecDefault {
8 |
9 | def spec = suite("groupBy")(
10 | test("works") {
11 | assert("")(anything)
12 | }
13 | )
14 | }
15 |
--------------------------------------------------------------------------------
/core/jvm/src/test/scala/zio/sql/LogicalOpsSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import zio.test.Assertion.anything
4 | import zio.test.assert
5 | import zio.test.ZIOSpecDefault
6 |
7 | class LogicalOpsSpec extends ZIOSpecDefault {
8 | import ProductSchema._
9 |
10 | def spec = suite("Relational operators")(
11 | test("<= works on integer columns") {
12 | val query = selectAll.where(baseAmount <= finalAmount)
13 | assert(query)(anything)
14 | },
15 | test(">= works on integer columns") {
16 | val query = selectAll.where(baseAmount >= finalAmount)
17 | assert(query)(anything)
18 | },
19 | test("<> works on integer columns") {
20 | val query = selectAll.where(baseAmount <> finalAmount)
21 | assert(query)(anything)
22 | },
23 | test("< works on integer columns") {
24 | val query = selectAll.where(baseAmount < finalAmount)
25 | assert(query)(anything)
26 | },
27 | test("> works on integer columns") {
28 | val query = selectAll.where(baseAmount > finalAmount)
29 | assert(query)(anything)
30 | },
31 | test("=== works on integer columns") {
32 | val query = selectAll.where(baseAmount === finalAmount)
33 | assert(query)(anything)
34 | },
35 | test("not works on boolean column") {
36 | val selectNotDeleted = selectAll.where(deleted.not)
37 | assert(selectNotDeleted)(anything)
38 | },
39 | test("like works on a string column") {
40 | val query = selectAll.where(name like "%")
41 | assert(query)(anything)
42 | }
43 | )
44 | }
45 |
--------------------------------------------------------------------------------
/core/jvm/src/test/scala/zio/sql/PredicateOpSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import zio.test.assert
4 | import zio.test.Assertion.anything
5 | import zio.test.ZIOSpecDefault
6 |
7 | object PredicateOpSpec extends ZIOSpecDefault {
8 | import ProductSchema._
9 |
10 | def spec = suite("Unary operators")(
11 | test("isTrue works on boolean column") {
12 | val query = selectAll.where(deleted.isTrue)
13 | assert(query)(anything)
14 | },
15 | test("isNotTrue works on boolean column") {
16 | val query = selectAll.where(deleted.isNotTrue)
17 | assert(query)(anything)
18 | },
19 | test("isNotNull works on column with any type") {
20 | val query = selectAll.where(name.isNotNull)
21 | assert(query)(anything)
22 | },
23 | test("isNull works on column with any type") {
24 | val query = selectAll.where(name.isNull)
25 | assert(query)(anything)
26 | }
27 | )
28 | }
29 |
--------------------------------------------------------------------------------
/core/jvm/src/test/scala/zio/sql/ProductSchema.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import java.time.LocalDate
4 |
5 | import zio.schema.Schema
6 | import zio.schema.DeriveSchema
7 | import zio.schema.StandardType
8 | import zio.sql.table._
9 | import zio.sql.insert._
10 | import zio.sql.select._
11 | import zio.sql.update._
12 | import zio.sql.delete._
13 |
14 | object ProductSchema {
15 | val sql = new Sql { self =>
16 | override def renderDelete(delete: Delete[_]): String = ???
17 | override def renderRead(read: Read[_]): String = ???
18 | override def renderUpdate(update: Update[_]): String = ???
19 | override def renderInsert[A: Schema](insert: Insert[_, A]): SqlStatement = ???
20 | }
21 |
22 | import sql._
23 |
24 | case class Product(
25 | id: String,
26 | last_updated: LocalDate,
27 | name: String,
28 | base_amount: Int,
29 | final_amount: Int,
30 | deleted: Boolean
31 | )
32 |
33 | implicit val localDateSchema: Schema[LocalDate] =
34 | Schema.primitive[LocalDate](StandardType.LocalDateType)
35 |
36 | implicit val productsSchema: Schema.CaseClass6[String, LocalDate, String, Int, Int, Boolean, Product] =
37 | DeriveSchema.gen[Product]
38 |
39 | val productTable = Table.defineTable[Product]
40 |
41 | val (id, lastUpdated, name, baseAmount, finalAmount, deleted) = productTable.columns
42 |
43 | val selectAll = select(id, lastUpdated, baseAmount, deleted) from productTable
44 | }
45 |
--------------------------------------------------------------------------------
/core/jvm/src/test/scala/zio/sql/TestBasicSelectSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import zio.test._
4 | import zio.test.Assertion._
5 | import zio.schema._
6 | import zio.test.ZIOSpecDefault
7 | import zio.schema.{ DeriveSchema, Schema }
8 | import java.time.LocalDate
9 | import zio.sql.table._
10 | import zio.sql.select._
11 | import zio.sql.insert._
12 | import zio.sql.update._
13 | import zio.sql.delete._
14 |
15 | object TestBasicSelect {
16 | val userSql = new Sql { self =>
17 | override def renderDelete(delete: Delete[_]): String = ???
18 | override def renderRead(read: Read[_]): String = ???
19 | override def renderUpdate(update: Update[_]): String = ???
20 | override def renderInsert[A: Schema](insert: Insert[_, A]): SqlStatement = ???
21 |
22 | case class Users(user_id: String, dob: LocalDate, first_name: String, last_name: String)
23 |
24 | implicit val localDateSchema: Schema[LocalDate] =
25 | Schema.primitive[LocalDate](StandardType.LocalDateType)
26 | implicit val userSchema: Schema.CaseClass4[String, LocalDate, String, String, Users] = DeriveSchema.gen[Users]
27 |
28 | val userTable = Table.defineTable[Users]
29 |
30 | val (userId, dob, fName, lName) = userTable.columns
31 |
32 | // todo this should compile using column names defined in the table
33 | val basicSelect = select(fName, lName) from userTable
34 |
35 | // fName and lName already have column names, shouldn't have to do this
36 | val basicSelectWithAliases = (select(
37 | (fName as "first_name"),
38 | (lName as "last_name")
39 | ) from userTable)
40 | }
41 | }
42 |
43 | object TestBasicSelectSpec extends ZIOSpecDefault {
44 | import TestBasicSelect.userSql._
45 |
46 | def spec = suite("TestBasicSelectSpec")(
47 | test("Selecting columns using existing column names") {
48 | assert(basicSelect)(equalTo(basicSelectWithAliases))
49 | }
50 | )
51 | }
52 |
--------------------------------------------------------------------------------
/docs/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@zio.dev/zio-sql",
3 | "description": "ZIO SQL Documentation",
4 | "license": "Apache-2.0"
5 | }
6 |
--------------------------------------------------------------------------------
/docs/sidebars.js:
--------------------------------------------------------------------------------
1 | const sidebars = {
2 | sidebar: [
3 | {
4 | type: "category",
5 | label: "ZIO SQL",
6 | collapsed: false,
7 | link: { type: "doc", id: "index" },
8 | items: [
9 | "deep-dive"
10 | ]
11 | }
12 | ]
13 | };
14 |
15 | module.exports = sidebars;
16 |
--------------------------------------------------------------------------------
/driver/src/main/scala/zio/sql/driver/Renderer.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.driver
2 |
3 | private[sql] class Renderer(val builder: StringBuilder) {
4 | // not using vararg to avoid allocating `Seq`s
5 | def apply(s1: Any): Unit = {
6 | val _ = builder.append(s1)
7 | }
8 | def apply(s1: Any, s2: Any): Unit = {
9 | val _ = builder.append(s1).append(s2)
10 | }
11 | def apply(s1: Any, s2: Any, s3: Any): Unit = {
12 | val _ = builder.append(s1).append(s2).append(s3)
13 | }
14 | def apply(s1: Any, s2: Any, s3: Any, s4: Any): Unit = {
15 | val _ = builder.append(s1).append(s2).append(s3).append(s4)
16 | }
17 |
18 | override def toString: String = builder.toString()
19 | }
20 |
21 | private[sql] object Renderer {
22 | def apply(): Renderer = new Renderer(new StringBuilder)
23 |
24 | implicit class Extensions(val value: String) {
25 | def doubleQuoted: String = s""""$value""""
26 | def singleQuoted: String = s"'$value'"
27 | }
28 | }
29 |
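30 | // Usage sketch (illustrative only): a renderer accumulates SQL fragments into the
31 | // underlying StringBuilder; the Extensions class adds quoting helpers.
32 | private[sql] object RendererSketch {
33 |   import Renderer.Extensions
34 | 
35 |   def selectAllFrom(table: String): String = {
36 |     val render = Renderer()
37 |     render("SELECT * FROM ", table.doubleQuoted)
38 |     render.toString
39 |   }
40 | }
41 | 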
--------------------------------------------------------------------------------
/examples/src/main/scala/zio/sql/GroupByExamples.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import zio.schema.{ DeriveSchema, Schema }
4 | import zio.sql.postgresql.PostgresJdbcModule
5 | import zio.sql.expr.AggregationDef._
6 | import zio.sql.table._
7 |
8 | object GroupByExamples extends App with PostgresJdbcModule {
9 |
10 | case class Product(id: Int, name: String, amount: Int, price: Double)
11 |
12 | implicit val productSchema: Schema.CaseClass4[Int, String, Int, Double, Product] = DeriveSchema.gen[Product]
13 |
14 | val productTable = Table.defineTable[Product]
15 |
16 | val (id, name, amount, price) = productTable.columns
17 |
18 | select(Count(price))
19 | .from(productTable)
20 | .groupBy(price)
21 |
22 | val e = Sum(price) > 10
23 |
24 | val orderValue = select(name, Sum(price))
25 | .from(productTable)
26 | .groupBy(name, price)
27 | .having(Sum(price) > 10)
28 |
29 | execute(orderValue)
30 |
31 | select(Sum(price))
32 | .from(productTable)
33 | .groupBy(name)
34 | .having(Sum(price) > 10)
35 |
36 | select(name, amount, price)
37 | .from(productTable)
38 | .groupBy(name, amount, price)
39 | .having(Sum(price) > 10)
40 |
41 | select(amount)
42 | .from(productTable)
43 | .groupBy(amount)
44 | .having(amount > 10)
45 |
46 | select(Sum(price))
47 | .from(productTable)
48 | .groupBy(name)
49 | .having(name > "z")
50 |
51 | select(price)
52 | .from(productTable)
53 | .groupBy(price)
54 | .having(Count(price) > 10)
55 |
56 | // Following should not compile
57 | // select(amount ++ price)
58 | // .from(productTable)
59 | // .groupBy(amount)
60 | // .having(amount > 10)
61 |
62 | // select(price)
63 | // .from(productTable)
64 | // .groupBy(name)
65 | // .having(name > "")
66 |
67 | // select(price ++ name)
68 | // .from(productTable)
69 | // .groupBy(price)
70 | // .having(Count(price) > 10)
71 |
72 | // execute(select(name, Sum(price)).from(productTable))
73 |
74 | select(price)
75 | .from(productTable)
76 | .groupBy(price)
77 | .having(Count(price) > 10)
78 |
79 | select(Sum(price))
80 | .from(productTable)
81 | .having(Sum(price) > 10)
82 |
83 | select(price)
84 | .from(productTable)
85 | .groupBy(price, amount)
86 | .having(amount > 200)
87 |
88 | select(amount)
89 | .from(productTable)
90 | .groupBy(amount)
91 | .having(Sum(price) > 200)
92 |
93 | // select(price)
94 | // .from(productTable)
95 | // .groupBy(price)
96 | // .having(amount > 200)
97 |
98 | // select(amount)
99 | // .from(productTable)
100 | // .having(Sum(price) > 200)
101 |
102 | // select(amount)
103 | // .from(productTable)
104 | // .groupBy(amount)
105 | // .having(amount > 10)
106 | // .where(amount > 10)
107 |
108 | select(amount)
109 | .from(productTable)
110 | .groupBy(amount)
111 | .having(amount > 10)
112 | }
113 |
--------------------------------------------------------------------------------
/examples/src/main/scala/zio/sql/LiveExample.scala:
--------------------------------------------------------------------------------
1 | package example
2 |
3 | import java.util.UUID
4 | import java.time.LocalDate
5 | import java.util.Properties
6 |
7 | import zio._
8 | import zio.schema.{ DeriveSchema, Schema }
9 | import zio.sql.{ ConnectionPool, ConnectionPoolConfig }
10 | import zio.sql.postgresql.PostgresJdbcModule
11 | import zio.sql.table._
12 |
13 | /**
14 | *
15 | * docker run --name zio-sql-db -p 5432:5432 -e POSTGRES_DB=ziosqltest -e POSTGRES_PASSWORD=12345 -d postgres
16 | *
17 | * psql -h localhost -U postgres -p 5432 -d ziosqltest
18 | *
19 | * create table "customers" (
20 | * "id" uuid not null primary key,
21 | * "age" integer not null,
22 | * "dob" date not null,
23 | * "first_name" varchar not null,
24 | * "last_name" varchar not null
25 | * );
26 | *
27 | */
28 | object LiveExample extends ZIOAppDefault with PostgresJdbcModule {
29 |
30 | import Tables._
31 |
32 | def run = myAppLogic
33 |
34 | val data = Customer(UUID.randomUUID(), 22, LocalDate.ofYearDay(1990, 1), "Ronald", "Russel")
35 |
36 | val stmt = insertInto(customers)(
37 | userId,
38 | age,
39 | dob,
40 | fName,
41 | lName
42 | ).values(data)
43 |
44 | val properties = {
45 | val p = new Properties()
46 | p.setProperty("user", "postgres")
47 | p.setProperty("password", "12345")
48 | p
49 | }
50 |
51 | val cpConfig =
52 | ZLayer(
53 | ZIO.succeed(
54 | ConnectionPoolConfig(
55 | url = "jdbc:postgresql://localhost:5432/ziosqltest?loglevel=2",
56 | properties = properties,
57 | autoCommit = true
58 | )
59 | )
60 | )
61 |
62 | val myAppLogic =
63 | execute(stmt)
64 | .provide(
65 | SqlDriver.live,
66 | ConnectionPool.live,
67 | cpConfig
68 | )
69 |
70 | object Tables {
71 |
72 | case class Customer(id: UUID, age: Int, dob: LocalDate, firstName: String, lastName: String)
73 |
74 | implicit val customerSchema: Schema.CaseClass5[UUID, Int, LocalDate, String, String, Customer] =
75 | DeriveSchema.gen[Customer]
76 |
77 | val customers = Table.defineTable[Customer]("customers")
78 |
79 | val (userId, age, dob, fName, lName) = customers.columns
80 | }
81 | }
82 |
--------------------------------------------------------------------------------
/jdbc-hikaricp/src/main/scala/zio/sql/HikariConnectionPool.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 | import com.zaxxer.hikari.{ HikariConfig, HikariDataSource }
3 | import zio.{ Scope, ZIO, ZLayer }
4 |
5 | import java.sql.{ Connection, SQLException }
6 |
7 | class HikariConnectionPool private (hikariDataSource: HikariDataSource) extends ConnectionPool {
8 |
9 | private[sql] val dataSource = hikariDataSource
10 |
11 | override def connection: ZIO[Scope, Exception, Connection] =
12 | ZIO.acquireRelease(ZIO.attemptBlocking(hikariDataSource.getConnection).refineToOrDie[SQLException])(con =>
13 | ZIO.attemptBlocking(hikariDataSource.evictConnection(con)).orDie
14 | )
15 | }
16 |
17 | object HikariConnectionPool {
18 |
19 | private[sql] def initDataSource(config: HikariConfig): ZIO[Scope, Throwable, HikariDataSource] =
20 | ZIO.acquireRelease(ZIO.attemptBlocking(new HikariDataSource(config)))(ds => ZIO.attemptBlocking(ds.close()).orDie)
21 |
22 | val live: ZLayer[HikariConnectionPoolConfig, Throwable, HikariConnectionPool] =
23 | ZLayer.scoped {
24 | for {
25 | config <- ZIO.service[HikariConnectionPoolConfig]
26 | dataSource <- initDataSource(config.toHikariConfig)
27 | pool = new HikariConnectionPool(dataSource)
28 | } yield pool
29 | }
30 | }
31 |
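32 | // Usage sketch (illustrative only; the config values are placeholders): the pool is
33 | // provided as a layer fed by a HikariConnectionPoolConfig layer.
34 | //
35 | //   val configLayer: ZLayer[Any, Nothing, HikariConnectionPoolConfig] =
36 | //     ZLayer.succeed(HikariConnectionPoolConfig(url = "jdbc:mysql://localhost:3306/test", userName = "root", password = "secret"))
37 | //
38 | //   val poolLayer: ZLayer[Any, Throwable, HikariConnectionPool] =
39 | //     configLayer >>> HikariConnectionPool.live
40 | 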
--------------------------------------------------------------------------------
/jdbc-hikaricp/src/main/scala/zio/sql/HikariConnectionPoolConfig.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import com.zaxxer.hikari.HikariConfig
4 |
5 | /**
6 | * Configuration information for the connection pool.
7 | *
8 | * @param url The JDBC connection string.
9 |  * @param userName The database user name (the corresponding password goes in `password`).
10 | * @param poolSize The size of the pool.
11 | * @param connectionTimeout Maximum number of milliseconds that a client will wait for a connection from the pool.
12 | * If this time is exceeded without a connection becoming available, a SQLException will be thrown from javax.sql.DataSource.getConnection().
13 | * @param idleTimeout This property controls the maximum amount of time (in milliseconds) that a connection is allowed to sit idle in the pool.
14 | * Whether a connection is retired as idle or not is subject to a maximum variation of +30 seconds, and average variation of +15 seconds.
15 | * A connection will never be retired as idle before this timeout. A value of 0 means that idle connections are never removed from the pool.
16 | * @param initializationFailTimeout the number of milliseconds before the
17 | * pool initialization fails, or 0 to validate connection setup but continue with
18 | * pool start, or less than zero to skip all initialization checks and start the
19 | * pool without delay.
20 | * @param maxLifetime This property controls the maximum lifetime of a connection in the pool.
21 | * When a connection reaches this timeout, even if recently used, it will be retired from the pool.
22 |  * An in-use connection will never be retired; only when it is idle will it be removed. It should be greater than 30000 (30 seconds).
23 | * @param minimumIdle The property controls the minimum number of idle connections that HikariCP tries to maintain in the pool, including both idle and in-use connections.
24 | * If the idle connections dip below this value, HikariCP will make a best effort to restore them quickly and efficiently.
25 | * @param connectionInitSql the SQL to execute on new connections
26 | * Set the SQL string that will be executed on all new connections when they are
27 | * created, before they are added to the pool. If this query fails, it will be
28 | * treated as a failed connection attempt.
29 | */
30 | final case class HikariConnectionPoolConfig(
31 | url: String,
32 | userName: String,
33 | password: String,
34 | poolSize: Int = 10,
35 | autoCommit: Boolean = true,
36 | connectionTimeout: Option[Long] = None,
37 | idleTimeout: Option[Long] = None,
38 | initializationFailTimeout: Option[Long] = None,
39 | maxLifetime: Option[Long] = None,
40 | minimumIdle: Option[Int] = None,
41 | connectionInitSql: Option[String] = None
42 | ) {
43 | private[sql] def toHikariConfig = {
44 | val hikariConfig = new HikariConfig()
45 | hikariConfig.setJdbcUrl(this.url)
46 | hikariConfig.setAutoCommit(this.autoCommit)
47 | hikariConfig.setMaximumPoolSize(this.poolSize)
48 | hikariConfig.setUsername(userName)
49 | hikariConfig.setPassword(password)
50 | connectionTimeout.foreach(hikariConfig.setConnectionTimeout)
51 | idleTimeout.foreach(hikariConfig.setIdleTimeout)
52 | initializationFailTimeout.foreach(hikariConfig.setInitializationFailTimeout)
53 | maxLifetime.foreach(hikariConfig.setMaxLifetime)
54 | minimumIdle.foreach(hikariConfig.setMinimumIdle)
55 | connectionInitSql.foreach(hikariConfig.setConnectionInitSql)
56 | hikariConfig
57 | }
58 | }
59 |
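60 | // Usage sketch (illustrative only; the URL and credentials are placeholders, not defaults
61 | // of this library):
62 | private[sql] object HikariConnectionPoolConfigSketch {
63 |   val config: HikariConnectionPoolConfig =
64 |     HikariConnectionPoolConfig(
65 |       url = "jdbc:postgresql://localhost:5432/ziosqltest",
66 |       userName = "postgres",
67 |       password = "12345",
68 |       poolSize = 20,                  // maximum pool size
69 |       connectionTimeout = Some(2000L) // fail getConnection after 2 seconds
70 |     )
71 | }
72 | 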
--------------------------------------------------------------------------------
/jdbc-hikaricp/src/test/scala/zio/sql/HikariConnectionPoolSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import zio.test.TestAspect.{ sequential, timeout, withLiveClock }
4 | import zio.test.{ TestEnvironment, _ }
5 | import zio.{ durationInt, ZIO, ZLayer }
6 |
7 | object HikariConnectionPoolSpec extends ZIOSpecDefault {
8 |
9 | val mySqlConfigLayer: ZLayer[Any, Throwable, MySqlConfig] =
10 | ZLayer.scoped {
11 | MySqlTestContainer
12 | .mysql()
13 | .map(a =>
14 | MySqlConfig(
15 | url = a.jdbcUrl,
16 | username = a.username,
17 | password = a.password
18 | )
19 | )
20 | }
21 |
22 | val hikariPoolConfigLayer: ZLayer[MySqlConfig, Nothing, HikariConnectionPoolConfig] =
23 | ZLayer.fromFunction((conf: MySqlConfig) =>
24 | HikariConnectionPoolConfig(url = conf.url, userName = conf.username, password = conf.password)
25 | )
26 | val poolLayer: ZLayer[HikariConnectionPoolConfig, Nothing, HikariConnectionPool] = HikariConnectionPool.live.orDie
27 |
28 | override def spec: Spec[TestEnvironment, Any] =
29 | specLayered.provideCustomShared(mySqlConfigLayer.orDie)
30 |
31 | def specLayered: Spec[TestEnvironment with MySqlConfig, Any] =
32 | suite("Hikaricp module")(
33 | test("Pool size should be configurable") {
34 | val poolSize = 20
35 | (for {
36 | cp <- ZIO.service[HikariConnectionPool]
37 | } yield assertTrue(cp.dataSource.getMaximumPoolSize == poolSize))
38 | .provideSomeLayer[TestEnvironment with MySqlConfig](
39 | hikariPoolConfigLayer.map(_.update(_.copy(poolSize = poolSize))) >>> poolLayer
40 | )
41 | } @@ timeout(10.seconds) @@ withLiveClock,
42 | test("Pool size should have 10 connections by default") {
43 | (for {
44 | cp <- ZIO.service[HikariConnectionPool]
45 | _ <- ZIO.replicateZIO(10)(ZIO.scoped(cp.connection))
46 | } yield assertTrue(cp.dataSource.getMaximumPoolSize == 10))
47 | .provideSomeLayer[TestEnvironment with MySqlConfig](hikariPoolConfigLayer >>> poolLayer)
48 | } @@ timeout(10.minutes) @@ withLiveClock,
49 | test("It should be possible to acquire connections from the pool") {
50 | val poolSize = 20
51 | (for {
52 | cp <- ZIO.service[HikariConnectionPool]
53 | _ <-
54 | ZIO.collectAllParDiscard(ZIO.replicate(poolSize)(ZIO.scoped(cp.connection *> ZIO.sleep(500.millisecond))))
55 | } yield assert("")(Assertion.anything)).provideSomeLayer[TestEnvironment with MySqlConfig](
56 | hikariPoolConfigLayer.map(_.update(_.copy(poolSize = poolSize))) >>> poolLayer
57 | )
58 | } @@ timeout(10.seconds) @@ withLiveClock,
59 | test("Auto commit should be configurable") {
60 | val autoCommit = false
61 | (for {
62 | cp <- ZIO.service[HikariConnectionPool]
63 | } yield assertTrue(cp.dataSource.isAutoCommit == autoCommit))
64 | .provideSomeLayer[TestEnvironment with MySqlConfig](
65 | hikariPoolConfigLayer.map(_.update(_.copy(autoCommit = autoCommit))) >>> poolLayer
66 | )
67 | } @@ timeout(10.seconds) @@ withLiveClock,
68 | test("Auto commit should be true by default") {
69 | (for {
70 | cp <- ZIO.service[HikariConnectionPool]
71 | } yield assertTrue(cp.dataSource.isAutoCommit))
72 | .provideSomeLayer[TestEnvironment with MySqlConfig](hikariPoolConfigLayer >>> poolLayer)
73 | } @@ timeout(10.seconds) @@ withLiveClock,
74 | test("Connection timeout should be configurable") {
75 | val connectionTimeout = 2000L
76 | (for {
77 | cp <- ZIO.service[HikariConnectionPool]
78 | } yield assertTrue(cp.dataSource.getConnectionTimeout == connectionTimeout))
79 | .provideSomeLayer[TestEnvironment with MySqlConfig](
80 | hikariPoolConfigLayer.map(_.update(_.copy(connectionTimeout = Some(connectionTimeout)))) >>> poolLayer
81 | )
82 | } @@ timeout(10.seconds) @@ withLiveClock,
83 | test("Idle timeout should be configurable") {
84 | val idleTimeout = 2000L
85 | (for {
86 | cp <- ZIO.service[HikariConnectionPool]
87 | } yield assertTrue(cp.dataSource.getIdleTimeout == idleTimeout))
88 | .provideSomeLayer[TestEnvironment with MySqlConfig](
89 | hikariPoolConfigLayer.map(_.update(_.copy(idleTimeout = Some(idleTimeout)))) >>> poolLayer
90 | )
91 | } @@ timeout(10.seconds) @@ withLiveClock,
92 | test("initialization fail timeout should be configurable") {
93 | val initializationFailTimeout = 2000L
94 | (for {
95 | cp <- ZIO.service[HikariConnectionPool]
96 | } yield assertTrue(cp.dataSource.getInitializationFailTimeout == initializationFailTimeout))
97 | .provideSomeLayer[TestEnvironment with MySqlConfig](
98 | hikariPoolConfigLayer.map(
99 | _.update(_.copy(initializationFailTimeout = Some(initializationFailTimeout)))
100 | ) >>> poolLayer
101 | )
102 | } @@ timeout(10.seconds) @@ withLiveClock,
103 | test("max lifetime should be configurable") {
104 | val maxLifetime = 40000L
105 | (for {
106 | cp <- ZIO.service[HikariConnectionPool]
107 | } yield assertTrue(cp.dataSource.getMaxLifetime == maxLifetime))
108 | .provideSomeLayer[TestEnvironment with MySqlConfig](
109 | hikariPoolConfigLayer.map(_.update(_.copy(maxLifetime = Some(maxLifetime)))) >>> poolLayer
110 | )
111 | } @@ timeout(10.seconds) @@ withLiveClock,
112 | test("minimum idle should be configurable") {
113 | val minimumIdle = 2
114 | (for {
115 | cp <- ZIO.service[HikariConnectionPool]
116 | } yield assertTrue(cp.dataSource.getMinimumIdle == minimumIdle))
117 | .provideSomeLayer[TestEnvironment with MySqlConfig](
118 | hikariPoolConfigLayer.map(_.update(_.copy(minimumIdle = Some(minimumIdle)))) >>> poolLayer
119 | )
120 | } @@ timeout(10.seconds) @@ withLiveClock,
121 | test("connection init SQL should be configurable") {
122 | val initialSql = "SELECT 1"
123 | (for {
124 | cp <- ZIO.service[HikariConnectionPool]
125 | } yield assertTrue(cp.dataSource.getConnectionInitSql == initialSql))
126 | .provideSomeLayer[TestEnvironment with MySqlConfig](
127 | hikariPoolConfigLayer.map(_.update(_.copy(connectionInitSql = Some(initialSql)))) >>> poolLayer
128 | )
129 | } @@ timeout(10.seconds) @@ withLiveClock
130 | ) @@ sequential
131 | }
132 |
--------------------------------------------------------------------------------
/jdbc-hikaricp/src/test/scala/zio/sql/MySqlTestContainer.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import com.dimafeng.testcontainers.MySQLContainer
4 | import org.testcontainers.utility.DockerImageName
5 | import zio._
6 |
7 | final case class MySqlConfig(username: String, password: String, url: String)
8 | object MySqlTestContainer {
9 |
10 | def mysql(imageName: String = "mysql:8.2.0"): ZIO[Scope, Throwable, MySQLContainer] =
11 | ZIO.acquireRelease {
12 | ZIO.attemptBlocking {
13 | val c = new MySQLContainer(
14 | mysqlImageVersion = Option(imageName).map(DockerImageName.parse)
15 | )
16 | c.start()
17 | c
18 | }
19 | }(container => ZIO.attemptBlocking(container.stop()).orDie)
20 | }
21 |
--------------------------------------------------------------------------------
/jdbc/src/main/scala/zio/sql/ConnectionPool.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import java.io.IOException
4 | import java.sql._
5 |
6 | import zio.stm._
7 | import zio._
8 |
9 | import zio.sql.ConnectionPool.QueueItem
10 |
11 | trait ConnectionPool {
12 |
13 | /**
14 |    * Retrieves a JDBC java.sql.Connection as a scoped `ZIO[Scope, Exception, Connection]` resource.
15 |    * The scoped resource will safely acquire and release the connection, and
16 | * may be interrupted or timed out if necessary.
17 | */
18 | def connection: ZIO[Scope, Exception, Connection]
19 | }
20 | object ConnectionPool {
21 |
22 | case class QueueItem(promise: TPromise[Nothing, ResettableConnection], interrupted: TRef[Boolean])
23 |
24 | /**
25 | * A live layer for `ConnectionPool` that creates a JDBC connection pool
26 | * from the specified connection pool settings.
27 | */
28 | val live: ZLayer[ConnectionPoolConfig, IOException, ConnectionPool] =
29 | ZLayer.scoped {
30 | for {
31 | config <- ZIO.service[ConnectionPoolConfig]
32 | queue <- TQueue.bounded[QueueItem](config.queueCapacity).commit
33 | available <- TRef.make(List.empty[ResettableConnection]).commit
34 | pool = ConnectionPoolLive(queue, available, config)
35 | _ <- pool.initialize
36 | _ <- ZIO.addFinalizer(pool.close.orDie)
37 | } yield pool
38 | }
39 | }
40 |
41 | /**
42 | * A live concurrent connection pool.
43 | *
44 | * Improvements to make:
45 | *
46 | * - A connection may die. If so, it should be reacquired.
47 | * - Someone may try to use a connection forever. If so, we should
48 | * take it away from them.
49 | */
50 | final case class ConnectionPoolLive(
51 | queue: TQueue[QueueItem],
52 | available: TRef[List[ResettableConnection]],
53 | config: ConnectionPoolConfig
54 | ) extends ConnectionPool {
55 |
56 | /**
57 | * Adds a fresh connection to the connection pool.
58 | */
59 | val addFreshConnection: IO[IOException, ResettableConnection] = {
60 | val makeConnection = ZIO.attemptBlocking {
61 | val connection = DriverManager.getConnection(config.url, config.properties)
62 |
63 | val autoCommit = config.autoCommit
64 | val catalog = connection.getCatalog()
65 | val clientInfo = connection.getClientInfo()
66 | val holdability = connection.getHoldability()
67 | val schema = connection.getSchema()
68 | val isolation = connection.getTransactionIsolation()
69 |
70 | val restore: Connection => Unit = connection => {
71 | if (connection.getAutoCommit() != autoCommit) connection.setAutoCommit(autoCommit)
72 | if (connection.getCatalog() ne catalog) connection.setCatalog(catalog)
73 | if (connection.getClientInfo() ne clientInfo) connection.setClientInfo(clientInfo)
74 | if (connection.getHoldability() != holdability) connection.setHoldability(holdability)
75 | if (connection.getSchema() != schema) connection.setSchema(schema)
76 | if (connection.getTransactionIsolation() != isolation) connection.setTransactionIsolation(isolation)
77 | }
78 |
79 | new ResettableConnection(connection, restore)
80 | }.refineToOrDie[IOException]
81 |
82 | for {
83 | connection <- makeConnection.retry(config.retryPolicy)
84 | _ <- available.update(connection :: _).commit
85 | } yield connection
86 | }
87 |
88 | /**
89 | * Closes the connection pool, terminating each connection in parallel.
90 | */
91 | val close: IO[IOException, Any] =
92 | ZIO.uninterruptible {
93 | available.get.commit.flatMap { all =>
94 | ZIO.foreachPar(all) { connection =>
95 | ZIO.attemptBlocking(connection.connection.close()).refineToOrDie[IOException]
96 | }
97 | }
98 | }
99 |
100 | def connection: ZIO[Scope, Exception, Connection] =
101 | ZIO
102 | .acquireRelease(tryTake.commit.flatMap {
103 | case Left(queueItem) =>
104 | ZIO.interruptible(queueItem.promise.await.commit).onInterrupt {
105 |   // If the promise already holds a connection, release it back to the pool after
106 |   // the transaction commits; otherwise mark this waiter as interrupted.
107 |   (for {
108 |     res    <- queueItem.promise.poll
109 |     action <- res match {
110 |                 case Some(Right(connection)) => ZSTM.succeed(release(connection))
111 |                 case _                       => queueItem.interrupted.set(true).as(ZIO.unit)
112 |               }
113 |   } yield action).commit.flatten
114 | }
115 |
116 | case Right(connection) =>
117 | ZIO.succeed(connection)
118 | })(release(_))
119 | .flatMap(rc => rc.reset.as(rc.connection))
120 |
121 | /**
122 | * Initializes the connection pool.
123 | */
124 | val initialize: IO[IOException, Unit] =
125 | ZIO.uninterruptible {
126 | ZIO
127 | .foreachParDiscard(1 to config.poolSize) { _ =>
128 | addFreshConnection
129 | }
130 | .unit
131 | }
132 |
133 | private def release(connection: ResettableConnection): UIO[Any] =
134 | ZIO.uninterruptible {
135 | ZIO
136 | .whenZIO(connection.isValid.map(!_)) {
137 | ZIO.attempt(connection.connection.close).zipParRight(addFreshConnection).orDie
138 | }
139 | .flatMap { opt =>
140 | val conn = opt match {
141 | case Some(c) => c
142 | case None => connection
143 | }
144 | tryRelease(conn).commit.flatMap {
145 | case Some(handle) =>
146 | handle.interrupted.get.tap { interrupted =>
147 | ZSTM.when(!interrupted)(handle.promise.succeed(conn))
148 | }.commit.flatMap { interrupted =>
149 | ZIO.when(interrupted)(release(conn))
150 | }
151 | case None => ZIO.unit
152 | }
153 | }
154 | }
155 |
156 | private def tryRelease(
157 | connection: ResettableConnection
158 | ): STM[Nothing, Option[QueueItem]] =
159 | for {
160 | empty <- queue.isEmpty
161 | result <- if (empty) available.update(connection :: _) *> STM.none
162 | else queue.take.map(Some(_))
163 | } yield result
164 |
165 | private val tryTake: STM[Nothing, Either[QueueItem, ResettableConnection]] =
166 | for {
167 | headOption <- available.get.map(_.headOption)
168 | either <- headOption match {
169 | case None =>
170 | for {
171 | promise <- TPromise.make[Nothing, ResettableConnection]
172 | ref <- TRef.make[Boolean](false)
173 | item = QueueItem(promise, ref)
174 | _ <- queue.offer(item)
175 | } yield Left(item)
176 |
177 | case Some(connection) =>
178 | available.update(_.tail) *> ZSTM.succeed(Right(connection))
179 | }
180 | } yield either
181 | }
182 |
183 | private[sql] final class ResettableConnection(val connection: Connection, resetter: Connection => Unit) {
184 | def reset: UIO[Any] = ZIO.succeed(resetter(connection))
185 | def isValid: UIO[Boolean] =
186 | ZIO
187 | .when(!connection.isClosed) {
188 | ZIO.succeed(connection.prepareStatement("SELECT 1"))
189 | }
190 | .map {
191 | case Some(stmt) => stmt != null
192 | case None => false
193 | }
194 | }
195 |
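196 | // Usage sketch (illustrative only): a connection is borrowed inside a Scope and is
197 | // returned to the pool when the scope closes, e.g.
198 | //
199 | //   val check: ZIO[ConnectionPool, Exception, Boolean] =
200 | //     ZIO.serviceWithZIO[ConnectionPool] { pool =>
201 | //       ZIO.scoped {
202 | //         pool.connection.flatMap(conn => ZIO.attemptBlocking(conn.isValid(5)).refineToOrDie[Exception])
203 | //       }
204 | //     }
205 | 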
--------------------------------------------------------------------------------
/jdbc/src/main/scala/zio/sql/ConnectionPoolConfig.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import zio.{ durationInt, Schedule }
4 |
5 | /**
6 | * Configuration information for the connection pool.
7 | *
8 | * @param url The JDBC connection string.
9 | * @param properties JDBC connection properties (username / password could go here).
10 | * @param poolSize The size of the pool.
11 | * @param queueCapacity The capacity of the queue for connections. When this size is reached, back pressure will block attempts to add more.
12 | * @param retryPolicy The retry policy to use when acquiring connections.
13 | */
14 | final case class ConnectionPoolConfig(
15 | url: String,
16 | properties: java.util.Properties,
17 | poolSize: Int = 10,
18 | queueCapacity: Int = 1000,
19 | autoCommit: Boolean = true,
20 | retryPolicy: Schedule[Any, Exception, Any] = Schedule.recurs(20) && Schedule.exponential(10.millis)
21 | )
22 |
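23 | // Usage sketch (illustrative only; URL and credentials are placeholders matching the
24 | // docker setup used in examples/LiveExample):
25 | private[sql] object ConnectionPoolConfigSketch {
26 |   private val props = {
27 |     val p = new java.util.Properties()
28 |     p.setProperty("user", "postgres")
29 |     p.setProperty("password", "12345")
30 |     p
31 |   }
32 | 
33 |   val config: ConnectionPoolConfig =
34 |     ConnectionPoolConfig(
35 |       url = "jdbc:postgresql://localhost:5432/ziosqltest",
36 |       properties = props,
37 |       poolSize = 20
38 |     )
39 | }
40 | 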
--------------------------------------------------------------------------------
/jdbc/src/main/scala/zio/sql/SqlDriverLiveModule.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import java.sql.{ Connection, SQLException }
4 |
5 | import zio._
6 | import zio.stream.{ Stream, ZStream }
7 | import zio.schema.Schema
8 | import zio.IO
9 | import zio.sql.update._
10 | import zio.sql.select._
11 | import zio.sql.insert._
12 | import zio.sql.delete._
13 |
14 | trait SqlDriverLiveModule { self: Jdbc =>
15 | private[sql] trait SqlDriverCore {
16 |
17 | def deleteOnBatch(delete: List[Delete[_]], conn: Connection): IO[Exception, Int]
18 |
19 | def updateOnBatch(update: List[Update[_]], conn: Connection): IO[Exception, Int]
20 |
21 | def deleteOn(delete: Delete[_], conn: Connection): IO[Exception, Int]
22 |
23 | def updateOn(update: Update[_], conn: Connection): IO[Exception, Int]
24 |
25 | def readOn[A](read: Read[A], conn: Connection): Stream[Exception, A]
26 |
27 | def insertOn[A: Schema](insert: Insert[_, A], conn: Connection): IO[Exception, Int]
28 | }
29 |
30 | sealed class SqlDriverLive(pool: ConnectionPool) extends SqlDriver with SqlDriverCore { self =>
31 | def delete(delete: Delete[_]): IO[Exception, Int] =
32 | ZIO.scoped(pool.connection.flatMap(deleteOn(delete, _)))
33 |
34 | def delete(delete: List[Delete[_]]): IO[Exception, Int] =
35 | ZIO.scoped(pool.connection.flatMap(deleteOnBatch(delete, _)))
36 |
37 | def deleteOn(delete: Delete[_], conn: Connection): IO[Exception, Int] =
38 | ZIO.attemptBlocking {
39 | val query = renderDelete(delete)
40 | val statement = conn.createStatement()
41 | statement.executeUpdate(query)
42 | }.refineToOrDie[Exception]
43 |
44 | def deleteOnBatch(delete: List[Delete[_]], conn: Connection): IO[Exception, Int] =
45 | ZIO.attemptBlocking {
46 | val statement = conn.createStatement()
47 | delete.map(delete_ => statement.addBatch(renderDelete(delete_)))
48 | statement.executeBatch().sum
49 | }.refineToOrDie[Exception]
50 |
51 | def update(update: Update[_]): IO[Exception, Int] =
52 | ZIO.scoped(pool.connection.flatMap(updateOn(update, _)))
53 |
54 | def updateOn(update: Update[_], conn: Connection): IO[Exception, Int] =
55 | ZIO.attemptBlocking {
56 | val query = renderUpdate(update)
57 | val statement = conn.createStatement()
58 | statement.executeUpdate(query)
59 | }.refineToOrDie[Exception]
60 |
61 | def update(update: List[Update[_]]): IO[Exception, Int] =
62 | ZIO.scoped(pool.connection.flatMap(updateOnBatch(update, _)))
63 |
64 | def updateOnBatch(update: List[Update[_]], conn: Connection): IO[Exception, Int] =
65 | ZIO.attemptBlocking {
66 | val statement = conn.createStatement()
67 |         update.foreach(update_ => statement.addBatch(renderUpdate(update_)))
68 | statement.executeBatch().sum
69 | }.refineToOrDie[Exception]
70 |
71 | def read[A](read: Read[A]): Stream[Exception, A] =
72 | ZStream
73 | .scoped(pool.connection)
74 | .flatMap(readOn(read, _))
75 |
76 | override def readOn[A](read: Read[A], conn: Connection): Stream[Exception, A] =
77 | ZStream.unwrap {
78 | ZIO.attemptBlocking {
79 | val schema = getColumns(read).zipWithIndex.map { case (value, index) =>
80 | (value, index + 1)
81 | } // SQL is 1-based indexing
82 |
83 | val query = renderRead(read)
84 |
85 | val statement = conn.createStatement()
86 |
87 | val hasResultSet = statement.execute(query)
88 |
89 | if (hasResultSet) {
90 | val resultSet = statement.getResultSet()
91 |
92 | ZStream
93 | .unfoldZIO(resultSet) { rs =>
94 | if (rs.next()) {
95 | try
96 | unsafeExtractRow[read.ResultType](resultSet, schema) match {
97 | case Left(error) => ZIO.fail(error)
98 | case Right(value) => ZIO.succeed(Some((value, rs)))
99 | }
100 | catch {
101 | case e: SQLException => ZIO.fail(e)
102 | }
103 | } else ZIO.succeed(None)
104 | }
105 | .map(read.mapper)
106 | } else ZStream.empty
107 |
108 | }.refineToOrDie[Exception]
109 | }
110 |
111 | override def insertOn[A: Schema](insert: Insert[_, A], conn: Connection): IO[Exception, Int] =
112 | ZIO.attemptBlocking {
113 |
114 | val SqlStatement(query, params) = renderInsert(insert)
115 |
116 | val ps = conn.prepareStatement(query)
117 |
118 | setParams(params)(ps)
119 |
120 | ps.executeBatch().foldLeft(0) { case (acc, el) => acc + el }
121 |
122 | }.refineToOrDie[Exception]
123 |
124 | override def insert[A: Schema](insert: Insert[_, A]): IO[Exception, Int] =
125 | ZIO.scoped(pool.connection.flatMap(insertOn(insert, _)))
126 |
127 | override def transaction: ZLayer[Any, Exception, SqlTransaction] =
128 | ZLayer.scoped {
129 | for {
130 | connection <- pool.connection
131 | _ <- ZIO.attemptBlocking(connection.setAutoCommit(false)).refineToOrDie[Exception]
132 | _ <- ZIO.addFinalizerExit(c =>
133 | ZIO.attempt(if (c.isSuccess) connection.commit() else connection.rollback()).ignore
134 | )
135 | } yield new SqlTransaction {
136 | def delete(delete: Delete[_]): IO[Exception, Int] =
137 | deleteOn(delete, connection)
138 |
139 | def delete(delete: List[Delete[_]]): IO[Exception, Int] =
140 | deleteOnBatch(delete, connection)
141 |
142 | def update(update: Update[_]): IO[Exception, Int] =
143 | updateOn(update, connection)
144 |
145 | def update(update: List[Update[_]]): IO[Exception, Int] =
146 | updateOnBatch(update, connection)
147 |
148 | def read[A](read: Read[A]): Stream[Exception, A] =
149 | readOn(read, connection)
150 |
151 | def insert[A: Schema](insert: Insert[_, A]): IO[Exception, Int] =
152 | insertOn(insert, connection)
153 |
154 | }
155 | }
156 | }
157 | }
158 |
--------------------------------------------------------------------------------
/jdbc/src/main/scala/zio/sql/TransactionSyntaxModule.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import zio._
4 | import zio.stream.ZStream
5 | import zio.schema.Schema
6 | import zio.sql.update._
7 | import zio.sql.select._
8 | import zio.sql.insert._
9 | import zio.sql.delete._
10 |
11 | trait TransactionSyntaxModule { self: Jdbc =>
12 | implicit final class ReadSyntax[A](self: Read[A]) {
13 | def run: ZStream[SqlTransaction, Exception, A] =
14 | ZStream.serviceWithStream(_.read(self))
15 | }
16 |
17 | implicit final class DeleteSyntax(self: Delete[_]) {
18 | def run: ZIO[SqlTransaction, Exception, Int] =
19 | ZIO.serviceWithZIO(_.delete(self))
20 | }
21 |
22 | implicit final class BatchDeleteSyntax[A: Schema](self: List[Delete[_]]) {
23 | def run: ZIO[SqlTransaction, Exception, Int] =
24 | ZIO.serviceWithZIO(_.delete(self))
25 | }
26 |
27 | implicit final class InsertSyntax[A: Schema](self: Insert[_, A]) {
28 | def run: ZIO[SqlTransaction, Exception, Int] =
29 | ZIO.serviceWithZIO(_.insert(self))
30 | }
31 |
32 | implicit final class UpdatedSyntax(self: Update[_]) {
33 | def run: ZIO[SqlTransaction, Exception, Int] =
34 | ZIO.serviceWithZIO(_.update(self))
35 | }
36 |
37 | implicit final class BatchUpdatedSyntax(self: List[Update[_]]) {
38 | def run: ZIO[SqlTransaction, Exception, Int] =
39 | ZIO.serviceWithZIO(_.update(self))
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
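A hedged sketch, written from inside a concrete Jdbc module, of how this syntax composes: statements lifted with `.run` need a SqlTransaction in the environment, which `SqlDriver#transaction` (or the `transact` layer in jdbc.scala below) provides; `deleteOld` and `insertNew` are placeholder statements.

    // Both statements run on the same connection and are committed (or rolled back) together.
    val inOneTx: ZIO[SqlTransaction, Exception, Int] =
      deleteOld.run *> insertNew.run

    def runTx(driver: SqlDriver): IO[Exception, Int] =
      inOneTx.provideLayer(driver.transaction)
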
/jdbc/src/main/scala/zio/sql/jdbc.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import zio._
4 | import zio.stream._
5 | import zio.schema.Schema
6 | import zio.sql.macros.GroupByLike
7 | import zio.sql.update._
8 | import zio.sql.select._
9 | import zio.sql.insert._
10 | import zio.sql.delete._
11 |
12 | trait Jdbc extends Sql with JdbcInternalModule with SqlDriverLiveModule with TransactionSyntaxModule {
13 | trait SqlDriver {
14 | def delete(delete: Delete[_]): IO[Exception, Int]
15 |
16 | def delete(delete: List[Delete[_]]): IO[Exception, Int]
17 |
18 | def update(update: Update[_]): IO[Exception, Int]
19 |
20 | def update(update: List[Update[_]]): IO[Exception, Int]
21 |
22 | def read[A](read: Read[A]): Stream[Exception, A]
23 |
24 | def insert[A: Schema](insert: Insert[_, A]): IO[Exception, Int]
25 |
26 | def transaction: ZLayer[Any, Exception, SqlTransaction]
27 | }
28 | object SqlDriver {
29 |
30 | val live: ZLayer[ConnectionPool, Nothing, SqlDriver] =
31 | ZLayer(ZIO.serviceWith[ConnectionPool](new SqlDriverLive(_)))
32 | }
33 |
34 | trait SqlTransaction {
35 |
36 | def delete(delete: Delete[_]): IO[Exception, Int]
37 |
38 | def delete(delete: List[Delete[_]]): IO[Exception, Int]
39 |
40 | def update(update: Update[_]): IO[Exception, Int]
41 |
42 | def update(update: List[Update[_]]): IO[Exception, Int]
43 |
44 | def read[A](read: Read[A]): Stream[Exception, A]
45 |
46 | def insert[A: Schema](insert: Insert[_, A]): IO[Exception, Int]
47 |
48 | }
49 |
50 | def setParam(param: SqlParameter, jdbcIndex: Int): java.sql.PreparedStatement => Unit
51 |
52 | private[sql] def setParams(rows: List[SqlRow]): java.sql.PreparedStatement => Unit = ps =>
53 | rows.foreach { row =>
54 | row.params.zipWithIndex.foreach { case (param, i) =>
55 | val jdbcIndex = i + 1
56 | setParam(param, jdbcIndex)(ps)
57 | }
58 | ps.addBatch()
59 | }
60 |
61 | def execute[A](read: Read[A]): ZStream[SqlDriver, Exception, A] =
62 | ZStream.serviceWithStream(_.read(read))
63 |
64 | def execute[F, A, Source, Subsource, Head, Tail <: SelectionSet[Source]](
65 | select: Read.Subselect[F, A, Source, Subsource, Head, Tail]
66 | )(implicit verify: GroupByLike[F, select.GroupByF]): ZStream[SqlDriver, Exception, A] =
67 | ZStream.serviceWithStream(_.read(select))
68 |
69 | def execute(delete: Delete[_]): ZIO[SqlDriver, Exception, Int] =
70 | ZIO.serviceWithZIO(_.delete(delete))
71 |
72 | def executeBatchDelete(delete: List[Delete[_]]): ZIO[SqlDriver, Exception, Int] =
73 | ZIO.serviceWithZIO(_.delete(delete))
74 |
75 | def execute[A: Schema](insert: Insert[_, A]): ZIO[SqlDriver, Exception, Int] =
76 | ZIO.serviceWithZIO(_.insert(insert))
77 |
78 | def execute(update: Update[_]): ZIO[SqlDriver, Exception, Int] =
79 | ZIO.serviceWithZIO(_.update(update))
80 |
81 | def executeBatchUpdate(update: List[Update[_]]): ZIO[SqlDriver, Exception, Int] =
82 | ZIO.serviceWithZIO(_.update(update))
83 |
84 | val transact: ZLayer[SqlDriver, Exception, SqlTransaction] =
85 | ZLayer(ZIO.serviceWith[SqlDriver](_.transaction)).flatten
86 |
87 | }
88 |
--------------------------------------------------------------------------------
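For orientation, a hedged wiring sketch written from inside a concrete dialect module (such as the MySQL or Oracle modules below); `poolConfigLayer` and `query` are placeholders:

    // ConnectionPool.live turns a ConnectionPoolConfig into a ConnectionPool,
    // and SqlDriver.live above turns that into a SqlDriver.
    val driverLayer = (poolConfigLayer >>> ConnectionPool.live >>> SqlDriver.live).orDie

    // `execute` only describes the query; providing the layer runs it.
    val rowCount: ZIO[Any, Exception, Long] =
      execute(query).runCount.provideLayer(driverLayer)
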
/jdbc/src/test/scala/zio/sql/ConnectionPoolSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import zio.{ durationInt, Promise, ZIO, ZLayer }
4 | import zio.test.TestAspect.{ sequential, timeout, withLiveClock }
5 | import zio.test.{ TestEnvironment, _ }
6 |
7 | import java.util.Properties
8 |
9 | object ConnectionPoolSpec extends ZIOSpecDefault {
10 |
11 | val poolSize = 10
12 |
13 | private def connProperties(user: String, password: String): Properties = {
14 | val props = new Properties
15 | props.setProperty("user", user)
16 | props.setProperty("password", password)
17 | props
18 | }
19 |
20 | val poolConfigLayer: ZLayer[Any, Throwable, ConnectionPoolConfig] =
21 | ZLayer.scoped {
22 | TestContainer
23 | .postgres()
24 | .map(a =>
25 | ConnectionPoolConfig(
26 | url = a.jdbcUrl,
27 | properties = connProperties(a.username, a.password),
28 | poolSize = poolSize
29 | )
30 | )
31 | }
32 |
33 | override def spec: Spec[TestEnvironment, Any] =
34 | specLayered.provideCustomShared((poolConfigLayer >>> ConnectionPool.live).orDie)
35 |
36 | def specLayered: Spec[TestEnvironment with ConnectionPool, Object] =
37 | suite("Postgres module")(
38 | test("Fibers waiting for connections can be interrupted") {
39 |         // We need to actually sleep here to make sure that the forked fibers
40 |         // have started acquiring connections.
41 | for {
42 | cp <- ZIO.service[ConnectionPool]
43 | promise <- Promise.make[Nothing, Unit]
44 | _ <- ZIO.replicateZIO(poolSize)(ZIO.scoped(cp.connection *> promise.await).fork)
45 | _ <- ZIO.sleep(1.second)
46 | waiting <- ZIO.replicateZIO(poolSize)(ZIO.scoped(cp.connection).fork)
47 | _ <- ZIO.sleep(1.second)
48 | _ <- ZIO.foreach(waiting)(_.interrupt)
49 | _ <- promise.complete(ZIO.unit)
50 | _ <- ZIO.scoped(cp.connection)
51 | } yield assert("")(Assertion.anything)
52 | } @@ timeout(10.seconds) @@ withLiveClock +
53 |
54 |           test("Invalid or closed connections should be reacquired") {
55 | for {
56 | cp <- ZIO.service[ConnectionPool]
57 | _ <- ZIO.replicateZIO(poolSize)(ZIO.scoped(cp.connection.map(_.close)))
58 | conn <- ZIO.scoped(cp.connection)
59 | } yield assert(conn.isValid(10))(Assertion.isTrue)
60 | }
61 | ) @@ sequential
62 | }
63 |
--------------------------------------------------------------------------------
/jdbc/src/test/scala/zio/sql/JdbcRunnableSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import com.dimafeng.testcontainers.JdbcDatabaseContainer
4 | import zio.test.TestEnvironment
5 | import zio.{ Scope, ZIO, ZLayer }
6 | import zio.test.ZIOSpecDefault
7 | import zio.prelude.AssociativeBoth
8 | import zio.test.Gen
9 | import zio.prelude.Covariant
10 | import com.dimafeng.testcontainers.SingleContainer
11 | import java.util.Properties
12 | import zio.test.Spec
13 |
14 | /**
15 | * Base trait for integration-style tests running on Testcontainers.
16 |  * Extending classes are expected to provide the container implementation
17 |  * that the suite runs against by implementing {@link getContainer}.
18 |  *
19 |  * The test suite itself should be implemented in {@link specLayered};
20 |  * individual tests can depend on {@link SqlDriver} in the environment.
21 | */
22 | trait JdbcRunnableSpec extends ZIOSpecDefault with Jdbc {
23 |
24 | type JdbcEnvironment = TestEnvironment with SqlDriver
25 |
26 | def specLayered: Spec[JdbcEnvironment, Object]
27 |
28 | protected def getContainer: SingleContainer[_] with JdbcDatabaseContainer
29 |
30 | protected val autoCommit = false
31 |
32 | override def spec: Spec[TestEnvironment, Any] =
33 | specLayered.provideCustomShared(jdbcLayer)
34 |
35 | private[this] def connProperties(user: String, password: String): Properties = {
36 | val props = new Properties
37 | props.setProperty("user", user)
38 | props.setProperty("password", password)
39 | props
40 | }
41 |
42 | private[this] val poolConfigLayer: ZLayer[Any, Throwable, ConnectionPoolConfig] =
43 | ZLayer.scoped {
44 | testContainer
45 | .map(a =>
46 | ConnectionPoolConfig(
47 | url = a.jdbcUrl,
48 | properties = connProperties(a.username, a.password),
49 | autoCommit = autoCommit
50 | )
51 | )
52 | }
53 |
54 | val connectionPool: ZLayer[Any, Throwable, ConnectionPool] = poolConfigLayer >>> ConnectionPool.live
55 |
56 | private[this] final lazy val jdbcLayer: ZLayer[Any, Any, SqlDriver] =
57 | ZLayer.make[SqlDriver](
58 | connectionPool.orDie,
59 | SqlDriver.live
60 | )
61 |
62 | protected implicit def genInstances[R]
63 | : AssociativeBoth[({ type T[A] = Gen[R, A] })#T] with Covariant[({ type T[+A] = Gen[R, A] })#T] =
64 | new AssociativeBoth[({ type T[A] = Gen[R, A] })#T] with Covariant[({ type T[+A] = Gen[R, A] })#T] {
65 | def map[A, B](f: A => B): Gen[R, A] => Gen[R, B] = _.map(f)
66 | def both[A, B](fa: => Gen[R, A], fb: => Gen[R, B]): Gen[R, (A, B)] = fa.zip(fb)
67 | }
68 |
69 | val testContainer: ZIO[Scope, Throwable, SingleContainer[_] with JdbcDatabaseContainer] =
70 | ZIO.acquireRelease {
71 | ZIO.attemptBlocking {
72 | val c = getContainer
73 | c.start()
74 | c
75 | }
76 | } { container =>
77 | ZIO.attemptBlocking(container.stop()).orDie
78 | }
79 | }
80 |
--------------------------------------------------------------------------------
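A hedged sketch of an extending suite (the concrete MySQL and Oracle variants appear further below; `PostgresJdbcModule` and the init script name are assumptions here):

    import com.dimafeng.testcontainers.{ JdbcDatabaseContainer, PostgreSQLContainer, SingleContainer }
    import org.testcontainers.utility.DockerImageName

    trait PostgresRunnableSpec extends JdbcRunnableSpec with PostgresJdbcModule {

      // Provide the container the suite runs against; tests go into specLayered.
      override protected def getContainer: SingleContainer[_] with JdbcDatabaseContainer =
        new PostgreSQLContainer(
          dockerImageNameOverride = Some(DockerImageName.parse("postgres:alpine"))
        ).configure { c =>
          c.withInitScript("shop_schema.sql") // assumed schema file
          ()
        }
    }
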
/jdbc/src/test/scala/zio/sql/TestContainer.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import com.dimafeng.testcontainers.PostgreSQLContainer
4 | import org.testcontainers.utility.DockerImageName
5 | import zio._
6 |
7 | object TestContainer {
8 |
9 | def postgres(imageName: String = "postgres:alpine"): ZIO[Scope, Throwable, PostgreSQLContainer] =
10 | ZIO.acquireRelease {
11 | ZIO.attemptBlocking {
12 | val c = new PostgreSQLContainer(
13 | dockerImageNameOverride = Option(imageName).map(DockerImageName.parse)
14 | )
15 | c.start()
16 | c
17 | }
18 | } { container =>
19 | ZIO.attemptBlocking(container.stop()).orDie
20 | }
21 |
22 | }
23 |
--------------------------------------------------------------------------------
/macros/src/main/scala-2/zio/sql/macros/Normalizer.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.macros
2 |
3 | import scala.reflect.macros.whitebox
4 | import scala.language.experimental.macros
5 |
6 | sealed trait Normalizer[In] {
7 | type Out
8 | }
9 |
10 | object Normalizer {
11 |
12 | final case class Instance[In, Out2]() extends Normalizer[In] {
13 | override type Out = Out2
14 | }
15 |
16 | implicit def createNormalizer[In, Out]: Instance[In, Out] = macro createNormalizerImpl[In, Out]
17 |
18 | def createNormalizerImpl[In: c.WeakTypeTag, Out: c.WeakTypeTag](
19 | c: whitebox.Context
20 | ): c.Tree = {
21 | import c.universe._
22 |
23 | val inType = weakTypeOf[In]
24 | val _ = weakTypeOf[Out]
25 |
26 | def deconstructType(t: Type): List[Type] =
27 | t.dealias match {
28 | case TypeRef(_, y, types) if (types != Nil && (y == symbolOf[scala.Tuple2[_, _]])) =>
29 | types.head :: deconstructType(types.tail.head)
30 | case TypeRef(_, _, types) if (types == Nil) =>
31 | Nil
32 | case s =>
33 | c.abort(c.enclosingPosition, s"Error ${showRaw(s)}")
34 | }
35 |
36 | val values = deconstructType(inType)
37 | val outType = tq"(..$values)"
38 |
39 | q"""zio.sql.macros.Normalizer.Instance[${q"$inType"}, $outType]()"""
40 | }
41 |
42 | }
43 |
--------------------------------------------------------------------------------
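Following the deconstruction above, the whitebox macro appears to flatten a right-nested, Unit-terminated Tuple2 chain into a flat tuple; a hedged illustration of the computed Out member:

    // Normalizer[(String, (Int, (Boolean, Unit)))]  materializes with  Out = (String, Int, Boolean)
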
/macros/src/main/scala-2/zio/sql/macros/groupbylike.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.macros
2 |
3 | import scala.reflect.macros.blackbox
4 | import scala.language.experimental.macros
5 |
6 | /**
7 | * select Count(id)
8 | from orders
9 | group by customer_id
10 |
11 | select customer_id
12 | from orders
13 |
14 | select Count(id)
15 | from orders
16 |
17 | select customer_id
18 | from orders
19 | group by customer_id
20 |
21 | select customer_id, Count(id)
22 | from orders
23 | group by customer_id
24 | */
25 | sealed trait GroupByLike[All, Grouped]
26 |
27 | object GroupByLike {
28 |
29 | final case class CanBeGrouped[All, Grouped]() extends GroupByLike[All, Grouped]
30 |
31 |   implicit def createGroupByLike[All, Grouped]: GroupByLike[All, Grouped] = macro createGroupByLikeImpl[All, Grouped]
32 |
33 |   def createGroupByLikeImpl[All: c.WeakTypeTag, Grouped: c.WeakTypeTag](
34 | c: blackbox.Context
35 | ): c.Expr[GroupByLike[All, Grouped]] = {
36 | import c.universe._
37 |
38 | val allType = weakTypeOf[All]
39 | val groupedType = weakTypeOf[Grouped]
40 |
41 | def splitIntersection(t: Type): List[Type] =
42 | t.dealias match {
43 | case t: RefinedType =>
44 | t.parents.flatMap(s => splitIntersection(s))
45 | case TypeRef(_, sym, _) if sym.info.isInstanceOf[RefinedTypeApi] =>
46 | splitIntersection(sym.info)
47 | case t: TypeRef =>
48 | t.args.headOption match {
49 | case Some(value) => List(value.dealias)
50 | case None => Nil
51 | }
52 | case _ => Nil
53 | }
54 |
55 | def isThereAggregation(t: Type): Boolean =
56 | t.dealias match {
57 | case RefinedType(members, _) =>
58 | members.find(t => isThereAggregation(t)) match {
59 | case None => false
60 | case Some(_) => true
61 | }
62 | case TypeRef(_, typeSymbol, _) if typeSymbol == symbolOf[zio.sql.Features.Aggregated[_]] =>
63 | true
64 | case _ => false
65 | }
66 |
67 | def extractFromFeatures(f: Type): List[Type] =
68 | f.dealias match {
69 | case TypeRef(_, typeSymbol, args) if typeSymbol == symbolOf[zio.sql.Features.Source[_, _]] =>
70 | List(args.head.dealias)
71 | case RefinedType(members, _) =>
72 | members.flatMap { f =>
73 | extractFromFeatures(f.dealias)
74 | }
75 | case _ =>
76 | Nil
77 | }
78 |
79 | // EXAMPLE
80 | // select(name, Sum(price))
81 | // .from(productTable)
82 | // .groupBy(name, price)
83 |
84 | // name & price
85 | val groupedByF = splitIntersection(groupedType)
86 |
87 | // name
88 | val notAggregatedF = extractFromFeatures(allType)
89 |
90 | // true
91 | val aggregateFunctionExists = isThereAggregation(allType)
92 |
93 | val result = c.Expr[GroupByLike[All, Grouped]](
94 | q"new zio.sql.macros.GroupByLike.CanBeGrouped[${q"$allType"}, ${q"$groupedType"}]()"
95 | )
96 |
97 | val partialAggregation = aggregateFunctionExists && !notAggregatedF.isEmpty
98 |
99 | // price
100 | // val _ = groupedByF diff notAggregatedF
101 |
102 | // Nil
103 | val missing = notAggregatedF diff groupedByF
104 |
105 | // group by not called
106 | if (groupedByF.isEmpty) {
107 | if (partialAggregation) {
108 | c.abort(
109 | c.enclosingPosition,
110 | s"Column(s) ${missing.distinct.mkString(" and ")} must appear in the GROUP BY clause or be used in an aggregate function"
111 | )
112 | } else {
113 | result
114 | }
115 | // group by called
116 | } else {
117 | if (!missing.isEmpty) {
118 | c.abort(
119 | c.enclosingPosition,
120 | s"Column(s) ${missing.distinct.mkString(" and ")} must appear in the GROUP BY clause or be used in an aggregate function"
121 | )
122 | } else {
123 | result
124 | }
125 | }
126 | }
127 | }
128 |
--------------------------------------------------------------------------------
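A hedged DSL-level reading of the EXAMPLE comment above (`name`, `price` and `productTable` are the placeholder columns and table from that comment; `Sum` is the aggregation from AggregationDef):

    // The macro aborts ("Column(s) name must appear in the GROUP BY clause or be used
    // in an aggregate function") for a query shaped like:
    //   select(name, Sum(price)).from(productTable)

    // and materializes CanBeGrouped for:
    //   select(name, Sum(price)).from(productTable).groupBy(name)
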
/macros/src/main/scala-2/zio/sql/macros/havinglike.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.macros
2 |
3 | import scala.reflect.macros.blackbox
4 | import scala.language.experimental.macros
5 |
6 | /**
7 | * `HAVING` can only be called:
8 | *
9 |  * 1. If it is called with an aggregate function returning a boolean, like `HAVING Count(id) > 5`,
10 |  *    while all the previously selected columns appear in the GROUP BY clause.
11 |  * 2. If it is called with a plain expression returning a boolean, like `HAVING customer_id = '636ae137-5b1a-4c8c-b11f-c47c624d9cdc'`,
12 |  *    and all the previously selected columns appear in the GROUP BY clause.
13 | *
14 | * select order_date, Count(id)
15 | from orders
16 | group by order_date, customer_id
17 | having customer_id = '60b01fc9-c902-4468-8d49-3c0f989def37'
18 |
19 | select Count(id)
20 | from orders
21 | group by customer_id
22 | having Count(id) > 5
23 |
24 | select customer_id
25 | from orders
26 | group by customer_id
27 | having Count(id) > 5
28 |
29 | select Count(id)
30 | from orders
31 | having Count(id) > 5
32 | */
33 | sealed trait HavingIsSound[AllF, GroupByF, HavingF]
34 |
35 | object HavingIsSound {
36 |
37 | final case class HavingCanBeCalled[AllF, GroupByF, HavingF]() extends HavingIsSound[AllF, GroupByF, HavingF]
38 |
39 | implicit def materializeHavingIsSound[AllF, GroupByF, HavingF]: HavingIsSound[AllF, GroupByF, HavingF] =
40 | macro materializeHavingIsSoundImpl[AllF, GroupByF, HavingF]
41 |
42 | def materializeHavingIsSoundImpl[AllF: c.WeakTypeTag, GroupByF: c.WeakTypeTag, HavingF: c.WeakTypeTag](
43 | c: blackbox.Context
44 | ): c.Expr[HavingIsSound[AllF, GroupByF, HavingF]] = {
45 | import c.universe._
46 |
47 | val allType = weakTypeOf[AllF]
48 | val groupedType = weakTypeOf[GroupByF]
49 | val havingType = weakTypeOf[HavingF]
50 |
51 | def splitIntersection(t: Type): List[Type] =
52 | t.dealias match {
53 | case t: RefinedType =>
54 | t.parents.flatMap(s => splitIntersection(s))
55 | case TypeRef(_, sym, _) if sym.info.isInstanceOf[RefinedTypeApi] =>
56 | splitIntersection(sym.info)
57 | case t: TypeRef =>
58 | t.args.headOption match {
59 | case Some(value) => List(value.dealias)
60 | case None => Nil
61 | }
62 | case _ => Nil
63 | }
64 |
65 | def extractSourceColumns(f: Type): List[Type] =
66 | f.dealias match {
67 | case TypeRef(_, typeSymbol, args) if typeSymbol == symbolOf[zio.sql.Features.Source[_, _]] =>
68 | List(args.head.dealias)
69 | case RefinedType(members, _) =>
70 | members.flatMap(f => extractSourceColumns(f))
71 | case _ =>
72 | Nil
73 | }
74 |
75 | val groupedByF = splitIntersection(groupedType)
76 | val nonAggSelection = extractSourceColumns(allType)
77 |
78 | val notCovered = nonAggSelection diff groupedByF
79 |
80 | val havingSources = extractSourceColumns(havingType)
81 |
82 | val missing = havingSources diff groupedByF
83 |
84 | if (!notCovered.isEmpty) {
85 | c.abort(
86 | c.enclosingPosition,
87 | s"Column(s) ${notCovered.distinct.mkString(" and ")} must appear in the GROUP BY clause or be used in an aggregate function"
88 | )
89 | } else {
90 | if ((havingSources.isEmpty) || missing.isEmpty) {
91 | c.Expr[HavingIsSound[AllF, GroupByF, HavingF]](
92 | q"new zio.sql.macros.HavingIsSound.HavingCanBeCalled[${q"$allType"}, ${q"$groupedType"}, ${havingType}]()"
93 | )
94 | } else {
95 | c.abort(
96 | c.enclosingPosition,
97 | s"Column(s) ${missing.distinct.mkString(" and ")} must appear in the GROUP BY clause or be used in an aggregate function"
98 | )
99 | }
100 | }
101 | }
102 | }
103 |
--------------------------------------------------------------------------------
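A hedged DSL-level sketch of rule 1 above (`orders`, `id` and `customerId` are placeholders mirroring the SQL examples; `Count` is the aggregation from AggregationDef):

    // HavingCanBeCalled materializes for:
    //   select(Count(id)).from(orders).groupBy(customerId).having(Count(id) > 5)

    // The macro aborts ("Column(s) ... must appear in the GROUP BY clause ...") when a
    // selected column is neither grouped nor aggregated:
    //   select(customerId, Count(id)).from(orders).having(Count(id) > 5)
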
/macros/src/main/scala-2/zio/sql/macros/notliteral.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.macros
2 |
3 | import scala.reflect.macros.blackbox
4 | import scala.language.experimental.macros
5 |
6 | sealed trait IsNotLiteral[F]
7 |
8 | object IsNotLiteral {
9 |
10 | final case class FIsNotLiteral[F]() extends IsNotLiteral[F]
11 |
12 | implicit def materializeIsNotLiteral[F]: IsNotLiteral[F] =
13 | macro materializeIsNotLiteralImpl[F]
14 |
15 | def materializeIsNotLiteralImpl[F: c.WeakTypeTag](
16 | c: blackbox.Context
17 | ): c.Expr[IsNotLiteral[F]] = {
18 | import c.universe._
19 | val fType = weakTypeOf[F]
20 |
21 | def isLiteral(t: Type): Boolean =
22 | t.dealias match {
23 | case TypeRef(_, typeSymbol, _) if typeSymbol == symbolOf[zio.sql.Features.Literal] =>
24 | true
25 | case RefinedType(members, _) =>
26 | members.find(t => isLiteral(t)) match {
27 | case None => false
28 | case Some(_) => true
29 | }
30 | case _ => false
31 | }
32 |
33 | if (isLiteral(fType)) {
34 | c.abort(c.enclosingPosition, "Use === instead of == when comparing two Exprs")
35 | } else {
36 | c.Expr[IsNotLiteral[F]](
37 | q"new zio.sql.macros.IsNotLiteral.FIsNotLiteral[${q"$fType"}]()"
38 | )
39 | }
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
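A hedged sketch of what this guards against (`fName` and `customers` are placeholder column/table names): Scala's `==` compares the two values themselves and yields a plain Boolean, which would otherwise be lifted into the query as a literal.

    // The macro aborts with "Use === instead of == when comparing two Exprs" for:
    //   select(fName).from(customers).where(fName == "Ronald")

    // Intended form, which builds a SQL comparison instead of a Scala Boolean:
    //   select(fName).from(customers).where(fName === "Ronald")
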
/macros/src/main/scala-2/zio/sql/macros/tablelike.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.macros
2 |
3 | import scala.reflect.macros.blackbox
4 | import java.time._
5 | import java.util.UUID
6 | import zio.Chunk
7 |
8 | sealed trait TableSchema[T]
9 |
10 | object TableSchema {
11 | import scala.language.experimental.macros
12 |
13 | final case class Compatible[T]() extends TableSchema[T]
14 |
15 | implicit def materializeTableSchema[T]: TableSchema[T] = macro materializeTableSchemaImpl[T]
16 |
17 | def materializeTableSchemaImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[TableSchema[T]] = {
18 | import c.universe._
19 |
20 | val tpe = weakTypeOf[T]
21 |
22 | // TODO support scala.math.BigDecimal
23 | val sqlPrimitives =
24 | Seq(
25 | typeOf[java.math.BigDecimal],
26 | typeOf[Boolean],
27 | typeOf[Byte],
28 | typeOf[Chunk[Byte]],
29 | typeOf[Char],
30 | typeOf[Double],
31 | typeOf[Float],
32 | typeOf[Instant],
33 | typeOf[Int],
34 | typeOf[LocalDate],
35 | typeOf[LocalDateTime],
36 | typeOf[OffsetTime],
37 | typeOf[LocalTime],
38 | typeOf[Long],
39 | typeOf[OffsetDateTime],
40 | typeOf[Short],
41 | typeOf[String],
42 | typeOf[UUID],
43 | typeOf[ZonedDateTime]
44 | )
45 |
46 | def isSqlPrimitive(tpe: Type): Boolean = sqlPrimitives.contains(tpe)
47 |
48 |     val members = tpe.decls.sorted.collect {
49 | case p: TermSymbol if p.isCaseAccessor && !p.isMethod => p
50 | }.map(_.typeSignature)
51 |
52 | def isSupportedOption(tpe: Type): Boolean =
53 | tpe match {
54 | case TypeRef(_, hkt, t) => (hkt == symbolOf[Option[_]]) && sqlPrimitives.contains(t.head)
55 | case _ => false
56 | }
57 |
58 |     val incompatible =
59 |       members
60 | .filter(m => !isSqlPrimitive(m))
61 | .filter(t => !isSupportedOption(t))
62 |
63 | if (!tpe.typeSymbol.asClass.isCaseClass) {
64 | c.abort(
65 | c.enclosingPosition,
66 |         s"You can only define a table with a case class"
67 | )
68 | } else {
69 |       if (incompatible.isEmpty)
70 | c.Expr[TableSchema[T]](q"new zio.sql.macros.TableSchema.Compatible[${q"$tpe"}]()")
71 | else {
72 | c.abort(
73 | c.enclosingPosition,
74 |           s"Types not supported by SQL: ${incompatible.map(_.dealias.toString()).mkString(", ")}"
75 | )
76 | }
77 | }
78 | }
79 | }
80 |
--------------------------------------------------------------------------------
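A hedged sketch of what the check accepts and rejects (the names are placeholders; the accepted shape mirrors the Customers case classes in the specs further below):

    // Accepted by materializeTableSchema (e.g. via Table.defineTable[Customers]):
    // every field is one of the SQL primitives listed above, or an Option of one.
    //   final case class Customers(id: java.util.UUID, dob: java.time.LocalDate, verified: Boolean)

    // Rejected with "Types not supported by SQL: ..." because of the nested case class field:
    //   final case class Order(id: java.util.UUID, customer: Customers)
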
/macros/src/main/scala-2/zio/sql/macros/wherelike.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.macros
2 |
3 | import scala.reflect.macros.blackbox
4 | import scala.language.experimental.macros
5 |
6 | sealed trait WhereIsSound[WhereF, GroupByF]
7 |
8 | /**
9 | // WHERE not allowed. Use `HAVING` instead.
10 | select(amount)
11 | .from(productTable)
12 | .groupBy(amount)
13 | .having(amount > 10)
14 | .where(amount > 10)
15 |
16 | GroupByF = Any with Source["amount"]
17 |  WhereF = Source["amount"] with Literal
18 |
19 | // Aggregate functions are not allowed in WHERE
20 | select(amount)
21 | .from(productTable)
22 | .where(Sum(price) > 200)
23 |
24 | WhereF = Aggregated[Source["price"]] with Literal
25 | GroupByF = Any
26 |
27 | */
28 | object WhereIsSound {
29 |
30 | // WhereF
31 | final case class WhereCanBeCalled[WhereF, GroupByF]() extends WhereIsSound[WhereF, GroupByF]
32 |
33 | implicit def materializeWhereIsSound[WhereF, GroupByF]: WhereIsSound[WhereF, GroupByF] =
34 | macro materializeWhereIsSoundImpl[WhereF, GroupByF]
35 |
36 | def materializeWhereIsSoundImpl[WhereF: c.WeakTypeTag, GroupByF: c.WeakTypeTag](
37 | c: blackbox.Context
38 | ): c.Expr[WhereIsSound[WhereF, GroupByF]] = {
39 | import c.universe._
40 |
41 | val groupedType = weakTypeOf[GroupByF]
42 | val whereF = weakTypeOf[WhereF]
43 |
44 | // Source["age"] with Source["amount"] : List(Source["age"], Source["amount"])
45 | def splitIntersection(t: Type): List[Type] =
46 | t.dealias match {
47 | case t: RefinedType =>
48 | t.parents.flatMap(s => splitIntersection(s))
49 | case TypeRef(_, sym, _) if sym.info.isInstanceOf[RefinedTypeApi] =>
50 | splitIntersection(sym.info)
51 | case t: TypeRef =>
52 | t.args.headOption match {
53 | case Some(value) => List(value.dealias)
54 | case None => Nil
55 | }
56 | case _ => Nil
57 | }
58 |
59 | // Aggregate[Source["price"]] with Literal] : true
60 | // Source["price"] with Literal] : false
61 | // we split the intersection and look for Aggregated in F
62 | def isThereAggregation(t: Type): Boolean =
63 | t.dealias match {
64 | case TypeRef(_, typeSymbol, _) if typeSymbol == symbolOf[zio.sql.Features.Aggregated[_]] =>
65 | true
66 | case RefinedType(members, _) =>
67 | members.find(t => isThereAggregation(t)) match {
68 | case None => false
69 | case Some(_) => true
70 | }
71 | case _ => false
72 | }
73 |
74 | if (!splitIntersection(groupedType).isEmpty) {
75 | c.abort(c.enclosingPosition, "WHERE not allowed. Use `HAVING` instead.")
76 | }
77 |
78 | if (isThereAggregation(whereF)) {
79 | c.abort(c.enclosingPosition, "Aggregate functions are not allowed in WHERE")
80 | }
81 |
82 | c.Expr[WhereIsSound[WhereF, GroupByF]](
83 | q"new zio.sql.macros.WhereIsSound.WhereCanBeCalled[${q"$whereF"}, ${q"$groupedType"}]()"
84 | )
85 | }
86 | }
87 |
--------------------------------------------------------------------------------
/macros/src/main/scala-3/zio/sql/macros/groupbylike.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.macros
2 |
3 | sealed trait GroupByLike[All, Grouped]
4 |
5 | object GroupByLike {
6 |
7 | final case class CanBeGrouped[All, Grouped]() extends GroupByLike[All, Grouped]
8 |
9 | implicit def createGroupByLike[All, Grouped]: GroupByLike[All, Grouped] = CanBeGrouped[All, Grouped]()
10 | }
11 |
--------------------------------------------------------------------------------
/macros/src/main/scala-3/zio/sql/macros/havinglike.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.macros
2 |
3 | sealed trait HavingIsSound[AllF, GroupByF, HavingF]
4 |
5 | object HavingIsSound {
6 |
7 | final case class HavingCanBeCalled[AllF, GroupByF, HavingF]() extends HavingIsSound[AllF, GroupByF, HavingF]
8 |
9 | implicit def materializeHavingIsSound[AllF, GroupByF, HavingF]: HavingIsSound[AllF, GroupByF, HavingF] =
10 | HavingCanBeCalled[AllF, GroupByF, HavingF]()
11 |
12 | }
13 |
--------------------------------------------------------------------------------
/macros/src/main/scala-3/zio/sql/macros/insertlike.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.macros
2 |
3 | sealed trait InsertLike[F, ColsRepr, AllColumnIdentities, Z]
4 |
5 | object InsertLike {
6 |
7 | final case class CanBeInserted[F, ColsRepr, AllColumnIdentities, Z]()
8 | extends InsertLike[F, ColsRepr, AllColumnIdentities, Z]
9 |
10 | implicit def createInsertLike[
11 | F,
12 | ColsRepr,
13 | AllColumnIdentities,
14 | Z
15 | ]: InsertLike[
16 | F,
17 | ColsRepr,
18 | AllColumnIdentities,
19 | Z
20 | ] = CanBeInserted[F, ColsRepr, AllColumnIdentities, Z]()
21 | }
22 |
--------------------------------------------------------------------------------
/macros/src/main/scala-3/zio/sql/macros/normalizer.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.macros
2 |
3 | import scala.language.experimental.macros
4 |
5 | sealed trait Normalizer[In] {
6 | type Out
7 | }
8 |
9 | // TODO transparent inline
10 | object Normalizer {
11 |
12 | // final case class Instance[In, Out2]() extends Normalizer[In] {
13 | // override type Out = Out2
14 | // }
15 |
16 | implicit def createNormalizer[In]: Normalizer[In] =
17 | new Normalizer[In] {}
18 |
19 | }
20 |
--------------------------------------------------------------------------------
/macros/src/main/scala-3/zio/sql/macros/notliteral.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.macros
2 |
3 | sealed trait IsNotLiteral[F]
4 |
5 | object IsNotLiteral {
6 |
7 | final case class FIsNotLiteral[F]() extends IsNotLiteral[F]
8 |
9 | implicit def materializeIsNotLiteral[F]: IsNotLiteral[F] =
10 | FIsNotLiteral[F]()
11 | }
12 |
--------------------------------------------------------------------------------
/macros/src/main/scala-3/zio/sql/macros/tablelike.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.macros
2 |
3 | sealed trait TableSchema[T]
4 |
5 | object TableSchema {
6 |
7 | final case class Compatible[T]() extends TableSchema[T]
8 |
9 | implicit def materializeTableSchema[T]: TableSchema[T] = Compatible[T]()
10 |
11 | }
12 |
--------------------------------------------------------------------------------
/macros/src/main/scala-3/zio/sql/macros/wherelike.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.macros
2 |
3 | sealed trait WhereIsSound[WhereF, GroupByF]
4 |
5 | object WhereIsSound {
6 | final case class WhereCanBeCalled[WhereF, GroupByF]() extends WhereIsSound[WhereF, GroupByF]
7 |
8 | implicit def materializeWhereIsSound[WhereF, GroupByF]: WhereIsSound[WhereF, GroupByF] =
9 | WhereCanBeCalled[WhereF, GroupByF]()
10 | }
11 |
--------------------------------------------------------------------------------
/macros/src/main/scala/zio/sql/Features.scala:
--------------------------------------------------------------------------------
1 | package zio.sql
2 |
3 | import scala.annotation.implicitNotFound
4 |
5 | object Features {
6 |
7 | type Aggregated[_]
8 | type Source[ColumnName, TableType]
9 | type Literal
10 | type Function0
11 | type Derived
12 |
13 | @implicitNotFound("You can only use this function on a column in the source table")
14 | sealed trait IsSource[-A]
15 |
16 | object IsSource {
17 | implicit def isSource[ColumnName, TableType]: IsSource[Source[ColumnName, TableType]] =
18 | new IsSource[Source[ColumnName, TableType]] {}
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/mysql/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=1.9.9
2 |
--------------------------------------------------------------------------------
/mysql/src/main/scala/zio/sql/mysql/MysqlJdbcModule.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.mysql
2 |
3 | import java.sql.Timestamp
4 | import java.time._
5 | import java.util.UUID
6 |
7 | import zio.schema.StandardType._
8 | import zio.sql.{ Jdbc, SqlParameter }
9 |
10 | trait MysqlJdbcModule extends MysqlRenderModule with Jdbc {
11 |
12 | override def setParam(param: SqlParameter, jdbcIndex: Int): java.sql.PreparedStatement => Unit = ps =>
13 | param._type match {
14 | case BigDecimalType => ps.setBigDecimal(jdbcIndex, param.value.asInstanceOf[java.math.BigDecimal])
15 | case InstantType => ps.setTimestamp(jdbcIndex, Timestamp.from(param.value.asInstanceOf[Instant]))
16 | case ByteType => ps.setByte(jdbcIndex, param.value.asInstanceOf[Byte])
17 | case CharType => ps.setString(jdbcIndex, String.valueOf(param.value.asInstanceOf[Char]))
18 | case IntType => ps.setInt(jdbcIndex, param.value.asInstanceOf[Int])
19 | case MonthDayType => ps.setString(jdbcIndex, param.value.toString())
20 | case BinaryType => ps.setString(jdbcIndex, param.value.toString())
21 | case MonthType => ps.setString(jdbcIndex, param.value.toString())
22 | case LocalDateTimeType => ps.setObject(jdbcIndex, param.value.asInstanceOf[LocalDateTime])
23 | case UnitType => ps.setObject(jdbcIndex, null)
24 | case YearMonthType => ps.setString(jdbcIndex, param.value.toString())
25 | case DoubleType => ps.setDouble(jdbcIndex, param.value.asInstanceOf[Double])
26 | case YearType => ps.setString(jdbcIndex, param.value.toString())
27 | case OffsetDateTimeType => ps.setObject(jdbcIndex, param.value.asInstanceOf[OffsetDateTime])
28 | case ZonedDateTimeType =>
29 | ps.setObject(jdbcIndex, Timestamp.from(param.value.asInstanceOf[ZonedDateTime].toInstant))
30 | case BigIntegerType => ps.setLong(jdbcIndex, param.value.asInstanceOf[BigInt].longValue)
31 | case UUIDType => ps.setString(jdbcIndex, param.value.asInstanceOf[UUID].toString())
32 | case ZoneOffsetType => ps.setString(jdbcIndex, param.value.toString())
33 | case ShortType => ps.setShort(jdbcIndex, param.value.asInstanceOf[Short])
34 | case LocalTimeType => ps.setObject(jdbcIndex, param.value.asInstanceOf[LocalTime])
35 | case OffsetTimeType => ps.setObject(jdbcIndex, param.value.asInstanceOf[OffsetTime])
36 | case LongType => ps.setLong(jdbcIndex, param.value.asInstanceOf[Long])
37 | case StringType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
38 | case PeriodType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
39 | case ZoneIdType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
40 | case LocalDateType => ps.setObject(jdbcIndex, param.value.asInstanceOf[LocalDate])
41 | case BoolType => ps.setBoolean(jdbcIndex, param.value.asInstanceOf[Boolean])
42 | case DayOfWeekType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
43 | case FloatType => ps.setFloat(jdbcIndex, param.value.asInstanceOf[Float])
44 | case DurationType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
45 | }
46 |
47 | }
48 |
--------------------------------------------------------------------------------
/mysql/src/main/scala/zio/sql/mysql/MysqlSqlModule.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.mysql
2 |
3 | import java.time._
4 | import java.sql.ResultSet
5 | import java.util.UUID
6 | import zio.sql.Sql
7 | import zio.sql.select._
8 | import zio.sql.expr._
9 | import zio.sql.typetag._
10 |
11 | trait MysqlSqlModule extends Sql { self =>
12 |
13 | object MysqlSpecific {
14 | trait MysqlTypeTag[+A] extends TypeTag.TypeTagExtension[A]
15 |
16 | object MysqlTypeTag {
17 | implicit case object TYear extends MysqlTypeTag[Year] {
18 | override def decode(column: Int, resultSet: ResultSet): Either[DecodingError, Year] =
19 | scala.util
20 | .Try(Year.of(resultSet.getByte(column).toInt))
21 | .fold(
22 | _ => Left(DecodingError.UnexpectedNull(column)),
23 | r => Right(r)
24 | )
25 | }
26 | }
27 | }
28 |
29 | object MysqlFunctionDef {
30 | val BitLength = FunctionDef[String, Int](FunctionName("bit_length"))
31 | val Crc32 = FunctionDef[String, Long](FunctionName("crc32"))
32 | val CurrentDate = Expr.ParenlessFunctionCall0[LocalDate](FunctionName("current_date"))
33 | val CurrentTime = Expr.ParenlessFunctionCall0[OffsetTime](FunctionName("current_time"))
34 | val Degrees = FunctionDef[Double, Double](FunctionName("degrees"))
35 | val Hex = FunctionDef[Long, String](FunctionName("hex"))
36 | val Log2 = FunctionDef[Double, Double](FunctionName("log2"))
37 | val Log10 = FunctionDef[Double, Double](FunctionName("log10"))
38 | val MakeDate = FunctionDef[(Int, Int), LocalDate](FunctionName("makedate"))
39 | val MakeTime = FunctionDef[(Int, Int, Double), LocalTime](FunctionName("maketime"))
40 | val Now = FunctionDef[Any, ZonedDateTime](FunctionName("now"))
41 | val Pi = Expr.FunctionCall0[Double](FunctionDef[Any, Double](FunctionName("pi")))
42 | val Soundex = FunctionDef[String, String](FunctionName("soundex"))
43 | val Rand = FunctionDef[Int, Double](FunctionName("rand"))
44 | val RPad = FunctionDef[(String, Int, String), String](FunctionName("rpad"))
45 | val Uuid = Expr.FunctionCall0[UUID](FunctionDef[Any, UUID](FunctionName("uuid")))
46 | val Radians = FunctionDef[Double, Double](FunctionName("radians"))
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/mysql/src/test/resources/logback.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/mysql/src/test/scala/zio/sql/mysql/CustomFunctionDefSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.mysql
2 |
3 | import zio.test._
4 | import zio.test.Assertion._
5 | import zio.schema._
6 | import java.time.{ LocalDate, LocalTime, ZoneId }
7 | import java.time.format.DateTimeFormatter
8 | import zio.sql.Jdbc
9 | import java.util.UUID
10 | import zio.sql.table._
11 |
12 | object CustomFunctionDefSpec extends MysqlRunnableSpec with Jdbc {
13 |
14 | import MysqlFunctionDef._
15 |
16 | case class Customers(id: UUID, dob: LocalDate, first_name: String, last_name: String, verified: Boolean)
17 |
18 | implicit val customerSchema: Schema.CaseClass5[UUID, LocalDate, String, String, Boolean, Customers] =
19 | DeriveSchema.gen[Customers]
20 |
21 | val customers = Table.defineTable[Customers]
22 |
23 | val (customerId, dob, fName, lName, verified) = customers.columns
24 |
25 | override def specLayered = suite("MySQL FunctionDef")(
26 | test("crc32") {
27 | val query = select(Crc32("MySQL")) from customers
28 |
29 | val expected = 3259397556L
30 |
31 | val testResult = execute(query)
32 |
33 | assertZIO(testResult.runHead.some)(equalTo(expected))
34 | },
35 | test("degrees") {
36 | val query = select(Degrees(Math.PI)) from customers
37 |
38 | val expected = 180d
39 |
40 | val testResult = execute(query)
41 |
42 | assertZIO(testResult.runHead.some)(equalTo(expected))
43 | },
44 | test("hex") {
45 | val query = select(Hex(255L)) from customers
46 | val expected = "FF"
47 | val queryResult = execute(query)
48 |
49 | assertZIO(queryResult.runHead.some)(equalTo(expected))
50 | },
51 | test("log2") {
52 | val query = select(Log2(8d)) from customers
53 |
54 | val expected = 3d
55 |
56 | val testResult = execute(query)
57 |
58 | assertZIO(testResult.runHead.some)(equalTo(expected))
59 | },
60 | test("log10") {
61 | val query = select(Log10(1000000d)) from customers
62 |
63 | val expected = 6d
64 |
65 | val testResult = execute(query)
66 |
67 | assertZIO(testResult.runHead.some)(equalTo(expected))
68 | },
69 | test("now") {
70 | val timestampFormatter =
71 | DateTimeFormatter.ofPattern("uuuu-MM-dd HH:mm:ss").withZone(ZoneId.of("UTC"))
72 |
73 | val query = select(Now())
74 |
75 | val testResult = execute(query)
76 |
77 | assertZIO(
78 | testResult.runHead.some
79 | .map(t => timestampFormatter.format(t))
80 | )(matchesRegex("[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}"))
81 | },
82 | test("bit_length") {
83 | val query = select(BitLength("hello"))
84 |
85 | val expected = 40
86 |
87 | val testResult = execute(query)
88 |
89 | assertZIO(testResult.runHead.some)(equalTo(expected))
90 | },
91 | test("soundex outputs should not match for non-similar-sounding strings") {
92 | val queryForRobert = select(Soundex("Robert"))
93 | val queryForTam = select(Soundex("Tam"))
94 |
95 | val resultForRobert = execute(queryForRobert)
96 | val resultForTam = execute(queryForTam)
97 |
98 | for {
99 | robertResult <- resultForRobert.runCollect
100 | tamResult <- resultForTam.runCollect
101 | } yield assert(robertResult.head.equals(tamResult.head))(equalTo(false))
102 | },
103 | test("soundex outputs should match for equivalent strings") {
104 | val queryForRobert = select(Soundex("Robert"))
105 | val queryForRupert = select(Soundex("Rupert"))
106 |
107 | val resultForRobert = execute(queryForRobert)
108 | val resultForRupert = execute(queryForRupert)
109 |
110 | for {
111 | robertResult <- resultForRobert.runCollect
112 | rupertResult <- resultForRupert.runCollect
113 | } yield assert(robertResult.head.equals(rupertResult.head))(equalTo(true))
114 | },
115 | test("soundex") {
116 | val query = select(Soundex("Robert"))
117 | val expected = "R163"
118 |
119 | val testResult = execute(query)
120 |
121 | assertZIO(testResult.runHead.some)(equalTo(expected))
122 | },
123 | test("current_date") {
124 | val query = select(CurrentDate)
125 |
126 | val expected = LocalDate.now()
127 |
128 | val testResult = execute(query)
129 |
130 | assertZIO(testResult.runHead.some)(equalTo(expected))
131 | },
132 | test("maketime") {
133 | val query = select(MakeTime(12, 15, 30.5)) from customers
134 |
135 | val expected = LocalTime.parse("12:15:30.5")
136 |
137 | val testResult = execute(query)
138 |
139 | assertZIO(testResult.runHead.some)(equalTo(expected))
140 | },
141 | test("pi") {
142 | val query = select(Pi) from customers
143 |
144 | val expected = 3.141593d
145 |
146 | val testResult = execute(query)
147 |
148 | assertZIO(testResult.runHead.some)(equalTo(expected))
149 | },
150 | test("uuid") {
151 | assertZIO(execute(select(Uuid)).runHead.some)(!isNull)
152 | },
153 | test("rand") {
154 | val query = select(Rand(5))
155 |
156 | val testResult = execute(query)
157 |
158 | assertZIO(testResult.runHead.some)(isGreaterThanEqualTo(0d) && isLessThanEqualTo(1d))
159 | },
160 | test("rpad") {
161 | val cases = Seq(("hi", 5, "?", "hi???"), ("hi", 1, "?", "h"))
162 | check(Gen.fromIterable(cases)) { case (str, len, pad, exp) =>
163 | assertZIO(execute(select(RPad(str, len, pad))).runHead.some)(equalTo(exp))
164 | }
165 | },
166 | test("current_time") {
167 | assertZIO(
168 | execute(select(CurrentTime)).runHead.some
169 | .map(t => DateTimeFormatter.ofPattern("HH:mm:ss").format(t))
170 | )(matchesRegex("(2[0-3]|[01][0-9]):[0-5][0-9]:[0-5][0-9]"))
171 | },
172 | test("Radians") {
173 | val query = select(Radians(40d))
174 |
175 | val expected = Math.toRadians(40d)
176 |
177 | val testResult = execute(query)
178 |
179 | assertZIO(testResult.runHead.some)(equalTo(expected))
180 | },
181 | test("makedate") {
182 | val query = select(MakeDate(2022, 31)) from customers
183 |
184 | val expected = LocalDate.of(2022, 1, 31)
185 |
186 | val testResult = execute(query)
187 |
188 | assertZIO(testResult.runHead.some)(equalTo(expected))
189 | }
190 | )
191 | }
192 |
--------------------------------------------------------------------------------
/mysql/src/test/scala/zio/sql/mysql/DeleteSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.mysql
2 |
3 | import zio.test._
4 |
5 | import java.util.UUID
6 | import java.time.LocalDate
7 | import zio.schema.{ DeriveSchema, Schema }
8 | import zio.test.TestAspect.sequential
9 | import zio.sql.table._
10 |
11 | object DeleteSpec extends MysqlRunnableSpec {
12 |
13 | case class Customers(id: UUID, dob: LocalDate, first_name: String, lastName: String, verified: Boolean)
14 |
15 | implicit val customerSchema: Schema.CaseClass5[UUID, LocalDate, String, String, Boolean, Customers] =
16 | DeriveSchema.gen[Customers]
17 |
18 | val customers = Table.defineTable[Customers]
19 |
20 | val (_, _, _, lastName, verified) = customers.columns
21 |
22 | override def specLayered = suite("MySQL module delete")(
23 |     test("Can delete from a single table with an 'is not true' condition") {
24 | val query = deleteFrom(customers).where(verified.isNotTrue)
25 |
26 | for {
27 | r <- execute(query)
28 | } yield assertTrue(r == 1)
29 | },
30 |     test("Can delete from a single table with an equals condition") {
31 | val query = deleteFrom(customers).where(lastName === "Murray")
32 |
33 | for {
34 | r <- execute(query)
35 | } yield assertTrue(r == 1)
36 | }
37 | ) @@ sequential
38 | }
39 |
--------------------------------------------------------------------------------
/mysql/src/test/scala/zio/sql/mysql/FunctionDefSpec.scala:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/mysql/src/test/scala/zio/sql/mysql/MysqlRunnableSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.mysql
2 |
3 | import com.dimafeng.testcontainers.{ JdbcDatabaseContainer, MySQLContainer, SingleContainer }
4 | import org.testcontainers.utility.DockerImageName
5 | import zio.sql.JdbcRunnableSpec
6 |
7 | trait MysqlRunnableSpec extends JdbcRunnableSpec with MysqlJdbcModule {
8 |
9 | override protected def getContainer: SingleContainer[_] with JdbcDatabaseContainer =
10 | new MySQLContainer(
11 | mysqlImageVersion = Option("mysql:8.2.0").map(DockerImageName.parse)
12 | ).configure { a =>
13 | a.withInitScript("shop_schema.sql")
14 | ()
15 | }
16 |
17 | }
18 |
--------------------------------------------------------------------------------
/mysql/src/test/scala/zio/sql/mysql/TransactionSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.mysql
2 |
3 | import java.util.UUID
4 |
5 | import zio._
6 | import zio.schema._
7 | import zio.test.Assertion._
8 | import zio.test._
9 | import zio.test.TestAspect.sequential
10 | import java.time.LocalDate
11 | import zio.sql.Jdbc
12 | import zio.sql.table._
13 |
14 | object TransactionSpec extends MysqlRunnableSpec with Jdbc {
15 |
16 | case class Customers(id: UUID, dob: LocalDate, first_name: String, last_name: String, verified: Boolean)
17 |
18 | implicit val customerSchema: Schema.CaseClass5[UUID, LocalDate, String, String, Boolean, Customers] =
19 | DeriveSchema.gen[Customers]
20 |
21 | val customers = Table.defineTable[Customers]
22 |
23 | val (customerId, dob, fName, lName, verified) = customers.columns
24 |
25 | override def specLayered = suite("MySQL module")(
26 |     test("Transaction returns the last value") {
27 | val query = select(customerId) from customers
28 |
29 | val result = transact(
30 | query.run.runCount *> query.run.runCount
31 | )
32 |
33 | val assertion =
34 | result
35 | .map(count => assertTrue(count == 5))
36 | .orDie
37 |
38 | assertion.mapErrorCause(cause => Cause.stackless(cause.untraced))
39 | },
40 |     test("Transaction failed and didn't delete rows") {
41 | val query = select(customerId) from customers
42 | val deleteQuery = deleteFrom(customers).where(verified === false)
43 |
44 | val result = (for {
45 | allCustomersCount <- execute(query).map(identity[UUID](_)).runCount
46 | _ <- transact(
47 | deleteQuery.run *> ZIO.fail(new Exception("this is error")) *> query.run.runCount
48 | ).catchAllCause(_ => ZIO.unit)
49 | remainingCustomersCount <- execute(query).map(identity[UUID](_)).runCount
50 | } yield (allCustomersCount, remainingCustomersCount))
51 |
52 | assertZIO(result)(equalTo((5L, 5L))).mapErrorCause(cause => Cause.stackless(cause.untraced))
53 | },
54 | test("Transaction succeeded and deleted rows") {
55 | val query = select(customerId) from customers
56 | val deleteQuery = deleteFrom(customers).where(verified === false)
57 |
58 | val tx = deleteQuery.run
59 |
60 | val result = (for {
61 | allCustomersCount <- execute(query).map(identity[UUID](_)).runCount
62 | _ <- transact(tx)
63 | remainingCustomersCount <- execute(query).map(identity[UUID](_)).runCount
64 | } yield (allCustomersCount, remainingCustomersCount))
65 |
66 | assertZIO(result)(equalTo((5L, 4L))).mapErrorCause(cause => Cause.stackless(cause.untraced))
67 | }
68 | ) @@ sequential
69 | }
70 |
--------------------------------------------------------------------------------
/oracle/src/main/scala/zio/sql/oracle/OracleJdbcModule.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.oracle
2 |
3 | import java.sql.Timestamp
4 | import java.time._
5 | import java.util.UUID
6 |
7 | import zio.schema.StandardType._
8 | import zio.sql.{ Jdbc, SqlParameter }
9 |
10 | trait OracleJdbcModule extends OracleRenderModule with Jdbc {
11 |
12 | override def setParam(param: SqlParameter, jdbcIndex: Int): java.sql.PreparedStatement => Unit = ps =>
13 | param._type match {
14 | case BigDecimalType => ps.setBigDecimal(jdbcIndex, param.value.asInstanceOf[java.math.BigDecimal])
15 | case InstantType => ps.setTimestamp(jdbcIndex, Timestamp.from(param.value.asInstanceOf[Instant]))
16 | case ByteType => ps.setByte(jdbcIndex, param.value.asInstanceOf[Byte])
17 | case CharType => ps.setString(jdbcIndex, String.valueOf(param.value.asInstanceOf[Char]))
18 | case IntType => ps.setInt(jdbcIndex, param.value.asInstanceOf[Int])
19 | case MonthDayType => ps.setString(jdbcIndex, param.value.toString())
20 | case BinaryType => ps.setString(jdbcIndex, param.value.toString())
21 | case MonthType => ps.setString(jdbcIndex, param.value.toString())
22 | case LocalDateTimeType => ps.setObject(jdbcIndex, param.value.asInstanceOf[LocalDateTime])
23 | case UnitType => ps.setObject(jdbcIndex, null)
24 | case YearMonthType => ps.setString(jdbcIndex, param.value.toString())
25 | case DoubleType => ps.setDouble(jdbcIndex, param.value.asInstanceOf[Double])
26 | case YearType => ps.setString(jdbcIndex, param.value.toString())
27 | case OffsetDateTimeType => ps.setObject(jdbcIndex, param.value.asInstanceOf[OffsetDateTime])
28 | case ZonedDateTimeType =>
29 | ps.setObject(jdbcIndex, Timestamp.from(param.value.asInstanceOf[ZonedDateTime].toInstant))
30 | case BigIntegerType => ps.setLong(jdbcIndex, param.value.asInstanceOf[BigInt].longValue)
31 |
32 | // Oracle doesn't natively support UUID
33 | case UUIDType => ps.setString(jdbcIndex, param.value.asInstanceOf[UUID].toString())
34 | case ZoneOffsetType => ps.setString(jdbcIndex, param.value.toString())
35 | case ShortType => ps.setShort(jdbcIndex, param.value.asInstanceOf[Short])
36 | case LocalTimeType => ps.setObject(jdbcIndex, param.value.asInstanceOf[LocalTime])
37 | case OffsetTimeType => ps.setObject(jdbcIndex, param.value.asInstanceOf[OffsetTime])
38 | case LongType => ps.setLong(jdbcIndex, param.value.asInstanceOf[Long])
39 | case StringType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
40 | case PeriodType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
41 | case ZoneIdType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
42 | case LocalDateType => ps.setObject(jdbcIndex, param.value.asInstanceOf[LocalDate])
43 | case BoolType => ps.setBoolean(jdbcIndex, param.value.asInstanceOf[Boolean])
44 | case DayOfWeekType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
45 | case FloatType => ps.setFloat(jdbcIndex, param.value.asInstanceOf[Float])
46 | case DurationType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
47 | }
48 |
49 | }
50 |
--------------------------------------------------------------------------------
/oracle/src/main/scala/zio/sql/oracle/OracleSqlModule.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.oracle
2 |
3 | import zio.sql.Sql
4 | import java.time.YearMonth
5 | import java.sql.ResultSet
6 | import scala.util.Try
7 | import java.time.Duration
8 | import zio.sql.select._
9 | import zio.sql.expr._
10 | import zio.sql.typetag._
11 |
12 | trait OracleSqlModule extends Sql { self =>
13 |
14 | trait OracleTypeTag[+A] extends TypeTag.TypeTagExtension[A]
15 |
16 | object OracleTypeTag {
17 | implicit case object TYearMonth extends OracleTypeTag[YearMonth] {
18 | def decode(column: Int, resultSet: ResultSet): Either[DecodingError, YearMonth] =
19 | Try(YearMonth.parse(resultSet.getString(column)))
20 | .fold(
21 | _ => Left(DecodingError.UnexpectedNull(column)),
22 | r => Right(r)
23 | )
24 | }
25 | implicit case object TDuration extends OracleTypeTag[Duration] {
26 | def decode(column: Int, resultSet: ResultSet): Either[DecodingError, Duration] =
27 | Try(Duration.parse(resultSet.getString(column)))
28 | .fold(
29 | _ => Left(DecodingError.UnexpectedNull(column)),
30 | r => Right(r)
31 | )
32 | }
33 | }
34 |
35 | object OracleFunctionDef {
36 | val Ltrim2 = FunctionDef[(String, String), String](FunctionName("ltrim"))
37 | val Rtrim2 = FunctionDef[(String, String), String](FunctionName("rtrim"))
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/oracle/src/test/resources/logback.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/oracle/src/test/scala/zio/sql/oracle/CustomFunctionDefSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.oracle
2 |
3 | import zio.test.Assertion._
4 | import zio.test.TestAspect.timeout
5 | import zio.test._
6 | import zio._
7 |
8 | object CustomFunctionDefSpec extends OracleRunnableSpec with DualSchema {
9 | import OracleFunctionDef._
10 |
11 | import Dual._
12 |
13 | override def specLayered = suite("Oracle FunctionDef")(
14 | test("ltrim2") {
15 | assertZIO(execute(select(Ltrim2("$## foo$#", "#$")).from(dual)).runHead.some)(
16 | equalTo(" foo$#")
17 | )
18 | },
19 | test("rtrim2") {
20 | assertZIO(execute(select(Rtrim2("$#foo $##", "#$")).from(dual)).runHead.some)(
21 | equalTo("$#foo ")
22 | )
23 | }
24 | ) @@ timeout(5.minutes)
25 | }
26 |
--------------------------------------------------------------------------------
/oracle/src/test/scala/zio/sql/oracle/DualSchema.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.oracle
2 |
3 | import zio.schema.{ DeriveSchema, Schema }
4 | import zio.sql.table._
5 |
6 | trait DualSchema {
7 | object Dual {
8 | case class Dual(dummy: String)
9 |
10 | implicit val dummySchema: Schema.CaseClass1[String, Dual] = DeriveSchema.gen[Dual]
11 | val dual = Table.defineTable[Dual]
12 | val dummy = dual.columns
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/oracle/src/test/scala/zio/sql/oracle/OracleRunnableSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.oracle
2 |
3 | import com.dimafeng.testcontainers.{ JdbcDatabaseContainer, OracleContainer, SingleContainer }
4 | import org.testcontainers.utility.DockerImageName
5 | import zio.sql.JdbcRunnableSpec
6 |
7 | trait OracleRunnableSpec extends JdbcRunnableSpec with OracleJdbcModule {
8 |
9 | override protected def getContainer: SingleContainer[_] with JdbcDatabaseContainer =
10 | new OracleContainer(
11 | dockerImageName = DockerImageName.parse("gvenzl/oracle-xe")
12 | ).configure { container =>
13 | container.withInitScript("shop_schema.sql")
14 | ()
15 | }
16 |
17 | }
18 |
--------------------------------------------------------------------------------
/oracle/src/test/scala/zio/sql/oracle/ShopSchema.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.oracle
2 |
3 | import java.math.BigDecimal
4 | import java.util.UUID
5 | import java.time._
6 | import zio.Chunk
7 | import zio.schema.{ DeriveSchema, Schema }
8 | import zio.sql.table._
9 |
10 | trait ShopSchema extends OracleSqlModule { self =>
11 |
12 | object Customers {
13 |
14 | case class Customers(id: UUID, dob: LocalDate, first_name: String, last_name: String, verified: Boolean)
15 |
16 | implicit val customerSchema: Schema.CaseClass5[UUID, LocalDate, String, String, Boolean, Customers] =
17 | DeriveSchema.gen[Customers]
18 |
19 | val customers = Table.defineTableSmart[Customers]
20 |
21 | val (customerId, dob, fName, lName, verified) = customers.columns
22 | }
23 | object Orders {
24 | case class Order(id: UUID, customerId: UUID, orderDate: LocalDate)
25 |
26 | implicit val orderSchema: Schema.CaseClass3[UUID, UUID, LocalDate, Order] = DeriveSchema.gen[Order]
27 |
28 | val orders = Table.defineTableSmart[Order]
29 |
30 | val (orderId, fkCustomerId, orderDate) = orders.columns
31 | }
32 |
33 | object ProductPrices {
34 | case class ProductPrice(productId: UUID, effective: LocalDate, price: BigDecimal)
35 | implicit val productPriceSchema: Schema.CaseClass3[UUID, LocalDate, BigDecimal, ProductPrice] =
36 | DeriveSchema.gen[ProductPrice]
37 |
38 | val productPrices = Table.defineTableSmart[ProductPrice]
39 |
40 | val (productPricesOrderId, effectiveDate, productPrice) = productPrices.columns
41 | }
42 |
43 | object OrderDetailsSchema {
44 | case class OrderDetails(orderId: UUID, productId: UUID, quantity: Int, unitPrice: BigDecimal)
45 |
46 | implicit val orderDetailsSchema: Schema.CaseClass4[UUID, UUID, Int, BigDecimal, OrderDetails] =
47 | DeriveSchema.gen[OrderDetails]
48 |
49 | val orderDetails = Table.defineTableSmart[OrderDetails]
50 |
51 | val (orderDetailsOrderId, orderDetailsProductId, quantity, unitPrice) = orderDetails.columns
52 | }
53 |
54 | object AllTypes {
55 |
56 | case class AllType(
57 | id: UUID,
58 | bytearray: Chunk[Byte],
59 | bigdecimal: BigDecimal,
60 | boolean_ : Boolean,
61 | char_ : Char,
62 | double_ : Double,
63 | float_ : Float,
64 | instant: Instant,
65 | int_ : Int,
66 | optional_int: Option[Int],
67 | localdate: LocalDate,
68 | localdatetime: LocalDateTime,
69 | localtime: LocalTime,
70 | long_ : Long,
71 | offsetdatetime: OffsetDateTime,
72 | offsettime: OffsetTime,
73 | short: Short,
74 | string: String,
75 | uuid: UUID,
76 | zoneddatetime: ZonedDateTime
77 | )
78 |
79 | implicit val alTypesSchema: Schema.CaseClass20[
80 | UUID,
81 | Chunk[Byte],
82 | BigDecimal,
83 | Boolean,
84 | Char,
85 | Double,
86 | Float,
87 | Instant,
88 | Int,
89 | Option[Int],
90 | LocalDate,
91 | LocalDateTime,
92 | LocalTime,
93 | Long,
94 | OffsetDateTime,
95 | OffsetTime,
96 | Short,
97 | String,
98 | UUID,
99 | ZonedDateTime,
100 | AllType
101 | ] = DeriveSchema.gen[AllType]
102 |
103 | val allTypes = Table.defineTableSmart[AllType]
104 |
105 | val (
106 | id,
107 | bytearrayCol,
108 | bigdecimalCol,
109 | booleanCol,
110 | charCol,
111 | doubleCol,
112 | floatCol,
113 | instantCol,
114 | intCol,
115 | optionalIntCol,
116 | localdateCol,
117 | localdatetimeCol,
118 | localtimeCol,
119 | longCol,
120 | offsetdatetimeCol,
121 | offsettimeCol,
122 | shortCol,
123 | stringCol,
124 | uuidCol,
125 | zonedDatetimeCol
126 | ) = allTypes.columns
127 | }
128 | }
129 |
--------------------------------------------------------------------------------
/postgres/src/main/scala/zio/sql/postgresql/PostgresJdbcModule.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.postgresql
2 |
3 | import java.sql.Timestamp
4 | import java.time._
5 | import java.util.UUID
6 |
7 | import zio.schema.StandardType._
8 | import zio.sql.{ Jdbc, SqlParameter }
9 |
10 | trait PostgresJdbcModule extends PostgresRenderModule with Jdbc {
11 |
12 | override def setParam(param: SqlParameter, jdbcIndex: Int): java.sql.PreparedStatement => Unit = ps =>
13 | param._type match {
14 | case BigDecimalType => ps.setBigDecimal(jdbcIndex, param.value.asInstanceOf[java.math.BigDecimal])
15 | case InstantType => ps.setTimestamp(jdbcIndex, Timestamp.from(param.value.asInstanceOf[Instant]))
16 | case ByteType => ps.setByte(jdbcIndex, param.value.asInstanceOf[Byte])
17 | case CharType => ps.setString(jdbcIndex, String.valueOf(param.value.asInstanceOf[Char]))
18 | case IntType => ps.setInt(jdbcIndex, param.value.asInstanceOf[Int])
19 | case MonthDayType => ps.setString(jdbcIndex, param.value.toString())
20 | case BinaryType => ps.setString(jdbcIndex, param.value.toString())
21 | case MonthType => ps.setString(jdbcIndex, param.value.toString())
22 | case LocalDateTimeType => ps.setObject(jdbcIndex, param.value.asInstanceOf[LocalDateTime])
23 | case UnitType => ps.setObject(jdbcIndex, null)
24 | case YearMonthType => ps.setString(jdbcIndex, param.value.toString())
25 | case DoubleType => ps.setDouble(jdbcIndex, param.value.asInstanceOf[Double])
26 | case YearType => ps.setString(jdbcIndex, param.value.toString())
27 | case OffsetDateTimeType => ps.setObject(jdbcIndex, param.value.asInstanceOf[OffsetDateTime])
28 | case ZonedDateTimeType =>
29 | ps.setObject(jdbcIndex, Timestamp.from(param.value.asInstanceOf[ZonedDateTime].toInstant))
30 | case BigIntegerType => ps.setLong(jdbcIndex, param.value.asInstanceOf[BigInt].longValue)
31 | case UUIDType => ps.setObject(jdbcIndex, param.value.asInstanceOf[UUID])
32 | case ZoneOffsetType => ps.setString(jdbcIndex, param.value.toString())
33 | case ShortType => ps.setShort(jdbcIndex, param.value.asInstanceOf[Short])
34 | case LocalTimeType => ps.setObject(jdbcIndex, param.value.asInstanceOf[LocalTime])
35 | case OffsetTimeType => ps.setObject(jdbcIndex, param.value.asInstanceOf[OffsetTime])
36 | case LongType => ps.setLong(jdbcIndex, param.value.asInstanceOf[Long])
37 | case StringType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
38 | case PeriodType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
39 | case ZoneIdType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
40 | case LocalDateType => ps.setObject(jdbcIndex, param.value.asInstanceOf[LocalDate])
41 | case BoolType => ps.setBoolean(jdbcIndex, param.value.asInstanceOf[Boolean])
42 | case DayOfWeekType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
43 | case FloatType => ps.setFloat(jdbcIndex, param.value.asInstanceOf[Float])
44 | case DurationType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
45 | }
46 | }
47 |
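The setParam implementations in this repository (Postgres here, Oracle and SQL Server elsewhere) all do the same job: map a zio-schema StandardType to the matching JDBC PreparedStatement setter for a single parameter. When a statement is bound, such a function is applied once per parameter using JDBC's 1-based indices. A minimal sketch of that binding loop (the BindParams object and bindAll helper are illustrative, not part of the module):

import java.sql.PreparedStatement
import zio.sql.SqlParameter

object BindParams {
  // Apply a dialect's setParam to every parameter of a prepared statement,
  // using JDBC's 1-based parameter indices.
  def bindAll(ps: PreparedStatement, params: List[SqlParameter])(
    setParam: (SqlParameter, Int) => PreparedStatement => Unit
  ): Unit =
    params.zipWithIndex.foreach { case (param, i) => setParam(param, i + 1)(ps) }
}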
--------------------------------------------------------------------------------
/postgres/src/test/resources/logback.xml:
--------------------------------------------------------------------------------
1 | <configuration>
2 |
3 |     <!-- Console appender around the encoder pattern used by these tests.
4 |          The appender name and root level below are assumed defaults. -->
5 |     <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
6 |         <encoder>
7 |             <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n</pattern>
8 |         </encoder>
9 |     </appender>
10 |
11 |     <root level="INFO">
12 |         <appender-ref ref="STDOUT" />
13 |     </root>
14 |
15 | </configuration>
--------------------------------------------------------------------------------
/postgres/src/test/scala/zio/sql/postgresql/AgregationSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.postgresql
2 |
3 | import zio.test.TestAspect._
4 | import zio.test._
5 | import zio.sql.expr.AggregationDef._
6 | import java.math.{ BigDecimal, RoundingMode }
7 | import java.util.UUID
8 |
9 | object AgregationSpec extends PostgresRunnableSpec with DbSchema {
10 |
11 | import OrderDetailsSchema._
12 |
13 | override def specLayered =
14 | suite("Postgres module with aggregate functions SumInt, SumDec and AvgDec")(
15 | test("Can aggregate columns with SumInt(Int column) and SumDec(BigDecimal column)") {
16 |
17 | val query = select((SumDec(unitPrice) as "totalAmount"), (SumInt(quantity) as "soldQuantity"))
18 | .from(orderDetails)
19 | .where(orderDetailsProductId === UUID.fromString("7368ABF4-AED2-421F-B426-1725DE756895"))
20 |
21 | val result = execute(query).runCollect.map(_.toList).head
22 | for {
23 | r <- result
24 | } yield assertTrue(r._1 == BigDecimal.valueOf(215.99)) && assertTrue(r._2 == 40)
25 | },
26 | test("Can aggregate columns of type money with AvgDec(BigDecimal column)") {
27 | val query = select((AvgDec(unitPrice) as "AverageAmount"))
28 | .from(orderDetails)
29 | .where(orderDetailsProductId === UUID.fromString("7368ABF4-AED2-421F-B426-1725DE756895"))
30 |
31 | val result = execute(query).runCollect.map(_.toList).head
32 | for {
33 | r <- result
34 | } yield assertTrue(r.setScale(4, RoundingMode.CEILING) == BigDecimal.valueOf(10.7995))
35 | }
36 | ) @@ sequential
37 |
38 | }
39 |
--------------------------------------------------------------------------------
/postgres/src/test/scala/zio/sql/postgresql/DbSchema.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.postgresql
2 |
3 | import java.time.{ LocalDate, ZonedDateTime }
4 | import java.util.UUID
5 | import zio.schema.{ DeriveSchema, Schema }
6 | import java.math.BigDecimal
7 | import zio.sql.table._
8 | import zio.sql.select._
9 |
10 | trait DbSchema extends PostgresJdbcModule { self =>
11 |
12 | object Cities {
13 | case class City(id: Int, name: String, population: Int, area: Float, link: Option[String])
14 | case class MetroSystem(id: Int, cityId: Int, name: String, dailyRidership: Int)
15 | case class MetroLine(id: Int, systemId: Int, name: String, stationCount: Int, trackType: Int)
16 |
17 | implicit val citySchema: Schema.CaseClass5[Int, String, Int, Float, Option[String], City] = DeriveSchema.gen[City]
18 |
19 | val city = Table.defineTable[City]
20 | val (cityId, cityName, population, area, link) = city.columns
21 |
22 | implicit val metroSystemSchema: Schema.CaseClass4[Int, Int, String, Int, MetroSystem] =
23 | DeriveSchema.gen[MetroSystem]
24 |
25 | val metroSystem = Table.defineTable[MetroSystem]
26 |
27 | val (metroSystemId, cityIdFk, metroSystemName, dailyRidership) = metroSystem.columns
28 |
29 | implicit val metroLineSchema: Schema.CaseClass5[Int, Int, String, Int, Int, MetroLine] = DeriveSchema.gen[MetroLine]
30 |
31 | val metroLine = Table.defineTable[MetroLine]
32 |
33 | val (metroLineId, systemId, metroLineName, stationCount, trackType) = metroLine.columns
34 | }
35 |
36 | object DerivedTables {
37 | import OrdersSchema._
38 | import CustomerSchema._
39 | import OrderDetailsSchema._
40 |
41 | val orderDetailsDerived =
42 | select(orderDetailsOrderId, orderDetailsProductId, unitPrice).from(orderDetails).asTable("derived")
43 |
44 | val (derivedOrderId, derivedProductId, derivedUnitPrice) = orderDetailsDerived.columns
45 |
46 | val orderDateDerivedTable = customers
47 | .subselect(orderDate)
48 | .from(orders)
49 | .limit(1)
50 | .where(customerId === fkCustomerId)
51 | .orderBy(Ordering.Desc(orderDate))
52 | .asTable("derived")
53 |
54 | val orderDateDerived = orderDateDerivedTable.columns
55 | }
56 |
57 | object CustomerSchema {
58 | case class Customer(
59 | id: UUID,
60 | dob: LocalDate,
61 | firstName: String,
62 | lastName: String,
63 | verified: Boolean,
64 | createdTimestampString: String,
65 | createdTimestamp: ZonedDateTime
66 | )
67 |
68 | implicit val custommerSchema
69 | : Schema.CaseClass7[UUID, LocalDate, String, String, Boolean, String, ZonedDateTime, Customer] =
70 | DeriveSchema.gen[Customer]
71 |
72 | val customers = Table.defineTableSmart[Customer]
73 |
74 | val (customerId, dob, fName, lName, verified, createdString, createdTimestamp) =
75 | customers.columns
76 |
77 | val ALL = customerId ++ dob ++ fName ++ lName ++ verified ++ createdString ++ createdTimestamp
78 | }
79 |
80 | object OrdersSchema {
81 | case class Orders(id: UUID, customerId: UUID, orderDate: LocalDate)
82 |
83 | implicit val orderSchema: Schema.CaseClass3[UUID, UUID, LocalDate, Orders] = DeriveSchema.gen[Orders]
84 |
85 | val orders = Table.defineTableSmart[Orders]
86 |
87 | val (orderId, fkCustomerId, orderDate) = orders.columns
88 | }
89 |
90 | object ProductSchema {
91 | case class Products(id: UUID, name: String, description: String, imageUrl: String)
92 |
93 | implicit val productSchema: Schema.CaseClass4[UUID, String, String, String, Products] = DeriveSchema.gen[Products]
94 |
95 | val products = Table.defineTableSmart[Products]
96 |
97 | val (productId, productName, description, imageURL) = products.columns
98 | }
99 |
100 | object ProductPrices {
101 | case class ProductPrice(productId: UUID, effective: LocalDate, price: BigDecimal)
102 | implicit val productPriceSchema: Schema.CaseClass3[UUID, LocalDate, BigDecimal, ProductPrice] =
103 | DeriveSchema.gen[ProductPrice]
104 |
105 | val productPrices = Table.defineTableSmart[ProductPrice]
106 |
107 | val (productPricesOrderId, effectiveDate, productPrice) = productPrices.columns
108 | }
109 |
110 | object OrderDetailsSchema {
111 | case class OrderDetails(orderId: UUID, productId: UUID, quantity: Int, unitPrice: BigDecimal)
112 |
113 | implicit val orderDetailsSchema: Schema.CaseClass4[UUID, UUID, Int, BigDecimal, OrderDetails] =
114 | DeriveSchema.gen[OrderDetails]
115 |
116 | val orderDetails = Table.defineTableSmart[OrderDetails]
117 |
118 | val (orderDetailsOrderId, orderDetailsProductId, quantity, unitPrice) = orderDetails.columns
119 | }
120 |
121 | object PersonsSchema {
122 | case class Persons(id: UUID, name: Option[String], birthDate: Option[LocalDate])
123 |
124 | implicit val personsSchema: Schema.CaseClass3[UUID, Option[String], Option[LocalDate], Persons] =
125 | DeriveSchema.gen[Persons]
126 |
127 | val persons = Table.defineTableSmart[Persons]
128 |
129 | val (personsId, personsName, birthDate) = persons.columns
130 | }
131 |
132 | object MoviesSchema {
133 |
134 | case class Movies(id: Int, rating: Option[Int])
135 |
136 | implicit val moviesSchema: Schema.CaseClass2[Int, Option[Int], Movies] = DeriveSchema.gen[Movies]
137 |
138 | val movies = Table.defineTableSmart[Movies]
139 |
140 | val (id, rating) = movies.columns
141 |
142 | }
143 |
144 | }
145 |
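The derived tables above expose their columns as ordinary expressions, so they can be selected from like any base table. A minimal sketch of a query over orderDetailsDerived, written inside a scope that mixes in DbSchema the way the Postgres specs below do (the DerivedTableExample trait and the query are illustrative only):

package zio.sql.postgresql

import zio.sql.select.Ordering

// Sketch only: query a derived table defined in DbSchema above just like a base table.
trait DerivedTableExample extends DbSchema {
  import DerivedTables._

  val detailsByPrice =
    select(derivedProductId, derivedUnitPrice)
      .from(orderDetailsDerived)
      .orderBy(Ordering.Desc(derivedUnitPrice))
}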
--------------------------------------------------------------------------------
/postgres/src/test/scala/zio/sql/postgresql/DeleteBatchSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.postgresql
2 |
3 | import zio.Cause
4 | import zio.test.Assertion._
5 | import zio.test.TestAspect._
6 | import zio.test._
7 | import zio.sql.delete._
8 | import java.time.{ LocalDate, ZonedDateTime }
9 | import java.util.UUID
10 |
11 | object DeleteBatchSpec extends PostgresRunnableSpec with DbSchema {
12 |
13 | import CustomerSchema._
14 |
15 | private def delete_(c: Customer): Delete[customers.TableType] =
16 | deleteFrom(customers).where((verified.isTrue) && (customerId === c.id))
17 |
18 | override def specLayered = suite("Postgres module batch delete")(
19 | test("Can delete more than one customer from a single table with a condition") {
20 | val query = deleteFrom(customers).where(verified.isNotTrue)
21 |
22 | val result = executeBatchDelete(List(query))
23 |
24 | val assertion = for {
25 | r <- result
26 | } yield assert(r)(equalTo(1))
27 |
28 | assertion.mapErrorCause(cause => Cause.stackless(cause.untraced))
29 | },
30 | test("Can insert more than one customer into a single table prior to deleting them") {
31 | val id1 = UUID.randomUUID()
32 | val id2 = UUID.randomUUID()
33 | val id3 = UUID.randomUUID()
34 | val id4 = UUID.randomUUID()
35 | val c1 = Customer(
36 | id1,
37 | LocalDate.now(),
38 | "fnameCustomer1",
39 | "lnameCustomer1",
40 | verified = true,
41 | LocalDate.now().toString,
42 | ZonedDateTime.now()
43 | )
44 | val c2 = Customer(
45 | id2,
46 | LocalDate.now(),
47 | "fnameCustomer2",
48 | "lnameCustomer2",
49 | verified = true,
50 | LocalDate.now().toString,
51 | ZonedDateTime.now()
52 | )
53 | val c3 = Customer(
54 | id3,
55 | LocalDate.now(),
56 | "fnameCustomer3",
57 | "lnameCustomer3",
58 | verified = true,
59 | LocalDate.now().toString,
60 | ZonedDateTime.now()
61 | )
62 | val c4 = Customer(
63 | id4,
64 | LocalDate.now(),
65 | "fnameCustomer4",
66 | "lnameCustomer4",
67 | verified = false,
68 | LocalDate.now().toString,
69 | ZonedDateTime.now()
70 | )
71 |
72 | val allCustomer = List(c1, c2, c3, c4)
73 | val data = allCustomer.map(Customer.unapply(_).get)
74 | val insertStmt = insertInto(customers)(ALL).values(data)
75 | val insertResult = execute(insertStmt)
76 |
77 | val selectStmt = select(ALL).from(customers)
78 | val selectResult = execute(selectStmt.to((Customer.apply _).tupled)).runCollect
79 |
80 | val expected = 8 // 4 customers are in the db already and we insert an additional 4 in this test
81 |
82 | val assertion = for {
83 | _ <- insertResult
84 | customers <- selectResult
85 | deletes = customers.toList.map(delete_)
86 | result <- executeBatchDelete(deletes)
87 | } yield assert(result)(equalTo(expected))
88 |
89 | assertion.mapErrorCause(cause => Cause.stackless(cause.untraced))
90 |
91 | }
92 | ) @@ sequential
93 |
94 | }
95 |
--------------------------------------------------------------------------------
/postgres/src/test/scala/zio/sql/postgresql/DeleteSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.postgresql
2 |
3 | import zio.test._
4 | import java.util.UUID
5 | import java.time.LocalDate
6 | import java.time.ZonedDateTime
7 | import zio.schema.{ DeriveSchema, Schema }
8 | import zio.sql.table._
9 |
10 | object DeleteSpec extends PostgresRunnableSpec {
11 |
12 | case class Customers(
13 | id: UUID,
14 | dob: LocalDate,
15 | firstName: String,
16 | lastName: String,
17 | verified: Boolean,
18 | createdTimestampString: String,
19 | createdTimestamp: ZonedDateTime
20 | )
21 |
22 | implicit val custommerSchema
23 | : Schema.CaseClass7[UUID, LocalDate, String, String, Boolean, String, ZonedDateTime, Customers] =
24 | DeriveSchema.gen[Customers]
25 |
26 | val customers = Table.defineTable[Customers]
27 |
28 | val (customerId, dob, fName, lName, verified, createdString, createdTimestamp) =
29 | customers.columns
30 |
31 | override def specLayered = suite("Postgres module delete")(
32 | test("Can delete from single table with a condition") {
33 | val query = deleteFrom(customers).where(verified.isNotTrue)
34 |
35 | val result = execute(query)
36 |
37 | for {
38 | r <- result
39 | } yield assertTrue(r == 1)
40 | }
41 | )
42 | }
43 |
--------------------------------------------------------------------------------
/postgres/src/test/scala/zio/sql/postgresql/InsertBatchSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.postgresql
2 |
3 | import zio.Cause
4 | import zio.test.Assertion._
5 | import zio.test._
6 |
7 | import java.time.{ LocalDate, ZonedDateTime }
8 | import java.util.UUID
9 |
10 | object InsertBatchSpec extends PostgresRunnableSpec with DbSchema {
11 |
12 | import CustomerSchema._
13 |
14 | override def specLayered = suite("Postgres module batch insert")(
15 | test("Can insert more than one customer into a table with a condition") {
16 | val id1 = UUID.randomUUID()
17 | val id2 = UUID.randomUUID()
18 | val id3 = UUID.randomUUID()
19 | val id4 = UUID.randomUUID()
20 | val c1 = Customer(
21 | id1,
22 | LocalDate.now(),
23 | "fnameCustomer1",
24 | "lnameCustomer1",
25 | verified = true,
26 | LocalDate.now().toString,
27 | ZonedDateTime.now()
28 | )
29 | val c2 = Customer(
30 | id2,
31 | LocalDate.now(),
32 | "fnameCustomer2",
33 | "lnameCustomer2",
34 | verified = true,
35 | LocalDate.now().toString,
36 | ZonedDateTime.now()
37 | )
38 | val c3 = Customer(
39 | id3,
40 | LocalDate.now(),
41 | "fnameCustomer3",
42 | "lnameCustomer3",
43 | verified = true,
44 | LocalDate.now().toString,
45 | ZonedDateTime.now()
46 | )
47 | val c4 = Customer(
48 | id4,
49 | LocalDate.now(),
50 | "fnameCustomer4",
51 | "lnameCustomer4",
52 | verified = false,
53 | LocalDate.now().toString,
54 | ZonedDateTime.now()
55 | )
56 |
57 | val allCustomer = List(c1, c2, c3, c4)
58 | val data = allCustomer.map(Customer.unapply(_).get)
59 | val query = insertInto(customers)(ALL).values(data)
60 |
61 | val resultInsert = execute(query)
62 |
63 | val insertAssertion = for {
64 | result <- resultInsert
65 | } yield assert(result)(equalTo(4))
66 | insertAssertion.mapErrorCause(cause => Cause.stackless(cause.untraced))
67 | }
68 | )
69 |
70 | }
71 |
--------------------------------------------------------------------------------
/postgres/src/test/scala/zio/sql/postgresql/PostgresRunnableSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.postgresql
2 |
3 | import com.dimafeng.testcontainers.{ JdbcDatabaseContainer, PostgreSQLContainer, SingleContainer }
4 | import org.testcontainers.utility.DockerImageName
5 | import zio.sql.JdbcRunnableSpec
6 |
7 | trait PostgresRunnableSpec extends JdbcRunnableSpec with PostgresJdbcModule {
8 |
9 | override protected def getContainer: SingleContainer[_] with JdbcDatabaseContainer =
10 | new PostgreSQLContainer(
11 | dockerImageNameOverride = Option("postgres:alpine").map(DockerImageName.parse)
12 | ).configure { a =>
13 | a.withInitScript("db_schema.sql")
14 | ()
15 | }
16 |
17 | }
18 |
--------------------------------------------------------------------------------
/postgres/src/test/scala/zio/sql/postgresql/TransactionSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.postgresql
2 |
3 | import zio._
4 | import zio.sql.update.Update
5 | import zio.test.Assertion._
6 | import zio.test._
7 | import zio.test.TestAspect.sequential
8 |
9 | import java.time.{ LocalDate, ZonedDateTime }
10 | import java.util.UUID
11 |
12 | object TransactionSpec extends PostgresRunnableSpec with DbSchema {
13 |
14 | override val autoCommit = false
15 |
16 | import CustomerSchema._
17 |
18 | override def specLayered = suite("Postgres module")(
19 | test("Transaction returns the last value") {
20 | val query = select(customerId) from customers
21 |
22 | val result = transact(
23 | query.run.runCount *> query.run.runCount
24 | )
25 |
26 | val assertion = assertZIO(result)(equalTo(5L)).orDie
27 |
28 | assertion.mapErrorCause(cause => Cause.stackless(cause.untraced))
29 | },
30 | test("Transaction failed and didn't delete rows") {
31 | val query = select(customerId) from customers
32 | val deleteQuery = deleteFrom(customers).where(verified === false)
33 |
34 | val result = (for {
35 | allCustomersCount <- execute(query).runCount
36 | _ <- transact {
37 | deleteQuery.run *> ZIO.fail(new Exception("this is error")) *> query.run.runCount
38 | }.catchAllCause(_ => ZIO.unit)
39 | remainingCustomersCount <- execute(query).runCount
40 | } yield (allCustomersCount, remainingCustomersCount))
41 |
42 | assertZIO(result)(equalTo((5L, 5L))).mapErrorCause(cause => Cause.stackless(cause.untraced))
43 | },
44 | test("Transaction succeeded and deleted rows") {
45 | val deleteQuery = deleteFrom(customers).where(verified === false)
46 | val id1 = UUID.randomUUID()
47 | val id2 = UUID.randomUUID()
48 |
49 | val c1 = Customer(
50 | id1,
51 | LocalDate.now(),
52 | "fnameCustomer1",
53 | "lnameCustomer1",
54 | verified = true,
55 | LocalDate.now().toString,
56 | ZonedDateTime.now()
57 | )
58 | val c2 = Customer(
59 | id2,
60 | LocalDate.now(),
61 | "fnameCustomer2",
62 | "lnameCustomer2",
63 | verified = true,
64 | LocalDate.now().toString,
65 | ZonedDateTime.now()
66 | )
67 | val allCustomer = List(c1, c2)
68 | val data = allCustomer.map(Customer.unapply(_).get)
69 | val insertStmt = insertInto(customers)(ALL).values(data)
70 | val updateStmt = allCustomer.map(update_)
71 |
72 | val batchResult = for {
73 | deleted <- deleteQuery.run
74 | inserted <- insertStmt.run
75 | updated <- updateStmt.run
76 | } yield deleted + inserted + updated
77 |
78 | val result = for {
79 | tx <- transact(batchResult)
80 | } yield tx
81 | assertZIO(result)(equalTo(5)).mapErrorCause(cause => Cause.stackless(cause.untraced))
82 | },
83 | test("Transaction failed and no row was inserted, updated, or deleted") {
84 | val deleteQuery = deleteFrom(customers).where(verified === false)
85 | val id1 = UUID.randomUUID()
86 |
87 | val c1 = Customer(
88 | id1,
89 | LocalDate.now(),
90 | "fnameCustomer1",
91 | "lnameCustomer1",
92 | verified = true,
93 | LocalDate.now().toString,
94 | ZonedDateTime.now()
95 | )
96 | val c2 = Customer(
97 | id1,
98 | LocalDate.now(),
99 | "fnameCustomer2",
100 | "lnameCustomer2",
101 | verified = true,
102 | LocalDate.now().toString,
103 | ZonedDateTime.now()
104 | )
105 | val allCustomer = List(c1, c2)
106 | val data = allCustomer.map(Customer.unapply(_).get)
107 | val insertStmt = insertInto(customers)(ALL).values(data)
108 | val updateStmt = allCustomer.map(update_)
109 |
110 | val batchResult = for {
111 | deleted <- deleteQuery.run
112 | _ <- ZIO.fail(insertStmt.run).exit
113 | updated <- updateStmt.run
114 |
115 | } yield deleted + updated
116 |
117 | val result = (for {
118 | tx <- transact(batchResult)
119 | } yield tx).flip.exit
120 | assertZIO(result)(fails((anything)))
121 |
122 | }
123 | ) @@ sequential
124 |
125 | private def update_(c: Customer): Update[customers.TableType] =
126 | update(customers)
127 | .set(verified, !c.verified)
128 | .where(customerId === c.id)
129 | }
130 |
--------------------------------------------------------------------------------
/postgres/src/test/scala/zio/sql/postgresql/UpdateBatchSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.postgresql
2 |
3 | import zio.Cause
4 | import zio.test.Assertion._
5 | import zio.test._
6 | import zio.sql.update._
7 | import java.time.{ LocalDate, ZonedDateTime }
8 | import java.util.UUID
9 |
10 | object UpdateBatchSpec extends PostgresRunnableSpec with DbSchema {
11 |
12 | import CustomerSchema._
13 |
14 | private def update_(c: Customer): Update[customers.TableType] =
15 | update(customers)
16 | .set(verified, !c.verified)
17 | .where(customerId === c.id)
18 |
19 | override def specLayered = suite("Postgres module batch update")(
20 | test("Can update more than one customer from a single table with a condition") {
21 | val id1 = UUID.randomUUID()
22 | val id2 = UUID.randomUUID()
23 | val id3 = UUID.randomUUID()
24 | val id4 = UUID.randomUUID()
25 | val c1 = Customer(
26 | id1,
27 | LocalDate.now(),
28 | "fnameCustomer1",
29 | "lnameCustomer1",
30 | verified = true,
31 | LocalDate.now().toString,
32 | ZonedDateTime.now()
33 | )
34 | val c2 = Customer(
35 | id2,
36 | LocalDate.now(),
37 | "fnameCustomer2",
38 | "lnameCustomer2",
39 | verified = true,
40 | LocalDate.now().toString,
41 | ZonedDateTime.now()
42 | )
43 | val c3 = Customer(
44 | id3,
45 | LocalDate.now(),
46 | "fnameCustomer3",
47 | "lnameCustomer3",
48 | verified = true,
49 | LocalDate.now().toString,
50 | ZonedDateTime.now()
51 | )
52 | val c4 = Customer(
53 | id4,
54 | LocalDate.now(),
55 | "fnameCustomer4",
56 | "lnameCustomer4",
57 | verified = false,
58 | LocalDate.now().toString,
59 | ZonedDateTime.now()
60 | )
61 |
62 | val allCustomer = List(c1, c2, c3, c4)
63 | val data = allCustomer.map(Customer.unapply(_).get)
64 | val query = insertInto(customers)(ALL).values(data)
65 |
66 | val resultInsert = execute(query)
67 |
68 | val insertAssertion = for {
69 | r <- resultInsert
70 | } yield assert(r)(equalTo(4))
71 | insertAssertion.mapErrorCause(cause => Cause.stackless(cause.untraced))
72 |
73 | val selectAll = select(ALL).from(customers)
74 | val result_ = execute(selectAll.to((Customer.apply _).tupled)).runCollect
75 |
76 | val assertion_ = for {
77 | x <- result_
78 | updated = x.toList.map(update_)
79 | result <- executeBatchUpdate(updated)
80 | } yield assert(result)(equalTo(5))
81 | assertion_.mapErrorCause(cause => Cause.stackless(cause.untraced))
82 | }
83 | )
84 |
85 | }
86 |
--------------------------------------------------------------------------------
/project/BuildHelper.scala:
--------------------------------------------------------------------------------
1 | import sbt._
2 | import sbt.Keys._
3 |
4 | import explicitdeps.ExplicitDepsPlugin.autoImport._
5 | import sbtcrossproject.CrossPlugin.autoImport._
6 | import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport._
7 | import sbtbuildinfo._
8 | import BuildInfoKeys._
9 | import scalafix.sbt.ScalafixPlugin.autoImport.scalafixSemanticdb
10 |
11 | object BuildHelper {
12 | val Scala212 = "2.12.19"
13 | val Scala213 = "2.13.13"
14 | val ScalaDotty = "3.3.3"
15 |
16 | def buildInfoSettings(packageName: String) =
17 | Seq(
18 | buildInfoKeys := Seq[BuildInfoKey](organization, moduleName, name, version, scalaVersion, sbtVersion, isSnapshot),
19 | buildInfoPackage := packageName
20 | )
21 |
22 | private val stdOptions = Seq(
23 | "-deprecation",
24 | "-encoding",
25 | "UTF-8",
26 | "-feature",
27 | "-unchecked",
28 | "-Xfatal-warnings"
29 | )
30 |
31 | private val std2xOptions = Seq(
32 | "-language:higherKinds",
33 | "-language:existentials",
34 | "-explaintypes",
35 | "-Yrangepos",
36 | "-Xlint:_,-missing-interpolator,-type-parameter-shadow,-infer-any",
37 | "-Ywarn-numeric-widen",
38 | "-Ywarn-value-discard"
39 | )
40 |
41 | private def propertyFlag(property: String, default: Boolean) =
42 | sys.props.get(property).map(_.toBoolean).getOrElse(default)
43 |
44 | private def optimizerOptions(optimize: Boolean) =
45 | if (optimize)
46 | Seq(
47 | "-opt:l:inline",
48 | "-opt-inline-from:zio.internal.**"
49 | )
50 | else Nil
51 |
52 | def extraOptions(scalaVersion: String, optimize: Boolean) =
53 | CrossVersion.partialVersion(scalaVersion) match {
54 | case Some((3, _)) =>
55 | Seq(
56 | "-language:implicitConversions",
57 | "-Xignore-scala2-macros",
58 | "-noindent"
59 | )
60 | case Some((2, 13)) =>
61 | Seq(
62 | "-Ywarn-unused:params,-implicits"
63 | ) ++ std2xOptions ++ optimizerOptions(optimize)
64 | case Some((2, 12)) =>
65 | Seq(
66 | "-opt-warnings",
67 | "-Ywarn-extra-implicit",
68 | "-Ywarn-unused:_,imports",
69 | "-Ywarn-unused:imports",
70 | "-Ypartial-unification",
71 | "-Yno-adapted-args",
72 | "-Ywarn-inaccessible",
73 | "-Ywarn-nullary-override",
74 | "-Ywarn-nullary-unit",
75 | "-Ywarn-unused:params,-implicits",
76 | "-Xfuture",
77 | "-Xsource:2.13",
78 | "-Xmax-classfile-name",
79 | "242"
80 | ) ++ std2xOptions ++ optimizerOptions(optimize)
81 | case _ => Seq.empty
82 | }
83 |
84 | def platformSpecificSources(platform: String, conf: String, baseDirectory: File)(versions: String*) =
85 | List("scala" :: versions.toList.map("scala-" + _): _*).map { version =>
86 | baseDirectory.getParentFile / platform.toLowerCase / "src" / conf / version
87 | }.filter(_.exists)
88 |
89 | def crossPlatformSources(scalaVer: String, platform: String, conf: String, baseDir: File) = {
90 | val versions = CrossVersion.partialVersion(scalaVer) match {
91 | case Some((2, 12)) =>
92 | List("2.12", "2.11+", "2.12+", "2.11-2.12", "2.12-2.13", "2.x")
93 | case Some((2, 13)) =>
94 | List("2.13", "2.11+", "2.12+", "2.13+", "2.12-2.13", "2.x")
95 | case Some((3, _)) =>
96 | List("dotty", "2.11+", "2.12+", "2.13+", "3.x")
97 | case _ =>
98 | Nil
99 | }
100 | platformSpecificSources(platform, conf, baseDir)(versions: _*)
101 | }
102 |
103 | val dottySettings = Seq(
104 | crossScalaVersions += ScalaDotty,
105 | scalacOptions ++= {
106 | if (scalaVersion.value == ScalaDotty)
107 | Seq("-noindent")
108 | else
109 | Seq()
110 | },
111 | Compile / doc / sources := {
112 | val old = (Compile / doc / sources).value
113 | if (scalaVersion.value == ScalaDotty) {
114 | Nil
115 | } else {
116 | old
117 | }
118 | },
119 | Test / parallelExecution := {
120 | val old = (Test / parallelExecution).value
121 | if (scalaVersion.value == ScalaDotty) {
122 | false
123 | } else {
124 | old
125 | }
126 | }
127 | )
128 |
129 | lazy val crossProjectSettings = Seq(
130 | Compile / unmanagedSourceDirectories ++= {
131 | val platform = crossProjectPlatform.value.identifier
132 | val baseDir = baseDirectory.value
133 | val scalaVer = scalaVersion.value
134 |
135 | crossPlatformSources(scalaVer, platform, "main", baseDir)
136 | },
137 | Test / unmanagedSourceDirectories ++= {
138 | val platform = crossProjectPlatform.value.identifier
139 | val baseDir = baseDirectory.value
140 | val scalaVer = scalaVersion.value
141 |
142 | crossPlatformSources(scalaVer, platform, "test", baseDir)
143 | }
144 | )
145 |
146 | def stdSettings(prjName: String) = Seq(
147 | name := s"$prjName",
148 | scalacOptions := stdOptions,
149 | crossScalaVersions := Seq(Scala213, Scala212),
150 | ThisBuild / scalaVersion := Scala213,
151 | scalacOptions := stdOptions ++ extraOptions(scalaVersion.value, optimize = !isSnapshot.value),
152 | resolvers ++= Resolver.sonatypeOssRepos("snapshots"),
153 | Test / parallelExecution := true,
154 | incOptions ~= (_.withLogRecompileOnMacro(false)),
155 | autoAPIMappings := true,
156 | unusedCompileDependenciesFilter -= moduleFilter("org.scala-js", "scalajs-library")
157 | )
158 |
159 | def macroExpansionSettings = Seq(
160 | scalacOptions ++= {
161 | CrossVersion.partialVersion(scalaVersion.value) match {
162 | case Some((2, 13)) => Seq("-Ymacro-annotations")
163 | case _ => Seq.empty
164 | }
165 | },
166 | libraryDependencies ++= {
167 | CrossVersion.partialVersion(scalaVersion.value) match {
168 | case Some((2, x)) if x <= 12 =>
169 | Seq(compilerPlugin(("org.scalamacros" % "paradise" % "2.1.1").cross(CrossVersion.full)))
170 | case _ => Seq.empty
171 | }
172 | }
173 | )
174 |
175 | implicit class ModuleHelper(p: Project) {
176 | def module: Project = p.in(file(p.id)).settings(stdSettings(p.id))
177 | }
178 | }
179 |
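These helpers are consumed from build.sbt: stdSettings supplies the cross-compiled compiler options, and the ModuleHelper syntax places a project in a directory named after its id and applies stdSettings to it. A minimal sketch of how a module could be declared in build.sbt (the newDialect and jdbc declarations here are illustrative, not the repository's actual build definition):

// build.sbt (sketch)
import BuildHelper._

lazy val jdbc = project.module

lazy val newDialect = project
  .module                          // project.in(file("newDialect")).settings(stdSettings("newDialect"))
  .settings(macroExpansionSettings)
  .dependsOn(jdbc)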
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=1.9.9
2 |
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.16.0")
2 | addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.3.2")
3 | addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.2")
4 | addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.7")
5 | addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.12.0")
6 | addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.11")
7 | addSbtPlugin("ch.epfl.scala" % "sbt-bloop" % "1.5.17")
8 | addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.12")
9 | addSbtPlugin("com.github.cb372" % "sbt-explicit-dependencies" % "0.3.1")
10 | addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.12.0")
11 | addSbtPlugin("org.typelevel" % "sbt-tpolecat" % "0.5.1")
12 | addSbtPlugin("dev.zio" % "zio-sbt-website" % "0.3.10")
13 |
--------------------------------------------------------------------------------
/sqlserver/src/main/scala/zio/sql/sqlserver/SqlServerJdbcModule.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.sqlserver
2 |
3 | import java.sql.Timestamp
4 | import java.time._
5 | import java.time.format.{ DateTimeFormatter, DateTimeFormatterBuilder }
6 | import java.util.UUID
7 |
8 | import zio.schema.StandardType._
9 | import zio.sql.{ Jdbc, SqlParameter }
10 |
11 | trait SqlServerJdbcModule extends SqlServerRenderModule with Jdbc {
12 |
13 | private val fmtDateTimeOffset = new DateTimeFormatterBuilder().parseCaseInsensitive
14 | .append(DateTimeFormatter.ISO_LOCAL_DATE_TIME)
15 | .appendOffset("+HH:MM", "Z")
16 | .toFormatter()
17 |
18 | override def setParam(param: SqlParameter, jdbcIndex: Int): java.sql.PreparedStatement => Unit = ps =>
19 | param._type match {
20 | case BigDecimalType => ps.setBigDecimal(jdbcIndex, param.value.asInstanceOf[java.math.BigDecimal])
21 | case InstantType => ps.setTimestamp(jdbcIndex, Timestamp.from(param.value.asInstanceOf[Instant]))
22 | case ByteType => ps.setByte(jdbcIndex, param.value.asInstanceOf[Byte])
23 | case CharType => ps.setString(jdbcIndex, String.valueOf(param.value.asInstanceOf[Char]))
24 | case IntType => ps.setInt(jdbcIndex, param.value.asInstanceOf[Int])
25 | case MonthDayType => ps.setString(jdbcIndex, param.value.toString())
26 | case BinaryType => ps.setBytes(jdbcIndex, param.value.asInstanceOf[Array[Byte]])
27 | case MonthType => ps.setString(jdbcIndex, param.value.toString())
28 | case LocalDateTimeType => ps.setObject(jdbcIndex, param.value.asInstanceOf[LocalDateTime])
29 | case UnitType => ps.setObject(jdbcIndex, null)
30 | case YearMonthType => ps.setString(jdbcIndex, param.value.toString())
31 | case DoubleType => ps.setDouble(jdbcIndex, param.value.asInstanceOf[Double])
32 | case YearType => ps.setString(jdbcIndex, param.value.toString())
33 | case OffsetDateTimeType => ps.setObject(jdbcIndex, param.value.asInstanceOf[OffsetDateTime])
34 | case ZonedDateTimeType =>
35 | ps.setString(jdbcIndex, fmtDateTimeOffset.format(param.value.asInstanceOf[ZonedDateTime]))
36 | case BigIntegerType => ps.setLong(jdbcIndex, param.value.asInstanceOf[BigInt].longValue)
37 | case UUIDType => ps.setString(jdbcIndex, param.value.asInstanceOf[UUID].toString())
38 | case ZoneOffsetType => ps.setString(jdbcIndex, param.value.toString())
39 | case ShortType => ps.setShort(jdbcIndex, param.value.asInstanceOf[Short])
40 | case LocalTimeType => ps.setObject(jdbcIndex, param.value.asInstanceOf[LocalTime])
41 | case OffsetTimeType => ps.setObject(jdbcIndex, param.value.asInstanceOf[OffsetTime])
42 | case LongType => ps.setLong(jdbcIndex, param.value.asInstanceOf[Long])
43 | case StringType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
44 | case PeriodType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
45 | case ZoneIdType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
46 | case LocalDateType => ps.setObject(jdbcIndex, param.value.asInstanceOf[LocalDate])
47 | case BoolType => ps.setBoolean(jdbcIndex, param.value.asInstanceOf[Boolean])
48 | case DayOfWeekType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
49 | case FloatType => ps.setFloat(jdbcIndex, param.value.asInstanceOf[Float])
50 | case DurationType => ps.setString(jdbcIndex, param.value.asInstanceOf[String])
51 | }
52 |
53 | }
54 |
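One notable difference from the Postgres module above: ZonedDateTime parameters are bound as an ISO-formatted string with an explicit offset (via fmtDateTimeOffset) rather than converted to a java.sql.Timestamp. A small worked example of what that formatter produces (the timestamp value is made up for illustration):

import java.time.{ ZoneOffset, ZonedDateTime }
import java.time.format.{ DateTimeFormatter, DateTimeFormatterBuilder }

object OffsetFormatExample extends App {
  // Same formatter definition as fmtDateTimeOffset above.
  private val fmt = new DateTimeFormatterBuilder().parseCaseInsensitive
    .append(DateTimeFormatter.ISO_LOCAL_DATE_TIME)
    .appendOffset("+HH:MM", "Z")
    .toFormatter()

  val zdt = ZonedDateTime.of(2024, 1, 15, 10, 30, 45, 0, ZoneOffset.ofHours(2))
  println(fmt.format(zdt)) // prints: 2024-01-15T10:30:45+02:00
}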
--------------------------------------------------------------------------------
/sqlserver/src/main/scala/zio/sql/sqlserver/SqlServerSqlModule.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.sqlserver
2 |
3 | import java.math.BigDecimal
4 | import zio.sql.table._
5 | import zio.sql.select._
6 | import zio.sql.Sql
7 | import zio.sql.expr.FunctionName
8 | import zio.sql.expr.AggregationDef
9 |
10 | trait SqlServerSqlModule extends Sql { self =>
11 |
12 | object SqlServerSpecific {
13 |
14 | sealed trait SqlServerTable[A] extends Table.TableExtension[A]
15 |
16 | object SqlServerTable {
17 |
18 | import scala.language.implicitConversions
19 |
20 | sealed trait CrossType
21 | object CrossType {
22 | case object CrossApply extends CrossType
23 | case object OuterApply extends CrossType
24 | }
25 |
26 | sealed case class CrossOuterApplyTable[A, B](
27 | crossType: CrossType,
28 | left: Table.Aux[A],
29 | right: Table.Aux[B]
30 | ) extends SqlServerTable[A with B]
31 |
32 | implicit def tableSourceToSelectedBuilder[A](
33 | table: Table.Aux[A]
34 | ): CrossOuterApplyTableBuilder[A] =
35 | new CrossOuterApplyTableBuilder(table)
36 |
37 | sealed case class CrossOuterApplyTableBuilder[A](left: Table.Aux[A]) {
38 | self =>
39 |
40 | final def crossApply[Reprs, Out, RightSource](
41 | right: Table.DerivedTable[Reprs, Out, Read.WithReprs[Out, Reprs], RightSource]
42 | ): Table.DialectSpecificTable[A with RightSource] = {
43 |
44 | val tableExtension = CrossOuterApplyTable[A, RightSource](
45 | CrossType.CrossApply,
46 | left,
47 | right
48 | )
49 |
50 | new Table.DialectSpecificTable(tableExtension)
51 | }
52 |
53 | final def outerApply[Reprs, Out, RightSource](
54 | right: Table.DerivedTable[Reprs, Out, Read.WithReprs[Out, Reprs], RightSource]
55 | ): Table.DialectSpecificTable[A with RightSource] = {
56 |
57 | val tableExtension = CrossOuterApplyTable[A, RightSource](
58 | CrossType.OuterApply,
59 | left,
60 | right
61 | )
62 |
63 | new Table.DialectSpecificTable(tableExtension)
64 | }
65 | }
66 | }
67 |
68 | object SqlServerFunctionDef {
69 | val Avg = AggregationDef[BigDecimal, Int](FunctionName("avg"))
70 | }
71 | }
72 | }
73 |
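The implicit tableSourceToSelectedBuilder conversion is what gives any table the crossApply and outerApply syntax against a derived table, producing a dialect-specific table that can be passed to from(...). A minimal sketch, assuming the customers table and orderDateDerivedTable defined in the test DbSchema below (the CrossApplyExample trait itself is illustrative):

package zio.sql.sqlserver

// Sketch only: CROSS APPLY of a derived table onto the customers table.
trait CrossApplyExample extends DbSchema {
  import DbSchema._
  import SqlServerSpecific.SqlServerTable._

  val latestOrderDatePerCustomer =
    select(fName, lName, orderDateDerived)
      .from(customers.crossApply(orderDateDerivedTable))
}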
--------------------------------------------------------------------------------
/sqlserver/src/test/resources/container-license-acceptance.txt:
--------------------------------------------------------------------------------
1 | mcr.microsoft.com/azure-sql-edge:latest
--------------------------------------------------------------------------------
/sqlserver/src/test/resources/logback.xml:
--------------------------------------------------------------------------------
1 | <configuration>
2 |
3 |     <!-- Console appender around the encoder pattern used by these tests.
4 |          The appender name and root level below are assumed defaults. -->
5 |     <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
6 |         <encoder>
7 |             <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n</pattern>
8 |         </encoder>
9 |     </appender>
10 |
11 |     <root level="INFO">
12 |         <appender-ref ref="STDOUT" />
13 |     </root>
14 |
15 | </configuration>
--------------------------------------------------------------------------------
/sqlserver/src/test/scala/zio/sql/sqlserver/DbSchema.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.sqlserver
2 |
3 | import java.util.UUID
4 | import java.time._
5 | import java.math.BigDecimal
6 | import zio.schema.{ DeriveSchema, Schema }
7 | import zio.sql.table._
8 | import zio.sql.select._
9 |
10 | trait DbSchema extends SqlServerSqlModule { self =>
11 |
12 | object DbSchema {
13 |
14 | case class Customer(
15 | id: UUID,
16 | dob: LocalDate,
17 | firstName: String,
18 | lastName: String,
19 | verified: Boolean,
20 | createdTimestampString: String,
21 | createdTimestamp: ZonedDateTime
22 | )
23 |
24 | implicit val custommerSchema
25 | : Schema.CaseClass7[UUID, LocalDate, String, String, Boolean, String, ZonedDateTime, Customer] =
26 | DeriveSchema.gen[Customer]
27 |
28 | val customers = Table.defineTableSmart[Customer]
29 |
30 | val (customerId, dob, fName, lName, verified, createdString, createdTimestamp) =
31 | customers.columns
32 |
33 | val ALL = customerId ++ dob ++ fName ++ lName ++ verified ++ createdString ++ createdTimestamp
34 |
35 | case class Orders(id: UUID, customerId: UUID, orderDate: LocalDate)
36 |
37 | implicit val orderSchema: Schema.CaseClass3[UUID, UUID, LocalDate, Orders] = DeriveSchema.gen[Orders]
38 |
39 | val orders = Table.defineTableSmart[Orders]
40 |
41 | val (orderId, fkCustomerId, orderDate) = orders.columns
42 |
43 | case class Products(id: UUID, name: String, description: String, imageUrl: String)
44 |
45 | implicit val productSchema: Schema.CaseClass4[UUID, String, String, String, Products] = DeriveSchema.gen[Products]
46 |
47 | val products = Table.defineTableSmart[Products]
48 |
49 | val (productId, productName, description, imageURL) = products.columns
50 |
51 | case class OrderDetails(orderId: UUID, productId: UUID, quantity: Int, unitPrice: BigDecimal)
52 |
53 | implicit val orderDetailsSchema: Schema.CaseClass4[UUID, UUID, Int, BigDecimal, OrderDetails] =
54 | DeriveSchema.gen[OrderDetails]
55 |
56 | val orderDetails = Table.defineTableSmart[OrderDetails]
57 |
58 | val (orderDetailsId, orderDetailsProductId, quantity, unitPrice) = orderDetails.columns
59 |
60 | val orderDetailsDerived =
61 | select(orderDetailsId, orderDetailsProductId, unitPrice).from(orderDetails).asTable("derived")
62 |
63 | val (derivedOrderId, derivedProductId, derivedUnitPrice) = orderDetailsDerived.columns
64 |
65 | val orderDateDerivedTable = customers
66 | .subselect(orderDate)
67 | .from(orders)
68 | .limit(1)
69 | .where(customerId === fkCustomerId)
70 | .orderBy(Ordering.Desc(orderDate))
71 | .asTable("derived")
72 |
73 | val orderDateDerived = orderDateDerivedTable.columns
74 | }
75 | }
76 |
--------------------------------------------------------------------------------
/sqlserver/src/test/scala/zio/sql/sqlserver/SqlServerRunnableSpec.scala:
--------------------------------------------------------------------------------
1 | package zio.sql.sqlserver
2 |
3 | import com.dimafeng.testcontainers.{ JdbcDatabaseContainer, MSSQLServerContainer, SingleContainer }
4 | import org.testcontainers.utility.DockerImageName
5 | import zio.sql.JdbcRunnableSpec
6 |
7 | trait SqlServerRunnableSpec extends JdbcRunnableSpec with SqlServerJdbcModule {
8 |
9 | override protected def getContainer: SingleContainer[_] with JdbcDatabaseContainer =
10 | new MSSQLServerContainer(
11 | dockerImageName = DockerImageName
12 | .parse("mcr.microsoft.com/azure-sql-edge:latest")
13 | .asCompatibleSubstituteFor("mcr.microsoft.com/mssql/server")
14 | ).configure { a =>
15 | a.withInitScript("db_schema.sql")
16 | ()
17 | }
18 |
19 | }
20 |
--------------------------------------------------------------------------------