├── .github └── workflows │ └── build.yml ├── .gitignore ├── .mill-version ├── .scalafix.conf ├── .scalafmt.conf ├── build.mill ├── docs ├── cheatsheet.md ├── design.md ├── developer.md ├── generateDocs.mill ├── package.mill ├── reference.md └── tutorial.md ├── mill ├── readme.md └── scalasql ├── core └── src │ ├── Aggregatable.scala │ ├── Config.scala │ ├── Context.scala │ ├── DbApi.scala │ ├── DbClient.scala │ ├── DialectConfig.scala │ ├── DialectTypeMappers.scala │ ├── Expr.scala │ ├── ExprsToSql.scala │ ├── FastAccumulator.scala │ ├── JoinNullable.scala │ ├── LiveSqlExprs.scala │ ├── Queryable.scala │ ├── SqlStr.scala │ ├── TypeMapper.scala │ ├── WithSqlExpr.scala │ └── package.scala ├── namedtuples ├── src │ ├── NamedTupleQueryable.scala │ ├── SimpleTable.scala │ ├── SimpleTableMacros.scala │ └── simple.scala └── test │ └── src │ ├── SimpleTableConcreteTestSuites.scala │ ├── SimpleTableExampleTests.scala │ ├── datatypes │ ├── LargeObjectTest.scala │ ├── SimpleTableDataTypesTests.scala │ └── SimpleTableOptionalTests.scala │ └── example │ ├── SimpleTableH2Example.scala │ ├── SimpleTableMySqlExample.scala │ ├── SimpleTablePostgresExample.scala │ ├── SimpleTableSqliteExample.scala │ ├── WorldSqlTestsNamedTuple.scala │ └── foo.scala ├── operations └── src │ ├── AggNumericOps.scala │ ├── AggOps.scala │ ├── BitwiseFunctionOps.scala │ ├── CaseWhen.scala │ ├── ConcatOps.scala │ ├── DbApiOps.scala │ ├── ExprAggOps.scala │ ├── ExprBooleanOps.scala │ ├── ExprNumericOps.scala │ ├── ExprOps.scala │ ├── ExprOptionOps.scala │ ├── ExprStringLikeOps.scala │ ├── ExprStringOps.scala │ ├── ExprTypedOps.scala │ ├── HyperbolicMathOps.scala │ ├── MathOps.scala │ ├── PadOps.scala │ ├── TrimOps.scala │ └── readme.md ├── query ├── src-2 │ └── TableMacro.scala ├── src-3 │ └── TableMacro.scala └── src │ ├── Aggregate.scala │ ├── Column.scala │ ├── CompoundSelect.scala │ ├── Delete.scala │ ├── FlatJoin.scala │ ├── From.scala │ ├── GetGeneratedKeys.scala │ ├── Insert.scala │ ├── 
InsertColumns.scala │ ├── InsertSelect.scala │ ├── InsertValues.scala │ ├── JoinAppend.scala │ ├── JoinOps.scala │ ├── Joinable.scala │ ├── JoinsToSql.scala │ ├── LateralJoinOps.scala │ ├── Model.scala │ ├── OnConflict.scala │ ├── Query.scala │ ├── Returning.scala │ ├── Select.scala │ ├── SimpleSelect.scala │ ├── SqlWindow.scala │ ├── Table.scala │ ├── Update.scala │ ├── Values.scala │ └── WithCte.scala ├── src ├── dialects │ ├── CompoundSelectRendererForceLimit.scala │ ├── DbApiQueryOps.scala │ ├── Dialect.scala │ ├── H2Dialect.scala │ ├── MySqlDialect.scala │ ├── OnConflictOps.scala │ ├── PostgresDialect.scala │ ├── ReturningDialect.scala │ ├── SqliteDialect.scala │ └── TableOps.scala └── package.scala └── test ├── resources ├── customer-data-plus-schema.sql ├── customer-data.sql ├── h2-customer-schema.sql ├── mysql-customer-schema.sql ├── postgres-customer-schema.sql ├── sqlite-customer-schema.sql ├── world-data.sql └── world-schema.sql ├── src-3 ├── Scala3ExampleTests.scala └── example │ └── Scala3H2Example.scala └── src ├── ConcreteTestSuites.scala ├── ExampleTests.scala ├── FailureTests.scala ├── Main.scala ├── UnitTestData.scala ├── UtestFramework.scala ├── WorldSqlTests.scala ├── api ├── DbApiTests.scala └── TransactionTests.scala ├── datatypes ├── DataTypesTests.scala └── OptionalTests.scala ├── dialects ├── H2DialectTests.scala ├── MySqlDialectTests.scala ├── PostgresDialectTests.scala └── SqliteDialectTests.scala ├── example ├── CheatSheetExample.scala ├── H2Example.scala ├── HikariCpExample.scala ├── MySqlExample.scala ├── PostgresExample.scala └── SqliteExample.scala ├── operations ├── DbAggNumericOpsTests.scala ├── DbAggOpsTests.scala ├── DbApiOpsTests.scala ├── DbBlobOpsTests.scala ├── DbBooleanOpsTests.scala ├── DbMathOpsTests.scala ├── DbNumericOpsTests.scala ├── DbOpsTests.scala └── DbStringOpsTests.scala ├── query ├── CompoundSelectTests.scala ├── DeleteTests.scala ├── EscapedTableNameReturningTests.scala ├── EscapedTableNameTests.scala ├── 
FlatJoinTests.scala ├── GetGeneratedKeysTests.scala ├── InsertTests.scala ├── JoinTests.scala ├── LateralJoinTests.scala ├── OnConflictTests.scala ├── ReturningTests.scala ├── SchemaTests.scala ├── SelectTests.scala ├── SubQueryTests.scala ├── UpdateJoinTests.scala ├── UpdateSubQueryTests.scala ├── UpdateTests.scala ├── ValuesTests.scala ├── WindowFunctionTests.scala └── WithCteTests.scala └── utils ├── ScalaSqlSuite.scala └── TestChecker.scala /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | tags: 8 | - '*' 9 | pull_request: 10 | branches: 11 | - main 12 | 13 | jobs: 14 | test: 15 | runs-on: ubuntu-latest 16 | strategy: 17 | matrix: 18 | java: ['11', '17'] 19 | steps: 20 | - uses: actions/checkout@v3 21 | - uses: actions/setup-java@v3 22 | with: 23 | distribution: 'temurin' 24 | java-version: ${{ matrix.java }} 25 | - name: Run tests 26 | run: ./mill -i __.publishArtifacts + __.test 27 | 28 | 29 | check-formatting: 30 | runs-on: ubuntu-latest 31 | steps: 32 | - uses: actions/checkout@v4 33 | with: 34 | fetch-depth: 0 35 | 36 | - uses: actions/setup-java@v3 37 | with: 38 | distribution: 'temurin' 39 | java-version: 17 40 | 41 | - run: ./mill -i mill.scalalib.scalafmt.ScalafmtModule/checkFormatAll __.sources 42 | 43 | 44 | check-docs-updated: 45 | runs-on: ubuntu-latest 46 | steps: 47 | - uses: actions/checkout@v4 48 | with: 49 | fetch-depth: 0 50 | 51 | - uses: actions/setup-java@v3 52 | with: 53 | distribution: 'temurin' 54 | java-version: 17 55 | 56 | - run: ./mill -i "__.test" && ./mill -i generateTutorial + generateReference && git diff --exit-code 57 | 58 | check-scalafix: 59 | runs-on: ubuntu-latest 60 | steps: 61 | - uses: actions/checkout@v4 62 | with: 63 | fetch-depth: 0 64 | 65 | - uses: actions/setup-java@v3 66 | with: 67 | distribution: 'temurin' 68 | java-version: 17 69 | 70 | - run: ./mill -i __.fix && git diff --exit-code 71 
| 72 | publish-sonatype: 73 | if: github.repository == 'com-lihaoyi/scalasql' && contains(github.ref, 'refs/tags/') 74 | needs: test 75 | runs-on: ubuntu-latest 76 | env: 77 | MILL_SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} 78 | MILL_SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} 79 | MILL_PGP_SECRET_BASE64: ${{ secrets.SONATYPE_PGP_PRIVATE_KEY }} 80 | MILL_PGP_PASSPHRASE: ${{ secrets.SONATYPE_PGP_PRIVATE_KEY_PASSWORD }} 81 | LANG: "en_US.UTF-8" 82 | LC_MESSAGES: "en_US.UTF-8" 83 | LC_ALL: "en_US.UTF-8" 84 | 85 | steps: 86 | - uses: actions/checkout@v3 87 | - uses: actions/setup-java@v3 88 | with: 89 | distribution: 'temurin' 90 | java-version: 11 91 | - name: Publish to Maven Central 92 | run: ./mill --import "ivy:com.lihaoyi::mill-contrib-sonatypecentral:" mill.contrib.sonatypecentral.SonatypeCentralPublishModule/publishAll --publishArtifacts __.publishArtifacts 93 | 94 | - name: Create GitHub Release 95 | id: create_gh_release 96 | uses: actions/create-release@v1.1.4 97 | env: 98 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token 99 | with: 100 | tag_name: ${{ github.ref }} 101 | release_name: ${{ github.ref }} 102 | draft: false 103 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /*/target/ 2 | target/ 3 | output/ 4 | .DS_STORE 5 | .idea_modules 6 | .idea 7 | .vscode/ 8 | out/ 9 | /.bloop/ 10 | /.metals/ 11 | mill.iml 12 | .bsp/ 13 | bsp.log 14 | lowered.hnir 15 | .dotty-ide* 16 | -------------------------------------------------------------------------------- /.mill-version: -------------------------------------------------------------------------------- 1 | 0.12.10 2 | -------------------------------------------------------------------------------- /.scalafix.conf: -------------------------------------------------------------------------------- 
1 | rules = [ 2 | RemoveUnused 3 | ] -------------------------------------------------------------------------------- /.scalafmt.conf: -------------------------------------------------------------------------------- 1 | version = "3.9.5" 2 | 3 | align.preset = none 4 | align.openParenCallSite = false 5 | align.stripMargin = true 6 | 7 | assumeStandardLibraryStripMargin = true 8 | 9 | continuationIndent.callSite = 2 10 | continuationIndent.defnSite = 4 11 | 12 | docstrings.style = Asterisk 13 | docstrings.oneline = keep 14 | docstrings.wrap = no 15 | 16 | maxColumn = 100 17 | newlines.implicitParamListModifierPrefer = before 18 | runner.dialect = scala3 19 | 20 | fileOverride { 21 | "glob:**/src-2/**" { 22 | runner.dialect = scala213source3 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /docs/developer.md: -------------------------------------------------------------------------------- 1 | 2 | # Developer Docs 3 | 4 | ## Precondition 5 | Before running the unit tests, ensure the following steps are completed: 6 | 7 | 1. **Start Docker on Your Machine:** 8 | Ensure that Docker is running. You can verify this by executing `docker info` in your terminal. If Docker is not running, start it using your system's preferred method (e.g., using the Docker desktop application or by running `systemctl start docker` on Linux). 9 | 10 | ## Running Unit Tests 11 | To facilitate efficient testing, you can choose from several commands based on your testing needs: 12 | 13 | ### Run all Tests 14 | * Running all unit tests: 15 | ```bash 16 | ./mill -i -w "__.test" 17 | ``` 18 | 19 | ### Quick Database-Specific Tests 20 | * Running all unit tests on one database. 
This 21 | is much faster than running all tests, and useful for quick iteration for changes that 22 | are not database specific: 23 | ```bash 24 | ./mill -i -w "__.test scalasql.sqlite" 25 | ``` 26 | 27 | ### Full Test Suite with Documentation Generation 28 | * Re-generating docs: 29 | ```bash 30 | ./mill -i "__.test" + generateTutorial + generateReference 31 | ``` 32 | * Note that ScalaSql's reference docs are extracted from the test suite, and thus we need 33 | to make sure to run the test suite before re-generating them. 34 | 35 | ### Code Formatting and Auto-Fixes 36 | * Fix all auto-generating and auto-formatting issues at once via 37 | ```bash 38 | ./mill -i -w __.fix + mill.scalalib.scalafmt.ScalafmtModule/reformatAll __.sources + "scalasql[2.13.12].test" + generateTutorial + generateReference 39 | ``` 40 | 41 | ## Benchmarking 42 | * You can run ad-hoc benchmarks using any test case via the `SCALASQL_RUN_BENCHMARK` 43 | environment variable, e.g. 44 | ```bash 45 | SCALASQL_RUN_BENCHMARK=5000 ./mill -i -w __.test scalasql.sqlite.SubQueryTests.deeplyNested 46 | ``` 47 | 48 | ## ScalaSql Modules Overview 49 | * ScalaSql comprises 4 main submodules: 50 | * `scalasql.core`: the core functionality of evaluating `SqlStr` queries, but without any typed 51 | helpers to construct them 52 | * `scalasql.operations`: extension methods on `Expr[T]` values representing operations on typed 53 | SQL expressions, like `LOWER(str)` or `a || b`/`CONCAT(a, b)` 54 | * `scalasql.query`: builders for entire SQL queries, `INSERT`, `SELECT`, `UPDATE`, `DELETE`. 
55 | * `scalasql`: the main user-facing ScalaSql module, contains the `package object` defining 56 | what a user sees when they do `import scalasql._`, as well as the various database `*Dialect`s 57 | that provide the relevant set of `query`s and `operations` for each respective database 58 | 59 | * ScalaSql's tests are concentrated within a single `scalasql.test` module, with subfolders 60 | corresponding to the various ScalaSql sub-modules they are intended to cover 61 | 62 | ``` 63 | scalasql.core 64 | | | 65 | +-----+ +-----+ 66 | | | 67 | scalasql.operations scalasql.query 68 | | | 69 | +------+ +------+ 70 | | | 71 | scalasql 72 | | 73 | +------+ 74 | | 75 | scalasql.test 76 | ``` -------------------------------------------------------------------------------- /docs/package.mill: -------------------------------------------------------------------------------- 1 | package build.docs 2 | -------------------------------------------------------------------------------- /mill: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # This is a wrapper script, that automatically download mill from GitHub release pages 4 | # You can give the required mill version with MILL_VERSION env variable 5 | # If no version is given, it falls back to the value of DEFAULT_MILL_VERSION 6 | 7 | set -e 8 | 9 | if [ -z "${DEFAULT_MILL_VERSION}" ] ; then 10 | DEFAULT_MILL_VERSION=0.11.12 11 | fi 12 | 13 | if [ -z "$MILL_VERSION" ] ; then 14 | if [ -f ".mill-version" ] ; then 15 | MILL_VERSION="$(head -n 1 .mill-version 2> /dev/null)" 16 | elif [ -f ".config/mill-version" ] ; then 17 | MILL_VERSION="$(head -n 1 .config/mill-version 2> /dev/null)" 18 | elif [ -f "mill" ] && [ "$0" != "mill" ] ; then 19 | MILL_VERSION=$(grep -F "DEFAULT_MILL_VERSION=" "mill" | head -n 1 | cut -d= -f2) 20 | else 21 | MILL_VERSION=$DEFAULT_MILL_VERSION 22 | fi 23 | fi 24 | 25 | if [ "x${XDG_CACHE_HOME}" != "x" ] ; then 26 | 
MILL_DOWNLOAD_PATH="${XDG_CACHE_HOME}/mill/download" 27 | else 28 | MILL_DOWNLOAD_PATH="${HOME}/.cache/mill/download" 29 | fi 30 | MILL_EXEC_PATH="${MILL_DOWNLOAD_PATH}/${MILL_VERSION}" 31 | 32 | version_remainder="$MILL_VERSION" 33 | MILL_MAJOR_VERSION="${version_remainder%%.*}"; version_remainder="${version_remainder#*.}" 34 | MILL_MINOR_VERSION="${version_remainder%%.*}"; version_remainder="${version_remainder#*.}" 35 | 36 | if [ ! -s "$MILL_EXEC_PATH" ] ; then 37 | mkdir -p "$MILL_DOWNLOAD_PATH" 38 | if [ "$MILL_MAJOR_VERSION" -gt 0 ] || [ "$MILL_MINOR_VERSION" -ge 5 ] ; then 39 | ASSEMBLY="-assembly" 40 | fi 41 | DOWNLOAD_FILE=$MILL_EXEC_PATH-tmp-download 42 | MILL_VERSION_TAG=$(echo $MILL_VERSION | sed -E 's/([^-]+)(-M[0-9]+)?(-.*)?/\1\2/') 43 | MILL_DOWNLOAD_URL="https://repo1.maven.org/maven2/com/lihaoyi/mill-dist/$MILL_VERSION/mill-dist-$MILL_VERSION.jar" 44 | curl --fail -L -o "$DOWNLOAD_FILE" "$MILL_DOWNLOAD_URL" 45 | chmod +x "$DOWNLOAD_FILE" 46 | mv "$DOWNLOAD_FILE" "$MILL_EXEC_PATH" 47 | unset DOWNLOAD_FILE 48 | unset MILL_DOWNLOAD_URL 49 | fi 50 | 51 | if [ -z "$MILL_MAIN_CLI" ] ; then 52 | MILL_MAIN_CLI="${0}" 53 | fi 54 | 55 | MILL_FIRST_ARG="" 56 | 57 | # first arg is a long flag for "--interactive" or starts with "-i" 58 | if [ "$1" = "--bsp" ] || [ "${1#"-i"}" != "$1" ] || [ "$1" = "--interactive" ] || [ "$1" = "--no-server" ] || [ "$1" = "--repl" ] || [ "$1" = "--help" ] ; then 59 | # Need to preserve the first position of those listed options 60 | MILL_FIRST_ARG=$1 61 | shift 62 | fi 63 | 64 | unset MILL_DOWNLOAD_PATH 65 | unset MILL_VERSION 66 | 67 | exec $MILL_EXEC_PATH $MILL_FIRST_ARG -D "mill.main.cli=${MILL_MAIN_CLI}" "$@" 68 | -------------------------------------------------------------------------------- /scalasql/core/src/Aggregatable.scala: -------------------------------------------------------------------------------- 1 | package scalasql.core 2 | 3 | /** 4 | * Something that supports aggregate operations. 
Most commonly a [[Select]], but 5 | * also could be a [[Aggregatable.Proxy]] 6 | */ 7 | trait Aggregatable[Q] extends WithSqlExpr[Q] { 8 | def aggregateExpr[V: TypeMapper](f: Q => Context => SqlStr)( 9 | implicit qr: Queryable.Row[Expr[V], V] 10 | ): Expr[V] 11 | } 12 | 13 | object Aggregatable { 14 | 15 | /** 16 | * A reference that aggregations for usage within [[Select.aggregate]], to allow 17 | * the caller to perform multiple aggregations within a single query. 18 | */ 19 | class Proxy[Q](val expr: Q) extends Aggregatable[Q] { 20 | def aggregateExpr[V: TypeMapper](f: Q => Context => SqlStr)( 21 | implicit qr: Queryable.Row[Expr[V], V] 22 | ): Expr[V] = { Expr[V] { implicit c => f(expr)(c) } } 23 | } 24 | 25 | } 26 | -------------------------------------------------------------------------------- /scalasql/core/src/Config.scala: -------------------------------------------------------------------------------- 1 | package scalasql.core 2 | 3 | /** 4 | * Things you to do to configure ScalaSql 5 | */ 6 | trait Config { 7 | 8 | /** 9 | * Render a sequence of tokens to a column label; used primarily for 10 | * making the generated queries more easily human readable. 11 | */ 12 | def renderColumnLabel(tokens: Seq[String]): String = { 13 | val prefixedTokens = 14 | if (tokens.isEmpty || !Config.isNormalCharacter(tokens.head.head)) "res" +: tokens 15 | else tokens 16 | 17 | prefixedTokens 18 | .map(tableNameMapper) 19 | .mkString("_") 20 | } 21 | 22 | /** 23 | * Configures the underlying JDBC connection's `setFetchSize`. Can be overriden 24 | * on a per-query basis by passing `fetchSize = n` to `db.run` 25 | */ 26 | def defaultFetchSize: Int = -1 27 | 28 | /** 29 | * Configures the underlying JDBC connection's `setQueryTimeout`. Can be overriden 30 | * on a per-query basis by passing `queryTimeoutSeconds = n` to `db.run` 31 | */ 32 | def defaultQueryTimeoutSeconds: Int = -1 33 | 34 | /** 35 | * Translates table and column names from Scala `object` names to SQL names. 
36 | * 37 | * Use [[tableNameMapper]] and [[columnNameMapper]] if you want different 38 | * translations for table and column names 39 | */ 40 | def nameMapper(v: String): String = Config.camelToSnake(v) 41 | 42 | /** 43 | * Translates table names from Scala `object` names to SQL names. 44 | */ 45 | def tableNameMapper(v: String): String = nameMapper(v) 46 | 47 | /** 48 | * Translates column names from Scala `case class` field names to SQL names. 49 | */ 50 | def columnNameMapper(v: String): String = nameMapper(v) 51 | 52 | /** 53 | * Override this to log the executed SQL queries 54 | */ 55 | def logSql(sql: String, file: String, line: Int): Unit = () 56 | } 57 | 58 | object Config { 59 | def isNormalCharacter(c: Char) = (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_' 60 | def camelToSnake(s: String) = { 61 | val chars = new collection.mutable.StringBuilder 62 | var lowercase = false 63 | for (c <- s) { 64 | if (c.isUpper) { 65 | if (lowercase == true) chars.append('_') 66 | chars.append(c.toLower) 67 | lowercase = false 68 | } else { 69 | chars.append(c) 70 | lowercase = true 71 | } 72 | 73 | } 74 | chars.toString() 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /scalasql/core/src/Context.scala: -------------------------------------------------------------------------------- 1 | package scalasql.core 2 | 3 | import scalasql.core.SqlStr.SqlStringSyntax 4 | 5 | /** 6 | * The contextual information necessary for rendering a ScalaSql query or expression 7 | * into a SQL string 8 | */ 9 | trait Context { 10 | 11 | /** 12 | * Any [[From]]/`FROM` clauses that are in scope, and the aliases those clauses are given 13 | */ 14 | def fromNaming: Map[Context.From, String] 15 | 16 | /** 17 | * Any [[Expr]]s/SQL-expressions that are present in [[fromNaming]], and what those 18 | * expressions are named in SQL 19 | */ 20 | def exprNaming: Map[Expr.Identity, SqlStr] 21 | 22 | /** 23 | * The ScalaSql configuration 24 
| */ 25 | def config: Config 26 | 27 | def dialectConfig: DialectConfig 28 | 29 | def withFromNaming(fromNaming: Map[Context.From, String]): Context 30 | def withExprNaming(exprNaming: Map[Expr.Identity, SqlStr]): Context 31 | } 32 | 33 | object Context { 34 | trait From { 35 | 36 | /** 37 | * What alias to name this [[From]] for better readability 38 | */ 39 | def fromRefPrefix(prevContext: Context): String 40 | 41 | /** 42 | * A mapping of any aliased [[Expr]] that this [[From]] produces along 43 | * with their rendered [[SqlStr]]s 44 | */ 45 | def fromExprAliases(prevContext: Context): Seq[(Expr.Identity, SqlStr)] 46 | 47 | /** 48 | * How this [[From]] can be rendered into a [[SqlStr]] for embedding into 49 | * a larger query 50 | */ 51 | def renderSql( 52 | name: SqlStr, 53 | prevContext: Context, 54 | liveExprs: LiveExprs 55 | ): SqlStr 56 | } 57 | 58 | case class Impl( 59 | fromNaming: Map[From, String], 60 | exprNaming: Map[Expr.Identity, SqlStr], 61 | config: Config, 62 | dialectConfig: DialectConfig 63 | ) extends Context { 64 | def withFromNaming(fromNaming: Map[From, String]): Context = copy(fromNaming = fromNaming) 65 | 66 | def withExprNaming(exprNaming: Map[Expr.Identity, SqlStr]): Context = 67 | copy(exprNaming = exprNaming) 68 | } 69 | 70 | /** 71 | * Derives a new [[Context]] based on [[prevContext]] with additional [[prefixedFroms]] 72 | * and [[unPrefixedFroms]] added to the [[Context.fromNaming]] and [[Context.exprNaming]] 73 | * tables 74 | */ 75 | def compute(prevContext: Context, prefixedFroms: Seq[From], unPrefixedFroms: Option[From]) = { 76 | 77 | val prevSize = prevContext.fromNaming.size 78 | val newFromNaming = Map.from( 79 | prevContext.fromNaming.iterator ++ 80 | prefixedFroms.iterator.zipWithIndex.collect { 81 | case (r, i) if !prevContext.fromNaming.contains(r) => 82 | (r, r.fromRefPrefix(prevContext) + (i + prevSize)) 83 | } ++ 84 | unPrefixedFroms.iterator.collect { 85 | case t if !prevContext.fromNaming.contains(t) => 86 | t -> 
t.fromRefPrefix(prevContext) 87 | } 88 | ) 89 | 90 | val newExprNaming = 91 | prevContext.exprNaming ++ 92 | prefixedFroms.iterator 93 | .flatMap { t => 94 | t 95 | .fromExprAliases(prevContext.withFromNaming(newFromNaming)) 96 | .map { case (e, s) => (e, sql"${SqlStr.raw(newFromNaming(t), Array(e))}.$s") } 97 | } 98 | 99 | Context.Impl(newFromNaming, newExprNaming, prevContext.config, prevContext.dialectConfig) 100 | } 101 | 102 | } 103 | -------------------------------------------------------------------------------- /scalasql/core/src/DialectConfig.scala: -------------------------------------------------------------------------------- 1 | package scalasql.core 2 | 3 | trait DialectConfig { that => 4 | def castParams: Boolean 5 | def escape(str: String): String 6 | 7 | def withCastParams(params: Boolean) = new DialectConfig { 8 | def castParams: Boolean = params 9 | 10 | def escape(str: String): String = that.escape(str) 11 | 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /scalasql/core/src/DialectTypeMappers.scala: -------------------------------------------------------------------------------- 1 | package scalasql.core 2 | 3 | import java.util.UUID 4 | import java.time.{ 5 | LocalDate, 6 | LocalTime, 7 | LocalDateTime, 8 | ZonedDateTime, 9 | Instant, 10 | OffsetTime, 11 | OffsetDateTime 12 | } 13 | 14 | /** 15 | * A default set of data type mappers that need to be present in any ScalaSql dialect 16 | */ 17 | trait DialectTypeMappers extends DialectConfig { 18 | implicit val dialectSelf: DialectTypeMappers 19 | 20 | implicit def StringType: TypeMapper[String] 21 | implicit def ByteType: TypeMapper[Byte] 22 | implicit def ShortType: TypeMapper[Short] 23 | implicit def IntType: TypeMapper[Int] 24 | implicit def LongType: TypeMapper[Long] 25 | 26 | implicit def FloatType: TypeMapper[Float] 27 | implicit def DoubleType: TypeMapper[Double] 28 | implicit def BigDecimalType: TypeMapper[scala.math.BigDecimal] 29 | 
implicit def BooleanType: TypeMapper[Boolean] 30 | implicit def UuidType: TypeMapper[UUID] 31 | implicit def BytesType: TypeMapper[geny.Bytes] 32 | implicit def UtilDateType: TypeMapper[java.util.Date] 33 | implicit def LocalDateType: TypeMapper[LocalDate] 34 | implicit def LocalTimeType: TypeMapper[LocalTime] 35 | 36 | implicit def LocalDateTimeType: TypeMapper[LocalDateTime] 37 | 38 | implicit def ZonedDateTimeType: TypeMapper[ZonedDateTime] 39 | implicit def InstantType: TypeMapper[Instant] 40 | 41 | implicit def OffsetTimeType: TypeMapper[OffsetTime] 42 | 43 | implicit def OffsetDateTimeType: TypeMapper[OffsetDateTime] 44 | implicit def EnumType[T <: Enumeration#Value](implicit constructor: String => T): TypeMapper[T] 45 | implicit def OptionType[T](implicit inner: TypeMapper[T]): TypeMapper[Option[T]] 46 | } 47 | -------------------------------------------------------------------------------- /scalasql/core/src/Expr.scala: -------------------------------------------------------------------------------- 1 | package scalasql.core 2 | 3 | import scalasql.core.SqlStr.SqlStringSyntax 4 | 5 | /** 6 | * A single "value" in your SQL query that can be mapped to and from 7 | * a Scala value of a particular type [[T]] 8 | */ 9 | trait Expr[T] extends SqlStr.Renderable { 10 | private[scalasql] final def renderSql(ctx: Context): SqlStr = { 11 | ctx.exprNaming.get(this.exprIdentity).getOrElse(renderToSql0(ctx)) 12 | } 13 | 14 | protected def renderToSql0(implicit ctx: Context): SqlStr 15 | 16 | override def toString: String = 17 | throw new Exception("Expr#toString is not defined. Use Expr#exprToString") 18 | 19 | override def equals(other: Any): Boolean = throw new Exception( 20 | "Expr#equals is not defined. 
Use Expr#exprIdentity for your equality checks" 21 | ) 22 | private lazy val exprIdentity: Expr.Identity = new Expr.Identity() 23 | private def exprToString: String = super.toString 24 | 25 | /** 26 | * Some syntax like `for` comprehensions likes to generate spurious `Expr(true)` 27 | * clauses. We need to mark them as such so we can filter them out later during 28 | * code generation 29 | */ 30 | protected def exprIsLiteralTrue: Boolean = false 31 | } 32 | 33 | object Expr { 34 | def isLiteralTrue[T](e: Expr[T]): Boolean = e.exprIsLiteralTrue 35 | def toString[T](e: Expr[T]): String = e.exprToString 36 | 37 | def identity[T](e: Expr[T]): Identity = e.exprIdentity 38 | class Identity() 39 | 40 | implicit def ExprQueryable[E[_] <: Expr[?], T]( 41 | implicit mt: TypeMapper[T] 42 | ): Queryable.Row[E[T], T] = new ExprQueryable[E, T]() 43 | 44 | class ExprQueryable[E[_] <: Expr[?], T]( 45 | implicit tm: TypeMapper[T] 46 | ) extends Queryable.Row[E[T], T] { 47 | def walkLabels(): Seq[List[String]] = Seq(Nil) 48 | def walkExprs(q: E[T]): Seq[Expr[?]] = Seq(q) 49 | 50 | override def construct(args: Queryable.ResultSetIterator): T = args.get(tm) 51 | 52 | def deconstruct(r: T): E[T] = Expr[T] { implicit ctx: Context => 53 | sql"$r" 54 | }.asInstanceOf[E[T]] 55 | } 56 | 57 | def apply[T](f: Context => SqlStr): Expr[T] = new Simple[T](f) 58 | implicit def optionalize[T](e: Expr[T]): Expr[Option[T]] = { 59 | new Simple[Option[T]](e.renderToSql0(_)) 60 | } 61 | class Simple[T](f: Context => SqlStr) extends Expr[T] { 62 | def renderToSql0(implicit ctx: Context): SqlStr = f(ctx) 63 | } 64 | 65 | implicit def apply[T]( 66 | x: T 67 | )(implicit conv: T => SqlStr.Interp): Expr[T] = { 68 | apply0[T](x)(conv) 69 | } 70 | def apply0[T]( 71 | x: T, 72 | exprIsLiteralTrue0: Boolean = false 73 | )(implicit conv: T => SqlStr.Interp): Expr[T] = new Expr[T] { 74 | override def renderToSql0(implicit ctx: Context): SqlStr = 75 | new SqlStr(Array("", ""), Array(conv(x)), false, 
Array.empty[Expr.Identity]) 76 | protected override def exprIsLiteralTrue = exprIsLiteralTrue0 77 | } 78 | 79 | } 80 | -------------------------------------------------------------------------------- /scalasql/core/src/ExprsToSql.scala: -------------------------------------------------------------------------------- 1 | package scalasql.core 2 | 3 | import scalasql.core.SqlStr.{Renderable, SqlStringSyntax} 4 | 5 | object ExprsToSql { 6 | 7 | def apply(walked: Queryable.Walked, context: Context, prefix: SqlStr): SqlStr = { 8 | selectColumnSql(walked, context) match { 9 | // Aggregate operators return expressions that are actually entire queries. 10 | // We thus check to avoid redundantly wrapping them in another `SELECT`, and 11 | // instead return them unchanged 12 | case Seq((prefix, singleExpr)) 13 | if prefix == context.config.renderColumnLabel(Nil) && singleExpr.isCompleteQuery => 14 | singleExpr 15 | 16 | case flatQuery => 17 | val exprsStr = SqlStr.join( 18 | flatQuery.map { case (k, v) => 19 | sql"$v AS ${SqlStr.raw(context.config.tableNameMapper(k))}" 20 | }, 21 | SqlStr.commaSep 22 | ) 23 | 24 | prefix + exprsStr 25 | } 26 | } 27 | 28 | def selectColumnSql(walked: Queryable.Walked, ctx: Context): Seq[(String, SqlStr)] = { 29 | walked.map { case (k, v) => (ctx.config.renderColumnLabel(k), Renderable.renderSql(v)(ctx)) } 30 | } 31 | 32 | def selectColumnReferences( 33 | walked: Queryable.Walked, 34 | ctx: Context 35 | ): Seq[(Expr.Identity, SqlStr)] = { 36 | walked.map { case (tokens, expr) => 37 | val dbId = Expr.identity(expr) 38 | (dbId, SqlStr.raw(ctx.config.renderColumnLabel(tokens), Array(dbId))) 39 | } 40 | } 41 | 42 | def booleanExprs(prefix: SqlStr, exprs: Seq[Expr[?]])(implicit ctx: Context) = { 43 | SqlStr.optSeq(exprs.filter(!Expr.isLiteralTrue(_))) { having => 44 | prefix + SqlStr.join(having.map(Renderable.renderSql(_)), sql" AND ") 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- 
/scalasql/core/src/FastAccumulator.scala: -------------------------------------------------------------------------------- 1 | package scalasql.core 2 | 3 | import scala.reflect.ClassTag 4 | 5 | class FastAccumulator[T: ClassTag](startSize: Int = 16) { 6 | val arr = new Array[T](startSize) 7 | } 8 | -------------------------------------------------------------------------------- /scalasql/core/src/JoinNullable.scala: -------------------------------------------------------------------------------- 1 | package scalasql.core 2 | 3 | import scalasql.core.SqlStr.SqlStringSyntax 4 | 5 | /** 6 | * Represents a set of nullable columns that come from a `LEFT`/`RIGHT`/`OUTER` `JOIN` 7 | * clause. 8 | */ 9 | trait JoinNullable[Q] { 10 | def get: Q 11 | def isEmpty[T](f: Q => Expr[T])(implicit qr: Queryable[Q, ?]): Expr[Boolean] 12 | def nonEmpty[T](f: Q => Expr[T])(implicit qr: Queryable[Q, ?]): Expr[Boolean] 13 | def map[V](f: Q => V): JoinNullable[V] 14 | 15 | } 16 | object JoinNullable { 17 | implicit def toExpr[T](n: JoinNullable[Expr[T]])(implicit mt: TypeMapper[T]): Expr[Option[T]] = 18 | Expr { implicit ctx => sql"${n.get}" } 19 | 20 | def apply[Q](t: Q): JoinNullable[Q] = new JoinNullable[Q] { 21 | def get: Q = t 22 | def isEmpty[T](f: Q => Expr[T])(implicit qr: Queryable[Q, ?]): Expr[Boolean] = Expr { 23 | implicit ctx => 24 | sql"(${f(t)} IS NULL)" 25 | } 26 | def nonEmpty[T](f: Q => Expr[T])(implicit qr: Queryable[Q, ?]): Expr[Boolean] = Expr { 27 | implicit ctx => 28 | sql"(${f(t)} IS NOT NULL)" 29 | } 30 | def map[V](f: Q => V) = JoinNullable(f(t)) 31 | } 32 | 33 | } 34 | -------------------------------------------------------------------------------- /scalasql/core/src/LiveSqlExprs.scala: -------------------------------------------------------------------------------- 1 | package scalasql.core 2 | 3 | /** 4 | * Models a set of live [[Expr]] expressions which need to be rendered; 5 | * [[Expr]] expressions not in this set can be skipped during rendering 6 | * to 
improve the conciseness of the rendered SQL string. 7 | * 8 | * - `None` is used to indicate this is a top-level context and we want 9 | * all expressions to be rendered 10 | * 11 | * - `Some(set)` indicates that only the expressions present in the `set` 12 | * need to be rendered, and the rest can be elided. 13 | * 14 | * Typically downstream parts of a SQL query (e.g. the outer `SELECT`) are 15 | * rendered before the upstream parts (e.g. `FROM (SELECT ...)` subqueries), 16 | * so the [[LiveExprs]] from the downstream parts can be used to decide 17 | * which columns to skip when rendering the upstream parts. The outermost 18 | * `SELECT` is rendered using [[LiveExprs.none]] since we cannot know what 19 | * columns end up being used in the application code after the query has 20 | * finished running, and thus have to preserve all of them 21 | */ 22 | class LiveExprs(val values: Option[Set[Expr.Identity]]) { 23 | def map(f: Set[Expr.Identity] => Set[Expr.Identity]) = new LiveExprs(values.map(f)) 24 | def isLive(e: Expr.Identity) = values.fold(true)(_.contains(e)) 25 | } 26 | 27 | object LiveExprs { 28 | def some(v: Set[Expr.Identity]) = new LiveExprs(Some(v)) 29 | def none = new LiveExprs(None) 30 | } 31 | -------------------------------------------------------------------------------- /scalasql/core/src/TypeMapper.scala: -------------------------------------------------------------------------------- 1 | package scalasql.core 2 | 3 | import java.sql.{JDBCType, PreparedStatement, ResultSet} 4 | import java.time.{ 5 | LocalDate, 6 | LocalTime, 7 | LocalDateTime, 8 | ZonedDateTime, 9 | Instant, 10 | OffsetTime, 11 | OffsetDateTime 12 | } 13 | import java.util.UUID 14 | 15 | // What Quill does 16 | // https://github.com/zio/zio-quill/blob/43ee1dab4f717d7e6683aa24c391740f3d17df50/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Encoders.scala#L104 17 | 18 | // What SLICK does 19 | // 
https://github.com/slick/slick/blob/88b2ffb177776fd74dee38124b8c54d616d1a9ae/slick/src/main/scala/slick/jdbc/JdbcTypesComponent.scala#L15 20 | 21 | // Official JDBC mapping docs 22 | // https://docs.oracle.com/javase/tutorial/jdbc/basics/index.html 23 | // https://docs.oracle.com/javase/1.5.0/docs/guide/jdbc/getstart/mapping.html#1055162 24 | 25 | /** 26 | * A mapping between a Scala type [[T]] and a JDBC type, defined by 27 | * it's [[jdbcType]], [[castTypeString]], and [[get]] and [[put]] operations. 28 | * 29 | * Defaults are provided for most common Scala primitives, but you can also provide 30 | * your own by defining an `implicit val foo: TypeMapper[T]` 31 | */ 32 | trait TypeMapper[T] { outer => 33 | 34 | /** 35 | * The JDBC type of this type. 36 | */ 37 | def jdbcType: JDBCType 38 | 39 | /** 40 | * What SQL string to use when you run `cast[T]` to a specific type 41 | */ 42 | def castTypeString: String = jdbcType.toString 43 | 44 | /** 45 | * How to extract a value of type [[T]] from a `ResultSet` 46 | */ 47 | def get(r: ResultSet, idx: Int): T 48 | 49 | /** 50 | * How to insert a value of type [[T]] into a `PreparedStatement` 51 | */ 52 | def put(r: PreparedStatement, idx: Int, v: T): Unit 53 | 54 | /** 55 | * Create a new `TypeMapper[V]` based on this `TypeMapper[T]` given the 56 | * two conversion functions `f: V => T`, `g: T => V` 57 | */ 58 | def bimap[V](f: V => T, g: T => V): TypeMapper[V] = new TypeMapper[V] { 59 | def jdbcType: JDBCType = outer.jdbcType 60 | override def castTypeString: String = outer.castTypeString 61 | def get(r: ResultSet, idx: Int): V = g(outer.get(r, idx)) 62 | def put(r: PreparedStatement, idx: Int, v: V): Unit = outer.put(r, idx, f(v)) 63 | } 64 | } 65 | 66 | object TypeMapper { 67 | def apply[T](implicit t: TypeMapper[T]): TypeMapper[T] = t 68 | } 69 | -------------------------------------------------------------------------------- /scalasql/core/src/WithSqlExpr.scala: 
--------------------------------------------------------------------------------
package scalasql.core

// Mixin for query nodes that carry an underlying SQL expression of type `Q`.
trait WithSqlExpr[Q] {
  protected def expr: Q
}
object WithSqlExpr {
  // Accessor for the protected `expr`, usable by other scalasql internals.
  def get[Q](v: WithSqlExpr[Q]) = v.expr
}
-------------------------------------------------------------------------------- /scalasql/core/src/package.scala: --------------------------------------------------------------------------------
package scalasql
package object core {
  // `Sc[T]` is the identity type constructor, used as the "scala value" shape
  // in the Q/R duality of Queryable instances.
  type Sc[T] = T
}
-------------------------------------------------------------------------------- /scalasql/namedtuples/src/NamedTupleQueryable.scala: --------------------------------------------------------------------------------
package scalasql.namedtuples

import scala.NamedTuple.NamedTuple
import scalasql.core.{Queryable, Expr}

// Derives `Queryable.Row` instances for Scala 3 named tuples, so query results
// can be mapped to/from `(name = ..., price = ...)`-style tuples.
object NamedTupleQueryable {

  /** A sequence of n `Queryable.Row[Q, R]` instances, where `X` corresponds to all the `Q` and `Y` to all the `R` */
  opaque type Rows[X <: Tuple, +Y <: Tuple] = List[Queryable.Row[?, ?]]

  object Rows {
    // it seems "traditional" recursive implicit search is the only way to infer the types of `R` when only `Qs` is known.
    // see https://gist.github.com/bishabosha/e630f76384093153b17f1498a9459518 for a variant that
    // uses compiletime.summonAll, but it does a double implicit search, so wasnt chosen for the moment.

    // Inductive step: prepend one element's Row instance to the already-derived tail.
    given concatRows: [Q, R, Qs <: Tuple, Rs <: Tuple]
      => (x: Queryable.Row[Q, R])
      => (xs: Rows[Qs, Rs])
      => Rows[Q *: Qs, R *: Rs] =
      x :: xs

    // Base case: the empty tuple needs no Row instances.
    given emptyRows: Rows[EmptyTuple, EmptyTuple] = Nil
  }

  /**
   * A `Queryable.Row` instance for an arbitrary named tuple type, can be derived even
   * when one of `X` or `Y` is unknown.
   */
  given NamedTupleRow: [N <: Tuple, X <: Tuple, Y <: Tuple]
    => (rs: Rows[X, Y])
    => Queryable.Row[NamedTuple[N, X], NamedTuple[N, Y]] =
    NamedTupleRowImpl[N, X, Y](rs)

  // Implements the Row operations by delegating element-wise to the per-field
  // Row instances in `rs`. The local `type Q`/`type R` + `asInstanceOf` trick
  // recovers a usable (erased) pairing between each field value and its Row.
  private final class NamedTupleRowImpl[
      N <: Tuple,
      X <: Tuple,
      Y <: Tuple
  ](
      rs: List[Queryable.Row[?, ?]]
  ) extends Queryable.Row[NamedTuple[N, X], NamedTuple[N, Y]]:
    def walkExprs(q: NamedTuple[N, X]): Seq[Expr[?]] = {
      val walkExprs0 = {
        val ps = q.toTuple.productIterator
        rs.iterator
          .zip(ps)
          .map({ (row, p) =>
            type Q
            type R
            val q = p.asInstanceOf[Q]
            row.asInstanceOf[Queryable.Row[Q, R]].walkExprs(q)
          })
      }

      // NOTE(review): `prefix` is unused here, so zipWithIndex/map is effectively
      // a flatten; kept as-is to mirror walkLabels, where the index IS used.
      walkExprs0.zipWithIndex
        .map { case (v, i) => (i.toString, v) }
        .flatMap { case (prefix, vs0) => vs0 }
        .toIndexedSeq
    }
    def walkLabels(): Seq[List[String]] = {
      // Each field's labels get prefixed with its positional index.
      val walkLabels0 = rs.iterator.map(_.walkLabels())
      walkLabels0.zipWithIndex
        .map { case (v, i) => (i.toString, v) }
        .flatMap { case (prefix, vs0) => vs0.map { k => prefix +: k } }
        .toIndexedSeq
    }
    // Reads one value per field from the ResultSet iterator, in declaration order.
    def construct(args: scalasql.core.Queryable.ResultSetIterator): NamedTuple.NamedTuple[N, Y] =
      val data = IArray.from(rs.iterator.map(_.construct(args)))
      Tuple.fromIArray(data).asInstanceOf[NamedTuple.NamedTuple[N, Y]]

    // Inverse of construct: maps a result-shaped tuple back to a query-shaped one.
    def deconstruct(r: NamedTuple.NamedTuple[N, Y]): NamedTuple.NamedTuple[N, X] =
      val data = IArray.from {
        val ps = r.toTuple.productIterator
        rs.iterator
          .zip(ps)
          .map({ (row, p) =>
            type Q
            type R
            val r = p.asInstanceOf[R]
            row.asInstanceOf[Queryable.Row[Q, R]].deconstruct(r)
          })
      }
      Tuple.fromIArray(data).asInstanceOf[NamedTuple.NamedTuple[N, X]]

}
-------------------------------------------------------------------------------- /scalasql/namedtuples/src/simple.scala: --------------------------------------------------------------------------------
package
scalasql

// `scalasql.simple` re-exports the SimpleTable/named-tuple API together with
// the core scalasql API, so examples need only one wildcard import.
package object simple:
  export scalasql.namedtuples.SimpleTable
  export scalasql.namedtuples.NamedTupleQueryable.given
  export scalasql.`package`.{*, given}
-------------------------------------------------------------------------------- /scalasql/namedtuples/test/src/SimpleTableExampleTests.scala: --------------------------------------------------------------------------------
package scalasql.namedtuples

import utest._

// Runs each dialect's standalone example `main` as a utest test case.
object SimpleTableExampleTests extends TestSuite:
  def tests = Tests:
    test("postgres") - example.SimpleTablePostgresExample.main(Array.empty)
    test("mysql") - example.SimpleTableMySqlExample.main(Array.empty)
    test("h2") - example.SimpleTableH2Example.main(Array.empty)
    test("sqlite") - example.SimpleTableSqliteExample.main(Array.empty)
-------------------------------------------------------------------------------- /scalasql/namedtuples/test/src/example/SimpleTableH2Example.scala: --------------------------------------------------------------------------------
// duplicated from scalasql/test/src/example/H2Example.scala
package scalasql.namedtuples.example

import scalasql.simple.{*, given}
import H2Dialect.*

// End-to-end CRUD walkthrough against an in-memory H2 database using SimpleTable.
object SimpleTableH2Example {

  case class ExampleProduct(
      id: Int,
      kebabCaseName: String,
      name: String,
      price: Double
  )

  object ExampleProduct extends SimpleTable[ExampleProduct]

  // The example H2 database comes from the library `com.h2database:h2:2.2.224`
  val dataSource = new org.h2.jdbcx.JdbcDataSource
  dataSource.setUrl("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1")
  lazy val h2Client = new DbClient.DataSource(
    dataSource,
    config = new {} // all-defaults Config
  )

  def main(args: Array[String]): Unit = {
    h2Client.transaction { db =>
      // Create the table; column names are the snake_case form of the field names
      db.updateRaw("""
      CREATE TABLE example_product (
          id INTEGER AUTO_INCREMENT PRIMARY KEY,
          kebab_case_name VARCHAR(256),
          name VARCHAR(256),
          price DECIMAL(20, 2)
      );
      """)

      val inserted = db.run(
        ExampleProduct.insert.batched(_.kebabCaseName, _.name, _.price)(
          ("face-mask", "Face Mask", 8.88),
          ("guitar", "Guitar", 300),
          ("socks", "Socks", 3.14),
          ("skate-board", "Skate Board", 123.45),
          ("camera", "Camera", 1000.00),
          ("cookie", "Cookie", 0.10)
        )
      )

      assert(inserted == 6)

      val result =
        db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name))

      assert(result == Seq("Camera", "Guitar", "Skate Board"))

      // Update and delete, then re-query to observe the changes
      db.run(ExampleProduct.update(_.name === "Cookie").set(_.price := 11.0))

      db.run(ExampleProduct.delete(_.name === "Guitar"))

      val result2 =
        db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name))

      assert(result2 == Seq("Camera", "Skate Board", "Cookie"))

      // Projecting into a named tuple
      val result3 =
        db.run(
          ExampleProduct.select
            .filter(_.price > 10)
            .sortBy(_.price)
            .desc
            .map(p => (name = p.name, price = p.price))
        )

      assert(
        result3 == Seq(
          (name = "Camera", price = 1000.00),
          (name = "Skate Board", price = 123.45),
          (name = "Cookie", price = 11.0)
        )
      )
    }
  }
}
-------------------------------------------------------------------------------- /scalasql/namedtuples/test/src/example/SimpleTableMySqlExample.scala: --------------------------------------------------------------------------------
// duplicated from scalasql/test/src/example/MySqlExample.scala
package scalasql.namedtuples.example

import org.testcontainers.containers.MySQLContainer

import scalasql.simple.{*, given}
import MySqlDialect.*

// Same walkthrough as the H2 example, but against a Dockerized MySQL instance.
object SimpleTableMySqlExample {

  case class ExampleProduct(
      id: Int,
      kebabCaseName: String,
      name: String,
      price: Double
  )

  object ExampleProduct extends
SimpleTable[ExampleProduct]

  // The example MySQLContainer comes from the library `org.testcontainers:mysql:1.19.1`
  lazy val mysql = {
    println("Initializing MySql")
    val mysql = new MySQLContainer("mysql:8.0.31")
    mysql.setCommand("mysqld", "--character-set-server=utf8mb4", "--collation-server=utf8mb4_bin")
    mysql.start()
    mysql
  }

  val dataSource = new com.mysql.cj.jdbc.MysqlDataSource
  // allowMultiQueries lets updateRaw run multi-statement scripts
  dataSource.setURL(mysql.getJdbcUrl + "?allowMultiQueries=true")
  dataSource.setDatabaseName(mysql.getDatabaseName);
  dataSource.setUser(mysql.getUsername);
  dataSource.setPassword(mysql.getPassword);

  lazy val mysqlClient = new DbClient.DataSource(
    dataSource,
    config = new {} // all-defaults Config
  )

  def main(args: Array[String]): Unit = {
    mysqlClient.transaction { db =>
      db.updateRaw("""
      CREATE TABLE example_product (
          id INTEGER PRIMARY KEY AUTO_INCREMENT,
          kebab_case_name VARCHAR(256),
          name VARCHAR(256),
          price DECIMAL(20, 2)
      );
      """)

      val inserted = db.run(
        ExampleProduct.insert.batched(_.kebabCaseName, _.name, _.price)(
          ("face-mask", "Face Mask", 8.88),
          ("guitar", "Guitar", 300),
          ("socks", "Socks", 3.14),
          ("skate-board", "Skate Board", 123.45),
          ("camera", "Camera", 1000.00),
          ("cookie", "Cookie", 0.10)
        )
      )

      assert(inserted == 6)

      val result =
        db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name))

      assert(result == Seq("Camera", "Guitar", "Skate Board"))

      db.run(ExampleProduct.update(_.name === "Cookie").set(_.price := 11.0))

      db.run(ExampleProduct.delete(_.name === "Guitar"))

      val result2 =
        db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name))

      assert(result2 == Seq("Camera", "Skate Board", "Cookie"))

      // Projecting into a named tuple
      val result3 =
        db.run(
          ExampleProduct.select
            .filter(_.price > 10)
            .sortBy(_.price)
            .desc
            .map(p => (name = p.name, price = p.price))
        )

      assert(
        result3 == Seq(
          (name = "Camera", price = 1000.00),
          (name = "Skate Board", price = 123.45),
          (name = "Cookie", price = 11.0)
        )
      )
    }
  }
}
-------------------------------------------------------------------------------- /scalasql/namedtuples/test/src/example/SimpleTablePostgresExample.scala: --------------------------------------------------------------------------------
// duplicated from scalasql/test/src/example/PostgresExample.scala
package scalasql.namedtuples.example

import org.testcontainers.containers.PostgreSQLContainer

import scalasql.simple.{*, given}
import PostgresDialect.*

// Same walkthrough as the H2 example, but against a Dockerized Postgres instance.
object SimpleTablePostgresExample {

  case class ExampleProduct(
      id: Int,
      kebabCaseName: String,
      name: String,
      price: Double
  )

  object ExampleProduct extends SimpleTable[ExampleProduct]

  // The example PostgreSQLContainer comes from the library `org.testcontainers:postgresql:1.19.1`
  lazy val postgres = {
    println("Initializing Postgres")
    val pg = new PostgreSQLContainer("postgres:15-alpine")
    pg.start()
    pg
  }

  val dataSource = new org.postgresql.ds.PGSimpleDataSource
  dataSource.setURL(postgres.getJdbcUrl)
  dataSource.setDatabaseName(postgres.getDatabaseName);
  dataSource.setUser(postgres.getUsername);
  dataSource.setPassword(postgres.getPassword);

  lazy val postgresClient = new DbClient.DataSource(
    dataSource,
    config = new {} // all-defaults Config
  )

  def main(args: Array[String]): Unit = {
    postgresClient.transaction { db =>
      db.updateRaw("""
      CREATE TABLE example_product (
          id SERIAL PRIMARY KEY,
          kebab_case_name VARCHAR(256),
          name VARCHAR(256),
          price DECIMAL(20, 2)
      );
      """)

      val inserted = db.run(
        ExampleProduct.insert.batched(_.kebabCaseName, _.name, _.price)(
          ("face-mask", "Face Mask", 8.88),
          ("guitar", "Guitar", 300),
          ("socks", "Socks", 3.14),
          ("skate-board", "Skate Board", 123.45),
          ("camera", "Camera", 1000.00),
          ("cookie", "Cookie", 0.10)
        )
      )

      assert(inserted == 6)

      val result =
        db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name))

      assert(result == Seq("Camera", "Guitar", "Skate Board"))

      db.run(ExampleProduct.update(_.name === "Cookie").set(_.price := 11.0))

      db.run(ExampleProduct.delete(_.name === "Guitar"))

      val result2 =
        db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name))

      assert(result2 == Seq("Camera", "Skate Board", "Cookie"))

      // Projecting into a named tuple
      val result3 =
        db.run(
          ExampleProduct.select
            .filter(_.price > 10)
            .sortBy(_.price)
            .desc
            .map(p => (name = p.name, price = p.price))
        )

      assert(
        result3 == Seq(
          (name = "Camera", price = 1000.00),
          (name = "Skate Board", price = 123.45),
          (name = "Cookie", price = 11.0)
        )
      )
    }
  }
}
-------------------------------------------------------------------------------- /scalasql/namedtuples/test/src/example/SimpleTableSqliteExample.scala: --------------------------------------------------------------------------------
// duplicated from scalasql/test/src/example/SqliteExample.scala
package scalasql.namedtuples.example

import scalasql.simple.{*, given}
import SqliteDialect.*

// Same walkthrough as the H2 example, but against a temp-file Sqlite database.
object SimpleTableSqliteExample {

  case class ExampleProduct(
      id: Int,
      kebabCaseName: String,
      name: String,
      price: Double
  )

  object ExampleProduct extends SimpleTable[ExampleProduct]

  // The example Sqlite JDBC client comes from the library `org.xerial:sqlite-jdbc:3.43.0.0`
  val dataSource = new
org.sqlite.SQLiteDataSource() 20 | val tmpDb = java.nio.file.Files.createTempDirectory("sqlite") 21 | dataSource.setUrl(s"jdbc:sqlite:$tmpDb/file.db") 22 | lazy val sqliteClient = new DbClient.DataSource( 23 | dataSource, 24 | config = new {} 25 | ) 26 | 27 | def main(args: Array[String]): Unit = { 28 | sqliteClient.transaction { db => 29 | db.updateRaw(""" 30 | CREATE TABLE example_product ( 31 | id INTEGER PRIMARY KEY AUTOINCREMENT, 32 | kebab_case_name VARCHAR(256), 33 | name VARCHAR(256), 34 | price DECIMAL(20, 2) 35 | ); 36 | """) 37 | 38 | val inserted = db.run( 39 | ExampleProduct.insert.batched(_.kebabCaseName, _.name, _.price)( 40 | ("face-mask", "Face Mask", 8.88), 41 | ("guitar", "Guitar", 300), 42 | ("socks", "Socks", 3.14), 43 | ("skate-board", "Skate Board", 123.45), 44 | ("camera", "Camera", 1000.00), 45 | ("cookie", "Cookie", 0.10) 46 | ) 47 | ) 48 | 49 | assert(inserted == 6) 50 | 51 | val result = 52 | db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name)) 53 | 54 | assert(result == Seq("Camera", "Guitar", "Skate Board")) 55 | 56 | db.run(ExampleProduct.update(_.name === "Cookie").set(_.price := 11.0)) 57 | 58 | db.run(ExampleProduct.delete(_.name === "Guitar")) 59 | 60 | val result2 = 61 | db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name)) 62 | 63 | assert(result2 == Seq("Camera", "Skate Board", "Cookie")) 64 | 65 | val result3 = 66 | db.run( 67 | ExampleProduct.select 68 | .filter(_.price > 10) 69 | .sortBy(_.price) 70 | .desc 71 | .map(p => (name = p.name, price = p.price)) 72 | ) 73 | 74 | assert( 75 | result3 == Seq( 76 | (name = "Camera", price = 1000.00), 77 | (name = "Skate Board", price = 123.45), 78 | (name = "Cookie", price = 11.0) 79 | ) 80 | ) 81 | } 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /scalasql/namedtuples/test/src/example/foo.scala: 
-------------------------------------------------------------------------------- 1 | package scalasql.example 2 | 3 | // This file is a simple scratch-pad to demo ideas 4 | 5 | import scalasql.simple.{*, given} 6 | import H2Dialect.* 7 | 8 | case class Person(name: String, age: Int) extends SimpleTable.Nested 9 | object Person extends SimpleTable[Person]() 10 | 11 | case class City(name: String, population: Int, mayor: Person) 12 | object City extends SimpleTable[City]() 13 | 14 | def bar(db: DbApi) = 15 | val m = db.run( 16 | City.select.filter(_.name === "foo").map(c => (name = c.name, mayor = c.mayor)) 17 | ) 18 | val _: Seq[(name: String, mayor: Person)] = m // demonstrate that mayor maps back to case class. 19 | 20 | @main def foo = 21 | City.select.filter(_.name === "foo").map(_.mayor) 22 | City.insert.values(City("foo", 42, Person("bar", 23))) 23 | City.insert.columns(_.name := "foo") 24 | City.insert.batched(_.name, _.population, _.mayor.name)(("foo", 42, "bar"), ("baz", 23, "qux")) 25 | -------------------------------------------------------------------------------- /scalasql/operations/src/AggNumericOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | 3 | import scalasql.core.{Queryable, Expr, TypeMapper} 4 | import scalasql.core.Aggregatable 5 | import scalasql.core.SqlStr.SqlStringSyntax 6 | 7 | class AggNumericOps[V: Numeric: TypeMapper](v: Aggregatable[Expr[V]])( 8 | implicit qr: Queryable.Row[Expr[V], V] 9 | ) { 10 | 11 | /** Computes the sum of column values */ 12 | def sum: Expr[V] = v.aggregateExpr(expr => implicit ctx => sql"SUM($expr)") 13 | 14 | /** Finds the minimum value in a column */ 15 | def min: Expr[V] = v.aggregateExpr(expr => implicit ctx => sql"MIN($expr)") 16 | 17 | /** Finds the maximum value in a column */ 18 | def max: Expr[V] = v.aggregateExpr(expr => implicit ctx => sql"MAX($expr)") 19 | 20 | /** Computes the average value of a column */ 21 | def avg: 
Expr[V] = v.aggregateExpr(expr => implicit ctx => sql"AVG($expr)") 22 | } 23 | -------------------------------------------------------------------------------- /scalasql/operations/src/AggOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | 3 | import scalasql.core.DialectTypeMappers 4 | import scalasql.core.{Queryable, TypeMapper, Expr} 5 | import scalasql.core.Aggregatable 6 | import scalasql.core.SqlStr.SqlStringSyntax 7 | 8 | class AggOps[T](v: Aggregatable[T])(implicit qr: Queryable.Row[T, ?], dialect: DialectTypeMappers) { 9 | import dialect._ 10 | 11 | /** Counts the rows */ 12 | def size: Expr[Int] = v.aggregateExpr(_ => _ => sql"COUNT(1)") 13 | 14 | /** Computes the sum of column values */ 15 | def sumBy[V: TypeMapper](f: T => Expr[V])( 16 | implicit qr: Queryable.Row[Expr[V], V] 17 | ): Expr[V] = v.aggregateExpr(expr => implicit ctx => sql"SUM(${f(expr)})") 18 | 19 | /** Finds the minimum value in a column */ 20 | def minBy[V: TypeMapper](f: T => Expr[V])( 21 | implicit qr: Queryable.Row[Expr[V], V] 22 | ): Expr[V] = v.aggregateExpr(expr => implicit ctx => sql"MIN(${f(expr)})") 23 | 24 | /** Finds the maximum value in a column */ 25 | def maxBy[V: TypeMapper](f: T => Expr[V])( 26 | implicit qr: Queryable.Row[Expr[V], V] 27 | ): Expr[V] = v.aggregateExpr(expr => implicit ctx => sql"MAX(${f(expr)})") 28 | 29 | /** Computes the average value of a column */ 30 | def avgBy[V: TypeMapper](f: T => Expr[V])( 31 | implicit qr: Queryable.Row[Expr[V], V] 32 | ): Expr[V] = v.aggregateExpr(expr => implicit ctx => sql"AVG(${f(expr)})") 33 | 34 | /** Computes the sum of column values */ 35 | def sumByOpt[V: TypeMapper](f: T => Expr[V])( 36 | implicit qr: Queryable.Row[Expr[V], V] 37 | ): Expr[Option[V]] = v.aggregateExpr(expr => implicit ctx => sql"SUM(${f(expr)})") 38 | 39 | /** Finds the minimum value in a column */ 40 | def minByOpt[V: TypeMapper](f: T => Expr[V])( 41 | implicit qr: 
Queryable.Row[Expr[V], V] 42 | ): Expr[Option[V]] = v.aggregateExpr(expr => implicit ctx => sql"MIN(${f(expr)})") 43 | 44 | /** Finds the maximum value in a column */ 45 | def maxByOpt[V: TypeMapper](f: T => Expr[V])( 46 | implicit qr: Queryable.Row[Expr[V], V] 47 | ): Expr[Option[V]] = v.aggregateExpr(expr => implicit ctx => sql"MAX(${f(expr)})") 48 | 49 | /** Computes the average value of a column */ 50 | def avgByOpt[V: TypeMapper](f: T => Expr[V])( 51 | implicit qr: Queryable.Row[Expr[V], V] 52 | ): Expr[Option[V]] = v.aggregateExpr(expr => implicit ctx => sql"AVG(${f(expr)})") 53 | 54 | /** TRUE if any value in a set is TRUE */ 55 | def any(f: T => Expr[Boolean]): Expr[Boolean] = v 56 | .aggregateExpr(expr => implicit ctx => sql"ANY(${f(expr)})") 57 | 58 | /** TRUE if all values in a set are TRUE */ 59 | def all(f: T => Expr[Boolean]): Expr[Boolean] = v 60 | .aggregateExpr(expr => implicit ctx => sql"ALL(${f(expr)})") 61 | 62 | /** TRUE if the operand is equal to one of a list of expressions or one or more rows returned by a subquery */ 63 | // def contains(e: Expr[_]): Expr[Boolean] = v.queryExpr(implicit ctx => sql"ALL($e in $v})") 64 | } 65 | -------------------------------------------------------------------------------- /scalasql/operations/src/BitwiseFunctionOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | import scalasql.core.Expr 3 | import scalasql.core.SqlStr.SqlStringSyntax 4 | 5 | trait BitwiseFunctionOps[T] extends scalasql.operations.ExprNumericOps[T] { 6 | protected def v: Expr[T] 7 | override def &[V: Numeric](x: Expr[V]): Expr[T] = Expr { implicit ctx => sql"BITAND($v, $x)" } 8 | 9 | override def |[V: Numeric](x: Expr[V]): Expr[T] = Expr { implicit ctx => sql"BITOR($v, $x)" } 10 | 11 | override def unary_~ : Expr[T] = Expr { implicit ctx => sql"BITNOT($v)" } 12 | } 13 | -------------------------------------------------------------------------------- 
/scalasql/operations/src/CaseWhen.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | 3 | import scalasql.core.TypeMapper 4 | import scalasql.core.Expr 5 | import scalasql.core.SqlStr 6 | import scalasql.core.SqlStr.SqlStringSyntax 7 | import scalasql.core.Context 8 | class CaseWhen[T: TypeMapper](values: Seq[(Expr[Boolean], Expr[T])]) extends Expr[T] { 9 | 10 | def renderToSql0(implicit ctx: Context): SqlStr = { 11 | val whens = CaseWhen.renderWhens(values) 12 | sql"CASE $whens END" 13 | } 14 | 15 | def `else`(other: Expr[T]) = new CaseWhen.Else(values, other) 16 | } 17 | object CaseWhen { 18 | private def renderWhens[T](values: Seq[(Expr[Boolean], Expr[T])])(implicit ctx: Context) = SqlStr 19 | .join(values.map { case (when, then_) => sql"WHEN $when THEN $then_" }, sql" ") 20 | class Else[T: TypeMapper](values: Seq[(Expr[Boolean], Expr[T])], `else`: Expr[T]) 21 | extends Expr[T] { 22 | 23 | def renderToSql0(implicit ctx: Context): SqlStr = { 24 | val whens = renderWhens(values) 25 | sql"CASE $whens ELSE ${`else`} END" 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /scalasql/operations/src/ConcatOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | import scalasql.core.{Expr, SqlStr} 3 | import scalasql.core.SqlStr.SqlStringSyntax 4 | 5 | trait ConcatOps { 6 | 7 | /** 8 | * Concatenate all arguments. NULL arguments are ignored. 9 | */ 10 | def concat(values: Expr[?]*): Expr[String] = Expr { implicit ctx => 11 | sql"CONCAT(${SqlStr.join(values.map(v => sql"$v"), SqlStr.commaSep)})" 12 | } 13 | 14 | /** 15 | * Concatenate all but first arguments with separators. The first parameter is used 16 | * as a separator. NULL arguments are ignored. 
17 | */ 18 | def concatWs(sep: Expr[String], values: Expr[?]*): Expr[String] = Expr { implicit ctx => 19 | sql"CONCAT_WS($sep, ${SqlStr.join(values.map(v => sql"$v"), SqlStr.commaSep)})" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /scalasql/operations/src/ExprAggOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | 3 | import scalasql.core.Aggregatable 4 | import scalasql.core.{TypeMapper, Expr} 5 | 6 | abstract class ExprAggOps[T](v: Aggregatable[Expr[T]]) { 7 | 8 | /** Concatenates the given values into one string using the given separator */ 9 | def mkString(sep: Expr[String] = null)(implicit tm: TypeMapper[T]): Expr[String] 10 | } 11 | -------------------------------------------------------------------------------- /scalasql/operations/src/ExprBooleanOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | 3 | import scalasql.core.Expr 4 | import scalasql.core.SqlStr.SqlStringSyntax 5 | 6 | class ExprBooleanOps(v: Expr[Boolean]) { 7 | 8 | /** TRUE if both Boolean expressions are TRUE */ 9 | def &&(x: Expr[Boolean]): Expr[Boolean] = Expr { implicit ctx => sql"($v AND $x)" } 10 | 11 | /** TRUE if either Boolean expression is TRUE */ 12 | def ||(x: Expr[Boolean]): Expr[Boolean] = Expr { implicit ctx => sql"($v OR $x)" } 13 | 14 | /** Reverses the value of any other Boolean operator */ 15 | def unary_! 
: Expr[Boolean] = Expr { implicit ctx => sql"(NOT $v)" } 16 | } 17 | -------------------------------------------------------------------------------- /scalasql/operations/src/ExprNumericOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | 3 | import scalasql.core.TypeMapper 4 | import scalasql.core.Expr 5 | import scalasql.core.SqlStr.SqlStringSyntax 6 | 7 | class ExprNumericOps[T: Numeric](v: Expr[T])(implicit val m: TypeMapper[T]) { 8 | 9 | /** Addition */ 10 | def +[V: Numeric](x: Expr[V]): Expr[T] = Expr { implicit ctx => sql"($v + $x)" } 11 | 12 | /** Subtraction */ 13 | def -[V: Numeric](x: Expr[V]): Expr[T] = Expr { implicit ctx => sql"($v - $x)" } 14 | 15 | /** Multiplication */ 16 | def *[V: Numeric](x: Expr[V]): Expr[T] = Expr { implicit ctx => sql"($v * $x)" } 17 | 18 | /** Division */ 19 | def /[V: Numeric](x: Expr[V]): Expr[T] = Expr { implicit ctx => sql"($v / $x)" } 20 | 21 | /** Remainder */ 22 | def %[V: Numeric](x: Expr[V]): Expr[T] = Expr { implicit ctx => sql"MOD($v, $x)" } 23 | 24 | /** Bitwise AND */ 25 | def &[V: Numeric](x: Expr[V]): Expr[T] = Expr { implicit ctx => sql"($v & $x)" } 26 | 27 | /** Bitwise OR */ 28 | def |[V: Numeric](x: Expr[V]): Expr[T] = Expr { implicit ctx => sql"($v | $x)" } 29 | 30 | /** Bitwise XOR */ 31 | def ^[V: Numeric](x: Expr[V]): Expr[T] = Expr { implicit ctx => sql"($v ^ $x)" } 32 | 33 | /** TRUE if the operand is within a range */ 34 | def between(x: Expr[Int], y: Expr[Int]): Expr[Boolean] = Expr { implicit ctx => 35 | sql"$v BETWEEN $x AND $y" 36 | } 37 | 38 | /** Unary Positive Operator */ 39 | def unary_+ : Expr[T] = Expr { implicit ctx => sql"+$v" } 40 | 41 | /** Unary Negation Operator */ 42 | def unary_- : Expr[T] = Expr { implicit ctx => sql"-$v" } 43 | 44 | /** Unary Bitwise NOT Operator */ 45 | def unary_~ : Expr[T] = Expr { implicit ctx => sql"~$v" } 46 | 47 | /** Returns the absolute value of a number. 
*/ 48 | def abs: Expr[T] = Expr { implicit ctx => sql"ABS($v)" } 49 | 50 | /** Returns the remainder of one number divided into another. */ 51 | def mod[V: Numeric](x: Expr[V]): Expr[T] = Expr { implicit ctx => sql"MOD($v, $x)" } 52 | 53 | /** Rounds a noninteger value upwards to the next greatest integer. Returns an integer value unchanged. */ 54 | def ceil: Expr[T] = Expr { implicit ctx => sql"CEIL($v)" } 55 | 56 | /** Rounds a noninteger value downwards to the next least integer. Returns an integer value unchanged. */ 57 | def floor: Expr[T] = Expr { implicit ctx => sql"FLOOR($v)" } 58 | 59 | /** 60 | * The sign(X) function returns -1, 0, or +1 if the argument X is a numeric 61 | * value that is negative, zero, or positive, respectively. If the argument to sign(X) 62 | * is NULL or is a string or blob that cannot be losslessly converted into a number, 63 | * then sign(X) returns NULL. 64 | */ 65 | def sign: Expr[T] = Expr { implicit ctx => sql"SIGN($v)" } 66 | } 67 | -------------------------------------------------------------------------------- /scalasql/operations/src/ExprOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | 3 | import scalasql.core.TypeMapper 4 | import scalasql.core.Expr 5 | import scalasql.core.SqlStr 6 | import scalasql.core.SqlStr.SqlStringSyntax 7 | 8 | class ExprOps(v: Expr[?]) { 9 | 10 | /** 11 | * SQL-style Equals to, translates to SQL `=`. Returns `false` if both values are `NULL` 12 | */ 13 | def `=`[T](x: Expr[T]): Expr[Boolean] = Expr { implicit ctx => sql"($v = $x)" } 14 | 15 | /** 16 | * SQL-style Not equals to, translates to SQL `<>`. 
Returns `false` if both values are `NULL` 17 | */ 18 | def <>[T](x: Expr[T]): Expr[Boolean] = Expr { implicit ctx => sql"($v <> $x)" } 19 | 20 | /** Greater than */ 21 | def >[V](x: Expr[V]): Expr[Boolean] = Expr { implicit ctx => sql"($v > $x)" } 22 | 23 | /** Less than */ 24 | def <[V](x: Expr[V]): Expr[Boolean] = Expr { implicit ctx => sql"($v < $x)" } 25 | 26 | /** Greater than or equal to */ 27 | def >=[V](x: Expr[V]): Expr[Boolean] = Expr { implicit ctx => sql"($v >= $x)" } 28 | 29 | /** Less than or equal to */ 30 | def <=[V](x: Expr[V]): Expr[Boolean] = Expr { implicit ctx => sql"($v <= $x)" } 31 | 32 | /** Translates to a SQL `CAST` from one type to another */ 33 | def cast[V: TypeMapper]: Expr[V] = Expr { implicit ctx => 34 | sql"CAST($v AS ${SqlStr.raw(implicitly[TypeMapper[V]].castTypeString)})" 35 | } 36 | 37 | /** 38 | * Similar to [[cast]], but allows you to pass in an explicit [[SqlStr]] to 39 | * further specify the SQL type you want to cast to 40 | */ 41 | def castNamed[V: TypeMapper](typeName: SqlStr): Expr[V] = Expr { implicit ctx => 42 | sql"CAST($v AS $typeName)" 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /scalasql/operations/src/ExprOptionOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | 3 | import scalasql.core.TypeMapper 4 | import scalasql.core.DialectTypeMappers 5 | import scalasql.core.Expr 6 | import scalasql.core.Context 7 | import scalasql.core.SqlStr.SqlStringSyntax 8 | 9 | class ExprOptionOps[T: TypeMapper](v: Expr[Option[T]])(implicit dialect: DialectTypeMappers) { 10 | import dialect._ 11 | 12 | def isDefined: Expr[Boolean] = Expr { implicit ctx => sql"($v IS NOT NULL)" } 13 | 14 | def isEmpty: Expr[Boolean] = Expr { implicit ctx => sql"($v IS NULL)" } 15 | 16 | // SQL nulls tend to propagate automatically, so we do not need to explicitly 17 | // generate CASE/WHEN/THEN/ELSE syntax and can just use 
the final expression directly 18 | // and assume the nulls will propagate as necessary 19 | def map[V: TypeMapper](f: Expr[T] => Expr[V]): Expr[Option[V]] = Expr { implicit ctx => 20 | sql"${f(v.asInstanceOf[Expr[T]])}" 21 | } 22 | 23 | def flatMap[V: TypeMapper](f: Expr[T] => Expr[Option[V]]): Expr[Option[V]] = 24 | Expr { implicit ctx => sql"${f(v.asInstanceOf[Expr[T]])}" } 25 | 26 | def get: Expr[T] = Expr[T] { implicit ctx: Context => sql"$v" } 27 | 28 | def getOrElse(other: Expr[T]): Expr[T] = Expr[T] { implicit ctx: Context => 29 | sql"COALESCE($v, $other)" 30 | } 31 | 32 | def orElse(other: Expr[Option[T]]): Expr[Option[T]] = Expr { implicit ctx: Context => // no explicit type argument: the result must be Expr[Option[T]], so let the declared return type drive inference (an explicit [T] would not typecheck) 33 | sql"COALESCE($v, $other)" 34 | } 35 | 36 | def filter(other: Expr[T] => Expr[Boolean]): Expr[Option[T]] = new CaseWhen.Else[Option[T]]( 37 | Seq(other(Expr[T] { implicit ctx: Context => sql"$v" }) -> v), 38 | Expr { _ => sql"NULL" } 39 | ) 40 | } 41 | -------------------------------------------------------------------------------- /scalasql/operations/src/ExprStringLikeOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | import scalasql.core.Expr 3 | import scalasql.core.SqlStr.SqlStringSyntax 4 | 5 | abstract class ExprStringLikeOps[T](v: Expr[T]) { 6 | 7 | /** Concatenates two strings */ 8 | def +(x: Expr[T]): Expr[T] = Expr { implicit ctx => sql"($v || $x)" } 9 | 10 | /** TRUE if the operand matches a pattern */ 11 | def like(x: Expr[T]): Expr[Boolean] = Expr { implicit ctx => sql"($v LIKE $x)" } 12 | 13 | /** Returns an integer value representing the starting position of a string within the search string. */ 14 | def indexOf(x: Expr[T]): Expr[Int] 15 | 16 | /** Converts a string to all lowercase characters. */ 17 | def toLowerCase: Expr[T] = Expr { implicit ctx => sql"LOWER($v)" } 18 | 19 | /** Converts a string to all uppercase characters.
*/ 20 | def toUpperCase: Expr[T] = Expr { implicit ctx => sql"UPPER($v)" } 21 | 22 | /** Returns the number of characters in this string */ 23 | def length: Expr[Int] = Expr { implicit ctx => sql"LENGTH($v)" } 24 | 25 | /** Returns the number of bytes in this string */ 26 | def octetLength: Expr[Int] = Expr { implicit ctx => sql"OCTET_LENGTH($v)" } 27 | 28 | /** Returns a portion of a string. */ 29 | def substring(start: Expr[Int], length: Expr[Int]): Expr[T] = Expr { implicit ctx => 30 | sql"SUBSTRING($v, $start, $length)" 31 | } 32 | 33 | /** Returns whether or not this string starts with the other. */ 34 | def startsWith(other: Expr[T]): Expr[Boolean] = Expr { implicit ctx => 35 | sql"($v LIKE $other || '%')" 36 | } 37 | 38 | /** Returns whether or not this string ends with the other. */ 39 | def endsWith(other: Expr[T]): Expr[Boolean] = Expr { implicit ctx => 40 | sql"($v LIKE '%' || $other)" 41 | } 42 | 43 | /** Returns whether or not this string contains the other. */ 44 | def contains(other: Expr[T]): Expr[Boolean] = Expr { implicit ctx => 45 | sql"($v LIKE '%' || $other || '%')" 46 | } 47 | 48 | } 49 | -------------------------------------------------------------------------------- /scalasql/operations/src/ExprStringOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | 3 | import scalasql.core.Expr 4 | import scalasql.core.SqlStr.SqlStringSyntax 5 | 6 | trait ExprStringOps[T] { 7 | protected def v: Expr[T] 8 | 9 | /** Removes leading and trailing whitespace characters from a character string. */ 10 | def trim: Expr[T] = Expr { implicit ctx => sql"TRIM($v)" } 11 | 12 | /** Removes leading whitespace characters from a character string. */ 13 | def ltrim: Expr[T] = Expr { implicit ctx => sql"LTRIM($v)" } 14 | 15 | /** Removes trailing whitespace characters from a character string.
*/ 16 | def rtrim: Expr[T] = Expr { implicit ctx => sql"RTRIM($v)" } 17 | 18 | /** 19 | * The replace(X,Y,Z) function returns a string formed by substituting string Z 20 | * for every occurrence of string Y in string X 21 | */ 22 | def replace(y: Expr[T], z: Expr[T]): Expr[T] = Expr { implicit ctx => 23 | sql"REPLACE($v, $y, $z)" 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /scalasql/operations/src/ExprTypedOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | 3 | import scalasql.core.Expr 4 | import scalasql.core.SqlStr.SqlStringSyntax 5 | 6 | import scala.reflect.ClassTag 7 | 8 | class ExprTypedOps[T: ClassTag](v: Expr[T]) { 9 | 10 | protected def isNullable[T: ClassTag] = implicitly[ClassTag[T]].runtimeClass == classOf[Option[?]] 11 | 12 | /** 13 | * Scala-style Equals to, returns `true` if both values are `NULL`. 14 | * Translates to `IS NOT DISTINCT FROM` if both values are nullable, 15 | * otherwise translates to `=` 16 | */ 17 | def ===[V: ClassTag](x: Expr[V]): Expr[Boolean] = Expr { implicit ctx => 18 | (isNullable[T], isNullable[V]) match { 19 | case (true, true) => sql"($v IS NOT DISTINCT FROM $x)" 20 | case _ => sql"($v = $x)" 21 | } 22 | } 23 | 24 | /** 25 | * Scala-style Not equals to, returns `false` if both values are `NULL` 26 | * Translates to `IS DISTINCT FROM` if both values are nullable, 27 | * otherwise translates to `<>` 28 | */ 29 | def !==[V: ClassTag](x: Expr[V]): Expr[Boolean] = Expr { implicit ctx => 30 | (isNullable[T], isNullable[V]) match { 31 | case (true, true) => sql"($v IS DISTINCT FROM $x)" 32 | case _ => sql"($v <> $x)" 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /scalasql/operations/src/HyperbolicMathOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | import 
scalasql.core.Expr 3 | import scalasql.core.SqlStr.SqlStringSyntax 4 | 5 | trait HyperbolicMathOps { 6 | 7 | /** Calculate the hyperbolic sine */ 8 | def sinh[T: Numeric](v: Expr[T]): Expr[T] = Expr { implicit ctx => sql"SINH($v)" } 9 | 10 | /** Calculate the hyperbolic cosine */ 11 | def cosh[T: Numeric](v: Expr[T]): Expr[T] = Expr { implicit ctx => sql"COSH($v)" } 12 | 13 | /** Calculate the hyperbolic tangent */ 14 | def tanh[T: Numeric](v: Expr[T]): Expr[T] = Expr { implicit ctx => sql"TANH($v)" } 15 | } 16 | -------------------------------------------------------------------------------- /scalasql/operations/src/MathOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | import scalasql.core.SqlStr.SqlStringSyntax 3 | import scalasql.core.Expr 4 | 5 | trait MathOps { 6 | 7 | /** Converts radians to degrees */ 8 | def degrees[T: Numeric](x: Expr[T]): Expr[Double] = Expr { implicit ctx => sql"DEGREES($x)" } 9 | 10 | /** Converts degrees to radians */ 11 | def radians[T: Numeric](x: Expr[T]): Expr[Double] = Expr { implicit ctx => sql"RADIANS($x)" } 12 | 13 | /** `x` raised to the power of `y` */ 14 | def power[T: Numeric](x: Expr[T], y: Expr[T]): Expr[Double] = Expr { implicit ctx => 15 | sql"POWER($x, $y)" 16 | } 17 | 18 | /** Raises a value to the power of the mathematical constant known as e. */ 19 | def exp[T: Numeric](x: Expr[T]): Expr[Double] = Expr { implicit ctx => sql"EXP($x)" } 20 | 21 | /** Returns the natural logarithm of a number. */ 22 | def ln[T: Numeric](v: Expr[T]): Expr[Double] = Expr { implicit ctx => sql"LN($v)" } 23 | 24 | /** Logarithm of x to base b */ 25 | def log[T: Numeric](b: Expr[Int], x: Expr[T]): Expr[Double] = Expr { implicit ctx => 26 | sql"LOG($b, $x)" 27 | } 28 | 29 | /** Base 10 logarithm */ 30 | def log10[T: Numeric](x: Expr[T]): Expr[Double] = Expr { implicit ctx => sql"LOG10($x)" } 31 | 32 | /** Computes the square root of a number. 
*/ 33 | def sqrt[T: Numeric](v: Expr[T]): Expr[Double] = Expr { implicit ctx => sql"SQRT($v)" } 34 | 35 | /** Calculate the trigonometric sine */ 36 | def sin[T: Numeric](v: Expr[T]): Expr[Double] = Expr { implicit ctx => sql"SIN($v)" } 37 | 38 | /** Calculate the trigonometric cosine */ 39 | def cos[T: Numeric](v: Expr[T]): Expr[Double] = Expr { implicit ctx => sql"COS($v)" } 40 | 41 | /** Calculate the trigonometric tangent */ 42 | def tan[T: Numeric](v: Expr[T]): Expr[Double] = Expr { implicit ctx => sql"TAN($v)" } 43 | 44 | /** Calculate the arc sine */ 45 | def asin[T: Numeric](v: Expr[T]): Expr[Double] = Expr { implicit ctx => sql"ASIN($v)" } 46 | 47 | /** Calculate the arc cosine */ 48 | def acos[T: Numeric](v: Expr[T]): Expr[Double] = Expr { implicit ctx => sql"ACOS($v)" } 49 | 50 | /** Calculate the arc tangent */ 51 | def atan[T: Numeric](v: Expr[T]): Expr[Double] = Expr { implicit ctx => sql"ATAN($v)" } 52 | 53 | /** Calculate the arc tangent */ 54 | def atan2[T: Numeric](v: Expr[T], y: Expr[T]): Expr[Double] = Expr { implicit ctx => 55 | sql"ATAN2($v, $y)" 56 | } 57 | 58 | /** Returns the value of Pi */ 59 | def pi: Expr[Double] = Expr { _ => sql"PI()" } 60 | 61 | } 62 | -------------------------------------------------------------------------------- /scalasql/operations/src/PadOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | import scalasql.core.Expr 3 | import scalasql.core.SqlStr.SqlStringSyntax 4 | 5 | trait PadOps { 6 | protected def v: Expr[?] 
7 | 8 | def rpad(length: Expr[Int], fill: Expr[String]): Expr[String] = Expr { implicit ctx => 9 | sql"RPAD($v, $length, $fill)" 10 | } 11 | 12 | def lpad(length: Expr[Int], fill: Expr[String]): Expr[String] = Expr { implicit ctx => 13 | sql"LPAD($v, $length, $fill)" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /scalasql/operations/src/TrimOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | import scalasql.core.Expr 3 | import scalasql.core.SqlStr.SqlStringSyntax 4 | 5 | trait TrimOps { 6 | protected def v: Expr[?] 7 | 8 | /** 9 | * Trim [[x]]s from the left hand side of the string [[v]] 10 | */ 11 | def ltrim(x: Expr[String]): Expr[String] = Expr { implicit ctx => sql"LTRIM($v, $x)" } 12 | 13 | /** 14 | * Trim [[x]]s from the right hand side of the string [[v]] 15 | */ 16 | def rtrim(x: Expr[String]): Expr[String] = Expr { implicit ctx => sql"RTRIM($v, $x)" } 17 | } 18 | -------------------------------------------------------------------------------- /scalasql/operations/src/readme.md: -------------------------------------------------------------------------------- 1 | List of ANSI SQL operators http://users.atw.hu/sqlnut/sqlnut2-chp-2-sect-2.html 2 | List of ANSI SQL scalar functions http://users.atw.hu/sqlnut/sqlnut2-chp-4-sect-4.html 3 | List of ANSI SQL aggregate functions https://www.oreilly.com/library/view/sql-in-a/9780596155322/ch04s02.html 4 | 5 | SQLite: 6 | * Scalar Functions https://www.sqlite.org/lang_corefunc.html 7 | * Math Functions https://www.sqlite.org/lang_mathfunc.html 8 | * Aggregate Functions https://www.sqlite.org/lang_aggfunc.html 9 | 10 | Postgres: 11 | * https://www.postgresql.org/docs/current/functions.html 12 | 13 | MySQL: 14 | * https://dev.mysql.com/doc/refman/8.0/en/functions.html -------------------------------------------------------------------------------- /scalasql/query/src-2/TableMacro.scala: 
-------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.Sc 4 | import scala.language.experimental.macros 5 | 6 | object TableMacros { 7 | type Expr[T] = scalasql.core.Expr[T] 8 | def applyImpl[V[_[_]]]( 9 | c: scala.reflect.macros.blackbox.Context 10 | )(implicit caseClassType: c.WeakTypeTag[V[Any]]): c.Expr[Table.Metadata[V]] = { 11 | import c.universe._ 12 | 13 | val tableRef = TermName(c.freshName("tableRef")) 14 | val constructor = weakTypeOf[V[Any]].members.find(_.isConstructor).head 15 | val constructorParameters = constructor.info.paramLists.head 16 | 17 | def isTypeParamType(param: Symbol) = { 18 | param.info.typeSymbol.toString != caseClassType.tpe.typeParams.head.toString 19 | } 20 | 21 | val columnParams = for (param <- constructorParameters) yield { 22 | val name = param.name 23 | 24 | if (isTypeParamType(param)) { 25 | q"implicitly[scalasql.Table.ImplicitMetadata[${param.info.typeSymbol}]].value.vExpr($tableRef, dialect)" 26 | } else { 27 | q""" 28 | new _root_.scalasql.Column[${param.info.typeArgs.head}]( 29 | $tableRef, 30 | _root_.scalasql.Table.columnNameOverride($tableRef.value)(${name.toString}) 31 | ) 32 | """ 33 | } 34 | } 35 | 36 | def subParam(paramInfo: Type, tpe: Type) = { 37 | paramInfo.substituteTypes( 38 | List(constructor.info.resultType.typeArgs.head.typeSymbol), 39 | List( 40 | tpe 41 | .asInstanceOf[ExistentialType] 42 | .underlying 43 | .asInstanceOf[TypeRef] 44 | .sym 45 | .info 46 | ) 47 | ) 48 | } 49 | 50 | val queryables = for (param <- constructorParameters) yield { 51 | val tpe = subParam(param.info, typeOf[Sc[_]]) 52 | val tpe2 = subParam(param.info, typeOf[TableMacros.Expr[_]]) 53 | q"implicitly[_root_.scalasql.Queryable.Row[$tpe2, $tpe]]" 54 | } 55 | 56 | val constructParams = for ((param, i) <- constructorParameters.zipWithIndex) yield { 57 | val tpe = subParam(param.info, typeOf[Sc[_]]) 58 | val tpe2 = subParam(param.info, 
typeOf[TableMacros.Expr[_]]) 59 | q"queryable[$tpe2, $tpe]($i).construct(args): _root_.scalasql.Sc[$tpe]" 60 | } 61 | 62 | val deconstructParams = for ((param, i) <- constructorParameters.zipWithIndex) yield { 63 | val tpe = subParam(param.info, typeOf[Sc[_]]) 64 | val tpe2 = subParam(param.info, typeOf[TableMacros.Expr[_]]) 65 | q"queryable[$tpe2, $tpe]($i).deconstruct(r.${TermName(param.name.toString)})" 66 | } 67 | 68 | val flattenLists = for (param <- constructorParameters) yield { 69 | if (isTypeParamType(param)) { 70 | q"implicitly[scalasql.Table.ImplicitMetadata[${param.info.typeSymbol}]].value.walkLabels0()" 71 | } else { 72 | val name = param.name 73 | q"_root_.scala.List(${name.toString})" 74 | } 75 | } 76 | 77 | val flattenExprs = for ((param, i) <- constructorParameters.zipWithIndex) yield { 78 | val tpe = subParam(param.info, typeOf[Sc[_]]) 79 | val tpe2 = subParam(param.info, typeOf[TableMacros.Expr[_]]) 80 | q"queryable[$tpe2, $tpe]($i).walkExprs(table.${TermName(param.name.toString)})" 81 | } 82 | 83 | import compat._ 84 | val typeRef = caseClassType.tpe.resultType.asInstanceOf[TypeRef] 85 | val exprRef = TypeRef( 86 | pre = typeRef.pre, 87 | sym = typeRef.sym, 88 | args = weakTypeOf[V[TableMacros.Expr]].typeArgs 89 | ) 90 | val idRef = TypeRef( 91 | pre = typeRef.pre, 92 | sym = typeRef.sym, 93 | args = weakTypeOf[V[Sc]].typeArgs 94 | ) 95 | c.Expr[Table.Metadata[V]](q"""{ 96 | 97 | new _root_.scalasql.query.Table.Metadata( 98 | (dialect, n) => { 99 | import dialect._; 100 | n match{ case ..${queryables.zipWithIndex.map { case (q, i) => cq"$i => $q" }} } 101 | }, 102 | () => ${flattenLists.reduceLeft((l, r) => q"$l ++ $r")}, 103 | (walkLabels0, dialect, queryable) => { 104 | import dialect._ 105 | 106 | new _root_.scalasql.query.Table.Internal.TableQueryable( 107 | walkLabels0, 108 | (table: $exprRef) => ${flattenExprs.reduceLeft((l, r) => q"$l ++ $r")}, 109 | construct0 = (args: _root_.scalasql.Queryable.ResultSetIterator) => new 
$caseClassType(..$constructParams), 110 | deconstruct0 = (r: $idRef) => new $caseClassType(..$deconstructParams) 111 | ) 112 | }, 113 | ($tableRef: _root_.scalasql.query.TableRef, dialect, queryable) => { 114 | import dialect._ 115 | 116 | new $caseClassType(..$columnParams) 117 | } 118 | ) 119 | }""") 120 | } 121 | 122 | } 123 | trait TableMacros { 124 | implicit def initTableMetadata[V[_[_]]]: Table.Metadata[V] = macro TableMacros.applyImpl[V] 125 | } 126 | -------------------------------------------------------------------------------- /scalasql/query/src/Aggregate.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.{Queryable, SqlStr, Context} 4 | 5 | class Aggregate[Q, R]( 6 | toSqlStr0: Context => SqlStr, 7 | construct0: Queryable.ResultSetIterator => R, 8 | protected val expr: Q, 9 | protected val qr: Queryable[Q, R] 10 | ) extends Query.DelegateQueryable[Q, R] { 11 | 12 | protected override def queryIsSingleRow: Boolean = true 13 | private[scalasql] def renderSql(ctx: Context) = toSqlStr0(ctx) 14 | 15 | override protected def queryConstruct(args: Queryable.ResultSetIterator): R = construct0(args) 16 | } 17 | -------------------------------------------------------------------------------- /scalasql/query/src/Column.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.{Context, Expr, SqlStr, TypeMapper} 4 | import scalasql.core.SqlStr.SqlStringSyntax 5 | 6 | /** 7 | * A variant of [[Expr]] representing a raw table column; allows assignment in updates 8 | * and inserts 9 | */ 10 | class Column[T](tableRef: TableRef, val name: String)(implicit val mappedType: TypeMapper[T]) 11 | extends Expr[T] { 12 | def :=(v: Expr[T]) = Column.Assignment(this, v) 13 | 14 | def renderToSql0(implicit ctx: Context) = { 15 | val suffix = SqlStr.raw(ctx.config.columnNameMapper(name)) 16 | 
ctx.fromNaming.get(tableRef) match { 17 | case Some("") => suffix 18 | case Some(s) => SqlStr.raw(s) + sql".$suffix" 19 | case None => 20 | sql"SCALASQL_MISSING_TABLE_${SqlStr.raw(Table.name(tableRef.value))}.$suffix" 21 | } 22 | } 23 | } 24 | object Column { 25 | case class Assignment[T](column: Column[T], value: Expr[T]) 26 | } 27 | -------------------------------------------------------------------------------- /scalasql/query/src/Delete.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.DialectTypeMappers 4 | import scalasql.core.Context 5 | import scalasql.core.{Queryable, SqlStr, Expr} 6 | import scalasql.core.SqlStr.SqlStringSyntax 7 | 8 | /** 9 | * A SQL `DELETE` query 10 | */ 11 | trait Delete[Q] extends Query.ExecuteUpdate[Int] with Returning.Base[Q] 12 | 13 | object Delete { 14 | class Impl[Q](val expr: Q, filter: Expr[Boolean], val table: TableRef)( 15 | implicit dialect: DialectTypeMappers 16 | ) extends Delete[Q] { 17 | import dialect._ 18 | 19 | private[scalasql] def renderSql(ctx: Context) = new Renderer(table, filter, ctx).render() 20 | 21 | protected def queryConstruct(args: Queryable.ResultSetIterator): Int = args.get(IntType) 22 | } 23 | 24 | class Renderer(table: TableRef, expr: Expr[Boolean], prevContext: Context) { 25 | implicit val implicitCtx: Context = Context.compute(prevContext, Nil, Some(table)) 26 | lazy val tableNameStr = 27 | SqlStr.raw(Table.fullIdentifier(table.value)) 28 | 29 | def render() = sql"DELETE FROM $tableNameStr WHERE $expr" 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /scalasql/query/src/FlatJoin.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.{Context, JoinNullable, Queryable, Expr} 4 | 5 | object FlatJoin { 6 | trait Rhs[Q2, R2] 7 | class MapResult[Q, Q2, R, R2]( 8 | val prefix: 
String, 9 | val from: Context.From, 10 | val on: Option[Expr[Boolean]], 11 | val qr: Queryable.Row[Q2, R2], 12 | val f: Q2, 13 | val where: Seq[Expr[Boolean]] 14 | ) extends Rhs[Q2, R2] 15 | 16 | class FlatMapResult[Q, Q2, R, R2]( 17 | val prefix: String, 18 | val from: Context.From, 19 | val on: Option[Expr[Boolean]], 20 | val qr: Queryable.Row[Q2, R2], 21 | val f: Rhs[Q2, R2], 22 | val where: Seq[Expr[Boolean]] 23 | ) extends Rhs[Q2, R2] 24 | 25 | class Mapper[Q, Q2, R, R2]( 26 | prefix: String, 27 | from: Context.From, 28 | expr: Q, 29 | on: Option[Expr[Boolean]], 30 | where: Seq[Expr[Boolean]] 31 | ) { 32 | def map(f: Q => Q2)(implicit qr: Queryable.Row[Q2, R2]): MapResult[Q, Q2, R, R2] = { 33 | new MapResult[Q, Q2, R, R2](prefix, from, on, qr, f(expr), where) 34 | } 35 | 36 | def flatMap( 37 | f: Q => Rhs[Q2, R2] 38 | )(implicit qr: Queryable.Row[Q2, R2]): FlatMapResult[Q, Q2, R, R2] = { 39 | new FlatMapResult[Q, Q2, R, R2](prefix, from, on, qr, f(expr), where) 40 | } 41 | 42 | def filter(x: Q => Expr[Boolean]): Mapper[Q, Q2, R, R2] = withFilter(x) 43 | def withFilter(x: Q => Expr[Boolean]): Mapper[Q, Q2, R, R2] = 44 | new Mapper(prefix, from, expr, on, where ++ Seq(x(expr))) 45 | } 46 | class NullableMapper[Q, Q2, R, R2]( 47 | prefix: String, 48 | from: Context.From, 49 | expr: JoinNullable[Q], 50 | on: Option[Expr[Boolean]], 51 | where: Seq[Expr[Boolean]] 52 | ) { 53 | def lateral = new NullableMapper[Q, Q2, R, R2](prefix + " LATERAL", from, expr, on, where) 54 | def map( 55 | f: JoinNullable[Q] => Q2 56 | )(implicit qr: Queryable.Row[Q2, R2]): MapResult[Q, Q2, R, R2] = { 57 | new MapResult[Q, Q2, R, R2](prefix, from, on, qr, f(expr), where) 58 | } 59 | 60 | def flatMap( 61 | f: JoinNullable[Q] => Rhs[Q2, R2] 62 | )(implicit qr: Queryable.Row[Q2, R2]): FlatMapResult[Q, Q2, R, R2] = { 63 | new FlatMapResult[Q, Q2, R, R2](prefix, from, on, qr, f(expr), where) 64 | } 65 | 66 | def filter(x: JoinNullable[Q] => Expr[Boolean]): NullableMapper[Q, Q2, R, R2] = 
withFilter(x) 67 | def withFilter(x: JoinNullable[Q] => Expr[Boolean]): NullableMapper[Q, Q2, R, R2] = 68 | new NullableMapper(prefix, from, expr, on, where ++ Seq(x(expr))) // pass `prefix` through (not a hardcoded "LEFT JOIN") so the " LATERAL" suffix added by `.lateral` survives filtering 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /scalasql/query/src/From.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.{Context, Expr, ExprsToSql, LiveExprs, Queryable, SqlStr} 4 | import scalasql.core.Context.From 5 | import scalasql.core.SqlStr.SqlStringSyntax 6 | 7 | /** 8 | * Models a SQL `FROM` clause 9 | */ 10 | class TableRef(val value: Table.Base) extends From { 11 | override def toString = s"TableRef(${Table.name(value)})" 12 | 13 | def fromRefPrefix(prevContext: Context) = prevContext.config.tableNameMapper(Table.name(value)) 14 | 15 | def fromExprAliases(prevContext: Context): Seq[(Expr.Identity, SqlStr)] = Nil 16 | 17 | def renderSql(name: SqlStr, prevContext: Context, liveExprs: LiveExprs) = { 18 | val resolvedTable = Table.fullIdentifier(value)(prevContext) 19 | SqlStr.raw(resolvedTable) + sql" " + name // raw() wraps only the identifier String; `name` is already a structured SqlStr — concatenating it into a String first would flatten it via toString and corrupt the rendered SQL 20 | } 21 | } 22 | 23 | /** 24 | * Models a subquery: a `SELECT`, `VALUES`, nested `WITH`, etc.
25 | */ 26 | class SubqueryRef(val value: SubqueryRef.Wrapped) extends From { 27 | def fromRefPrefix(prevContext: Context): String = "subquery" 28 | 29 | def fromExprAliases(prevContext: Context) = SubqueryRef.Wrapped.exprAliases(value, prevContext) 30 | 31 | def renderSql(name: SqlStr, prevContext: Context, liveExprs: LiveExprs) = { 32 | val renderSql = SubqueryRef.Wrapped.renderer(value, prevContext) 33 | sql"(${renderSql.render(liveExprs)}) $name" 34 | } 35 | } 36 | 37 | object SubqueryRef { 38 | 39 | trait Wrapped { 40 | protected def selectExprAliases(prevContext: Context): Seq[(Expr.Identity, SqlStr)] 41 | protected def selectRenderer(prevContext: Context): Wrapped.Renderer 42 | } 43 | object Wrapped { 44 | def exprAliases(s: Wrapped, prevContext: Context) = s.selectExprAliases(prevContext) 45 | def renderer(s: Wrapped, prevContext: Context) = s.selectRenderer(prevContext) 46 | 47 | trait Renderer { 48 | def render(liveExprs: LiveExprs): SqlStr 49 | } 50 | } 51 | } 52 | 53 | class WithCteRef(walked: Queryable.Walked) extends From { 54 | def fromRefPrefix(prevContext: Context) = "cte" 55 | 56 | def fromExprAliases(prevContext: Context) = { 57 | ExprsToSql.selectColumnReferences(walked, prevContext) 58 | } 59 | 60 | def renderSql(name: SqlStr, prevContext: Context, liveExprs: LiveExprs) = { 61 | name 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /scalasql/query/src/GetGeneratedKeys.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.SqlStr.Renderable 4 | import scalasql.core.{Context, Expr, Queryable, SqlStr, WithSqlExpr} 5 | 6 | /** 7 | * Represents an [[Insert]] query that you want to call `JdbcStatement.getGeneratedKeys` 8 | * on to retrieve any auto-generated primary key values from the results 9 | */ 10 | trait GetGeneratedKeys[Q, R] extends Query[Seq[R]] { 11 | def single: Query.Single[R] = new 
Query.Single(this) 12 | } 13 | 14 | object GetGeneratedKeys { 15 | 16 | class Impl[Q, R](base: Returning.InsertBase[Q])(implicit qr: Queryable.Row[?, R]) 17 | extends GetGeneratedKeys[Q, R] { 18 | 19 | def expr = WithSqlExpr.get(base) 20 | override protected def queryConstruct(args: Queryable.ResultSetIterator): Seq[R] = { 21 | Seq(qr.construct(args)) 22 | } 23 | 24 | protected def queryWalkLabels(): Seq[List[String]] = Nil 25 | protected def queryWalkExprs(): Seq[Expr[?]] = Nil 26 | protected override def queryIsSingleRow = false 27 | protected override def queryIsExecuteUpdate = true 28 | 29 | override private[scalasql] def renderSql(ctx: Context): SqlStr = Renderable.renderSql(base)(ctx) 30 | 31 | override protected def queryGetGeneratedKeys: Option[Queryable.Row[?, ?]] = Some(qr) 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /scalasql/query/src/Insert.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.{DialectTypeMappers, Queryable, Expr, WithSqlExpr} 4 | 5 | /** 6 | * A SQL `INSERT` query 7 | */ 8 | trait Insert[V[_[_]], R] extends WithSqlExpr[V[Column]] with scalasql.generated.Insert[V, R] { 9 | def table: TableRef 10 | def qr: Queryable[V[Column], R] 11 | def select[C, R2](columns: V[Expr] => C, select: Select[C, R2]): InsertSelect[V, C, R, R2] 12 | 13 | def columns(f: (V[Column] => Column.Assignment[?])*): InsertColumns[V, R] 14 | def values(f: R*): InsertValues[V, R] 15 | 16 | def batched[T1](f1: V[Column] => Column[T1])(items: Expr[T1]*): InsertColumns[V, R] 17 | 18 | } 19 | 20 | object Insert { 21 | class Impl[V[_[_]], R](val expr: V[Column], val table: TableRef)( 22 | implicit val qr: Queryable.Row[V[Column], R], 23 | dialect: DialectTypeMappers 24 | ) extends Insert[V, R] 25 | with scalasql.generated.InsertImpl[V, R] { 26 | 27 | def newInsertSelect[C, R, R2]( 28 | insert: Insert[V, R], 29 | columns: C, 30 | 
select: Select[C, R2] 31 | ): InsertSelect[V, C, R, R2] = { new InsertSelect.Impl(insert, columns, select) } 32 | 33 | def newInsertValues[R]( 34 | insert: Insert[V, R], 35 | columns: Seq[Column[?]], 36 | valuesLists: Seq[Seq[Expr[?]]] 37 | )(implicit qr: Queryable[V[Column], R]): InsertColumns[V, R] = { 38 | new InsertColumns.Impl(insert, columns, valuesLists) 39 | } 40 | 41 | def select[C, R2](columns: V[Expr] => C, select: Select[C, R2]): InsertSelect[V, C, R, R2] = { 42 | newInsertSelect(this, columns(expr.asInstanceOf[V[Expr]]), select) 43 | } 44 | 45 | def columns(f: (V[Column] => Column.Assignment[?])*): InsertColumns[V, R] = { 46 | val kvs = f.map(_(expr)) 47 | newInsertValues(this, columns = kvs.map(_.column), valuesLists = Seq(kvs.map(_.value))) 48 | } 49 | 50 | def batched[T1](f1: V[Column] => Column[T1])(items: Expr[T1]*): InsertColumns[V, R] = { 51 | newInsertValues(this, columns = Seq(f1(expr)), valuesLists = items.map(Seq(_))) 52 | } 53 | 54 | override def values(values: R*): InsertValues[V, R] = 55 | new InsertValues.Impl(this, values, dialect, qr, Nil) 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /scalasql/query/src/InsertColumns.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.{Context, DialectTypeMappers, Queryable, Expr, SqlStr, WithSqlExpr} 4 | import scalasql.core.SqlStr.{Renderable, SqlStringSyntax} 5 | 6 | /** 7 | * A SQL `INSERT VALUES` query 8 | */ 9 | trait InsertColumns[V[_[_]], R] 10 | extends Returning.InsertBase[V[Column]] 11 | with Query.ExecuteUpdate[Int] { 12 | def columns: Seq[Column[?]] 13 | def valuesLists: Seq[Seq[Expr[?]]] 14 | } 15 | object InsertColumns { 16 | class Impl[V[_[_]], R]( 17 | insert: Insert[V, R], 18 | val columns: Seq[Column[?]], 19 | val valuesLists: Seq[Seq[Expr[?]]] 20 | )(implicit val qr: Queryable[V[Column], R], dialect: DialectTypeMappers) 21 | extends 
InsertColumns[V, R] { 22 | import dialect.{dialectSelf => _, _} 23 | def table = insert.table 24 | protected def expr: V[Column] = WithSqlExpr.get(insert) 25 | 26 | private[scalasql] override def renderSql(ctx: Context) = 27 | new Renderer(columns, ctx, valuesLists, Table.fullIdentifier(table.value)(ctx)).render() 28 | 29 | override protected def queryConstruct(args: Queryable.ResultSetIterator): Int = 30 | args.get(IntType) 31 | } 32 | 33 | class Renderer( 34 | columns0: Seq[Column[?]], 35 | prevContext: Context, 36 | valuesLists: Seq[Seq[Expr[?]]], 37 | tableName: String 38 | ) { 39 | 40 | implicit lazy val ctx: Context = prevContext 41 | lazy val columns = SqlStr 42 | .join(columns0.map(c => SqlStr.raw(ctx.config.columnNameMapper(c.name))), SqlStr.commaSep) 43 | lazy val values = SqlStr.join( 44 | valuesLists 45 | .map(values => 46 | sql"(" + SqlStr.join(values.map(Renderable.renderSql(_)), SqlStr.commaSep) + sql")" 47 | ), 48 | SqlStr.commaSep 49 | ) 50 | def render() = { 51 | sql"INSERT INTO ${SqlStr.raw(tableName)} ($columns) VALUES $values" 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /scalasql/query/src/InsertSelect.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.{Context, DialectTypeMappers, Queryable, Expr, SqlStr, WithSqlExpr} 4 | import scalasql.core.SqlStr.{Renderable, SqlStringSyntax} 5 | 6 | /** 7 | * A SQL `INSERT SELECT` query 8 | */ 9 | trait InsertSelect[V[_[_]], C, R, R2] 10 | extends Returning.InsertBase[V[Column]] 11 | with Query.ExecuteUpdate[Int] 12 | 13 | object InsertSelect { 14 | class Impl[V[_[_]], C, R, R2](insert: Insert[V, R], columns: C, select: Select[C, R2])( 15 | implicit dialect: DialectTypeMappers 16 | ) extends InsertSelect[V, C, R, R2] { 17 | import dialect.{dialectSelf => _, _} 18 | protected def expr = WithSqlExpr.get(insert) 19 | 20 | def table = insert.table 21 | 22 | 
private[scalasql] override def renderSql(ctx: Context) = 23 | new Renderer( 24 | select, 25 | select.qr.walkExprs(columns), 26 | ctx, 27 | Table.fullIdentifier(table.value)(ctx) 28 | ) 29 | .render() 30 | 31 | override protected def queryConstruct(args: Queryable.ResultSetIterator): Int = 32 | args.get(IntType) 33 | } 34 | 35 | class Renderer( 36 | select: Select[?, ?], 37 | exprs: Seq[Expr[?]], 38 | prevContext: Context, 39 | tableName: String 40 | ) { 41 | 42 | implicit lazy val ctx: Context = prevContext 43 | 44 | lazy val columns = SqlStr.join( 45 | exprs 46 | .map(_.asInstanceOf[Column[?]]) 47 | .map(c => SqlStr.raw(ctx.config.columnNameMapper(c.name))), 48 | SqlStr.commaSep 49 | ) 50 | 51 | lazy val selectSql = Renderable.renderSql(select).withCompleteQuery(false) 52 | 53 | lazy val tableNameStr = SqlStr.raw(tableName) 54 | def render() = sql"INSERT INTO $tableNameStr ($columns) $selectSql" 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /scalasql/query/src/InsertValues.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.{Context, DialectTypeMappers, Expr, Queryable, SqlStr, WithSqlExpr} 4 | import scalasql.core.SqlStr.SqlStringSyntax 5 | 6 | trait InsertValues[V[_[_]], R] 7 | extends Returning.InsertBase[V[Column]] 8 | with Query.ExecuteUpdate[Int] { 9 | def skipColumns(x: (V[Column] => Column[?])*): InsertValues[V, R] 10 | } 11 | object InsertValues { 12 | class Impl[V[_[_]], R]( 13 | insert: Insert[V, R], 14 | values: Seq[R], 15 | dialect: DialectTypeMappers, 16 | qr: Queryable.Row[V[Column], R], 17 | skippedColumns: Seq[Column[?]] 18 | ) extends InsertValues[V, R] { 19 | 20 | def table = insert.table 21 | protected def expr: V[Column] = WithSqlExpr.get(insert) 22 | override protected def queryConstruct(args: Queryable.ResultSetIterator): Int = 23 | args.get(dialect.IntType) 24 | 25 | override private[scalasql] def 
renderSql(ctx: Context): SqlStr = { 26 | new Renderer( 27 | Table.fullIdentifier(insert.table.value)(ctx), 28 | Table.labels(insert.table.value), 29 | values, 30 | qr, 31 | skippedColumns 32 | )(ctx).render() 33 | } 34 | 35 | override def skipColumns(x: (V[Column] => Column[?])*): InsertValues[V, R] = { 36 | 37 | new Impl( 38 | insert, 39 | values, 40 | dialect, 41 | qr, 42 | skippedColumns ++ x.map(_(WithSqlExpr.get(insert))) 43 | ) 44 | } 45 | } 46 | class Renderer[Q, R]( 47 | tableName: String, 48 | columnsList0: Seq[String], 49 | valuesList: Seq[R], 50 | qr: Queryable.Row[Q, R], 51 | skippedColumns: Seq[Column[?]] 52 | )(implicit ctx: Context) { 53 | 54 | lazy val skippedColumnsNames = skippedColumns.map(_.name).toSet 55 | 56 | lazy val (liveCols, liveIndices) = columnsList0.zipWithIndex.filter { case (c, i) => 57 | !skippedColumnsNames.contains(c) 58 | }.unzip 59 | 60 | lazy val columns = SqlStr.join( 61 | liveCols.map(s => SqlStr.raw(ctx.config.columnNameMapper(s))), 62 | SqlStr.commaSep 63 | ) 64 | 65 | lazy val liveIndicesSet = liveIndices.toSet 66 | 67 | val valuesSqls = valuesList.map { v => 68 | val commaSeparated = SqlStr.join( 69 | qr.walkExprs(qr.deconstruct(v)) 70 | .zipWithIndex 71 | .collect { case (s, i) if liveIndicesSet.contains(i) => sql"$s" }, 72 | SqlStr.commaSep 73 | ) 74 | sql"(" + commaSeparated + sql")" 75 | } 76 | 77 | lazy val values = SqlStr.join(valuesSqls, SqlStr.commaSep) 78 | 79 | def render() = { 80 | sql"INSERT INTO ${SqlStr.raw(tableName)} ($columns) VALUES $values" 81 | } 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /scalasql/query/src/JoinAppend.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.Queryable 4 | 5 | /** 6 | * Typeclass to allow `.join` to append tuples, such that `Query[(A, B)].join(Query[C])` 7 | * returns a flat `Query[(A, B, C)]` rather than a nested `Query[((A, 
B), B)]`. Can't 8 | * eliminate nesting in all cases, but eliminates nesting often enough to be useful 9 | */ 10 | trait JoinAppend[Q, Q2, QF, RF] { 11 | def appendTuple(t: Q, v: Q2): QF 12 | 13 | def qr: Queryable.Row[QF, RF] 14 | } 15 | object JoinAppend extends scalasql.generated.JoinAppend {} 16 | trait JoinAppendLowPriority { 17 | implicit def default[Q, R, Q2, R2]( 18 | implicit qr0: Queryable.Row[Q, R], 19 | qr20: Queryable.Row[Q2, R2] 20 | ): JoinAppend[Q, Q2, (Q, Q2), (R, R2)] = new JoinAppend[Q, Q2, (Q, Q2), (R, R2)] { 21 | override def appendTuple(t: Q, v: Q2): (Q, Q2) = (t, v) 22 | 23 | def qr: Queryable.Row[(Q, Q2), (R, R2)] = Queryable.Row.Tuple2Queryable 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /scalasql/query/src/JoinOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.{Expr, WithSqlExpr} 4 | 5 | trait JoinOps[C[_, _], Q, R] extends WithSqlExpr[Q] { 6 | 7 | /** 8 | * Performs a `JOIN`/`INNER JOIN` on the given [[other]], typically a [[Table]] or [[Select]]. 
9 | */ 10 | def join[Q2, R2, QF, RF](other: Joinable[Q2, R2])(on: (Q, Q2) => Expr[Boolean])( 11 | implicit ja: JoinAppend[Q, Q2, QF, RF] 12 | ): C[QF, RF] = join0("JOIN", other, Some(on)) 13 | 14 | /** 15 | * Performs a `CROSS JOIN`, which is an `INNER JOIN` but without the `ON` clause 16 | */ 17 | def crossJoin[Q2, R2, QF, RF](other: Joinable[Q2, R2])( 18 | implicit ja: JoinAppend[Q, Q2, QF, RF] 19 | ): C[QF, RF] = join0("CROSS JOIN", other, None) 20 | 21 | protected def join0[Q2, R2, QF, RF]( 22 | prefix: String, 23 | other: Joinable[Q2, R2], 24 | on: Option[(Q, Q2) => Expr[Boolean]] 25 | )( 26 | implicit ja: JoinAppend[Q, Q2, QF, RF] 27 | ): C[QF, RF] 28 | 29 | protected def joinInfo[Q2, R2]( 30 | joinPrefix: String, 31 | other: Joinable[Q2, R2], 32 | on: Option[(Q, Q2) => Expr[Boolean]] 33 | ) = { 34 | 35 | val (otherFrom, otherExpr) = Joinable.toFromExpr(other) 36 | 37 | val otherOn = on.map(_(expr, otherExpr)) 38 | val otherJoin = Join(joinPrefix, Seq(Join.From(otherFrom, otherOn))) 39 | 40 | (Seq(otherJoin), otherExpr) 41 | } 42 | 43 | } 44 | 45 | object JoinOps { 46 | 47 | def join0[C[_, _], Q, R, Q2, R2, QF, RF]( 48 | v: JoinOps[C, Q, R], 49 | prefix: String, 50 | other: Joinable[Q2, R2], 51 | on: Option[(Q, Q2) => Expr[Boolean]] 52 | )( 53 | implicit ja: JoinAppend[Q, Q2, QF, RF] 54 | ) = { 55 | v.join0[Q2, R2, QF, RF](prefix, other, on) 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /scalasql/query/src/Joinable.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.{Context, Expr, JoinNullable} 4 | 5 | /** 6 | * Something that can be joined; typically a [[Select]] or a [[Table]] 7 | */ 8 | trait Joinable[Q, R] { 9 | 10 | protected def joinableToFromExpr: (Context.From, Q) 11 | 12 | /** 13 | * Version of `crossJoin` meant for usage in `for`-comprehensions 14 | */ 15 | def crossJoin[Q2, R2](): FlatJoin.Mapper[Q, Q2, 
R, R2] = { 16 | val (from, expr) = joinableToFromExpr 17 | new FlatJoin.Mapper[Q, Q2, R, R2]("CROSS JOIN", from, expr, None, Nil) 18 | } 19 | 20 | /** 21 | * Version of `join` meant for usage in `for`-comprehensions 22 | */ 23 | def join[Q2, R2](on: Q => Expr[Boolean]): FlatJoin.Mapper[Q, Q2, R, R2] = { 24 | val (from, expr) = joinableToFromExpr 25 | new FlatJoin.Mapper[Q, Q2, R, R2]("JOIN", from, expr, Some(on(expr)), Nil) 26 | } 27 | 28 | /** 29 | * Version of `leftJoin` meant for usage in `for`-comprehensions 30 | */ 31 | def leftJoin[Q2, R2](on: Q => Expr[Boolean]): FlatJoin.NullableMapper[Q, Q2, R, R2] = { 32 | val (from, expr) = joinableToFromExpr 33 | new FlatJoin.NullableMapper[Q, Q2, R, R2]( 34 | "LEFT JOIN", 35 | from, 36 | JoinNullable(expr), 37 | Some(on(expr)), 38 | Nil 39 | ) 40 | } 41 | 42 | } 43 | object Joinable { 44 | def toFromExpr[Q, R](x: Joinable[Q, R]) = x.joinableToFromExpr 45 | } 46 | -------------------------------------------------------------------------------- /scalasql/query/src/JoinsToSql.scala: -------------------------------------------------------------------------------- 1 | package scalasql.renderer 2 | 3 | import scalasql.core.{Context, LiveExprs, SqlStr} 4 | import scalasql.core.SqlStr.SqlStringSyntax 5 | import scalasql.query.Join 6 | 7 | object JoinsToSql { 8 | 9 | def joinsToSqlStr( 10 | joins: Seq[Join], 11 | renderedFroms: Map[Context.From, SqlStr], 12 | joinOns: Seq[Seq[Option[SqlStr.Flattened]]] 13 | ) = { 14 | 15 | SqlStr.join(joins.iterator.zip(joinOns).map { case (join, joinOns) => 16 | val joinPrefix = SqlStr.raw(join.prefix) 17 | val joinSelectables = SqlStr.join(join.from.iterator.zip(joinOns).map { case (jf, fromOns) => 18 | val onSql = SqlStr.flatten(SqlStr.opt(fromOns)(on => sql" ON $on")) 19 | renderedFroms(jf.from) + onSql 20 | }) 21 | 22 | sql" $joinPrefix $joinSelectables" 23 | }) 24 | } 25 | 26 | def renderFroms( 27 | selectables: Seq[Context.From], 28 | prevContext: Context, 29 | namedFromsMap: 
Map[Context.From, String], 30 | liveExprs: LiveExprs 31 | ) = { 32 | selectables.iterator.map { f => 33 | (f, renderSingleFrom(prevContext, liveExprs, f, namedFromsMap)) 34 | }.toMap 35 | } 36 | 37 | def renderSingleFrom( 38 | prevContext: Context, 39 | liveExprs: LiveExprs, 40 | f: Context.From, 41 | namedFromsMap: Map[Context.From, String] 42 | ): SqlStr = { 43 | f.renderSql(SqlStr.raw(namedFromsMap(f)), prevContext, liveExprs) 44 | } 45 | 46 | def renderLateralJoins( 47 | prevContext: Context, 48 | from: Seq[Context.From], 49 | innerLiveExprs: LiveExprs, 50 | joins0: Seq[Join], 51 | renderedJoinOns: Seq[Seq[Option[SqlStr.Flattened]]] 52 | ) = { 53 | var joinContext = Context.compute(prevContext, from, None) 54 | 55 | val renderedFroms = JoinsToSql 56 | .renderFroms(from, prevContext, joinContext.fromNaming, innerLiveExprs) 57 | .to(collection.mutable.Map) 58 | 59 | val joins = SqlStr.join(joins0.iterator.zip(renderedJoinOns).map { case (join, joinOns) => 60 | val joinPrefix = SqlStr.raw(join.prefix) 61 | val prevJoinContext = joinContext 62 | joinContext = Context.compute(joinContext, join.from.map(_.from), None) 63 | val joinSelectables = SqlStr.join(join.from.iterator.zip(joinOns).map { case (jf, fromOns) => 64 | val onSql = SqlStr.flatten(SqlStr.opt(fromOns)(on => sql" ON $on")) 65 | 66 | renderedFroms.getOrElseUpdate( 67 | jf.from, 68 | JoinsToSql.renderSingleFrom( 69 | prevJoinContext, 70 | innerLiveExprs, 71 | jf.from, 72 | joinContext.fromNaming 73 | ) 74 | ) + 75 | onSql 76 | }) 77 | 78 | sql" $joinPrefix $joinSelectables" 79 | }) 80 | (renderedFroms, joins) 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /scalasql/query/src/LateralJoinOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.{JoinNullable, Queryable, Expr, WithSqlExpr} 4 | 5 | /** 6 | * Wrapper class with extension methods to add support for `JOIN 
LATERAL`, which 7 | * allow for `JOIN` clauses to access the results of earlier `JOIN` and `FROM` clauses. 8 | * Only supported by Postgres and MySql 9 | */ 10 | class LateralJoinOps[C[_, _], Q, R](wrapped: JoinOps[C, Q, R] & Joinable[Q, R])( 11 | implicit qr: Queryable.Row[Q, R] 12 | ) { 13 | 14 | /** 15 | * Performs a `CROSS JOIN LATERAL`, similar to `CROSS JOIN` but allows the 16 | * `JOIN` clause to access the results of earlier `JOIN` and `FROM` clauses. 17 | * Only supported by Postgres and MySql 18 | */ 19 | def crossJoinLateral[Q2, R2, QF, RF](other: Q => Joinable[Q2, R2])( 20 | implicit ja: JoinAppend[Q, Q2, QF, RF] 21 | ): C[QF, RF] = 22 | JoinOps.join0(wrapped, "CROSS JOIN LATERAL", other(WithSqlExpr.get(wrapped)), None) 23 | 24 | /** 25 | * Performs a `JOIN LATERAL`, similar to `JOIN` but allows the 26 | * `JOIN` clause to access the results of earlier `JOIN` and `FROM` clauses. 27 | * Only supported by Postgres and MySql 28 | */ 29 | def joinLateral[Q2, R2, QF, RF](other: Q => Joinable[Q2, R2])(on: (Q, Q2) => Expr[Boolean])( 30 | implicit ja: JoinAppend[Q, Q2, QF, RF] 31 | ): C[QF, RF] = 32 | JoinOps.join0(wrapped, "JOIN LATERAL", other(WithSqlExpr.get(wrapped)), Some(on)) 33 | 34 | def leftJoinLateral[Q2, R2](other: Q => Joinable[Q2, R2])( 35 | on: (Q, Q2) => Expr[Boolean] 36 | )(implicit joinQr: Queryable.Row[Q2, R2]): Select[(Q, JoinNullable[Q2]), (R, Option[R2])] = { 37 | SimpleSelect.joinCopy( 38 | wrapped.asInstanceOf[SimpleSelect[Q, R]], 39 | other(WithSqlExpr.get(wrapped)), 40 | Some(on), 41 | "LEFT JOIN LATERAL" 42 | )((e, o) => (e, JoinNullable(o))) 43 | } 44 | 45 | /** 46 | * Version of `crossJoinLateral` meant for use in `for`-comprehensions 47 | */ 48 | def crossJoinLateral[Q2, R2](): FlatJoin.Mapper[Q, Q2, R, R2] = { 49 | val (from, expr) = Joinable.toFromExpr(wrapped) 50 | new FlatJoin.Mapper[Q, Q2, R, R2]("CROSS JOIN LATERAL", from, expr, None, Nil) 51 | } 52 | 53 | /** 54 | * Version of `joinLateral` meant for use in 
`for`-comprehensions 55 | */ 56 | def joinLateral[Q2, R2](on: Q => Expr[Boolean]): FlatJoin.Mapper[Q, Q2, R, R2] = { 57 | val (from, expr) = Joinable.toFromExpr(wrapped) 58 | new FlatJoin.Mapper[Q, Q2, R, R2]("JOIN LATERAL", from, expr, Some(on(expr)), Nil) 59 | } 60 | 61 | /** 62 | * Version of `leftJoinLateral` meant for use in `for`-comprehensions 63 | */ 64 | def leftJoinLateral[Q2, R2](on: Q => Expr[Boolean]): FlatJoin.NullableMapper[Q, Q2, R, R2] = { 65 | val (from, expr) = Joinable.toFromExpr(wrapped) 66 | new FlatJoin.NullableMapper[Q, Q2, R, R2]( 67 | "LEFT JOIN LATERAL", 68 | from, 69 | JoinNullable(expr), 70 | Some(on(expr)), 71 | Nil 72 | ) 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /scalasql/query/src/Model.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.Expr 4 | 5 | /** 6 | * Models a SQL `ORDER BY` clause 7 | */ 8 | case class OrderBy(expr: Expr[?], ascDesc: Option[AscDesc], nulls: Option[Nulls]) 9 | 10 | sealed trait AscDesc 11 | 12 | object AscDesc { 13 | 14 | /** 15 | * Models a SQL `ASC` clause 16 | */ 17 | case object Asc extends AscDesc 18 | 19 | /** 20 | * Models a SQL `DESC` clause 21 | */ 22 | case object Desc extends AscDesc 23 | } 24 | 25 | sealed trait Nulls 26 | 27 | object Nulls { 28 | 29 | /** 30 | * Models a SQL `NULLS FIRST` clause 31 | */ 32 | case object First extends Nulls 33 | 34 | /** 35 | * Models a SQL `NULLS LAST` clause 36 | */ 37 | case object Last extends Nulls 38 | } 39 | 40 | /** 41 | * Models a SQL `GROUP BY` clause 42 | */ 43 | case class GroupBy(keys: Seq[Expr[?]], select: () => Select[?, ?], having: Seq[Expr[?]]) 44 | 45 | /** 46 | * Models a SQL `JOIN` clause 47 | */ 48 | case class Join(prefix: String, from: Seq[Join.From]) 49 | object Join { 50 | case class From(from: scalasql.core.Context.From, on: Option[Expr[?]]) 51 | } 52 |
-------------------------------------------------------------------------------- /scalasql/query/src/OnConflict.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.SqlStr.{Renderable, SqlStringSyntax} 4 | import scalasql.core.{Context, Queryable, SqlStr, WithSqlExpr} 5 | 6 | /** 7 | * A query with a SQL `ON CONFLICT` clause, typically an `INSERT` or an `UPDATE` 8 | */ 9 | class OnConflict[Q, R](query: Query[R] & Returning.InsertBase[Q], expr: Q, table: TableRef) { 10 | def onConflictIgnore(c: (Q => Column[?])*) = 11 | new OnConflict.Ignore(query, c.map(_(expr)), table) 12 | def onConflictUpdate(c: (Q => Column[?])*)(c2: (Q => Column.Assignment[?])*) = 13 | new OnConflict.Update(query, c.map(_(expr)), c2.map(_(expr)), table) 14 | } 15 | 16 | object OnConflict { 17 | class Ignore[Q, R]( 18 | protected val query: Query[R] & Returning.InsertBase[Q], 19 | columns: Seq[Column[?]], 20 | val table: TableRef 21 | ) extends Query.DelegateQuery[R] 22 | with Returning.InsertBase[Q] { 23 | protected def expr = WithSqlExpr.get(query) 24 | private[scalasql] def renderSql(ctx: Context) = { 25 | val str = Renderable.renderSql(query)(ctx) 26 | val columnsStr = SqlStr.join( 27 | columns.map(c => SqlStr.raw(ctx.config.columnNameMapper(c.name))), 28 | SqlStr.commaSep 29 | ) 30 | str + sql" ON CONFLICT ($columnsStr) DO NOTHING" 31 | } 32 | 33 | protected override def queryIsExecuteUpdate = true 34 | 35 | override protected def queryConstruct(args: Queryable.ResultSetIterator): R = 36 | Query.construct(query, args) 37 | } 38 | 39 | class Update[Q, R]( 40 | protected val query: Query[R] & Returning.InsertBase[Q], 41 | columns: Seq[Column[?]], 42 | updates: Seq[Column.Assignment[?]], 43 | val table: TableRef 44 | ) extends Query.DelegateQuery[R] 45 | with Returning.InsertBase[Q] { 46 | protected def expr = WithSqlExpr.get(query) 47 | private[scalasql] def renderSql(ctx: Context) = { 48 | implicit 
val implicitCtx = Context.compute(ctx, Nil, Some(table)) 49 | val str = Renderable.renderSql(query) 50 | val columnsStr = SqlStr.join( 51 | columns.map(c => SqlStr.raw(ctx.config.columnNameMapper(c.name))), 52 | SqlStr.commaSep 53 | ) 54 | val updatesStr = SqlStr.join( 55 | updates.map { case assign => 56 | SqlStr.raw(ctx.config.columnNameMapper(assign.column.name)) + sql" = ${assign.value}" 57 | }, 58 | SqlStr.commaSep 59 | ) 60 | str + sql" ON CONFLICT (${columnsStr}) DO UPDATE SET $updatesStr" 61 | } 62 | 63 | protected override def queryIsExecuteUpdate = true 64 | override protected def queryConstruct(args: Queryable.ResultSetIterator): R = 65 | Query.construct(query, args) 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /scalasql/query/src/Query.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.SqlStr.Renderable 4 | import scalasql.core.{Context, Expr, Queryable, SqlStr, WithSqlExpr} 5 | 6 | /** 7 | * A SQL Query, either a [[Query.Multiple]] that returns multiple rows, or 8 | * a [[Query.Single]] that returns a single row 9 | */ 10 | trait Query[R] extends Renderable { 11 | protected def queryWalkLabels(): Seq[List[String]] 12 | protected def queryWalkExprs(): Seq[Expr[?]] 13 | protected def queryIsSingleRow: Boolean 14 | protected def queryGetGeneratedKeys: Option[Queryable.Row[?, ?]] = None 15 | protected def queryIsExecuteUpdate: Boolean = false 16 | 17 | protected def queryConstruct(args: Queryable.ResultSetIterator): R 18 | } 19 | 20 | object Query { 21 | 22 | /** 23 | * Configuration for a typical update [[Query]] 24 | */ 25 | trait ExecuteUpdate[R] extends scalasql.query.Query[R] { 26 | protected def queryWalkLabels(): Seq[List[String]] = Nil 27 | protected def queryWalkExprs(): Seq[Expr[?]] = Nil 28 | protected override def queryIsSingleRow = true 29 | protected override def queryIsExecuteUpdate = true 30 | } 
31 | 32 | /** 33 | * Configuration for a [[Query]] that wraps another [[Query]], delegating 34 | * most of the abstract methods to it 35 | */ 36 | trait DelegateQuery[R] extends scalasql.query.Query[R] { 37 | protected def query: Query[?] 38 | protected def queryWalkLabels() = query.queryWalkLabels() 39 | protected def queryWalkExprs() = query.queryWalkExprs() 40 | protected override def queryIsSingleRow = query.queryIsSingleRow 41 | protected override def queryIsExecuteUpdate = query.queryIsExecuteUpdate 42 | } 43 | 44 | /** 45 | * Configuration for a [[Query]] that wraps an expr [[Q]] and [[Queryable]] 46 | */ 47 | trait DelegateQueryable[Q, R] extends scalasql.query.Query[R] with WithSqlExpr[Q] { 48 | protected def qr: Queryable[Q, ?] 49 | protected def queryWalkLabels() = qr.walkLabels(expr) 50 | protected def queryWalkExprs() = qr.walkExprs(expr) 51 | protected override def queryIsSingleRow = qr.isSingleRow(expr) 52 | protected override def queryIsExecuteUpdate = qr.isExecuteUpdate(expr) 53 | } 54 | 55 | implicit def QueryQueryable[R]: Queryable[Query[R], R] = new QueryQueryable[Query[R], R]() 56 | 57 | def walkLabels[R](q: Query[R]) = q.queryWalkLabels() 58 | def walkSqlExprs[R](q: Query[R]) = q.queryWalkExprs() 59 | def isSingleRow[R](q: Query[R]) = q.queryIsSingleRow 60 | def construct[R](q: Query[R], args: Queryable.ResultSetIterator) = q.queryConstruct(args) 61 | 62 | /** 63 | * The default [[Queryable]] instance for any [[Query]]. 
Delegates the implementation 64 | * of the [[Queryable]] methods to abstract methods on the [[Query]], to allow easy 65 | * overrides and subclassing of [[Query]] classes 66 | */ 67 | class QueryQueryable[Q <: Query[R], R]() extends scalasql.core.Queryable[Q, R] { 68 | override def isGetGeneratedKeys(q: Q) = q.queryGetGeneratedKeys 69 | override def isExecuteUpdate(q: Q) = q.queryIsExecuteUpdate 70 | override def walkLabels(q: Q) = q.queryWalkLabels() 71 | override def walkExprs(q: Q) = q.queryWalkExprs() 72 | override def isSingleRow(q: Q) = q.queryIsSingleRow 73 | 74 | def renderSql(q: Q, ctx: Context): SqlStr = q.renderSql(ctx) 75 | 76 | override def construct(q: Q, args: Queryable.ResultSetIterator): R = q.queryConstruct(args) 77 | } 78 | 79 | /** 80 | * A [[Query]] that wraps another [[Query]] but sets [[queryIsSingleRow]] to `true` 81 | */ 82 | class Single[R](protected val query: Query[Seq[R]]) extends Query.DelegateQuery[R] { 83 | protected override def queryIsSingleRow: Boolean = true 84 | 85 | private[scalasql] def renderSql(ctx: Context): SqlStr = Renderable.renderSql(query)(ctx) 86 | protected override def queryConstruct(args: Queryable.ResultSetIterator): R = 87 | query.queryConstruct(args).asInstanceOf[R] 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /scalasql/query/src/Returning.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.SqlStr.{Renderable, SqlStringSyntax} 4 | import scalasql.core.{Context, ExprsToSql, Queryable, SqlStr, WithSqlExpr} 5 | 6 | /** 7 | * A query with a `RETURNING` clause 8 | */ 9 | trait Returning[Q, R] extends Query[Seq[R]] with Query.DelegateQueryable[Q, Seq[R]] { 10 | def single: Query.Single[R] = new Query.Single(this) 11 | } 12 | 13 | object Returning { 14 | 15 | /** 16 | * A query that could support a `RETURNING` clause, typically 17 | * an `INSERT` or `UPDATE` 18 | */ 19 | 
trait Base[Q] extends Renderable with WithSqlExpr[Q] { 20 | def table: TableRef 21 | } 22 | 23 | trait InsertBase[Q] extends Base[Q] { 24 | 25 | /** 26 | * Makes this `INSERT` query call `JdbcStatement.getGeneratedKeys` when it is executed, 27 | * returning a `Seq[R]` where `R` is a Scala type compatible with the auto-generated 28 | * primary key type (typically something like `Int` or `Long`) 29 | */ 30 | def getGeneratedKeys[R](implicit qr: Queryable.Row[?, R]): GetGeneratedKeys[Q, R] = { 31 | new GetGeneratedKeys.Impl(this) 32 | } 33 | } 34 | 35 | class InsertImpl[Q, R](returnable: InsertBase[?], returning: Q)( 36 | implicit qr: Queryable.Row[Q, R] 37 | ) extends Returning.Impl0[Q, R](qr, returnable, returning) 38 | with Returning[Q, R] {} 39 | 40 | class Impl[Q, R](returnable: Base[?], returning: Q)(implicit qr: Queryable.Row[Q, R]) 41 | extends Impl0[Q, R](qr, returnable, returning) 42 | with Returning[Q, R] 43 | 44 | class Impl0[Q, R]( 45 | protected val qr: Queryable.Row[Q, R], 46 | returnable: Base[?], 47 | protected val expr: Q 48 | ) extends Returning[Q, R] { 49 | 50 | override protected def queryConstruct(args: Queryable.ResultSetIterator): Seq[R] = { 51 | Seq(qr.construct(args)) 52 | } 53 | 54 | override def queryIsSingleRow = false 55 | 56 | private[scalasql] override def renderSql(ctx0: Context) = { 57 | val contextStage1: Context = Context.compute(ctx0, Nil, Some(returnable.table)) 58 | 59 | implicit val implicitCtx: Context = if (returnable.table.value.escape) { 60 | contextStage1.withFromNaming( 61 | contextStage1.fromNaming 62 | .updated(returnable.table, Table.fullIdentifier(returnable.table.value)(contextStage1)) 63 | ) 64 | } else { 65 | contextStage1 66 | } 67 | 68 | val prefix = Renderable.renderSql(returnable) 69 | val walked = qr.walkLabelsAndExprs(expr) 70 | val exprStr = ExprsToSql.apply(walked, implicitCtx, SqlStr.empty) 71 | val suffix = sql" RETURNING $exprStr" 72 | 73 | prefix + suffix 74 | } 75 | 76 | } 77 | 78 | } 79 | 
-------------------------------------------------------------------------------- /scalasql/query/src/SqlWindow.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | import scalasql.core.{Expr, SqlStr} 3 | import scalasql.core.DialectTypeMappers 4 | import scalasql.query.{AscDesc, CompoundSelect, Nulls, OrderBy} 5 | import scalasql.core.SqlStr.SqlStringSyntax 6 | import scalasql.core.Context 7 | 8 | case class SqlWindow[T]( 9 | e: Expr[T], 10 | partitionBy0: Option[Expr[?]], 11 | filter0: Option[Expr[Boolean]], 12 | orderBy: Seq[scalasql.query.OrderBy], 13 | frameStart0: Option[SqlStr], 14 | frameEnd0: Option[SqlStr], 15 | exclusions: Option[SqlStr] 16 | )(implicit dialect: DialectTypeMappers) 17 | extends Expr[T] { 18 | import dialect.{dialectSelf => _, _} 19 | protected def renderToSql0(implicit ctx: Context): SqlStr = { 20 | val partitionBySql = SqlStr.opt(partitionBy0) { p => sql"PARTITION BY $p" } 21 | val sortBySql = CompoundSelect.orderToSqlStr(orderBy, ctx) 22 | val overClause = SqlStr.join( 23 | Seq(partitionBySql, sortBySql).filter(!SqlStr.flatten(_).queryParts.forall(_.length == 0)), 24 | sql" " 25 | ) 26 | 27 | val frameStr = (frameStart0, frameEnd0, exclusions) match { 28 | case (None, None, None) => SqlStr.empty 29 | case (Some(start), None, ex) => sql" ROWS $start" + SqlStr.opt(ex)(sql" " + _) 30 | case (Some(start), Some(end), ex) => 31 | sql" ROWS BETWEEN $start AND $end" + SqlStr.opt(ex)(sql" " + _) 32 | } 33 | val filterStr = SqlStr.opt(filter0) { f => 34 | sql" FILTER (WHERE $f)" 35 | } 36 | sql"$e$filterStr OVER ($overClause$frameStr)" 37 | 38 | } 39 | 40 | def partitionBy(e: Expr[?]) = this.copy(partitionBy0 = Some(e)) 41 | 42 | def filter(expr: Expr[Boolean]) = copy(filter0 = Some(expr)) 43 | def sortBy(expr: Expr[?]) = { 44 | val newOrder = Seq(OrderBy(expr, None, None)) 45 | 46 | copy(orderBy = newOrder ++ orderBy) 47 | } 48 | 49 | def asc = 50 | copy(orderBy = 
orderBy.take(1).map(_.copy(ascDesc = Some(AscDesc.Asc))) ++ orderBy.drop(1)) 51 | 52 | def desc = 53 | copy(orderBy = orderBy.take(1).map(_.copy(ascDesc = Some(AscDesc.Desc))) ++ orderBy.drop(1)) 54 | 55 | def nullsFirst = 56 | copy(orderBy = orderBy.take(1).map(_.copy(nulls = Some(Nulls.First))) ++ orderBy.drop(1)) 57 | 58 | def nullsLast = 59 | copy(orderBy = orderBy.take(1).map(_.copy(nulls = Some(Nulls.Last))) ++ orderBy.drop(1)) 60 | 61 | class FrameConfig(f: Some[SqlStr] => SqlWindow[T]) { 62 | def preceding(offset: Int = -1) = offset match { 63 | case -1 => f(Some(sql"UNBOUNDED PRECEDING")) 64 | case offset => f(Some(sql"$offset PRECEDING")) 65 | } 66 | 67 | def currentRow = f(Some(sql"CURRENT ROW")) 68 | 69 | def following(offset: Int = -1) = offset match { 70 | case -1 => f(Some(sql"UNBOUNDED FOLLOWING")) 71 | case offset => f(Some(sql"$offset FOLLOWING")) 72 | } 73 | } 74 | def frameStart = new FrameConfig(s => copy(frameStart0 = s)) 75 | def frameEnd = new FrameConfig(s => copy(frameEnd0 = s)) 76 | 77 | object exclude { 78 | def currentRow = copy(exclusions = Some(sql"EXCLUDE CURRENT ROW")) 79 | def group = copy(exclusions = Some(sql"EXCLUDE GROUP")) 80 | def ties = copy(exclusions = Some(sql"EXCLUDE TIES")) 81 | def noOthers = copy(exclusions = Some(sql"EXCLUDE NO OTHERS")) 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /scalasql/query/src/Table.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.{DialectTypeMappers, Sc, Queryable, Expr} 4 | import scalasql.core.Context 5 | 6 | /** 7 | * In-code representation of a SQL table, associated with a given `case class` [[V]]. 
8 | */ 9 | abstract class Table[V[_[_]]]()(implicit name: sourcecode.Name, metadata0: Table.Metadata[V]) 10 | extends Table.Base 11 | with Table.LowPri[V] { 12 | 13 | protected[scalasql] def tableName = name.value 14 | 15 | protected[scalasql] def schemaName = "" 16 | 17 | protected[scalasql] def escape: Boolean = false 18 | 19 | protected implicit def tableSelf: Table[V] = this 20 | 21 | protected def tableMetadata: Table.Metadata[V] = metadata0 22 | 23 | implicit def containerQr(implicit dialect: DialectTypeMappers): Queryable.Row[V[Expr], V[Sc]] = 24 | tableMetadata 25 | .queryable( 26 | tableMetadata.walkLabels0, 27 | dialect, 28 | new Table.Metadata.QueryableProxy(tableMetadata.queryables(dialect, _)) 29 | ) 30 | .asInstanceOf[Queryable.Row[V[Expr], V[Sc]]] 31 | 32 | protected def tableRef = new TableRef(this) 33 | protected[scalasql] def tableLabels: Seq[String] = { 34 | tableMetadata.walkLabels0() 35 | } 36 | implicit def tableImplicitMetadata: Table.ImplicitMetadata[V] = 37 | Table.ImplicitMetadata(tableMetadata) 38 | } 39 | 40 | object Table { 41 | trait LowPri[V[_[_]]] { this: Table[V] => 42 | implicit def containerQr2( 43 | implicit dialect: DialectTypeMappers 44 | ): Queryable.Row[V[Column], V[Sc]] = 45 | containerQr.asInstanceOf[Queryable.Row[V[Column], V[Sc]]] 46 | } 47 | 48 | case class ImplicitMetadata[V[_[_]]](value: Metadata[V]) 49 | 50 | def metadata[V[_[_]]](t: Table[V]) = t.tableMetadata 51 | def ref[V[_[_]]](t: Table[V]) = t.tableRef 52 | def name(t: Table.Base) = t.tableName 53 | def labels(t: Table.Base) = t.tableLabels 54 | def columnNameOverride[V[_[_]]](t: Table.Base)(s: String) = t.tableColumnNameOverride(s) 55 | def identifier(t: Table.Base)(implicit context: Context): String = { 56 | context.config.tableNameMapper.andThen { str => 57 | if (t.escape) { 58 | context.dialectConfig.escape(str) 59 | } else { 60 | str 61 | } 62 | }(t.tableName) 63 | } 64 | def fullIdentifier( 65 | t: Table.Base 66 | )(implicit context: Context): String = { 
67 | t.schemaName match { 68 | case "" => identifier(t) 69 | case str => s"$str." + identifier(t) 70 | } 71 | } 72 | trait Base { 73 | 74 | /** 75 | * The name of this table, before processing by [[Config.tableNameMapper]]. 76 | * Can be overridden to configure the table names 77 | */ 78 | protected[scalasql] def tableName: String 79 | protected[scalasql] def schemaName: String 80 | protected[scalasql] def tableLabels: Seq[String] 81 | protected[scalasql] def escape: Boolean 82 | 83 | /** 84 | * Customizations to the column names of this table before processing, 85 | * by [[Config.columnNameMapper]]. Can be overridden to configure the column 86 | * names on a per-column basis. 87 | */ 88 | protected[scalasql] def tableColumnNameOverride(s: String): String = identity(s) 89 | } 90 | 91 | class Metadata[V[_[_]]]( 92 | val queryables: (DialectTypeMappers, Int) => Queryable.Row[?, ?], 93 | val walkLabels0: () => Seq[String], 94 | val queryable: ( 95 | () => Seq[String], 96 | DialectTypeMappers, 97 | Metadata.QueryableProxy 98 | ) => Queryable[V[Expr], V[Sc]], 99 | val vExpr0: (TableRef, DialectTypeMappers, Metadata.QueryableProxy) => V[Column] 100 | ) { 101 | def vExpr(t: TableRef, d: DialectTypeMappers) = 102 | vExpr0(t, d, new Metadata.QueryableProxy(queryables(d, _))) 103 | } 104 | 105 | object Metadata extends scalasql.query.TableMacros { 106 | class QueryableProxy(queryables: Int => Queryable.Row[?, ?]) { 107 | def apply[T, V](n: Int): Queryable.Row[T, V] = queryables(n).asInstanceOf[Queryable.Row[T, V]] 108 | } 109 | } 110 | 111 | object Internal { 112 | class TableQueryable[Q, R <: scala.Product]( 113 | walkLabels0: () => Seq[String], 114 | walkExprs0: Q => Seq[Expr[?]], 115 | construct0: Queryable.ResultSetIterator => R, 116 | deconstruct0: R => Q = ???
117 | ) extends Queryable.Row[Q, R] { 118 | def walkLabels(): Seq[List[String]] = walkLabels0().map(List(_)) 119 | def walkExprs(q: Q): Seq[Expr[?]] = walkExprs0(q) 120 | 121 | def construct(args: Queryable.ResultSetIterator) = construct0(args) 122 | 123 | def deconstruct(r: R): Q = deconstruct0(r) 124 | } 125 | 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /scalasql/query/src/Values.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.{Context, DialectTypeMappers, Expr, LiveExprs, Queryable, SqlStr} 4 | import scalasql.core.SqlStr.SqlStringSyntax 5 | 6 | /** 7 | * A SQL `VALUES` clause, used to treat a sequence of primitive [[T]]s as 8 | * a [[Select]] query. 9 | */ 10 | class Values[Q, R](val ts: Seq[R])( 11 | implicit val qr: Queryable.Row[Q, R], 12 | protected val dialect: DialectTypeMappers 13 | ) extends Select.Proxy[Q, R] 14 | with Query.DelegateQueryable[Q, Seq[R]] { 15 | assert(ts.nonEmpty, "`Values` clause does not support empty sequence") 16 | 17 | protected def selectToSimpleSelect() = this.subquery 18 | val tableRef = new SubqueryRef(this) 19 | protected def columnName(n: Int) = s"column${n + 1}" 20 | 21 | protected override val expr: Q = qr.deconstruct(ts.head) 22 | 23 | override protected def selectRenderer(prevContext: Context): SubqueryRef.Wrapped.Renderer = 24 | new Values.Renderer(this)(implicitly, prevContext) 25 | 26 | override protected def selectExprAliases(prevContext: Context) = { 27 | qr.walkExprs(expr) 28 | .zipWithIndex 29 | .map { case (e, i) => (Expr.identity(e), SqlStr.raw(columnName(i))) } 30 | } 31 | 32 | } 33 | 34 | object Values { 35 | class Renderer[Q, R](v: Values[Q, R])(implicit qr: Queryable.Row[Q, R], ctx: Context) 36 | extends SubqueryRef.Wrapped.Renderer { 37 | def wrapRow(t: R): SqlStr = sql"(" + SqlStr.join( 38 | qr.walkExprs(qr.deconstruct(t)).map(i => sql"$i"), 39 | 
SqlStr.commaSep 40 | ) + sql")" 41 | def render(liveExprs: LiveExprs): SqlStr = { 42 | val rows = SqlStr.join(v.ts.map(wrapRow), SqlStr.commaSep) 43 | sql"VALUES $rows" 44 | } 45 | 46 | def context = ctx 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /scalasql/query/src/WithCte.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql.core.{ 4 | Context, 5 | DialectTypeMappers, 6 | LiveExprs, 7 | Queryable, 8 | Expr, 9 | ExprsToSql, 10 | SqlStr, 11 | WithSqlExpr 12 | } 13 | import scalasql.core.SqlStr.SqlStringSyntax 14 | 15 | /** 16 | * A SQL `WITH` clause 17 | */ 18 | class WithCte[Q, R]( 19 | walked: Queryable.Walked, 20 | val lhs: Select[?, ?], 21 | val cteRef: WithCteRef, 22 | val rhs: Select[Q, R], 23 | val withPrefix: SqlStr = sql"WITH " 24 | )(implicit val qr: Queryable.Row[Q, R], protected val dialect: DialectTypeMappers) 25 | extends Select.Proxy[Q, R] { 26 | 27 | override protected def expr = Joinable.toFromExpr(rhs)._2 28 | private def unprefixed = new WithCte(walked, lhs, cteRef, rhs, SqlStr.commaSep) 29 | 30 | protected def selectToSimpleSelect() = this.subquery 31 | 32 | override def map[Q2, R2](f: Q => Q2)(implicit qr2: Queryable.Row[Q2, R2]): Select[Q2, R2] = { 33 | new WithCte(walked, lhs, cteRef, rhs.map(f)) 34 | } 35 | 36 | override def filter(f: Q => Expr[Boolean]): Select[Q, R] = { 37 | new WithCte(walked, rhs.filter(f), cteRef, rhs) 38 | } 39 | 40 | override def sortBy(f: Q => Expr[?]): Select[Q, R] = 41 | new WithCte(walked, lhs, cteRef, rhs.sortBy(f)) 42 | 43 | override def drop(n: Int): Select[Q, R] = new WithCte(walked, lhs, cteRef, rhs.drop(n)) 44 | override def take(n: Int): Select[Q, R] = new WithCte(walked, lhs, cteRef, rhs.take(n)) 45 | 46 | override protected def selectRenderer(prevContext: Context): SubqueryRef.Wrapped.Renderer = 47 | new WithCte.Renderer(walked, withPrefix, this, prevContext) 48 | 
49 | override protected def selectExprAliases(prevContext: Context) = { 50 | SubqueryRef.Wrapped.exprAliases(rhs, prevContext) 51 | } 52 | 53 | override protected def queryConstruct(args: Queryable.ResultSetIterator): Seq[R] = 54 | Query.construct(rhs, args) 55 | 56 | } 57 | 58 | object WithCte { 59 | class Proxy[Q, R]( 60 | lhs: Select[Q, R], 61 | lhsSubQueryRef: WithCteRef, 62 | val qr: Queryable.Row[Q, R], 63 | protected val dialect: DialectTypeMappers 64 | ) extends Select.Proxy[Q, R] { 65 | 66 | override def joinableToFromExpr: (Context.From, Q) = { 67 | val otherFrom = lhsSubQueryRef 68 | (otherFrom, WithSqlExpr.get(lhs)) 69 | } 70 | 71 | override protected def selectToSimpleSelect(): SimpleSelect[Q, R] = { 72 | Select.newSimpleSelect[Q, R]( 73 | lhs, 74 | expr = WithSqlExpr.get(lhs), 75 | exprPrefix = None, 76 | exprSuffix = None, 77 | preserveAll = false, 78 | from = Seq(lhsSubQueryRef), 79 | joins = Nil, 80 | where = Nil, 81 | groupBy0 = None 82 | )(qr, dialect) 83 | } 84 | 85 | override def selectRenderer(prevContext: Context): SubqueryRef.Wrapped.Renderer = 86 | new SubqueryRef.Wrapped.Renderer { 87 | def render(liveExprs: LiveExprs): SqlStr = { 88 | SqlStr.raw(prevContext.fromNaming(lhsSubQueryRef)) 89 | } 90 | } 91 | 92 | override private[scalasql] def renderSql(ctx: Context): SqlStr = { 93 | SqlStr.raw(ctx.fromNaming(lhsSubQueryRef)) 94 | } 95 | } 96 | 97 | class Renderer[Q, R]( 98 | walked: Queryable.Walked, 99 | withPrefix: SqlStr, 100 | query: WithCte[Q, R], 101 | prevContext: Context 102 | ) extends SubqueryRef.Wrapped.Renderer { 103 | def render(liveExprs: LiveExprs) = { 104 | val newExprNaming = ExprsToSql.selectColumnReferences(walked, prevContext) 105 | val newContext = Context.compute(prevContext, Seq(query.cteRef), None) 106 | val cteName = SqlStr.raw(newContext.fromNaming(query.cteRef)) 107 | val leadingSpace = query.rhs match { 108 | case w: WithCte[Q, R] => SqlStr.empty 109 | case r => sql" " 110 | } 111 | 112 | val wrapped = 
SubqueryRef.Wrapped 113 | .renderer( 114 | query.rhs match { 115 | case w: WithCte[Q, R] => w.unprefixed 116 | case r => r 117 | }, 118 | newContext 119 | ) 120 | .render(liveExprs) 121 | 122 | val rhsSql = SqlStr.flatten(leadingSpace + wrapped) 123 | val rhsReferenced = LiveExprs.some(rhsSql.referencedExprs.toSet) 124 | val lhsSql = 125 | SubqueryRef.Wrapped.renderer(query.lhs, prevContext).render(rhsReferenced) 126 | 127 | val cteColumns = SqlStr.join( 128 | newExprNaming.collect { case (exprId, name) if rhsReferenced.isLive(exprId) => name }, 129 | SqlStr.commaSep 130 | ) 131 | 132 | sql"$withPrefix$cteName ($cteColumns) AS ($lhsSql)$rhsSql" 133 | } 134 | 135 | } 136 | } 137 | -------------------------------------------------------------------------------- /scalasql/src/dialects/CompoundSelectRendererForceLimit.scala: -------------------------------------------------------------------------------- 1 | package scalasql.dialects 2 | 3 | import scalasql.core.TypeMapper 4 | import scalasql.core.SqlStr 5 | import scalasql.core.SqlStr.SqlStringSyntax 6 | 7 | object CompoundSelectRendererForceLimit { 8 | def limitToSqlStr(limit: Option[Int], offset: Option[Int])(implicit tm: TypeMapper[Int]) = { 9 | SqlStr.opt(limit.orElse(Option.when(offset.nonEmpty)(Int.MaxValue))) { limit => 10 | sql" LIMIT $limit" 11 | } 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /scalasql/src/dialects/DbApiQueryOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.dialects 2 | 3 | import scalasql.core._ 4 | import scalasql.query.{Select, Values, WithCte, WithCteRef} 5 | 6 | class DbApiQueryOps(dialect: DialectTypeMappers) { 7 | import dialect._ 8 | 9 | /** 10 | * Creates a SQL `VALUES` clause 11 | */ 12 | def values[Q, R](ts: Seq[R])(implicit qr: Queryable.Row[Q, R]): Values[Q, R] = 13 | new scalasql.query.Values(ts) 14 | 15 | /** Generates a SQL `WITH` common table expression clause 
*/ 16 | def withCte[Q, Q2, R, R2]( 17 | lhs: Select[Q, R] 18 | )(block: Select[Q, R] => Select[Q2, R2])(implicit qr: Queryable.Row[Q2, R2]): Select[Q2, R2] = { 19 | 20 | val walked = lhs.qr.walkLabelsAndExprs(WithSqlExpr.get(lhs)) 21 | val lhsSubQueryRef = new WithCteRef(lhs.qr.walkLabelsAndExprs(WithSqlExpr.get(lhs))) 22 | val rhsSelect = new WithCte.Proxy[Q, R](lhs, lhsSubQueryRef, lhs.qr, dialect) 23 | 24 | new WithCte(walked, lhs, lhsSubQueryRef, block(rhsSelect)) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /scalasql/src/dialects/OnConflictOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.dialects 2 | 3 | import scalasql.core.{Expr, WithSqlExpr} 4 | import scalasql.query._ 5 | 6 | trait OnConflictOps { 7 | implicit def OnConflictableInsertColumns[V[_[_]], R]( 8 | query: InsertColumns[V, R] 9 | ): OnConflict[V[Column], Int] = 10 | new OnConflict[V[Column], Int](query, WithSqlExpr.get(query), query.table) 11 | 12 | implicit def OnConflictableInsertValues[V[_[_]], R]( 13 | query: InsertValues[V, R] 14 | ): OnConflict[V[Column], Int] = 15 | new OnConflict[V[Column], Int](query, WithSqlExpr.get(query), query.table) 16 | 17 | implicit def OnConflictableInsertSelect[V[_[_]], C, R, R2]( 18 | query: InsertSelect[V, C, R, R2] 19 | ): OnConflict[V[Column], Int] = { 20 | new OnConflict[V[Column], Int](query, WithSqlExpr.get(query), query.table) 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /scalasql/src/dialects/ReturningDialect.scala: -------------------------------------------------------------------------------- 1 | package scalasql.dialects 2 | 3 | import scalasql.core.{WithSqlExpr, Queryable} 4 | import scalasql.query.{OnConflict, Returning} 5 | 6 | trait ReturningDialect extends Dialect { 7 | implicit class InsertReturningConv[Q](r: Returning.InsertBase[Q]) { 8 | def returning[Q2, R](f: Q => 
Q2)(implicit qr: Queryable.Row[Q2, R]): Returning[Q2, R] = { 9 | new Returning.InsertImpl(r, f(WithSqlExpr.get(r))) 10 | } 11 | } 12 | 13 | implicit class ReturningConv[Q](r: Returning.Base[Q]) { 14 | def returning[Q2, R](f: Q => Q2)(implicit qr: Queryable.Row[Q2, R]): Returning[Q2, R] = { 15 | new Returning.Impl(r, f(WithSqlExpr.get(r))) 16 | } 17 | } 18 | implicit class OnConflictUpdateConv[Q, R](r: OnConflict.Update[Q, R]) { 19 | def returning[Q2, R](f: Q => Q2)(implicit qr: Queryable.Row[Q2, R]): Returning[Q2, R] = { 20 | new Returning.Impl(r, f(WithSqlExpr.get(r))) 21 | } 22 | } 23 | implicit class OnConflictIgnoreConv[Q, R](r: OnConflict.Ignore[Q, R]) { 24 | def returning[Q2, R](f: Q => Q2)(implicit qr: Queryable.Row[Q2, R]): Returning[Q2, R] = { 25 | new Returning.Impl(r, f(WithSqlExpr.get(r))) 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /scalasql/src/dialects/TableOps.scala: -------------------------------------------------------------------------------- 1 | package scalasql.dialects 2 | 3 | import scalasql.dialects.Dialect 4 | import scalasql.core.{Context, Expr} 5 | import scalasql.Sc 6 | import scalasql.query.{Column, Delete, Insert, Joinable, Select, SimpleSelect, Table, Update} 7 | 8 | class TableOps[V[_[_]]](val t: Table[V])(implicit dialect: Dialect) 9 | extends Joinable[V[Expr], V[Sc]] { 10 | 11 | import dialect.{dialectSelf => _} 12 | 13 | protected def toFromExpr0 = { 14 | val ref = Table.ref(t) 15 | (ref, Table.metadata(t).vExpr(ref, dialect)) 16 | } 17 | 18 | protected def joinableToFromExpr: (Context.From, V[Expr]) = { 19 | val (ref, expr) = toFromExpr0 20 | (ref, expr.asInstanceOf[V[Expr]]) 21 | } 22 | 23 | protected def joinableToSelect: Select[V[Expr], V[Sc]] = { 24 | val (ref, expr) = joinableToFromExpr 25 | new SimpleSelect(expr, None, None, false, Seq(ref), Nil, Nil, None)( 26 | t.containerQr, 27 | dialect 28 | ) 29 | } 30 | 31 | /** 32 | * Constructs a `SELECT` query 33 | */ 
34 | def select = joinableToSelect 35 | 36 | /** 37 | * Constructs a `UPDATE` query with the given [[filter]] to select the 38 | * rows you want to delete 39 | */ 40 | def update(filter: V[Column] => Expr[Boolean]): Update[V[Column], V[Sc]] = { 41 | val (ref, expr) = toFromExpr0 42 | new Update.Impl(expr, ref, Nil, Nil, Seq(filter(Table.metadata(t).vExpr(ref, dialect))))( 43 | t.containerQr2, 44 | dialect 45 | ) 46 | } 47 | 48 | /** 49 | * Constructs a `INSERT` query 50 | */ 51 | def insert: Insert[V, V[Sc]] = { 52 | val (ref, expr) = toFromExpr0 53 | new Insert.Impl(expr, ref)(t.containerQr2, dialect) 54 | } 55 | 56 | /** 57 | * Constructs a `DELETE` query with the given [[filter]] to select the 58 | * rows you want to delete 59 | */ 60 | def delete(filter: V[Column] => Expr[Boolean]): Delete[V[Column]] = { 61 | val (ref, expr) = toFromExpr0 62 | new Delete.Impl(expr, filter(Table.metadata(t).vExpr(ref, dialect)), ref) 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /scalasql/src/package.scala: -------------------------------------------------------------------------------- 1 | package object scalasql { 2 | 3 | /** 4 | * Convenience alias for `geny.Bytes` 5 | */ 6 | def Bytes(x: String): geny.Bytes = new geny.Bytes(x.getBytes("UTF-8")) 7 | 8 | /** 9 | * Convenience alias for `geny.Bytes` 10 | */ 11 | def Bytes(x: Array[Byte]): geny.Bytes = new geny.Bytes(x) 12 | 13 | /** 14 | * Convenience alias for `geny.Bytes` 15 | */ 16 | type Bytes = geny.Bytes 17 | 18 | type Sc[T] = T 19 | 20 | val Table = query.Table 21 | type Table[V[_[_]]] = query.Table[V] 22 | 23 | val Column = query.Column 24 | type Column[T] = query.Column[T] 25 | 26 | val DbClient = core.DbClient 27 | type DbClient = core.DbClient 28 | 29 | val DbApi = core.DbApi 30 | type DbApi = core.DbApi 31 | 32 | val Queryable = core.Queryable 33 | type Queryable[Q, R] = core.Queryable[Q, R] 34 | 35 | val Expr = core.Expr 36 | type Expr[T] = core.Expr[T] 37 
| 38 | type TypeMapper[T] = core.TypeMapper[T] 39 | val TypeMapper = core.TypeMapper 40 | 41 | val Config = core.Config 42 | type Config = core.Config 43 | 44 | val SqlStr = core.SqlStr 45 | type SqlStr = core.SqlStr 46 | 47 | val MySqlDialect = dialects.MySqlDialect 48 | type MySqlDialect = dialects.MySqlDialect 49 | 50 | val PostgresDialect = dialects.PostgresDialect 51 | type PostgresDialect = dialects.PostgresDialect 52 | 53 | val H2Dialect = dialects.H2Dialect 54 | type H2Dialect = dialects.H2Dialect 55 | 56 | val SqliteDialect = dialects.SqliteDialect 57 | type SqliteDialect = dialects.SqliteDialect 58 | } 59 | -------------------------------------------------------------------------------- /scalasql/test/resources/customer-data-plus-schema.sql: -------------------------------------------------------------------------------- 1 | 2 | INSERT INTO buyer (name, date_of_birth) VALUES 3 | ('James Bond', '2001-02-03'), 4 | ('叉烧包', '1923-11-12'), 5 | ('Li Haoyi', '1965-08-09'); 6 | 7 | INSERT INTO product (kebab_case_name, name, price) VALUES 8 | ('face-mask', 'Face Mask', 8.88), 9 | ('guitar', 'Guitar', 300), 10 | ('socks', 'Socks', 3.14), 11 | ('skate-board', 'Skate Board', 123.45), 12 | ('camera', 'Camera', 1000.00), 13 | ('cookie', 'Cookie', 0.10); 14 | 15 | INSERT INTO shipping_info (buyer_id, shipping_date) VALUES 16 | (2, '2010-02-03'), 17 | (1, '2012-04-05'), 18 | (2, '2012-05-06'); 19 | 20 | INSERT INTO purchase (shipping_info_id, product_id, count, total) VALUES 21 | (1, 1, 100, 888), 22 | (1, 2, 3, 900), 23 | (1, 3, 5, 15.7), 24 | (2, 4, 4, 493.8), 25 | (2, 5, 10, 10000.00), 26 | (3, 1, 5, 44.4), 27 | (3, 6, 13, 1.30); 28 | 29 | INSERT INTO otherschema.invoice (total, vendor_name) VALUES 30 | (150.4, 'Siemens'), 31 | (213.3, 'Samsung'), 32 | (407.2, 'Shell'); 33 | -------------------------------------------------------------------------------- /scalasql/test/resources/customer-data.sql: 
-------------------------------------------------------------------------------- 1 | 2 | INSERT INTO buyer (name, date_of_birth) VALUES 3 | ('James Bond', '2001-02-03'), 4 | ('叉烧包', '1923-11-12'), 5 | ('Li Haoyi', '1965-08-09'); 6 | 7 | INSERT INTO product (kebab_case_name, name, price) VALUES 8 | ('face-mask', 'Face Mask', 8.88), 9 | ('guitar', 'Guitar', 300), 10 | ('socks', 'Socks', 3.14), 11 | ('skate-board', 'Skate Board', 123.45), 12 | ('camera', 'Camera', 1000.00), 13 | ('cookie', 'Cookie', 0.10); 14 | 15 | INSERT INTO shipping_info (buyer_id, shipping_date) VALUES 16 | (2, '2010-02-03'), 17 | (1, '2012-04-05'), 18 | (2, '2012-05-06'); 19 | 20 | INSERT INTO purchase (shipping_info_id, product_id, count, total) VALUES 21 | (1, 1, 100, 888), 22 | (1, 2, 3, 900), 23 | (1, 3, 5, 15.7), 24 | (2, 4, 4, 493.8), 25 | (2, 5, 10, 10000.00), 26 | (3, 1, 5, 44.4), 27 | (3, 6, 13, 1.30); 28 | -------------------------------------------------------------------------------- /scalasql/test/resources/h2-customer-schema.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS buyer CASCADE; 2 | DROP TABLE IF EXISTS product CASCADE; 3 | DROP TABLE IF EXISTS shipping_info CASCADE; 4 | DROP TABLE IF EXISTS purchase CASCADE; 5 | DROP TABLE IF EXISTS data_types CASCADE; 6 | DROP TABLE IF EXISTS a CASCADE; 7 | DROP TABLE IF EXISTS b CASCADE; 8 | DROP TABLE IF EXISTS non_round_trip_types CASCADE; 9 | DROP TABLE IF EXISTS opt_cols CASCADE; 10 | DROP TABLE IF EXISTS nested CASCADE; 11 | DROP TABLE IF EXISTS enclosing CASCADE; 12 | DROP TABLE IF EXISTS invoice CASCADE; 13 | DROP SCHEMA IF EXISTS otherschema CASCADE; 14 | DROP TABLE IF EXISTS "SELECT" CASCADE; 15 | 16 | CREATE TABLE buyer ( 17 | id INTEGER AUTO_INCREMENT PRIMARY KEY, 18 | name VARCHAR(256), 19 | date_of_birth DATE 20 | ); 21 | 22 | CREATE TABLE product ( 23 | id INTEGER AUTO_INCREMENT PRIMARY KEY, 24 | kebab_case_name VARCHAR(256), 25 | name VARCHAR(256), 26 | price 
DECIMAL(20, 2) 27 | ); 28 | 29 | CREATE TABLE shipping_info ( 30 | id INTEGER AUTO_INCREMENT PRIMARY KEY, 31 | buyer_id INT, 32 | shipping_date DATE, 33 | FOREIGN KEY(buyer_id) REFERENCES buyer(id) 34 | ); 35 | 36 | CREATE TABLE purchase ( 37 | id INTEGER AUTO_INCREMENT PRIMARY KEY, 38 | shipping_info_id INT, 39 | product_id INT, 40 | count INT, 41 | total DECIMAL(20, 2), 42 | FOREIGN KEY(shipping_info_id) REFERENCES shipping_info(id), 43 | FOREIGN KEY(product_id) REFERENCES product(id) 44 | ); 45 | 46 | CREATE TABLE data_types ( 47 | my_tiny_int TINYINT, 48 | my_small_int SMALLINT, 49 | my_int INTEGER, 50 | my_big_int BIGINT, 51 | my_double DOUBLE, 52 | my_boolean BOOLEAN, 53 | my_local_date DATE, 54 | my_local_time TIME, 55 | my_local_date_time TIMESTAMP, 56 | my_util_date TIMESTAMP, 57 | my_instant TIMESTAMP WITH TIME ZONE, 58 | my_var_binary VARBINARY(256), 59 | my_uuid UUID, 60 | my_enum VARCHAR(256) 61 | -- my_offset_time TIME WITH TIME ZONE, 62 | ); 63 | 64 | CREATE TABLE a( 65 | id INTEGER, 66 | b_id INTEGER 67 | ); 68 | 69 | CREATE TABLE b( 70 | id INTEGER, 71 | custom VARCHAR(256) 72 | ); 73 | 74 | CREATE TABLE non_round_trip_types( 75 | my_zoned_date_time TIMESTAMP WITH TIME ZONE, 76 | my_offset_date_time TIMESTAMP WITH TIME ZONE 77 | ); 78 | 79 | CREATE TABLE opt_cols( 80 | my_int INTEGER, 81 | my_int2 INTEGER 82 | ); 83 | 84 | CREATE TABLE nested( 85 | foo_id INTEGER, 86 | my_boolean BOOLEAN 87 | ); 88 | 89 | CREATE TABLE enclosing( 90 | bar_id INTEGER, 91 | my_string VARCHAR(256), 92 | foo_id INTEGER, 93 | my_boolean BOOLEAN 94 | ); 95 | 96 | CREATE SCHEMA otherschema; 97 | 98 | CREATE TABLE otherschema.invoice( 99 | id INTEGER AUTO_INCREMENT PRIMARY KEY, 100 | total DECIMAL(20, 2), 101 | vendor_name VARCHAR(256) 102 | ); 103 | 104 | CREATE TABLE "SELECT"( 105 | id INTEGER, 106 | name VARCHAR(256) 107 | ) -------------------------------------------------------------------------------- /scalasql/test/resources/mysql-customer-schema.sql: 
-------------------------------------------------------------------------------- 1 | SET FOREIGN_KEY_CHECKS = 0; 2 | DROP TABLE IF EXISTS `buyer` CASCADE; 3 | DROP TABLE IF EXISTS `product` CASCADE; 4 | DROP TABLE IF EXISTS `shipping_info` CASCADE; 5 | DROP TABLE IF EXISTS `purchase` CASCADE; 6 | DROP TABLE IF EXISTS `data_types` CASCADE; 7 | DROP TABLE IF EXISTS `a` CASCADE; 8 | DROP TABLE IF EXISTS `b` CASCADE; 9 | DROP TABLE IF EXISTS `non_round_trip_types` CASCADE; 10 | DROP TABLE IF EXISTS `opt_cols` CASCADE; 11 | DROP TABLE IF EXISTS `nested` CASCADE; 12 | DROP TABLE IF EXISTS `enclosing` CASCADE; 13 | DROP TABLE IF EXISTS `select` CASCADE; 14 | 15 | SET FOREIGN_KEY_CHECKS = 1; 16 | 17 | CREATE TABLE buyer ( 18 | id INTEGER PRIMARY KEY AUTO_INCREMENT, 19 | name VARCHAR(256), 20 | date_of_birth DATE 21 | ); 22 | 23 | CREATE TABLE product ( 24 | id INTEGER PRIMARY KEY AUTO_INCREMENT, 25 | kebab_case_name VARCHAR(256), 26 | name VARCHAR(256), 27 | price DECIMAL(20, 2) 28 | ); 29 | 30 | CREATE TABLE shipping_info ( 31 | id INTEGER PRIMARY KEY AUTO_INCREMENT, 32 | buyer_id INT, 33 | shipping_date DATE, 34 | FOREIGN KEY(buyer_id) REFERENCES buyer(id) 35 | ); 36 | 37 | CREATE TABLE purchase ( 38 | id INTEGER PRIMARY KEY AUTO_INCREMENT, 39 | shipping_info_id INT, 40 | product_id INT, 41 | count INT, 42 | total DECIMAL(20, 2), 43 | FOREIGN KEY(shipping_info_id) REFERENCES shipping_info(id), 44 | FOREIGN KEY(product_id) REFERENCES product(id) 45 | ); 46 | 47 | CREATE TABLE data_types ( 48 | my_tiny_int SMALLINT, 49 | my_small_int SMALLINT, 50 | my_int INTEGER, 51 | my_big_int BIGINT, 52 | my_double DOUBLE PRECISION, 53 | my_boolean BOOLEAN, 54 | my_local_date DATE, 55 | my_local_time TIME, 56 | my_local_date_time TIMESTAMP, 57 | my_util_date TIMESTAMP, 58 | my_instant DATETIME, 59 | my_var_binary VARBINARY(256), 60 | my_uuid CHAR(36), 61 | my_enum ENUM ('foo', 'bar', 'baz') 62 | ); 63 | 64 | CREATE TABLE a( 65 | id INTEGER, 66 | b_id INTEGER 67 | ); 68 | 69 | CREATE 
TABLE b( 70 | id INTEGER, 71 | custom VARCHAR(256) 72 | ); 73 | 74 | CREATE TABLE non_round_trip_types( 75 | my_zoned_date_time TIMESTAMP, 76 | my_offset_date_time TIMESTAMP 77 | ); 78 | 79 | CREATE TABLE opt_cols( 80 | my_int INTEGER, 81 | my_int2 INTEGER 82 | ); 83 | 84 | CREATE TABLE nested( 85 | foo_id INTEGER, 86 | my_boolean BOOLEAN 87 | ); 88 | 89 | CREATE TABLE enclosing( 90 | bar_id INTEGER, 91 | my_string VARCHAR(256), 92 | foo_id INTEGER, 93 | my_boolean BOOLEAN 94 | ); 95 | 96 | CREATE TABLE `select`( 97 | id INTEGER, 98 | name VARCHAR(256) 99 | ); 100 | -------------------------------------------------------------------------------- /scalasql/test/resources/postgres-customer-schema.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS buyer CASCADE; 2 | DROP TABLE IF EXISTS product CASCADE; 3 | DROP TABLE IF EXISTS shipping_info CASCADE; 4 | DROP TABLE IF EXISTS purchase CASCADE; 5 | DROP TABLE IF EXISTS data_types CASCADE; 6 | DROP TABLE IF EXISTS a CASCADE; 7 | DROP TABLE IF EXISTS b CASCADE; 8 | DROP TABLE IF EXISTS non_round_trip_types CASCADE; 9 | DROP TABLE IF EXISTS opt_cols CASCADE; 10 | DROP TABLE IF EXISTS nested CASCADE; 11 | DROP TABLE IF EXISTS enclosing CASCADE; 12 | DROP TABLE IF EXISTS invoice CASCADE; 13 | DROP TYPE IF EXISTS my_enum CASCADE; 14 | DROP SCHEMA IF EXISTS otherschema CASCADE; 15 | DROP TABLE IF EXISTS "select" CASCADE; 16 | 17 | CREATE TABLE buyer ( 18 | id SERIAL PRIMARY KEY, 19 | name VARCHAR(256), 20 | date_of_birth DATE 21 | ); 22 | 23 | CREATE TABLE product ( 24 | id SERIAL PRIMARY KEY, 25 | kebab_case_name VARCHAR(256), 26 | name VARCHAR(256), 27 | price DECIMAL(20, 2) 28 | ); 29 | 30 | CREATE TABLE shipping_info ( 31 | id SERIAL PRIMARY KEY, 32 | buyer_id INT, 33 | shipping_date DATE, 34 | FOREIGN KEY(buyer_id) REFERENCES buyer(id) 35 | ); 36 | 37 | CREATE TABLE purchase ( 38 | id SERIAL PRIMARY KEY, 39 | shipping_info_id INT, 40 | product_id INT, 41 | count 
INT, 42 | total DECIMAL(20, 2), 43 | FOREIGN KEY(shipping_info_id) REFERENCES shipping_info(id), 44 | FOREIGN KEY(product_id) REFERENCES product(id) 45 | ); 46 | 47 | CREATE TYPE my_enum AS ENUM ('foo', 'bar', 'baz'); 48 | CREATE TABLE data_types ( 49 | my_tiny_int SMALLINT, 50 | my_small_int SMALLINT, 51 | my_int INTEGER, 52 | my_big_int BIGINT, 53 | my_double DOUBLE PRECISION, 54 | my_boolean BOOLEAN, 55 | my_local_date DATE, 56 | my_local_time TIME, 57 | my_local_date_time TIMESTAMP, 58 | my_util_date TIMESTAMP, 59 | my_instant TIMESTAMP WITH TIME ZONE, 60 | my_var_binary BYTEA, 61 | my_uuid UUID, 62 | my_enum my_enum 63 | -- my_offset_time TIME WITH TIME ZONE, 64 | 65 | ); 66 | 67 | CREATE TABLE a( 68 | id INTEGER, 69 | b_id INTEGER 70 | ); 71 | 72 | CREATE TABLE b( 73 | id INTEGER, 74 | custom VARCHAR(256) 75 | ); 76 | 77 | CREATE TABLE non_round_trip_types( 78 | my_zoned_date_time TIMESTAMP WITH TIME ZONE, 79 | my_offset_date_time TIMESTAMP WITH TIME ZONE 80 | ); 81 | 82 | CREATE TABLE opt_cols( 83 | my_int INTEGER, 84 | my_int2 INTEGER 85 | ); 86 | 87 | CREATE TABLE nested( 88 | foo_id INTEGER, 89 | my_boolean BOOLEAN 90 | ); 91 | 92 | CREATE TABLE enclosing( 93 | bar_id INTEGER, 94 | my_string VARCHAR(256), 95 | foo_id INTEGER, 96 | my_boolean BOOLEAN 97 | ); 98 | 99 | 100 | CREATE SCHEMA otherschema; 101 | 102 | CREATE TABLE otherschema.invoice( 103 | id SERIAL PRIMARY KEY, 104 | total DECIMAL(20, 2), 105 | vendor_name VARCHAR(256) 106 | ); 107 | 108 | CREATE TABLE "select"( 109 | id INTEGER, 110 | name VARCHAR(256) 111 | ); 112 | -------------------------------------------------------------------------------- /scalasql/test/resources/sqlite-customer-schema.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS buyer; 2 | DROP TABLE IF EXISTS product; 3 | DROP TABLE IF EXISTS shipping_info; 4 | DROP TABLE IF EXISTS purchase; 5 | DROP TABLE IF EXISTS data_types; 6 | DROP TABLE IF EXISTS a; 7 | DROP 
TABLE IF EXISTS b; 8 | DROP TABLE IF EXISTS non_round_trip_types; 9 | DROP TABLE IF EXISTS nested; 10 | DROP TABLE IF EXISTS enclosing; 11 | DROP TABLE IF EXISTS opt_cols; 12 | DROP TABLE IF EXISTS "select"; 13 | 14 | CREATE TABLE buyer ( 15 | id INTEGER PRIMARY KEY AUTOINCREMENT, 16 | name VARCHAR(256), 17 | date_of_birth DATE 18 | ); 19 | 20 | CREATE TABLE product ( 21 | id INTEGER PRIMARY KEY AUTOINCREMENT, 22 | kebab_case_name VARCHAR(256), 23 | name VARCHAR(256), 24 | price DECIMAL(20, 2) 25 | ); 26 | 27 | CREATE TABLE shipping_info ( 28 | id INTEGER PRIMARY KEY AUTOINCREMENT, 29 | buyer_id INT, 30 | shipping_date DATE, 31 | FOREIGN KEY(buyer_id) REFERENCES buyer(id) 32 | ); 33 | 34 | CREATE TABLE purchase ( 35 | id INTEGER PRIMARY KEY AUTOINCREMENT, 36 | shipping_info_id INT, 37 | product_id INT, 38 | count INT, 39 | total DECIMAL(20, 2), 40 | FOREIGN KEY(shipping_info_id) REFERENCES shipping_info(id), 41 | FOREIGN KEY(product_id) REFERENCES product(id) 42 | ); 43 | 44 | CREATE TABLE data_types ( 45 | my_tiny_int SMALLINT, 46 | my_small_int SMALLINT, 47 | my_int INTEGER, 48 | my_big_int BIGINT, 49 | my_double DOUBLE PRECISION, 50 | my_boolean BOOLEAN, 51 | my_local_date DATE, 52 | my_local_time TIME, 53 | my_local_date_time TIMESTAMP, 54 | my_util_date TIMESTAMP, 55 | my_instant DATETIME, 56 | my_var_binary VARBINARY, 57 | my_uuid BINARY(16), 58 | my_enum VARCHAR(256) 59 | -- my_offset_time TIME WITH TIME ZONE, 60 | ); 61 | 62 | CREATE TABLE a( 63 | id INTEGER, 64 | b_id INTEGER 65 | ); 66 | 67 | CREATE TABLE b( 68 | id INTEGER, 69 | custom VARCHAR(256) 70 | ); 71 | 72 | CREATE TABLE non_round_trip_types( 73 | my_zoned_date_time TIMESTAMP, 74 | my_offset_date_time TIMESTAMP 75 | ); 76 | 77 | CREATE TABLE opt_cols( 78 | my_int INTEGER, 79 | my_int2 INTEGER 80 | ); 81 | 82 | 83 | 84 | CREATE TABLE nested( 85 | foo_id INTEGER, 86 | my_boolean BOOLEAN 87 | ); 88 | 89 | CREATE TABLE enclosing( 90 | bar_id INTEGER, 91 | my_string VARCHAR(256), 92 | foo_id INTEGER, 
93 | my_boolean BOOLEAN 94 | ); 95 | 96 | CREATE TABLE "select"( 97 | id INTEGER, 98 | name VARCHAR(256) 99 | ) 100 | -------------------------------------------------------------------------------- /scalasql/test/resources/world-schema.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE IF NOT EXISTS city ( 2 | id integer AUTO_INCREMENT PRIMARY KEY, 3 | name varchar NOT NULL, 4 | countrycode character(3) NOT NULL, 5 | district varchar NOT NULL, 6 | population integer NOT NULL 7 | ); 8 | 9 | CREATE TABLE IF NOT EXISTS country ( 10 | code character(3) PRIMARY KEY, 11 | name varchar NOT NULL, 12 | continent varchar NOT NULL, 13 | region varchar NOT NULL, 14 | surfacearea real NOT NULL, 15 | indepyear smallint, 16 | population integer NOT NULL, 17 | lifeexpectancy real, 18 | gnp numeric(10,2), 19 | gnpold numeric(10,2), 20 | localname varchar NOT NULL, 21 | governmentform varchar NOT NULL, 22 | headofstate varchar, 23 | capital integer, 24 | code2 character(2) NOT NULL 25 | ); 26 | 27 | CREATE TABLE IF NOT EXISTS countrylanguage ( 28 | countrycode character(3) NOT NULL, 29 | language varchar NOT NULL, 30 | isofficial boolean NOT NULL, 31 | percentage real NOT NULL 32 | ); 33 | -------------------------------------------------------------------------------- /scalasql/test/src-3/Scala3ExampleTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql 2 | 3 | import utest._ 4 | 5 | object Scala3ExampleTests extends TestSuite: 6 | def tests = Tests: 7 | test("h2") - example.Scala3H2Example.main(Array()) 8 | -------------------------------------------------------------------------------- /scalasql/test/src-3/example/Scala3H2Example.scala: -------------------------------------------------------------------------------- 1 | package scalasql.example 2 | 3 | import scalasql.Table 4 | import scalasql.H2Dialect._ 5 | 6 | object Scala3H2Example: 7 | 8 | case class 
ExampleProduct[T[_]]( 9 | id: T[Int], 10 | kebabCaseName: T[String], 11 | name: T[String], 12 | price: T[Double] 13 | ) 14 | 15 | object ExampleProduct extends Table[ExampleProduct] 16 | 17 | // The example H2 database comes from the library `com.h2database:h2:2.2.224` 18 | val dataSource = new org.h2.jdbcx.JdbcDataSource 19 | dataSource.setUrl("jdbc:h2:mem:testScala3;DB_CLOSE_DELAY=-1") 20 | lazy val h2Client = new scalasql.DbClient.DataSource( 21 | dataSource, 22 | config = new scalasql.Config {} 23 | ) 24 | 25 | def main(args: Array[String]): Unit = 26 | h2Client.transaction: db => 27 | db.updateRaw(""" 28 | CREATE TABLE example_product ( 29 | id INTEGER AUTO_INCREMENT PRIMARY KEY, 30 | kebab_case_name VARCHAR(256), 31 | name VARCHAR(256), 32 | price DECIMAL(20, 2) 33 | ); 34 | """) 35 | 36 | val inserted = db.run( 37 | ExampleProduct.insert.batched(_.kebabCaseName, _.name, _.price)( 38 | ("face-mask", "Face Mask", 8.88), 39 | ("guitar", "Guitar", 300), 40 | ("socks", "Socks", 3.14), 41 | ("skate-board", "Skate Board", 123.45), 42 | ("camera", "Camera", 1000.00), 43 | ("cookie", "Cookie", 0.10) 44 | ) 45 | ) 46 | 47 | assert(inserted == 6) 48 | 49 | val result = 50 | db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name)) 51 | 52 | assert(result == Seq("Camera", "Guitar", "Skate Board")) 53 | 54 | db.run(ExampleProduct.update(_.name === "Cookie").set(_.price := 11.0)) 55 | 56 | db.run(ExampleProduct.delete(_.name === "Guitar")) 57 | 58 | val result2 = 59 | db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name)) 60 | 61 | assert(result2 == Seq("Camera", "Skate Board", "Cookie")) 62 | -------------------------------------------------------------------------------- /scalasql/test/src/ExampleTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql 2 | import utest._ 3 | 4 | /** 5 | * Make sure the examples all have passing main methods 6 | */ 7 | object 
ExampleTests extends TestSuite { 8 | def tests = Tests { 9 | test("postgres") - example.PostgresExample.main(Array()) 10 | test("mysql") - example.MySqlExample.main(Array()) 11 | test("h2") - example.H2Example.main(Array()) 12 | test("sqlite") - example.SqliteExample.main(Array()) 13 | test("hikari") - example.HikariCpExample.main(Array()) 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /scalasql/test/src/FailureTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql 2 | import scalasql.core.Expr 3 | import utest._ 4 | import utils.SqliteSuite 5 | 6 | import scala.annotation.unused 7 | 8 | /** 9 | * Tests for all the aggregate operators that we provide by default 10 | */ 11 | object FailureTests extends SqliteSuite { 12 | def description = "Things that should not compile or should give runtime errors" 13 | def tests = Tests { 14 | test("equals") - { 15 | // val ex = intercept[Exception] { Expr(1) == 2 } 16 | // assert(ex.getMessage.contains("Expr#equals is not defined")) 17 | // 18 | assert(Expr.identity(Expr(1)) != Expr.identity(Expr(1))) 19 | val e = Expr(1) 20 | assert(Expr.identity(e) == Expr.identity(e)) 21 | } 22 | test("toString") - { 23 | val ex = intercept[Exception] { Expr(1).toString } 24 | assert(ex.getMessage.contains("Expr#toString is not defined")) 25 | 26 | @unused val s: String = Expr.toString(Expr(1)) 27 | } 28 | 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /scalasql/test/src/Main.scala: -------------------------------------------------------------------------------- 1 | package scalasql 2 | 3 | import java.sql.DriverManager 4 | import scalasql.H2Dialect._ 5 | object Main { 6 | 7 | case class Example[T[_]](bytes: T[geny.Bytes]) 8 | 9 | object Example extends Table[Example] 10 | 11 | // The example H2 database comes from the library `com.h2database:h2:2.2.224` 12 | val conn = 
DriverManager.getConnection("jdbc:h2:mem:mydb") 13 | 14 | def main(args: Array[String]): Unit = { 15 | conn 16 | .createStatement() 17 | .executeUpdate( 18 | """ 19 | CREATE TABLE data_types ( 20 | my_var_binary VARBINARY(256) 21 | ); 22 | """ 23 | ) 24 | 25 | val prepared = conn.prepareStatement("INSERT INTO data_types (my_var_binary) VALUES (?)") 26 | prepared.setBytes(1, Array[Byte](1, 2, 3, 4)) 27 | prepared.executeUpdate() 28 | 29 | val results = conn 30 | .createStatement() 31 | .executeQuery( 32 | "SELECT data_types0.my_var_binary AS my_var_binary FROM data_types data_types0" 33 | ) 34 | 35 | results.next() 36 | pprint.log(results.getBytes(1)) 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /scalasql/test/src/UnitTestData.scala: -------------------------------------------------------------------------------- 1 | package scalasql 2 | import scalasql._ 3 | 4 | import java.time.LocalDate 5 | 6 | case class Product[T[_]](id: T[Int], kebabCaseName: T[String], name: T[String], price: T[Double]) 7 | object Product extends Table[Product] 8 | 9 | case class Buyer[T[_]](id: T[Int], name: T[String], dateOfBirth: T[LocalDate]) 10 | object Buyer extends Table[Buyer] 11 | 12 | case class Invoice[T[_]](id: T[Int], total: T[Double], vendor_name: T[String]) 13 | object Invoice extends Table[Invoice] { 14 | override def schemaName = "otherschema" 15 | } 16 | 17 | case class Select[T[_]](id: T[Int], name: T[String]) 18 | object Select extends Table[Select] { 19 | override def escape = true 20 | } 21 | 22 | case class ShippingInfo[T[_]](id: T[Int], buyerId: T[Int], shippingDate: T[LocalDate]) 23 | object ShippingInfo extends Table[ShippingInfo] 24 | 25 | case class Purchase[T[_]]( 26 | id: T[Int], 27 | shippingInfoId: T[Int], 28 | productId: T[Int], 29 | count: T[Int], 30 | total: T[Double] 31 | ) 32 | object Purchase extends Table[Purchase] 33 | -------------------------------------------------------------------------------- 
/scalasql/test/src/UtestFramework.scala: -------------------------------------------------------------------------------- 1 | package scalasql 2 | 3 | import scalasql.UtestFramework.recordedTests 4 | 5 | object UtestFramework { 6 | case class Record( 7 | suiteName: String, 8 | suiteLine: Int, 9 | testPath: Seq[String], 10 | docs: String, 11 | queryCodeString: String, 12 | sqlString: Option[String], 13 | resultCodeString: Option[String] 14 | ) 15 | 16 | object Record { 17 | implicit val rw: upickle.default.ReadWriter[Record] = upickle.default.macroRW 18 | } 19 | val recordedTests = collection.mutable.Buffer.empty[Record] 20 | val recordedSuiteDescriptions = collection.mutable.Map.empty[String, String] 21 | } 22 | class UtestFramework extends utest.runner.Framework { 23 | override def setup() = { 24 | println("Setting up CustomFramework") 25 | recordedTests.clear() 26 | } 27 | override def teardown() = { 28 | println("Tearing down CustomFramework " + recordedTests.size) 29 | val workspaceRoot = os.Path(sys.env("MILL_WORKSPACE_ROOT")) 30 | val recordedTestsFile = os.RelPath(sys.env("SCALASQL_RECORDED_TESTS_NAME")) 31 | val recordedSuiteDescriptionsFile = 32 | os.RelPath(sys.env("SCALASQL_RECORDED_SUITE_DESCRIPTIONS_NAME")) 33 | os.write.over( 34 | workspaceRoot / "out" / recordedTestsFile, 35 | upickle.default.write(UtestFramework.recordedTests, indent = 4) 36 | ) 37 | os.write.over( 38 | workspaceRoot / "out" / recordedSuiteDescriptionsFile, 39 | upickle.default.write(UtestFramework.recordedSuiteDescriptions, indent = 4) 40 | ) 41 | recordedTests.clear() 42 | } 43 | 44 | override def exceptionStackFrameHighlighter(s: StackTraceElement): Boolean = { 45 | 46 | s.getClassName.contains("scalasql") 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /scalasql/test/src/dialects/H2DialectTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql.dialects 2 | 3 | import 
scalasql.core.Expr 4 | import scalasql.utils.H2Suite 5 | import utest._ 6 | 7 | trait H2DialectTests extends H2Suite { 8 | def description = "Operations specific to working with H2 Databases" 9 | def tests = Tests { 10 | 11 | test("ltrim2") - checker( 12 | query = Expr("xxHellox").ltrim("x"), 13 | sql = "SELECT LTRIM(?, ?) AS res", 14 | value = "Hellox" 15 | ) 16 | 17 | test("rtrim2") - checker( 18 | query = Expr("xxHellox").rtrim("x"), 19 | sql = "SELECT RTRIM(?, ?) AS res", 20 | value = "xxHello" 21 | ) 22 | 23 | test("lpad") - checker( 24 | query = Expr("Hello").lpad(10, "xy"), 25 | sql = "SELECT LPAD(?, ?, ?) AS res", 26 | value = "xxxxxHello" // H2 only uses first character of fill string 27 | ) 28 | 29 | test("rpad") - checker( 30 | query = Expr("Hello").rpad(10, "xy"), 31 | sql = "SELECT RPAD(?, ?, ?) AS res", 32 | value = "Helloxxxxx" // H2 only uses first character of fill string 33 | ) 34 | 35 | test("concat") - checker( 36 | query = db.concat("i ", "am", " cow", 1337), 37 | sql = "SELECT CONCAT(?, ?, ?, ?) AS res", 38 | value = "i am cow1337" 39 | ) 40 | 41 | test("concatWs") - checker( 42 | query = db.concatWs(" ", "i", "am", "cow", 1337), 43 | sql = "SELECT CONCAT_WS(?, ?, ?, ?, ?) 
AS res", 44 | value = "i am cow 1337" 45 | ) 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /scalasql/test/src/dialects/MySqlDialectTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql.dialects 2 | 3 | import scalasql._ 4 | import scalasql.core.Expr 5 | import utest._ 6 | import utils.MySqlSuite 7 | 8 | import java.time.LocalDate 9 | 10 | trait MySqlDialectTests extends MySqlSuite { 11 | def description = "Operations specific to working with MySql Databases" 12 | override def utestBeforeEach(path: Seq[String]): Unit = checker.reset() 13 | def tests = Tests { 14 | 15 | test("forUpdate") - checker( 16 | query = Buyer.select.filter(_.id === 1).forUpdate, 17 | sql = """ 18 | SELECT 19 | buyer0.id AS id, 20 | buyer0.name AS name, 21 | buyer0.date_of_birth AS date_of_birth 22 | FROM buyer buyer0 23 | WHERE (buyer0.id = ?) 24 | FOR UPDATE 25 | """, 26 | value = Seq( 27 | Buyer[Sc](1, "James Bond", LocalDate.parse("2001-02-03")) 28 | ), 29 | docs = """ 30 | ScalaSql's MySql dialect provides the `.forUpdate` operator, which translates 31 | into a SQL `SELECT ... FOR UPDATE` clause 32 | """ 33 | ) 34 | 35 | test("reverse") - 36 | checker(query = Expr("Hello").reverse, sql = "SELECT REVERSE(?) AS res", value = "olleH") 37 | 38 | test("lpad") - checker( 39 | query = Expr("Hello").lpad(10, "xy"), 40 | sql = "SELECT LPAD(?, ?, ?) AS res", 41 | value = "xyxyxHello" 42 | ) 43 | 44 | test("rpad") - checker( 45 | query = Expr("Hello").rpad(10, "xy"), 46 | sql = "SELECT RPAD(?, ?, ?) 
AS res", 47 | value = "Helloxyxyx" 48 | ) 49 | 50 | test("conflict") { 51 | 52 | test("ignore") - { 53 | 54 | checker( 55 | query = Buyer.insert 56 | .columns( 57 | _.name := "test buyer", 58 | _.dateOfBirth := LocalDate.parse("2023-09-09"), 59 | _.id := 1 // This should cause a primary key conflict 60 | ) 61 | .onConflictUpdate(x => x.id := x.id), 62 | // MySql does not support ON CONFLICT IGNORE, but you can emulate it using 63 | // update (id = id) 64 | sql = 65 | "INSERT INTO buyer (name, date_of_birth, id) VALUES (?, ?, ?) ON DUPLICATE KEY UPDATE id = buyer.id", 66 | value = 1 67 | ) 68 | } 69 | 70 | test("update") - { 71 | checker( 72 | query = Buyer.insert 73 | .columns( 74 | _.name := "test buyer", 75 | _.dateOfBirth := LocalDate.parse("2023-09-09"), 76 | _.id := 1 // This should cause a primary key conflict 77 | ) 78 | .onConflictUpdate(_.name := "TEST BUYER CONFLICT"), 79 | sql = 80 | "INSERT INTO buyer (name, date_of_birth, id) VALUES (?, ?, ?) ON DUPLICATE KEY UPDATE name = ?", 81 | value = 2 82 | ) 83 | 84 | checker( 85 | query = Buyer.select, 86 | value = Seq( 87 | Buyer[Sc](1, "TEST BUYER CONFLICT", LocalDate.parse("2001-02-03")), 88 | Buyer[Sc](2, "叉烧包", LocalDate.parse("1923-11-12")), 89 | Buyer[Sc](3, "Li Haoyi", LocalDate.parse("1965-08-09")) 90 | ), 91 | normalize = (x: Seq[Buyer[Sc]]) => x.sortBy(_.id) 92 | ) 93 | } 94 | 95 | test("updateComputed") - { 96 | checker( 97 | query = Buyer.insert 98 | .columns( 99 | _.name := "test buyer", 100 | _.dateOfBirth := LocalDate.parse("2023-09-09"), 101 | _.id := 1 // This should cause a primary key conflict 102 | ) 103 | .onConflictUpdate(v => v.name := v.name.toUpperCase), 104 | sql = 105 | "INSERT INTO buyer (name, date_of_birth, id) VALUES (?, ?, ?) 
ON DUPLICATE KEY UPDATE name = UPPER(buyer.name)", 106 | value = 2 107 | ) 108 | 109 | checker( 110 | query = Buyer.select, 111 | value = Seq( 112 | Buyer[Sc](1, "JAMES BOND", LocalDate.parse("2001-02-03")), 113 | Buyer[Sc](2, "叉烧包", LocalDate.parse("1923-11-12")), 114 | Buyer[Sc](3, "Li Haoyi", LocalDate.parse("1965-08-09")) 115 | ), 116 | normalize = (x: Seq[Buyer[Sc]]) => x.sortBy(_.id) 117 | ) 118 | } 119 | 120 | } 121 | 122 | test("concat") - checker( 123 | query = db.concat("i ", "am", " cow", 1337), 124 | sql = "SELECT CONCAT(?, ?, ?, ?) AS res", 125 | value = "i am cow1337" 126 | ) 127 | 128 | test("concatWs") - checker( 129 | query = db.concatWs(" ", "i", "am", "cow", 1337), 130 | sql = "SELECT CONCAT_WS(?, ?, ?, ?, ?) AS res", 131 | value = "i am cow 1337" 132 | ) 133 | 134 | test("rand") - checker( 135 | query = db.rand, 136 | sql = "SELECT RAND() AS res" 137 | ) 138 | 139 | } 140 | } 141 | -------------------------------------------------------------------------------- /scalasql/test/src/dialects/PostgresDialectTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql.dialects 2 | 3 | import scalasql._ 4 | import scalasql.core.Expr 5 | import utest._ 6 | import utils.PostgresSuite 7 | 8 | trait PostgresDialectTests extends PostgresSuite { 9 | def description = "Operations specific to working with Postgres Databases" 10 | def tests = Tests { 11 | 12 | test("distinctOn") - checker( 13 | query = Purchase.select.distinctOn(_.shippingInfoId).sortBy(_.shippingInfoId).desc, 14 | sql = """ 15 | SELECT 16 | DISTINCT ON (purchase0.shipping_info_id) purchase0.id AS id, 17 | purchase0.shipping_info_id AS shipping_info_id, 18 | purchase0.product_id AS product_id, 19 | purchase0.count AS count, 20 | purchase0.total AS total 21 | FROM purchase purchase0 22 | ORDER BY shipping_info_id DESC 23 | """, 24 | value = Seq( 25 | Purchase[Sc](6, 3, 1, 5, 44.4), 26 | Purchase[Sc](4, 2, 4, 4, 493.8), 27 | Purchase[Sc](2, 
1, 2, 3, 900.0) 28 | ), 29 | docs = """ 30 | ScalaSql's Postgres dialect provides the `.distinctOn` operator, which translates 31 | into a SQL `DISTINCT ON` clause 32 | """ 33 | ) 34 | 35 | test("forUpdate") - checker( 36 | query = Invoice.select.filter(_.id === 1).forUpdate, 37 | sql = """ 38 | SELECT 39 | invoice0.id AS id, 40 | invoice0.total AS total, 41 | invoice0.vendor_name AS vendor_name 42 | FROM otherschema.invoice invoice0 43 | WHERE (invoice0.id = ?) 44 | FOR UPDATE 45 | """, 46 | value = Seq( 47 | Invoice[Sc](1, 150.4, "Siemens") 48 | ), 49 | docs = """ 50 | ScalaSql's Postgres dialect provides the `.forUpdate` operator, which translates 51 | into a SQL `SELECT ... FOR UPDATE` clause 52 | """ 53 | ) 54 | 55 | test("ltrim2") - checker( 56 | query = Expr("xxHellox").ltrim("x"), 57 | sql = "SELECT LTRIM(?, ?) AS res", 58 | value = "Hellox" 59 | ) 60 | 61 | test("rtrim2") - checker( 62 | query = Expr("xxHellox").rtrim("x"), 63 | sql = "SELECT RTRIM(?, ?) AS res", 64 | value = "xxHello" 65 | ) 66 | 67 | test("reverse") - 68 | checker(query = Expr("Hello").reverse, sql = "SELECT REVERSE(?) AS res", value = "olleH") 69 | 70 | test("lpad") - checker( 71 | query = Expr("Hello").lpad(10, "xy"), 72 | sql = "SELECT LPAD(?, ?, ?) AS res", 73 | value = "xyxyxHello" 74 | ) 75 | 76 | test("rpad") - checker( 77 | query = Expr("Hello").rpad(10, "xy"), 78 | sql = "SELECT RPAD(?, ?, ?) AS res", 79 | value = "Helloxyxyx" 80 | ) 81 | 82 | test("concat") - checker( 83 | query = db.concat("i ", "am", " cow", 1337), 84 | sql = "SELECT CONCAT(?, ?, ?, ?) AS res", 85 | value = "i am cow1337" 86 | ) 87 | 88 | test("concatWs") - checker( 89 | query = db.concatWs(" ", "i", "am", "cow", 1337), 90 | sql = "SELECT CONCAT_WS(?, ?, ?, ?, ?) AS res", 91 | value = "i am cow 1337" 92 | ) 93 | 94 | test("format") - checker( 95 | query = db.format("i am cow %s hear me moo %s", 1337, 31337), 96 | sql = "SELECT FORMAT(?, ?, ?) 
AS res", 97 | value = "i am cow 1337 hear me moo 31337" 98 | ) 99 | 100 | test("random") - checker( 101 | query = db.random, 102 | sql = "SELECT RANDOM() AS res" 103 | ) 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /scalasql/test/src/dialects/SqliteDialectTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql.dialects 2 | 3 | import scalasql._ 4 | import scalasql.core.Expr 5 | import utest._ 6 | import utils.SqliteSuite 7 | 8 | trait SqliteDialectTests extends SqliteSuite { 9 | def description = "Operations specific to working with Sqlite Databases" 10 | def tests = Tests { 11 | 12 | test("ltrim2") - checker( 13 | query = Expr("xxHellox").ltrim("x"), 14 | sql = "SELECT LTRIM(?, ?) AS res", 15 | value = "Hellox" 16 | ) 17 | 18 | test("rtrim2") - checker( 19 | query = Expr("xxHellox").rtrim("x"), 20 | sql = "SELECT RTRIM(?, ?) AS res", 21 | value = "xxHello" 22 | ) 23 | 24 | test("glob") - checker( 25 | query = Expr("*cop*").glob("roflcopter"), 26 | sql = "SELECT GLOB(?, ?) AS res", 27 | value = true 28 | ) 29 | 30 | test("changes") - checker( 31 | query = db.changes, 32 | sql = "SELECT CHANGES() AS res" 33 | ) 34 | 35 | test("totalChanges") - checker( 36 | query = db.totalChanges, 37 | sql = "SELECT TOTAL_CHANGES() AS res" 38 | ) 39 | 40 | test("typeOf") - checker( 41 | query = db.typeOf(123), 42 | sql = "SELECT TYPEOF(?) AS res", 43 | value = "integer" 44 | ) 45 | 46 | test("lastInsertRowId") - checker( 47 | query = db.lastInsertRowId, 48 | sql = "SELECT LAST_INSERT_ROWID() AS res" 49 | ) 50 | 51 | test("char") - checker( 52 | query = db.char(108, 111, 108), 53 | sql = "SELECT CHAR(?, ?, ?) AS res", 54 | value = "lol" 55 | ) 56 | 57 | test("format") - checker( 58 | query = db.format("i am cow %s hear me moo %s", 1337, 31337), 59 | sql = "SELECT FORMAT(?, ?, ?) 
AS res", 60 | value = "i am cow 1337 hear me moo 31337" 61 | ) 62 | 63 | test("hex") - checker( 64 | query = db.hex(new geny.Bytes(Array(1, 10, 100, -127))), 65 | sql = "SELECT HEX(?) AS res", 66 | value = "010A6481" 67 | ) 68 | 69 | test("unhex") - checker( 70 | query = db.unhex("010A6481"), 71 | sql = "SELECT UNHEX(?) AS res", 72 | value = new geny.Bytes(Array(1, 10, 100, -127)) 73 | ) 74 | 75 | test("zeroBlob") - checker( 76 | query = db.zeroBlob(16), 77 | sql = "SELECT ZEROBLOB(?) AS res", 78 | value = new geny.Bytes(new Array[Byte](16)) 79 | ) 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /scalasql/test/src/example/H2Example.scala: -------------------------------------------------------------------------------- 1 | package scalasql.example 2 | 3 | import scalasql.Table 4 | import scalasql.H2Dialect._ 5 | 6 | object H2Example { 7 | 8 | case class ExampleProduct[T[_]]( 9 | id: T[Int], 10 | kebabCaseName: T[String], 11 | name: T[String], 12 | price: T[Double] 13 | ) 14 | 15 | object ExampleProduct extends Table[ExampleProduct] 16 | 17 | // The example H2 database comes from the library `com.h2database:h2:2.2.224` 18 | val dataSource = new org.h2.jdbcx.JdbcDataSource 19 | dataSource.setUrl("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1") 20 | lazy val h2Client = new scalasql.DbClient.DataSource( 21 | dataSource, 22 | config = new scalasql.Config {} 23 | ) 24 | 25 | def main(args: Array[String]): Unit = { 26 | h2Client.transaction { db => 27 | db.updateRaw(""" 28 | CREATE TABLE example_product ( 29 | id INTEGER AUTO_INCREMENT PRIMARY KEY, 30 | kebab_case_name VARCHAR(256), 31 | name VARCHAR(256), 32 | price DECIMAL(20, 2) 33 | ); 34 | """) 35 | 36 | val inserted = db.run( 37 | ExampleProduct.insert.batched(_.kebabCaseName, _.name, _.price)( 38 | ("face-mask", "Face Mask", 8.88), 39 | ("guitar", "Guitar", 300), 40 | ("socks", "Socks", 3.14), 41 | ("skate-board", "Skate Board", 123.45), 42 | ("camera", "Camera", 1000.00), 43 
| ("cookie", "Cookie", 0.10) 44 | ) 45 | ) 46 | 47 | assert(inserted == 6) 48 | 49 | val result = 50 | db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name)) 51 | 52 | assert(result == Seq("Camera", "Guitar", "Skate Board")) 53 | 54 | db.run(ExampleProduct.update(_.name === "Cookie").set(_.price := 11.0)) 55 | 56 | db.run(ExampleProduct.delete(_.name === "Guitar")) 57 | 58 | val result2 = 59 | db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name)) 60 | 61 | assert(result2 == Seq("Camera", "Skate Board", "Cookie")) 62 | } 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /scalasql/test/src/example/HikariCpExample.scala: -------------------------------------------------------------------------------- 1 | package scalasql.example 2 | 3 | import org.testcontainers.containers.PostgreSQLContainer 4 | import scalasql.Table 5 | import scalasql.PostgresDialect._ 6 | object HikariCpExample { 7 | 8 | case class ExampleProduct[T[_]]( 9 | id: T[Int], 10 | kebabCaseName: T[String], 11 | name: T[String], 12 | price: T[Double] 13 | ) 14 | 15 | object ExampleProduct extends Table[ExampleProduct] 16 | 17 | lazy val postgres = { 18 | println("Initializing Postgres") 19 | val pg = new PostgreSQLContainer("postgres:15-alpine") 20 | pg.start() 21 | pg 22 | } 23 | 24 | // HikariDataSource comes from the library `com.zaxxer:HikariCP:5.1.0` 25 | val hikariDataSource = new com.zaxxer.hikari.HikariDataSource() 26 | hikariDataSource.setJdbcUrl(postgres.getJdbcUrl) 27 | hikariDataSource.setUsername(postgres.getUsername) 28 | hikariDataSource.setPassword(postgres.getPassword) 29 | 30 | lazy val hikariClient = new scalasql.DbClient.DataSource( 31 | hikariDataSource, 32 | config = new scalasql.Config {} 33 | ) 34 | 35 | def main(args: Array[String]): Unit = { 36 | hikariClient.transaction { db => 37 | db.updateRaw(""" 38 | CREATE TABLE example_product ( 39 | id SERIAL PRIMARY KEY, 40 | 
kebab_case_name VARCHAR(256), 41 | name VARCHAR(256), 42 | price DECIMAL(20, 2) 43 | ); 44 | """) 45 | 46 | val inserted = db.run( 47 | ExampleProduct.insert.batched(_.kebabCaseName, _.name, _.price)( 48 | ("face-mask", "Face Mask", 8.88), 49 | ("guitar", "Guitar", 300), 50 | ("socks", "Socks", 3.14), 51 | ("skate-board", "Skate Board", 123.45), 52 | ("camera", "Camera", 1000.00), 53 | ("cookie", "Cookie", 0.10) 54 | ) 55 | ) 56 | 57 | assert(inserted == 6) 58 | 59 | val result = 60 | db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name)) 61 | 62 | assert(result == Seq("Camera", "Guitar", "Skate Board")) 63 | 64 | db.run(ExampleProduct.update(_.name === "Cookie").set(_.price := 11.0)) 65 | 66 | db.run(ExampleProduct.delete(_.name === "Guitar")) 67 | 68 | val result2 = 69 | db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name)) 70 | 71 | assert(result2 == Seq("Camera", "Skate Board", "Cookie")) 72 | } 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /scalasql/test/src/example/MySqlExample.scala: -------------------------------------------------------------------------------- 1 | package scalasql.example 2 | 3 | import org.testcontainers.containers.MySQLContainer 4 | import scalasql.Table 5 | import scalasql.MySqlDialect._ 6 | object MySqlExample { 7 | 8 | case class ExampleProduct[T[_]]( 9 | id: T[Int], 10 | kebabCaseName: T[String], 11 | name: T[String], 12 | price: T[Double] 13 | ) 14 | 15 | object ExampleProduct extends Table[ExampleProduct] 16 | 17 | // The example MySQLContainer comes from the library `org.testcontainers:mysql:1.19.1` 18 | lazy val mysql = { 19 | println("Initializing MySql") 20 | val mysql = new MySQLContainer("mysql:8.0.31") 21 | mysql.setCommand("mysqld", "--character-set-server=utf8mb4", "--collation-server=utf8mb4_bin") 22 | mysql.start() 23 | mysql 24 | } 25 | 26 | val dataSource = new com.mysql.cj.jdbc.MysqlDataSource 27 | 
dataSource.setURL(mysql.getJdbcUrl + "?allowMultiQueries=true")
  dataSource.setDatabaseName(mysql.getDatabaseName)
  dataSource.setUser(mysql.getUsername)
  dataSource.setPassword(mysql.getPassword)

  lazy val mysqlClient = new scalasql.DbClient.DataSource(
    dataSource,
    config = new scalasql.Config {}
  )

  def main(args: Array[String]): Unit = {
    mysqlClient.transaction { db =>
      db.updateRaw("""
      CREATE TABLE example_product (
          id INTEGER PRIMARY KEY AUTO_INCREMENT,
          kebab_case_name VARCHAR(256),
          name VARCHAR(256),
          price DECIMAL(20, 2)
      );
      """)

      // Batched insert; returns the number of rows written
      val inserted = db.run(
        ExampleProduct.insert.batched(_.kebabCaseName, _.name, _.price)(
          ("face-mask", "Face Mask", 8.88),
          ("guitar", "Guitar", 300),
          ("socks", "Socks", 3.14),
          ("skate-board", "Skate Board", 123.45),
          ("camera", "Camera", 1000.00),
          ("cookie", "Cookie", 0.10)
        )
      )

      assert(inserted == 6)

      val result =
        db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name))

      assert(result == Seq("Camera", "Guitar", "Skate Board"))

      db.run(ExampleProduct.update(_.name === "Cookie").set(_.price := 11.0))

      db.run(ExampleProduct.delete(_.name === "Guitar"))

      val result2 =
        db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name))

      assert(result2 == Seq("Camera", "Skate Board", "Cookie"))
    }
  }
}
--------------------------------------------------------------------------------
/scalasql/test/src/example/PostgresExample.scala:
--------------------------------------------------------------------------------
package scalasql.example

import org.testcontainers.containers.PostgreSQLContainer
import scalasql.Table

import scalasql.PostgresDialect._
object PostgresExample {

  case class ExampleProduct[T[_]](
      id: T[Int],
      kebabCaseName: T[String],
      name: T[String],
      price: T[Double]
  )

  object ExampleProduct extends Table[ExampleProduct]

  // The example PostgreSQLContainer comes from the library `org.testcontainers:postgresql:1.19.1`
  lazy val postgres = {
    println("Initializing Postgres")
    val pg = new PostgreSQLContainer("postgres:15-alpine")
    pg.start()
    pg
  }

  val dataSource = new org.postgresql.ds.PGSimpleDataSource
  dataSource.setURL(postgres.getJdbcUrl)
  dataSource.setDatabaseName(postgres.getDatabaseName)
  dataSource.setUser(postgres.getUsername)
  dataSource.setPassword(postgres.getPassword)

  lazy val postgresClient = new scalasql.DbClient.DataSource(
    dataSource,
    config = new scalasql.Config {}
  )

  def main(args: Array[String]): Unit = {
    postgresClient.transaction { db =>
      db.updateRaw("""
      CREATE TABLE example_product (
          id SERIAL PRIMARY KEY,
          kebab_case_name VARCHAR(256),
          name VARCHAR(256),
          price DECIMAL(20, 2)
      );
      """)

      val inserted = db.run(
        ExampleProduct.insert.batched(_.kebabCaseName, _.name, _.price)(
          ("face-mask", "Face Mask", 8.88),
          ("guitar", "Guitar", 300),
          ("socks", "Socks", 3.14),
          ("skate-board", "Skate Board", 123.45),
          ("camera", "Camera", 1000.00),
          ("cookie", "Cookie", 0.10)
        )
      )

      assert(inserted == 6)

      val result =
        db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name))

      assert(result == Seq("Camera", "Guitar", "Skate Board"))

      db.run(ExampleProduct.update(_.name === "Cookie").set(_.price := 11.0))

      db.run(ExampleProduct.delete(_.name === "Guitar"))

      val result2 =
        db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name))

      assert(result2 == Seq("Camera", "Skate Board", "Cookie"))
    }
  }
}
--------------------------------------------------------------------------------
/scalasql/test/src/example/SqliteExample.scala:
--------------------------------------------------------------------------------
package scalasql.example

import scalasql.Table

import scalasql.SqliteDialect._
object SqliteExample {

  case class ExampleProduct[T[_]](
      id: T[Int],
      kebabCaseName: T[String],
      name: T[String],
      price: T[Double]
  )

  object ExampleProduct extends Table[ExampleProduct]

  // The example Sqlite JDBC client comes from the library `org.xerial:sqlite-jdbc:3.43.0.0`
  val dataSource = new org.sqlite.SQLiteDataSource()
  // Back the database with a throwaway temp directory so runs don't interfere
  val tmpDb = java.nio.file.Files.createTempDirectory("sqlite")
  dataSource.setUrl(s"jdbc:sqlite:$tmpDb/file.db")
  lazy val sqliteClient = new scalasql.DbClient.DataSource(
    dataSource,
    config = new scalasql.Config {}
  )

  def main(args: Array[String]): Unit = {
    sqliteClient.transaction { db =>
      db.updateRaw("""
      CREATE TABLE example_product (
          id INTEGER PRIMARY KEY AUTOINCREMENT,
          kebab_case_name VARCHAR(256),
          name VARCHAR(256),
          price DECIMAL(20, 2)
      );
      """)

      val inserted = db.run(
        ExampleProduct.insert.batched(_.kebabCaseName, _.name, _.price)(
          ("face-mask", "Face Mask", 8.88),
          ("guitar", "Guitar", 300),
          ("socks", "Socks", 3.14),
          ("skate-board", "Skate Board", 123.45),
          ("camera", "Camera", 1000.00),
          ("cookie", "Cookie", 0.10)
        )
      )

      assert(inserted == 6)

      val result =
        db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name))

      assert(result == Seq("Camera", "Guitar", "Skate Board"))

      db.run(ExampleProduct.update(_.name === "Cookie").set(_.price := 11.0))

      db.run(ExampleProduct.delete(_.name === "Guitar"))

      val result2 =
        db.run(ExampleProduct.select.filter(_.price > 10).sortBy(_.price).desc.map(_.name))

      assert(result2 == Seq("Camera", "Skate Board", "Cookie"))
    }
  }
}
--------------------------------------------------------------------------------
/scalasql/test/src/operations/DbAggNumericOpsTests.scala:
--------------------------------------------------------------------------------
package scalasql.operations

import scalasql._
import utest._
import utils.ScalaSqlSuite

trait ExprAggNumericOpsTests extends ScalaSqlSuite {
  def description = "Operations that can be performed on `Expr[Seq[T]]` where `T` is numeric"
  def tests = Tests {
    test("sum") - checker(
      query = Purchase.select.map(_.count).sum,
      sql = "SELECT SUM(purchase0.count) AS res FROM purchase purchase0",
      value = 140
    )

    test("min") - checker(
      query = Purchase.select.map(_.count).min,
      sql = "SELECT MIN(purchase0.count) AS res FROM purchase purchase0",
      value = 3
    )

    test("max") - checker(
      query = Purchase.select.map(_.count).max,
      sql = "SELECT MAX(purchase0.count) AS res FROM purchase purchase0",
      value = 100
    )

    test("avg") - checker(
      query = Purchase.select.map(_.count).avg,
      sql = "SELECT AVG(purchase0.count) AS res FROM purchase purchase0",
      value = 20
    )
  }
}
--------------------------------------------------------------------------------
/scalasql/test/src/operations/DbAggOpsTests.scala:
--------------------------------------------------------------------------------
package scalasql.operations

import scalasql._
import scalasql.H2Dialect
import utest._
import utils.ScalaSqlSuite

trait ExprAggOpsTests extends ScalaSqlSuite {
  def description = "Operations that can be performed on `Expr[Seq[_]]`"
  def tests = Tests {
    test("size") - checker(
      query = Purchase.select.size,
      sql = "SELECT COUNT(1) AS res FROM purchase purchase0",
value = 7 15 | ) 16 | 17 | test("sumBy") { 18 | test("simple") - checker( 19 | query = Purchase.select.sumBy(_.count), 20 | sql = "SELECT SUM(purchase0.count) AS res FROM purchase purchase0", 21 | value = 140 22 | ) 23 | 24 | test("some") - checker( 25 | query = Purchase.select.sumByOpt(_.count), 26 | sql = "SELECT SUM(purchase0.count) AS res FROM purchase purchase0", 27 | value = Option(140) 28 | ) 29 | 30 | test("none") - checker( 31 | query = Purchase.select.filter(_ => false).sumByOpt(_.count), 32 | sql = "SELECT SUM(purchase0.count) AS res FROM purchase purchase0 WHERE ?", 33 | value = Option.empty[Int] 34 | ) 35 | } 36 | 37 | test("minBy") { 38 | test("simple") - checker( 39 | query = Purchase.select.minBy(_.count), 40 | sql = "SELECT MIN(purchase0.count) AS res FROM purchase purchase0", 41 | value = 3 42 | ) 43 | 44 | test("some") - checker( 45 | query = Purchase.select.minByOpt(_.count), 46 | sql = "SELECT MIN(purchase0.count) AS res FROM purchase purchase0", 47 | value = Option(3) 48 | ) 49 | 50 | test("none") - checker( 51 | query = Purchase.select.filter(_ => false).minByOpt(_.count), 52 | sql = "SELECT MIN(purchase0.count) AS res FROM purchase purchase0 WHERE ?", 53 | value = Option.empty[Int] 54 | ) 55 | } 56 | 57 | test("maxBy") { 58 | test("simple") - checker( 59 | query = Purchase.select.maxBy(_.count), 60 | sql = "SELECT MAX(purchase0.count) AS res FROM purchase purchase0", 61 | value = 100 62 | ) 63 | 64 | test("some") - checker( 65 | query = Purchase.select.maxByOpt(_.count), 66 | sql = "SELECT MAX(purchase0.count) AS res FROM purchase purchase0", 67 | value = Option(100) 68 | ) 69 | 70 | test("none") - checker( 71 | query = Purchase.select.filter(_ => false).maxByOpt(_.count), 72 | sql = "SELECT MAX(purchase0.count) AS res FROM purchase purchase0 WHERE ?", 73 | value = Option.empty[Int] 74 | ) 75 | } 76 | 77 | test("avgBy") { 78 | test("simple") - checker( 79 | query = Purchase.select.avgBy(_.count), 80 | sql = "SELECT AVG(purchase0.count) AS 
res FROM purchase purchase0", 81 | value = 20 82 | ) 83 | 84 | test("some") - checker( 85 | query = Purchase.select.avgByOpt(_.count), 86 | sql = "SELECT AVG(purchase0.count) AS res FROM purchase purchase0", 87 | value = Option(20) 88 | ) 89 | 90 | test("none") - checker( 91 | query = Purchase.select.filter(_ => false).avgByOpt(_.count), 92 | sql = "SELECT AVG(purchase0.count) AS res FROM purchase purchase0 WHERE ?", 93 | value = Option.empty[Int] 94 | ) 95 | } 96 | test("mkString") { 97 | test("simple") - checker( 98 | query = Buyer.select.map(_.name).mkString(), 99 | sqls = Seq( 100 | "SELECT STRING_AGG(buyer0.name || '', '') AS res FROM buyer buyer0", 101 | "SELECT GROUP_CONCAT(buyer0.name || '', '') AS res FROM buyer buyer0", 102 | "SELECT LISTAGG(buyer0.name || '', '') AS res FROM buyer buyer0", 103 | "SELECT GROUP_CONCAT(CONCAT(buyer0.name, '') SEPARATOR '') AS res FROM buyer buyer0" 104 | ), 105 | value = "James Bond叉烧包Li Haoyi" 106 | ) 107 | 108 | test("sep") - { 109 | if (!this.isInstanceOf[H2Dialect]) 110 | checker( 111 | query = Buyer.select.map(_.name).mkString(", "), 112 | sqls = Seq( 113 | "SELECT STRING_AGG(buyer0.name || '', ?) AS res FROM buyer buyer0", 114 | "SELECT GROUP_CONCAT(buyer0.name || '', ?) AS res FROM buyer buyer0", 115 | "SELECT GROUP_CONCAT(CONCAT(buyer0.name, '') SEPARATOR ?) 
AS res FROM buyer buyer0" 116 | ), 117 | value = "James Bond, 叉烧包, Li Haoyi" 118 | ) 119 | } 120 | } 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /scalasql/test/src/operations/DbApiOpsTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | 3 | import scalasql.utils.ScalaSqlSuite 4 | import utest._ 5 | 6 | trait DbApiOpsTests extends ScalaSqlSuite { 7 | def description = "Operations that can be performed on `Expr[T]` for any `T`" 8 | def tests = Tests {} 9 | } 10 | -------------------------------------------------------------------------------- /scalasql/test/src/operations/DbBlobOpsTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | 3 | import scalasql.{Expr, Bytes} 4 | import scalasql.utils.ScalaSqlSuite 5 | import utest._ 6 | 7 | trait ExprBlobOpsTests extends ScalaSqlSuite { 8 | def description = "Operations that can be performed on `Expr[Bytes]`" 9 | 10 | def tests = Tests { 11 | test("plus") - checker( 12 | query = Expr(Bytes("hello")) + Expr(Bytes("world")), 13 | sqls = Seq("SELECT (? || ?) AS res", "SELECT CONCAT(?, ?) AS res"), 14 | value = Bytes("helloworld") 15 | ) 16 | 17 | test("like") - checker( 18 | query = Expr(Bytes("hello")).like(Bytes("he%")), 19 | sql = "SELECT (? LIKE ?) AS res", 20 | value = true 21 | ) 22 | 23 | test("length") - checker( 24 | query = Expr(Bytes("hello")).length, 25 | sql = "SELECT LENGTH(?) AS res", 26 | value = 5 27 | ) 28 | 29 | test("octetLength") - checker( 30 | query = Expr(Bytes("叉烧包")).octetLength, 31 | sql = "SELECT OCTET_LENGTH(?) AS res", 32 | value = 9, 33 | moreValues = Seq(6) // Not sure why HsqlExpr returns different value here ??? 34 | ) 35 | 36 | test("position") - checker( 37 | query = Expr(Bytes("hello")).indexOf(Bytes("ll")), 38 | sqls = Seq("SELECT POSITION(? IN ?) AS res", "SELECT INSTR(?, ?) 
AS res"), 39 | value = 3 40 | ) 41 | // Not supported by postgres 42 | // 43 | // test("toLowerCase") - 44 | // checker(query = Expr(Bytes("Hello").toLowerCase, sql = "SELECT LOWER(?) AS res", value = Bytes("hello")) 45 | // 46 | // test("trim") - 47 | // checker(query = Expr(Bytes(" Hello ").trim, sql = "SELECT TRIM(?) AS res", value = Bytes("Hello")) 48 | // 49 | // test("ltrim") - 50 | // checker(query = Expr(Bytes(" Hello ").ltrim, sql = "SELECT LTRIM(?) AS res", value = Bytes("Hello ")) 51 | // 52 | // test("rtrim") - 53 | // checker(query = Expr(Bytes(" Hello ").rtrim, sql = "SELECT RTRIM(?) AS res", value = Bytes(" Hello")) 54 | 55 | test("substring") - checker( 56 | query = Expr(Bytes("Hello")).substring(2, 2), 57 | sql = "SELECT SUBSTRING(?, ?, ?) AS res", 58 | value = Bytes("el") 59 | ) 60 | 61 | test("startsWith") - checker( 62 | query = Expr(Bytes("Hello")).startsWith(Bytes("Hel")), 63 | sqls = Seq( 64 | "SELECT (? LIKE ? || '%') AS res", 65 | "SELECT (? LIKE CONCAT(?, '%')) AS res" 66 | ), 67 | value = true 68 | ) 69 | 70 | test("endsWith") - checker( 71 | query = Expr(Bytes("Hello")).endsWith(Bytes("llo")), 72 | sqls = Seq( 73 | "SELECT (? LIKE '%' || ?) AS res", 74 | "SELECT (? LIKE CONCAT('%', ?)) AS res" 75 | ), 76 | value = true 77 | ) 78 | 79 | test("contains") - checker( 80 | query = Expr(Bytes("Hello")).contains(Bytes("ll")), 81 | sqls = Seq( 82 | "SELECT (? LIKE '%' || ? || '%') AS res", 83 | "SELECT (? LIKE CONCAT('%', ?, '%')) AS res" 84 | ), 85 | value = true 86 | ) 87 | // Not supported by postgres 88 | // test("replace") - checker( 89 | // query = Expr(Bytes("Hello").replace(Bytes("ll"), Bytes("rr")), 90 | // sqls = Seq( 91 | // "SELECT REPLACE(?, ?, ?) 
AS res" 92 | // ), 93 | // value = Bytes("Herro") 94 | // ) 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /scalasql/test/src/operations/DbBooleanOpsTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | 3 | import scalasql._ 4 | import scalasql.core.Expr 5 | import utest._ 6 | import utils.ScalaSqlSuite 7 | 8 | trait ExprBooleanOpsTests extends ScalaSqlSuite { 9 | def description = "Operations that can be performed on `Expr[Boolean]`" 10 | def tests = Tests { 11 | test("and") { 12 | checker(query = Expr(true) && Expr(true), sql = "SELECT (? AND ?) AS res", value = true) 13 | checker(query = Expr(false) && Expr(true), sql = "SELECT (? AND ?) AS res", value = false) 14 | } 15 | 16 | test("or") { 17 | checker(query = Expr(false) || Expr(false), sql = "SELECT (? OR ?) AS res", value = false) 18 | checker(query = !Expr(false), sql = "SELECT (NOT ?) AS res", value = true) 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /scalasql/test/src/operations/DbMathOpsTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | 3 | import scalasql.core.DbApi 4 | import scalasql.utils.ScalaSqlSuite 5 | import utest._ 6 | 7 | trait ExprMathOpsTests extends ScalaSqlSuite { 8 | override implicit def DbApiOpsConv(db: => DbApi): DbApiOps & MathOps = ??? 9 | def description = "Math operations; supported by H2/Postgres/MySql, not supported by Sqlite" 10 | def tests = Tests { 11 | 12 | test("power") - checker( 13 | query = db.power(10, 3), 14 | sql = "SELECT POWER(?, ?) AS res", 15 | value = 1000.0 16 | ) 17 | 18 | test("sqrt") - checker( 19 | query = db.sqrt(9), 20 | sql = "SELECT SQRT(?) AS res", 21 | value = 3.0 22 | ) 23 | 24 | test("ln") - checker( 25 | query = db.ln(16.0), 26 | sql = "SELECT LN(?) 
AS res" 27 | ) 28 | 29 | test("log") - checker( 30 | query = db.log(2, 8), 31 | sql = "SELECT LOG(?, ?) AS res" 32 | ) 33 | 34 | test("log10") - checker( 35 | query = db.log10(16.0), 36 | sql = "SELECT LOG10(?) AS res" 37 | ) 38 | 39 | test("exp") - checker( 40 | query = db.exp(16.0), 41 | sql = "SELECT EXP(?) AS res" 42 | ) 43 | 44 | test("sin") - checker( 45 | query = db.sin(16.0), 46 | sql = "SELECT SIN(?) AS res" 47 | ) 48 | 49 | test("cos") - checker( 50 | query = db.cos(16.0), 51 | sql = "SELECT COS(?) AS res" 52 | ) 53 | 54 | test("tan") - checker( 55 | query = db.tan(16.0), 56 | sql = "SELECT TAN(?) AS res" 57 | ) 58 | 59 | test("asin") - checker( 60 | query = db.asin(1.0), 61 | sql = "SELECT ASIN(?) AS res" 62 | ) 63 | 64 | test("acos") - checker( 65 | query = db.acos(1.0), 66 | sql = "SELECT ACOS(?) AS res" 67 | ) 68 | 69 | test("atan") - checker( 70 | query = db.atan(1.0), 71 | sql = "SELECT ATAN(?) AS res" 72 | ) 73 | 74 | test("atan2") - checker( 75 | query = db.atan2(16.0, 23.0), 76 | sql = "SELECT ATAN2(?, ?) AS res" 77 | ) 78 | 79 | test("pi") - checker( 80 | query = db.pi, 81 | sql = "SELECT PI() AS res" 82 | ) 83 | 84 | test("degrees") - checker( 85 | query = db.degrees(180), 86 | sql = "SELECT DEGREES(?) AS res" 87 | ) 88 | 89 | test("radians") - checker( 90 | query = db.radians(180), 91 | sql = "SELECT RADIANS(?) AS res" 92 | ) 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /scalasql/test/src/operations/DbNumericOpsTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | 3 | import scalasql._ 4 | import scalasql.core.Expr 5 | import utest._ 6 | import utils.ScalaSqlSuite 7 | 8 | trait ExprNumericOpsTests extends ScalaSqlSuite { 9 | def description = "Operations that can be performed on `Expr[T]` when `T` is numeric" 10 | def tests = Tests { 11 | test("plus") - checker(query = Expr(6) + Expr(2), sql = "SELECT (? + ?) 
AS res", value = 8) 12 | 13 | test("minus") - checker(query = Expr(6) - Expr(2), sql = "SELECT (? - ?) AS res", value = 4) 14 | 15 | test("times") - checker(query = Expr(6) * Expr(2), sql = "SELECT (? * ?) AS res", value = 12) 16 | 17 | test("divide") - checker(query = Expr(6) / Expr(2), sql = "SELECT (? / ?) AS res", value = 3) 18 | 19 | test("modulo") - checker(query = Expr(6) % Expr(2), sql = "SELECT MOD(?, ?) AS res", value = 0) 20 | 21 | test("bitwiseAnd") - checker( 22 | query = Expr(6) & Expr(2), 23 | sqls = Seq("SELECT (? & ?) AS res", "SELECT BITAND(?, ?) AS res"), 24 | value = 2 25 | ) 26 | 27 | test("bitwiseOr") - checker( 28 | query = Expr(6) | Expr(3), 29 | sqls = Seq("SELECT (? | ?) AS res", "SELECT BITOR(?, ?) AS res"), 30 | value = 7 31 | ) 32 | 33 | test("between") - checker( 34 | query = Expr(4).between(Expr(2), Expr(6)), 35 | sql = "SELECT ? BETWEEN ? AND ? AS res", 36 | value = true 37 | ) 38 | 39 | test("unaryPlus") - checker(query = +Expr(-4), sql = "SELECT +? AS res", value = -4) 40 | 41 | test("unaryMinus") - 42 | checker(query = -Expr(-4), sqls = Seq("SELECT -? AS res", "SELECT -(?) AS res"), value = 4) 43 | 44 | test("unaryTilde") - checker( 45 | query = ~Expr(-4), 46 | sqls = Seq("SELECT ~? AS res", "SELECT BITNOT(?) AS res"), 47 | value = 3 48 | ) 49 | 50 | test("abs") - checker(query = Expr(-4).abs, sql = "SELECT ABS(?) AS res", value = 4) 51 | 52 | test("mod") - checker(query = Expr(8).mod(Expr(3)), sql = "SELECT MOD(?, ?) AS res", value = 2) 53 | 54 | test("ceil") - checker(query = Expr(4.3).ceil, sql = "SELECT CEIL(?) AS res", value = 5.0) 55 | 56 | test("floor") - checker(query = Expr(4.7).floor, sql = "SELECT FLOOR(?) AS res", value = 4.0) 57 | 58 | test("precedence") - checker( 59 | query = (Expr(2) + Expr(3)) * Expr(4), 60 | sql = "SELECT ((? + ?) * ?) AS res", 61 | value = 20 62 | ) 63 | 64 | test("sign") - checker( 65 | query = Expr(-100).sign, 66 | sql = "SELECT SIGN(?) 
AS res", 67 | value = -1 68 | ) 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /scalasql/test/src/operations/DbStringOpsTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql.operations 2 | 3 | import scalasql._ 4 | import scalasql.core.Expr 5 | import utest._ 6 | import utils.ScalaSqlSuite 7 | 8 | trait ExprStringOpsTests extends ScalaSqlSuite { 9 | def description = "Operations that can be performed on `Expr[String]`" 10 | def tests = Tests { 11 | test("plus") - checker( 12 | query = Expr("hello") + Expr("world"), 13 | sqls = Seq("SELECT (? || ?) AS res", "SELECT CONCAT(?, ?) AS res"), 14 | value = "helloworld" 15 | ) 16 | 17 | test("like") - checker( 18 | query = Expr("hello").like("he%"), 19 | sql = "SELECT (? LIKE ?) AS res", 20 | value = true 21 | ) 22 | 23 | test("length") - checker( 24 | query = Expr("hello").length, 25 | sql = "SELECT LENGTH(?) AS res", 26 | value = 5 27 | ) 28 | 29 | test("octetLength") - checker( 30 | query = Expr("叉烧包").octetLength, 31 | sql = "SELECT OCTET_LENGTH(?) AS res", 32 | value = 9, 33 | moreValues = Seq(6) // Not sure why HsqlExpr returns different value here ??? 34 | ) 35 | 36 | test("position") - checker( 37 | query = Expr("hello").indexOf("ll"), 38 | sqls = Seq("SELECT POSITION(? IN ?) AS res", "SELECT INSTR(?, ?) AS res"), 39 | value = 3 40 | ) 41 | 42 | test("toLowerCase") - checker( 43 | query = Expr("Hello").toLowerCase, 44 | sql = "SELECT LOWER(?) AS res", 45 | value = "hello" 46 | ) 47 | 48 | test("trim") - checker( 49 | query = Expr(" Hello ").trim, 50 | sql = "SELECT TRIM(?) AS res", 51 | value = "Hello" 52 | ) 53 | 54 | test("ltrim") - checker( 55 | query = Expr(" Hello ").ltrim, 56 | sql = "SELECT LTRIM(?) AS res", 57 | value = "Hello " 58 | ) 59 | 60 | test("rtrim") - checker( 61 | query = Expr(" Hello ").rtrim, 62 | sql = "SELECT RTRIM(?) 
AS res", 63 | value = " Hello" 64 | ) 65 | 66 | test("substring") - checker( 67 | query = Expr("Hello").substring(2, 2), 68 | sql = "SELECT SUBSTRING(?, ?, ?) AS res", 69 | value = "el" 70 | ) 71 | 72 | test("startsWith") - checker( 73 | query = Expr("Hello").startsWith("Hel"), 74 | sqls = Seq( 75 | "SELECT (? LIKE ? || '%') AS res", 76 | "SELECT (? LIKE CONCAT(?, '%')) AS res" 77 | ), 78 | value = true 79 | ) 80 | 81 | test("endsWith") - checker( 82 | query = Expr("Hello").endsWith("llo"), 83 | sqls = Seq( 84 | "SELECT (? LIKE '%' || ?) AS res", 85 | "SELECT (? LIKE CONCAT('%', ?)) AS res" 86 | ), 87 | value = true 88 | ) 89 | 90 | test("contains") - checker( 91 | query = Expr("Hello").contains("ll"), 92 | sqls = Seq( 93 | "SELECT (? LIKE '%' || ? || '%') AS res", 94 | "SELECT (? LIKE CONCAT('%', ?, '%')) AS res" 95 | ), 96 | value = true 97 | ) 98 | 99 | test("replace") - checker( 100 | query = Expr("Hello").replace("ll", "rr"), 101 | sqls = Seq( 102 | "SELECT REPLACE(?, ?, ?) AS res" 103 | ), 104 | value = "Herro" 105 | ) 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /scalasql/test/src/query/DeleteTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql._ 4 | import utest._ 5 | import utils.ScalaSqlSuite 6 | 7 | trait DeleteTests extends ScalaSqlSuite { 8 | def description = "Basic `DELETE` operations" 9 | override def utestBeforeEach(path: Seq[String]): Unit = checker.reset() 10 | def tests = Tests { 11 | test("single") { 12 | checker( 13 | query = Purchase.delete(_.id `=` 2), 14 | sql = "DELETE FROM purchase WHERE (purchase.id = ?)", 15 | value = 1, 16 | docs = """ 17 | `Table.delete` takes a mandatory predicate specifying what rows you want to delete. 
18 | The most common case is to specify the ID of the row you want to delete 19 | """ 20 | ) 21 | 22 | checker( 23 | query = Purchase.select, 24 | value = Seq( 25 | Purchase[Sc](id = 1, shippingInfoId = 1, productId = 1, count = 100, total = 888.0), 26 | // id==2 got deleted 27 | Purchase[Sc](id = 3, shippingInfoId = 1, productId = 3, count = 5, total = 15.7), 28 | Purchase[Sc](id = 4, shippingInfoId = 2, productId = 4, count = 4, total = 493.8), 29 | Purchase[Sc](id = 5, shippingInfoId = 2, productId = 5, count = 10, total = 10000.0), 30 | Purchase[Sc](id = 6, shippingInfoId = 3, productId = 1, count = 5, total = 44.4), 31 | Purchase[Sc](id = 7, shippingInfoId = 3, productId = 6, count = 13, total = 1.3) 32 | ) 33 | ) 34 | } 35 | test("multiple") { 36 | checker( 37 | query = Purchase.delete(_.id <> 2), 38 | sql = "DELETE FROM purchase WHERE (purchase.id <> ?)", 39 | value = 6, 40 | docs = """ 41 | Although specifying a single ID to delete is the most common case, you can pass 42 | in arbitrary predicates, e.g. 
in this example deleting all rows _except_ for the 43 | one with a particular ID 44 | """ 45 | ) 46 | 47 | checker( 48 | query = Purchase.select, 49 | value = 50 | Seq(Purchase[Sc](id = 2, shippingInfoId = 1, productId = 2, count = 3, total = 900.0)) 51 | ) 52 | } 53 | test("all") { 54 | checker( 55 | query = Purchase.delete(_ => true), 56 | sql = "DELETE FROM purchase WHERE ?", 57 | value = 7, 58 | docs = """ 59 | If you actually want to delete all rows in the table, you can explicitly 60 | pass in the predicate `_ => true` 61 | """ 62 | ) 63 | 64 | checker( 65 | query = Purchase.select, 66 | value = Seq[Purchase[Sc]]( 67 | // all Deleted 68 | ) 69 | ) 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /scalasql/test/src/query/EscapedTableNameReturningTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql._ 4 | import scalasql.core.JoinNullable 5 | import sourcecode.Text 6 | import utest._ 7 | import utils.ScalaSqlSuite 8 | 9 | import java.time.LocalDate 10 | import scalasql.core.Config 11 | import scalasql.dialects.ReturningDialect 12 | 13 | trait EscapedTableNameWithReturningTests extends ScalaSqlSuite { 14 | this: ReturningDialect => 15 | 16 | def description = """ 17 | If your table name is a reserved sql word, e.g. `order`, you can specify this in your table definition with 18 | `override def escape = true` 19 | """ 20 | 21 | def tests = Tests { 22 | val tableNameEscaped = dialectSelf.escape(Config.camelToSnake(Table.name(Select))) 23 | 24 | test("insert with returning") { 25 | checker( 26 | query = Text { 27 | Select.insert 28 | .values( 29 | Select[Sc]( 30 | id = 0, 31 | name = "hello" 32 | ) 33 | ) 34 | .returning(_.id) 35 | }, 36 | sql = 37 | s"INSERT INTO $tableNameEscaped (id, name) VALUES (?, ?) 
RETURNING $tableNameEscaped.id AS res", 38 | value = Seq(0), 39 | docs = "" 40 | ) 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /scalasql/test/src/query/EscapedTableNameTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql._ 4 | import scalasql.core.JoinNullable 5 | import sourcecode.Text 6 | import utest._ 7 | import utils.ScalaSqlSuite 8 | 9 | import java.time.LocalDate 10 | import scalasql.core.Config 11 | 12 | trait EscapedTableNameTests extends ScalaSqlSuite { 13 | def description = """ 14 | If your table name is a reserved sql word, e.g. `order`, you can specify this in your table definition with 15 | `override def escape = true` 16 | """ 17 | 18 | def tests = Tests { 19 | test("escape table name") { 20 | val tableNameEscaped = dialectSelf.escape(Config.camelToSnake(Table.name(Select))) 21 | test("select") { 22 | checker( 23 | query = Text { 24 | Select.select 25 | }, 26 | sql = s""" 27 | SELECT select0.id AS id, select0.name AS name 28 | FROM $tableNameEscaped select0 29 | """, 30 | value = Seq.empty[Select[Sc]], 31 | docs = "" 32 | ) 33 | } 34 | test("select with filter") { 35 | checker( 36 | query = Text { 37 | Select.select.filter(_.id `=` 1) 38 | }, 39 | sql = s""" 40 | SELECT select0.id AS id, select0.name AS name 41 | FROM $tableNameEscaped select0 42 | WHERE (select0.id = ?) 
43 | """, 44 | value = Seq.empty[Select[Sc]], 45 | docs = "" 46 | ) 47 | } 48 | test("delete") { 49 | checker( 50 | query = Text { 51 | Select.delete(_ => true) 52 | }, 53 | sql = s"DELETE FROM $tableNameEscaped WHERE ?", 54 | value = 0, 55 | docs = "" 56 | ) 57 | } 58 | test("join") { 59 | checker( 60 | query = Text { 61 | Select.select.join(Select)(_.id `=` _.id) 62 | }, 63 | sql = s""" 64 | SELECT 65 | select0.id AS res_0_id, 66 | select0.name AS res_0_name, 67 | select1.id AS res_1_id, 68 | select1.name AS res_1_name 69 | FROM 70 | $tableNameEscaped select0 71 | JOIN $tableNameEscaped select1 ON (select0.id = select1.id) 72 | """, 73 | value = Seq.empty[(Select[Sc], Select[Sc])], 74 | docs = "" 75 | ) 76 | } 77 | test("update") { 78 | checker( 79 | query = Text { 80 | Select.update(_ => true).set(_.name := "hello") 81 | }, 82 | sqls = Seq( 83 | s"UPDATE $tableNameEscaped SET $tableNameEscaped.name = ?", 84 | s"UPDATE $tableNameEscaped SET name = ?" 85 | ), 86 | value = 0, 87 | docs = "" 88 | ) 89 | } 90 | test("update where") { 91 | checker( 92 | query = Text { 93 | Select.update(_.id `=` 1).set(_.name := "hello") 94 | }, 95 | sqls = Seq( 96 | s"UPDATE $tableNameEscaped SET $tableNameEscaped.name = ? WHERE ($tableNameEscaped.id = ?)", 97 | s"UPDATE $tableNameEscaped SET name = ? 
WHERE ($tableNameEscaped.id = ?)" 98 | ), 99 | value = 0, 100 | docs = "" 101 | ) 102 | } 103 | test("insert") { 104 | checker( 105 | query = Text { 106 | Select.insert.values( 107 | Select[Sc]( 108 | id = 0, 109 | name = "hello" 110 | ) 111 | ) 112 | }, 113 | sql = s"INSERT INTO $tableNameEscaped (id, name) VALUES (?, ?)", 114 | value = 1, 115 | docs = "" 116 | ) 117 | } 118 | } 119 | } 120 | } 121 | -------------------------------------------------------------------------------- /scalasql/test/src/query/SchemaTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql._ 4 | import scalasql.core.JoinNullable 5 | import sourcecode.Text 6 | import utest._ 7 | import utils.ScalaSqlSuite 8 | 9 | import java.time.LocalDate 10 | 11 | trait SchemaTests extends ScalaSqlSuite { 12 | def description = """ 13 | If your table belongs to a schema other than the default schema of your database, you can specify this in your table definition with 14 | `override def schemaName = "otherschema"` 15 | """ 16 | 17 | def tests = Tests { 18 | test("schema") { 19 | test("select") { 20 | checker( 21 | query = Text { 22 | Invoice.select 23 | }, 24 | sql = """ 25 | SELECT invoice0.id AS id, invoice0.total AS total, invoice0.vendor_name AS vendor_name 26 | FROM otherschema.invoice invoice0 27 | """, 28 | value = Seq( 29 | Invoice[Sc](id = 1, total = 150.4, vendor_name = "Siemens"), 30 | Invoice[Sc](id = 2, total = 213.3, vendor_name = "Samsung"), 31 | Invoice[Sc](id = 3, total = 407.2, vendor_name = "Shell") 32 | ), 33 | docs = "" 34 | ) 35 | } 36 | test("insert.columns") { 37 | checker( 38 | query = Invoice.insert.columns( 39 | _.total := 200.3, 40 | _.vendor_name := "Huawei" 41 | ), 42 | sql = "INSERT INTO otherschema.invoice (total, vendor_name) VALUES (?, ?)", 43 | value = 1, 44 | docs = "" 45 | ) 46 | } 47 | test("insert.values") { 48 | checker( 49 | query = Invoice.insert 50 | .values( 51 | 
Invoice[Sc]( 52 | id = 0, 53 | total = 200.3, 54 | vendor_name = "Huawei" 55 | ) 56 | ) 57 | .skipColumns(_.id), 58 | sql = "INSERT INTO otherschema.invoice (total, vendor_name) VALUES (?, ?)", 59 | value = 1, 60 | docs = "" 61 | ) 62 | } 63 | test("update") { 64 | checker( 65 | query = Invoice 66 | .update(_.id === 1) 67 | .set( 68 | _.total := 200.3, 69 | _.vendor_name := "Huawei" 70 | ), 71 | sql = """UPDATE otherschema.invoice 72 | SET 73 | total = ?, 74 | vendor_name = ? 75 | WHERE 76 | (invoice.id = ?)""", 77 | value = 1, 78 | docs = "" 79 | ) 80 | } 81 | test("delete") { 82 | checker( 83 | query = Invoice.delete(_.id === 1), 84 | sql = "DELETE FROM otherschema.invoice WHERE (invoice.id = ?)", 85 | value = 1, 86 | docs = "" 87 | ) 88 | } 89 | test("insert into") { 90 | checker( 91 | query = Invoice.insert.select( 92 | i => (i.total, i.vendor_name), 93 | Invoice.select.map(i => (i.total, i.vendor_name)) 94 | ), 95 | sql = """INSERT INTO 96 | otherschema.invoice (total, vendor_name) 97 | SELECT 98 | invoice0.total AS res_0, 99 | invoice0.vendor_name AS res_1 100 | FROM 101 | otherschema.invoice invoice0""", 102 | value = 4, 103 | docs = "" 104 | ) 105 | } 106 | test("join") { 107 | checker( 108 | query = Text { 109 | Invoice.select.join(Invoice)(_.id `=` _.id).map(_._1.id) 110 | }, 111 | sql = """SELECT 112 | invoice0.id AS res 113 | FROM 114 | otherschema.invoice invoice0 115 | JOIN otherschema.invoice invoice1 ON (invoice0.id = invoice1.id)""", 116 | value = Seq(2, 3, 4, 5, 6, 7, 8, 9), 117 | docs = "" 118 | ) 119 | } 120 | } 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /scalasql/test/src/query/UpdateSubQueryTests.scala: -------------------------------------------------------------------------------- 1 | package scalasql.query 2 | 3 | import scalasql._ 4 | import sourcecode.Text 5 | import utest._ 6 | import utils.ScalaSqlSuite 7 | 8 | trait UpdateSubQueryTests extends ScalaSqlSuite { 9 | def 
description = "`UPDATE` queries that use Subqueries" 10 | override def utestBeforeEach(path: Seq[String]): Unit = checker.reset() 11 | def tests = Tests { 12 | 13 | test("setSubquery") - { 14 | checker( 15 | query = Text { Product.update(_ => true).set(_.price := Product.select.maxBy(_.price)) }, 16 | sqls = Seq( 17 | """ 18 | UPDATE product 19 | SET price = (SELECT MAX(product1.price) AS res FROM product product1) 20 | """, 21 | """ 22 | UPDATE product 23 | SET product.price = (SELECT MAX(product1.price) AS res FROM product product1) 24 | """ 25 | ), 26 | value = 6, 27 | docs = """ 28 | You can use subqueries to compute the values you want to update, using 29 | aggregates like `.maxBy` to convert the `Select[T]` into an `Expr[T]` 30 | """ 31 | ) 32 | 33 | checker( 34 | query = Text { Product.select.map(p => (p.id, p.name, p.price)) }, 35 | value = Seq( 36 | (1, "Face Mask", 1000.0), 37 | (2, "Guitar", 1000.0), 38 | (3, "Socks", 1000.0), 39 | (4, "Skate Board", 1000.0), 40 | (5, "Camera", 1000.0), 41 | (6, "Cookie", 1000.0) 42 | ) 43 | ) 44 | } 45 | 46 | test("whereSubquery") - { 47 | checker( 48 | query = Text { 49 | Product.update(_.price `=` Product.select.maxBy(_.price)).set(_.price := 0) 50 | }, 51 | sqls = Seq( 52 | """ 53 | UPDATE product 54 | SET price = ? 55 | WHERE (product.price = (SELECT MAX(product1.price) AS res FROM product product1)) 56 | """, 57 | """ 58 | UPDATE product 59 | SET product.price = ? 
60 | WHERE (product.price = (SELECT MAX(product1.price) AS res FROM product product1)) 61 | """ 62 | ), 63 | value = 1, 64 | docs = """ 65 | Subqueries and aggregates can also be used in the `WHERE` clause, defined by the 66 | predicate passed to `Table.update` 67 | """ 68 | ) 69 | 70 | checker( 71 | query = Text { Product.select.map(p => (p.id, p.name, p.price)) }, 72 | value = Seq( 73 | (1, "Face Mask", 8.88), 74 | (2, "Guitar", 300.0), 75 | (3, "Socks", 3.14), 76 | (4, "Skate Board", 123.45), 77 | (5, "Camera", 0.0), 78 | (6, "Cookie", 0.1) 79 | ), 80 | normalize = (x: Seq[(Int, String, Double)]) => x.sorted 81 | ) 82 | 83 | } 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /scalasql/test/src/utils/ScalaSqlSuite.scala: -------------------------------------------------------------------------------- 1 | package scalasql.utils 2 | 3 | import scalasql.DbApi 4 | import scalasql.dialects._ 5 | import utest.TestSuite 6 | 7 | abstract class ScalaSqlSuite(implicit val suiteLine: sourcecode.Line) 8 | extends TestSuite 9 | with Dialect { 10 | def checker: TestChecker 11 | lazy val db: DbApi = dbClient.getAutoCommitClientConnection 12 | 13 | lazy val dbClient = checker.dbClient 14 | def description: String 15 | } 16 | 17 | trait SqliteSuite extends ScalaSqlSuite with SqliteDialect { 18 | val checker = new TestChecker( 19 | scalasql.example.SqliteExample.sqliteClient, 20 | "sqlite-customer-schema.sql", 21 | "customer-data.sql", 22 | getClass.getName, 23 | suiteLine.value, 24 | description 25 | ) 26 | 27 | checker.reset() 28 | } 29 | 30 | trait H2Suite extends ScalaSqlSuite with H2Dialect { 31 | val checker = new TestChecker( 32 | scalasql.example.H2Example.h2Client, 33 | "h2-customer-schema.sql", 34 | "customer-data-plus-schema.sql", 35 | getClass.getName, 36 | suiteLine.value, 37 | description 38 | ) 39 | 40 | checker.reset() 41 | } 42 | 43 | trait PostgresSuite extends ScalaSqlSuite with PostgresDialect { 44 | val checker 
= new TestChecker( 45 | scalasql.example.PostgresExample.postgresClient, 46 | "postgres-customer-schema.sql", 47 | "customer-data-plus-schema.sql", 48 | getClass.getName, 49 | suiteLine.value, 50 | description 51 | ) 52 | 53 | checker.reset() 54 | } 55 | 56 | trait HikariSuite extends ScalaSqlSuite with PostgresDialect { 57 | val checker = new TestChecker( 58 | scalasql.example.HikariCpExample.hikariClient, 59 | "postgres-customer-schema.sql", 60 | "customer-data-plus-schema.sql", 61 | getClass.getName, 62 | suiteLine.value, 63 | description 64 | ) 65 | 66 | checker.reset() 67 | 68 | override def utestAfterAll(): Unit = { 69 | super.utestAfterAll() 70 | checker.autoCommitConnection.close() 71 | } 72 | } 73 | 74 | trait MySqlSuite extends ScalaSqlSuite with MySqlDialect { 75 | val checker = new TestChecker( 76 | scalasql.example.MySqlExample.mysqlClient, 77 | "mysql-customer-schema.sql", 78 | "customer-data.sql", 79 | getClass.getName, 80 | suiteLine.value, 81 | description 82 | ) 83 | 84 | checker.reset() 85 | } 86 | -------------------------------------------------------------------------------- /scalasql/test/src/utils/TestChecker.scala: -------------------------------------------------------------------------------- 1 | package scalasql.utils 2 | 3 | import com.github.vertical_blank.sqlformatter.SqlFormatter 4 | import pprint.PPrinter 5 | import scalasql.query.SubqueryRef 6 | import scalasql.{DbClient, Queryable, Expr, UtestFramework} 7 | 8 | class TestChecker( 9 | val dbClient: DbClient.DataSource, 10 | testSchemaFileName: String, 11 | testDataFileName: String, 12 | suiteName: String, 13 | suiteLine: Int, 14 | description: String 15 | ) { 16 | 17 | UtestFramework.recordedSuiteDescriptions(suiteName.stripSuffix("Tests$")) = description 18 | 19 | val autoCommitConnection = dbClient.getAutoCommitClientConnection 20 | def reset() = { 21 | autoCommitConnection.updateRaw( 22 | os.read(os.Path(sys.env("MILL_TEST_RESOURCE_DIR")) / testSchemaFileName) 23 | ) 24 | 
autoCommitConnection.updateRaw( 25 | os.read(os.Path(sys.env("MILL_TEST_RESOURCE_DIR")) / testDataFileName) 26 | ) 27 | } 28 | 29 | def recorded[T](docs: String, f: sourcecode.Text[T])(implicit tp: utest.framework.TestPath): T = { 30 | val res = f.value 31 | UtestFramework.recordedTests.append( 32 | UtestFramework.Record( 33 | suiteName = suiteName.stripSuffix("Tests$"), 34 | suiteLine = suiteLine, 35 | testPath = tp.value, 36 | docs = docs, 37 | queryCodeString = f.source match { 38 | case s"{$res}" => res 39 | case res => res 40 | }, 41 | sqlString = None, 42 | resultCodeString = None 43 | ) 44 | ) 45 | 46 | res 47 | } 48 | def apply[T, V]( 49 | query: sourcecode.Text[T], 50 | sql: String = null, 51 | sqls: Seq[String] = Nil, 52 | value: sourcecode.Text[V] = null, 53 | moreValues: Seq[V] = Nil, 54 | normalize: V => V = (x: V) => x, 55 | docs: String = "" 56 | )(implicit qr: Queryable[T, V], tp: utest.framework.TestPath) = { 57 | if (sys.env.contains("SCALASQL_RUN_BENCHMARK")) { 58 | for (i <- Range(0, 4)) { 59 | var iterations = 0 60 | val multiplier = 10 61 | val duration = sys.env("SCALASQL_RUN_BENCHMARK").toInt 62 | val end = System.currentTimeMillis() + duration 63 | while (System.currentTimeMillis() < end) { 64 | var i = 0 65 | while (i < multiplier) { 66 | i += 1 67 | dbClient.renderSql(query.value) 68 | } 69 | iterations += 1 70 | } 71 | println(s"${iterations * multiplier} iterations in ${duration}ms") 72 | } 73 | } 74 | val sqlResult = autoCommitConnection 75 | .renderSql(query.value) 76 | 77 | val allCheckedSqls = Option(sql) ++ sqls 78 | val matchedSql = allCheckedSqls.find { sql => 79 | val expectedSql = sql.trim.replaceAll("\\s+", " ") 80 | // pprint.log(sqlResult) 81 | // pprint.log(expectedSql) 82 | sqlResult == expectedSql 83 | } 84 | 85 | if (allCheckedSqls.nonEmpty) { 86 | assert(matchedSql.nonEmpty, pprint.apply(SqlFormatter.format(sqlResult))) 87 | } 88 | 89 | val result = autoCommitConnection.run(query.value) 90 | 91 | val values = 
Option(value).map(_.value) ++ moreValues 92 | val normalized = normalize(result) 93 | if (values.nonEmpty) { 94 | assert(values.exists(value => normalized == value), pprint.apply(normalized)) 95 | } 96 | 97 | UtestFramework.recordedTests.append( 98 | UtestFramework.Record( 99 | suiteName = suiteName.stripSuffix("Tests$"), 100 | suiteLine = suiteLine, 101 | testPath = tp.value, 102 | docs = docs, 103 | queryCodeString = query.source, 104 | sqlString = matchedSql, 105 | resultCodeString = Option(value).map(_.source) 106 | ) 107 | ) 108 | 109 | () 110 | } 111 | } 112 | 113 | object TestChecker { 114 | 115 | lazy val pprinter: PPrinter = PPrinter.Color.copy(additionalHandlers = { 116 | case v: SubqueryRef => pprinter.treeify(v.value, false, true) 117 | case v: Expr[_] if !v.isInstanceOf[scala.Product] => 118 | pprinter.treeify(Expr.toString(v), false, true) 119 | }) 120 | } 121 | --------------------------------------------------------------------------------