├── .github ├── CODEOWNERS ├── dependabot.yml └── workflows │ └── scala.yml ├── project ├── build.properties └── plugins.sbt ├── version.sbt ├── .gitignore ├── src ├── test │ ├── resources │ │ └── logback.xml │ └── scala │ │ ├── models │ │ └── Tables.scala │ │ └── com │ │ └── github │ │ └── takezoe │ │ └── slick │ │ └── blocking │ │ └── SlickBlockingAPISpec.scala └── main │ └── scala │ ├── com │ └── github │ │ └── takezoe │ │ └── slick │ │ └── blocking │ │ ├── BlockingDrivers.scala │ │ └── BlockingProfile.scala │ └── slick │ └── TransactionalJdbcBackend.scala ├── .scalafmt.conf ├── README.md └── LICENSE.txt /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @takezoe @xuwei-k 2 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.11.7 2 | -------------------------------------------------------------------------------- /version.sbt: -------------------------------------------------------------------------------- 1 | ThisBuild / version := "0.0.15-RC2" 2 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.3.1") 2 | 3 | addSbtPlugin("com.github.sbt" % "sbt-release" % "1.4.0") 4 | 5 | addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.6") 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.class 2 | *.log 3 | .ensime 4 | .ensime_cache 5 | 6 | # sbt specific 7 | dist/* 8 | target/ 9 | lib_managed/ 10 | src_managed/ 11 | project/boot/ 12 | project/plugins/project/ 13 | .bsp/ 14 | 15 | # Scala-IDE specific 16 | .scala_dependencies 17 | .classpath 18 | .project 19 | .cache* 20 | .settings 21 | 22 | # IntelliJ specific 23 | .idea/ 24 | .idea_modules/ 25 | /bin/ 26 | 27 | # Metals 28 | .metals/ 29 | .vscode/ 30 | .bloop/ 31 | metals.sbt 32 | -------------------------------------------------------------------------------- /.github/workflows/scala.yml: -------------------------------------------------------------------------------- 1 | name: Scala CI 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | build: 7 | strategy: 8 | fail-fast: false 9 | matrix: 10 | java: [8, 25] 11 | runs-on: ubuntu-latest 12 | timeout-minutes: 15 13 | steps: 14 | - uses: actions/checkout@v6 15 | - name: Set up JDK 16 | uses: actions/setup-java@v5 17 | with: 18 | java-version: ${{matrix.java}} 19 | distribution: 'zulu' 20 | - uses: sbt/setup-sbt@v1 21 | - name: Run tests 22 | run: sbt scalafmtSbtCheck "+ scalafmtCheckAll" +test 23 | -------------------------------------------------------------------------------- /src/test/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 
-------------------------------------------------------------------------------- /src/main/scala/com/github/takezoe/slick/blocking/BlockingDrivers.scala: -------------------------------------------------------------------------------- 1 | package com.github.takezoe.slick.blocking 2 | 3 | import slick.jdbc._ 4 | 5 | object BlockingDerbyDriver extends DerbyProfile with BlockingJdbcProfile 6 | object BlockingH2Driver extends H2Profile with BlockingJdbcProfile 7 | object BlockingHsqldbDriver extends HsqldbProfile with BlockingJdbcProfile 8 | object BlockingMySQLDriver extends MySQLProfile with BlockingJdbcProfile 9 | object BlockingPostgresDriver extends PostgresProfile with BlockingJdbcProfile 10 | object BlockingSQLiteDriver extends SQLiteProfile with BlockingJdbcProfile 11 | object BlockingDB2Driver extends DB2Profile with BlockingJdbcProfile 12 | object BlockingSQLServerDriver extends SQLServerProfile with BlockingJdbcProfile 13 | object BlockingOracleDriver 14 | extends OracleProfile 15 | with JdbcActionComponent.OneRowPerStatementOnly 16 | with BlockingJdbcProfile 17 | -------------------------------------------------------------------------------- /.scalafmt.conf: -------------------------------------------------------------------------------- 1 | version = "3.10.2" 2 | runner.dialect = Scala213Source3 3 | maxColumn = 120 4 | align.preset = none 5 | align.tokens = [] 6 | rewrite.rules = [ 7 | RedundantParens 8 | ExpandImportSelectors 9 | PreferCurlyFors 10 | ] 11 | rewrite.imports.contiguousGroups = "no" 12 | rewrite.imports.groups = [[".*"]] 13 | continuationIndent.callSite = 2 14 | continuationIndent.defnSite = 2 15 | docstrings.style = keep 16 | includeCurlyBraceInSelectChains = false 17 | optIn.breakChainOnFirstMethodDot = false 18 | trailingCommas = preserve 19 | newlines.topLevelStatementBlankLines = [ 20 | { 21 | blanks { after = 1 } 22 | maxNest = 1 23 | regex = "Import" 24 | } 25 | ] 26 | project.layout = StandardConvention 27 | fileOverride { 28 | "glob:**/*.sbt" { 29 | runner.dialect = scala212 30 | rewrite.scala3.convertToNewSyntax = false 31 | } 32 | } 33 | rewrite.scala3.convertToNewSyntax = true 34 | runner.dialectOverride.allowSignificantIndentation = false 35 | runner.dialectOverride.allowAsForImportRename = false 36 | runner.dialectOverride.allowStarWildcardImport = false 37 | -------------------------------------------------------------------------------- /src/main/scala/slick/TransactionalJdbcBackend.scala: -------------------------------------------------------------------------------- 1 | package slick 2 | 3 | import slick.jdbc.JdbcBackend 4 | 5 | // JdbcProfileBlockingSession brings back withTransaction feature from slick 2.x 6 | // (it's also related with 3.0). 7 | // 8 | // It cannot use `session.rollback` because we cannot touch `protected var doRollback`. 9 | // Use `session.conn.rollback()` instead. 10 | // 11 | // ref: 12 | // - https://github.com/slick/slick/blob/3.0/slick/src/main/scala/slick/jdbc/JdbcBackend.scala#L424 13 | // - https://github.com/slick/slick/blob/2.1/src/main/scala/scala/slick/jdbc/JdbcBackend.scala#L419 14 | // - https://github.com/slick/slick/blob/3.1/slick/src/main/scala/slick/jdbc/JdbcBackend.scala#L407 15 | trait JdbcProfileBlockingSession { 16 | 17 | /** 18 | * Extends Session to add methods for session management. 
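 * `withTransaction` starts a transaction the first time it is entered on a session, commits when the
 * block completes normally, and rolls back when the block throws; nested calls simply run inside the
 * outer transaction. To roll back manually, call `session.conn.rollback()` inside the block.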
19 | */ 20 | implicit class BlockingSession(session: JdbcBackend#Session) { 21 | def withTransaction[T](f: => T): T = { 22 | val s = session.asInstanceOf[JdbcBackend#BaseSession] 23 | if (s.isInTransaction) f 24 | else { 25 | s.startInTransaction 26 | var functionExecuted = false 27 | try { 28 | val res = f 29 | functionExecuted = true 30 | s.endInTransaction(s.conn.commit()) 31 | res 32 | } finally { 33 | if (!functionExecuted) { 34 | s.endInTransaction(s.conn.rollback()) 35 | } 36 | } 37 | } 38 | } 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /src/test/scala/models/Tables.scala: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | // AUTO-GENERATED Slick data model 4 | /** Slick data model trait for extension, choice of backend or usage in the cake pattern. (Make sure to initialize this late.) */ 5 | trait Tables { 6 | val profile: slick.jdbc.JdbcProfile 7 | import profile.api._ 8 | import slick.model.ForeignKeyAction 9 | // NOTE: GetResult mappers for plain SQL are only generated for tables where Slick knows how to map the types of all columns. 10 | import slick.jdbc.{GetResult => GR} 11 | 12 | /** DDL for all tables. Call .create to execute. */ 13 | lazy val schema = Companies.schema ++ Users.schema 14 | @deprecated("Use .schema instead of .ddl", "3.0") 15 | def ddl = schema 16 | 17 | /** Entity class storing rows of table Companies 18 | * @param id Database column ID SqlType(INTEGER), PrimaryKey 19 | * @param name Database column NAME SqlType(VARCHAR) */ 20 | case class CompaniesRow(id: Int, name: String) 21 | 22 | /** Table description of table COMPANIES. Objects of this class serve as prototypes for rows in queries. */ 23 | class Companies(_tableTag: Tag) extends Table[CompaniesRow](_tableTag, "COMPANIES") { 24 | def * = (id, name).mapTo[CompaniesRow] 25 | 26 | /** Database column ID SqlType(INTEGER), PrimaryKey */ 27 | val id: Rep[Int] = column[Int]("ID", O.PrimaryKey) 28 | 29 | /** Database column NAME SqlType(VARCHAR) */ 30 | val name: Rep[String] = column[String]("NAME") 31 | } 32 | 33 | /** Collection-like TableQuery object for table Companies */ 34 | lazy val Companies = new TableQuery(tag => new Companies(tag)) 35 | 36 | /** Entity class storing rows of table Users 37 | * @param id Database column ID SqlType(BIGINT), AutoInc, PrimaryKey 38 | * @param name Database column NAME SqlType(VARCHAR) 39 | * @param companyId Database column COMPANY_ID SqlType(INTEGER) */ 40 | case class UsersRow(id: Long, name: String, companyId: Option[Int]) 41 | 42 | /** Table description of table USERS. Objects of this class serve as prototypes for rows in queries. 
*/
43 | class Users(_tableTag: Tag) extends Table[UsersRow](_tableTag, "USERS") {
44 | def * = (id, name, companyId).mapTo[UsersRow]
45 |
46 | /** Database column ID SqlType(BIGINT), AutoInc, PrimaryKey */
47 | val id: Rep[Long] = column[Long]("ID", O.AutoInc, O.PrimaryKey)
48 |
49 | /** Database column NAME SqlType(VARCHAR) */
50 | val name: Rep[String] = column[String]("NAME")
51 |
52 | /** Database column COMPANY_ID SqlType(INTEGER) */
53 | val companyId: Rep[Option[Int]] = column[Option[Int]]("COMPANY_ID")
54 |
55 | /** Foreign key referencing Companies (database name IDX_USERS_FK0) */
56 | lazy val companiesFk = foreignKey("IDX_USERS_FK0", companyId, Companies)(
57 | r => Rep.Some(r.id),
58 | onUpdate = ForeignKeyAction.Restrict,
59 | onDelete = ForeignKeyAction.Restrict
60 | )
61 | }
62 |
63 | /** Collection-like TableQuery object for table Users */
64 | lazy val Users = new TableQuery(tag => new Users(tag))
65 | }
66 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | blocking-slick [![Scala CI](https://github.com/gitbucket/blocking-slick/actions/workflows/scala.yml/badge.svg)](https://github.com/gitbucket/blocking-slick/actions/workflows/scala.yml)
2 | ==============
3 |
4 | Provides a Slick2-compatible blocking API for Slick3.
5 |
6 | Setup
7 | -----
8 |
9 | Add the following dependency to your `build.sbt`:
10 |
11 | ```scala
12 | // for Slick 3.5 (Scala 2 / Scala 3)
13 | libraryDependencies += "com.github.takezoe" %% "blocking-slick" % "0.0.15-RC2"
14 |
15 | // for Slick 3.4 (Scala 2)
16 | libraryDependencies += "com.github.takezoe" %% "blocking-slick" % "0.0.14"
17 |
18 | // for Slick 3.3
19 | libraryDependencies += "com.github.takezoe" %% "blocking-slick-33" % "0.0.13"
20 |
21 | // for Slick 3.2
22 | libraryDependencies += "com.github.takezoe" %% "blocking-slick-32" % "0.0.11"
23 |
24 | // for Slick 3.1
25 | libraryDependencies += "com.github.takezoe" %% "blocking-slick-31" % "0.0.7"
26 | ```
27 |
28 | You can enable the blocking API by importing the blocking driver as follows:
29 |
30 | ```scala
31 | import com.github.takezoe.slick.blocking.BlockingH2Driver.blockingApi._
32 | ```
33 |
34 | Slick2 style blocking API
35 | ----
36 |
37 | The following example shows the use of the blocking API provided by blocking-slick:
38 |
39 | ```scala
40 | val db = Database.forURL("jdbc:h2:mem:test")
41 |
42 | db.withSession { implicit session =>
43 | // Create tables
44 | models.Tables.schema.create
45 |
46 | // Insert
47 | Users.insert(UsersRow(1, "takezoe", None))
48 |
49 | // Insert returning new id
50 | val newID: Long = (Users returning Users.map(_.id)).insert(UsersRow(2, "takezoe", None))
51 |
52 | // Select
53 | val users: Seq[UsersRow] = Users.list
54 |
55 | // Select single record
56 | val user: UsersRow = Users.filter(_.name === "takezoe".bind).first
57 |
58 | // Select single record with Option
59 | val userOption: Option[UsersRow] = Users.filter(_.name === "takezoe".bind).firstOption
60 |
61 | // Update
62 | Users.filter(t => t.id === 1L.bind).update(UsersRow(1, "naoki", None))
63 |
64 | // Delete
65 | Users.filter(t => t.id === 1L.bind).delete
66 |
67 | // Drop tables
68 | models.Tables.schema.remove
69 | }
70 | ```
71 |
72 | Plain SQL can be executed synchronously as well.
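For example, a query built with the `sql` interpolator can be materialized with `first`, `firstOption`, or `list` on the implicit session (a minimal sketch, assuming the `USERS` table created in the example above):

```scala
db.withSession { implicit session =>
  // Run the query and take the single result synchronously
  val count: Int = sql"SELECT COUNT(*) FROM USERS".as[Int].first

  // Or fetch all matching rows as a List
  val names: List[String] = sql"SELECT NAME FROM USERS".as[String].list
}
```

A statement written with the `sqlu` interpolator is executed with `execute`: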
73 |
74 | ```scala
75 | val id = 1
76 | val name = "takezoe"
77 | val insert = sqlu"INSERT INTO USERS (ID, NAME) VALUES (${id}, ${name})"
78 | insert.execute
79 | ```
80 |
81 | Transactions are available by using `withTransaction` instead of `withSession`:
82 |
83 | ```scala
84 | // Transaction
85 | db.withTransaction { implicit session =>
86 | ...
87 | }
88 | ```
89 |
90 | DBIO support
91 | ----
92 |
93 | blocking-slick also provides a way to run `DBIO` actions synchronously. This helps you migrate Slick2-style code to Slick3-style code gradually.
94 |
95 | ```scala
96 | db.withSession { implicit session =>
97 | val id1 = 1
98 | val id2 = 2
99 | val name1 = "takezoe"
100 | val name2 = "chibochibo"
101 | val insert1 = sqlu"INSERT INTO USERS (ID, NAME) VALUES (${id1}, ${name1})" andThen
102 | sqlu"INSERT INTO USERS (ID, NAME) VALUES (${id2}, ${name2})"
103 | insert1.run
104 |
105 | val query = for {
106 | count <- sql"SELECT COUNT(*) FROM USERS".as[Int].head
107 | max <- sql"SELECT MAX(ID) FROM USERS".as[Int].head
108 | } yield (count, max)
109 | val (count1, max1) = query.run
110 | assert(count1 == 2)
111 | assert(max1 == 2)
112 | }
113 | ```
114 |
115 | Note that `flatMap` and `andThen` require an `ExecutionContext`, but that `ExecutionContext` is ignored when the action is run synchronously.
116 |
117 | Resources
118 | ----
119 |
120 | You can see working code in [the test case](https://github.com/gitbucket/blocking-slick/blob/master/src/test/scala/com/github/takezoe/slick/blocking/SlickBlockingAPISpec.scala), and an example that uses blocking-slick with Play2 and play-slick is available [here](https://github.com/takezoe/blocking-slick-play2).
121 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 
203 | -------------------------------------------------------------------------------- /src/test/scala/com/github/takezoe/slick/blocking/SlickBlockingAPISpec.scala: -------------------------------------------------------------------------------- 1 | package com.github.takezoe.slick.blocking 2 | 3 | import org.scalatest.BeforeAndAfterAll 4 | import org.scalatest.funsuite.AnyFunSuite 5 | import org.testcontainers.containers.JdbcDatabaseContainer 6 | import org.testcontainers.mysql.MySQLContainer 7 | import org.testcontainers.utility.DockerImageName 8 | import slick.jdbc.meta.MTable 9 | import scala.concurrent.Await 10 | import scala.concurrent.ExecutionContext 11 | import scala.concurrent.Future 12 | import scala.concurrent.duration.Duration 13 | 14 | class SlickBlockingAPISpecH2 15 | extends SlickBlockingAPISpec( 16 | BlockingH2Driver 17 | ) { 18 | protected override lazy val db = Tables.profile.blockingApi.Database.forURL("jdbc:h2:mem:test;TRACE_LEVEL_FILE=4") 19 | } 20 | 21 | class SlickBlockingAPISpecMySQL56 extends SlickBlockingAPISpecMySQL("5.6") 22 | 23 | abstract class SlickBlockingAPISpecMySQL(mysqlVersion: String) 24 | extends SlickBlockingAPISpecTestContainer( 25 | new MySQLContainer(DockerImageName.parse("mysql:" + mysqlVersion)), 26 | BlockingMySQLDriver 27 | ) 28 | 29 | abstract class SlickBlockingAPISpecTestContainer( 30 | container: JdbcDatabaseContainer[?], 31 | profile: BlockingJdbcProfile 32 | ) extends SlickBlockingAPISpec(profile) 33 | with BeforeAndAfterAll { 34 | 35 | override def beforeAll(): Unit = { 36 | super.beforeAll() 37 | container.start() 38 | } 39 | 40 | override def afterAll(): Unit = { 41 | super.afterAll() 42 | container.stop() 43 | } 44 | 45 | lazy val db = Tables.profile.blockingApi.Database.forURL( 46 | url = container.getJdbcUrl, 47 | user = container.getUsername, 48 | password = container.getPassword, 49 | driver = container.getDriverClassName 50 | ) 51 | 52 | } 53 | 54 | abstract class SlickBlockingAPISpec(p: BlockingJdbcProfile) extends AnyFunSuite { self => 55 | object Tables extends models.Tables { 56 | override val profile: BlockingJdbcProfile = self.p 57 | } 58 | import Tables.profile.blockingApi._ 59 | import Tables._ 60 | 61 | protected def db: Tables.profile.api.Database 62 | 63 | private final def testWithSession[A](f: Tables.profile.blockingApi.Session => A): A = { 64 | db.withSession { implicit session => 65 | try { 66 | Tables.schema.create 67 | f(session.asInstanceOf[Session]) 68 | } finally { 69 | Tables.schema.remove 70 | } 71 | } 72 | } 73 | 74 | test("CRUD operation") { 75 | testWithSession { implicit session => 76 | // Insert 77 | Users.insert(UsersRow(1, "takezoe", None)) 78 | Users.insert(UsersRow(2, "chibochibo", None)) 79 | Users.insert(UsersRow(3, "tanacasino", None)) 80 | 81 | val count1 = Query(Users.length).first 82 | assert(count1 == 3) 83 | 84 | val result1 = Users.sortBy(_.id).list 85 | assert(result1.length == 3) 86 | assert(result1(0) == UsersRow(1, "takezoe", None)) 87 | assert(result1(1) == UsersRow(2, "chibochibo", None)) 88 | assert(result1(2) == UsersRow(3, "tanacasino", None)) 89 | 90 | // Update 91 | Users.filter(_.id === 1L.bind).map(_.name).update("naoki") 92 | 93 | val result2 = Users.filter(_.id === 1L.bind).first 94 | assert(result2 == UsersRow(1, "naoki", None)) 95 | 96 | // Delete 97 | Users.filter(_.id === 1L.bind).delete 98 | 99 | val result3 = Users.filter(_.id === 1L.bind).firstOption 100 | assert(result3.isEmpty) 101 | 102 | val count2 = Query(Users.length).first 103 | assert(count2 == 2) 104 | } 105 
| } 106 | 107 | test("Plain SQL") { 108 | testWithSession { implicit session => 109 | // plain sql 110 | val id1 = 1 111 | val name1 = "takezoe" 112 | val insert1 = sqlu"INSERT INTO USERS (ID, NAME) VALUES (${id1}, ${name1})" 113 | insert1.execute 114 | 115 | val query = sql"SELECT COUNT(*) FROM USERS".as[Int] 116 | val count1 = query.first 117 | assert(count1 == 1) 118 | 119 | val id2 = 2 120 | val name2 = "chibochibo" 121 | val insert2 = sqlu"INSERT INTO USERS (ID, NAME) VALUES (${id2}, ${name2})" 122 | insert2.execute 123 | 124 | val count2 = query.first 125 | assert(count2 == 2) 126 | } 127 | } 128 | 129 | test("exists") { 130 | testWithSession { implicit session => 131 | val exists1 = Users.filter(_.id === 1L.bind).filter(_.name === "takezoe".bind).exists.run 132 | assert(exists1 == false) 133 | 134 | Users.insert(UsersRow(1, "takezoe", None)) 135 | 136 | val exists2 = Users.filter(_.id === 1L.bind).filter(_.name === "takezoe".bind).exists.run 137 | assert(exists2 == true) 138 | 139 | } 140 | } 141 | 142 | test("sum") { 143 | testWithSession { implicit session => 144 | val sum = Users.map(_.id).sum.run 145 | assert(sum == None) 146 | 147 | } 148 | } 149 | 150 | test("run") { 151 | testWithSession { implicit session => 152 | assert(Users.run.length == 0) 153 | } 154 | } 155 | 156 | test("insertAll") { 157 | testWithSession { implicit session => 158 | val users = List( 159 | UsersRow(1, "takezoe", None), 160 | UsersRow(2, "chibochibo", None), 161 | UsersRow(3, "tanacasino", None) 162 | ) 163 | 164 | Users.insertAll(users*) 165 | val count1 = Query(Users.length).first 166 | assert(count1 == 3) 167 | 168 | Users ++= users 169 | val count2 = Query(Users.length).first 170 | assert(count2 == 6) 171 | } 172 | } 173 | 174 | test("insert returning") { 175 | testWithSession { implicit session => 176 | val id = Users.returning(Users.map(_.id)) insert UsersRow(1, "takezoe", None) 177 | assert(id == 1) 178 | assert(Users.length.run == 1) 179 | val u = (Users.returning(Users.map(_.id)).into((u, id) => u.copy(id = id))) insert UsersRow(2, "takezoe", None) 180 | assert(u.id == 2) 181 | assert(Users.length.run == 2) 182 | } 183 | 184 | } 185 | 186 | test("insert multiple returning") { 187 | testWithSession { implicit session => 188 | val id = 189 | Users 190 | .returning(Users.map(_.id)) 191 | .insertAll( 192 | Seq( 193 | UsersRow(1, "takezoe", None), 194 | UsersRow(2, "mrfyda", None) 195 | ) 196 | ) 197 | .run 198 | assert(Users.length.run == 2) 199 | val u = Users 200 | .returning(Users.map(_.id)) 201 | .into((u, id) => u.copy(id = id)) 202 | .insertAll( 203 | Seq( 204 | UsersRow(3, "takezoe", None), 205 | UsersRow(4, "mrfyda", None) 206 | ) 207 | ) 208 | .run 209 | assert(Users.length.run == 4) 210 | } 211 | } 212 | 213 | test("insert insertOrUpdate") { 214 | testWithSession { implicit session => 215 | Users.insertOrUpdate(UsersRow(1, "takezoe", None)) 216 | assert(Users.length.run == 1) 217 | Users.insertOrUpdate(UsersRow(1, "joao", None)) 218 | assert(Users.length.run == 1) 219 | } 220 | } 221 | 222 | test("withTransaction Query") { 223 | withTransaction( 224 | u => s => Users.insert(u)(s), 225 | id => s => Users.filter(_.id === id.bind).exists.run(s) 226 | ) 227 | } 228 | 229 | test("withTransaction Action") { 230 | withTransaction( 231 | u => s => sqlu"insert into USERS values (${u.id}, ${u.name}, ${u.companyId})".execute(s), 232 | id => s => sql"select exists (select * from USERS where id = $id)".as[Boolean].first(s) 233 | ) 234 | } 235 | 236 | private def withTransaction( 237 | insertUser: 
UsersRow => Session => Int, 238 | existsUser: Long => Session => Boolean 239 | ) = { 240 | testWithSession { implicit session => 241 | { // rollback 242 | session.withTransaction { 243 | insertUser(UsersRow(1, "takezoe", None))(session) 244 | val exists = existsUser(1)(session) 245 | assert(exists == true) 246 | session.conn.rollback() 247 | } 248 | val exists = existsUser(1)(session) 249 | assert(exists == false) 250 | } 251 | 252 | { // ok 253 | session.withTransaction { 254 | insertUser(UsersRow(2, "takezoe", None))(session) 255 | val exists = existsUser(2)(session) 256 | assert(exists == true) 257 | } 258 | val exists = existsUser(2)(session) 259 | assert(exists == true) 260 | } 261 | 262 | { // nest (rollback) 263 | session.withTransaction { 264 | insertUser(UsersRow(3, "takezoe", None))(session) 265 | assert(existsUser(3)(session) == true) 266 | session.withTransaction { 267 | insertUser(UsersRow(4, "takezoe", None))(session) 268 | assert(existsUser(4)(session) == true) 269 | session.conn.rollback() 270 | } 271 | } 272 | assert(existsUser(3)(session) == false) 273 | assert(existsUser(4)(session) == false) 274 | } 275 | 276 | { // nest (ok) 277 | session.withTransaction { 278 | insertUser(UsersRow(5, "takezoe", None))(session) 279 | assert(existsUser(5)(session) == true) 280 | session.withTransaction { 281 | insertUser(UsersRow(6, "takezoe", None))(session) 282 | assert(existsUser(6)(session) == true) 283 | } 284 | } 285 | assert(existsUser(5)(session) == true) 286 | assert(existsUser(6)(session) == true) 287 | } 288 | } 289 | } 290 | 291 | test("MTable support") { 292 | if (this.isInstanceOf[SlickBlockingAPISpecH2]) { 293 | testWithSession { implicit session => 294 | assert(MTable.getTables.list.length == 2) 295 | } 296 | } else { 297 | pending // TODO 298 | } 299 | } 300 | 301 | test("Transaction support with Query SELECT FOR UPDATE") { 302 | testTransactionWithSelectForUpdate { implicit session => 303 | Users.map(_.id).forUpdate.list 304 | } 305 | } 306 | 307 | test("Transaction support with Action SELECT FOR UPDATE") { 308 | testTransactionWithSelectForUpdate { implicit session => 309 | sql"select id from USERS for update".as[Long].list 310 | } 311 | } 312 | 313 | private def testTransactionWithSelectForUpdate(selectForUpdate: Session => Seq[Long]) = { 314 | import scala.concurrent.ExecutionContext.Implicits.global 315 | if (this.isInstanceOf[SlickBlockingAPISpecH2]) { 316 | testWithSession { implicit session => 317 | // Insert 318 | Users.insert(UsersRow(1, "takezoe", None)) 319 | 320 | // concurrently do a select for update 321 | val f1 = Future { 322 | db.withTransaction { implicit session => 323 | val l = selectForUpdate(session.asInstanceOf[Session]).length 324 | // default h2 lock timeout is 1000ms 325 | Thread.sleep(3000L) 326 | l 327 | } 328 | } 329 | 330 | // and try to update a row 331 | val f2 = Future { 332 | db.withTransaction { implicit session => 333 | Thread.sleep(500L) 334 | Users.filter(_.id === 1L).map(_.name).update("João") 335 | } 336 | } 337 | 338 | assert(Await.result(f1, Duration.Inf) == 1) 339 | assertThrows[Exception](Await.result(f2, Duration.Inf)) 340 | } 341 | } else { 342 | pending // TODO 343 | } 344 | } 345 | 346 | test("compiled support") { 347 | if (this.isInstanceOf[SlickBlockingAPISpecH2]) { 348 | testWithSession { implicit session => 349 | val compiled = Compiled { (i: Rep[Long]) => Users.filter(_.id === i) } 350 | assert(compiled(1L).run.length === 0) 351 | 352 | // Insert 353 | val insertCompiled = Users.insertInvoker 354 | 
insertCompiled.insert(UsersRow(1, "takezoe", None)) 355 | assert(compiled(1L).run.length === 1) 356 | 357 | // update 358 | val compiledUpdate = Compiled { (n: Rep[String]) => Users.filter(_.name === n).map(_.name) } 359 | compiledUpdate("takezoe").update("João") 360 | 361 | // delete 362 | compiledUpdate("João").delete 363 | 364 | assert(compiled(1L).run.length === 0) 365 | } 366 | } else { 367 | pending // TODO 368 | } 369 | } 370 | 371 | test("Plain SQL chained together") { 372 | testWithSession { implicit session => 373 | implicit val ctx = ExecutionContext.global 374 | 375 | // plain sql 376 | val id1 = 1 377 | val id2 = 2 378 | val name1 = "takezoe" 379 | val name2 = "chibochibo" 380 | val insert1 = sqlu"INSERT INTO USERS (ID, NAME) VALUES (${id1}, ${name1})" andThen 381 | sqlu"INSERT INTO USERS (ID, NAME) VALUES (${id2}, ${name2})" 382 | insert1.run 383 | 384 | val query = for { 385 | count <- sql"SELECT COUNT(*) FROM USERS".as[Int].head 386 | max <- sql"SELECT MAX(ID) FROM USERS".as[Int].head 387 | } yield (count, max) 388 | val (count1, max1) = query.run 389 | assert(count1 == 2) 390 | assert(max1 == 2) 391 | 392 | val id3 = 3 393 | val name3 = "drapp" 394 | val insert2 = sqlu"INSERT INTO USERS (ID, NAME) VALUES (${id3}, ${name3})" andThen 395 | sqlu"DELETE FROM USERS WHERE ID=${id1}" 396 | insert2.run 397 | 398 | val count2 = query.run 399 | assert(count2 == (2, 3)) 400 | 401 | } 402 | } 403 | 404 | test("DBIO.sequence") { 405 | testWithSession { implicit session => 406 | implicit val ctx = ExecutionContext.global 407 | 408 | val users = (1 to 3).map(i => UsersRow(i, i.toString, None)) 409 | 410 | val dbios = users.map(u => Users.forceInsert(u)) 411 | val dbioSequence = DBIO.sequence(dbios) 412 | 413 | dbioSequence.run 414 | 415 | val count1 = Query(Users.length).first 416 | assert(count1 == 3) 417 | } 418 | } 419 | 420 | test("foreach, foldLeft, toMap") { 421 | testWithSession { implicit session => 422 | Users.insert(UsersRow(1, "takezoe", None)) 423 | Users.insert(UsersRow(2, "chibochibo", None)) 424 | Users.insert(UsersRow(3, "tanacasino", None)) 425 | 426 | var forEachCounter = 0 427 | Users.sortBy(_.id).foreach(u => forEachCounter += 1) 428 | assert(forEachCounter == 3) 429 | 430 | val uMap = Users.map(u => (u.id, u.name)).toMap 431 | assert(uMap.size == 3) 432 | 433 | val sumIds = Users.sortBy(_.id).foldLeft(0L)((acc, u) => acc + u.id) 434 | assert(sumIds == 1 + 2 + 3) 435 | } 436 | } 437 | } 438 | -------------------------------------------------------------------------------- /src/main/scala/com/github/takezoe/slick/blocking/BlockingProfile.scala: -------------------------------------------------------------------------------- 1 | package com.github.takezoe.slick.blocking 2 | 3 | import java.sql.Connection 4 | import slick.ast.Node 5 | import slick.basic.BasicAction 6 | import slick.basic.BasicStreamingAction 7 | import slick.dbio.* 8 | import slick.jdbc.ActionBasedSQLInterpolation 9 | import slick.jdbc.JdbcBackend 10 | import slick.jdbc.JdbcProfile 11 | import slick.lifted.RunnableCompiled 12 | import slick.relational.* 13 | import scala.language.existentials 14 | import scala.language.implicitConversions 15 | import scala.util.* 16 | 17 | trait BlockingRelationalProfile extends RelationalProfile { 18 | trait BlockingAPI extends RelationalAPI {} 19 | } 20 | 21 | trait BlockingJdbcProfile extends JdbcProfile with BlockingRelationalProfile { 22 | val blockingApi = new BlockingJdbcAPI {} 23 | 24 | trait BlockingJdbcAPI extends BlockingAPI with JdbcImplicitColumnTypes with 
slick.JdbcProfileBlockingSession { 25 | 26 | implicit def actionBasedSQLInterpolation(s: StringContext): ActionBasedSQLInterpolation = 27 | new ActionBasedSQLInterpolation(s) 28 | private class BlockingJdbcActionContext(s: JdbcBackend#Session) extends backend.JdbcActionContext { 29 | val useSameThread = true 30 | override def session = s.asInstanceOf[backend.Session] 31 | override def connection: Connection = s.conn 32 | } 33 | 34 | /** 35 | * Extends DDL to add methods to create and drop tables immediately. 36 | */ 37 | implicit class DDLInvoker(schema: DDL) { 38 | def create(implicit s: JdbcBackend#Session): Unit = { 39 | createSchemaActionExtensionMethods(schema).create 40 | .asInstanceOf[SynchronousDatabaseAction[Unit, NoStream, BlockingJdbcActionContext, ?, Effect]] 41 | .run(new BlockingJdbcActionContext(s)) 42 | } 43 | 44 | def remove(implicit s: JdbcBackend#Session): Unit = { 45 | createSchemaActionExtensionMethods(schema).drop 46 | .asInstanceOf[SynchronousDatabaseAction[Unit, NoStream, BlockingJdbcActionContext, ?, Effect]] 47 | .run(new BlockingJdbcActionContext(s)) 48 | } 49 | } 50 | 51 | implicit class RepQueryExecutor[E](rep: Rep[E]) { 52 | private val invoker = new QueryInvoker[E](queryCompiler.run(Query(rep)(slick.lifted.RepShape).toNode).tree, ()) 53 | 54 | def run(implicit s: JdbcBackend#Session): E = invoker.first 55 | def selectStatement: String = invoker.selectStatement 56 | } 57 | implicit class QueryExecutor[U, C[_]](q: Query[?, U, C]) { 58 | private val invoker = new QueryInvoker[U](queryCompiler.run(q.toNode).tree, ()) 59 | 60 | def run(implicit s: JdbcBackend#Session): Seq[U] = invoker.results(0).right.get.toSeq 61 | def selectStatement: String = invoker.selectStatement 62 | } 63 | 64 | implicit class RunnableCompiledQueryExecutor[U, C[_]](c: RunnableCompiled[? <: Query[?, ?, C], C[U]]) { 65 | private val invoker = new QueryInvoker[U](c.compiledQuery, c.param) 66 | 67 | def run(implicit s: JdbcBackend#Session): Seq[U] = invoker.invoker.results(0).right.get.toSeq 68 | def selectStatement: String = invoker.selectStatement 69 | } 70 | 71 | /** 72 | * Extends QueryInvokerImpl to add selectStatement method. 
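 * The blocking invokers defined below hand it an already-compiled query tree and its parameters;
 * the statement is then executed against the implicit JDBC session passed to methods such as
 * `first` and `results`.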
73 | */ 74 | class QueryInvoker[R](tree: Node, param: Any) extends QueryInvokerImpl[R](tree, param, null) { 75 | def selectStatement: String = getStatement 76 | } 77 | 78 | class BlockingQueryInvoker[U](tree: Node, param: Any) { 79 | def selectStatement: String = { 80 | val invoker = new QueryInvoker[U](tree, param) 81 | invoker.selectStatement 82 | } 83 | def list(implicit s: JdbcBackend#Session): List[U] = { 84 | val invoker = new QueryInvoker[U](tree, param) 85 | invoker.results(0).right.get.toList 86 | } 87 | 88 | def first(implicit s: JdbcBackend#Session): U = { 89 | val invoker = new QueryInvoker[U](tree, param) 90 | invoker.first 91 | } 92 | 93 | def firstOption(implicit s: JdbcBackend#Session): Option[U] = { 94 | val invoker = new QueryInvoker[U](tree, param) 95 | invoker.firstOption 96 | } 97 | 98 | def foreach(b: U => Unit)(implicit s: JdbcBackend#Session): Unit = { 99 | val invoker = new QueryInvoker[U](tree, param) 100 | invoker.results(0).right.get.foreach(b) 101 | } 102 | 103 | def foldLeft[R](z: R)(f: (R, U) => R)(implicit s: JdbcBackend#Session): R = { 104 | val invoker = new QueryInvoker[U](tree, param) 105 | invoker.results(0).right.get.foldLeft(z)(f) 106 | } 107 | } 108 | implicit def queryToQueryInvoker[U, C[_]](q: Query[?, U, C]): BlockingQueryInvoker[U] = 109 | new BlockingQueryInvoker[U](queryCompiler.run(q.toNode).tree, ()) 110 | implicit def compiledToQueryInvoker[U, C[_]]( 111 | c: RunnableCompiled[? <: Query[?, ?, C], C[U]] 112 | ): BlockingQueryInvoker[U] = 113 | new BlockingQueryInvoker[U](c.compiledQuery, c.param) 114 | 115 | class BlockingDeleteInvoker(protected val tree: Node, param: Any) { 116 | def deleteStatement = createDeleteActionExtensionMethods(tree, param).delete.statements.head 117 | 118 | def delete(implicit s: JdbcBackend#Session): Int = { 119 | createDeleteActionExtensionMethods(tree, param).delete 120 | .asInstanceOf[SynchronousDatabaseAction[Int, NoStream, BlockingJdbcActionContext, ?, Effect]] 121 | .run(new BlockingJdbcActionContext(s)) 122 | } 123 | 124 | def deleteInvoker: this.type = this 125 | } 126 | implicit def queryToDeleteInvoker[U, C[_]](q: Query[?, U, C]): BlockingDeleteInvoker = 127 | new BlockingDeleteInvoker(deleteCompiler.run(q.toNode).tree, ()) 128 | implicit def compiledToDeleteInvoker[U, C[_]]( 129 | c: RunnableCompiled[? <: Query[?, ?, C], C[U]] 130 | ): BlockingDeleteInvoker = 131 | new BlockingDeleteInvoker(c.compiledDelete, c.param) 132 | 133 | class MapInvoker[A, B](tree: slick.ast.Node, param: Any) { 134 | def selectStatement: String = { 135 | val invoker = new QueryInvoker[(A, B)](tree, param) 136 | invoker.selectStatement 137 | } 138 | 139 | def toMap(implicit s: JdbcBackend#Session): Map[A, B] = { 140 | val invoker = new QueryInvoker[(A, B)](tree, param) 141 | invoker.results(0).right.get.toMap 142 | } 143 | } 144 | 145 | implicit def mapInvoker[A, B, C[_]](q: Query[?, (A, B), C]): MapInvoker[A, B] = 146 | new MapInvoker[A, B](queryCompiler.run(q.toNode).tree, ()) 147 | 148 | implicit def compiledMapInvoker[A, B, C[_]]( 149 | c: RunnableCompiled[? 
<: Query[?, ?, C], C[(A, B)]] 150 | ): MapInvoker[A, B] = 151 | new MapInvoker[A, B](c.compiledQuery, c.param) 152 | 153 | class BlockingUpdateInvoker[U](tree: Node, param: Any) { 154 | def updateStatement = createUpdateActionExtensionMethods(tree, param).updateStatement 155 | 156 | def update(value: U)(implicit s: JdbcBackend#Session): Int = { 157 | createUpdateActionExtensionMethods(tree, param) 158 | .update(value) 159 | .asInstanceOf[SynchronousDatabaseAction[Int, NoStream, BlockingJdbcActionContext, ?, Effect]] 160 | .run(new BlockingJdbcActionContext(s)) 161 | } 162 | 163 | def updateInvoker: this.type = this 164 | } 165 | implicit def queryToUpdateInvoker[U, C[_]](q: Query[?, U, C]): BlockingUpdateInvoker[U] = 166 | new BlockingUpdateInvoker[U](updateCompiler.run(q.toNode).tree, ()) 167 | implicit def compiledToUpdateInvoker[U, C[_]]( 168 | c: RunnableCompiled[? <: Query[?, ?, C], C[U]] 169 | ): BlockingUpdateInvoker[U] = 170 | new BlockingUpdateInvoker[U](c.compiledUpdate, c.param) 171 | 172 | class BlockingInsertInvoker[U](compiled: CompiledInsert) { 173 | 174 | def +=(value: U)(implicit session: JdbcBackend#Session): Int = insert(value) 175 | 176 | def insert(value: U)(implicit s: JdbcBackend#Session): Int = { 177 | createInsertActionExtensionMethods(compiled) 178 | .+=(value) 179 | .asInstanceOf[SynchronousDatabaseAction[Int, NoStream, BlockingJdbcActionContext, ?, Effect]] 180 | .run(new BlockingJdbcActionContext(s)) 181 | } 182 | 183 | def ++=(values: Iterable[U])(implicit s: JdbcBackend#Session): Int = insertAll(values.toSeq*) 184 | 185 | def insertAll(values: U*)(implicit s: JdbcBackend#Session): Int = { 186 | createInsertActionExtensionMethods(compiled) 187 | .++=(values) 188 | .asInstanceOf[SynchronousDatabaseAction[Option[Int], NoStream, BlockingJdbcActionContext, ?, Effect]] 189 | .run(new BlockingJdbcActionContext(s)) 190 | .getOrElse(0) 191 | } 192 | 193 | def insertOrUpdate(value: U)(implicit s: JdbcBackend#Session): Int = { 194 | createInsertActionExtensionMethods(compiled) 195 | .insertOrUpdate(value) 196 | .asInstanceOf[SynchronousDatabaseAction[Int, NoStream, BlockingJdbcActionContext, ?, Effect]] 197 | .run(new BlockingJdbcActionContext(s)) 198 | } 199 | 200 | def insertInvoker: this.type = this 201 | } 202 | implicit def queryToInsertInvoker[U, C[_]](q: Query[?, U, C]): BlockingInsertInvoker[U] = 203 | new BlockingInsertInvoker[U](compileInsert(q.toNode)) 204 | implicit def compiledToInsertInvoker[U, C[_]]( 205 | c: RunnableCompiled[? <: Query[?, ?, C], C[U]] 206 | ): BlockingInsertInvoker[U] = 207 | new BlockingInsertInvoker[U](c.compiledInsert.asInstanceOf[CompiledInsert]) 208 | 209 | implicit class ReturningInsertActionComposer2[T, R](a: ReturningInsertActionComposer[T, R]) { 210 | 211 | def +=(value: T)(implicit s: JdbcBackend#Session): R = insert(value) 212 | 213 | def insert(value: T)(implicit s: JdbcBackend#Session): R = { 214 | (a += value) match { 215 | case a: SynchronousDatabaseAction[R, ?, BlockingJdbcActionContext, ?, ?] @unchecked => { 216 | a.run(new BlockingJdbcActionContext(s)) 217 | } 218 | } 219 | } 220 | 221 | def ++=(values: Iterable[T])(implicit s: JdbcBackend#Session): Seq[R] = insertAll(values.toSeq*) 222 | 223 | def insertAll(values: T*)(implicit s: JdbcBackend#Session): Seq[R] = { 224 | (a ++= values) match { 225 | case a: SynchronousDatabaseAction[Seq[R], ?, BlockingJdbcActionContext, ?, ?] 
@unchecked => { 226 | a.run(new BlockingJdbcActionContext(s)) 227 | } 228 | } 229 | } 230 | 231 | } 232 | 233 | implicit class IntoInsertActionComposer2[T, R](a: IntoInsertActionComposer[T, R]) { 234 | def +=(value: T)(implicit s: JdbcBackend#Session): R = insert(value) 235 | 236 | def insert(value: T)(implicit s: JdbcBackend#Session): R = { 237 | (a += value) match { 238 | case a: SynchronousDatabaseAction[R, ?, BlockingJdbcActionContext, ?, ?] @unchecked => { 239 | a.run(new BlockingJdbcActionContext(s)) 240 | } 241 | } 242 | } 243 | 244 | def ++=(values: Iterable[T])(implicit s: JdbcBackend#Session): Seq[R] = insertAll(values.toSeq*) 245 | 246 | def insertAll(values: T*)(implicit s: JdbcBackend#Session): Seq[R] = { 247 | (a ++= values) match { 248 | case a: SynchronousDatabaseAction[Seq[R], ?, BlockingJdbcActionContext, ?, ?] @unchecked => { 249 | a.run(new BlockingJdbcActionContext(s)) 250 | } 251 | } 252 | } 253 | 254 | } 255 | 256 | /** 257 | * Extends Database to add methods for session management. 258 | */ 259 | implicit class BlockingDatabase(db: JdbcBackend#JdbcDatabaseDef) { 260 | 261 | def withSession[T](f: (JdbcBackend#Session) => T): T = { 262 | val session = db.createSession() 263 | try { 264 | f(session) 265 | } finally { 266 | session.close() 267 | } 268 | } 269 | 270 | def withTransaction[T](f: (JdbcBackend#Session) => T): T = 271 | withSession { s => s.withTransaction(f(s)) } 272 | } 273 | 274 | implicit class BasicStreamingActionInvoker[R, E <: Effect](action: BasicStreamingAction[Vector[R], R, E]) { 275 | def first(implicit s: JdbcBackend#Session): R = { 276 | action.head 277 | .asInstanceOf[SynchronousDatabaseAction[R, NoStream, BlockingJdbcActionContext, ?, E]] 278 | .run(new BlockingJdbcActionContext(s)) 279 | } 280 | def firstOption(implicit s: JdbcBackend#Session): Option[R] = { 281 | action.headOption 282 | .asInstanceOf[SynchronousDatabaseAction[Option[R], NoStream, BlockingJdbcActionContext, ?, E]] 283 | .run(new BlockingJdbcActionContext(s)) 284 | } 285 | def list(implicit s: JdbcBackend#Session): List[R] = { 286 | action 287 | .asInstanceOf[SynchronousDatabaseAction[Vector[R], Streaming[R], BlockingJdbcActionContext, ?, Effect]] 288 | .run(new BlockingJdbcActionContext(s)) 289 | .toList 290 | } 291 | } 292 | 293 | implicit class BasicActionInvoker[R](action: BasicAction[R, NoStream, Effect]) { 294 | def execute(implicit s: JdbcBackend#Session): R = { 295 | action 296 | .asInstanceOf[SynchronousDatabaseAction[R, NoStream, BlockingJdbcActionContext, ?, Effect]] 297 | .run(new BlockingJdbcActionContext(s)) 298 | } 299 | } 300 | 301 | /** 302 | * Extends plain db queries 303 | */ 304 | implicit class RichDBIOAction[R, E <: Effect](action: DBIOAction[R, NoStream, E]) { 305 | 306 | def executeAction[T]( 307 | action: DBIOAction[T, NoStream, E], 308 | ctx: backend.JdbcActionContext, 309 | streaming: Boolean, 310 | topLevel: Boolean 311 | ): T = action match { 312 | case a: SynchronousDatabaseAction[?, ?, backend.JdbcActionContext, ?, Effect] => a.run(ctx).asInstanceOf[T] 313 | case FlatMapAction(base, f, ec) => 314 | val result = executeAction(base, ctx, false, topLevel) 315 | executeAction(f(result), ctx, streaming, false) 316 | case AndThenAction(actions) => 317 | val last = actions.length - 1 318 | val results = actions.zipWithIndex.map { case (action, pos) => 319 | executeAction(action, ctx, streaming && pos == last, pos == 0) 320 | } 321 | results.last.asInstanceOf[T] 322 | case SequenceAction(dbios) => dbios.map(dbio => executeAction(dbio, ctx, streaming, 
topLevel)).asInstanceOf[T] 323 | case CleanUpAction(base, f, keepFailure, ec) => 324 | val t1 = Try(executeAction(base, ctx, streaming, topLevel)) 325 | 326 | val a2 = f(t1 match { 327 | case Success(_) => None 328 | case Failure(t) => Some(t) 329 | }) 330 | val t2 = Try(executeAction(a2, ctx, streaming, topLevel)) 331 | 332 | t2 match { 333 | case Failure(e) if t1.isSuccess || !keepFailure => throw e 334 | case _ => 335 | t1 match { 336 | case Success(r) => r 337 | case Failure(e) => throw e 338 | } 339 | } 340 | } 341 | 342 | def run(implicit s: JdbcBackend#Session): R = executeAction(action, new BlockingJdbcActionContext(s), false, true) 343 | } 344 | } 345 | } 346 | --------------------------------------------------------------------------------