├── base
└── src
│ ├── test
│ ├── sql
│ │ └── presto
│ │ │ └── sample
│ │ │ ├── array_data.sql
│ │ │ ├── access_log.sql
│ │ │ ├── expr.sql
│ │ │ ├── ImportExample.sql
│ │ │ ├── optional_by_column_name.sql
│ │ │ ├── optional_by_preamble.sql
│ │ │ ├── nasdaq.sql
│ │ │ ├── function.sql
│ │ │ └── sql_tmpl.sql
│ └── scala
│ │ └── xerial
│ │ └── sbt
│ │ └── sql
│ │ ├── Spec.scala
│ │ ├── SQLTemplateCompilerTest.scala
│ │ ├── SQLTemplateParserTest.scala
│ │ ├── SQLTemplateTest.scala
│ │ └── SQLModelClassGeneratorTest.scala
│ └── main
│ └── scala
│ └── xerial
│ └── sbt
│ └── sql
│ ├── SQLTemplate.scala
│ ├── JDBCClient.scala
│ ├── DataType.scala
│ ├── SQL.scala
│ ├── JDBCTypeNameParser.scala
│ ├── SQLTemplateParser.scala
│ ├── SQLTemplateCompiler.scala
│ └── SQLModelClassGenerator.scala
├── src
└── sbt-test
│ ├── sbt-sql
│ ├── sqlite
│ │ ├── src
│ │ │ └── main
│ │ │ │ └── sql
│ │ │ │ └── sqlite
│ │ │ │ └── person.sql
│ │ ├── sample.db
│ │ ├── test
│ │ ├── build.sbt
│ │ └── project
│ │ │ └── plugins.sbt
│ └── duckdb
│ │ ├── build.sbt
│ │ ├── test
│ │ ├── project
│ │ └── plugins.sbt
│ │ └── src
│ │ └── main
│ │ └── sql
│ │ └── duckdb
│ │ └── example
│ │ └── all_types.sql
├── sbt-sql-sqlite
└── basic
│ ├── src
│ ├── main
│ │ └── sql
│ │ │ └── sqlite
│ │ │ └── example
│ │ │ ├── person.sql
│ │ │ ├── person_opt.sql
│ │ │ └── person_opt2.sql
│ └── test
│ │ └── scala
│ │ └── example
│ │ └── ReadSQLTest.scala
│ ├── sample.db
│ ├── project
│ └── plugins.sbt
│ ├── test
│ └── build.sbt
└── sbt-sql-td
└── simple
├── src
└── main
│ └── sql
│ └── trino
│ └── access_log.sql
├── build.sbt
├── test
└── project
└── plugins.sbt
├── .scalafmt.conf
├── .scala-steward.conf
├── .gitignore
├── .git-blame-ignore-revs
├── generic
└── src
│ └── main
│ └── scala
│ └── xerial
│ └── sbt
│ └── sql
│ └── SbtSQLJDBC.scala
├── .github
├── workflows
│ ├── release-note.yml
│ ├── release.yml
│ └── test.yml
├── dependabot.yml
└── release.yml
├── project
├── plugin.sbt
└── build.properties
├── .mergify.yml
├── duckdb
└── src
│ └── main
│ └── scala
│ └── xerial
│ └── sbt
│ └── sql
│ └── SbtSQLDuckDB.scala
├── sqlite
└── src
│ └── main
│ └── scala
│ └── xerial
│ └── sbt
│ └── sql
│ └── SbtSQLSQLite.scala
├── trino
└── src
│ └── main
│ └── scala
│ └── xerial
│ └── sbt
│ └── sql
│ └── SbtSQLTrino.scala
├── presto
└── src
│ └── main
│ └── scala
│ └── xerial
│ └── sbt
│ └── sql
│ └── SbtSQLPresto.scala
├── td
└── src
│ └── main
│ └── scala
│ └── xerial
│ └── sbt
│ └── sql
│ └── SbtSQLTreasureData.scala
├── RELEASE_NOTES.md
├── CLAUDE.md
├── README.md
└── sbt

/base/src/test/sql/presto/sample/array_data.sql:
--------------------------------------------------------------------------------
select ARRAY[0, 1, 2]
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql/sqlite/src/main/sql/sqlite/person.sql:
--------------------------------------------------------------------------------
select * from person
--------------------------------------------------------------------------------
/base/src/test/sql/presto/sample/access_log.sql:
--------------------------------------------------------------------------------
select * from sample_datasets.www_access
--------------------------------------------------------------------------------
/base/src/test/sql/presto/sample/expr.sql:
--------------------------------------------------------------------------------
@(id:String)
select '${id.hashCode}'
--------------------------------------------------------------------------------
/base/src/test/sql/presto/sample/ImportExample.sql:
--------------------------------------------------------------------------------
@import java.util.logging._
select 1
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql-sqlite/basic/src/main/sql/sqlite/example/person.sql:
--------------------------------------------------------------------------------
select * from person
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql-td/simple/src/main/sql/trino/access_log.sql:
--------------------------------------------------------------------------------
select * from sample_datasets.www_access
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql-sqlite/basic/src/main/sql/sqlite/example/person_opt.sql:
--------------------------------------------------------------------------------
select id, name as name__optional from person
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql/sqlite/sample.db:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xerial/sbt-sql/HEAD/src/sbt-test/sbt-sql/sqlite/sample.db
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql-sqlite/basic/src/main/sql/sqlite/example/person_opt2.sql:
--------------------------------------------------------------------------------
@optional(name)
select id, name from person
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql-sqlite/basic/sample.db:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xerial/sbt-sql/HEAD/src/sbt-test/sbt-sql-sqlite/basic/sample.db
--------------------------------------------------------------------------------
/.scalafmt.conf:
--------------------------------------------------------------------------------
version = 3.8.6
maxColumn = 120
style = defaultWithAlign
optIn.breaksInsideChains = true
runner.dialect=scala212
--------------------------------------------------------------------------------
/base/src/test/sql/presto/sample/optional_by_column_name.sql:
--------------------------------------------------------------------------------
select
  1 as num,
  'hoge' as str__optional,
  true as bool__optional
--------------------------------------------------------------------------------
/base/src/test/sql/presto/sample/optional_by_preamble.sql:
--------------------------------------------------------------------------------
@optional(str, bool)
select
  1 as num,
  'hoge' as str,
  true as bool
--------------------------------------------------------------------------------
/.scala-steward.conf:
--------------------------------------------------------------------------------
# sbt plugins must use Scala 2.12.x
updates.pin = [ { groupId = "org.scala-lang", artifactId="scala-library", version = "2.12." } ]
--------------------------------------------------------------------------------
/base/src/test/sql/presto/sample/nasdaq.sql:
--------------------------------------------------------------------------------
@(start:Long, end:Long)
select * from sample_datasets.nasdaq
where time between ${start} and ${end}
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
*~
build
.bundle
vendor/bundle
*.iml
.idea
atlassian-ide-plugin.xml
target
result
log-test.properties
.DS_Store
.bsp
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql-td/simple/build.sbt:
--------------------------------------------------------------------------------
lazy val root =
  project
    .in(file("."))
    .enablePlugins(SbtSQLTreasureData)
    .settings(
      name := "sbt-sql-test"
    )
--------------------------------------------------------------------------------
/base/src/test/sql/presto/sample/function.sql:
--------------------------------------------------------------------------------
@(start:Long=0, end:Long, timeZone:String="UTC")
select *
from sample_datasets.www_access
where TD_TIME_RANGE(time, ${start}, ${end}, '${timeZone}')
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql/duckdb/build.sbt:
--------------------------------------------------------------------------------
enablePlugins(SbtSQLDuckDB)

name := "sbt-sql-duckdb-test"

libraryDependencies ++= Seq(
  "org.wvlet.airframe" %% "airframe-codec" % "23.6.0"
)
--------------------------------------------------------------------------------
/.git-blame-ignore-revs:
--------------------------------------------------------------------------------
# Scala Steward: Reformat with scalafmt 3.7.3
aaaba39e9ee6f4c57830b49fe4fb6930ba5e9677

# Scala Steward: Reformat with scalafmt 3.8.2
d5b5bf8b261b30703e387737d11e64fc73e8667e
--------------------------------------------------------------------------------
/base/src/test/sql/presto/sample/sql_tmpl.sql:
--------------------------------------------------------------------------------
@(start:String="2019-01-01", end:String="2019-01-02", cond:sql="AND time > 0")
select * from sample_datasets.nasdaq
where TD_TIME_RANGE(time, '${start}', '${end}')
${cond}
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql-td/simple/test:
--------------------------------------------------------------------------------
> sqlDir

> jdbcDriver

> jdbcURL

> jdbcUser

> jdbcPassword

# Confirm model file is generated
> generateSQLModel
$ exists target/scala-2.12/src_managed/main/access_log.scala
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql/sqlite/test:
--------------------------------------------------------------------------------
> sqlDir

> jdbcDriver

> jdbcURL

> jdbcUser

> jdbcPassword

# Confirm model file is generated
> generateSQLModel
$ exists target/scala-2.12/src_managed/main/person.scala
--------------------------------------------------------------------------------
/base/src/test/scala/xerial/sbt/sql/Spec.scala:
--------------------------------------------------------------------------------
package xerial.sbt.sql

import wvlet.log.LogSupport
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

/** */
trait Spec extends AnyWordSpec with Matchers with LogSupport {}
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql/duckdb/test:
--------------------------------------------------------------------------------
> sqlDir

> jdbcDriver

> jdbcURL

> jdbcUser

> jdbcPassword

# Confirm model file is generated
> generateSQLModel
$ exists target/scala-2.12/src_managed/main/example/all_types.scala
# Compilation should pass
> compile
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql-td/simple/project/plugins.sbt:
--------------------------------------------------------------------------------
sys.props.get("plugin.version") match {
  case Some(x) => addSbtPlugin("org.xerial.sbt" % "sbt-sql-td" % x)
  case _ => sys.error("""|The system property 'plugin.version' is not defined.
                         |Specify this property using the scriptedLaunchOpts -D.""".stripMargin)
}
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql/sqlite/build.sbt:
--------------------------------------------------------------------------------
enablePlugins(SbtSQLJDBC)

name := "sbt-sql-test"

// You can change the SQL file folder. The default is src/main/sql
sqlDir := (Compile / sourceDirectory).value / "sql" / "sqlite"

// Configure your JDBC driver
jdbcDriver := "org.sqlite.JDBC"
jdbcURL := "jdbc:sqlite:sample.db"
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql-sqlite/basic/project/plugins.sbt:
--------------------------------------------------------------------------------
sys.props.get("plugin.version") match {
  case Some(x) => addSbtPlugin("org.xerial.sbt" % "sbt-sql-sqlite" % x)
  case _ => sys.error("""|The system property 'plugin.version' is not defined.
                         |Specify this property using the scriptedLaunchOpts -D.""".stripMargin)
}
--------------------------------------------------------------------------------
/generic/src/main/scala/xerial/sbt/sql/SbtSQLJDBC.scala:
--------------------------------------------------------------------------------
package xerial.sbt.sql

import sbt.AutoPlugin
import sbt.plugins.JvmPlugin

/** */
object SbtSQLJDBC extends AutoPlugin {

  object autoImport extends SQL.Keys

  override def trigger = noTrigger

  override def requires = JvmPlugin

  override def projectSettings = SQL.sqlSettings
}
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql-sqlite/basic/test:
--------------------------------------------------------------------------------
> sqlDir

> jdbcDriver

> jdbcURL

> jdbcUser

> jdbcPassword

# Confirm model file is generated
> generateSQLModel
$ exists target/scala-2.12/src_managed/main/example/person.scala
$ exists target/scala-2.12/src_managed/main/example/person_opt.scala
$ exists target/scala-2.12/src_managed/main/example/person_opt2.scala
> test
--------------------------------------------------------------------------------
/.github/workflows/release-note.yml:
--------------------------------------------------------------------------------
name: Release Note

on:
  push:
    tags:
      - v*
  workflow_dispatch:

jobs:
  release:
    name: Create a new release note
    runs-on: ubuntu-latest
    steps:
      - name: Create a release note
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          gh release create "$GITHUB_REF_NAME" --repo="$GITHUB_REPOSITORY" --generate-notes
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql/sqlite/project/plugins.sbt:
--------------------------------------------------------------------------------
sys.props.get("plugin.version") match {
  case Some(x) => addSbtPlugin("org.xerial.sbt" % "sbt-sql" % x)
  case _ => sys.error("""|The system property 'plugin.version' is not defined.
                         |Specify this property using the scriptedLaunchOpts -D.""".stripMargin)
}
// Add your jdbc driver dependency for checking the result schema
libraryDependencies ++= Seq(
  "org.xerial" % "sqlite-jdbc" % "3.50.3.0"
)
--------------------------------------------------------------------------------
/project/plugin.sbt:
--------------------------------------------------------------------------------
// Ignore binary incompatible errors for libraries using scala-xml.
ThisBuild / libraryDependencySchemes += "org.scala-lang.modules" %% "scala-xml" % "always"
addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.3.1")
addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.5")
addSbtPlugin("com.github.sbt" % "sbt-dynver" % "5.1.1")

scalacOptions ++= Seq("-deprecation", "-feature")

libraryDependencies += "org.scala-sbt" %% "scripted-plugin" % sbtVersion.value
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql/duckdb/project/plugins.sbt:
--------------------------------------------------------------------------------
sys.props.get("plugin.version") match {
  case Some(x) => addSbtPlugin("org.xerial.sbt" % "sbt-sql-duckdb" % x)
  case _ => sys.error("""|The system property 'plugin.version' is not defined.
                         |Specify this property using the scriptedLaunchOpts -D.""".stripMargin)
}
// Add your jdbc driver dependency for checking the result schema
libraryDependencies ++= Seq(
  // "org.xerial" % "sqlite-jdbc" % "3.42.0.0"
)
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql-sqlite/basic/build.sbt:
--------------------------------------------------------------------------------
enablePlugins(SbtSQLSQLite)

name := "sqlite-basic-test"
jdbcURL := "jdbc:sqlite:sample.db"

val AIRFRAME_VERSION = "23.5.3"

libraryDependencies ++= Seq(
  "org.xerial" % "sqlite-jdbc" % "3.50.3.0",
  "org.wvlet.airframe" %% "airframe-codec" % AIRFRAME_VERSION,
  "org.wvlet.airframe" %% "airspec" % AIRFRAME_VERSION % "test"
)

testFrameworks += new TestFramework("wvlet.airspec.Framework")
--------------------------------------------------------------------------------
/base/src/test/scala/xerial/sbt/sql/SQLTemplateCompilerTest.scala:
--------------------------------------------------------------------------------
package xerial.sbt.sql

/** */
class SQLTemplateCompilerTest extends Spec {
  "SQLTemplateCompiler" should {

    "handle embedded Scala expression" in {

      val t =
        """@import java.util.Locale
          |@(range:(Int,Int),str:String="test")
          |select ${range._1}, ${range._2}, '${str.toString}'
        """.stripMargin

      SQLTemplateCompiler.compile(t)
    }

  }

}
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  - package-ecosystem: "github-actions" # See documentation for possible values
    directory: "/" # Location of package manifests
    schedule:
      interval: "weekly"
--------------------------------------------------------------------------------
/.mergify.yml:
--------------------------------------------------------------------------------
pull_request_rules:
  - name: Automatic merge Scala Steward PRs
    conditions:
      - author=xerial-bot
      - check-success=code format
      - check-success=unit tests
      - check-success=sbt scripted tests
      - or:
          - title~=^Update airframe-
          - title~=^Update sbt-airframe
          - title~=^Update airspec
          - title~=^Update scalafmt-core
          - label=sbt-plugin-update
          - label=test-library-update
          - label=library-update
    actions:
      merge:
        method: squash
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

sbt.version=1.11.6
--------------------------------------------------------------------------------
/duckdb/src/main/scala/xerial/sbt/sql/SbtSQLDuckDB.scala:
--------------------------------------------------------------------------------
package xerial.sbt.sql

import sbt._
import sbt.Keys.sourceDirectory
import sbt.plugins.JvmPlugin

object SbtSQLDuckDB extends AutoPlugin {
  object autoImport extends SQL.Keys

  import autoImport._

  lazy val duckdbSettings = SQL.sqlSettings ++ Seq(
    sqlDir := (Compile / sourceDirectory).value / "sql" / "duckdb",
    jdbcDriver := "org.duckdb.DuckDBDriver",
    // TODO support multiple files
    jdbcURL := "jdbc:duckdb:"
  )

  override def trigger = noTrigger
  override def requires = JvmPlugin
  override def projectSettings = duckdbSettings
}
--------------------------------------------------------------------------------
/base/src/main/scala/xerial/sbt/sql/SQLTemplate.scala:
--------------------------------------------------------------------------------
package xerial.sbt.sql

import wvlet.log.LogSupport

object SQLTemplate extends LogSupport {
  sealed trait Fragment
  case class Text(s: String) extends Fragment
  case class Param(name: String, typeName: String) extends Fragment

  def apply(sql: String): SQLTemplate = SQLTemplateCompiler.compile(sql)
}

case class SQLTemplate(
    sql: String,
    populated: String,
    params: Seq[Preamble.FunctionArg],
    imports: Seq[Preamble.Import],
    optionals: Seq[Preamble.Optional]
) {
  def optionalParams: Set[String] = optionals.map(_.columns).flatten.toSet[String]
}
--------------------------------------------------------------------------------
/sqlite/src/main/scala/xerial/sbt/sql/SbtSQLSQLite.scala:
--------------------------------------------------------------------------------
package xerial.sbt.sql

import sbt._
import sbt.Keys.sourceDirectory
import sbt.plugins.JvmPlugin

/** */
object SbtSQLSQLite extends AutoPlugin {

  object autoImport extends SQL.Keys

  import autoImport._

  lazy val sqliteSettings = SQL.sqlSettings ++ Seq(
    sqlDir := (Compile / sourceDirectory).value / "sql" / "sqlite",
    jdbcDriver := "org.sqlite.JDBC",
    // TODO support multiple files
    jdbcURL := "jdbc:sqlite::memory:"
  )

  override def trigger = noTrigger

  override def requires = JvmPlugin
  override def projectSettings = sqliteSettings
}
--------------------------------------------------------------------------------
/trino/src/main/scala/xerial/sbt/sql/SbtSQLTrino.scala:
--------------------------------------------------------------------------------
package xerial.sbt.sql

import sbt.AutoPlugin

import sbt.Keys._
import sbt._
import sbt.plugins.JvmPlugin

object SbtSQLTrino extends AutoPlugin {

  object autoImport extends SQL.Keys

  import autoImport._

  lazy val trinoSettings = SQL.sqlSettings ++ Seq(
    sqlDir := (Compile / sourceDirectory).value / "sql" / "trino",
    jdbcDriver := "io.trino.jdbc.TrinoDriver",
    jdbcURL := "jdbc:trino://(your trino server url)/(catalog name)"
  )

  override def trigger = noTrigger

  override def requires = JvmPlugin
  override def projectSettings = trinoSettings
}
--------------------------------------------------------------------------------
/base/src/test/scala/xerial/sbt/sql/SQLTemplateParserTest.scala:
--------------------------------------------------------------------------------
package xerial.sbt.sql

import xerial.sbt.sql.Preamble.FunctionArg

/** */
class SQLTemplateParserTest extends Spec {

  "SQLTemplateParser" should {

    "parse function preamble" in {
      val f = SQLTemplateParser.parseFunction("""@(id:Int=0, name:String, timeZone:String="UTC", longVal:Long=100L)""")
      f.args should have length (4)
      f.args(0) shouldBe FunctionArg("id", "Int", Some("0"))
      f.args(1) shouldBe FunctionArg("name", "String", None)
      f.args(2) shouldBe FunctionArg("timeZone", "String", Some("UTC"))
      f.args(3) shouldBe FunctionArg("longVal", "Long", Some("100L"))
    }
  }
}
--------------------------------------------------------------------------------
/presto/src/main/scala/xerial/sbt/sql/SbtSQLPresto.scala:
--------------------------------------------------------------------------------
package xerial.sbt.sql

import sbt.AutoPlugin

import sbt.Keys._
import sbt._
import sbt.plugins.JvmPlugin

object SbtSQLPresto extends AutoPlugin {

  object autoImport extends SQL.Keys

  import autoImport._

  lazy val prestoSettings = SQL.sqlSettings ++ Seq(
    sqlDir := (Compile / sourceDirectory).value / "sql" / "presto",
    jdbcDriver := "io.prestosql.jdbc.PrestoDriver",
    jdbcURL := "jdbc:presto://(your presto server url)/(catalog name)"
  )

  override def trigger = noTrigger

  override def requires = JvmPlugin
  override def projectSettings = prestoSettings
}
--------------------------------------------------------------------------------
/.github/release.yml:
--------------------------------------------------------------------------------
changelog:
  categories:
    - title: '🔥 Breaking Changes'
      labels:
        - 'breaking'
    - title: '👋 Deprecated'
      labels:
        - 'deprecation'
    - title: '🚀 Features'
      labels:
        - 'feature'
        - 'enhancement'
    - title: '🐛 Bug Fixes'
      labels:
        - 'bug'
    - title: '🔗 Dependency Updates'
      labels:
        - 'library-update'
        - 'dependencies'
    - title: '🛠 Internal Updates'
      labels:
        - 'internal'
        - 'kaizen'
        - 'test-library-update'
        - 'sbt-plugin-update'
    - title: '📚 Docs'
      labels:
        - 'doc'
    - title: Other Changes
      labels:
        - "*"
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
name: Release

on:
  push:
    tags:
      - v*
  workflow_dispatch:

jobs:
  publish:
    name: Publish
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
        with:
          fetch-depth: 10000
      - run: git fetch --tags -f
      - uses: actions/setup-java@v4
        with:
          distribution: 'temurin'
          java-version: '21'
      - name: Setup GPG
        env:
          PGP_SECRET: ${{ secrets.PGP_SECRET }}
        run: echo $PGP_SECRET | base64 --decode | gpg --import --batch --yes
      - name: Build bundle
        env:
          PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }}
        run: |
          ./sbt publishSigned
      - name: Release to Sonatype
        env:
          SONATYPE_USERNAME: '${{ secrets.SONATYPE_USERNAME }}'
          SONATYPE_PASSWORD: '${{ secrets.SONATYPE_PASSWORD }}'
        run: ./sbt sonaRelease
--------------------------------------------------------------------------------
/base/src/main/scala/xerial/sbt/sql/JDBCClient.scala:
--------------------------------------------------------------------------------
package xerial.sbt.sql

import java.sql.{Connection, DriverManager, ResultSet}

import wvlet.log.LogSupport

private[sql] case class JDBCConfig(
    driver: String,
    url: String,
    user: String,
    password: String
)

/** */
class JDBCClient(config: JDBCConfig) extends LogSupport {
  private def withResource[R <: AutoCloseable, U](r: R)(body: R => U): U = {
    try {
      body(r)
    } finally {
      r.close()
    }
  }

  def withConnection[U](body: Connection => U): U = {
    Class.forName(config.driver)
    withResource(DriverManager.getConnection(config.url, config.user, config.password)) { conn =>
      body(conn)
    }
  }

  def submitQuery[U](conn: Connection, sql: String)(body: ResultSet => U): U = {
    withResource(conn.createStatement()) { stmt =>
      info(s"Executing SQL:\n${sql}")
      withResource(stmt.executeQuery(sql)) { rs =>
        body(rs)
      }
    }
  }

}
--------------------------------------------------------------------------------
/td/src/main/scala/xerial/sbt/sql/SbtSQLTreasureData.scala:
--------------------------------------------------------------------------------
package xerial.sbt.sql

import sbt.{AutoPlugin, DirectCredentials}
import sbt.plugins.JvmPlugin
import sbt.Keys._
import sbt._

object SbtSQLTreasureData extends AutoPlugin {

  object autoImport extends SQL.Keys

  import autoImport._

  lazy val tdSettings = SQL.sqlSettings ++ Seq(
    sqlDir := (Compile / sourceDirectory).value / "sql" / "trino",
    jdbcDriver := "io.trino.jdbc.TrinoDriver",
    jdbcURL := {
      val host = credentials.value
        .collectFirst {
          case d: DirectCredentials if d.realm == "Treasure Data" =>
            d.host
        }.getOrElse("api-presto.treasuredata.com")
      s"jdbc:trino://${host}:443/td-presto?SSL=true"
    },
    jdbcUser := {
      val user = credentials.value.collectFirst {
        case d: DirectCredentials if d.realm == "Treasure Data" =>
          d.userName
      }
      user.orElse(sys.env.get("TD_API_KEY")).getOrElse("")
    },
    jdbcPassword := "dummy-password"
  )

  override def trigger = noTrigger

  override def requires = JvmPlugin
  override def projectSettings = tdSettings
}
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
name: CI

on:
  pull_request:
    paths:
      - '**.scala'
      - '**.java'
      - '**.sbt'
      - 'project/build.properties'
  push:
    branches:
      - master
    paths:
      - '**.scala'
      - '**.java'
      - '**.sbt'
      - 'project/build.properties'

jobs:
  code_format:
    name: code format
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - name: scalafmt test
        run: ./sbt scalafmtCheckAll
  unit_test:
    name: unit tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: olafurpg/setup-scala@v14
        with:
          java-version: openjdk@1.11
      - name: Unit tests
        run: ./sbt test
  test_sbt_plugin:
    name: sbt scripted tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: olafurpg/setup-scala@v14
        with:
          java-version: openjdk@1.11
      - name: build plugins
        run: ./sbt publishLocal
      - name: sbt scripted test
        # Skip sbt-sql-td test
        run: ./sbt "scripted sbt-sql/* sbt-sql-sqlite/*"
--------------------------------------------------------------------------------
/base/src/test/scala/xerial/sbt/sql/SQLTemplateTest.scala:
--------------------------------------------------------------------------------
package xerial.sbt.sql

import xerial.sbt.sql.Preamble.FunctionArg

/** */
class SQLTemplateTest extends Spec {

  "SQLTemplate" should {
    "extract embedded variables" in {
      val params = SQLTemplateParser.extractParam("""
          |select * from sample_datasets.nasdaq
          |where td_time_range(time, '${start:String}', '${end:String}')
          |${cond:sql=AND time > 0}
        """.stripMargin)

      info(params)
      params.length shouldBe 3
      params(0) shouldBe FunctionArg("start", "String", None) // , 3, 27, 42)
      params(1) shouldBe FunctionArg("end", "String", None) // , 3, 46, 59)
      params(2) shouldBe FunctionArg("cond", "sql", Some("AND time > 0")) // , 4, 0, 24)
    }

    "remove type param" in {
      val removed = SQLTemplateParser.removeParamType("select ${a:Int}, ${b:String}")
      removed shouldBe "select ${a}, ${b}"
    }

    "populate params" in {
      val populated =
        SQLTemplate("select ${a:Int}, '${b:String}', ${c:Float}, ${d:Boolean}, '${e:String}', ${f:Double}").populated
      populated shouldBe "select 0, 'dummy', 0.0, true, 'dummy', 0.0"
    }
  }

}
--------------------------------------------------------------------------------
/base/src/main/scala/xerial/sbt/sql/DataType.scala:
--------------------------------------------------------------------------------
package xerial.sbt.sql

/** */
object DataType {

  case object BooleanType extends DataType("Boolean")

  case object ByteType extends DataType("Byte")

  case object ShortType extends DataType("Short")

  case object IntType extends DataType("Int")

  case object DateType extends DataType("java.sql.Date")

  case object LongType extends DataType("Long")

  case object FloatType extends DataType("Float")

  case object DoubleType extends DataType("Double")

  case object StringType extends DataType("String")

  case object BinaryType extends DataType(name = "Array[Byte]")

  case class DecimalType(p: Int, s: Int) extends DataType(name = "Double") // Map decimal to Double

  case class ArrayType(elementType: DataType) extends DataType(s"Array[${elementType.name}]", Seq(elementType))

  case class MapType(keyType: DataType, valueType: DataType)
      extends DataType(s"Map[${keyType.name}, ${valueType.name}]", Seq(keyType, valueType))

  case object AnyType extends DataType("Any")

  case class OptionType(elementType: DataType) extends DataType(name = s"Option[${elementType.name}]", Seq(elementType))

}

sealed abstract class DataType(val name: String, val typeArgs: Seq[DataType] = Seq.empty)
--------------------------------------------------------------------------------
/RELEASE_NOTES.md:
--------------------------------------------------------------------------------
Release Notes
===

The latest release notes are available here: https://github.com/xerial/sbt-sql/releases

## 0.13
- Support stream read with .selectStream(param){ iterator => ... }
- Remove the dependency on airframe-control
- Improved the internal test coverage
- Upgrade sqlite-jdbc to 3.32.3

## 0.12
- Support Option[X] types by using the '__optional' column name suffix, or the `@optional(col_name1, ...)` parameter.

## 0.11
- Drop support for sbt 0.13
- Generate Scala case classes
- Use airframe-codec and airframe-jdbc for mapping JDBC query results into Scala case classes

## 0.8
- Upgrade to Presto 0.186

## 0.7
- Support sbt-1.0.0-RC3
- Dropped support for 0.13.x
- Support SQLite (sbt-sql-sqlite plugin)

## 0.5
- Support Scala expressions inside `${...}` blocks
- Add support for function definitions `@(...)` at the SQL file header:
```
@(v1:String, v2:Int = 0)
select '${v1}', ${v2}
```
- Support include statement, e.g., `@include java.lang.sql._`

## 0.4
- Split the plugin into DB-specific ones
  - sbt-sql (generic JDBC), sbt-sql-presto, sbt-sql-td (for Treasure Data Presto)
- Simplified the configuration

## 0.3
- Add Treasure Data Presto support
- Improved log messages

## 0.2
- Add selectWith
- Add select(param)(connection)

## 0.1
- Initial release
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql/duckdb/src/main/sql/duckdb/example/all_types.sql:
--------------------------------------------------------------------------------
select
  true p_boolean,
  cast(1 as TINYINT) p_tinyint,
  cast(1 as SMALLINT) p_smallint,
  cast(1 as INTEGER) p_integer,
  cast(1 as BIGINT) p_bigint,
  cast(1 as UTINYINT) p_utinyint,
  cast(1 as USMALLINT) p_usmallint,
  cast(1 as UINTEGER) p_uinteger,
  cast(1 as UBIGINT) p_ubigint,
  cast(1 as HUGEINT) p_hugeint,
  cast(1.0 as FLOAT) p_float,
  cast(1.0 as DOUBLE) p_double,
  cast(1 as DECIMAL) p_decimal,
  cast('a' as VARCHAR) p_varchar,
  'A'::BLOB p_blob,
  'A'::BYTEA p_bloba,
  time '12:34:56' p_time,
  date '1992-09-20' p_date,
  timestamp '1992-09-20 12:34:56' p_timestamp,
  timestamp '1992-09-20 12:34:56.789' p_timestamp_ms,
  timestamp '1992-09-20 12:34:56.789123456' p_timestamp_ns,
  timestamp '1992-09-20 12:34:56' p_timestamp_s,
  TIMESTAMP '1992-09-20 12:34:56.789+01:00' p_timestamp_with_timezone,
  null p_null,
  '101010'::BIT p_bit,
  interval 1 year p_interval,
  [1, 2, 3] p_int_list,
  ['a', 'b'] p_varchar_list,
  [true, false] p_boolean_list,
  -- STRUCT
  -- ENUM
  uuid() p_uuid,
  '{"duck":42}'::JSON p_json,
  map {'k1':1, 'k2':3} p_varchar_int_map,
  -- UNKNOWN
  -- UNION
  cast(1 as INT16) p_int16,
  cast(1 as INT32) p_int32,
  cast(1 as INT64) p_int64,
  cast(1 as INT128) p_int128,
  1.0 p_real,
  cast(1.0 as FLOAT4) p_float4,
  cast(1.0 as FLOAT8) p_float8
--------------------------------------------------------------------------------
/base/src/main/scala/xerial/sbt/sql/SQL.scala:
--------------------------------------------------------------------------------
package xerial.sbt.sql

import sbt.Keys._
import sbt._
import sbt.internal.io.Source

/** */
object SQL {

  trait Keys {
    val sqlDir = settingKey[File]("A folder containing SQL files. e.g. src/main/sql")
    val jdbcDriver = settingKey[String]("JDBC driver class name. e.g., com.facebook.presto.jdbc.PrestoDriver")
    val jdbcURL = taskKey[String]("JDBC connection URL. e.g., jdbc:trino://api-presto.treasuredata.com:443/td-presto")
    val jdbcUser = taskKey[String]("JDBC user name")
    val jdbcPassword = taskKey[String]("JDBC password")
    val generateSQLModel = taskKey[Seq[File]]("create model classes from SQL files")
    val sqlModelClasses = taskKey[Seq[File]]("Generated SQL model classes")
  }

  object autoImport extends Keys

  import autoImport._

  // TODO split plugins for each jdbc driver (mysqlSettings, prestoSettings, etc.)
  lazy val sqlSettings = Seq(
    sqlDir := (Compile / sourceDirectory).value / "sql",
    generateSQLModel := {
      val config = JDBCConfig(jdbcDriver.value, jdbcURL.value, jdbcUser.value, jdbcPassword.value)
      val generator = new SQLModelClassGenerator(config)
      generator.generate(
        GeneratorConfig(sqlDir.value, (Compile / managedSourceDirectories).value.head)
      )
    },
    sqlModelClasses := generateSQLModel.value,
    Compile / sourceGenerators += sqlModelClasses.taskValue,
    watchSources += new Source(
      sqlDir.value,
      new NameFilter {
        override def accept(name: String): Boolean = {
          name.endsWith(".sql")
        }
      },
      NothingFilter
    ),
    jdbcUser := "",
    jdbcPassword := ""
  )
}
--------------------------------------------------------------------------------
src/main/sql") 12 | val jdbcDriver = settingKey[String]("JDBC driver class name. e.g., com.facebook.presto.jdbc.PrestoDriver") 13 | val jdbcURL = taskKey[String]("JDBC connection URL. e.g., jdbc:trino://api-presto.treasuredata.com:443/td-presto") 14 | val jdbcUser = taskKey[String]("JDBC user name") 15 | val jdbcPassword = taskKey[String]("JDBC password") 16 | val generateSQLModel = taskKey[Seq[File]]("create model classes from SQL files") 17 | val sqlModelClasses = taskKey[Seq[File]]("Generated SQL model classes") 18 | } 19 | 20 | object autoImport extends Keys 21 | 22 | import autoImport._ 23 | 24 | // TODO split plugins for each jdbc drivers (mysqlSettings, prestoSettings, etc.) 25 | lazy val sqlSettings = Seq( 26 | sqlDir := (Compile / sourceDirectory).value / "sql", 27 | generateSQLModel := { 28 | val config = JDBCConfig(jdbcDriver.value, jdbcURL.value, jdbcUser.value, jdbcPassword.value) 29 | val generator = new SQLModelClassGenerator(config) 30 | generator.generate( 31 | GeneratorConfig(sqlDir.value, (Compile / managedSourceDirectories).value.head) 32 | ) 33 | }, 34 | sqlModelClasses := generateSQLModel.value, 35 | Compile / sourceGenerators += sqlModelClasses.taskValue, 36 | watchSources += new Source( 37 | sqlDir.value, 38 | new NameFilter { 39 | override def accept(name: String): Boolean = { 40 | name.endsWith(".sql") 41 | } 42 | }, 43 | NothingFilter 44 | ), 45 | jdbcUser := "", 46 | jdbcPassword := "" 47 | ) 48 | } 49 | -------------------------------------------------------------------------------- /base/src/test/scala/xerial/sbt/sql/SQLModelClassGeneratorTest.scala: -------------------------------------------------------------------------------- 1 | package xerial.sbt.sql 2 | 3 | import java.io.File 4 | import java.sql.{JDBCType, Types} 5 | 6 | import xerial.sbt.sql.DataType.{ArrayType, MapType, OptionType} 7 | import xerial.sbt.sql.SQLModelClassGenerator.JDBCResultColumn 8 | 9 | /** */ 10 | class SQLModelClassGeneratorTest extends Spec { 11 | "SQLModelClassGenerator" should { 12 | "generate case class code" in { 13 | if (sys.env.get("TD_API_KEY").isEmpty) { 14 | // Skip tests that requrie TD_API_KEY 15 | pending 16 | } 17 | val g = new SQLModelClassGenerator( 18 | JDBCConfig( 19 | driver = "io.trino.jdbc.TrinoDriver", 20 | url = "jdbc:trino://api-presto.treasuredata.com:443/td-presto?SSL=true", 21 | user = sys.env("TD_API_KEY"), 22 | password = "dummy" 23 | ) 24 | ) 25 | g.generate( 26 | GeneratorConfig( 27 | new File("base/src/test/sql/trino"), 28 | new File("target/sbt-1.0/src_managed/test") 29 | ) 30 | ) 31 | } 32 | 33 | "generate Scala type names" in { 34 | MapType(DataType.StringType, DataType.LongType).name shouldBe "Map[String, Long]" 35 | ArrayType(DataType.StringType).name shouldBe "Array[String]" 36 | OptionType(DataType.StringType).name shouldBe "Option[String]" 37 | } 38 | 39 | "support optional types" in { 40 | val columns = Seq( 41 | JDBCResultColumn("id", "varchar", Types.VARCHAR, true), 42 | JDBCResultColumn("param__optional", "bigint", Types.BIGINT, true), 43 | JDBCResultColumn("type", "varchar", Types.VARCHAR, true), 44 | JDBCResultColumn("end_at", "bigint", Types.BIGINT, true) 45 | ) 46 | val schema = SQLModelClassGenerator.generateSchema(columns, Set("end_at")) 47 | schema shouldBe Schema( 48 | IndexedSeq( 49 | Column("id", DataType.StringType, JDBCType.VARCHAR, true), 50 | Column("param", OptionType(DataType.LongType), JDBCType.BIGINT, true), 51 | // quote Scala reserved words 52 | Column("`type`", DataType.StringType, JDBCType.VARCHAR, true), 53 | // 
Optional type specified by preamble 54 | Column("end_at", OptionType(DataType.LongType), JDBCType.BIGINT, true) 55 | ) 56 | ) 57 | } 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /CLAUDE.md: -------------------------------------------------------------------------------- 1 | # CLAUDE.md 2 | 3 | This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. 4 | 5 | ## Project Overview 6 | 7 | sbt-sql is an sbt plugin that generates Scala model classes from SQL query files. The plugin integrates SQL with Scala by creating type-safe model classes to read SQL results, eliminating runtime errors from column name mismatches and enabling IDE support for SQL result parameters. 8 | 9 | ## Key Architecture 10 | 11 | ### Multi-Module Structure 12 | 13 | The project consists of several modules: 14 | 15 | - **base**: Core SQL parsing and model generation logic 16 | - `SQLTemplateParser`: Parses SQL files with template variables 17 | - `SQLModelClassGenerator`: Generates Scala case classes from SQL schemas 18 | - `JDBCClient`: Handles JDBC connections and schema retrieval 19 | 20 | - **generic**: Generic JDBC driver support (sbt-sql) 21 | - **sqlite**: SQLite-specific plugin (sbt-sql-sqlite) 22 | - **duckdb**: DuckDB-specific plugin (sbt-sql-duckdb) 23 | - **trino**: Trino-specific plugin (sbt-sql-trino) 24 | - **td**: Treasure Data-specific plugin (sbt-sql-td) 25 | 26 | ### SQL File Processing Flow 27 | 28 | 1. SQL files in `src/main/sql` (or database-specific subdirectories) are parsed 29 | 2. Template variables `@(name:type)` and expressions `${expr}` are extracted 30 | 3. JDBC connection is used to validate SQL and retrieve result schema 31 | 4. Scala model classes are generated in `target/src_managed/main` 32 | 33 | ## Development Commands 34 | 35 | ### Build and Compile 36 | ```bash 37 | sbt compile 38 | ``` 39 | 40 | ### Run Tests 41 | ```bash 42 | sbt test 43 | ``` 44 | 45 | ### Run Scripted Tests (Integration Tests) 46 | ```bash 47 | sbt scripted 48 | ``` 49 | 50 | ### Run a Specific Scripted Test 51 | ```bash 52 | sbt "scripted sbt-sql-sqlite/basic" 53 | ``` 54 | 55 | ### Format Code 56 | ```bash 57 | sbt scalafmtAll 58 | ``` 59 | 60 | ### Clean Build 61 | ```bash 62 | sbt clean 63 | ``` 64 | 65 | ### Publish Local (for testing plugin changes) 66 | ```bash 67 | sbt publishLocal 68 | ``` 69 | 70 | ### Update Plugin Version in Test Projects 71 | ```bash 72 | sbt bumpPluginVersion 73 | ``` 74 | 75 | ## Plugin Development Tips 76 | 77 | When modifying the plugin: 78 | 79 | 1. The main plugin logic is in the `base` module under `xerial.sbt.sql` 80 | 2. Database-specific plugins extend `SbtSQLBase` from the base module 81 | 3. SQL template parsing uses Scala parser combinators 82 | 4. Generated code uses Airframe Surface for JDBC-to-Scala mapping 83 | 5. Scripted tests in `src/sbt-test` verify plugin functionality 84 | 85 | ## Testing Plugin Changes 86 | 87 | To test changes: 88 | 1. Run `sbt publishLocal` to publish the plugin locally 89 | 2. Update the plugin version in test projects if needed 90 | 3. 
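
As a rough, runnable illustration of step 2, the following sketch feeds a template to the parser from the `base` module (the demo object itself is hypothetical; `SQLTemplateParser.parse` and its `ParseResult` come from `base/src/main/scala/xerial/sbt/sql/SQLTemplateParser.scala`):

```scala
import xerial.sbt.sql.SQLTemplateParser

object TemplateFlowDemo {
  def main(args: Array[String]): Unit = {
    // Parse the @(...) preamble and the SQL body of a template
    val result = SQLTemplateParser.parse(
      """@(start:Long, end:Long)
        |select * from sample_datasets.nasdaq
        |where time between ${start} and ${end}""".stripMargin
    )
    println(result.args) // List(start:Long, end:Long)
    println(result.sql)  // the sanitized SQL body, with any inline parameter types removed
  }
}
```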
## Development Commands

### Build and Compile
```bash
sbt compile
```

### Run Tests
```bash
sbt test
```

### Run Scripted Tests (Integration Tests)
```bash
sbt scripted
```

### Run a Specific Scripted Test
```bash
sbt "scripted sbt-sql-sqlite/basic"
```

### Format Code
```bash
sbt scalafmtAll
```

### Clean Build
```bash
sbt clean
```

### Publish Local (for testing plugin changes)
```bash
sbt publishLocal
```

### Update Plugin Version in Test Projects
```bash
sbt bumpPluginVersion
```

## Plugin Development Tips

When modifying the plugin:

1. The main plugin logic is in the `base` module under `xerial.sbt.sql`
2. Database-specific plugins extend `SbtSQLBase` from the base module
3. SQL template parsing uses Scala parser combinators
4. Generated code uses Airframe Surface for JDBC-to-Scala mapping
5. Scripted tests in `src/sbt-test` verify plugin functionality

## Testing Plugin Changes

To test changes:
1. Run `sbt publishLocal` to publish the plugin locally
2. Update the plugin version in test projects if needed
3. Run `sbt scripted` to execute integration tests
--------------------------------------------------------------------------------
/src/sbt-test/sbt-sql-sqlite/basic/src/test/scala/example/ReadSQLTest.scala:
--------------------------------------------------------------------------------
package example

import java.sql.DriverManager

object ReadSQLTest extends wvlet.airspec.AirSpec {

  private implicit lazy val connection = {
    Class.forName("org.sqlite.JDBC")
    DriverManager.getConnection("jdbc:sqlite::memory:")
  }

  private def execute(sql: String): Unit = {
    val stmt = connection.createStatement()
    try {
      stmt.execute(sql)
    } finally {
      stmt.close()
    }
  }

  override protected def beforeAll: Unit = {
    execute(s"create table person(id string, name string)")
    execute(s"insert into person values ('1', 'leo'), ('2', 'yui'), ('3', null)")
  }

  override protected def afterAll: Unit = {
    connection.close()
  }

  test("show original SQL") {
    val sql = person.sql()
    assert(sql == "select * from person")
  }

  test("select()") {
    val result = person.select()
    assert(result == Seq(person(1, "leo"), person(2, "yui"), person(3, "")))
  }

  test("selectWith(SQL)") {
    // select with sql
    val r1 = person.selectWith("select * from person where id = '2'")
    info(s"person.selectWith(...): ${r1}")
    assert(r1 == Seq(person(2, "yui")))
  }

  test("selectStream") {
    // Reading with stream
    info(s"person.selectStream()")
    person.selectStream() { it =>
      assert(it.hasNext)
      val r0 = it.next()
      assert(r0 == person(1, "leo"))
      assert(it.hasNext)
      val r1 = it.next()
      assert(r1 == person(2, "yui"))
      assert(it.hasNext)
      val r2 = it.next()
      assert(r2 == person(3, ""))
      info(Seq(r0, r1, r2))
      assert(it.hasNext == false)
    }
  }

  test("selectStream and map") {
    person.selectStream() { it =>
      val result = it.map(_.toString).mkString(",")
      info(s"person.selectStream(): ${result}")
      assert(result == """person(1,leo),person(2,yui),person(3,)""")
    }
  }

  test("selectStreamWith(SQL)") {
    person.selectStreamWith("select * from person where id = 1") { it =>
      assert(it.hasNext)
      val r = it.next()
      info(r)
      assert(r == person(1, "leo"))
      assert(it.hasNext == false)
    }
  }

  test("generate case class with xxx__optional column") {
    val result2 = person_opt.select()
    info(s"person_opt.select(): ${result2}")
    assert(result2 == Seq(person_opt(1, Some("leo")), person_opt(2, Some("yui")), person_opt(3, None)))
  }

  test("generate case class with @optional column") {
    val result3 = person_opt2.select()
    info(s"person_opt2.select(): ${result3}")
    assert(result3 == Seq(person_opt2(1, Some("leo")), person_opt2(2, Some("yui")), person_opt2(3, None)))
  }
}
--------------------------------------------------------------------------------
/base/src/main/scala/xerial/sbt/sql/JDBCTypeNameParser.scala:
--------------------------------------------------------------------------------
package xerial.sbt.sql

import java.util.Locale

import wvlet.log.LogSupport

import scala.util.parsing.combinator.RegexParsers

/** Parses JDBC type names (based on Presto's types) and defines the mapping from Presto data types to Scala types.
  *
  * Presto types: https://trino.io/docs/current/language/types.html
  */
object JDBCTypeNameParser extends RegexParsers with LogSupport {

  import xerial.sbt.sql.DataType._

  private def typeName: Parser[String] = "[a-z][a-z ]*".r

  private def number: Parser[Int] =
    "[0-9]*".r ^^ {
      _.toInt
    }

  private def primitiveType: Parser[DataType] =
    typeName ^^ {
      toScalaPrimitiveType(_)
    }

  private def varcharType: Parser[DataType] =
    "varchar" ~ opt("(" ~ number ~ ")") ~ opt("[]") ^^ { case _ ~ _ ~ opt =>
      if (opt.isDefined) {
        // For DuckDB
        ArrayType(StringType)
      } else {
        StringType
      }
    }

  private def decimalType: Parser[DecimalType] =
    "decimal" ~ "(" ~ number ~ "," ~ number ~ ")" ^^ { case _ ~ _ ~ p ~ _ ~ s ~ _ =>
      DecimalType(p, s)
    }

  private def arrayType: Parser[ArrayType] =
    "array" ~ "(" ~ dataType ~ ")" ^^ { case _ ~ _ ~ x ~ _ => ArrayType(x) } |
      // DuckDB uses "[]" instead of "array"
      primitiveType ~ "[]" ^^ { case x ~ _ => ArrayType(x) }

  private def mapType: Parser[DataType] =
    "map" ~ "(" ~ dataType ~ "," ~ dataType ~ ")" ^^ { case _ ~ _ ~ k ~ _ ~ v ~ _ =>
      // Reading map type is not supported in JdbcUtil of Spark, so use String instead
      MapType(k, v)
    }

  private def dataType: Parser[DataType] =
    varcharType | decimalType | arrayType | mapType | primitiveType

  def parseDataType(s: String): Option[DataType] = {
    val input = s.toLowerCase(Locale.US).trim
    parseAll(dataType, input) match {
      case Success(result, next) => Some(result)
      case Error(msg, next) =>
        warn(msg)
        None
      case Failure(msg, next) =>
        warn(msg)
        None
    }
  }

  // TODO Support Embulk JDBC types
  // // See also https://github.com/embulk/embulk-input-jdbc/blob/9ce3e5528a205f86e9c2892dd8a3739f685e07e7/embulk-input-jdbc/src/main/java/org/embulk/input/jdbc/getter/ColumnGetterFactory.java#L92
  // val default : java.sql.JDBCType => DataType = {
  //   case BIT | BOOLEAN => BooleanType
  //
  //   case TINYINT | SMALLINT => IntType
  //   case INTEGER | BIGINT => LongType
  //
  //   case FLOAT | REAL => FloatType
  //   case DOUBLE => DoubleType
  //
  //   case NUMERIC | DECIMAL => StringType // TODO
  //   case CHAR | VARCHAR | LONGVARCHAR | CLOB | NCHAR | NVARCHAR => StringType
  //   case DATE => StringType // TODO
  //   case ARRAY => ArrayType(AnyType) // TODO
  //   case _ => StringType
  // }

  private def toScalaPrimitiveType(typeName: String): DataType = {
    typeName match {
      case "bit" | "boolean" => BooleanType
      case "tinyint" => ByteType
      case "smallint" => ShortType
      case "integer" => IntType
      case "bigint" | "long" => LongType
      case "float" | "real" => FloatType
      case "double" => DoubleType
      case "date" => DateType
      case "json" => StringType
      case "char" => StringType
      case "numeric" | "decimal" => StringType // TODO
      // DuckDB types
      case "utinyint" | "usmallint" | "int1" | "int2" | "int4" | "int" | "signed" => IntType
      case "int8" | "uinteger" => LongType
      // DuckDB uses "hugeint" for 128-bit integers
      case "hugeint" | "int32" | "ubigint" => LongType
      // BigInt should be used, but it is not supported now
      case "int64" | "int128" => LongType
      case "float4" => FloatType
      case "float8" => DoubleType
      case "blob" | "bytea" => BinaryType
      case t if t.startsWith("interval ") => StringType
      case "time" | "time with time zone" =>
        // Return string to be compatible with TD API
        StringType
      case "timestamp" | "timestamp with time zone" =>
        // Return strings since java.sql.timestamp can't hold timezone information
        StringType
      case "varbinary" =>
        BinaryType
      case unknown =>
        // Use StringType for all unknown types
        StringType
    }
  }
}
--------------------------------------------------------------------------------
f.map(_.args).getOrElse(parametersInsideSQLBody), imports, optionals) 90 | } 91 | 92 | def parseFunction(f: String): Function = { 93 | PreambleParser.parse(PreambleParser.function, f) match { 94 | case PreambleParser.Success(matched, _) => matched 95 | case other => throw new IllegalArgumentException(other.toString) 96 | } 97 | } 98 | 99 | object PreambleParser extends JavaTokenParsers { 100 | override def skipWhitespace = true 101 | 102 | def str: Parser[String] = stringLiteral ^^ { x => x.substring(1, x.length - 1) } 103 | def value: Parser[String] = ident | str | longLiteral | decimalNumber | floatingPointNumber 104 | def longLiteral: Parser[String] = """-?[\d_]+L?""".r 105 | def defaultValue: Parser[String] = "=" ~ value ^^ { case _ ~ v => v } 106 | def arg: Parser[FunctionArg] = 107 | ident ~ ":" ~ typeName ~ opt(defaultValue) ^^ { case n ~ _ ~ t ~ opt => FunctionArg(n, t, opt) } 108 | def args: Parser[Seq[FunctionArg]] = 109 | arg ~ rep(',' ~ arg) ^^ { case first ~ rest => Seq(first) ++ rest.map(_._2).toSeq } 110 | 111 | def typeName: Parser[String] = ident | tupleType | genericType 112 | def genericType: Parser[String] = ident ~ "[" ~ typeName ~ rep("," ~ typeName) ~ "]" ^^ { _._2 } 113 | def tupleType: Parser[String] = 114 | "(" ~ typeName ~ rep("," ~ typeName) ~ ")" ^^ { case _ ~ first ~ rest ~ _ => 115 | s"(${(Seq(first) ++ rest.map(_._2).toSeq).mkString(",")})" 116 | } 117 | 118 | def function: Parser[Function] = "@(" ~ args ~ ")" ^^ { case _ ~ args ~ _ => Function(args) } 119 | 120 | def importStmt: Parser[Import] = "@import" ~ classRef ^^ { case _ ~ i => Import(i.toString) } 121 | 122 | def optional: Parser[Optional] = "@optional(" ~ repsep(ident, ',') ~ ")" ^^ { case _ ~ cols ~ _ => Optional(cols) } 123 | 124 | def classRef: Parser[String] = 125 | ident ~ rep('.' ~ ident) ^^ { case h ~ t => 126 | (h :: t.map(_._2)).mkString(".") 127 | } 128 | 129 | def preamble: Parser[Preamble] = function | importStmt | optional 130 | } 131 | 132 | val embeddedParamPattern = """\$\{\s*(\w+)\s*(:\s*(\w+))?\s*(=\s*([^\}]+)\s*)?\}""".r 133 | val embeddedExprPattern = """\$\{([^\}]*)\}""" 134 | 135 | def extractParam(sql: String): Seq[FunctionArg] = { 136 | // TODO remove comment lines 137 | val params = Seq.newBuilder[FunctionArg] 138 | for ((line, lineNum) <- Source.fromString(sql).getLines().zipWithIndex) { 139 | for (m <- embeddedParamPattern.findAllMatchIn(line)) { 140 | val name = m.group(1) 141 | val typeName = Option(m.group(3)) 142 | val defaultValue = Option(m.group(5)) 143 | params += FunctionArg(name, typeName.getOrElse("String"), defaultValue) // , lineNum+1, m.start, m.end) 144 | } 145 | } 146 | // Dedup by preserving orders 147 | val lst = params.result 148 | var seen = Set.empty[String] 149 | val result = for (p <- params.result if !seen.contains(p.name)) yield { 150 | seen += p.name 151 | p 152 | } 153 | result.toSeq 154 | } 155 | 156 | def removeParamType(sql: String): String = { 157 | embeddedParamPattern.replaceAllIn( 158 | sql, 159 | { m: Match => 160 | val name = m.group(1) 161 | "\\${" + name + "}" 162 | } 163 | ) 164 | } 165 | 166 | } 167 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | sbt-sql 2 | ==== 3 | 4 | A sbt plugin for generating model classes from SQL query files in `src/main/sql`. 5 | 6 | ## Why you need sbt-sql? 
7 | 
8 | - Integrate the power of SQL and Scala
9 |   - When you write an SQL query, sbt-sql generates a Scala class for reading its results.
10 | - Type safety
11 |   - No need to hand-write code like `ResultSet.getColumn("id")`.
12 |   - Editors such as IntelliJ can show the names and types of the SQL result columns.
13 |   - For example, if you rename a column in SQL from `id` to `ID`, code that still uses `id` is reported as a compilation error. Without sbt-sql, this would surface only at run-time as an exception such as `Unknown column "id"`!
14 | - Reuse your SQL as a template
15 |   - You can embed parameters in your SQL and populate them through automatically generated Scala functions.
16 | 
17 | ## Usage
18 | 
19 | sbt-sql version: [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.xerial.sbt/sbt-sql/badge.svg)](https://maven-badges.herokuapp.com/maven-central/org.xerial.sbt/sbt-sql) Airframe version: [![wvlet/airframe](https://maven-badges.herokuapp.com/maven-central/org.wvlet.airframe/airframe-codec_2.13/badge.svg)](https://maven-badges.herokuapp.com/maven-central/org.wvlet.airframe/airframe-codec_2.13)
20 | 
21 | sbt-sql requires sbt 1.8.x or higher.
22 | 
23 | **project/plugins.sbt**
24 | ```scala
25 | // For Trino
26 | addSbtPlugin("org.xerial.sbt" % "sbt-sql-trino" % "(version)")
27 | 
28 | // For DuckDB
29 | addSbtPlugin("org.xerial.sbt" % "sbt-sql-duckdb" % "(version)")
30 | 
31 | // For SQLite (available since 0.7.0)
32 | addSbtPlugin("org.xerial.sbt" % "sbt-sql-sqlite" % "(version)")
33 | 
34 | // For Treasure Data Presto
35 | addSbtPlugin("org.xerial.sbt" % "sbt-sql-td" % "(version)")
36 | 
37 | // For generic JDBC drivers
38 | addSbtPlugin("org.xerial.sbt" % "sbt-sql" % "(version)")
39 | // Add your JDBC driver dependency for checking the result schema
40 | libraryDependencies ++= Seq(
41 |   // Add airframe-codec for mapping JDBC data to Scala objects
42 |   "org.wvlet.airframe" %% "airframe-codec" % "(airframe version)"
43 |   // Add your jdbc driver here
44 | )
45 | ```
46 | 
47 | **build.sbt**
48 | 
49 | This is an example of using a custom JDBC driver:
50 | 
51 | ```scala
52 | enablePlugins(SbtSQLJDBC)
53 | 
54 | // Add your JDBC driver to the dependencies.
55 | // For example, using trino-jdbc:
56 | libraryDependencies ++= Seq(
57 |   "org.wvlet.airframe" %% "airframe-codec" % "(airframe version)", // Necessary for mapping JDBC ResultSets to model classes
58 |   "io.trino" % "trino-jdbc" % "332"
59 | )
60 | 
61 | // You can change the SQL file folder. The default is src/main/sql
62 | // sqlDir := (sourceDirectory in Compile).value / "sql"
63 | 
64 | // Configure your JDBC driver (e.g., using Trino JDBC)
65 | jdbcDriver := "io.trino.jdbc.TrinoDriver"
66 | jdbcURL := "(jdbc url e.g., jdbc:trino://.... )"
67 | jdbcUser := "(jdbc user name)"
68 | jdbcPassword := "(jdbc password)"
69 | ```
70 | 
71 | ### sbt-sql-sqlite
72 | 
73 | The `sbt-sql-sqlite` plugin uses `src/main/sql/sqlite` as the SQL file directory. Configure the `jdbcURL` property:
74 | ```scala
75 | enablePlugins(SbtSQLSQLite)
76 | 
77 | jdbcURL := "jdbc:sqlite:(sqlite db file path)"
78 | ```
79 | 
80 | ### sbt-sql-duckdb
81 | 
82 | The `sbt-sql-duckdb` plugin uses `src/main/sql/duckdb` as the SQL file directory.
83 | 
84 | ```scala
85 | enablePlugins(SbtSQLDuckDB)
86 | 
87 | // [optional]
88 | jdbcURL := "jdbc:duckdb:(duckdb file path)"
89 | ```
90 | 
91 | 
92 | ### sbt-sql-trino
93 | 
94 | The `sbt-sql-trino` plugin uses `src/main/sql/trino` as the SQL file directory. Configure the `jdbcURL` and `jdbcUser` properties:
95 | 
96 | ```scala
97 | enablePlugins(SbtSQLTrino)
98 | 
99 | jdbcURL := "jdbc:trino://(your trino server address):443/(catalog name)"
100 | jdbcUser := "(trino user name)"
101 | ```
102 | 
103 | ### sbt-sql-td (Treasure Data)
104 | 
105 | To use [Treasure Data](http://www.treasuredata.com/), set the TD_API_KEY environment variable; `jdbcUser` will be set to this value. `src/main/sql/trino` is used as the SQL file directory.
106 | 
107 | Alternatively, you can set TD_API_KEY in your sbt credentials:
108 | 
109 | **$HOME/.sbt/1.0/td.sbt**
110 | ```
111 | credentials +=
112 |   Credentials("Treasure Data", "api-presto.treasuredata.com", "(your TD API KEY)", "")
113 | ```
114 | 
115 | ```scala
116 | enablePlugins(SbtSQLTreasureData)
117 | ```
118 | 
119 | ## Writing SQL
120 | 
121 | **src/main/sql/trino/sample/nasdaq.sql**
122 | ```sql
123 | @(start:Long, end:Long)
124 | select * from sample_datasets.nasdaq
125 | where time between ${start} and ${end}
126 | ```
127 | 
128 | From this SQL file, sbt-sql generates Scala model classes and several utility methods.
129 | 
130 | * An SQL file can contain template variables `${(Scala expression)}`.
131 |   To define user input variables, use `@(name:type, ...)`. sbt-sql generates a function for populating them, such as `nasdaq.select(start = xxxxx, end = yyyyy)`. A variable can have a default value, e.g., `@(x:String="hello")`.
132 | 
133 | ### Template Variable Examples
134 | 
135 | - Embed a String value
136 | ```sql
137 | @(symbol:String)
138 | select * from sample_datasets.nasdaq
139 | where symbol = '${symbol}'
140 | ```
141 | 
142 | - Embed an input table name as a variable with the default value `sample_datasets.nasdaq`:
143 | ```sql
144 | @(table:SQL="sample_datasets.nasdaq")
145 | select * from ${table}
146 | ```
147 | The `SQL` type can be used for embedding an SQL expression as a String.
148 | 
149 | ### Import statement
150 | 
151 | You can use your own types for populating SQL templates by importing the corresponding classes as follows:
152 | ```
153 | @import your.own.class
154 | ```
155 | 
156 | ### Using Option[X] types
157 | 
158 | To generate case classes with Option[X] parameters, add the `__optional` suffix to the target column names, or list the columns in an `@optional(...)` preamble.
159 | 
160 | ```sql
161 | select a as a__optional -- Option[X] will be used
162 | from ...
163 | ```
164 | 
165 | ```sql
166 | @optional(a, b)
167 | select a, b, c -- Option[A], Option[B], C
168 | ```
169 | 
170 | ### Generated Files
171 | **target/src_managed/main/sample/nasdaq.scala**
172 | ```scala
173 | package sample
174 | 
175 | object nasdaq {
176 |   def path : String = "/sample/nasdaq.sql"
177 | 
178 |   def sql(start:Long, end:Long) : String = {
179 |     s"""select * from sample_datasets.nasdaq
180 |     where time between ${start} and ${end}
181 |     """
182 |   }
183 | 
184 |   def select(start:Long, end:Long)(implicit conn:java.sql.Connection): Seq[nasdaq] = ...
185 |   def selectWith(sql:String)(implicit conn:java.sql.Connection): Seq[nasdaq] = ...
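
  // Example usage (hypothetical values; requires an implicit java.sql.Connection in scope):
  //   implicit val conn: java.sql.Connection =
  //     java.sql.DriverManager.getConnection("jdbc:trino://host:443/catalog", "user", null)
  //   val rows: Seq[nasdaq] = nasdaq.select(start = 1483228800L, end = 1514764800L)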
186 | } 187 | 188 | case class nasdaq( 189 | symbol: String, 190 | open: Double, 191 | volume: Long, 192 | high: Double, 193 | low: Double, 194 | close: Double, 195 | time: Long 196 | ) 197 | ``` 198 | 199 | -------------------------------------------------------------------------------- /base/src/main/scala/xerial/sbt/sql/SQLTemplateCompiler.scala: -------------------------------------------------------------------------------- 1 | package xerial.sbt.sql 2 | 3 | import wvlet.airframe.surface.{Surface, Zero} 4 | import wvlet.airframe.surface.reflect.{ReflectSurfaceFactory, ReflectTypeUtil} 5 | import wvlet.log.LogSupport 6 | 7 | import scala.util.{Failure, Success, Try} 8 | 9 | /** */ 10 | object SQLTemplateCompiler extends LogSupport { 11 | 12 | private def defaultValueFor(typeName: String): Any = 13 | typeName match { 14 | case "SQL" | "sql" => "" 15 | case "String" => "dummy" 16 | case "Int" => 0 17 | case "Long" => 0L 18 | case "Float" => 0.0f 19 | case "Double" => 0.0 20 | case "Boolean" => true 21 | case tuple if typeName.startsWith("(") => 22 | val a = tuple.trim.substring(1, tuple.length - 1).split(",") 23 | val e = a.map(x => defaultValueFor(x)) 24 | // TODO proper parsing of tuple types 25 | e.length match { 26 | case 1 => (e(0)) 27 | case 2 => (e(0), e(1)) 28 | case 3 => (e(0), e(1), e(2)) 29 | case 4 => (e(0), e(1), e(2), e(3)) 30 | case 5 => (e(0), e(1), e(2), e(3), e(4)) 31 | case _ => null 32 | } 33 | case _ => 34 | Try(Zero.zeroOf(ReflectSurfaceFactory.ofClass(Class.forName(typeName)))).toOption.getOrElse(null) 35 | } 36 | 37 | def compile(sqlTemplate: String): SQLTemplate = { 38 | val parsed = SQLTemplateParser.parse(sqlTemplate) 39 | val params = parsed.args 40 | val imports = parsed.imports.map(x => s"import ${x.target}").mkString("\n") 41 | 42 | val (defaultParams, otherParams) = params.partition(_.defaultValue.isDefined) 43 | 44 | val methodArgs = otherParams 45 | .map { x => 46 | x.defaultValue match { 47 | case Some(v) => s"${x.name}:${x.functionArgType}=${v}" 48 | case None => s"${x.name}:${x.functionArgType}" 49 | } 50 | }.mkString(", ") 51 | val functionArgs = { 52 | val paramLength = otherParams.length 53 | val a = (otherParams.map { p => s"${p.functionArgType}" }).mkString(", ") 54 | if (paramLength == 0) 55 | "()" 56 | else if (paramLength > 1 || (paramLength == 1 && a.startsWith("("))) // tuple type only 57 | s"($a)" 58 | else 59 | a 60 | } 61 | val valDefs = defaultParams 62 | .map { x => 63 | s" val ${x.name} = ${x.quotedValue}" 64 | }.mkString("\n") 65 | 66 | val sqlCode = "s\"\"\"" + parsed.sql + "\"\"\"" 67 | val funDef = 68 | s"""$imports 69 | |new (${functionArgs} => String) { 70 | | def apply(${methodArgs}): String = { 71 | |$valDefs 72 | |$sqlCode 73 | | } 74 | |} 75 | | 76 | """.stripMargin 77 | debug(s"function def:\n${funDef}") 78 | 79 | import scala.reflect.runtime.currentMirror 80 | import scala.tools.reflect.ToolBox 81 | val toolBox = currentMirror.mkToolBox() 82 | val code = Try(toolBox.eval(toolBox.parse(funDef))) match { 83 | case Success(c) => c 84 | case Failure(f) => 85 | error(s"Failed to compile code:\n${funDef}") 86 | throw f 87 | } 88 | 89 | val p = otherParams.map(x => defaultValueFor(x.typeName)).toIndexedSeq 90 | debug(s"function args:${p.mkString(", ")}") 91 | 92 | val populatedSQL: String = otherParams.length match { 93 | case 0 => 94 | code.asInstanceOf[Function0[String]].apply() 95 | case 1 => 96 | code.asInstanceOf[Function1[Any, String]].apply(p(0)) 97 | case 2 => 98 | code.asInstanceOf[Function2[Any, Any, String]].apply(p(0), 
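            // (p holds placeholder values from defaultValueFor; the rendered SQL is used only for schema checking)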
p(1)) 99 | case 3 => 100 | code.asInstanceOf[Function3[Any, Any, Any, String]].apply(p(0), p(1), p(2)) 101 | case 4 => 102 | code.asInstanceOf[Function4[Any, Any, Any, Any, String]].apply(p(0), p(1), p(2), p(3)) 103 | case 5 => 104 | code.asInstanceOf[Function5[Any, Any, Any, Any, Any, String]].apply(p(0), p(1), p(2), p(3), p(4)) 105 | case 6 => 106 | code.asInstanceOf[Function6[Any, Any, Any, Any, Any, Any, String]].apply(p(0), p(1), p(2), p(3), p(4), p(5)) 107 | case 7 => 108 | code 109 | .asInstanceOf[Function7[Any, Any, Any, Any, Any, Any, Any, String]].apply( 110 | p(0), 111 | p(1), 112 | p(2), 113 | p(3), 114 | p(4), 115 | p(5), 116 | p(6) 117 | ) 118 | case 8 => 119 | code 120 | .asInstanceOf[Function8[Any, Any, Any, Any, Any, Any, Any, Any, String]].apply( 121 | p(0), 122 | p(1), 123 | p(2), 124 | p(3), 125 | p(4), 126 | p(5), 127 | p(6), 128 | p(7) 129 | ) 130 | case 9 => 131 | code 132 | .asInstanceOf[Function9[Any, Any, Any, Any, Any, Any, Any, Any, Any, String]].apply( 133 | p(0), 134 | p(1), 135 | p(2), 136 | p(3), 137 | p(4), 138 | p(5), 139 | p(6), 140 | p(7), 141 | p(8) 142 | ) 143 | case 10 => 144 | code 145 | .asInstanceOf[Function10[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, String]].apply( 146 | p(0), 147 | p(1), 148 | p(2), 149 | p(3), 150 | p(4), 151 | p(5), 152 | p(6), 153 | p(7), 154 | p(8), 155 | p(9) 156 | ) 157 | case 11 => 158 | code 159 | .asInstanceOf[Function11[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, String]].apply( 160 | p(0), 161 | p(1), 162 | p(2), 163 | p(3), 164 | p(4), 165 | p(5), 166 | p(6), 167 | p(7), 168 | p(8), 169 | p(9), 170 | p(10) 171 | ) 172 | case 12 => 173 | code 174 | .asInstanceOf[Function12[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, String]].apply( 175 | p(0), 176 | p(1), 177 | p(2), 178 | p(3), 179 | p(4), 180 | p(5), 181 | p(6), 182 | p(7), 183 | p(8), 184 | p(9), 185 | p(10), 186 | p(11) 187 | ) 188 | case 13 => 189 | code 190 | .asInstanceOf[Function13[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, String]].apply( 191 | p(0), 192 | p(1), 193 | p(2), 194 | p(3), 195 | p(4), 196 | p(5), 197 | p(6), 198 | p(7), 199 | p(8), 200 | p(9), 201 | p(10), 202 | p(11), 203 | p(12) 204 | ) 205 | case 14 => 206 | code 207 | .asInstanceOf[Function14[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, String]].apply( 208 | p(0), 209 | p(1), 210 | p(2), 211 | p(3), 212 | p(4), 213 | p(5), 214 | p(6), 215 | p(7), 216 | p(8), 217 | p(9), 218 | p(10), 219 | p(11), 220 | p(12), 221 | p(13) 222 | ) 223 | case 15 => 224 | code 225 | .asInstanceOf[ 226 | Function15[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, String] 227 | ].apply(p(0), p(1), p(2), p(3), p(4), p(5), p(6), p(7), p(8), p(9), p(10), p(11), p(12), p(13), p(14)) 228 | case other => 229 | warn(s"Too many parameters in SQL template:\n${sqlTemplate}") 230 | parsed.sql 231 | } 232 | 233 | debug(s"populated SQL:\n${populatedSQL}") 234 | 235 | new SQLTemplate( 236 | sql = parsed.sql, 237 | populated = populatedSQL, 238 | params = params, 239 | imports = parsed.imports, 240 | optionals = parsed.optionals 241 | ) 242 | } 243 | } 244 | -------------------------------------------------------------------------------- /base/src/main/scala/xerial/sbt/sql/SQLModelClassGenerator.scala: -------------------------------------------------------------------------------- 1 | package xerial.sbt.sql 2 | 3 | import java.sql.JDBCType 4 | import java.util.Properties 5 | 6 | import sbt.{File, IO, _} 7 | import 
wvlet.log.LogSupport
8 | import xerial.sbt.sql.DataType.StringType
9 | 
10 | import scala.util.{Failure, Success, Try}
11 | import java.sql.ResultSetMetaData
12 | 
13 | case class Schema(columns: Seq[Column])
14 | 
15 | case class Column(
16 |     qname: String,
17 |     reader: DataType,
18 |     sqlType: java.sql.JDBCType,
19 |     isNullable: Boolean,
20 |     elementType: java.sql.JDBCType = JDBCType.NULL
21 | )
22 | 
23 | case class GeneratorConfig(sqlDir: File, targetDir: File)
24 | 
25 | object SQLModelClassGenerator extends LogSupport {
26 | 
27 |   private lazy val buildProps = {
28 |     val p  = new Properties()
29 |     val in = this.getClass.getResourceAsStream("/org/xerial/sbt/sbt-sql/build.properties")
30 |     if (in != null) {
31 |       try {
32 |         p.load(in)
33 |       } finally {
34 |         in.close()
35 |       }
36 |     } else {
37 |       warn("build.properties file not found")
38 |     }
39 |     p
40 |   }
41 | 
42 |   lazy val getBuildTime: Long = {
43 |     buildProps.getProperty("build_time", System.currentTimeMillis().toString).toLong
44 |   }
45 |   lazy val getVersion: String = {
46 |     buildProps.getProperty("version", "unknown")
47 |   }
48 | 
49 |   case class JDBCResultColumn(name: String, typeName: String, typeId: Int, isNullable: Boolean)
50 | 
51 |   private[sql] def generateSchema(columns: Seq[JDBCResultColumn], optionalColumns: Set[String]): Schema = {
52 |     val colTypes = columns.map { c =>
53 |       val tpe              = JDBCType.valueOf(c.typeId)
54 |       val typeName         = c.typeName
55 |       val originalDataType = JDBCTypeNameParser.parseDataType(typeName).getOrElse(StringType)
56 |       val (colName, dataType) = if (c.name.endsWith("__optional") || optionalColumns.contains(c.name)) {
57 |         (c.name.stripSuffix("__optional"), DataType.OptionType(originalDataType))
58 |       } else {
59 |         (c.name, originalDataType)
60 |       }
61 |       val qname = colName match {
62 |         // Scala's reserved keywords
63 |         case "type" => "`type`"
64 |         case other  => other
65 |       }
66 |       Column(qname, dataType, tpe, c.isNullable)
67 |     }
68 |     Schema(colTypes.toIndexedSeq)
69 |   }
70 | }
71 | 
72 | class SQLModelClassGenerator(jdbcConfig: JDBCConfig) extends LogSupport {
73 |   import SQLModelClassGenerator._
74 | 
75 |   wvlet.log.Logger.init
76 | 
77 |   private val db = new JDBCClient(jdbcConfig)
78 | 
79 |   private def wrapWithLimit0(sql: String) = {
80 |     s"""-- sbt-sql version:${SQLModelClassGenerator.getVersion}
81 |        |SELECT * FROM (
82 |        |${sql.trim}
83 |        |) LIMIT 0""".stripMargin
84 |   }
85 | 
86 |   def checkResultSchema(sql: String, optionalParams: Set[String]): Schema = {
87 |     db.withConnection { conn =>
88 |       db.submitQuery(conn, sql) { rs =>
89 |         val m = rs.getMetaData
90 |         val cols = (1 to m.getColumnCount).map { i =>
91 |           JDBCResultColumn(
92 |             m.getColumnName(i),
93 |             m.getColumnTypeName(i),
94 |             m.getColumnType(i),
95 |             m.isNullable(i) == ResultSetMetaData.columnNullable
96 |           )
97 |         }
98 |         // Ensure materializing cols before closing the connection
99 |         generateSchema(cols.toIndexedSeq, optionalParams)
100 |       }
101 |     }
102 |   }
103 | 
104 |   def generate(config: GeneratorConfig): Seq[File] = {
105 |     // Submit queries using multiple threads to minimize the total waiting time
106 |     val result    = Seq.newBuilder[File]
107 |     val buildTime = SQLModelClassGenerator.getBuildTime
108 |     debug(s"SQLModelClassGenerator version:${SQLModelClassGenerator.getVersion}")
109 | 
110 |     val baseDir = file(".")
111 | 
112 |     for (sqlFile <- (config.sqlDir ** "*.sql").get.par) {
113 |       val path            = sqlFile.relativeTo(config.sqlDir).get.getPath
114 |       val targetClassFile = config.targetDir / path.replaceAll("\\.sql$", ".scala")
115 | 
116 |       val sqlFilePath = sqlFile.relativeTo(baseDir).getOrElse(sqlFile)
117 |       debug(s"Processing ${sqlFilePath}")
118 |       val latestTimestamp = Math.max(sqlFile.lastModified(), buildTime)
119 |       if (
120 |         // Up-to-date check: skip regeneration when the generated class is newer than both the SQL file and this plugin's build
121 |         targetClassFile.exists()
122 |         && latestTimestamp <= targetClassFile.lastModified()
123 |       ) {
124 |         debug(s"${targetClassFile.relativeTo(config.targetDir).getOrElse(targetClassFile)} is up-to-date")
125 |       } else {
126 |         val sql = IO.read(sqlFile)
127 |         Try(SQLTemplate(sql)) match {
128 |           case Success(template) =>
129 |             val limit0 = wrapWithLimit0(template.populated)
130 |             info(s"Checking the SQL result schema of ${sqlFilePath}")
131 |             val schema = checkResultSchema(limit0, template.optionalParams)
132 | 
133 |             // Write the SQL template without type annotations
134 |             val scalaCode = schemaToClass(sqlFile, config.sqlDir, schema, template)
135 |             info(s"Generating model class: ${targetClassFile}")
136 |             info(s"${scalaCode}")
137 |             IO.write(targetClassFile, scalaCode)
138 |             targetClassFile.setLastModified(latestTimestamp)
139 |           case Failure(e) =>
140 |             error(s"Failed to parse ${sqlFile}: ${e.getMessage}")
141 |             throw e
142 |         }
143 |       }
144 | 
145 |       synchronized {
146 |         result += targetClassFile
147 |       }
148 |     }
149 |     result.result()
150 |   }
151 | 
152 |   def schemaToParamDef(schema: Schema) = {
153 |     schema.columns.map { c =>
154 |       s"${c.qname}: ${c.reader.name}"
155 |     }
156 |   }
157 | 
158 |   def schemaToPackerCode(schema: Schema, packerName: String = "packer") = {
159 |     for (c <- schema.columns) {
160 |       s"${packerName}.packXXX(${c.qname})"
161 |     }
162 |   }
163 | 
164 |   def schemaToClass(origFile: File, baseDir: File, schema: Schema, sqlTemplate: SQLTemplate): String = {
165 |     val packageName = origFile
166 |       .relativeTo(baseDir).map { f =>
167 |         Option(f.getParent).map(_.replaceAll("""[\\/]""", ".")).getOrElse("")
168 |       }.getOrElse("")
169 |     val name = origFile.getName.replaceAll("\\.sql$", "")
170 | 
171 |     val params = schemaToParamDef(schema)
172 | 
173 |     val sqlTemplateArgs = sqlTemplate.params.map { p =>
174 |       p.defaultValue match {
175 |         case None    => s"${p.name}:${p.functionArgType}"
176 |         case Some(v) => s"${p.name}:${p.functionArgType} = ${p.quotedValue}"
177 |       }
178 |     }
179 |     val sqlArgList = sqlTemplateArgs.mkString(", ")
180 |     val paramNames = sqlTemplate.params.map(_.name)
181 | 
182 |     val additionalImports = sqlTemplate.imports.map(x => s"import ${x.target}").mkString("\n")
183 |     val embeddedSQL       = "\"\"\"" + sqlTemplate.sql + "\"\"\""
184 | 
185 |     val packageLine = if (packageName.isEmpty) {
186 |       ""
187 |     } else {
188 |       s"package ${packageName}"
189 |     }
190 |     val code =
191 |       s"""/**
192 |          | * DO NOT EDIT THIS FILE.
This file is generated by sbt-sql 193 | | */ 194 | |${packageLine} 195 | | 196 | |${additionalImports} 197 | | 198 | |object ${name} extends wvlet.log.LogSupport { 199 | | private lazy val codec = wvlet.airframe.codec.MessageCodec.of[${name}] 200 | | 201 | | def path : String = "/${packageName.replaceAll("\\.", "/")}/${name}.sql" 202 | | 203 | | def sql(${sqlArgList}) : String = { 204 | | s${embeddedSQL} 205 | | } 206 | | 207 | | def select(${sqlArgList})(implicit conn:java.sql.Connection): Seq[${name}] = { 208 | | selectWith(sql(${paramNames.mkString(", ")}))(conn) 209 | | } 210 | | 211 | | def selectWith(sql:String)(implicit conn:java.sql.Connection) : Seq[${name}] = { 212 | | selectStreamWith(sql:String){ it => 213 | | it.toIndexedSeq 214 | | }(conn) 215 | | } 216 | | 217 | | def selectStream[R](${sqlArgList}) 218 | | (streamReader: scala.collection.Iterator[${name}] => R) 219 | | (implicit conn:java.sql.Connection) : R = { 220 | | selectStreamWith(sql(${paramNames.mkString(", ")}))(streamReader)(conn) 221 | | } 222 | | 223 | | def selectStreamWith[R](sql:String) 224 | | (streamReader: scala.collection.Iterator[${name}] => R) 225 | | (implicit conn:java.sql.Connection) : R = { 226 | | withResource(conn.createStatement()) { stmt => 227 | | debug(s"Executing query:\\n$${sql}") 228 | | withResource(stmt.executeQuery(sql)) { rs => 229 | | val jdbcCodec = wvlet.airframe.codec.JDBCCodec(rs) 230 | | val it = jdbcCodec.mapMsgPackArrayRows{ msgpack => 231 | | codec.fromMsgPack(msgpack) 232 | | } 233 | | streamReader(it.toIterator) 234 | | } 235 | | } 236 | | } 237 | | 238 | | private def withResource[R <: AutoCloseable, U](resource: R)(body: R => U): U = { 239 | | try { 240 | | body(resource) 241 | | } finally { 242 | | if (resource != null) { 243 | | resource.close() 244 | | } 245 | | } 246 | | } 247 | |} 248 | | 249 | |case class ${name}( 250 | | ${params.mkString(",\n ")} 251 | |) 252 | |""".stripMargin 253 | 254 | code 255 | } 256 | 257 | } 258 | -------------------------------------------------------------------------------- /sbt: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # A more capable sbt runner, coincidentally also called sbt. 4 | # Author: Paul Phillips 5 | # https://github.com/paulp/sbt-extras 6 | # 7 | # Generated from http://www.opensource.org/licenses/bsd-license.php 8 | # Copyright (c) 2011, Paul Phillips. All rights reserved. 9 | # 10 | # Redistribution and use in source and binary forms, with or without 11 | # modification, are permitted provided that the following conditions are 12 | # met: 13 | # 14 | # * Redistributions of source code must retain the above copyright 15 | # notice, this list of conditions and the following disclaimer. 16 | # * Redistributions in binary form must reproduce the above copyright 17 | # notice, this list of conditions and the following disclaimer in the 18 | # documentation and/or other materials provided with the distribution. 19 | # * Neither the name of the author nor the names of its contributors 20 | # may be used to endorse or promote products derived from this software 21 | # without specific prior written permission. 22 | # 23 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 24 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 25 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 26 | # A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT 27 | # HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 28 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED 29 | # TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 30 | # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF 31 | # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 32 | # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 33 | # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 34 | 35 | set -o pipefail 36 | 37 | declare -r sbt_release_version="1.5.5" 38 | declare -r sbt_unreleased_version="1.6.0-M1" 39 | 40 | declare -r latest_213="2.13.7" 41 | declare -r latest_212="2.12.15" 42 | declare -r latest_211="2.11.12" 43 | declare -r latest_210="2.10.7" 44 | declare -r latest_29="2.9.3" 45 | declare -r latest_28="2.8.2" 46 | 47 | declare -r buildProps="project/build.properties" 48 | 49 | declare -r sbt_launch_ivy_release_repo="https://repo.typesafe.com/typesafe/ivy-releases" 50 | declare -r sbt_launch_ivy_snapshot_repo="https://repo.scala-sbt.org/scalasbt/ivy-snapshots" 51 | declare -r sbt_launch_mvn_release_repo="https://repo1.maven.org/maven2" 52 | declare -r sbt_launch_mvn_snapshot_repo="https://repo.scala-sbt.org/scalasbt/maven-snapshots" 53 | 54 | declare -r default_jvm_opts_common="-Xms512m -Xss2m -XX:MaxInlineLevel=18" 55 | declare -r noshare_opts="-Dsbt.global.base=project/.sbtboot -Dsbt.boot.directory=project/.boot -Dsbt.ivy.home=project/.ivy -Dsbt.coursier.home=project/.coursier" 56 | 57 | declare sbt_jar sbt_dir sbt_create sbt_version sbt_script sbt_new 58 | declare sbt_explicit_version 59 | declare verbose noshare batch trace_level 60 | 61 | declare java_cmd="java" 62 | declare sbt_launch_dir="$HOME/.sbt/launchers" 63 | declare sbt_launch_repo 64 | 65 | # pull -J and -D options to give to java. 66 | declare -a java_args scalac_args sbt_commands residual_args 67 | 68 | # args to jvm/sbt via files or environment variables 69 | declare -a extra_jvm_opts extra_sbt_opts 70 | 71 | echoerr() { echo >&2 "$@"; } 72 | vlog() { [[ -n "$verbose" ]] && echoerr "$@"; } 73 | die() { 74 | echo "Aborting: $*" 75 | exit 1 76 | } 77 | 78 | setTrapExit() { 79 | # save stty and trap exit, to ensure echo is re-enabled if we are interrupted. 80 | SBT_STTY="$(stty -g 2>/dev/null)" 81 | export SBT_STTY 82 | 83 | # restore stty settings (echo in particular) 84 | onSbtRunnerExit() { 85 | [ -t 0 ] || return 86 | vlog "" 87 | vlog "restoring stty: $SBT_STTY" 88 | stty "$SBT_STTY" 89 | } 90 | 91 | vlog "saving stty: $SBT_STTY" 92 | trap onSbtRunnerExit EXIT 93 | } 94 | 95 | # this seems to cover the bases on OSX, and someone will 96 | # have to tell me about the others. 97 | get_script_path() { 98 | local path="$1" 99 | [[ -L "$path" ]] || { 100 | echo "$path" 101 | return 102 | } 103 | 104 | local -r target="$(readlink "$path")" 105 | if [[ "${target:0:1}" == "/" ]]; then 106 | echo "$target" 107 | else 108 | echo "${path%/*}/$target" 109 | fi 110 | } 111 | 112 | script_path="$(get_script_path "${BASH_SOURCE[0]}")" 113 | declare -r script_path 114 | script_name="${script_path##*/}" 115 | declare -r script_name 116 | 117 | init_default_option_file() { 118 | local overriding_var="${!1}" 119 | local default_file="$2" 120 | if [[ ! 
-r "$default_file" && "$overriding_var" =~ ^@(.*)$ ]]; then 121 | local envvar_file="${BASH_REMATCH[1]}" 122 | if [[ -r "$envvar_file" ]]; then 123 | default_file="$envvar_file" 124 | fi 125 | fi 126 | echo "$default_file" 127 | } 128 | 129 | sbt_opts_file="$(init_default_option_file SBT_OPTS .sbtopts)" 130 | sbtx_opts_file="$(init_default_option_file SBTX_OPTS .sbtxopts)" 131 | jvm_opts_file="$(init_default_option_file JVM_OPTS .jvmopts)" 132 | 133 | build_props_sbt() { 134 | [[ -r "$buildProps" ]] && 135 | grep '^sbt\.version' "$buildProps" | tr '=\r' ' ' | awk '{ print $2; }' 136 | } 137 | 138 | set_sbt_version() { 139 | sbt_version="${sbt_explicit_version:-$(build_props_sbt)}" 140 | [[ -n "$sbt_version" ]] || sbt_version=$sbt_release_version 141 | export sbt_version 142 | } 143 | 144 | url_base() { 145 | local version="$1" 146 | 147 | case "$version" in 148 | 0.7.*) echo "https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/simple-build-tool" ;; 149 | 0.10.*) echo "$sbt_launch_ivy_release_repo" ;; 150 | 0.11.[12]) echo "$sbt_launch_ivy_release_repo" ;; 151 | 0.*-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]) # ie "*-yyyymmdd-hhMMss" 152 | echo "$sbt_launch_ivy_snapshot_repo" ;; 153 | 0.*) echo "$sbt_launch_ivy_release_repo" ;; 154 | *-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]T[0-9][0-9][0-9][0-9][0-9][0-9]) # ie "*-yyyymmddThhMMss" 155 | echo "$sbt_launch_mvn_snapshot_repo" ;; 156 | *) echo "$sbt_launch_mvn_release_repo" ;; 157 | esac 158 | } 159 | 160 | make_url() { 161 | local version="$1" 162 | 163 | local base="${sbt_launch_repo:-$(url_base "$version")}" 164 | 165 | case "$version" in 166 | 0.7.*) echo "$base/sbt-launch-0.7.7.jar" ;; 167 | 0.10.*) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; 168 | 0.11.[12]) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; 169 | 0.*) echo "$base/org.scala-sbt/sbt-launch/$version/sbt-launch.jar" ;; 170 | *) echo "$base/org/scala-sbt/sbt-launch/$version/sbt-launch-${version}.jar" ;; 171 | esac 172 | } 173 | 174 | addJava() { 175 | vlog "[addJava] arg = '$1'" 176 | java_args+=("$1") 177 | } 178 | addSbt() { 179 | vlog "[addSbt] arg = '$1'" 180 | sbt_commands+=("$1") 181 | } 182 | addScalac() { 183 | vlog "[addScalac] arg = '$1'" 184 | scalac_args+=("$1") 185 | } 186 | addResidual() { 187 | vlog "[residual] arg = '$1'" 188 | residual_args+=("$1") 189 | } 190 | 191 | addResolver() { addSbt "set resolvers += $1"; } 192 | 193 | addDebugger() { addJava "-Xdebug" && addJava "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=$1"; } 194 | 195 | setThisBuild() { 196 | vlog "[addBuild] args = '$*'" 197 | local key="$1" && shift 198 | addSbt "set $key in ThisBuild := $*" 199 | } 200 | setScalaVersion() { 201 | [[ "$1" == *"-SNAPSHOT" ]] && addResolver 'Resolver.sonatypeRepo("snapshots")' 202 | addSbt "++ $1" 203 | } 204 | setJavaHome() { 205 | java_cmd="$1/bin/java" 206 | setThisBuild javaHome "_root_.scala.Some(file(\"$1\"))" 207 | export JAVA_HOME="$1" 208 | export JDK_HOME="$1" 209 | export PATH="$JAVA_HOME/bin:$PATH" 210 | } 211 | 212 | getJavaVersion() { 213 | local -r str=$("$1" -version 2>&1 | grep -E -e '(java|openjdk) version' | awk '{ print $3 }' | tr -d '"') 214 | 215 | # java -version on java8 says 1.8.x 216 | # but on 9 and 10 it's 9.x.y and 10.x.y. 
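  # e.g. "1.8.0_292" -> 8, "11.0.11" -> 11, "17-ea" -> 17 (illustrative version strings)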
217 | if [[ "$str" =~ ^1\.([0-9]+)(\..*)?$ ]]; then 218 | echo "${BASH_REMATCH[1]}" 219 | # Fixes https://github.com/dwijnand/sbt-extras/issues/326 220 | elif [[ "$str" =~ ^([0-9]+)(\..*)?(-ea)?$ ]]; then 221 | echo "${BASH_REMATCH[1]}" 222 | elif [[ -n "$str" ]]; then 223 | echoerr "Can't parse java version from: $str" 224 | fi 225 | } 226 | 227 | checkJava() { 228 | # Warn if there is a Java version mismatch between PATH and JAVA_HOME/JDK_HOME 229 | 230 | [[ -n "$JAVA_HOME" && -e "$JAVA_HOME/bin/java" ]] && java="$JAVA_HOME/bin/java" 231 | [[ -n "$JDK_HOME" && -e "$JDK_HOME/lib/tools.jar" ]] && java="$JDK_HOME/bin/java" 232 | 233 | if [[ -n "$java" ]]; then 234 | pathJavaVersion=$(getJavaVersion java) 235 | homeJavaVersion=$(getJavaVersion "$java") 236 | if [[ "$pathJavaVersion" != "$homeJavaVersion" ]]; then 237 | echoerr "Warning: Java version mismatch between PATH and JAVA_HOME/JDK_HOME, sbt will use the one in PATH" 238 | echoerr " Either: fix your PATH, remove JAVA_HOME/JDK_HOME or use -java-home" 239 | echoerr " java version from PATH: $pathJavaVersion" 240 | echoerr " java version from JAVA_HOME/JDK_HOME: $homeJavaVersion" 241 | fi 242 | fi 243 | } 244 | 245 | java_version() { 246 | local -r version=$(getJavaVersion "$java_cmd") 247 | vlog "Detected Java version: $version" 248 | echo "$version" 249 | } 250 | 251 | is_apple_silicon() { [[ "$(uname -s)" == "Darwin" && "$(uname -m)" == "arm64" ]]; } 252 | 253 | # MaxPermSize critical on pre-8 JVMs but incurs noisy warning on 8+ 254 | default_jvm_opts() { 255 | local -r v="$(java_version)" 256 | if [[ $v -ge 17 ]]; then 257 | echo "$default_jvm_opts_common" 258 | elif [[ $v -ge 10 ]]; then 259 | if is_apple_silicon; then 260 | # As of Dec 2020, JVM for Apple Silicon (M1) doesn't support JVMCI 261 | echo "$default_jvm_opts_common" 262 | else 263 | echo "$default_jvm_opts_common -XX:+UnlockExperimentalVMOptions -XX:+UseJVMCICompiler" 264 | fi 265 | elif [[ $v -ge 8 ]]; then 266 | echo "$default_jvm_opts_common" 267 | else 268 | echo "-XX:MaxPermSize=384m $default_jvm_opts_common" 269 | fi 270 | } 271 | 272 | execRunner() { 273 | # print the arguments one to a line, quoting any containing spaces 274 | vlog "# Executing command line:" && { 275 | for arg; do 276 | if [[ -n "$arg" ]]; then 277 | if printf "%s\n" "$arg" | grep -q ' '; then 278 | printf >&2 "\"%s\"\n" "$arg" 279 | else 280 | printf >&2 "%s\n" "$arg" 281 | fi 282 | fi 283 | done 284 | vlog "" 285 | } 286 | 287 | setTrapExit 288 | 289 | if [[ -n "$batch" ]]; then 290 | "$@" /dev/null 2>&1; then 312 | curl --fail --silent --location "$url" --output "$jar" 313 | elif command -v wget >/dev/null 2>&1; then 314 | wget -q -O "$jar" "$url" 315 | fi 316 | } && [[ -r "$jar" ]] 317 | } 318 | 319 | acquire_sbt_jar() { 320 | { 321 | sbt_jar="$(jar_file "$sbt_version")" 322 | [[ -r "$sbt_jar" ]] 323 | } || { 324 | sbt_jar="$HOME/.ivy2/local/org.scala-sbt/sbt-launch/$sbt_version/jars/sbt-launch.jar" 325 | [[ -r "$sbt_jar" ]] 326 | } || { 327 | sbt_jar="$(jar_file "$sbt_version")" 328 | jar_url="$(make_url "$sbt_version")" 329 | 330 | echoerr "Downloading sbt launcher for ${sbt_version}:" 331 | echoerr " From ${jar_url}" 332 | echoerr " To ${sbt_jar}" 333 | 334 | download_url "${jar_url}" "${sbt_jar}" 335 | 336 | case "${sbt_version}" in 337 | 0.*) 338 | vlog "SBT versions < 1.0 do not have published MD5 checksums, skipping check" 339 | echo "" 340 | ;; 341 | *) verify_sbt_jar "${sbt_jar}" ;; 342 | esac 343 | } 344 | } 345 | 346 | verify_sbt_jar() { 347 | local jar="${1}" 348 | local 
md5="${jar}.md5"
349 |   md5url="$(make_url "${sbt_version}").md5"
350 | 
351 |   echoerr "Downloading sbt launcher ${sbt_version} md5 hash:"
352 |   echoerr "  From  ${md5url}"
353 |   echoerr "    To  ${md5}"
354 | 
355 |   download_url "${md5url}" "${md5}" >/dev/null 2>&1
356 | 
357 |   if command -v md5sum >/dev/null 2>&1; then
358 |     if echo "$(cat "${md5}") ${jar}" | md5sum -c -; then
359 |       rm -rf "${md5}"
360 |       return 0
361 |     else
362 |       echoerr "Checksum does not match"
363 |       return 1
364 |     fi
365 |   elif command -v md5 >/dev/null 2>&1; then
366 |     if [ "$(md5 -q "${jar}")" == "$(cat "${md5}")" ]; then
367 |       rm -rf "${md5}"
368 |       return 0
369 |     else
370 |       echoerr "Checksum does not match"
371 |       return 1
372 |     fi
373 |   elif command -v openssl >/dev/null 2>&1; then
374 |     if [ "$(openssl md5 -r "${jar}" | awk '{print $1}')" == "$(cat "${md5}")" ]; then
375 |       rm -rf "${md5}"
376 |       return 0
377 |     else
378 |       echoerr "Checksum does not match"
379 |       return 1
380 |     fi
381 |   else
382 |     echoerr "Could not find an MD5 command"
383 |     return 1
384 |   fi
385 | }
386 | 
387 | usage() {
388 |   set_sbt_version
389 |   cat <<EOM
390 | Usage: $script_name [options]
391 | 
392 | Note that options which are passed along to sbt begin with -- whereas
393 | options to this runner use a single dash. Any sbt command can be scheduled
394 | to run first by prefixing the command with --, so --warn, --error and so on
395 | are not special.
396 | 
397 |   -h | -help         print this message
398 |   -v                 verbose operation (this runner is chattier)
399 |   -d, -w, -q         aliases for --debug, --warn, --error (q means quiet)
400 |   -x                 currently unused
401 |   -trace <level>     display stack traces with a max of <level> frames (default: -1, traces suppressed)
402 |   -debug-inc         enable debugging log for the incremental compiler
403 |   -no-colors         disable ANSI color codes
404 |   -sbt-create        start sbt even if current directory contains no sbt project
405 |   -sbt-dir <path>    path to global settings/plugins directory (default: ~/.sbt/<version>)
406 |   -sbt-boot <path>   path to shared boot directory (default: ~/.sbt/boot in 0.11+)
407 |   -ivy <path>        path to local Ivy repository (default: ~/.ivy2)
408 |   -no-share          use all local caches; no sharing
409 |   -offline           put sbt in offline mode
410 |   -jvm-debug <port>  Turn on JVM debugging, open at the given port.
411 |   -batch             Disable interactive mode
412 |   -prompt <expr>     Set the sbt prompt; in expr, 's' is the State and 'e' is Extracted
413 |   -script <file>     Run the specified file as a scala script
414 | 
415 |   # sbt version (default: sbt.version from $buildProps if present, otherwise $sbt_release_version)
416 |   -sbt-version <version>    use the specified version of sbt (default: $sbt_release_version)
417 |   -sbt-force-latest         force the use of the latest release of sbt: $sbt_release_version
418 |   -sbt-dev                  use the latest pre-release version of sbt: $sbt_unreleased_version
419 |   -sbt-jar <path>           use the specified jar as the sbt launcher
420 |   -sbt-launch-dir <path>    directory to hold sbt launchers (default: $sbt_launch_dir)
421 |   -sbt-launch-repo <url>    repo url for downloading sbt launcher jar (default: $(url_base "$sbt_version"))
422 | 
423 |   # scala version (default: as chosen by sbt)
424 |   -28                       use $latest_28
425 |   -29                       use $latest_29
426 |   -210                      use $latest_210
427 |   -211                      use $latest_211
428 |   -212                      use $latest_212
429 |   -213                      use $latest_213
430 |   -scala-home <path>        use the scala build at the specified directory
431 |   -scala-version <version>  use the specified version of scala
432 |   -binary-version <version> use the specified scala version when searching for dependencies
433 | 
434 |   # java version (default: java from PATH, currently $(java -version 2>&1 | grep version))
435 |   -java-home <path>         alternate JAVA_HOME
436 | 
437 |   # passing options to the jvm - note it does NOT use JAVA_OPTS due to pollution
438 |   # The default set is used if JVM_OPTS is unset and no -jvm-opts file is found
439 |   <default>        $(default_jvm_opts)
440 |   JVM_OPTS         environment variable holding either the jvm args directly, or
441 |                    the reference to a file containing jvm args if given path is prepended by '@' (e.g. '@/etc/jvmopts')
442 |                    Note: "@"-file is overridden by local '.jvmopts' or '-jvm-opts' argument.
443 |   -jvm-opts <path> file containing jvm args (if not given, .jvmopts in project root is used if present)
444 |   -Dkey=val        pass -Dkey=val directly to the jvm
445 |   -J-X             pass option -X directly to the jvm (-J is stripped)
446 | 
447 |   # passing options to sbt, OR to this runner
448 |   SBT_OPTS         environment variable holding either the sbt args directly, or
449 |                    the reference to a file containing sbt args if given path is prepended by '@' (e.g. '@/etc/sbtopts')
450 |                    Note: "@"-file is overridden by local '.sbtopts' or '-sbt-opts' argument.
451 |   -sbt-opts <path> file containing sbt args (if not given, .sbtopts in project root is used if present)
452 |   -S-X             add -X to sbt's scalacOptions (-S is stripped)
453 | 
454 |   # passing options exclusively to this runner
455 |   SBTX_OPTS        environment variable holding either the sbt-extras args directly, or
456 |                    the reference to a file containing sbt-extras args if given path is prepended by '@' (e.g. '@/etc/sbtxopts')
457 |                    Note: "@"-file is overridden by local '.sbtxopts' or '-sbtx-opts' argument.
458 |   -sbtx-opts <path> file containing sbt-extras args (if not given, .sbtxopts in project root is used if present)
459 | EOM
460 |   exit 0
461 | }
462 | 
463 | process_args() {
464 |   require_arg() {
465 |     local type="$1"
466 |     local opt="$2"
467 |     local arg="$3"
468 | 
469 |     if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then
470 |       die "$opt requires <$type> argument"
471 |     fi
472 |   }
473 |   while [[ $# -gt 0 ]]; do
474 |     case "$1" in
475 |       -h | -help) usage ;;
476 |       -v) verbose=true && shift ;;
477 |       -d) addSbt "--debug" && shift ;;
478 |       -w) addSbt "--warn" && shift ;;
479 |       -q) addSbt "--error" && shift ;;
480 |       -x) shift ;; # currently unused
481 |       -trace) require_arg integer "$1" "$2" && trace_level="$2" && shift 2 ;;
482 |       -debug-inc) addJava "-Dxsbt.inc.debug=true" && shift ;;
483 | 
484 |       -no-colors) addJava "-Dsbt.log.noformat=true" && addJava "-Dsbt.color=false" && shift ;;
485 |       -sbt-create) sbt_create=true && shift ;;
486 |       -sbt-dir) require_arg path "$1" "$2" && sbt_dir="$2" && shift 2 ;;
487 |       -sbt-boot) require_arg path "$1" "$2" && addJava "-Dsbt.boot.directory=$2" && shift 2 ;;
488 |       -ivy) require_arg path "$1" "$2" && addJava "-Dsbt.ivy.home=$2" && shift 2 ;;
489 |       -no-share) noshare=true && shift ;;
490 |       -offline) addSbt "set offline in Global := true" && shift ;;
491 |       -jvm-debug) require_arg port "$1" "$2" && addDebugger "$2" && shift 2 ;;
492 |       -batch) batch=true && shift ;;
493 |       -prompt) require_arg "expr" "$1" "$2" && setThisBuild shellPrompt "(s => { val e = Project.extract(s) ; $2 })" && shift 2 ;;
494 |       -script) require_arg file "$1" "$2" && sbt_script="$2" && addJava "-Dsbt.main.class=sbt.ScriptMain" && shift 2 ;;
495 | 
496 |       -sbt-version) require_arg version "$1" "$2" && sbt_explicit_version="$2" && shift 2 ;;
497 |       -sbt-force-latest) sbt_explicit_version="$sbt_release_version" && shift ;;
498 |       -sbt-dev) sbt_explicit_version="$sbt_unreleased_version" && shift ;;
499 |       -sbt-jar) require_arg path "$1" "$2" && sbt_jar="$2" && shift 2 ;;
500 |       -sbt-launch-dir) require_arg path "$1" "$2" && sbt_launch_dir="$2" && shift 2 ;;
501 |       -sbt-launch-repo) require_arg path "$1" "$2" && sbt_launch_repo="$2" && shift 2 ;;
502 | 
503 |       -28) setScalaVersion "$latest_28" && shift ;;
504 |       -29) setScalaVersion "$latest_29" && shift ;;
505 |       -210) setScalaVersion "$latest_210" && shift ;;
506 |       -211) setScalaVersion "$latest_211" && shift ;;
507 |       -212) setScalaVersion "$latest_212" && shift ;;
508 |       -213) setScalaVersion "$latest_213" && shift ;;
509 | 
510 |       -scala-version)
require_arg version "$1" "$2" && setScalaVersion "$2" && shift 2 ;; 511 | -binary-version) require_arg version "$1" "$2" && setThisBuild scalaBinaryVersion "\"$2\"" && shift 2 ;; 512 | -scala-home) require_arg path "$1" "$2" && setThisBuild scalaHome "_root_.scala.Some(file(\"$2\"))" && shift 2 ;; 513 | -java-home) require_arg path "$1" "$2" && setJavaHome "$2" && shift 2 ;; 514 | -sbt-opts) require_arg path "$1" "$2" && sbt_opts_file="$2" && shift 2 ;; 515 | -sbtx-opts) require_arg path "$1" "$2" && sbtx_opts_file="$2" && shift 2 ;; 516 | -jvm-opts) require_arg path "$1" "$2" && jvm_opts_file="$2" && shift 2 ;; 517 | 518 | -D*) addJava "$1" && shift ;; 519 | -J*) addJava "${1:2}" && shift ;; 520 | -S*) addScalac "${1:2}" && shift ;; 521 | 522 | new) sbt_new=true && : ${sbt_explicit_version:=$sbt_release_version} && addResidual "$1" && shift ;; 523 | 524 | *) addResidual "$1" && shift ;; 525 | esac 526 | done 527 | } 528 | 529 | # process the direct command line arguments 530 | process_args "$@" 531 | 532 | # skip #-styled comments and blank lines 533 | readConfigFile() { 534 | local end=false 535 | until $end; do 536 | read -r || end=true 537 | [[ $REPLY =~ ^# ]] || [[ -z $REPLY ]] || echo "$REPLY" 538 | done <"$1" 539 | } 540 | 541 | # if there are file/environment sbt_opts, process again so we 542 | # can supply args to this runner 543 | if [[ -r "$sbt_opts_file" ]]; then 544 | vlog "Using sbt options defined in file $sbt_opts_file" 545 | while read -r opt; do extra_sbt_opts+=("$opt"); done < <(readConfigFile "$sbt_opts_file") 546 | elif [[ -n "$SBT_OPTS" && ! ("$SBT_OPTS" =~ ^@.*) ]]; then 547 | vlog "Using sbt options defined in variable \$SBT_OPTS" 548 | IFS=" " read -r -a extra_sbt_opts <<<"$SBT_OPTS" 549 | else 550 | vlog "No extra sbt options have been defined" 551 | fi 552 | 553 | # if there are file/environment sbtx_opts, process again so we 554 | # can supply args to this runner 555 | if [[ -r "$sbtx_opts_file" ]]; then 556 | vlog "Using sbt options defined in file $sbtx_opts_file" 557 | while read -r opt; do extra_sbt_opts+=("$opt"); done < <(readConfigFile "$sbtx_opts_file") 558 | elif [[ -n "$SBTX_OPTS" && ! 
("$SBTX_OPTS" =~ ^@.*) ]]; then 559 | vlog "Using sbt options defined in variable \$SBTX_OPTS" 560 | IFS=" " read -r -a extra_sbt_opts <<<"$SBTX_OPTS" 561 | else 562 | vlog "No extra sbt options have been defined" 563 | fi 564 | 565 | [[ -n "${extra_sbt_opts[*]}" ]] && process_args "${extra_sbt_opts[@]}" 566 | 567 | # reset "$@" to the residual args 568 | set -- "${residual_args[@]}" 569 | argumentCount=$# 570 | 571 | # set sbt version 572 | set_sbt_version 573 | 574 | checkJava 575 | 576 | # only exists in 0.12+ 577 | setTraceLevel() { 578 | case "$sbt_version" in 579 | "0.7."* | "0.10."* | "0.11."*) echoerr "Cannot set trace level in sbt version $sbt_version" ;; 580 | *) setThisBuild traceLevel "$trace_level" ;; 581 | esac 582 | } 583 | 584 | # set scalacOptions if we were given any -S opts 585 | [[ ${#scalac_args[@]} -eq 0 ]] || addSbt "set scalacOptions in ThisBuild += \"${scalac_args[*]}\"" 586 | 587 | [[ -n "$sbt_explicit_version" && -z "$sbt_new" ]] && addJava "-Dsbt.version=$sbt_explicit_version" 588 | vlog "Detected sbt version $sbt_version" 589 | 590 | if [[ -n "$sbt_script" ]]; then 591 | residual_args=("$sbt_script" "${residual_args[@]}") 592 | else 593 | # no args - alert them there's stuff in here 594 | ((argumentCount > 0)) || { 595 | vlog "Starting $script_name: invoke with -help for other options" 596 | residual_args=(shell) 597 | } 598 | fi 599 | 600 | # verify this is an sbt dir, -create was given or user attempts to run a scala script 601 | [[ -r ./build.sbt || -d ./project || -n "$sbt_create" || -n "$sbt_script" || -n "$sbt_new" ]] || { 602 | cat <