├── .github └── workflows │ └── build.yml ├── .gitignore ├── .travis.yml ├── Dockerfile ├── LICENSE ├── README.md ├── project ├── Build.scala ├── build.properties └── plugins.sbt ├── scalastyle-config.xml └── src ├── main └── scala │ └── com │ └── github │ └── caiiiycuk │ └── pg2sqlite │ ├── Boot.scala │ ├── Config.scala │ ├── Connection.scala │ ├── DumpInserter.scala │ ├── Log.scala │ ├── LoggedIterator.scala │ ├── command │ ├── Command.scala │ ├── CommandException.scala │ ├── Copy.scala │ ├── CreateIndex.scala │ └── CreateTable.scala │ ├── dsl │ └── DSL.scala │ ├── iterator │ ├── Line.scala │ └── LineIterator.scala │ ├── schema │ ├── Column.scala │ └── Schema.scala │ └── values │ ├── LineToValues.scala │ ├── Value.scala │ └── ValueParseException.scala └── test └── scala └── com └── github └── caiiiycuk └── pg2sqlite └── dsl ├── DSLTest.scala └── DumperTest.scala /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. 2 | # They are provided by a third-party and are governed by 3 | # separate terms of service, privacy policy, and support 4 | # documentation. 
5 | 6 | name: Build 7 | 8 | on: 9 | push: 10 | branches: [ "master" ] 11 | pull_request: 12 | branches: [ "master" ] 13 | 14 | permissions: 15 | contents: read 16 | 17 | jobs: 18 | build: 19 | runs-on: ubuntu-latest 20 | steps: 21 | - uses: actions/checkout@v3 22 | - name: Set up JDK 8 23 | uses: actions/setup-java@v3 24 | with: 25 | java-version: '8' 26 | distribution: 'temurin' 27 | cache: 'sbt' 28 | - name: Run tests 29 | run: sbt test 30 | - name: Build one-jar 31 | run: sbt one-jar 32 | - name: Upload a Build Artifact 33 | uses: actions/upload-artifact@v3.1.2 34 | with: 35 | name: postgresql-to-sqlite 36 | path: target/scala-2.11/postgresql-to-sqlite*.jar 37 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.class 2 | *.log 3 | 4 | # sbt specific 5 | .cache 6 | .history 7 | .lib/ 8 | dist/* 9 | target/ 10 | lib_managed/ 11 | src_managed/ 12 | project/boot/ 13 | project/plugins/project/ 14 | 15 | # Scala-IDE specific 16 | .scala_dependencies 17 | .worksheet 18 | /bin/ 19 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: scala 2 | scala: 3 | - 2.11.12 4 | branches: 5 | only: 6 | - master 7 | jdk: 8 | - openjdk9 9 | 10 | script: 11 | - sbt ++$TRAVIS_SCALA_VERSION test 12 | - sbt ++$TRAVIS_SCALA_VERSION one-jar 13 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM hseeberger/scala-sbt:8u222_1.3.5_2.13.1 2 | ENV psource=database.dump 3 | ENV starget=sqllight.db 4 | RUN mkdir -p /p2s 5 | WORKDIR /p2s 6 | COPY . 
./ 7 | RUN sbt one-jar 8 | RUN cp target/scala-2.11/postgresql-to-sqlite_2.11-*-one-jar.jar pg2sqlite.jar 9 | CMD exec java -jar pg2sqlite.jar -d "$psource" -o "$starget" 10 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 Aleksander Guryanov 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # postgresql-to-sqlite (pg2sqlite) 2 | [![Build](https://github.com/caiiiycuk/postgresql-to-sqlite/actions/workflows/build.yml/badge.svg)](https://github.com/caiiiycuk/postgresql-to-sqlite/actions/workflows/build.yml) 3 | 4 | Easy to use solution to create sqlite database from postgresql dump. 5 | 6 | * default [`pg_dump`](http://www.postgresql.org/docs/9.4/static/app-pgdump.html) script format 7 | * as fast as possible 8 | * silently ignore unsupported postgresql features 9 | * gzip support 10 | 11 | ## Installing 12 | 13 | In [release section](https://github.com/caiiiycuk/postgresql-to-sqlite/releases/) you can download pre-built version of pg2sqlite.jar 14 | 15 | ## How to use 16 | 17 | 1. Install jre (java) on your PC 18 | 19 | 2. Create dump from postgresql database 20 | ```sh 21 | pg_dump -h host -U user -f database.dump database 22 | ``` 23 | 24 | 3. Make sqlite database from it 25 | ``` 26 | java -jar pg2sqlite-1.0.3.jar -d database.dump -o sqlite.db 27 | ``` 28 | 29 | ## Command line arguments 30 | 31 | `pg2sqlite -d -o [-f ]` 32 | 33 | * **-d** `` - file that contains dump of postgresql database (made by pg_dump, accepts .gz) 34 | * **-o** `` - file name of newly created sqlite3 database 35 | * **-f** `` - default: false, force database re-creation if database file already exists 36 | * **-t** `` - default: integer, change sqlite3 date class (read below) 37 | 38 | ## Timestamps 39 | 40 | SQLite does not have a storage class set aside for storing dates and/or times. Instead, the built-in [Date And Time Functions](https://www.sqlite.org/lang_datefunc.html) of SQLite are capable of storing dates and times as TEXT, REAL, or INTEGER values: 41 | 42 | * TEXT as ISO8601 strings ("YYYY-MM-DD HH:MM:SS.SSS"). 
43 | * REAL as Julian day numbers, the number of days since noon in Greenwich on November 24, 4714 B.C. according to the proleptic Gregorian calendar. 44 | * INTEGER as Unix Time, the number of seconds since 1970-01-01 00:00:00 UTC. 45 | 46 | By default pg2sqlite uses **INTEGER** to store dates, but you can change this with **-t** argument (`-t text` or `-t real`), use it like this: 47 | 48 | ```sh 49 | java -jar pg2sqlite-1.0.3.jar -d database.dump -o sqlite.db -t text 50 | ``` 51 | 52 | ## Tips 53 | 54 | pg2sqlite does not support database schemas. If your dump file includes a schema definition, it will print errors like this: 55 | ``` 56 | Create Table - Exception: 57 | unknown database 58 | [SQL] 'CREATE TABLE .table (...;' 59 | ``` 60 | You can easily fix the dump file with `sed`: 61 | ```sh 62 | # sed 's/\.//' -i database.dump 63 | sed 's/public\.//' -i database.dump 64 | pg2sqlite -d output.dump -o sqlite.db 65 | ``` 66 | Where `public` is a schema name. 67 | 68 | ## How to build 69 | ```sh 70 | git clone https://github.com/caiiiycuk/postgresql-to-sqlite.git 71 | cd postgresql-to-sqlite 72 | sbt one-jar 73 | cp target/scala-2.11/postgresql-to-sqlite_2.11-0.0.1-SNAPSHOT-one-jar.jar pg2sqlite.jar 74 | ``` 75 | 76 | ## Docker 77 | 78 | Clone the repository and run 79 | ``` 80 | docker build -t postgresql-to-sqlite:latest . 81 | ``` 82 | inside the postgresql-to-sqlite folder. 83 | 84 | Use 85 | ``` 86 | docker run -v /home/john/dbdata:/dbdata -e psource='/dbdata/pqdump.sql' -e starget='/dbdata/output.sqlite' -it postgresql-to-sqlite:latest 87 | ``` 88 | where 89 | - -v: is the volume where the pqdump file is located. (and later the output file) 90 | - -e: `psource` is the pqdump filename and folder & `starget` the sqlite filename and folder 91 | 92 | p.s. 
the schema removal has to be done outside the container 93 | 94 | ## Support 95 | 96 | If you appreciate this project, please consider voting for it on Stack Overflow: 97 | 98 | https://stackoverflow.com/questions/6148421/how-to-convert-a-postgres-database-to-sqlite/69293251#69293251 99 | -------------------------------------------------------------------------------- /project/Build.scala: -------------------------------------------------------------------------------- 1 | import com.github.retronym.SbtOneJar 2 | import sbt._ 3 | import Keys._ 4 | 5 | object Build extends Build { 6 | 7 | lazy val project = Project("root", file("."), settings = Seq( 8 | name := "postgresql-to-sqlite", 9 | organization := "com.github.caiiiycuk", 10 | version := "1.1.1", 11 | scalaVersion := "2.11.12", 12 | 13 | libraryDependencies ++= Seq( 14 | "com.github.scopt" %% "scopt" % "3.3.0", 15 | "ch.qos.logback" % "logback-classic" % "1.1.2", 16 | "org.xerial" % "sqlite-jdbc" % "3.42.0.0", 17 | "org.scalatest" %% "scalatest" % "2.2.4" % "test" 18 | ) 19 | ) ++ SbtOneJar.oneJarSettings) 20 | 21 | } 22 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=0.13.18 2 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "4.0.0") 2 | 3 | addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.7.0") 4 | 5 | addSbtPlugin("org.scala-sbt.plugins" % "sbt-onejar" % "0.8") 6 | -------------------------------------------------------------------------------- /scalastyle-config.xml: -------------------------------------------------------------------------------- 1 | 2 | Scalastyle standard configuration 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 
| 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | -------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/Boot.scala: -------------------------------------------------------------------------------- 1 | package com.github.caiiiycuk.pg2sqlite 2 | 3 | import com.github.caiiiycuk.pg2sqlite.command.CommandException 4 | import com.github.caiiiycuk.pg2sqlite.iterator.LineIterator 5 | import com.github.caiiiycuk.pg2sqlite.values.ValueParseException 6 | 7 | import ch.qos.logback.classic.Level 8 | 9 | object Boot extends App with Log { 10 | 11 | val root = org.slf4j.LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[ch.qos.logback.classic.Logger] 12 | root.setLevel(Level.INFO) 13 | 14 | val config = Config.parse(args) 15 | import config._ 16 | 17 | val size = pgdump.length() 18 | val connection = Connection.sqlite(sqlite, config.dateClass) 19 | val iterator = LineIterator(pgdump) 20 | val loggedIterator = LoggedIterator(iterator, () => 100.0 * iterator.readed / size) 21 | val dumpInserter = new DumpInserter(connection) 22 | 23 | log.info(s"'$pgdump' (${toMb(size)} Mb) -> '$sqlite'") 24 | 25 | val success = try { 26 | dumpInserter.insert(loggedIterator) 27 | true 28 | } catch { 29 | case e: CommandException => 30 | log.error(e.getMessage) 31 | false 32 | case e: ValueParseException => 33 | log.error(e.getMessage) 34 | false 35 | case e: Throwable => 36 | log.error(e.getMessage, e) 37 | false 38 | } 39 | 40 | iterator.close 41 | connection.close 42 | 43 | if (success) { 44 | log.info("Well done...") 45 | } else { 
46 | log.error("Task failed...") 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/Config.scala: -------------------------------------------------------------------------------- 1 | package com.github.caiiiycuk.pg2sqlite 2 | 3 | import java.io.File 4 | 5 | case class Config(pgdump: File = new File("dump"), sqlite: File = new File("db"), 6 | force: Boolean = false, dateClass: String = Connection.DEFAULT_DATE_CLASS) 7 | 8 | object Config extends Log { 9 | private val parser = new scopt.OptionParser[Config]("postgresql-to-sqlite") { 10 | head("postgresql-to-sqlite") 11 | 12 | opt[File]('d', "dump") required() valueName ("") action { (v, c) => 13 | c.copy(pgdump = v) 14 | } text ("postgresql dump generated by pg_dump") 15 | 16 | opt[File]('o', "out") required() valueName ("") action { (v, c) => 17 | c.copy(sqlite = v) 18 | } text ("sqlite3 database to create") 19 | 20 | opt[Boolean]('f', "force") optional() valueName ("") action { (v, c) => 21 | c.copy(force = v) 22 | } text ("recreate database if exists") 23 | 24 | opt[String]('t', "timestamps") optional() valueName ("") action { (v, c) => 25 | val dc = v.toUpperCase() 26 | if (dc.equals(Connection.TEXT_DATE_CLASS) || dc.equals(Connection.REAL_DATE_CLASS)) { 27 | c.copy(dateClass = dc) 28 | } else { 29 | c 30 | } 31 | } text ("Change sqlite3 date class (default: INTEGER)") 32 | 33 | checkConfig { c => 34 | import c._ 35 | 36 | if (!pgdump.exists()) { 37 | failure(s"Dump '${pgdump}' does not exists") 38 | } else if (sqlite.exists()) { 39 | if (force) { 40 | sqlite.delete() 41 | success 42 | } else { 43 | failure(s"Database '${sqlite}' already exists") 44 | } 45 | } else { 46 | success 47 | } 48 | } 49 | } 50 | 51 | def parse(args: Array[String]) = { 52 | parser.parse(args, Config()) match { 53 | case Some(config) => 54 | Option(System.getenv("SQLITE_TMPDIR")) match { 55 | case None => 56 | log.warn("You should set 
SQLITE_TMPDIR environment variable to control where sqlite stores temp files") 57 | case _ => 58 | } 59 | 60 | config 61 | case _ => 62 | System.exit(1) 63 | ??? 64 | } 65 | } 66 | 67 | } 68 | -------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/Connection.scala: -------------------------------------------------------------------------------- 1 | package com.github.caiiiycuk.pg2sqlite 2 | 3 | import org.sqlite.SQLiteConfig 4 | 5 | import java.sql.DriverManager 6 | import java.sql.Statement 7 | import java.sql.PreparedStatement 8 | import scala.collection.mutable.ListBuffer 9 | import java.sql.ResultSet 10 | import scala.annotation.tailrec 11 | import java.io.File 12 | import java.util.Properties 13 | 14 | trait ConnectionHolder { 15 | def makeConnection: java.sql.Connection 16 | 17 | def db: String 18 | } 19 | 20 | object Connection { 21 | final val DEFAULT_DATE_CLASS = "INTEGER" 22 | final val TEXT_DATE_CLASS = "TEXT" 23 | final val REAL_DATE_CLASS = "REAL" 24 | private final val DATE_CLASS_PRAGMA = "date_class" 25 | private final val FETCH_SIZE = 8192 26 | private final val MAX_VARIABLE_NUMBER = 999 27 | 28 | def sqlite(dbFile: File, dateClass: String = DEFAULT_DATE_CLASS): Connection = { 29 | val connectionHolder = new ConnectionHolder { 30 | override def makeConnection: java.sql.Connection = { 31 | val properties = new Properties() 32 | properties.setProperty(DATE_CLASS_PRAGMA, dateClass) 33 | implicit val connection = DriverManager.getConnection(s"jdbc:sqlite:$dbFile", properties) 34 | 35 | connection.setAutoCommit(true) 36 | sqlitePragmas() 37 | 38 | connection.setAutoCommit(false) 39 | connection 40 | } 41 | 42 | override def db = dbFile.toString 43 | } 44 | 45 | new Connection(connectionHolder) 46 | } 47 | 48 | private def sqlitePragmas()(implicit connection: java.sql.Connection) = { 49 | assert(SQLiteConfig.Pragma.DATE_CLASS.pragmaName.equals(DATE_CLASS_PRAGMA)); 50 | val statement 
= connection.createStatement() 51 | statement.executeUpdate(s"PRAGMA ${SQLiteConfig.Pragma.SYNCHRONOUS.pragmaName} = OFF") 52 | statement.executeUpdate(s"PRAGMA ${SQLiteConfig.Pragma.JOURNAL_MODE.pragmaName} = OFF") 53 | statement.executeUpdate(s"PRAGMA ${SQLiteConfig.Pragma.LIMIT_WORKER_THREADS.pragmaName} = 64") 54 | statement.executeUpdate(s"PRAGMA ${SQLiteConfig.Pragma.MAX_PAGE_COUNT.pragmaName} = 2147483646") 55 | statement.executeUpdate(s"PRAGMA ${SQLiteConfig.Pragma.CACHE_SIZE.pragmaName} = 65536") 56 | statement.executeUpdate("PRAGMA cache_spill = true") 57 | statement.close 58 | } 59 | } 60 | 61 | class Connection(connectionHolder: ConnectionHolder) { 62 | 63 | import Connection._ 64 | 65 | final val MAX_VARIABLE_NUMBER = Connection.MAX_VARIABLE_NUMBER 66 | 67 | lazy val connection = connectionHolder.makeConnection 68 | 69 | lazy val db = connectionHolder.db 70 | 71 | def withStatement[T](block: (Statement) => T): T = { 72 | val statement = connection.createStatement() 73 | val t = block(statement) 74 | statement.close 75 | t 76 | } 77 | 78 | def withPreparedStatement[T](sql: String, keepAlive: Boolean = false)(block: (PreparedStatement) => T): T = { 79 | val statement = connection.prepareStatement(sql) 80 | statement.setFetchSize(FETCH_SIZE) 81 | 82 | val t = block(statement) 83 | if (!keepAlive) statement.close 84 | t 85 | } 86 | 87 | def close = { 88 | connection.commit 89 | connection.close 90 | } 91 | 92 | def execute(sql: String) = { 93 | withStatement { statement => 94 | statement.executeUpdate(sql) 95 | } 96 | } 97 | 98 | } 99 | -------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/DumpInserter.scala: -------------------------------------------------------------------------------- 1 | package com.github.caiiiycuk.pg2sqlite 2 | 3 | import scala.annotation.tailrec 4 | import com.github.caiiiycuk.pg2sqlite.command._ 5 | import com.github.caiiiycuk.pg2sqlite.iterator.Line 6 | 
import com.github.caiiiycuk.pg2sqlite.schema.Schema 7 | 8 | object DumpInserter { 9 | val COMMANDS = List(CreateTable, Copy, CreateIndex) 10 | } 11 | 12 | class DumpInserter(connection: Connection) { 13 | 14 | import DumpInserter._ 15 | 16 | implicit val schema = new Schema() 17 | 18 | @tailrec 19 | final def insert(iterator: Iterator[Line]): Unit = { 20 | if (iterator.hasNext) { 21 | val head = iterator.next() 22 | val fullIterator = Iterator(head) ++ iterator 23 | 24 | COMMANDS.find(_.matchHead(head)).foreach { command => 25 | command.apply(connection, fullIterator) 26 | } 27 | 28 | insert(iterator) 29 | } 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/Log.scala: -------------------------------------------------------------------------------- 1 | package com.github.caiiiycuk.pg2sqlite 2 | 3 | import org.slf4j.LoggerFactory 4 | 5 | trait Log { 6 | 7 | protected lazy val log = LoggerFactory.getLogger(getClass) 8 | 9 | def toMb(length: Long) = { 10 | length / 1024 / 1024 11 | } 12 | 13 | def humanizeMsTime(time: Long) = { 14 | val ms = time % 1000 15 | val s = time / 1000 % 60 16 | val m = time / 1000 / 60 17 | 18 | s"${m}m ${s}s ${ms}ms" 19 | } 20 | 21 | def humanizeElapsedAndRemaning(startAt: Long, progress: Double): String = { 22 | val elapsed = System.currentTimeMillis - startAt 23 | val remaining = (elapsed / progress - elapsed).toInt 24 | 25 | s"elapsed: ${humanizeMsTime(elapsed)} / remaining: ${humanizeMsTime(remaining)}" 26 | } 27 | 28 | } 29 | -------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/LoggedIterator.scala: -------------------------------------------------------------------------------- 1 | package com.github.caiiiycuk.pg2sqlite 2 | 3 | object LoggedIterator { 4 | final val DEFAULT_SENSIVITY = 10 5 | } 6 | 7 | case class LoggedIterator[T](iterator: Iterator[T], 8 | 
progress: () => Double, sensivity: Int = LoggedIterator.DEFAULT_SENSIVITY) 9 | extends Iterator[T] with Log { 10 | 11 | val startAt = System.currentTimeMillis 12 | var currentProgress: Long = 0L 13 | 14 | override def hasNext = iterator.hasNext 15 | 16 | override def next(): T = { 17 | val value = iterator.next 18 | val newProgress = progress() 19 | val intProgress = (newProgress * sensivity).toLong 20 | 21 | if (intProgress > currentProgress) { 22 | val elapsedAndRemaining = humanizeElapsedAndRemaning(startAt, newProgress / 100) 23 | log.info(s"Progress ${intProgress.toDouble / sensivity}%, ${elapsedAndRemaining}...\t") 24 | currentProgress = intProgress 25 | } 26 | 27 | value 28 | } 29 | 30 | } 31 | -------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/command/Command.scala: -------------------------------------------------------------------------------- 1 | package com.github.caiiiycuk.pg2sqlite.command 2 | 3 | import scala.util.matching.Regex 4 | import scala.annotation.tailrec 5 | import com.github.caiiiycuk.pg2sqlite.Connection 6 | import com.github.caiiiycuk.pg2sqlite.iterator.Line 7 | import com.github.caiiiycuk.pg2sqlite.schema.Schema 8 | 9 | trait Command { 10 | 11 | def matchHead(head: Line): Boolean = 12 | matchHead(head.text) 13 | 14 | def matchHead(head: String): Boolean 15 | 16 | def apply(connection: Connection, iterator: Iterator[Line])(implicit schema: Schema) 17 | 18 | @tailrec 19 | final protected def takeUntil(iterator: Iterator[Line], 20 | when: (String) => Boolean, 21 | buffer: List[Line] = Nil): List[Line] = { 22 | if (!iterator.hasNext) { 23 | buffer.reverse 24 | } else { 25 | val line = iterator.next 26 | val newBuffer = line :: buffer 27 | 28 | if (when(line.text)) { 29 | newBuffer.reverse 30 | } else { 31 | takeUntil(iterator, when, newBuffer) 32 | } 33 | } 34 | } 35 | 36 | } 37 | -------------------------------------------------------------------------------- 
/src/main/scala/com/github/caiiiycuk/pg2sqlite/command/CommandException.scala: -------------------------------------------------------------------------------- 1 | package com.github.caiiiycuk.pg2sqlite.command 2 | 3 | import com.github.caiiiycuk.pg2sqlite.iterator.Line 4 | 5 | case class CommandException(command: String, cause: Throwable, context: List[String]) 6 | extends Exception(s""" 7 | $command - Exception: 8 | \t${cause.getMessage} 9 | \t${context.mkString("\n\t")}, 10 | """, cause) 11 | 12 | object CommandException { 13 | def apply(command: String, cause: Throwable, sql: String, rows: List[Line], context: List[String] = Nil): CommandException = { 14 | val default = List(s"[SQL] '$sql'", s"[LINE #${rows.head.num}] ${rows.mkString(" ")}") 15 | CommandException(command, cause, default ++ context) 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/command/Copy.scala: -------------------------------------------------------------------------------- 1 | package com.github.caiiiycuk.pg2sqlite.command 2 | 3 | import com.github.caiiiycuk.pg2sqlite.Connection 4 | import com.github.caiiiycuk.pg2sqlite.Log 5 | import com.github.caiiiycuk.pg2sqlite.values.LineToValues 6 | import java.sql.SQLException 7 | import com.github.caiiiycuk.pg2sqlite.iterator.Line 8 | import com.github.caiiiycuk.pg2sqlite.schema.Schema 9 | import com.github.caiiiycuk.pg2sqlite.values.ValueParseException 10 | import com.github.caiiiycuk.pg2sqlite.dsl.DSL._ 11 | 12 | object Copy extends Command with Log { 13 | 14 | import LineToValues._ 15 | 16 | private val TABLE_NAME_POSITION = 1 17 | private val activator = "^(?i)copy".r 18 | 19 | override def matchHead(head: String): Boolean = { 20 | activator.findFirstIn(head).isDefined 21 | } 22 | 23 | override def apply(connection: Connection, iterator: Iterator[Line])(implicit schema: Schema) = { 24 | val rows = takeUntil(iterator, _.contains(";")) 25 | val 
rawSql = rows.mkString(" ") 26 | 27 | val (tableName, sql, columnTypes) = try { 28 | val tableName = rawSql.tokens(TABLE_NAME_POSITION) 29 | val columns = rawSql.takeBraces.head.columns.map(_.name).toList 30 | 31 | val marks = ("?," * columns.size).dropRight(1) 32 | val sql = s"insert into $tableName(${columns.map(column => s"[$column]").mkString(",")}) values($marks)" 33 | 34 | val columnTypes = schema.columnsToTypeConstants(tableName, columns) 35 | 36 | (tableName, sql, columnTypes) 37 | } catch { 38 | case t: Throwable => 39 | throw CommandException(s"COPY - Unable to find TABLE NAME or COLUMNS in '$rawSql'", 40 | t, rawSql, rows) 41 | } 42 | 43 | if (schema.shouldExcludeTable(tableName)) { 44 | log.info(s"Skipping '$sql'") 45 | } else { 46 | log.info(s"COPY table '$tableName'") 47 | connection.withPreparedStatement(sql) { statement => 48 | iterator.takeWhile(!_.startsWith("\\.")).foreach { row => 49 | val values = try { 50 | toValues(row.text)(columnTypes) 51 | } catch { 52 | case e: ValueParseException => 53 | throw CommandException("COPY", e, sql, rows, 54 | List(s"[DATA #${row.num}] '$row'", 55 | s"[COLUMN,TYPE] ${schema.columns(tableName).map(_.toString).mkString(" ")}")) 56 | } 57 | 58 | try { 59 | values.foreach(_.apply(statement)) 60 | statement.executeUpdate() 61 | } catch { 62 | case e: SQLException => 63 | val vals = values.map(_.toString).mkString(", ") 64 | throw CommandException("COPY", e, sql, rows, 65 | List(s"[DATA #${row.num}] '$row'", s"[VALUES] '$vals'")) 66 | } 67 | 68 | } 69 | } 70 | } 71 | } 72 | 73 | } 74 | -------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/command/CreateIndex.scala: -------------------------------------------------------------------------------- 1 | package com.github.caiiiycuk.pg2sqlite.command 2 | 3 | import java.sql.SQLException 4 | import scala.annotation.tailrec 5 | import com.github.caiiiycuk.pg2sqlite.Connection 6 | import 
com.github.caiiiycuk.pg2sqlite.iterator.Line 7 | import com.github.caiiiycuk.pg2sqlite.schema.Schema 8 | import com.github.caiiiycuk.pg2sqlite.dsl.DSL._ 9 | import com.github.caiiiycuk.pg2sqlite.Log 10 | 11 | object CreateIndex extends Command with Log { 12 | 13 | private val INDEX_NAME_POSITION = 2 14 | private val TABLE_NAME_POSITION = 0 15 | private val activator = """^(?i)create\s+index""".r 16 | 17 | override def matchHead(head: String): Boolean = { 18 | activator.findFirstIn(head).isDefined 19 | } 20 | 21 | override def apply(connection: Connection, iterator: Iterator[Line])(implicit schema: Schema) = { 22 | val rows = takeUntil(iterator, _.contains(";")) 23 | val rawSql = rows.mkString(" ").toLowerCase 24 | 25 | val (tableName, sql, columns) = try { 26 | val createIndexParts = rawSql.split("""\s+on\s+""") 27 | val indexName = createIndexParts(0).tokens(INDEX_NAME_POSITION) 28 | val tableName = createIndexParts(1).tokens(TABLE_NAME_POSITION) 29 | val columns = rawSql.takeBraces.head.columns.map(column => s"[${column.name}]").mkString(",") 30 | 31 | (tableName, s"CREATE INDEX $indexName ON $tableName ($columns)", columns) 32 | } catch { 33 | case t: Throwable => 34 | throw CommandException(s"CREATE INDEX - Unable to find INDEX_NAME or TABLE NAME or COLUMNS in '$rawSql'", 35 | t, rawSql, rows) 36 | } 37 | 38 | if (schema.shouldExcludeTable(tableName) || 39 | columns.isEmpty) { 40 | log.info(s"Skipping '$sql'") 41 | } else { 42 | try { 43 | connection.execute(sql) 44 | } catch { 45 | case e: SQLException => 46 | throw CommandException("Create Index", e, sql, rows) 47 | } 48 | } 49 | } 50 | 51 | } 52 | -------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/command/CreateTable.scala: -------------------------------------------------------------------------------- 1 | package com.github.caiiiycuk.pg2sqlite.command 2 | 3 | import java.sql.SQLException 4 | import scala.annotation.tailrec 5 | 
import com.github.caiiiycuk.pg2sqlite.Connection
import com.github.caiiiycuk.pg2sqlite.iterator.Line
import com.github.caiiiycuk.pg2sqlite.schema.Schema
import com.github.caiiiycuk.pg2sqlite.Log
import com.github.caiiiycuk.pg2sqlite.dsl.DSL._

/**
 * Rewrites a PostgreSQL `CREATE TABLE` statement as an untyped SQLite table
 * and records the declared columns in the shared schema for later use by COPY.
 */
object CreateTable extends Command with Log {

  // `create table <name>` -> name is the third token (0-based position 2).
  private final val TABLE_NAME_POSITION = 2
  private final val activator = """^(?i)create\s+table""".r

  override def matchHead(head: String): Boolean =
    activator.findFirstIn(head).isDefined

  override def apply(connection: Connection, iterator: Iterator[Line])(implicit schema: Schema) = {
    // Consume dump lines up to the statement terminator.
    val rows = takeUntil(iterator, _.contains(";"))
    val rawSql = rows.mkString(" ")

    val (tableName, sql) =
      try {
        val table = rawSql.tokens(TABLE_NAME_POSITION)
        val declaredColumns = rawSql.takeBraces.head.columns

        // Register every column so COPY can later map values to SQL types.
        declaredColumns.foreach(schema.addColumn(table, _))

        val columnSql = declaredColumns.map(column => s"[${column.name}]").mkString(", ")
        (table, s"CREATE TABLE [$table] ($columnSql);")
      } catch {
        case t: Throwable =>
          throw CommandException(s"CREATE TABLE - Unable to find TABLE NAME or COLUMNS in '$rawSql'",
            t, rawSql, rows)
      }

    if (schema.shouldExcludeTable(tableName)) {
      log.info(s"Skipping '$sql'")
    } else {
      try {
        connection.execute(sql)
      } catch {
        case e: SQLException =>
          throw CommandException("Create Table", e, sql, rows)
      }
    }
  }

}

-------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/dsl/DSL.scala:
--------------------------------------------------------------------------------

package com.github.caiiiycuk.pg2sqlite.dsl

import scala.annotation.tailrec
import com.github.caiiiycuk.pg2sqlite.schema.Column

class DSL(line: String) {

  import DSL._

  def dropBraces: String =
    dropBraces(line.toIterator)

  // Contents of every top-level (...) group found in the line, in order.
  def takeBraces: List[String] = {
    takeBraces(line.toIterator)
  }

  // Splits on commas that are NOT nested inside parentheses.
  def commaSplitRespectBraces: List[String] = {
    commaSplitRespectBraces(line.toIterator)
  }

  // Whitespace/punctuation-separated tokens with single and double quotes stripped.
  def tokens: List[String] = {
    line.replaceAll("\"|'","").split("""\s|:|,|\(|\)""").map(_.trim).filterNot(_.isEmpty).toList
  }

  // Parses a CREATE TABLE / CREATE INDEX column list into Column(name, rawType)
  // pairs; CONSTRAINT entries are dropped, function-call entries (to_tsvector,
  // lower, upper) resolve to the wrapped column name with no type.
  def columns: List[Column] = {
    val columns = commaSplitRespectBraces(line.toIterator).map(_.trim).filterNot(_.isEmpty)

    columns.map(_.replaceAll("\"|'", "")).flatMap { columnDefenition =>
      val partials = columnDefenition.split("""\s""")
        .map(_.trim.toLowerCase).filterNot(_.isEmpty).toList

      partials match {
        case head :: _ if head.startsWith("constraint") =>
          None
        case head :: _ if head.startsWith("to_tsvector(") =>
          // e.g. to_tsvector('config'::regconfig, content) -> last token in braces
          val name = columnDefenition.takeBraces.head.tokens.last
          Some(Column(name, None))
        case head :: _ if head.startsWith("lower(") || head.startsWith("upper(") =>
          // e.g. lower((email)::text) -> first token in braces
          val name = columnDefenition.takeBraces.head.tokens.head
          Some(Column(name, None))
        case head :: sqlType :: _ =>
          Some(Column(head, Some(sqlType)))
        case head :: Nil =>
          Some(Column(head, None))
        case _ =>
          None
      }
    }
  }

  // Accumulates characters while inside braces; each time the outermost brace
  // closes, the accumulated group is pushed onto buff.
  @tailrec
  private def takeBraces(line: Iterator[Char], nesting: Int = 0,
    acc: String = "", buff: List[String] = Nil): List[String] =
    if (line.hasNext) {
      val head = line.next

      // Keep the char when inside a group (the closing brace of depth 1 excluded).
      val newAcc = if (nesting > 1 || (nesting > 0 && head != ')')) {
        acc + head
      } else {
        acc
      }

      if (head == '(') {
        takeBraces(line, nesting + 1, newAcc, buff)
      } else if (head == ')' && nesting == 1) {
        takeBraces(line, nesting - 1, "", newAcc :: buff)
      } else if (head == ')') {
        takeBraces(line, nesting - 1, newAcc, buff)
      } else {
        takeBraces(line, nesting, newAcc, buff)
      }
    } else if (acc.nonEmpty) {
      // Unterminated group at end of input is still returned.
      (acc :: buff).reverse
    } else {
      buff.reverse
    }

  // Keeps only characters that are outside any (...) group.
  @tailrec
  private def dropBraces(line: Iterator[Char], nesting: Int = 0, buff: String = ""): String =
    if (line.hasNext) {
      val head = line.next

      if (head == '(') {
        dropBraces(line, nesting + 1, buff)
      } else if (head == ')') {
        dropBraces(line, nesting - 1, buff)
      } else if (nesting == 0) {
        dropBraces(line, nesting, buff + head)
      } else {
        dropBraces(line, nesting, buff)
      }
    } else {
      buff
    }

  // Comma-split that treats commas inside braces as ordinary characters.
  @tailrec
  private def commaSplitRespectBraces(line: Iterator[Char], nesting: Int = 0,
    acc: String = "", buff: List[String] = Nil): List[String] =
    if (line.hasNext) {
      val head = line.next

      if (head == '(') {
        commaSplitRespectBraces(line, nesting + 1, acc + head, buff)
      } else if (head == ')') {
        commaSplitRespectBraces(line, nesting - 1, acc + head, buff)
      } else if (head == ',' && nesting == 0) {
        commaSplitRespectBraces(line, nesting, "", acc :: buff)
      } else {
        commaSplitRespectBraces(line, nesting, acc + head, buff)
      }
    } else if (acc.nonEmpty) {
      (acc :: buff).reverse
    } else {
      buff.reverse
    }

}

object DSL {

  // Pimps every String with the parsing helpers above.
  implicit def toDSLClass(line: String): DSL = {
    new DSL(line)
  }

}

-------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/iterator/Line.scala:
--------------------------------------------------------------------------------

package com.github.caiiiycuk.pg2sqlite.iterator

// A single dump line together with its 1-based position in the source file.
case class Line(num: Int, text: String) {
  def startsWith(value: String) =
    text.startsWith(value)

  override def toString(): String = text
}

-------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/iterator/LineIterator.scala:
--------------------------------------------------------------------------------

package com.github.caiiiycuk.pg2sqlite.iterator

import java.io.FileReader
import java.io.BufferedReader
import java.io.Closeable
import java.io.File
import scala.collection.TraversableOnce.flattenTraversableOnce

/**
 * Iterator over dump lines that also exposes how much of the underlying
 * file has been consumed.
 */
trait LineIterator extends Iterator[Line] with Closeable {
  // Number of chars read from the underlying file so far.
  def readed: Long
}

/**
 * Line-buffered file reader that counts consumed chars and yields each line
 * wrapped in Option (None never occurs before exhaustion; Option is used so
 * the caller can `flatten`).
 */
class FileOptionStringIterator(file: File) extends Iterator[Option[String]] with Closeable {

  var readed = 0L

  private val reader = new FileReader(file) {
    override def read(buf: Array[Char], off: Int, len: Int) = {
      val count = super.read(buf, off, len)
      // BUG FIX: Reader.read returns -1 at end of stream; the previous code
      // added it unconditionally, silently decrementing the counter at EOF.
      // NOTE(review): this counts chars, not bytes — assumed fine for progress.
      if (count > 0) {
        readed += count
      }
      count
    }
  }

  private val bufferedReader = new BufferedReader(reader)

  // One-line lookahead; None once readLine returns null at EOF.
  private var current = Option(bufferedReader.readLine())

  override def hasNext: Boolean = {
    current.nonEmpty
  }

  override def next(): Option[String] = {
    val value = current
    current = Option(bufferedReader.readLine())
    value
  }

  override def close(): Unit = {
    bufferedReader.close
  }

}

object LineIterator {
  /** Builds a LineIterator over `file`, numbering lines from 1. */
  def apply(file: File) = {
    val iterator = new FileOptionStringIterator(file)
    val flatIterator = iterator.flatten.zipWithIndex.map {
      case (text, index) =>
        Line(index + 1, text)
    }

    new LineIterator {
      override def hasNext: Boolean = flatIterator.hasNext
      override def next(): Line = flatIterator.next()
      override def close = iterator.close
      override def readed: Long = iterator.readed
    }
  }
}

-------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/schema/Column.scala:
--------------------------------------------------------------------------------

package com.github.caiiiycuk.pg2sqlite.schema

import java.sql.Types

object Column {
  // Regex fragment -> java.sql.Types constant. First match wins, so order
  // matters: e.g. "timestamp" must be tested before "time".
  val TYPE_DETECTORS = List(
    ("""boolean""".r -> Types.BOOLEAN),
    ("""int""".r -> Types.BIGINT),
    ("""float""".r -> Types.DOUBLE),
    ("""numeric""".r -> Types.NUMERIC),
    ("""bytea""".r -> Types.BLOB),
    ("""geometry""".r -> Types.BLOB),
    ("""timestamp""".r -> Types.TIMESTAMP),
    ("""time""".r -> Types.TIME),
    ("""date""".r -> Types.DATE),
    ("""char""".r -> Types.VARCHAR),
    ("""text""".r -> Types.VARCHAR))
}

// A column as declared in the dump: its name plus the raw postgres type token.
case class Column(name: String, sqlType: Option[String]) {

  import Column._

  // java.sql.Types constant detected from the raw type, if any detector matches.
  lazy val typeConstant = sqlType.map {
    sqlType =>
      val nativeType = TYPE_DETECTORS.find {
        case (regex, _) =>
          regex.findFirstIn(sqlType).isDefined
      }

      nativeType.map(_._2)
  }.flatten

}

-------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/schema/Schema.scala:
--------------------------------------------------------------------------------

package com.github.caiiiycuk.pg2sqlite.schema

import java.sql.Types

import scala.collection.mutable.Map

// Mutable registry of tables and columns collected from CREATE TABLE
// statements; table and column names are stored lower-cased.
class Schema(excludeTables: Set[String] = Set("sqlite_stat")) {

  protected case class Table(columns: Map[String, Column] = Map.empty)

  val tables: Map[String, Table] = Map.empty

  // Registers a column, creating the table entry on first sight.
  def addColumn(tableName: String, column: Column) = {
    val loweredTableName = tableName.toLowerCase
    val table = tables.get(loweredTableName).getOrElse {
      val table = Table()
      tables += ((loweredTableName, table))
      table
    }
    table.columns += ((column.name.toLowerCase, column))
  }

  def columns(tableName: String) = {
    tables.get(tableName.toLowerCase).map(_.columns).getOrElse(Map.empty)
  }

  // Maps 1-based bind-parameter index -> java.sql.Types constant for the
  // requested columns of the table.
  def columnsToTypeConstants(tableName: String, columns: List[String]): scala.collection.immutable.Map[Int, Int] = {
    tables.get(tableName.toLowerCase).map { table =>
| columns.zipWithIndex.flatMap { 30 | case (column, index) => 31 | table.columns.get(column).flatMap { column => 32 | column.typeConstant.map((index + 1, _)) 33 | } 34 | }.toMap 35 | }.getOrElse(scala.collection.immutable.Map.empty) 36 | } 37 | 38 | def shouldExcludeTable(table: String) = { 39 | excludeTables.contains(table.toLowerCase) 40 | } 41 | 42 | } 43 | -------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/values/LineToValues.scala: -------------------------------------------------------------------------------- 1 | package com.github.caiiiycuk.pg2sqlite.values 2 | 3 | import java.sql.Types 4 | import java.util.Formatter.DateTime 5 | import java.text.SimpleDateFormat 6 | import java.util.Date 7 | 8 | object LineToValues { 9 | 10 | val DOUBLE = """^\d+\.\d+$""".r 11 | val INTEGER = """^\d+$""".r 12 | 13 | val SIMPLE_TIMESTAMP_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss") 14 | val SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd") 15 | val SIMPLE_TIME_FORMAT = new SimpleDateFormat("HH:mm:ss") 16 | 17 | val FORMATTER = Map(Types.DATE -> SIMPLE_DATE_FORMAT, 18 | Types.TIME -> SIMPLE_TIME_FORMAT, 19 | Types.TIMESTAMP -> SIMPLE_TIMESTAMP_FORMAT) 20 | 21 | val NO_HEX_DIGITS = """[^0-9A-Fa-f]""".r 22 | 23 | def toValues(line: String)(implicit indexToType: Map[Int, Int]): List[Value] = { 24 | val parts = line.split("\t").map(_.trim) 25 | parts.zipWithIndex.map { 26 | case (value, index) => 27 | toValue(index + 1, value) 28 | }.toList 29 | } 30 | 31 | def toValue(index: Int, value: String)(implicit indexToType: Map[Int, Int]) = { 32 | if (value == """\N""") { 33 | NullValue(index, indexToType.get(index)) 34 | } else { 35 | indexToType.get(index).map { sqlType => 36 | toValueWithKnownType(index, value, sqlType) 37 | }.getOrElse { 38 | value match { 39 | case DOUBLE(_*) => 40 | toDoubleWithStringFallback(index, value) 41 | case INTEGER(_*) => 42 | toIntegerWithDoubleFallback(index, 
value) 43 | case _ => 44 | StringValue(index, value) 45 | } 46 | } 47 | } 48 | } 49 | 50 | def toValueWithKnownType(index: Int, value: String, sqlType: Int) = { 51 | sqlType match { 52 | case Types.BIGINT => 53 | toIntegerWithDoubleFallback(index, value) 54 | case Types.DOUBLE | Types.NUMERIC => 55 | toDoubleWithStringFallback(index, value) 56 | case Types.VARCHAR => 57 | StringValue(index, value) 58 | case Types.BOOLEAN => 59 | BooleanValue(index, value.toLowerCase != "f") 60 | case Types.TIMESTAMP | Types.TIME | Types.DATE => 61 | val date = toDate(value, sqlType).getOrElse { 62 | throw new ValueParseException(s"[COLUMN#${index}] Doesn`t know how to convert string '$value', to timestamp") 63 | } 64 | DateValue(index, date, sqlType) 65 | case Types.BLOB => 66 | BlobValue(index, hex2bytes(value)) 67 | case _ => 68 | throw new ValueParseException(s"[COLUMN#${index}] Doesn`t know how to convert string '$value', to sql type '$sqlType'") 69 | } 70 | } 71 | 72 | private def toDate(value: String, sqlType: Int): Option[Date] = { 73 | val formatter = FORMATTER(sqlType) 74 | 75 | try { 76 | Some(formatter.parse(value.take(formatter.toPattern().length))) 77 | } catch { 78 | case t: Throwable => 79 | None 80 | } 81 | } 82 | 83 | private def toIntegerWithDoubleFallback(index: Int, value: String) = { 84 | try { 85 | IntegerValue(index, value.toLong) 86 | } catch { 87 | case e: NumberFormatException => 88 | toDoubleWithStringFallback(index, value) 89 | } 90 | } 91 | 92 | private def toDoubleWithStringFallback(index: Int, value: String) = { 93 | try { 94 | RealValue(index, value.toDouble) 95 | } catch { 96 | case e: NumberFormatException => 97 | StringValue(index, value) 98 | } 99 | } 100 | 101 | private def hex2bytes(value: String): Array[Byte] = { 102 | if (value.length % 2 != 0 || NO_HEX_DIGITS.findFirstIn(value).isDefined) { 103 | value.getBytes 104 | } else { 105 | javax.xml.bind.DatatypeConverter.parseHexBinary(value) 106 | } 107 | } 108 | 109 | } 110 | 
-------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/values/Value.scala:
--------------------------------------------------------------------------------

package com.github.caiiiycuk.pg2sqlite.values

import java.sql.PreparedStatement
import java.sql.Types
import java.util.Date

// A single bind value: knows its 1-based parameter index and how to bind
// itself onto a PreparedStatement.
abstract class Value(index: Int) {
  def apply(statement: PreparedStatement)
}

case class NullValue(index: Int, sqlType: Option[Int]) extends Value(index) {
  def apply(statement: PreparedStatement) {
    // setNull requires a concrete type; BIGINT is used when none was detected.
    statement.setNull(index, sqlType.getOrElse(Types.BIGINT))
  }
}

case class BooleanValue(index: Int, value: Boolean) extends Value(index) {
  def apply(statement: PreparedStatement) {
    statement.setBoolean(index, value)
  }
}

case class RealValue(index: Int, value: Double) extends Value(index) {
  def apply(statement: PreparedStatement) {
    statement.setDouble(index, value)
  }
}

case class IntegerValue(index: Int, value: Long) extends Value(index) {
  def apply(statement: PreparedStatement) {
    statement.setLong(index, value)
  }
}

case class StringValue(index: Int, value: String) extends Value(index) {
  def apply(statement: PreparedStatement) {
    statement.setString(index, value)
  }
}

case class BlobValue(index: Int, value: Array[Byte]) extends Value(index) {
  def apply(statement: PreparedStatement) {
    statement.setBytes(index, value)
  }
}

// Binds through the JDBC accessor matching dateType (DATE/TIME/TIMESTAMP).
case class DateValue(index: Int, value: Date, dateType: Int) extends Value(index) {
  def apply(statement: PreparedStatement) {
    dateType match {
      case Types.DATE =>
        statement.setDate(index, new java.sql.Date(value.getTime))
      case Types.TIME =>
        statement.setTime(index, new java.sql.Time(value.getTime))
      case _ =>
        statement.setTimestamp(index, new
java.sql.Timestamp(value.getTime))
    }
  }
}

-------------------------------------------------------------------------------- /src/main/scala/com/github/caiiiycuk/pg2sqlite/values/ValueParseException.scala:
--------------------------------------------------------------------------------

package com.github.caiiiycuk.pg2sqlite.values

// Raised when a COPY field cannot be converted to its declared SQL type.
class ValueParseException(message: String) extends Exception(message)

-------------------------------------------------------------------------------- /src/test/scala/com/github/caiiiycuk/pg2sqlite/dsl/DSLTest.scala:
--------------------------------------------------------------------------------

package com.github.caiiiycuk.pg2sqlite.dsl

import org.scalatest.FlatSpec
import org.scalatest.Matchers
import com.github.caiiiycuk.pg2sqlite.dsl.DSL._
import com.github.caiiiycuk.pg2sqlite.schema.Column

// Unit tests for the string-parsing DSL (brace handling, tokenizing,
// column extraction).
class DslTest extends FlatSpec with Matchers {

  "DSL" should "drop braces from line" in {
    val TEST_STRING = """
id bigint DEFAULT nextval('hibernate_sequence'::regclass) NOT NULL,
url text,
ident character varying(20) DEFAULT "substring"(upper(md5((((999999999)::double precision * random()))::text)), 1, 8) NOT NULL,
created_at timestamp without time zone DEFAULT now()
"""

    TEST_STRING.dropBraces should equal("""
id bigint DEFAULT nextval NOT NULL,
url text,
ident character varying DEFAULT "substring" NOT NULL,
created_at timestamp without time zone DEFAULT now
""")
  }

  "DSL" should "take columns parts" in {
    val TEST_STRING = """
insert into some(a, b, c) values ("a", 2, true);
"""

    TEST_STRING.takeBraces should equal(List(
      "a, b, c", """"a", 2, true"""))
  }

  "DSL" should "extract tokens" in {
    val TEST_STRING = """
insert(strange text) into(some buffer) table
"""

    TEST_STRING.tokens should equal(List("insert", "strange", "text", "into",
      "some", "buffer", "table"))
  }

  "DSL" should "extract columns with type" in {
    val COLUMNS = """
id bigint DEFAULT nextval('hibernate_sequence'::regclass) NOT NULL,
url text,
ident character varying(20) DEFAULT "substring"(upper(md5((((999999999)::double precision * random()))::text)), 1, 8) NOT NULL,
created_at timestamp without time zone DEFAULT now()
"""

    COLUMNS.columns should equal(
      List(Column("id", Some("bigint")),
        Column("url", Some("text")),
        Column("ident", Some("character")),
        Column("created_at", Some("timestamp"))))
  }

  "DSL" should "exclude keywords (CONSTRAINTS, etc.) from columns list" in {
    val COLUMNS = """
id integer DEFAULT nextval('hibernate_sequence'::regclass) NOT NULL,
location geometry,
owner_geoobject_id bigint,
CONSTRAINT enforce_dims_location CHECK ((st_ndims(location) = 2)),
CONSTRAINT enforce_geotype_location CHECK (((geometrytype(location) = 'POLYGON'::text)
OR (location IS NULL))),
CONSTRAINT enforce_srid_location CHECK ((st_srid(location) = 3395))
"""

    COLUMNS.columns should equal(
      List(Column("id", Some("integer")),
        Column("location", Some("geometry")),
        Column("owner_geoobject_id", Some("bigint"))))
  }

  "DSL" should "get column name from to_tsvector function call" in {
    val COLUMNS = "to_tsvector('libstemmer_serb_lat_no_diacrit'::regconfig, content)"

    COLUMNS.columns should equal(
      List(Column("content", None)))
  }

  "DSL" should "get column name from lower/upper function call" in {
    val COLUMNS = "lower((email)::text),upper((email_up)::text)"

    COLUMNS.columns should equal(
      List(Column("email", None), Column("email_up", None)))
  }

  "DSL" should "split by comma respect braces" in {
    val TEST_STRING = """
id bigint DEFAULT nextval('hibernate_sequence'::regclass) NOT NULL,
url text,
ident character varying(20) DEFAULT "substring"(upper(md5((((999999999)::double precision * random()))::text)), 1, 8) NOT NULL,
created_at timestamp without time zone DEFAULT now()
""".replaceAll("\n", "")

    val parts = TEST_STRING.commaSplitRespectBraces
    parts.length should equal(4)
    parts(0) should equal("id bigint DEFAULT nextval('hibernate_sequence'::regclass) NOT NULL")
    parts(1) should equal("url text")
    parts(2) should equal("ident character varying(20) DEFAULT \"substring\"(upper(md5((((999999999)::double precision * random()))::text)), 1, 8) NOT NULL")
    parts(3) should equal("created_at timestamp without time zone DEFAULT now()")
  }
}

-------------------------------------------------------------------------------- /src/test/scala/com/github/caiiiycuk/pg2sqlite/dsl/DumperTest.scala:
--------------------------------------------------------------------------------

package com.github.caiiiycuk.pg2sqlite.dsl

import org.scalatest.FlatSpec
import org.scalatest.Matchers
import com.github.caiiiycuk.pg2sqlite.iterator.Line
import com.github.caiiiycuk.pg2sqlite.{Connection, DumpInserter}
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}

import java.io.File

// End-to-end tests: feed dump text through DumpInserter into a real
// sqlite file and inspect the result.
class DumperTest extends FlatSpec with Matchers with BeforeAndAfter {

  val dbFile = new File("test.db")

  private final val DATE_DUMP =
    """
      |CREATE TABLE test (
      |    current timestamp without time zone NOT NULL
      |);
      |
      |COPY test (current) FROM stdin;
      |2024-05-06 15:14:12
      |\.
      |""".stripMargin

  // Fresh sqlite connection against a clean test.db for the given date class.
  private def makeConnection(dateClass: String = Connection.DEFAULT_DATE_CLASS) = {
    if (dbFile.exists()) {
      dbFile.delete()
    }

    Connection.sqlite(dbFile, dateClass)
  }

  // Remove the database file after every test.
  after {
    new File("test.db").delete()
  }

  "dumper" should "generate db from test-case of issue#11" in {
    val connection = makeConnection()
    val inserter = new DumpInserter(connection)
    val dump =
      """
        |CREATE TYPE product_type AS ENUM (
        |    'Material',
        |    'Digital'
        |);
        |
        |CREATE TABLE product (
        |    client_id integer NOT NULL,
        |    order_product integer,
        |    upper_price integer NOT NULL,
        |    lower_price integer NOT NULL,
        |    type product_type NOT NULL,
        |    product_id integer NOT NULL--,
        |    CONSTRAINT product_check CHECK (((lower_price > upper_price) AND (upper_price <= 200))),
        |    CONSTRAINT product_order_product_check CHECK ((order_product > 0)),
        |    CONSTRAINT product_upper_price_check CHECK ((upper_price >= 0))
        |);
        |""".stripMargin
      .split("\n")
      .zipWithIndex
      .map {
        case (text, num) =>
          Line(num, text)
      }

    inserter.insert(dump.iterator)
    connection.close
  }

  "dumper" should "should respect date class (Default)" in {
    val connection = makeConnection()
    val inserter = new DumpInserter(connection)
    val dump = DATE_DUMP.split("\n")
      .zipWithIndex
      .map {
        case (text, num) =>
          Line(num, text)
      }

    inserter.insert(dump.iterator)
    connection.withStatement { statment =>
      val rs = statment.executeQuery("SELECT * FROM test")
      rs.next() should equal(true)
      // Default date class stores the timestamp as a positive numeric value.
      rs.getLong(1) > 0 should equal(true)
      rs.close()
    }
    connection.close
  }

  "dumper" should "should respect date class (text)" in {
    val connection = makeConnection(Connection.TEXT_DATE_CLASS)
    val inserter = new DumpInserter(connection)
    val dump = DATE_DUMP.split("\n")
      .zipWithIndex
      .map {
        case (text, num) =>
          Line(num, text)
      }

    inserter.insert(dump.iterator)
    connection.withStatement { statment =>
      val rs = statment.executeQuery("SELECT * FROM test")
      rs.next() should equal(true)
      // Text date class stores an ISO-like string with milliseconds.
      rs.getString(1) should equal("2024-05-06 15:14:12.000")
      rs.close()
    }
    connection.close
  }

  "dumper" should "should respect date class (real)" in {
    val connection = makeConnection(Connection.REAL_DATE_CLASS)
    val inserter = new DumpInserter(connection)
    val dump = DATE_DUMP.split("\n")
      .zipWithIndex
      .map {
        case (text, num) =>
          Line(num, text)
      }

    inserter.insert(dump.iterator)
    connection.withStatement { statment =>
      val rs = statment.executeQuery("SELECT * FROM test")
      rs.next() should equal(true)
      rs.getDouble(1) > 0 should equal(true)
      rs.close()
    }
    connection.close
  }

}

--------------------------------------------------------------------------------