├── project
│   ├── build.properties
│   ├── plugins.sbt
│   └── Utils.scala
├── version.sbt
├── example
├── native-libs
│ ├── libsqlite4java-osx-1.0.392.dylib
│ ├── sqlite4java-win32-x64-1.0.392.dll
│ ├── sqlite4java-win32-x86-1.0.392.dll
│ ├── libsqlite4java-linux-amd64-1.0.392.so
│ └── libsqlite4java-linux-i386-1.0.392.so
├── src
│ ├── test
│ │ ├── resources
│ │ │ ├── application.conf
│ │ │ └── logback-test.xml
│ │ └── scala
│ │ │ └── com
│ │ │ └── github
│ │ │ └── j5ik2o
│ │ │ └── dddbase
│ │ │ └── example
│ │ │ └── repository
│ │ │ ├── IdGenerator.scala
│ │ │ ├── SpecSupport.scala
│ │ │ ├── util
│ │ │ ├── ScalaFuturesSupportSpec.scala
│ │ │ ├── JdbcSpecSupport.scala
│ │ │ ├── SkinnySpecSupport.scala
│ │ │ ├── RandomPortSupport.scala
│ │ │ ├── FlywayWithMySQLSpecSupport.scala
│ │ │ └── Slick3SpecSupport.scala
│ │ │ ├── airframe
│ │ │ └── AirframeSpec.scala
│ │ │ ├── skinny
│ │ │ ├── UserMessageRepositoryBySkinnyImplSpec.scala
│ │ │ └── UserAccountRepositoryBySkinnyImplSpec.scala
│ │ │ ├── slick
│ │ │ ├── UserMessageRepositoryBySlickImplSpec.scala
│ │ │ └── UserAccountRepositoryBySlickImplSpec.scala
│ │ │ ├── free
│ │ │ └── UserAccountRepositoryByFreeSpec.scala
│ │ │ ├── memory
│ │ │ └── UserAccountRepositoryOnMemorySpec.scala
│ │ │ ├── memcached
│ │ │ └── UserAccountRepositoryOnMemcachedSpec.scala
│ │ │ ├── dynamodb
│ │ │ └── UserAccountRepositoryOnDynamoDBSpec.scala
│ │ │ └── redis
│ │ │ └── UserAccountRepositoryOnRedisSpec.scala
│ └── main
│ │ └── scala
│ │ └── com
│ │ └── github
│ │ └── j5ik2o
│ │ └── dddbase
│ │ └── example
│ │ ├── repository
│ │ ├── slick
│ │ │ ├── UserAccountRepositoryBySlickImpl.scala
│ │ │ ├── UserMessageRepositoryBySlickImpl.scala
│ │ │ ├── AbstractUserMessageRepositoryBySlick.scala
│ │ │ └── AbstractUserAccountRepositoryBySlick.scala
│ │ ├── package.scala
│ │ ├── free
│ │ │ ├── UserRepositoryDSL.scala
│ │ │ └── UserAccountRepositoryByFree.scala
│ │ ├── UserMessageRepository.scala
│ │ ├── dynamodb
│ │ │ ├── UserMessageRepositoryOnDynamoDB.scala
│ │ │ └── UserAccountRepositoryOnDynamoDB.scala
│ │ ├── redis
│ │ │ └── UserAccountRepositoryOnRedis.scala
│ │ ├── memcached
│ │ │ └── UserAccountRepositoryOnMemcached.scala
│ │ ├── skinny
│ │ │ ├── UserMessageRepositoryBySkinny.scala
│ │ │ └── UserAccountRepositoryBySkinny.scala
│ │ ├── memory
│ │ │ └── UserAccountRepositoryOnMemory.scala
│ │ └── UserAccountRepository.scala
│ │ ├── model
│ │ ├── Status.scala
│ │ ├── UserMessage.scala
│ │ └── UserAccount.scala
│ │ └── dao
│ │ ├── UserMessage.scala
│ │ ├── UserAccount.scala
│ │ ├── memcached
│ │ └── UserAccountComponent.scala
│ │ ├── redis
│ │ └── UserAccountComponent.scala
│ │ ├── memory
│ │ └── UserAccountComponent.scala
│ │ └── dynamodb
│ │ ├── UserAccountComponent.scala
│ │ └── UserMesssageComponent.scala
└── templates
│ ├── UserAccount_template.ftl
│ └── UserMessage_template.ftl
├── core
└── src
│ └── main
│ └── scala
│ └── com
│ └── github
│ └── j5ik2o
│ └── dddbase
│ ├── AggregateId.scala
│ ├── AggregatesChunk.scala
│ ├── AggregateLongId.scala
│ ├── AggregateIO.scala
│ ├── AggregateStringId.scala
│ ├── AggregateAllReader.scala
│ ├── AggregateNotFoundException.scala
│ ├── AggregateSingleReader.scala
│ ├── AggregateSingleWriter.scala
│ ├── AggregateSingleSoftDeletable.scala
│ ├── AggregateMultiReader.scala
│ ├── AggregateMultiWriter.scala
│ ├── AggregateSingleHardDeletable.scala
│ ├── AggregateMultiHardDeletable.scala
│ ├── AggregateMultiSoftDeletable.scala
│ ├── AggregateChunkReader.scala
│ └── Aggregate.scala
├── jdbc
├── slick
│ └── src
│ │ └── main
│ │ └── scala
│ │ └── com
│ │ └── github
│ │ └── j5ik2o
│ │ └── dddbase
│ │ └── slick
│ │ ├── AggregateBaseWriteFeature.scala
│ │ ├── AggregateBaseReadFeature.scala
│ │ ├── AggregateAllReadFeature.scala
│ │ ├── AggregateSingleHardDeleteFeature.scala
│ │ ├── AggregateSingleWriteFeature.scala
│ │ ├── AggregateMultiReadFeature.scala
│ │ ├── AggregateMultiHardDeleteFeature.scala
│ │ ├── AggregateMultiSoftDeleteFeature.scala
│ │ ├── AggregateIOBaseFeature.scala
│ │ ├── AggregateChunkReadFeature.scala
│ │ ├── AggregateSingleReadFeature.scala
│ │ ├── AggregateMultiWriteFeature.scala
│ │ ├── SlickDaoSupport.scala
│ │ └── AggregateSingleSoftDeleteFeature.scala
└── skinny
│ └── src
│ └── main
│ └── scala
│ └── com
│ └── github
│ └── j5ik2o
│ └── dddbase
│ └── skinny
│ ├── AggregateBaseWriteFeature.scala
│ ├── AggregateBaseReadFeature.scala
│ ├── AggregateIOBaseFeature.scala
│ ├── AggregateSingleHardDeleteFeature.scala
│ ├── AggregateSingleWriteFeature.scala
│ ├── AggregateAllReadFeature.scala
│ ├── AggregateMultiHardDeleteFeature.scala
│ ├── AggregateMultiSoftDeleteFeature.scala
│ ├── AggregateMultiReadFeature.scala
│ ├── AggregateSingleReadFeature.scala
│ ├── AggregateMultiWriteFeature.scala
│ ├── AggregateSingleSoftDeleteFeature.scala
│ ├── AggregateChunkReadFeature.scala
│ └── SkinnyDaoSupport.scala
├── nosql
├── memory
│ └── src
│ │ └── main
│ │ └── scala
│ │ └── com
│ │ └── github
│ │ └── j5ik2o
│ │ └── dddbase
│ │ └── memory
│ │ ├── AggregateBaseReadFeature.scala
│ │ ├── AggregateBaseWriteFeature.scala
│ │ ├── AggregateIOBaseFeature.scala
│ │ ├── AggregateSingleHardDeleteFeature.scala
│ │ ├── AggregateAllReadFeature.scala
│ │ ├── AggregateSingleWriteFeature.scala
│ │ ├── AggregateMultiHardDeleteFeature.scala
│ │ ├── AggregateMultiSoftDeleteFeature.scala
│ │ ├── AggregateMultiReadFeature.scala
│ │ ├── AggregateMultiWriteFeature.scala
│ │ ├── AggregateSingleSoftDeleteFeature.scala
│ │ ├── AggregateSingleReadFeature.scala
│ │ └── MemoryDaoSupport.scala
├── dynamodb
│ └── src
│ │ └── main
│ │ └── scala
│ │ └── com
│ │ └── github
│ │ └── j5ik2o
│ │ └── dddbase
│ │ └── dynamodb
│ │ ├── AggregateBaseReadFeature.scala
│ │ ├── AggregateBaseWriteFeature.scala
│ │ ├── AggregateSingleHardDeleteFeature.scala
│ │ ├── AggregateIOBaseFeature.scala
│ │ ├── AggregateSingleWriteFeature.scala
│ │ ├── AggregateMultiHardDeleteFeature.scala
│ │ ├── AggregateMultiSoftDeleteFeature.scala
│ │ ├── AggregateMultiReadFeature.scala
│ │ ├── AggregateMultiWriteFeature.scala
│ │ ├── AggregateSingleReadFeature.scala
│ │ ├── AggregateSingleSoftDeleteFeature.scala
│ │ └── DynamoDBDaoSupport.scala
├── redis
│ └── src
│ │ └── main
│ │ └── scala
│ │ └── com
│ │ └── github
│ │ └── j5ik2o
│ │ └── dddbase
│ │ └── redis
│ │ ├── AggregateBaseWriteFeature.scala
│ │ ├── AggregateBaseReadFeature.scala
│ │ ├── AggregateIOBaseFeature.scala
│ │ ├── AggregateSingleHardDeleteFeature.scala
│ │ ├── AggregateSingleWriteFeature.scala
│ │ ├── AggregateMultiHardDeleteFeature.scala
│ │ ├── AggregateMultiSoftDeleteFeature.scala
│ │ ├── AggregateMultiReadFeature.scala
│ │ ├── AggregateMultiWriteFeature.scala
│ │ ├── AggregateSingleSoftDeleteFeature.scala
│ │ ├── AggregateSingleReadFeature.scala
│ │ └── RedisDaoSupport.scala
└── memcached
│ └── src
│ └── main
│ └── scala
│ └── com
│ └── github
│ └── j5ik2o
│ └── dddbase
│ └── memcached
│ ├── AggregateBaseReadFeature.scala
│ ├── AggregateBaseWriteFeature.scala
│ ├── AggregateIOBaseFeature.scala
│ ├── AggregateSingleHardDeleteFeature.scala
│ ├── AggregateSingleWriteFeature.scala
│ ├── AggregateMultiHardDeleteFeature.scala
│ ├── AggregateMultiSoftDeleteFeature.scala
│ ├── AggregateMultiReadFeature.scala
│ ├── AggregateMultiWriteFeature.scala
│ ├── AggregateSingleSoftDeleteFeature.scala
│ ├── AggregateSingleReadFeature.scala
│ └── MemcachedDaoSupport.scala
├── .gitignore
├── release.sbt
├── .scalafmt.conf
├── LICENSE
├── .circleci
│   └── config.yml
├── flyway
└── src
│ └── test
│ └── resources
│ └── rdb-migration
│ └── V1__Create_Tables.sql
├── scalastyle-config.xml
└── README.md
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=1.2.8
--------------------------------------------------------------------------------
/version.sbt:
--------------------------------------------------------------------------------
1 | version in ThisBuild := "1.0.28-SNAPSHOT"
2 |
--------------------------------------------------------------------------------
/example/native-libs/libsqlite4java-osx-1.0.392.dylib:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/j5ik2o/scala-ddd-base/HEAD/example/native-libs/libsqlite4java-osx-1.0.392.dylib
--------------------------------------------------------------------------------
/example/native-libs/sqlite4java-win32-x64-1.0.392.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/j5ik2o/scala-ddd-base/HEAD/example/native-libs/sqlite4java-win32-x64-1.0.392.dll
--------------------------------------------------------------------------------
/example/native-libs/sqlite4java-win32-x86-1.0.392.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/j5ik2o/scala-ddd-base/HEAD/example/native-libs/sqlite4java-win32-x86-1.0.392.dll
--------------------------------------------------------------------------------
/example/native-libs/libsqlite4java-linux-amd64-1.0.392.so:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/j5ik2o/scala-ddd-base/HEAD/example/native-libs/libsqlite4java-linux-amd64-1.0.392.so
--------------------------------------------------------------------------------
/example/native-libs/libsqlite4java-linux-i386-1.0.392.so:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/j5ik2o/scala-ddd-base/HEAD/example/native-libs/libsqlite4java-linux-i386-1.0.392.so
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/AggregateId.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | trait AggregateId {
4 | type IdType
5 | val value: IdType
6 | }
7 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/AggregatesChunk.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | case class AggregatesChunk[A <: Aggregate](index: Long, aggregates: Seq[A])
4 |
--------------------------------------------------------------------------------
/example/src/test/resources/application.conf:
--------------------------------------------------------------------------------
1 | akka {
2 | loggers = ["akka.event.slf4j.Slf4jLogger"]
3 | loglevel = "DEBUG"
4 | logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
5 | }
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/AggregateLongId.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | trait AggregateLongId extends AggregateId {
4 |
5 | override type IdType = Long
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/AggregateIO.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | trait AggregateIO[M[_]] {
4 | type AggregateType <: Aggregate
5 | type IdType <: AggregateId
6 | }
7 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/AggregateStringId.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | trait AggregateStringId extends AggregateId {
4 |
5 | override type IdType = String
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/AggregateAllReader.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | trait AggregateAllReader[M[_]] extends AggregateIO[M] {
4 |
5 | def resolveAll: M[Seq[AggregateType]]
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/AggregateNotFoundException.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | case class AggregateNotFoundException(id: AggregateId) extends Exception(s"Aggregate is not found: $id")
4 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/AggregateSingleReader.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | trait AggregateSingleReader[M[_]] extends AggregateIO[M] {
4 | def resolveById(id: IdType): M[AggregateType]
5 | }
6 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/AggregateSingleWriter.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | trait AggregateSingleWriter[M[_]] extends AggregateIO[M] {
4 |
5 | def store(aggregate: AggregateType): M[Long]
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/AggregateSingleSoftDeletable.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | trait AggregateSingleSoftDeletable[M[_]] { this: AggregateIO[M] =>
4 |
5 | def softDelete(id: IdType): M[Long]
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/AggregateMultiReader.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | trait AggregateMultiReader[M[_]] extends AggregateIO[M] {
4 |
5 | def resolveMulti(ids: Seq[IdType]): M[Seq[AggregateType]]
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/AggregateMultiWriter.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | trait AggregateMultiWriter[M[_]] extends AggregateIO[M] {
4 |
5 | def storeMulti(aggregates: Seq[AggregateType]): M[Long]
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/AggregateSingleHardDeletable.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | trait AggregateSingleHardDeletable[M[_]] { this: AggregateSingleWriter[M] =>
4 |
5 | def hardDelete(id: IdType): M[Long]
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/AggregateMultiHardDeletable.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | trait AggregateMultiHardDeletable[M[_]] { this: AggregateMultiWriter[M] =>
4 |
5 | def hardDeleteMulti(ids: Seq[IdType]): M[Long]
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/AggregateMultiSoftDeletable.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | trait AggregateMultiSoftDeletable[M[_]] {
4 | this: AggregateMultiWriter[M] =>
5 |
6 | def softDeleteMulti(ids: Seq[IdType]): M[Long]
7 |
8 | }
9 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/AggregateChunkReader.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | trait AggregateChunkReader[M[_]] extends AggregateIO[M] {
4 |
5 | def resolveMultiWithOffsetLimit(offset: Option[Long] = None, limit: Long = 100L): M[AggregatesChunk[AggregateType]]
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/jdbc/slick/src/main/scala/com/github/j5ik2o/dddbase/slick/AggregateBaseWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.slick
2 | import monix.eval.Task
3 |
4 | trait AggregateBaseWriteFeature extends AggregateIOBaseFeature {
5 |
6 | protected def convertToRecord: AggregateType => Task[RecordType]
7 |
8 | }
9 |
--------------------------------------------------------------------------------
/jdbc/slick/src/main/scala/com/github/j5ik2o/dddbase/slick/AggregateBaseReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.slick
2 |
3 | import monix.eval.Task
4 |
5 | trait AggregateBaseReadFeature extends AggregateIOBaseFeature {
6 |
7 | protected def convertToAggregate: RecordType => Task[AggregateType]
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/nosql/memory/src/main/scala/com/github/j5ik2o/dddbase/memory/AggregateBaseReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memory
2 |
3 | import monix.eval.Task
4 |
5 | trait AggregateBaseReadFeature extends AggregateIOBaseFeature {
6 |
7 | protected def convertToAggregate: RecordType => Task[AggregateType]
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/nosql/memory/src/main/scala/com/github/j5ik2o/dddbase/memory/AggregateBaseWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memory
2 |
3 | import monix.eval.Task
4 |
5 | trait AggregateBaseWriteFeature extends AggregateIOBaseFeature {
6 |
7 | protected def convertToRecord: AggregateType => Task[RecordType]
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/nosql/dynamodb/src/main/scala/com/github/j5ik2o/dddbase/dynamodb/AggregateBaseReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.dynamodb
2 |
3 | import monix.eval.Task
4 |
5 | trait AggregateBaseReadFeature extends AggregateIOBaseFeature {
6 |
7 | protected def convertToAggregate: RecordType => Task[AggregateType]
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/nosql/dynamodb/src/main/scala/com/github/j5ik2o/dddbase/dynamodb/AggregateBaseWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.dynamodb
2 |
3 | import monix.eval.Task
4 |
5 | trait AggregateBaseWriteFeature extends AggregateIOBaseFeature {
6 |
7 | protected def convertToRecord: AggregateType => Task[RecordType]
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/IdGenerator.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository
2 | import java.util.concurrent.atomic.AtomicLong
3 |
4 | object IdGenerator {
5 |
6 | private val atomicLong = new AtomicLong(0L)
7 |
8 | def generateIdValue: Long = atomicLong.getAndIncrement()
9 |
10 | }
11 |
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/SpecSupport.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository
2 |
3 | trait SpecSupport {
4 |
5 | def sameAs[A](c: Traversable[A], d: Traversable[A]): Boolean = {
6 | def counts(e: Traversable[A]) = e groupBy identity mapValues (_.size)
7 | counts(c) == counts(d)
8 | }
9 |
10 | }
11 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .classpath
2 | .project
3 | .cache-main
4 | .cache-tests
5 | .settings/
6 | target/
7 | project/target
8 | .cache
9 | .idea/
10 | .cache-main
11 | .envrc
12 | bin/
13 | native
14 | *.pyc
15 | *.pem
16 | *.stackdump
17 | *.tfvars
18 | *.tfstate
19 | *.tfstate.backup
20 | *.deb
21 | *.tgz
22 | *.log
23 | .terraform/
24 | node_modules/
25 | dump.rdb
26 |
27 | .DS_Store
28 | .credentials
29 |
--------------------------------------------------------------------------------
/jdbc/skinny/src/main/scala/com/github/j5ik2o/dddbase/skinny/AggregateBaseWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.skinny
2 | import cats.data.ReaderT
3 | import monix.eval.Task
4 | import scalikejdbc.DBSession
5 |
6 | trait AggregateBaseWriteFeature extends AggregateIOBaseFeature {
7 |
8 | protected def convertToRecord: AggregateType => ReaderT[Task, DBSession, RecordType]
9 |
10 | }
11 |
--------------------------------------------------------------------------------
/nosql/memory/src/main/scala/com/github/j5ik2o/dddbase/memory/AggregateIOBaseFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memory
2 |
3 | import com.github.j5ik2o.dddbase.AggregateIO
4 | import monix.eval.Task
5 |
6 | trait AggregateIOBaseFeature extends AggregateIO[Task] {
7 | type RecordType <: MemoryDaoSupport#Record
8 | type DaoType <: MemoryDaoSupport#Dao[Task, RecordType]
9 |
10 | protected val dao: DaoType
11 | }
12 |
--------------------------------------------------------------------------------
/nosql/redis/src/main/scala/com/github/j5ik2o/dddbase/redis/AggregateBaseWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.redis
2 | import cats.data.ReaderT
3 | import com.github.j5ik2o.reactive.redis.RedisConnection
4 | import monix.eval.Task
5 |
6 | trait AggregateBaseWriteFeature extends AggregateIOBaseFeature {
7 |
8 | protected def convertToRecord: AggregateType => ReaderT[Task, RedisConnection, RecordType]
9 |
10 | }
11 |
--------------------------------------------------------------------------------
/nosql/redis/src/main/scala/com/github/j5ik2o/dddbase/redis/AggregateBaseReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.redis
2 | import cats.data.ReaderT
3 | import com.github.j5ik2o.reactive.redis.RedisConnection
4 | import monix.eval.Task
5 |
6 | trait AggregateBaseReadFeature extends AggregateIOBaseFeature {
7 |
8 | protected def convertToAggregate: RecordType => ReaderT[Task, RedisConnection, AggregateType]
9 |
10 | }
11 |
--------------------------------------------------------------------------------
/nosql/memcached/src/main/scala/com/github/j5ik2o/dddbase/memcached/AggregateBaseReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memcached
2 | import cats.data.ReaderT
3 | import com.github.j5ik2o.reactive.memcached.MemcachedConnection
4 | import monix.eval.Task
5 |
6 | trait AggregateBaseReadFeature extends AggregateIOBaseFeature {
7 |
8 | protected def convertToAggregate: RecordType => ReaderT[Task, MemcachedConnection, AggregateType]
9 |
10 | }
11 |
--------------------------------------------------------------------------------
/nosql/memcached/src/main/scala/com/github/j5ik2o/dddbase/memcached/AggregateBaseWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memcached
2 | import cats.data.ReaderT
3 | import com.github.j5ik2o.reactive.memcached.MemcachedConnection
4 | import monix.eval.Task
5 |
6 | trait AggregateBaseWriteFeature extends AggregateIOBaseFeature {
7 |
8 | protected def convertToRecord: AggregateType => ReaderT[Task, MemcachedConnection, RecordType]
9 |
10 | }
11 |
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/util/ScalaFuturesSupportSpec.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.util
2 | import org.scalatest.concurrent.ScalaFutures
3 | import org.scalatest.time.{ Seconds, Span }
4 |
5 | trait ScalaFuturesSupportSpec { this: ScalaFutures =>
6 | override implicit def patienceConfig: PatienceConfig =
7 | PatienceConfig(timeout = scaled(Span(60, Seconds)), interval = scaled(Span(1, Seconds)))
8 | }
9 |
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/util/JdbcSpecSupport.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.util
2 |
3 | import org.scalatest.concurrent.ScalaFutures
4 | import org.scalatest.time.{ Millis, Seconds, Span }
5 |
6 | trait JdbcSpecSupport extends ScalaFutures with ScalaFuturesSupportSpec {
7 | this: FlywayWithMySQLSpecSupport =>
8 | val tables: Seq[String]
9 |
10 | def jdbcPort: Int = mySQLdConfig.port.get
11 | }
12 |
--------------------------------------------------------------------------------
/jdbc/skinny/src/main/scala/com/github/j5ik2o/dddbase/skinny/AggregateBaseReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.skinny
2 |
3 | import cats.data.ReaderT
4 | import monix.eval.Task
5 | import scalikejdbc._
6 |
7 | trait AggregateBaseReadFeature extends AggregateIOBaseFeature {
8 |
9 | protected def convertToAggregate: RecordType => ReaderT[Task, DBSession, AggregateType]
10 |
11 | protected def byCondition(id: IdType): SQLSyntax
12 | protected def byConditions(ids: Seq[IdType]): SQLSyntax
13 |
14 | }
15 |
--------------------------------------------------------------------------------
/nosql/dynamodb/src/main/scala/com/github/j5ik2o/dddbase/dynamodb/AggregateSingleHardDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.dynamodb
2 |
3 | import com.github.j5ik2o.dddbase.{ AggregateSingleHardDeletable, AggregateSingleWriter }
4 | import monix.eval.Task
5 |
6 | trait AggregateSingleHardDeleteFeature extends AggregateSingleHardDeletable[Task] with AggregateBaseWriteFeature {
7 | this: AggregateSingleWriter[Task] =>
8 |
9 | override def hardDelete(id: IdType): Task[Long] = dao.delete(toRecordId(id))
10 |
11 | }
12 |
--------------------------------------------------------------------------------
/nosql/memory/src/main/scala/com/github/j5ik2o/dddbase/memory/AggregateSingleHardDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memory
2 |
3 | import com.github.j5ik2o.dddbase.{ AggregateSingleHardDeletable, AggregateSingleWriter }
4 | import monix.eval.Task
5 |
6 | trait AggregateSingleHardDeleteFeature extends AggregateSingleHardDeletable[Task] with AggregateBaseWriteFeature {
7 | this: AggregateSingleWriter[Task] =>
8 |
9 | override def hardDelete(id: IdType): Task[Long] = dao.delete(id.value.toString)
10 |
11 | }
12 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/slick/UserAccountRepositoryBySlickImpl.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.slick
2 |
3 | import com.github.j5ik2o.dddbase.slick._
4 | import slick.jdbc.JdbcProfile
5 |
6 | class UserAccountRepositoryBySlickImpl(override val profile: JdbcProfile, override val db: JdbcProfile#Backend#Database)
7 | extends AbstractUserAccountRepositoryBySlick(profile, db)
8 | with AggregateSingleSoftDeleteFeature
9 | with AggregateMultiSoftDeleteFeature
10 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/slick/UserMessageRepositoryBySlickImpl.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.slick
2 |
3 | import com.github.j5ik2o.dddbase.slick._
4 | import slick.jdbc.JdbcProfile
5 |
6 | class UserMessageRepositoryBySlickImpl(override val profile: JdbcProfile, override val db: JdbcProfile#Backend#Database)
7 | extends AbstractUserMessageRepositoryBySlick(profile, db)
8 | with AggregateSingleSoftDeleteFeature
9 | with AggregateMultiSoftDeleteFeature
10 |
--------------------------------------------------------------------------------
/nosql/dynamodb/src/main/scala/com/github/j5ik2o/dddbase/dynamodb/AggregateIOBaseFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.dynamodb
2 |
3 | import com.github.j5ik2o.dddbase.AggregateIO
4 | import monix.eval.Task
5 |
6 | trait AggregateIOBaseFeature extends AggregateIO[Task] {
7 | type RecordIdType
8 | type RecordType <: DynamoDBDaoSupport#Record[RecordIdType]
9 | type DaoType <: DynamoDBDaoSupport#Dao[Task, RecordIdType, RecordType]
10 |
11 | protected val dao: DaoType
12 | protected def toRecordId(id: IdType): RecordIdType
13 | }
14 |
--------------------------------------------------------------------------------
/nosql/dynamodb/src/main/scala/com/github/j5ik2o/dddbase/dynamodb/AggregateSingleWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.dynamodb
2 | import com.github.j5ik2o.dddbase.AggregateSingleWriter
3 | import monix.eval.Task
4 |
5 | trait AggregateSingleWriteFeature extends AggregateSingleWriter[Task] with AggregateBaseWriteFeature {
6 |
7 | override def store(aggregate: AggregateType): Task[Long] = {
8 | for {
9 | record <- convertToRecord(aggregate)
10 | result <- dao.put(record)
11 | } yield result
12 | }
13 |
14 | }
15 |
--------------------------------------------------------------------------------
/nosql/memory/src/main/scala/com/github/j5ik2o/dddbase/memory/AggregateAllReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memory
2 |
3 | import com.github.j5ik2o.dddbase.AggregateAllReader
4 | import monix.eval.Task
5 |
6 | trait AggregateAllReadFeature extends AggregateAllReader[Task] with AggregateBaseReadFeature {
7 |
8 | override def resolveAll: Task[Seq[AggregateType]] =
9 | for {
10 | results <- dao.getAll
11 | aggregates <- Task.gather(results.map(v => convertToAggregate(v)))
12 | } yield aggregates
13 |
14 | }
15 |
--------------------------------------------------------------------------------
/nosql/memory/src/main/scala/com/github/j5ik2o/dddbase/memory/AggregateSingleWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memory
2 |
3 | import com.github.j5ik2o.dddbase.AggregateSingleWriter
4 | import monix.eval.Task
5 |
/** Persists a single aggregate through the in-memory DAO. */
trait AggregateSingleWriteFeature extends AggregateSingleWriter[Task] with AggregateBaseWriteFeature {

  /** Converts the aggregate to its record form and stores it, yielding the DAO's result count. */
  override def store(aggregate: AggregateType): Task[Long] =
    convertToRecord(aggregate).flatMap(record => dao.set(record))

}
16 |
--------------------------------------------------------------------------------
/release.sbt:
--------------------------------------------------------------------------------
import sbtrelease.ReleasePlugin.autoImport.ReleaseTransformations._

// Release against every Scala version in crossScalaVersions.
releaseCrossBuild := true

// Publish PGP-signed artifacts when the release pipeline publishes.
releasePublishArtifactsAction := PgpKeys.publishSigned.value

// Ordered steps executed by the sbt `release` command.
releaseProcess := Seq[ReleaseStep](
  checkSnapshotDependencies, // fail fast if any dependency is a SNAPSHOT
  inquireVersions, // prompt for the release and next development versions
  runClean,
  setReleaseVersion,
  commitReleaseVersion,
  tagRelease,
  releaseStepCommandAndRemaining("+publishSigned"), // publish signed artifacts for all cross versions
  setNextVersion,
  commitNextVersion,
  releaseStepCommand("sonatypeReleaseAll"), // close and release the Sonatype staging repositories
  pushChanges
)
20 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/model/Status.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.model
2 |
3 | import enumeratum._
4 |
5 | import scala.collection.immutable
6 |
// Lifecycle status of an example entity; `entryName` is the persisted string form.
sealed abstract class Status(override val entryName: String) extends EnumEntry

object Status extends Enum[Status] {
  // All members in declaration order, collected by enumeratum's findValues macro.
  override def values: immutable.IndexedSeq[Status] = findValues
  case object Active extends Status("active")
  case object Suspend extends Status("suspend")
  case object Deleted extends Status("deleted")
}
15 |
--------------------------------------------------------------------------------
/nosql/dynamodb/src/main/scala/com/github/j5ik2o/dddbase/dynamodb/AggregateMultiHardDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.dynamodb
2 |
3 | import com.github.j5ik2o.dddbase.{ AggregateMultiHardDeletable, AggregateMultiWriter }
4 | import monix.eval.Task
5 |
/** Physically deletes multiple aggregates through the DynamoDB DAO. */
trait AggregateMultiHardDeleteFeature extends AggregateMultiHardDeletable[Task] with AggregateBaseReadFeature {
  this: AggregateMultiWriter[Task] with AggregateSingleHardDeleteFeature =>

  /** Maps each id to its record id and deletes the batch, yielding the DAO's result count. */
  override def hardDeleteMulti(ids: Seq[IdType]): Task[Long] = {
    val recordIds = ids.map(toRecordId(_))
    dao.deleteMulti(recordIds)
  }

}
12 |
--------------------------------------------------------------------------------
/nosql/memory/src/main/scala/com/github/j5ik2o/dddbase/memory/AggregateMultiHardDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memory
2 |
3 | import com.github.j5ik2o.dddbase.{ AggregateMultiHardDeletable, AggregateMultiWriter }
4 | import monix.eval.Task
5 |
/** Physically deletes multiple aggregates through the in-memory DAO. */
trait AggregateMultiHardDeleteFeature extends AggregateMultiHardDeletable[Task] with AggregateBaseReadFeature {
  this: AggregateMultiWriter[Task] with AggregateSingleHardDeleteFeature =>

  /** Uses each id's string form as the store key and deletes the batch. */
  override def hardDeleteMulti(ids: Seq[IdType]): Task[Long] = {
    val keys = ids.map(id => id.value.toString)
    dao.deleteMulti(keys)
  }

}
12 |
--------------------------------------------------------------------------------
/nosql/dynamodb/src/main/scala/com/github/j5ik2o/dddbase/dynamodb/AggregateMultiSoftDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.dynamodb
2 |
3 | import com.github.j5ik2o.dddbase.{ AggregateMultiSoftDeletable, AggregateMultiWriter }
4 | import monix.eval.Task
5 |
/** Logically deletes multiple aggregates through the DynamoDB DAO. */
trait AggregateMultiSoftDeleteFeature extends AggregateMultiSoftDeletable[Task] with AggregateBaseReadFeature {
  this: AggregateMultiWriter[Task] with AggregateSingleSoftDeleteFeature =>

  /** Maps each id to its record id and soft-deletes the batch, yielding the DAO's result count. */
  override def softDeleteMulti(ids: Seq[IdType]): Task[Long] = {
    val recordIds = ids.map(toRecordId(_))
    dao.softDeleteMulti(recordIds)
  }

}
12 |
--------------------------------------------------------------------------------
/nosql/memory/src/main/scala/com/github/j5ik2o/dddbase/memory/AggregateMultiSoftDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memory
2 |
3 | import com.github.j5ik2o.dddbase.{ AggregateMultiSoftDeletable, AggregateMultiWriter }
4 | import monix.eval.Task
5 |
/** Logically deletes multiple aggregates through the in-memory DAO. */
trait AggregateMultiSoftDeleteFeature extends AggregateMultiSoftDeletable[Task] with AggregateBaseReadFeature {
  this: AggregateMultiWriter[Task] with AggregateSingleSoftDeleteFeature =>

  /** Uses each id's string form as the store key and soft-deletes the batch. */
  override def softDeleteMulti(ids: Seq[IdType]): Task[Long] = {
    val keys = ids.map(id => id.value.toString)
    dao.softDeleteMulti(keys)
  }

}
12 |
--------------------------------------------------------------------------------
/nosql/dynamodb/src/main/scala/com/github/j5ik2o/dddbase/dynamodb/AggregateMultiReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.dynamodb
2 |
3 | import com.github.j5ik2o.dddbase.AggregateMultiReader
4 | import monix.eval.Task
5 |
/** Resolves multiple aggregates by id through the DynamoDB DAO. */
trait AggregateMultiReadFeature extends AggregateMultiReader[Task] with AggregateBaseReadFeature {

  /** Fetches the matching records and converts each to its aggregate via Task.gather. */
  override def resolveMulti(ids: Seq[IdType]): Task[Seq[AggregateType]] =
    dao.getMulti(ids.map(toRecordId(_))).flatMap { records =>
      Task.gather(records.map(convertToAggregate(_)))
    }

}
15 |
--------------------------------------------------------------------------------
/nosql/memory/src/main/scala/com/github/j5ik2o/dddbase/memory/AggregateMultiReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memory
2 |
3 | import com.github.j5ik2o.dddbase.AggregateMultiReader
4 | import monix.eval.Task
5 |
/** Resolves multiple aggregates by id through the in-memory DAO. */
trait AggregateMultiReadFeature extends AggregateMultiReader[Task] with AggregateBaseReadFeature {

  /** Fetches the records keyed by each id's string form and converts each to its aggregate. */
  override def resolveMulti(ids: Seq[IdType]): Task[Seq[AggregateType]] = {
    val keys = ids.map(id => id.value.toString)
    dao.getMulti(keys).flatMap { records =>
      Task.gather(records.map(convertToAggregate(_)))
    }
  }

}
15 |
--------------------------------------------------------------------------------
/core/src/main/scala/com/github/j5ik2o/dddbase/Aggregate.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase
2 |
3 | import scala.reflect.ClassTag
4 |
trait Aggregate {
  // Concrete aggregate type; used by the ClassTag-based runtime check in `equals`.
  type AggregateType <: Aggregate
  // Identifier type of this aggregate.
  type IdType <: AggregateId
  val id: IdType
  // ClassTag of the concrete aggregate, supplied by implementors for the type test below.
  protected val tag: ClassTag[AggregateType]

  // True when `other` is an instance of the concrete aggregate class.
  def canEqual(other: Any): Boolean = tag.runtimeClass.isInstance(other)

  // Identity-based equality: same concrete type (ClassTag extractor pattern) and same id.
  override def equals(other: Any): Boolean = other match {
    case tag(that) => (that canEqual this) && id == that.id
    case _ => false
  }

  // Consistent with `equals`: derived from the id only.
  override def hashCode(): Int = 31 * id.##
}
20 |
--------------------------------------------------------------------------------
/jdbc/slick/src/main/scala/com/github/j5ik2o/dddbase/slick/AggregateAllReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.slick
2 |
3 | import com.github.j5ik2o.dddbase.AggregateAllReader
4 | import monix.eval.Task
5 |
/** Resolves every aggregate stored in the Slick-backed table. */
trait AggregateAllReadFeature extends AggregateAllReader[Task] with AggregateBaseReadFeature {

  import profile.api._

  /** Reads the whole table and converts each row to its aggregate. */
  override def resolveAll: Task[Seq[AggregateType]] = {
    val fetchAll = Task.deferFuture(db.run(dao.result))
    fetchAll.flatMap(rows => Task.traverse(rows)(convertToAggregate(_)))
  }

}
19 |
--------------------------------------------------------------------------------
/nosql/memory/src/main/scala/com/github/j5ik2o/dddbase/memory/AggregateMultiWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memory
2 |
3 | import com.github.j5ik2o.dddbase.AggregateMultiWriter
4 | import monix.eval.Task
5 |
/** Persists multiple aggregates through the in-memory DAO in one batch. */
trait AggregateMultiWriteFeature extends AggregateMultiWriter[Task] with AggregateBaseWriteFeature {

  /** Converts every aggregate to its record and stores the batch, yielding the DAO's result count. */
  override def storeMulti(aggregates: Seq[AggregateType]): Task[Long] =
    Task
      .traverse(aggregates)(convertToRecord(_))
      .flatMap(records => dao.setMulti(records))

}
17 |
--------------------------------------------------------------------------------
/nosql/dynamodb/src/main/scala/com/github/j5ik2o/dddbase/dynamodb/AggregateMultiWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.dynamodb
2 |
3 | import com.github.j5ik2o.dddbase.AggregateMultiWriter
4 | import monix.eval.Task
5 |
/** Persists multiple aggregates through the DynamoDB DAO in one batch. */
trait AggregateMultiWriteFeature extends AggregateMultiWriter[Task] with AggregateBaseWriteFeature {

  /** Converts every aggregate to its record and puts the batch, yielding the DAO's result count. */
  override def storeMulti(aggregates: Seq[AggregateType]): Task[Long] =
    Task
      .traverse(aggregates)(convertToRecord(_))
      .flatMap(records => dao.putMulti(records))

}
17 |
--------------------------------------------------------------------------------
/jdbc/slick/src/main/scala/com/github/j5ik2o/dddbase/slick/AggregateSingleHardDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.slick
2 |
3 | import com.github.j5ik2o.dddbase.{ AggregateSingleHardDeletable, AggregateSingleWriter }
4 | import monix.eval.Task
5 |
/** Physically deletes a single aggregate's row from the Slick-backed table. */
trait AggregateSingleHardDeleteFeature extends AggregateSingleHardDeletable[Task] with AggregateBaseWriteFeature {
  this: AggregateSingleWriter[Task] =>

  /** Deletes the row selected by `byCondition(id)`, yielding the affected-row count. */
  override def hardDelete(id: IdType): Task[Long] =
    Task.deferFutureAction { implicit ec =>
      import profile.api._
      val target = dao.filter(byCondition(id))
      db.run(target.delete).map(_.toLong)
    }

}
15 |
--------------------------------------------------------------------------------
/jdbc/slick/src/main/scala/com/github/j5ik2o/dddbase/slick/AggregateSingleWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.slick
2 |
3 | import com.github.j5ik2o.dddbase.AggregateSingleWriter
4 | import monix.eval.Task
5 |
/** Persists a single aggregate via Slick's insert-or-update. */
trait AggregateSingleWriteFeature extends AggregateSingleWriter[Task] with AggregateBaseWriteFeature {

  /** Converts the aggregate to a row and upserts it, yielding the affected-row count. */
  override def store(aggregate: AggregateType): Task[Long] =
    convertToRecord(aggregate).flatMap { record =>
      Task.deferFutureAction { implicit ec =>
        import profile.api._
        db.run(dao.insertOrUpdate(record)).map(_.toLong)
      }
    }
}
17 |
--------------------------------------------------------------------------------
/nosql/redis/src/main/scala/com/github/j5ik2o/dddbase/redis/AggregateIOBaseFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.redis
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.AggregateIO
5 | import com.github.j5ik2o.reactive.redis.RedisConnection
6 | import monix.eval.Task
7 |
8 | import scala.concurrent.duration.Duration
9 |
trait AggregateIOBaseFeature extends AggregateIO[ReaderT[Task, RedisConnection, ?]] {
  // Record representation stored in Redis.
  type RecordType <: RedisDaoSupport#Record
  // DAO operating in the ReaderT-over-Task effect for this record type.
  type DaoType <: RedisDaoSupport#Dao[ReaderT[Task, RedisConnection, ?], RecordType]

  // Concrete DAO supplied by the implementing repository.
  protected val dao: DaoType
  // TTL applied when records are written (see AggregateSingleWriteFeature's dao.set call).
  val expireDuration: Duration
}
17 |
--------------------------------------------------------------------------------
/jdbc/slick/src/main/scala/com/github/j5ik2o/dddbase/slick/AggregateMultiReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.slick
2 |
3 | import com.github.j5ik2o.dddbase.AggregateMultiReader
4 | import monix.eval.Task
5 |
/** Resolves multiple aggregates by id from the Slick-backed table. */
trait AggregateMultiReadFeature extends AggregateMultiReader[Task] with AggregateBaseReadFeature {
  import profile.api._

  /** Reads the rows selected by `byConditions(ids)` and converts each to its aggregate. */
  override def resolveMulti(ids: Seq[IdType]): Task[Seq[AggregateType]] = {
    val rowsTask = Task.deferFuture(db.run(dao.filter(byConditions(ids)).result))
    rowsTask.flatMap(rows => Task.traverse(rows)(convertToAggregate(_)))
  }
}
17 |
--------------------------------------------------------------------------------
/.scalafmt.conf:
--------------------------------------------------------------------------------
1 | version = 2.0.0-RC5
2 | style = defaultWithAlign
3 | danglingParentheses = true
4 | indentOperator = spray
5 | includeCurlyBraceInSelectChains = true
6 | maxColumn = 120
7 | rewrite.rules = [RedundantParens, SortImports, PreferCurlyFors]
8 | spaces.inImportCurlyBraces = true
9 | binPack.literalArgumentLists = false
10 | unindentTopLevelOperators = true
11 | optIn.breaksInsideChains = true
12 | newlines.alwaysBeforeTopLevelStatements = true
--------------------------------------------------------------------------------
/example/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] [%X{akkaSource}] %-5level %logger{36} - %msg%n
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
--------------------------------------------------------------------------------
/jdbc/skinny/src/main/scala/com/github/j5ik2o/dddbase/skinny/AggregateIOBaseFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.skinny
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.{ AggregateIO, AggregateId }
5 | import monix.eval.Task
6 | import scalikejdbc.DBSession
7 |
trait AggregateIOBaseFeature extends AggregateIO[ReaderT[Task, DBSession, ?]] {
  override type IdType <: AggregateId
  // Primary-key type of the underlying record.
  type RecordIdType
  // Record representation persisted via ScalikeJDBC/Skinny.
  type RecordType <: SkinnyDaoSupport#Record[RecordIdType]
  // Skinny DAO for the record type.
  type DaoType <: SkinnyDaoSupport#Dao[RecordIdType, RecordType]

  // Concrete DAO supplied by the implementing repository.
  protected val dao: DaoType
  // Maps a domain id to the record's primary-key value.
  protected def toRecordId(id: IdType): RecordIdType
}
17 |
--------------------------------------------------------------------------------
/jdbc/slick/src/main/scala/com/github/j5ik2o/dddbase/slick/AggregateMultiHardDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.slick
2 |
3 | import com.github.j5ik2o.dddbase.{ AggregateMultiHardDeletable, AggregateMultiWriter }
4 | import monix.eval.Task
5 |
/** Physically deletes multiple aggregates' rows from the Slick-backed table. */
trait AggregateMultiHardDeleteFeature extends AggregateMultiHardDeletable[Task] with AggregateBaseReadFeature {
  this: AggregateMultiWriter[Task] with AggregateSingleHardDeleteFeature =>

  /** Deletes the rows selected by `byConditions(ids)`, yielding the deleted-row count. */
  override def hardDeleteMulti(ids: Seq[IdType]): Task[Long] =
    Task.deferFutureAction { implicit ec =>
      import profile.api._
      val target = dao.filter(byConditions(ids))
      db.run(target.delete).map(_.toLong)
    }

}
15 |
--------------------------------------------------------------------------------
/nosql/memcached/src/main/scala/com/github/j5ik2o/dddbase/memcached/AggregateIOBaseFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memcached
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.AggregateIO
5 | import com.github.j5ik2o.reactive.memcached.MemcachedConnection
6 | import monix.eval.Task
7 |
8 | import scala.concurrent.duration.Duration
9 |
trait AggregateIOBaseFeature extends AggregateIO[ReaderT[Task, MemcachedConnection, ?]] {
  // Record representation stored in Memcached.
  type RecordType <: MemcachedDaoSupport#Record
  // DAO operating in the ReaderT-over-Task effect for this record type.
  type DaoType <: MemcachedDaoSupport#Dao[ReaderT[Task, MemcachedConnection, ?], RecordType]

  // Concrete DAO supplied by the implementing repository.
  protected val dao: DaoType
  // TTL applied when records are written (see AggregateSingleWriteFeature's dao.set call).
  val expireDuration: Duration
}
17 |
--------------------------------------------------------------------------------
/nosql/memory/src/main/scala/com/github/j5ik2o/dddbase/memory/AggregateSingleSoftDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memory
2 |
3 | import com.github.j5ik2o.dddbase.AggregateSingleSoftDeletable
4 | import monix.eval.Task
5 |
trait AggregateSingleSoftDeleteFeature extends AggregateSingleSoftDeletable[Task] with AggregateBaseReadFeature {

  // The record must be soft-deletable (carries a deletion marker rather than being removed).
  override type RecordType <: MemoryDaoSupport#SoftDeletableRecord
  // The DAO must additionally provide the soft-delete operation.
  override type DaoType <: MemoryDaoSupport#Dao[Task, RecordType] with MemoryDaoSupport#DaoSoftDeletable[
    Task,
    RecordType
  ]

  // Logically deletes the record keyed by the id's string form, yielding the DAO's result count.
  override def softDelete(id: IdType): Task[Long] =
    dao.softDelete(id.value.toString)

}
18 |
--------------------------------------------------------------------------------
/jdbc/slick/src/main/scala/com/github/j5ik2o/dddbase/slick/AggregateMultiSoftDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.slick
2 |
3 | import com.github.j5ik2o.dddbase.{ AggregateMultiSoftDeletable, AggregateMultiWriter }
4 | import monix.eval.Task
5 |
/** Logically deletes multiple aggregates by updating their status column. */
trait AggregateMultiSoftDeleteFeature extends AggregateMultiSoftDeletable[Task] with AggregateBaseReadFeature {
  this: AggregateMultiWriter[Task] with AggregateSingleSoftDeleteFeature =>

  /** Sets the status of the rows selected by `byConditions(ids)` to DELETE, yielding the updated-row count. */
  override def softDeleteMulti(ids: Seq[IdType]): Task[Long] =
    Task.deferFutureAction { implicit ec =>
      import profile.api._
      val statuses = dao.filter(byConditions(ids)).map(_.status)
      db.run(statuses.update(DELETE)).map(_.toLong)
    }

}
16 |
--------------------------------------------------------------------------------
/nosql/redis/src/main/scala/com/github/j5ik2o/dddbase/redis/AggregateSingleHardDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.redis
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.{ AggregateSingleHardDeletable, AggregateSingleWriter }
5 | import com.github.j5ik2o.reactive.redis.RedisConnection
6 | import monix.eval.Task
7 |
/** Physically deletes a single aggregate's record from Redis. */
trait AggregateSingleHardDeleteFeature
    extends AggregateSingleHardDeletable[ReaderT[Task, RedisConnection, ?]]
    with AggregateBaseWriteFeature {
  this: AggregateSingleWriter[ReaderT[Task, RedisConnection, ?]] =>

  /** Deletes the record stored under the id's string key, yielding the DAO's result count. */
  override def hardDelete(id: IdType): ReaderT[Task, RedisConnection, Long] = {
    val key = id.value.toString
    dao.delete(key)
  }

}
16 |
--------------------------------------------------------------------------------
/nosql/redis/src/main/scala/com/github/j5ik2o/dddbase/redis/AggregateSingleWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.redis
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.AggregateSingleWriter
5 | import com.github.j5ik2o.reactive.redis.RedisConnection
6 | import monix.eval.Task
7 |
/** Persists a single aggregate to Redis with the configured expiration. */
trait AggregateSingleWriteFeature
    extends AggregateSingleWriter[ReaderT[Task, RedisConnection, ?]]
    with AggregateBaseWriteFeature {

  /** Converts the aggregate to its record and sets it with `expireDuration` as TTL. */
  override def store(aggregate: AggregateType): ReaderT[Task, RedisConnection, Long] =
    convertToRecord(aggregate).flatMap(record => dao.set(record, expireDuration))

}
20 |
--------------------------------------------------------------------------------
/nosql/dynamodb/src/main/scala/com/github/j5ik2o/dddbase/dynamodb/AggregateSingleReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.dynamodb
2 |
3 | import com.github.j5ik2o.dddbase.{ AggregateNotFoundException, AggregateSingleReader }
4 | import monix.eval.Task
5 |
/** Resolves a single aggregate by id from DynamoDB. */
trait AggregateSingleReadFeature extends AggregateSingleReader[Task] with AggregateBaseReadFeature {

  /** Fetches the record for `id` and converts it; fails with AggregateNotFoundException when absent. */
  override def resolveById(id: IdType): Task[AggregateType] =
    dao.get(toRecordId(id)).flatMap {
      case Some(record) => convertToAggregate(record)
      case None         => Task.raiseError(AggregateNotFoundException(id))
    }

}
19 |
--------------------------------------------------------------------------------
/jdbc/skinny/src/main/scala/com/github/j5ik2o/dddbase/skinny/AggregateSingleHardDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.skinny
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.{ AggregateSingleHardDeletable, AggregateSingleWriter }
5 | import monix.eval.Task
6 | import scalikejdbc.DBSession
7 |
/** Physically deletes a single aggregate's record via the Skinny DAO. */
trait AggregateSingleHardDeleteFeature
    extends AggregateSingleHardDeletable[ReaderT[Task, DBSession, ?]]
    with AggregateBaseWriteFeature {
  this: AggregateSingleWriter[ReaderT[Task, DBSession, ?]] =>

  /** Deletes the record by its mapped primary key within the provided session. */
  override def hardDelete(id: IdType): ReaderT[Task, DBSession, Long] =
    ReaderT { implicit dbSession =>
      Task(dao.deleteById(toRecordId(id)).toLong)
    }

}
18 |
--------------------------------------------------------------------------------
/nosql/dynamodb/src/main/scala/com/github/j5ik2o/dddbase/dynamodb/AggregateSingleSoftDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.dynamodb
2 |
3 | import com.github.j5ik2o.dddbase.AggregateSingleSoftDeletable
4 | import monix.eval.Task
5 |
trait AggregateSingleSoftDeleteFeature extends AggregateSingleSoftDeletable[Task] with AggregateBaseReadFeature {
  // The record must be soft-deletable (carries a deletion marker rather than being removed).
  override type RecordType <: DynamoDBDaoSupport#SoftDeletableRecord[RecordIdType]
  // The DAO must additionally provide the soft-delete operation.
  override type DaoType <: DynamoDBDaoSupport#Dao[Task, RecordIdType, RecordType] with DynamoDBDaoSupport#DaoSoftDeletable[
    Task,
    RecordIdType,
    RecordType
  ]

  // Logically deletes the record identified by the mapped record id, yielding the DAO's result count.
  override def softDelete(id: IdType): Task[Long] = dao.softDelete(toRecordId(id))

}
17 |
--------------------------------------------------------------------------------
/nosql/memcached/src/main/scala/com/github/j5ik2o/dddbase/memcached/AggregateSingleHardDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memcached
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.{ AggregateSingleHardDeletable, AggregateSingleWriter }
5 | import com.github.j5ik2o.reactive.memcached.MemcachedConnection
6 | import monix.eval.Task
7 |
/** Physically deletes a single aggregate's record from Memcached. */
trait AggregateSingleHardDeleteFeature
    extends AggregateSingleHardDeletable[ReaderT[Task, MemcachedConnection, ?]]
    with AggregateBaseWriteFeature {
  this: AggregateSingleWriter[ReaderT[Task, MemcachedConnection, ?]] =>

  /** Deletes the record stored under the id's string key, yielding the DAO's result count. */
  override def hardDelete(id: IdType): ReaderT[Task, MemcachedConnection, Long] = {
    val key = id.value.toString
    dao.delete(key)
  }

}
16 |
--------------------------------------------------------------------------------
/nosql/memcached/src/main/scala/com/github/j5ik2o/dddbase/memcached/AggregateSingleWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memcached
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.AggregateSingleWriter
5 | import com.github.j5ik2o.reactive.memcached.MemcachedConnection
6 | import monix.eval.Task
7 |
/** Persists a single aggregate to Memcached with the configured expiration. */
trait AggregateSingleWriteFeature
    extends AggregateSingleWriter[ReaderT[Task, MemcachedConnection, ?]]
    with AggregateBaseWriteFeature {

  /** Converts the aggregate to its record and sets it with `expireDuration` as TTL. */
  override def store(aggregate: AggregateType): ReaderT[Task, MemcachedConnection, Long] =
    convertToRecord(aggregate).flatMap(record => dao.set(record, expireDuration))

}
20 |
--------------------------------------------------------------------------------
/nosql/memory/src/main/scala/com/github/j5ik2o/dddbase/memory/AggregateSingleReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memory
2 |
3 | import com.github.j5ik2o.dddbase.{ AggregateNotFoundException, AggregateSingleReader }
4 | import monix.eval.Task
5 |
/** Resolves a single aggregate by id from the in-memory store. */
trait AggregateSingleReadFeature extends AggregateSingleReader[Task] with AggregateBaseReadFeature {

  /** Fetches the record keyed by the id's string form and converts it;
    * fails with AggregateNotFoundException when absent.
    */
  override def resolveById(id: IdType): Task[AggregateType] =
    dao.get(id.value.toString).flatMap {
      case Some(record) => convertToAggregate(record)
      case None         => Task.raiseError(AggregateNotFoundException(id))
    }

}
20 |
--------------------------------------------------------------------------------
/nosql/redis/src/main/scala/com/github/j5ik2o/dddbase/redis/AggregateMultiHardDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.redis
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.{ AggregateMultiHardDeletable, AggregateMultiWriter }
5 | import com.github.j5ik2o.reactive.redis.RedisConnection
6 | import monix.eval.Task
7 |
/** Physically deletes multiple aggregates' records from Redis. */
trait AggregateMultiHardDeleteFeature
    extends AggregateMultiHardDeletable[ReaderT[Task, RedisConnection, ?]]
    with AggregateBaseReadFeature {
  this: AggregateMultiWriter[ReaderT[Task, RedisConnection, ?]] with AggregateSingleHardDeleteFeature =>

  /** Uses each id's string form as the key and deletes the batch. */
  override def hardDeleteMulti(ids: Seq[IdType]): ReaderT[Task, RedisConnection, Long] = {
    val keys = ids.map(id => id.value.toString)
    dao.deleteMulti(keys)
  }

}
17 |
--------------------------------------------------------------------------------
/nosql/redis/src/main/scala/com/github/j5ik2o/dddbase/redis/AggregateMultiSoftDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.redis
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.{ AggregateMultiSoftDeletable, AggregateMultiWriter }
5 | import com.github.j5ik2o.reactive.redis.RedisConnection
6 | import monix.eval.Task
7 |
/** Logically deletes multiple aggregates' records in Redis. */
trait AggregateMultiSoftDeleteFeature
    extends AggregateMultiSoftDeletable[ReaderT[Task, RedisConnection, ?]]
    with AggregateBaseReadFeature {
  this: AggregateMultiWriter[ReaderT[Task, RedisConnection, ?]] with AggregateSingleSoftDeleteFeature =>

  /** Uses each id's string form as the key and soft-deletes the batch. */
  override def softDeleteMulti(ids: Seq[IdType]): ReaderT[Task, RedisConnection, Long] = {
    val keys = ids.map(id => id.value.toString)
    dao.softDeleteMulti(keys)
  }

}
17 |
--------------------------------------------------------------------------------
/jdbc/slick/src/main/scala/com/github/j5ik2o/dddbase/slick/AggregateIOBaseFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.slick
2 |
3 | import com.github.j5ik2o.dddbase.AggregateIO
4 | import monix.eval.Task
5 | import slick.jdbc.JdbcProfile
6 | import slick.lifted.{ Rep, TableQuery }
7 |
trait AggregateIOBaseFeature extends AggregateIO[Task] {
  // Row type persisted by Slick.
  type RecordType <: SlickDaoSupport#Record
  // Slick table definition for the row type.
  type TableType <: SlickDaoSupport#TableBase[RecordType]

  // JDBC profile used to build queries (its `api` is imported by the feature traits).
  protected val profile: JdbcProfile

  // Database handle the queries are run against.
  protected val db: JdbcProfile#Backend#Database

  // Table query acting as the DAO.
  protected val dao: TableQuery[TableType]

  // Predicate selecting the single row for `id`.
  protected def byCondition(id: IdType): TableType => Rep[Boolean]

  // Predicate selecting every row whose id is in `ids`.
  protected def byConditions(ids: Seq[IdType]): TableType => Rep[Boolean]

}
23 |
--------------------------------------------------------------------------------
/jdbc/skinny/src/main/scala/com/github/j5ik2o/dddbase/skinny/AggregateSingleWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.skinny
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.AggregateSingleWriter
5 | import monix.eval.Task
6 | import scalikejdbc.DBSession
7 |
/** Persists a single aggregate via the Skinny DAO's create-or-update. */
trait AggregateSingleWriteFeature
    extends AggregateSingleWriter[ReaderT[Task, DBSession, ?]]
    with AggregateBaseWriteFeature {

  /** Converts the aggregate to its record and creates-or-updates it within the session. */
  override def store(aggregate: AggregateType): ReaderT[Task, DBSession, Long] =
    convertToRecord(aggregate).flatMap { record =>
      ReaderT[Task, DBSession, Long] { implicit dbSession =>
        Task(dao.createOrUpdate(record))
      }
    }

}
24 |
--------------------------------------------------------------------------------
/jdbc/skinny/src/main/scala/com/github/j5ik2o/dddbase/skinny/AggregateAllReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.skinny
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.AggregateAllReader
5 | import monix.eval.Task
6 | import scalikejdbc.DBSession
7 |
/** Resolves every aggregate held by the Skinny DAO. */
trait AggregateAllReadFeature extends AggregateAllReader[ReaderT[Task, DBSession, ?]] with AggregateBaseReadFeature {

  /** Loads all records within the session and converts each to its aggregate via Task.gather. */
  override def resolveAll: ReaderT[Task, DBSession, Seq[AggregateType]] =
    ReaderT[Task, DBSession, Seq[AggregateType]] { implicit dbSession: DBSession =>
      Task(dao.findAll()).flatMap { records =>
        Task.gather(records.map(convertToAggregate(_)(dbSession)))
      }
    }

}
21 |
--------------------------------------------------------------------------------
/jdbc/skinny/src/main/scala/com/github/j5ik2o/dddbase/skinny/AggregateMultiHardDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.skinny
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.{ AggregateMultiHardDeletable, AggregateMultiWriter }
5 | import monix.eval.Task
6 | import scalikejdbc.DBSession
7 |
/** Physically deletes multiple aggregates' records via the Skinny DAO. */
trait AggregateMultiHardDeleteFeature
    extends AggregateMultiHardDeletable[ReaderT[Task, DBSession, ?]]
    with AggregateBaseReadFeature {
  this: AggregateMultiWriter[ReaderT[Task, DBSession, ?]] with AggregateSingleHardDeleteFeature =>

  /** Deletes the records selected by `byConditions(ids)` within the session, yielding the deleted count. */
  override def hardDeleteMulti(ids: Seq[IdType]): ReaderT[Task, DBSession, Long] =
    ReaderT { implicit dbSession =>
      Task(dao.deleteBy(byConditions(ids)).toLong)
    }

}
20 |
--------------------------------------------------------------------------------
/nosql/memcached/src/main/scala/com/github/j5ik2o/dddbase/memcached/AggregateMultiHardDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memcached
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.{ AggregateMultiHardDeletable, AggregateMultiWriter }
5 | import com.github.j5ik2o.reactive.memcached.MemcachedConnection
6 | import monix.eval.Task
7 |
/** Physically deletes multiple aggregates' records from Memcached. */
trait AggregateMultiHardDeleteFeature
    extends AggregateMultiHardDeletable[ReaderT[Task, MemcachedConnection, ?]]
    with AggregateBaseReadFeature {
  this: AggregateMultiWriter[ReaderT[Task, MemcachedConnection, ?]] with AggregateSingleHardDeleteFeature =>

  /** Uses each id's string form as the key and deletes the batch. */
  override def hardDeleteMulti(ids: Seq[IdType]): ReaderT[Task, MemcachedConnection, Long] = {
    val keys = ids.map(id => id.value.toString)
    dao.deleteMulti(keys)
  }

}
17 |
--------------------------------------------------------------------------------
/nosql/memcached/src/main/scala/com/github/j5ik2o/dddbase/memcached/AggregateMultiSoftDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memcached
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.{ AggregateMultiSoftDeletable, AggregateMultiWriter }
5 | import com.github.j5ik2o.reactive.memcached.MemcachedConnection
6 | import monix.eval.Task
7 |
/** Bulk soft-deletion for Memcached backed aggregates. */
trait AggregateMultiSoftDeleteFeature
    extends AggregateMultiSoftDeletable[ReaderT[Task, MemcachedConnection, ?]]
    with AggregateBaseReadFeature {
  this: AggregateMultiWriter[ReaderT[Task, MemcachedConnection, ?]] with AggregateSingleSoftDeleteFeature =>

  /** Logically deletes the entries for `ids` (keys are the ids' string form). */
  override def softDeleteMulti(ids: Seq[IdType]): ReaderT[Task, MemcachedConnection, Long] = {
    val keys = ids.map(_.value.toString)
    dao.softDeleteMulti(keys)
  }

}
17 |
--------------------------------------------------------------------------------
/jdbc/slick/src/main/scala/com/github/j5ik2o/dddbase/slick/AggregateChunkReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.slick
2 |
3 | import com.github.j5ik2o.dddbase.{ AggregateChunkReader, AggregatesChunk }
4 | import monix.eval.Task
5 |
/** Offset/limit chunked reads for Slick backed aggregates. */
trait AggregateChunkReadFeature extends AggregateChunkReader[Task] with AggregateBaseReadFeature {

  import profile.api._

  /** Reads one page of aggregates; a missing offset starts from the beginning. */
  override def resolveMultiWithOffsetLimit(offset: Option[Long], limit: Long): Task[AggregatesChunk[AggregateType]] = {
    val startIndex = offset.fold(0)(_.toInt)
    Task
      .deferFuture(db.run(dao.drop(startIndex).take(limit).result))
      .flatMap(rows => Task.traverse(rows)(convertToAggregate(_)))
      .map(aggregates => AggregatesChunk(startIndex, aggregates))
  }

}
21 |
--------------------------------------------------------------------------------
/jdbc/slick/src/main/scala/com/github/j5ik2o/dddbase/slick/AggregateSingleReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.slick
2 |
3 | import com.github.j5ik2o.dddbase.{ AggregateNotFoundException, AggregateSingleReader }
4 | import monix.eval.Task
5 |
/** Single-aggregate resolution for Slick backed repositories. */
trait AggregateSingleReadFeature extends AggregateSingleReader[Task] with AggregateBaseReadFeature {

  /** Resolves one aggregate by id; the Task fails with `AggregateNotFoundException` when no row matches. */
  override def resolveById(id: IdType): Task[AggregateType] = {
    val recordTask = Task.deferFutureAction { implicit ec =>
      import profile.api._
      db.run(dao.filter(byCondition(id)).take(1).result).map { rows =>
        rows.headOption.getOrElse(throw AggregateNotFoundException(id))
      }
    }
    recordTask.flatMap(record => convertToAggregate(record))
  }
}
20 |
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
// Build plugins: style/formatting, release & publishing, embedded-MySQL test
// fixture, DAO generation, Flyway migrations, and dependency-update reporting.

addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "1.0.0")

addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.0.0")

addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.11")

addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.5")

addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.1")

addSbtPlugin("com.chatwork" % "sbt-wix-embedded-mysql" % "1.0.9")

addSbtPlugin("jp.co.septeni-original" % "sbt-dao-generator" % "1.0.8")

addSbtPlugin("io.github.davidmweber" % "flyway-sbt" % "5.0.0")

addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.3.4")

// Fixed: use HTTPS — a plain-HTTP resolver allows artifact tampering in transit,
// and sbt 1.3+ rejects insecure HTTP repositories by default.
resolvers ++= Seq("Seasar2 Repository" at "https://maven.seasar.org/maven2")

libraryDependencies ++= Seq(
  "org.seasar.util" % "s2util" % "0.0.1"
)
24 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/package.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example
2 | import cats.data.ReaderT
3 | import cats.free.Free
4 | import com.github.j5ik2o.dddbase.example.repository.free.UserRepositoryDSL
5 | import com.github.j5ik2o.reactive.memcached.MemcachedConnection
6 | import com.github.j5ik2o.reactive.redis.RedisConnection
7 | import monix.eval.Task
8 | import scalikejdbc.DBSession
9 |
/** Type aliases naming the effect context each repository flavour runs in. */
package object repository {
  // Plain Monix Task: the DynamoDB client carries its own connection state.
  type OnDynamoDB[A] = Task[A]
  // ReaderT variants receive the connection/session as an environment parameter.
  type OnRedis[A] = ReaderT[Task, RedisConnection, A]
  type OnMemcached[A] = ReaderT[Task, MemcachedConnection, A]
  type OnMemory[A] = Task[A]
  type BySlick[A] = Task[A]
  type BySkinny[A] = ReaderT[Task, DBSession, A]
  // Free-monad program over the user-repository DSL; interpreted elsewhere.
  type ByFree[A] = Free[UserRepositoryDSL, A]
}
19 |
--------------------------------------------------------------------------------
/jdbc/skinny/src/main/scala/com/github/j5ik2o/dddbase/skinny/AggregateMultiSoftDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.skinny
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.{ AggregateMultiSoftDeletable, AggregateMultiWriter }
5 | import monix.eval.Task
6 | import scalikejdbc.DBSession
7 |
/** Bulk soft-deletion for Skinny-ORM backed aggregates. */
trait AggregateMultiSoftDeleteFeature
    extends AggregateMultiSoftDeletable[ReaderT[Task, DBSession, ?]]
    with AggregateBaseReadFeature {
  this: AggregateMultiWriter[ReaderT[Task, DBSession, ?]] with AggregateSingleSoftDeleteFeature =>

  /** Soft-deletes every aggregate matched by `ids` by setting its status column to
    * the DELETE marker (defined in [[AggregateSingleSoftDeleteFeature]]); returns
    * the number of updated rows.
    */
  override def softDeleteMulti(ids: Seq[IdType]): ReaderT[Task, DBSession, Long] =
    ReaderT { implicit dbSession => // fixed: implicit param was misspelled `dbDesion`
      Task {
        dao.updateBy(byConditions(ids)).withAttributes('status -> DELETE).toLong
      }
    }

}
20 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/free/UserRepositoryDSL.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.free
2 |
3 | import com.github.j5ik2o.dddbase.example.model.{ UserAccount, UserAccountId }
4 |
/** Algebra (instruction set) for the Free-monad based user repository. */
sealed trait UserRepositoryDSL[A]

case class ResolveMulti(ids: Seq[UserAccountId]) extends UserRepositoryDSL[Seq[UserAccount]]
// NOTE(review): parameter holds a single id but is named `ids` — consider renaming
// to `id`; left unchanged here because renaming breaks named-argument callers.
case class ResolveById(ids: UserAccountId) extends UserRepositoryDSL[UserAccount]
case class Store(userAccount: UserAccount) extends UserRepositoryDSL[Long]
case class StoreMulti(userAccounts: Seq[UserAccount]) extends UserRepositoryDSL[Long]
case class SoftDelete(id: UserAccountId) extends UserRepositoryDSL[Long]
case class SoftDeleteMulti(ids: Seq[UserAccountId]) extends UserRepositoryDSL[Long]
13 |
--------------------------------------------------------------------------------
/nosql/redis/src/main/scala/com/github/j5ik2o/dddbase/redis/AggregateMultiReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.redis
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.AggregateMultiReader
5 | import com.github.j5ik2o.reactive.redis.RedisConnection
6 | import monix.eval.Task
7 |
/** Multi-read support for Redis backed aggregates. */
trait AggregateMultiReadFeature
    extends AggregateMultiReader[ReaderT[Task, RedisConnection, ?]]
    with AggregateBaseReadFeature {

  /** Fetches the records for `ids` in one round-trip, then converts each to an aggregate. */
  override def resolveMulti(ids: Seq[IdType]): ReaderT[Task, RedisConnection, Seq[AggregateType]] =
    ReaderT[Task, RedisConnection, Seq[AggregateType]] { con =>
      dao
        .getMulti(ids.map(_.value.toString))
        .run(con)
        .flatMap(records => Task.gather(records.map(convertToAggregate(_)(con))))
    }

}
21 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/model/UserMessage.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.model
2 | import java.time.ZonedDateTime
3 |
4 | import com.github.j5ik2o.dddbase.{ Aggregate, AggregateId }
5 |
6 | import scala.reflect.{ classTag, ClassTag }
7 |
/** Composite identifier for a [[UserMessage]]: the pair (userId, messageId). */
case class UserMessageId(userId: Long, messageId: Long) extends AggregateId {
  override type IdType = (Long, Long)
  // The aggregate-id value is the tuple of both components.
  override val value = (userId, messageId)
}
12 |
/** User message aggregate root.
  *
  * @param id        composite (user, message) identifier
  * @param status    lifecycle status of the message
  * @param message   message body text
  * @param createdAt creation timestamp
  * @param updatedAt last-update timestamp, if the message was ever updated
  */
case class UserMessage(
  id: UserMessageId,
  status: Status,
  message: String,
  createdAt: ZonedDateTime,
  updatedAt: Option[ZonedDateTime]
) extends Aggregate {
  override type IdType = UserMessageId
  override type AggregateType = UserMessage
  // ClassTag used by the base Aggregate machinery for safe downcasting.
  override protected val tag: ClassTag[UserMessage] = classTag[UserMessage]
}
24 |
--------------------------------------------------------------------------------
/jdbc/skinny/src/main/scala/com/github/j5ik2o/dddbase/skinny/AggregateMultiReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.skinny
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.AggregateMultiReader
5 | import monix.eval.Task
6 | import scalikejdbc.DBSession
7 |
/** Multi-read support for Skinny-ORM backed aggregates. */
trait AggregateMultiReadFeature
    extends AggregateMultiReader[ReaderT[Task, DBSession, ?]]
    with AggregateBaseReadFeature {

  /** Loads every aggregate whose id is in `ids`, using the session from the environment. */
  override def resolveMulti(ids: Seq[IdType]): ReaderT[Task, DBSession, Seq[AggregateType]] =
    ReaderT[Task, DBSession, Seq[AggregateType]] { implicit dbSession: DBSession =>
      Task { dao.findAllBy(byConditions(ids)) }
        .flatMap(records => Task.gather(records.map(convertToAggregate(_)(dbSession))))
    }

}
23 |
--------------------------------------------------------------------------------
/nosql/redis/src/main/scala/com/github/j5ik2o/dddbase/redis/AggregateMultiWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.redis
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.AggregateMultiWriter
5 | import com.github.j5ik2o.reactive.redis.RedisConnection
6 | import monix.eval.Task
7 |
/** Multi-write support for Redis backed aggregates. */
trait AggregateMultiWriteFeature
    extends AggregateMultiWriter[ReaderT[Task, RedisConnection, ?]]
    with AggregateBaseWriteFeature {

  /** Converts each aggregate to its record form and stores them with one multi-set call. */
  override def storeMulti(aggregates: Seq[AggregateType]): ReaderT[Task, RedisConnection, Long] =
    ReaderT[Task, RedisConnection, Long] { con =>
      Task
        .traverse(aggregates)(aggregate => convertToRecord(aggregate)(con))
        .flatMap(records => dao.setMulti(records, expireDuration).run(con))
    }

}
23 |
--------------------------------------------------------------------------------
/jdbc/skinny/src/main/scala/com/github/j5ik2o/dddbase/skinny/AggregateSingleReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.skinny
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.{ AggregateNotFoundException, AggregateSingleReader }
5 | import monix.eval.Task
6 | import scalikejdbc.DBSession
7 |
/** Single-aggregate resolution for Skinny-ORM backed repositories. */
trait AggregateSingleReadFeature
    extends AggregateSingleReader[ReaderT[Task, DBSession, ?]]
    with AggregateBaseReadFeature {

  /** Resolves one aggregate by id; the Task fails with `AggregateNotFoundException` when absent. */
  override def resolveById(id: IdType): ReaderT[Task, DBSession, AggregateType] =
    ReaderT[Task, DBSession, RecordType] { implicit dbSession: DBSession =>
      Task {
        dao.findBy(byCondition(id)).getOrElse(throw AggregateNotFoundException(id))
      }
    }.flatMap(record => convertToAggregate(record))

}
23 |
--------------------------------------------------------------------------------
/nosql/memcached/src/main/scala/com/github/j5ik2o/dddbase/memcached/AggregateMultiReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memcached
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.AggregateMultiReader
5 | import com.github.j5ik2o.reactive.memcached.MemcachedConnection
6 | import monix.eval.Task
7 |
/** Multi-read support for Memcached backed aggregates. */
trait AggregateMultiReadFeature
    extends AggregateMultiReader[ReaderT[Task, MemcachedConnection, ?]]
    with AggregateBaseReadFeature {

  /** Fetches the records for `ids` in one round-trip, then converts each to an aggregate. */
  override def resolveMulti(ids: Seq[IdType]): ReaderT[Task, MemcachedConnection, Seq[AggregateType]] =
    ReaderT[Task, MemcachedConnection, Seq[AggregateType]] { con =>
      dao
        .getMulti(ids.map(_.value.toString))
        .run(con)
        .flatMap(records => Task.gather(records.map(convertToAggregate(_)(con))))
    }

}
21 |
--------------------------------------------------------------------------------
/jdbc/slick/src/main/scala/com/github/j5ik2o/dddbase/slick/AggregateMultiWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.slick
2 |
3 | import com.github.j5ik2o.dddbase.AggregateMultiWriter
4 | import monix.eval.Task
5 |
/** Multi-write support for Slick backed aggregates. */
trait AggregateMultiWriteFeature extends AggregateMultiWriter[Task] with AggregateBaseWriteFeature {

  /** Upserts all aggregates as one sequenced DBIO and returns the summed row counts. */
  override def storeMulti(aggregates: Seq[AggregateType]): Task[Long] =
    for {
      records <- Task.traverse(aggregates) { aggregate =>
        convertToRecord(aggregate)
      }
      result <- Task.deferFutureAction { implicit ec =>
        import profile.api._
        // `map` replaces the original foldLeft-with-append, which built the
        // identical Seq[DBIO[Long]] with more ceremony.
        db.run(DBIO.sequence(records.map(record => dao.insertOrUpdate(record).map(_.toLong))))
          .map(_.sum)
      }
    } yield result
}
23 |
--------------------------------------------------------------------------------
/nosql/memcached/src/main/scala/com/github/j5ik2o/dddbase/memcached/AggregateMultiWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memcached
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.AggregateMultiWriter
5 | import com.github.j5ik2o.reactive.memcached.MemcachedConnection
6 | import monix.eval.Task
7 |
/** Multi-write support for Memcached backed aggregates. */
trait AggregateMultiWriteFeature
    extends AggregateMultiWriter[ReaderT[Task, MemcachedConnection, ?]]
    with AggregateBaseWriteFeature {

  /** Converts each aggregate to its record form and stores them with one multi-set call. */
  override def storeMulti(aggregates: Seq[AggregateType]): ReaderT[Task, MemcachedConnection, Long] =
    ReaderT[Task, MemcachedConnection, Long] { con =>
      Task
        .traverse(aggregates)(aggregate => convertToRecord(aggregate)(con))
        .flatMap(records => dao.setMulti(records, expireDuration).run(con))
    }

}
23 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/model/UserAccount.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.model
2 |
3 | import java.time.ZonedDateTime
4 |
5 | import com.github.j5ik2o.dddbase.{ Aggregate, AggregateLongId }
6 |
7 | import scala.reflect._
8 |
/** Identifier for a [[UserAccount]] aggregate (Long-valued). */
case class UserAccountId(value: Long) extends AggregateLongId

/** Email address value object. */
case class EmailAddress(value: String)

/** Hashed-password value object (name indicates the value is already hashed). */
case class HashedPassword(value: String)

/** User account aggregate root.
  *
  * @param id           account identifier
  * @param status       lifecycle status of the account
  * @param emailAddress the account's email address
  * @param password     hashed password
  * @param createdAt    creation timestamp
  * @param updatedAt    last-update timestamp, if ever updated
  */
case class UserAccount(
  id: UserAccountId,
  status: Status,
  emailAddress: EmailAddress,
  password: HashedPassword,
  firstName: String,
  lastName: String,
  createdAt: ZonedDateTime,
  updatedAt: Option[ZonedDateTime]
) extends Aggregate {
  override type AggregateType = UserAccount
  override type IdType = UserAccountId
  // ClassTag used by the base Aggregate machinery for safe downcasting.
  override protected val tag: ClassTag[UserAccount] = classTag[UserAccount]
}
29 |
--------------------------------------------------------------------------------
/nosql/redis/src/main/scala/com/github/j5ik2o/dddbase/redis/AggregateSingleSoftDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.redis
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.AggregateSingleSoftDeletable
5 | import com.github.j5ik2o.reactive.redis.RedisConnection
6 | import monix.eval.Task
7 |
/** Single-aggregate soft deletion for Redis backed repositories. */
trait AggregateSingleSoftDeleteFeature
    extends AggregateSingleSoftDeletable[ReaderT[Task, RedisConnection, ?]]
    with AggregateBaseReadFeature {
  // Mixing this in requires a DAO whose records support a soft-delete status.
  override type RecordType <: RedisDaoSupport#SoftDeletableRecord
  override type DaoType <: RedisDaoSupport#Dao[ReaderT[Task, RedisConnection, ?], RecordType] with RedisDaoSupport#DaoSoftDeletable[
    ReaderT[Task, RedisConnection, ?],
    RecordType
  ]

  /** Marks the record identified by `id` as deleted without physically removing it. */
  override def softDelete(id: IdType): ReaderT[Task, RedisConnection, Long] =
    // The DAO already returns a ReaderT; no need to re-wrap and re-run it.
    dao.softDelete(id.value.toString)

}
22 |
--------------------------------------------------------------------------------
/jdbc/skinny/src/main/scala/com/github/j5ik2o/dddbase/skinny/AggregateMultiWriteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.skinny
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.AggregateMultiWriter
5 | import monix.eval.Task
6 | import scalikejdbc.DBSession
7 |
/** Multi-write support for Skinny-ORM backed aggregates. */
trait AggregateMultiWriteFeature
    extends AggregateMultiWriter[ReaderT[Task, DBSession, ?]]
    with AggregateBaseWriteFeature {

  /** Upserts each aggregate in turn.
    *
    * Returns the number of upserts that reported an affected row (a count of
    * successful writes, not a sum of affected-row counts).
    */
  override def storeMulti(aggregates: Seq[AggregateType]): ReaderT[Task, DBSession, Long] =
    ReaderT[Task, DBSession, Long] { dbSession =>
      for {
        records <- Task.traverse(aggregates) { aggregate =>
          convertToRecord(aggregate)(dbSession)
        }
        result <- Task
          .traverse(records) { record =>
            Task { dao.createOrUpdate(record) }
          }
          .map(_.count(_ > 0).toLong) // explicit conversion instead of implicit Int -> Long widening
      } yield result
    }

}
27 |
--------------------------------------------------------------------------------
/nosql/memcached/src/main/scala/com/github/j5ik2o/dddbase/memcached/AggregateSingleSoftDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memcached
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.AggregateSingleSoftDeletable
5 | import com.github.j5ik2o.reactive.memcached.MemcachedConnection
6 | import monix.eval.Task
7 |
/** Single-aggregate soft deletion for Memcached backed repositories. */
trait AggregateSingleSoftDeleteFeature
    extends AggregateSingleSoftDeletable[ReaderT[Task, MemcachedConnection, ?]]
    with AggregateBaseReadFeature {
  // Mixing this in requires a DAO whose records support a soft-delete status.
  override type RecordType <: MemcachedDaoSupport#SoftDeletableRecord
  override type DaoType <: MemcachedDaoSupport#Dao[ReaderT[Task, MemcachedConnection, ?], RecordType] with MemcachedDaoSupport#DaoSoftDeletable[
    ReaderT[Task, MemcachedConnection, ?],
    RecordType
  ]

  /** Marks the record identified by `id` as deleted without physically removing it. */
  override def softDelete(id: IdType): ReaderT[Task, MemcachedConnection, Long] =
    // The DAO already returns a ReaderT; no need to re-wrap and re-run it.
    dao.softDelete(id.value.toString)

}
22 |
--------------------------------------------------------------------------------
/nosql/memcached/src/main/scala/com/github/j5ik2o/dddbase/memcached/AggregateSingleReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memcached
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.{ AggregateNotFoundException, AggregateSingleReader }
5 | import com.github.j5ik2o.reactive.memcached.MemcachedConnection
6 | import monix.eval.Task
7 |
/** Single-aggregate resolution for Memcached backed repositories. */
trait AggregateSingleReadFeature
    extends AggregateSingleReader[ReaderT[Task, MemcachedConnection, ?]]
    with AggregateBaseReadFeature {

  /** Fetches the record for `id` and converts it; fails with `AggregateNotFoundException` on a miss. */
  override def resolveById(id: IdType): ReaderT[Task, MemcachedConnection, AggregateType] =
    ReaderT[Task, MemcachedConnection, RecordType] { con =>
      dao.get(id.value.toString).run(con).flatMap { maybeRecord =>
        maybeRecord.fold[Task[RecordType]](Task.raiseError(AggregateNotFoundException(id)))(Task.pure)
      }
    }.flatMap(record => convertToAggregate(record))

}
24 |
--------------------------------------------------------------------------------
/nosql/redis/src/main/scala/com/github/j5ik2o/dddbase/redis/AggregateSingleReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.redis
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.{ AggregateNotFoundException, AggregateSingleReader }
5 | import com.github.j5ik2o.reactive.redis.RedisConnection
6 | import monix.eval.Task
7 |
/** Single-aggregate resolution for Redis backed repositories. */
trait AggregateSingleReadFeature
    extends AggregateSingleReader[ReaderT[Task, RedisConnection, ?]]
    with AggregateBaseReadFeature {

  /** Fetches the record for `id` and converts it; fails with `AggregateNotFoundException` on a miss. */
  override def resolveById(id: IdType): ReaderT[Task, RedisConnection, AggregateType] =
    ReaderT[Task, RedisConnection, RecordType] { con =>
      dao.get(id.value.toString).run(con).flatMap { maybeRecord =>
        maybeRecord.fold[Task[RecordType]](Task.raiseError(AggregateNotFoundException(id)))(Task.pure)
      }
    }.flatMap(record => convertToAggregate(record))

}
26 |
--------------------------------------------------------------------------------
/nosql/memory/src/main/scala/com/github/j5ik2o/dddbase/memory/MemoryDaoSupport.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.memory
2 |
/** Shared DAO abstractions for the in-memory storage backend. */
trait MemoryDaoSupport {

  /** A stored record, keyed by a string id. */
  trait Record {
    val id: String
  }

  /** A record that can be logically deleted by flipping its status field. */
  trait SoftDeletableRecord extends Record {
    type This <: SoftDeletableRecord
    val status: String
    // Returns a copy with the given status (records are immutable).
    def withStatus(value: String): This
  }

  /** CRUD operations over records, abstract in the effect type `M`. */
  trait Dao[M[_], R <: Record] {

    def set(record: R): M[Long]

    def setMulti(records: Seq[R]): M[Long]

    def get(id: String): M[Option[R]]

    def getAll: M[Seq[R]]

    def getMulti(ids: Seq[String]): M[Seq[R]]

    def delete(id: String): M[Long]

    def deleteMulti(ids: Seq[String]): M[Long]

  }

  /** Optional soft-delete capability, mixable only into a matching [[Dao]]. */
  trait DaoSoftDeletable[M[_], R <: SoftDeletableRecord] { this: Dao[M, R] =>

    def softDelete(id: String): M[Long]

    def softDeleteMulti(ids: Seq[String]): M[Long]

  }

}
42 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/UserMessageRepository.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository
2 |
3 | import com.github.j5ik2o.dddbase._
4 | import com.github.j5ik2o.dddbase.example.model.{ UserMessage, UserMessageId }
5 | import com.github.j5ik2o.dddbase.example.repository.dynamodb.UserMessageRepositoryOnDynamoDB
6 | import com.github.j5ik2o.reactive.aws.dynamodb.monix.DynamoDbMonixClient
7 |
/** Repository contract for [[UserMessage]] aggregates, abstract over the effect `M`. */
trait UserMessageRepository[M[_]]
    extends AggregateSingleReader[M]
    with AggregateSingleWriter[M]
    with AggregateMultiReader[M]
    with AggregateMultiWriter[M]
    with AggregateSingleSoftDeletable[M]
    with AggregateMultiSoftDeletable[M] {
  override type IdType = UserMessageId
  override type AggregateType = UserMessage

}
19 |
/** Factory methods for concrete [[UserMessageRepository]] implementations. */
object UserMessageRepository {

  /** Builds the DynamoDB-backed implementation on top of the given Monix client. */
  def onDynamoDB(client: DynamoDbMonixClient): UserMessageRepository[OnDynamoDB] =
    new UserMessageRepositoryOnDynamoDB(client)

}
26 |
--------------------------------------------------------------------------------
/jdbc/skinny/src/main/scala/com/github/j5ik2o/dddbase/skinny/AggregateSingleSoftDeleteFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.skinny
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.AggregateSingleSoftDeletable
5 | import monix.eval.Task
6 | import scalikejdbc._
7 |
/** Single-aggregate soft deletion for Skinny-ORM backed repositories.
  *
  * Also stacks onto the read path: the `abstract override` condition builders below
  * hide logically-deleted rows from every reader that shares this mixin.
  */
trait AggregateSingleSoftDeleteFeature
    extends AggregateSingleSoftDeletable[ReaderT[Task, DBSession, ?]]
    with AggregateBaseReadFeature {

  // Status value written to mark a row as logically deleted.
  protected final val DELETE = "deleted"

  /** Soft-deletes the aggregate with `id` by setting its status column to [[DELETE]]. */
  override def softDelete(id: IdType): ReaderT[Task, DBSession, Long] = ReaderT { implicit dbSession =>
    Task {
      dao.updateById(toRecordId(id)).withAttributes('status -> DELETE).toLong
    }
  }

  // Stackable modification (`abstract override`): append `status <> DELETE` to the
  // super trait's condition so deleted rows are invisible to reads. Trait
  // linearization order determines which `super` is called — mix in with care.
  abstract override protected def byCondition(id: IdType): SQLSyntax =
    super.byCondition(id).and.ne(dao.defaultAlias.status, DELETE)

  abstract override protected def byConditions(ids: Seq[IdType]): SQLSyntax =
    super.byConditions(ids).and.ne(dao.defaultAlias.status, DELETE)
}
26 |
--------------------------------------------------------------------------------
/jdbc/skinny/src/main/scala/com/github/j5ik2o/dddbase/skinny/AggregateChunkReadFeature.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.skinny
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.{ AggregateChunkReader, AggregatesChunk }
5 | import monix.eval.Task
6 | import scalikejdbc.DBSession
7 |
/** Offset/limit chunked reads for Skinny-ORM backed aggregates. */
trait AggregateChunkReadFeature
    extends AggregateChunkReader[ReaderT[Task, DBSession, ?]]
    with AggregateBaseReadFeature {

  /** Reads one page of aggregates; a missing offset starts from the beginning. */
  override def resolveMultiWithOffsetLimit(
      offset: Option[Long],
      limit: Long
  ): ReaderT[Task, DBSession, AggregatesChunk[AggregateType]] =
    ReaderT[Task, DBSession, AggregatesChunk[AggregateType]] { implicit dbSession: DBSession =>
      val startIndex = offset.fold(0)(_.toInt)
      Task { dao.findAllWithLimitOffset(limit.toInt, startIndex) }
        .flatMap(records => Task.gather(records.map(convertToAggregate(_)(dbSession))))
        .map(aggregates => AggregatesChunk(startIndex, aggregates))
    }

}
27 |
--------------------------------------------------------------------------------
/jdbc/slick/src/main/scala/com/github/j5ik2o/dddbase/slick/SlickDaoSupport.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.slick
2 |
3 | import java.time.{ Instant, ZoneId, ZonedDateTime }
4 |
/** Shared Slick DAO infrastructure: the JDBC profile, column mappings, and table base classes. */
trait SlickDaoSupport {

  // Supplied by concrete modules (MySQL, H2, ...).
  val profile: slick.jdbc.JdbcProfile

  import profile.api._

  // Maps ZonedDateTime <-> java.sql.Timestamp via epoch milliseconds.
  // NOTE(review): toEpochMilli truncates to millisecond precision even though the
  // schema declares datetime(6), and reads come back in the JVM default zone, not
  // the originally stored zone — confirm both are acceptable.
  implicit val zonedDateTimeColumnType =
    MappedColumnType.base[ZonedDateTime, java.sql.Timestamp](
      { zdt =>
        new java.sql.Timestamp(zdt.toInstant.toEpochMilli)
      }, { ts =>
        val instant = Instant.ofEpochMilli(ts.getTime)
        ZonedDateTime.ofInstant(instant, ZoneId.systemDefault())
      }
    )

  /** Marker for a mapped row type. */
  trait Record

  /** A record carrying a status column used for logical deletion. */
  trait SoftDeletableRecord extends Record {
    val status: String
  }

  /** Base table class fixing the (tag, schema, name) constructor argument order. */
  abstract class TableBase[T](_tableTag: Tag, _tableName: String, _schemaName: Option[String] = None)
      extends Table[T](_tableTag, _schemaName, _tableName)

  /** Mixin for tables that expose the soft-delete status column. */
  trait SoftDeletableTableSupport[T] { this: TableBase[T] =>
    def status: Rep[String]
  }
}
34 |
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/util/SkinnySpecSupport.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.util
2 |
3 | import org.scalatest.{ BeforeAndAfter, BeforeAndAfterAll, Suite }
4 | import scalikejdbc.config.DBs
5 | import scalikejdbc.{ ConnectionPool, GlobalSettings, LoggingSQLAndTimeSettings }
6 |
/** Test-suite mixin wiring a ScalikeJDBC connection pool to the Flyway-migrated MySQL instance. */
trait SkinnySpecSupport extends BeforeAndAfter with BeforeAndAfterAll with JdbcSpecSupport {
  self: Suite with FlywayWithMySQLSpecSupport =>

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J class name;
    // Connector/J 8+ uses "com.mysql.cj.jdbc.Driver" — confirm the driver version in use.
    Class.forName("com.mysql.jdbc.Driver")
    // jdbcPort comes from JdbcSpecSupport; the embedded MySQL listens there.
    ConnectionPool.singleton(s"jdbc:mysql://localhost:${jdbcPort}/dddbase?useSSL=false", "dddbase", "dddbase")
    // Verbose SQL logging with slow-query warnings, for test debugging.
    GlobalSettings.loggingSQLAndTime = LoggingSQLAndTimeSettings(
      enabled = true,
      logLevel = 'DEBUG,
      warningEnabled = true,
      warningThresholdMillis = 1000L,
      warningLogLevel = 'WARN
    )
  }

  override protected def afterAll(): Unit = {
    DBs.closeAll()
    super.afterAll()
  }

}
29 |
--------------------------------------------------------------------------------
/nosql/dynamodb/src/main/scala/com/github/j5ik2o/dddbase/dynamodb/DynamoDBDaoSupport.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.dynamodb
2 |
3 | import com.github.j5ik2o.reactive.aws.dynamodb.monix.DynamoDbMonixClient
4 |
/** Shared DAO abstractions for the DynamoDB storage backend, generic in the id type. */
trait DynamoDBDaoSupport {

  /** A stored record, keyed by an id of type `ID`. */
  trait Record[ID] {
    val id: ID
  }

  /** A record that can be logically deleted by flipping its status field. */
  trait SoftDeletableRecord[ID] extends Record[ID] {
    type This <: SoftDeletableRecord[ID]
    val status: String
    // Returns a copy with the given status (records are immutable).
    def withStatus(value: String): This
  }

  /** CRUD operations over records, abstract in the effect type `M`. */
  trait Dao[M[_], ID, R <: Record[ID]] {

    // Monix wrapper around the AWS DynamoDB client.
    protected def client: DynamoDbMonixClient

    def put(record: R): M[Long]

    def putMulti(records: Seq[R]): M[Long]

    def get(id: ID): M[Option[R]]

    def getMulti(ids: Seq[ID]): M[Seq[R]]

    def delete(id: ID): M[Long]

    def deleteMulti(ids: Seq[ID]): M[Long]

  }

  /** Optional soft-delete capability, mixable only into a matching [[Dao]]. */
  trait DaoSoftDeletable[M[_], ID, R <: SoftDeletableRecord[ID]] { this: Dao[M, ID, R] =>

    def softDelete(id: ID): M[Long]

    def softDeleteMulti(ids: Seq[ID]): M[Long]

  }

}
44 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 Junichi Kato
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/.circleci/config.yml:
--------------------------------------------------------------------------------
# CircleCI 2 build: installs Java 8 + sbt on a machine executor, then
# cross-compiles and tests against Scala 2.11.12 and 2.12.8 with cached deps.
version: 2
jobs:
  build:
    # NOTE(review): in CircleCI 2, branch filtering is normally configured under
    # `workflows`; confirm this job-level `branches` block is actually honored.
    branches:
      ignore:
        - master
    environment:
      - JAVA_OPTS: "-XX:ReservedCodeCacheSize=256M -Xms1g -Xmx3g -Xss2m"
    machine: true
    steps:
      - run:
          command: |
            sudo apt-get update -qq && sudo apt-get install -y libaio1 libevent-dev
            sudo apt-get install -y software-properties-common
            sudo add-apt-repository -y ppa:webupd8team/java
            sudo apt-get update -qq
            echo oracle-java8-installer shared/accepted-oracle-license-v1-1 select true | sudo /usr/bin/debconf-set-selections
            sudo apt-get install -y oracle-java8-installer
            sudo apt-get install -y sbt
      - checkout
      - restore_cache:
          key: scala-library-dependencies-{{ checksum "build.sbt" }}
      # Compile both Scala versions first so the cache captures all dependencies.
      - run: sbt ++2.11.12 test:compile && sbt ++2.12.8 test:compile
      - save_cache:
          paths: [ "~/.sbt/boot", "~/.ivy2", "~/.wixMySQL" ]
          key: scala-library-dependencies-{{ checksum "build.sbt" }}
      - run: sbt ++2.11.12 test && sbt ++2.12.8 test
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/util/RandomPortSupport.scala:
--------------------------------------------------------------------------------
package com.github.j5ik2o.dddbase.example.repository.util

import java.net.InetSocketAddress
import java.nio.channels.ServerSocketChannel

/**
 * Supplies OS-assigned free TCP ports for embedded test servers.
 *
 * This code is originated from Spray.
 * https://github.com/spray/spray/blob/b473d9e8ce503bafc72825914f46ae6be1588ce7/spray-util/src/main/scala/spray/util/Utils.scala#L35-L47
 */
trait RandomPortSupport {

  /** Binds to port 0 so the OS picks a free ephemeral port, then releases it. */
  def temporaryServerAddress(interface: String = "127.0.0.1"): InetSocketAddress = {
    val channel = ServerSocketChannel.open()
    try {
      channel.socket.bind(new InetSocketAddress(interface, 0))
      new InetSocketAddress(interface, channel.socket.getLocalPort)
    } finally channel.close()
  }

  /** Hostname/port pair form of [[temporaryServerAddress]]. */
  def temporaryServerHostnameAndPort(interface: String = "127.0.0.1"): (String, Int) = {
    val address = temporaryServerAddress(interface)
    (address.getHostName, address.getPort)
  }

  /** Just the port number of a freshly allocated address. */
  def temporaryServerPort(interface: String = "127.0.0.1"): Int =
    temporaryServerHostnameAndPort(interface)._2
}
--------------------------------------------------------------------------------
/flyway/src/test/resources/rdb-migration/V1__Create_Tables.sql:
--------------------------------------------------------------------------------
-- Flyway migration V1: base tables for the example domain.
-- NOTE(review): FlywayWithMySQLSpecSupport configures placeholders
-- `engineName` and `idSequenceNumberEngineName` with placeholderReplacement
-- enabled, but the ENGINE clauses below are hard-coded (InnoDB / MyISAM)
-- instead of referencing ${engineName} / ${idSequenceNumberEngineName} —
-- confirm whether the placeholders are intentionally unused.

-- Aggregate root: one row per user account; `status` supports soft deletes.
CREATE TABLE `user_account` (
  `id` bigint NOT NULL ,
  `status` enum('active', 'suspended', 'deleted') NOT NULL default 'active',
  `email` varchar(255) NOT NULL,
  `password` varchar(255) NOT NULL,
  `first_name` varchar(255) NOT NULL,
  `last_name` varchar(255) NOT NULL,
  `created_at` datetime(6) NOT NULL,
  `updated_at` datetime(6),
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;


-- Single-row sequence table seeded at 100 for user-account id generation.
CREATE TABLE `user_account_id_sequence_number`(id bigint unsigned NOT NULL) ENGINE=MyISAM;
INSERT INTO `user_account_id_sequence_number` VALUES (100);

-- User messages, keyed by the composite (user_id, message_id).
CREATE TABLE `user_message` (
  `user_id` bigint NOT NULL,
  `message_id` bigint NOT NULL,
  `status` enum('active', 'suspended', 'deleted') NOT NULL default 'active',
  `message` varchar(255) NOT NULL,
  `created_at` datetime(6) NOT NULL,
  `updated_at` datetime(6),
  PRIMARY KEY (`user_id`, `message_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;


-- Single-row sequence table seeded at 100 for message id generation.
CREATE TABLE `message_id_sequence_number`(id bigint unsigned NOT NULL) ENGINE=MyISAM;
INSERT INTO `message_id_sequence_number` VALUES (100);
--------------------------------------------------------------------------------
/nosql/redis/src/main/scala/com/github/j5ik2o/dddbase/redis/RedisDaoSupport.scala:
--------------------------------------------------------------------------------
package com.github.j5ik2o.dddbase.redis

import akka.actor.ActorSystem
import cats.data.ReaderT
import com.github.j5ik2o.reactive.redis.{ RedisClient, RedisConnection }
import monix.eval.Task

import scala.concurrent.duration.Duration

/**
 * DAO abstractions for Redis-backed repositories. Concrete DAOs pick the
 * effect type M[_] (typically a ReaderT over a RedisConnection).
 */
trait RedisDaoSupport {

  // Minimal persistent record: every record is addressed by a string key.
  trait Record {
    val id: String
  }

  // Record carrying a status field so it can be logically deleted
  // without removing the key.
  trait SoftDeletableRecord extends Record {
    type This <: SoftDeletableRecord
    val status: String
    def withStatus(value: String): This
  }

  /**
   * CRUD operations over records of type R in effect M. `expire` bounds the
   * entry's time-to-live; write operations return the number of affected records.
   */
  trait Dao[M[_], R <: Record] {

    implicit val system: ActorSystem

    // Explicit type annotation added for consistency with
    // MemcachedDaoSupport#Dao.memcachedClient.
    protected lazy val redisClient: RedisClient = RedisClient()

    def set(record: R, expire: Duration): M[Long]

    def setMulti(records: Seq[R], expire: Duration): M[Long]

    def get(id: String): M[Option[R]]

    def getMulti(ids: Seq[String]): M[Seq[R]]

    def delete(id: String): M[Long]

    def deleteMulti(ids: Seq[String]): M[Long]

  }

  // Mixin for DAOs whose records support logical (status-based) deletion.
  trait DaoSoftDeletable[M[_], R <: SoftDeletableRecord] { this: Dao[M, R] =>

    def softDelete(id: String): M[Long]

    def softDeleteMulti(ids: Seq[String]): M[Long]

  }

}
--------------------------------------------------------------------------------
/jdbc/slick/src/main/scala/com/github/j5ik2o/dddbase/slick/AggregateSingleSoftDeleteFeature.scala:
--------------------------------------------------------------------------------
package com.github.j5ik2o.dddbase.slick

import com.github.j5ik2o.dddbase.AggregateSingleSoftDeletable
import monix.eval.Task
import slick.lifted.{ Rep, TableQuery }

/**
 * Adds soft-delete support to a Slick-backed repository: `softDelete` rewrites
 * the row's status column to "deleted", and the stackable `abstract override`
 * predicates hide already-soft-deleted rows from every read that goes through them.
 */
trait AggregateSingleSoftDeleteFeature extends AggregateSingleSoftDeletable[Task] with AggregateBaseReadFeature {

  override type RecordType <: SlickDaoSupport#SoftDeletableRecord
  override type TableType <: SlickDaoSupport#TableBase[RecordType] with SlickDaoSupport#SoftDeletableTableSupport[
    RecordType
  ]
  // Status-column value marking a row as logically deleted.
  protected final val DELETE = "deleted"
  override protected val dao: TableQuery[TableType]

  // Soft delete = UPDATE of the status column; yields the affected-row count.
  override def softDelete(id: IdType): Task[Long] =
    Task.deferFutureAction { implicit ec =>
      import profile.api._
      db.run(dao.filter(byCondition(id)).map(_.status).update(DELETE)).map(_.toLong)
    }.asyncBoundary

  // Stackable refinement: AND the superclass predicate with "not deleted"
  // so soft-deleted rows become invisible to mixed-in readers.
  abstract override protected def byCondition(id: IdType): TableType => Rep[Boolean] = { e =>
    import profile.api._
    super.byCondition(id)(e) && e.status =!= DELETE
  }

  // Same refinement for the multi-id predicate.
  abstract override protected def byConditions(ids: Seq[IdType]): TableType => Rep[Boolean] = { e =>
    import profile.api._
    super.byConditions(ids)(e) && e.status =!= DELETE
  }

}
--------------------------------------------------------------------------------
/nosql/memcached/src/main/scala/com/github/j5ik2o/dddbase/memcached/MemcachedDaoSupport.scala:
--------------------------------------------------------------------------------
package com.github.j5ik2o.dddbase.memcached

import akka.actor.ActorSystem
import cats.data.ReaderT
import com.github.j5ik2o.reactive.memcached.{ MemcachedClient, MemcachedConnection }
import monix.eval.Task

import scala.concurrent.duration.Duration

/**
 * DAO abstractions for Memcached-backed repositories. Concrete DAOs pick the
 * effect type M[_] (typically a ReaderT over a MemcachedConnection).
 */
trait MemcachedDaoSupport {

  // Minimal persistent record: every record is addressed by a string key.
  trait Record {
    val id: String
  }

  // Record carrying a status field so it can be logically deleted
  // without removing the key.
  trait SoftDeletableRecord extends Record {
    type This <: SoftDeletableRecord
    val status: String
    def withStatus(value: String): This
  }

  /**
   * CRUD operations over records of type R in effect M. `expire` bounds the
   * entry's time-to-live; write operations return the number of affected records.
   */
  trait Dao[M[_], R <: Record] {

    implicit val system: ActorSystem

    protected lazy val memcachedClient: MemcachedClient = MemcachedClient()

    def set(record: R, expire: Duration): M[Long]

    def setMulti(records: Seq[R], expire: Duration): M[Long]

    def get(id: String): M[Option[R]]

    def getMulti(ids: Seq[String]): M[Seq[R]]

    def delete(id: String): M[Long]

    def deleteMulti(ids: Seq[String]): M[Long]

  }

  // Mixin for DAOs whose records support logical (status-based) deletion.
  trait DaoSoftDeletable[M[_], R <: SoftDeletableRecord] { this: Dao[M, R] =>

    def softDelete(id: String): M[Long]

    def softDeleteMulti(ids: Seq[String]): M[Long]

  }
}
--------------------------------------------------------------------------------
/project/Utils.scala:
--------------------------------------------------------------------------------
import java.net.InetSocketAddress
import java.nio.channels.ServerSocketChannel

import sbt._

/** Build-level helper utilities for the sbt project definition. */
object Utils {

  /** Adapts an sbt logger to the `scala.sys.process` logging interface. */
  implicit class SbtLoggerOps(val self: sbt.Logger) extends AnyVal {

    def toScalaProcessLogger: scala.sys.process.ProcessLogger = new scala.sys.process.ProcessLogger {
      private val underlying = new FullLogger(self)
      override def out(s: => String): Unit = underlying.info(s)
      override def err(s: => String): Unit = underlying.err(s)
      override def buffer[T](f: => T): T   = underlying.buffer(f)
    }
  }

  /**
   * Supplies OS-assigned free TCP ports for embedded test servers.
   * Mirrors the example module's RandomPortSupport, since build code
   * cannot depend on test sources.
   */
  object RandomPortSupport {

    /** Binds to port 0 so the OS picks a free ephemeral port, then releases it. */
    def temporaryServerAddress(interface: String = "127.0.0.1"): InetSocketAddress = {
      val channel = ServerSocketChannel.open()
      try {
        channel.socket.bind(new InetSocketAddress(interface, 0))
        new InetSocketAddress(interface, channel.socket.getLocalPort)
      } finally channel.close()
    }

    /** Hostname/port pair form of [[temporaryServerAddress]]. */
    def temporaryServerHostnameAndPort(interface: String = "127.0.0.1"): (String, Int) = {
      val address = temporaryServerAddress(interface)
      (address.getHostName, address.getPort)
    }

    /** Just the port number of a freshly allocated address. */
    def temporaryServerPort(interface: String = "127.0.0.1"): Int =
      temporaryServerHostnameAndPort(interface)._2
  }

}
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/util/FlywayWithMySQLSpecSupport.scala:
--------------------------------------------------------------------------------
package com.github.j5ik2o.dddbase.example.repository.util

import java.io.File

import com.github.j5ik2o.scalatestplus.db._
import com.wix.mysql.distribution.Version._
import org.scalatest.TestSuite

import scala.concurrent.duration._

/**
 * Boots an embedded MySQL (wix-embedded-mysql) on a random free port, once per
 * suite, and runs the Flyway migrations found under
 * `flyway/src/test/resources/rdb-migration` against the `dddbase` schema.
 */
trait FlywayWithMySQLSpecSupport extends FlywayWithMySQLdOneInstancePerSuite with RandomPortSupport {
  this: TestSuite =>

  // Embedded MySQL configuration; the startup timeout can be stretched on slow
  // CI machines via the SBT_TEST_TIME_FACTOR environment variable.
  override protected lazy val mySQLdConfig: MySQLdConfig = MySQLdConfig(
    version = v5_6_21,
    port = Some(temporaryServerPort()),
    userWithPassword = Some(UserWithPassword("dddbase", "dddbase")),
    timeout = Some((30 seconds) * sys.env.getOrElse("SBT_TEST_TIME_FACTOR", "1").toDouble)
  )

  // Cache the MySQL distribution download under the user's home directory.
  // Fall back to the JVM's user.home when HOME is unset (e.g. on Windows)
  // instead of throwing NoSuchElementException from sys.env("HOME").
  override protected lazy val downloadConfig: DownloadConfig =
    super.downloadConfig.copy(
      cacheDir = new File(sys.env.getOrElse("HOME", sys.props("user.home")) + "/.wixMySQL/downloads")
    )

  override protected lazy val schemaConfigs: Seq[SchemaConfig] = Seq(SchemaConfig(name = "dddbase"))

  // Migration scripts are read from the filesystem (not the classpath), with
  // placeholder replacement enabled for the table-engine names.
  override protected def flywayConfig(jdbcUrl: String): FlywayConfig =
    FlywayConfig(
      locations = Seq(
        "filesystem:flyway/src/test/resources/rdb-migration"
      ),
      placeholderConfig = Some(
        PlaceholderConfig(
          placeholderReplacement = true,
          placeholders = Map("engineName" -> "MEMORY", "idSequenceNumberEngineName" -> "MyISAM")
        )
      )
    )

}
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/airframe/AirframeSpec.scala:
--------------------------------------------------------------------------------
package com.github.j5ik2o.dddbase.example.repository.airframe

import java.time.ZonedDateTime

import com.github.j5ik2o.dddbase.example.model._
import com.github.j5ik2o.dddbase.example.repository.util.{ FlywayWithMySQLSpecSupport, SkinnySpecSupport }
import com.github.j5ik2o.dddbase.example.repository.{ BySkinny, IdGenerator, UserAccountRepository }
import monix.execution.Scheduler.Implicits.global
import org.scalatest.{ FreeSpecLike, Matchers }
import scalikejdbc.AutoSession
import wvlet.airframe._

/**
 * Verifies that a Skinny-backed UserAccountRepository can be wired through an
 * Airframe DI session and used for a store/resolve round trip.
 */
class AirframeSpec extends FreeSpecLike with FlywayWithMySQLSpecSupport with SkinnySpecSupport with Matchers {

  override val tables: Seq[String] = Seq("user_account")

  // DI design binding the repository interface to the Skinny implementation.
  val design = newDesign.bind[UserAccountRepository[BySkinny]].toInstance(UserAccountRepository.bySkinny)

  // Fixture aggregate for the round trip.
  val userAccount = UserAccount(
    id = UserAccountId(IdGenerator.generateIdValue),
    status = Status.Active,
    emailAddress = EmailAddress("test@test.com"),
    password = HashedPassword("aaa"),
    firstName = "Junichi",
    lastName = "Kato",
    createdAt = ZonedDateTime.now,
    updatedAt = None
  )

  "Airframe" - {
    "store and resolveById" in {
      design.withSession { session =>
        val repo    = session.build[UserAccountRepository[BySkinny]]
        val program = repo.store(userAccount).flatMap(_ => repo.resolveById(userAccount.id))
        val actual  = program.run(AutoSession).runToFuture.futureValue
        actual shouldBe userAccount
      }
    }
  }

}
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/util/Slick3SpecSupport.scala:
--------------------------------------------------------------------------------
package com.github.j5ik2o.dddbase.example.repository.util

import com.typesafe.config.ConfigFactory
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{ BeforeAndAfter, BeforeAndAfterAll, Suite }
import slick.basic.DatabaseConfig
import slick.jdbc.SetParameter.SetUnit
import slick.jdbc.{ JdbcProfile, SQLActionBuilder }

import scala.concurrent.Future

/**
 * Slick 3 test support: builds a DatabaseConfig pointed at the suite's
 * embedded MySQL (started by FlywayWithMySQLSpecSupport) and truncates the
 * suite's tables after each test so tests stay independent.
 */
trait Slick3SpecSupport extends BeforeAndAfter with BeforeAndAfterAll with ScalaFutures with JdbcSpecSupport {
  self: Suite with FlywayWithMySQLSpecSupport =>

  // Assigned in beforeAll; vars because the JDBC port is only known once the
  // embedded MySQL instance has started.
  private var _dbConfig: DatabaseConfig[JdbcProfile] = _

  private var _profile: JdbcProfile = _

  protected def dbConfig = _dbConfig

  protected def profile = _profile

  // After each test, truncate every table registered by the concrete suite.
  after {
    implicit val ec = dbConfig.db.executor.executionContext
    val futures = tables.map { table =>
      // Table names come from the suite itself, not external input.
      val q = SQLActionBuilder(List(s"TRUNCATE TABLE $table"), SetUnit).asUpdate
      dbConfig.db.run(q)
    }
    Future.sequence(futures).futureValue
  }

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    // Connection settings for the embedded MySQL; jdbcPort is supplied by
    // JdbcSpecSupport once the instance is up.
    val config = ConfigFactory.parseString(s"""
      |dddbase {
      |  profile = "slick.jdbc.MySQLProfile$$"
      |  db {
      |    connectionPool = disabled
      |    driver = "com.mysql.jdbc.Driver"
      |    url = "jdbc:mysql://localhost:$jdbcPort/dddbase?useSSL=false"
      |    user = "dddbase"
      |    password = "dddbase"
      |  }
      |}
    """.stripMargin)
    _dbConfig = DatabaseConfig.forConfig[JdbcProfile]("dddbase", config)
    _profile = dbConfig.profile
  }

  override protected def afterAll(): Unit = {
    // NOTE(review): db.shutdown returns a Future that is discarded here, so the
    // pool may still be closing when afterAll returns — confirm this is acceptable.
    dbConfig.db.shutdown
    super.afterAll()
  }

}
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/free/UserAccountRepositoryByFree.scala:
--------------------------------------------------------------------------------
package com.github.j5ik2o.dddbase.example.repository.free

import cats.free.Free.liftF
import cats.{ ~>, Monad }
import com.github.j5ik2o.dddbase.example.model.{ UserAccount, UserAccountId }
import com.github.j5ik2o.dddbase.example.repository.{ ByFree, UserAccountRepository }

/**
 * Free-monad implementation of UserAccountRepository: each method lifts a
 * UserRepositoryDSL command into a Free program, and `evaluate` folds the
 * program through an interpreter backed by a concrete repository.
 */
object UserAccountRepositoryByFree extends UserAccountRepository[ByFree] {

  override def resolveById(id: UserAccountId): ByFree[UserAccount] = liftF(ResolveById(id))

  override def resolveMulti(ids: Seq[UserAccountId]): ByFree[Seq[UserAccount]] = liftF(ResolveMulti(ids))

  override def store(aggregate: UserAccount): ByFree[Long] = liftF(Store(aggregate))

  override def storeMulti(aggregates: Seq[UserAccount]): ByFree[Long] =
    liftF(StoreMulti(aggregates))

  override def softDelete(id: UserAccountId): ByFree[Long] = liftF(SoftDelete(id))

  override def softDeleteMulti(ids: Seq[UserAccountId]): ByFree[Long] = liftF(SoftDeleteMulti(ids))

  // Natural transformation from the DSL to the target effect M, delegating
  // each command to the corresponding method of `repo`. The asInstanceOf casts
  // are required because matching on UserRepositoryDSL[A] does not refine A;
  // each case returns exactly the type its command was constructed with.
  private def interpreter[M[_]](repo: UserAccountRepository[M]): UserRepositoryDSL ~> M = new (UserRepositoryDSL ~> M) {
    override def apply[A](fa: UserRepositoryDSL[A]): M[A] = fa match {
      case ResolveById(id) =>
        repo.resolveById(id).asInstanceOf[M[A]]
      case ResolveMulti(ids) =>
        repo.resolveMulti(ids).asInstanceOf[M[A]]
      case Store(aggregate) =>
        repo.store(aggregate).asInstanceOf[M[A]]
      case StoreMulti(aggregates) =>
        repo.storeMulti(aggregates).asInstanceOf[M[A]]
      case SoftDelete(id) =>
        repo.softDelete(id).asInstanceOf[M[A]]
      case SoftDeleteMulti(ids) =>
        repo.softDeleteMulti(ids).asInstanceOf[M[A]]
    }
  }

  // Run a Free program by folding it through the interpreter for M.
  def evaluate[M[_]: Monad, A](evaluator: UserAccountRepository[M])(program: ByFree[A]): M[A] =
    program.foldMap(interpreter(evaluator))
}
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/skinny/UserMessageRepositoryBySkinnyImplSpec.scala:
--------------------------------------------------------------------------------
package com.github.j5ik2o.dddbase.example.repository.skinny
import java.time.ZonedDateTime

import com.github.j5ik2o.dddbase.example.model.{ Status, UserMessage, UserMessageId }
import com.github.j5ik2o.dddbase.example.repository.{ IdGenerator, SpecSupport }
import com.github.j5ik2o.dddbase.example.repository.util.{ FlywayWithMySQLSpecSupport, SkinnySpecSupport }
import monix.execution.Scheduler.Implicits.global
import org.scalatest.{ FreeSpecLike, Matchers }
import scalikejdbc.AutoSession

/**
 * Round-trip tests for UserMessageRepositoryBySkinnyImpl against the
 * per-suite embedded MySQL.
 */
class UserMessageRepositoryBySkinnyImplSpec
    extends FreeSpecLike
    with FlywayWithMySQLSpecSupport
    with SkinnySpecSupport
    with Matchers
    with SpecSupport {
  override val tables: Seq[String] = Seq("user_message")

  // Single fixture aggregate.
  val userMessage = UserMessage(
    id = UserMessageId(IdGenerator.generateIdValue, IdGenerator.generateIdValue),
    status = Status.Active,
    message = "ABC",
    createdAt = ZonedDateTime.now(),
    updatedAt = None
  )

  // Ten distinct fixture aggregates for the multi-store round trip.
  val userMessages = (1L to 10L).map { idx =>
    UserMessage(
      id = UserMessageId(IdGenerator.generateIdValue, IdGenerator.generateIdValue),
      status = Status.Active,
      message = s"ABC${idx}",
      createdAt = ZonedDateTime.now(),
      updatedAt = None
    )
  }
  val repository = new UserMessageRepositoryBySkinnyImpl

  "UserMessageRepositoryBySkinnyImpl" - {
    "store" in {
      val fetched = repository
        .store(userMessage)
        .flatMap(_ => repository.resolveById(userMessage.id))
        .run(AutoSession)
        .runToFuture
        .futureValue

      fetched shouldBe userMessage
    }
    "storeMulti" in {
      val fetched = repository
        .storeMulti(userMessages)
        .flatMap(_ => repository.resolveMulti(userMessages.map(_.id)))
        .run(AutoSession)
        .runToFuture
        .futureValue

      sameAs(fetched, userMessages) shouldBe true
    }
  }
}
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/dynamodb/UserMessageRepositoryOnDynamoDB.scala:
--------------------------------------------------------------------------------
package com.github.j5ik2o.dddbase.example.repository.dynamodb

import com.github.j5ik2o.dddbase.dynamodb._
import com.github.j5ik2o.dddbase.example.dao.dynamodb.UserMessageComponent
import com.github.j5ik2o.dddbase.example.model._
import com.github.j5ik2o.dddbase.example.repository.{ OnDynamoDB, UserMessageRepository }
import com.github.j5ik2o.reactive.aws.dynamodb.monix.DynamoDbMonixClient
import monix.eval.Task

/**
 * DynamoDB-backed implementation of UserMessageRepository. Converts between
 * the UserMessage aggregate and its UserMessageRecord item, keyed by the
 * composite (userId, messageId) record id.
 */
class UserMessageRepositoryOnDynamoDB(client: DynamoDbMonixClient)
    extends UserMessageRepository[OnDynamoDB]
    with AggregateSingleReadFeature
    with AggregateSingleWriteFeature
    with AggregateMultiReadFeature
    with AggregateMultiWriteFeature
    with AggregateSingleSoftDeleteFeature
    with AggregateMultiSoftDeleteFeature
    with UserMessageComponent {
  override type RecordIdType = UserMessageRecordId
  override type RecordType = UserMessageRecord
  override type DaoType = UserMessageDao
  override protected val dao = UserMessageDao(client)

  // Aggregate id -> composite record id.
  override protected def toRecordId(
      id: UserMessageId
  ): UserMessageRecordId = UserMessageRecordId(id.userId, id.messageId)

  // Record -> aggregate.
  override protected def convertToAggregate: UserMessageRecord => Task[UserMessage] = { rec =>
    val aggregate = UserMessage(
      id = UserMessageId(rec.id.userId, rec.id.messageId),
      status = Status.withName(rec.status),
      message = rec.message,
      createdAt = rec.createdAt,
      updatedAt = rec.updatedAt
    )
    Task.pure(aggregate)
  }

  // Aggregate -> record.
  override protected def convertToRecord: UserMessage => Task[UserMessageRecord] = { agg =>
    val rec = UserMessageRecord(
      id = UserMessageRecordId(agg.id.userId, agg.id.messageId),
      status = agg.status.entryName,
      message = agg.message,
      createdAt = agg.createdAt,
      updatedAt = agg.updatedAt
    )
    Task.pure(rec)
  }

}
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/slick/UserMessageRepositoryBySlickImplSpec.scala:
--------------------------------------------------------------------------------
package com.github.j5ik2o.dddbase.example.repository.slick

import java.time.ZonedDateTime

import com.github.j5ik2o.dddbase.example.model.{ Status, UserMessage, UserMessageId }
import com.github.j5ik2o.dddbase.example.repository.{ IdGenerator, SpecSupport }
import com.github.j5ik2o.dddbase.example.repository.util.{ FlywayWithMySQLSpecSupport, Slick3SpecSupport }
import monix.execution.Scheduler.Implicits.global
import org.scalatest.{ FreeSpecLike, Matchers }

/**
 * Round-trip tests for UserMessageRepositoryBySlickImpl against the
 * per-suite embedded MySQL.
 */
class UserMessageRepositoryBySlickImplSpec
    extends FreeSpecLike
    with FlywayWithMySQLSpecSupport
    with Slick3SpecSupport
    with Matchers
    with SpecSupport {

  override val tables: Seq[String] = Seq("user_message")

  // Single fixture aggregate.
  val userMessage = UserMessage(
    id = UserMessageId(IdGenerator.generateIdValue, IdGenerator.generateIdValue),
    status = Status.Active,
    message = "ABC",
    createdAt = ZonedDateTime.now(),
    updatedAt = None
  )

  // Ten distinct fixture aggregates for the multi-store round trip.
  val userMessages = (1L to 10L).map { idx =>
    UserMessage(
      id = UserMessageId(IdGenerator.generateIdValue, IdGenerator.generateIdValue),
      status = Status.Active,
      message = s"ABC${idx}",
      createdAt = ZonedDateTime.now(),
      updatedAt = None
    )
  }

  "UserMessageRepositoryBySlickImpl" - {
    "store" in {
      val repository = new UserMessageRepositoryBySlickImpl(dbConfig.profile, dbConfig.db)
      val fetched = repository
        .store(userMessage)
        .flatMap(_ => repository.resolveById(userMessage.id))
        .runToFuture
        .futureValue

      fetched shouldBe userMessage
    }
    "storeMulti" in {
      val repository = new UserMessageRepositoryBySlickImpl(dbConfig.profile, dbConfig.db)
      val fetched = repository
        .storeMulti(userMessages)
        .flatMap(_ => repository.resolveMulti(userMessages.map(_.id)))
        .runToFuture
        .futureValue

      sameAs(fetched, userMessages) shouldBe true
    }
  }
}
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/free/UserAccountRepositoryByFreeSpec.scala:
--------------------------------------------------------------------------------
package com.github.j5ik2o.dddbase.example.repository.free

import java.time.ZonedDateTime

import cats.free.Free
import com.github.j5ik2o.dddbase.example.model._
import com.github.j5ik2o.dddbase.example.repository.{ IdGenerator, SpecSupport, UserAccountRepository }
import com.github.j5ik2o.dddbase.example.repository.util.{ FlywayWithMySQLSpecSupport, SkinnySpecSupport }
import monix.execution.Scheduler.Implicits.global
import org.scalatest.{ FreeSpecLike, Matchers }
import scalikejdbc.AutoSession

/**
 * Exercises the Free-monad repository: builds a store/resolve program and
 * interprets it with the Skinny-backed implementation against embedded MySQL.
 */
class UserAccountRepositoryByFreeSpec
    extends FreeSpecLike
    with FlywayWithMySQLSpecSupport
    with SkinnySpecSupport
    with Matchers
    with SpecSupport {

  override val tables: Seq[String] = Seq("user_account")

  // Single fixture aggregate.
  val userAccount = UserAccount(
    id = UserAccountId(IdGenerator.generateIdValue),
    status = Status.Active,
    emailAddress = EmailAddress("test@test.com"),
    password = HashedPassword("aaa"),
    firstName = "Junichi",
    lastName = "Kato",
    createdAt = ZonedDateTime.now,
    updatedAt = None
  )

  // Ten distinct fixture aggregates.
  val userAccounts = (1L to 10L).map { idx =>
    UserAccount(
      id = UserAccountId(IdGenerator.generateIdValue),
      status = Status.Active,
      emailAddress = EmailAddress(s"user${idx}@gmail.com"),
      password = HashedPassword("aaa"),
      firstName = "Junichi",
      lastName = "Kato",
      createdAt = ZonedDateTime.now,
      updatedAt = None
    )
  }

  "UserAccountRepositoryByFree" - {
    "store" in {
      // Program is pure data until evaluated below.
      val program: Free[UserRepositoryDSL, UserAccount] =
        UserAccountRepositoryByFree
          .store(userAccount)
          .flatMap(_ => UserAccountRepositoryByFree.resolveById(userAccount.id))
      val skinny    = UserAccountRepository.bySkinny
      val evaluated = UserAccountRepositoryByFree.evaluate(skinny)(program)
      val actual    = evaluated.run(AutoSession).runToFuture.futureValue
      actual shouldBe userAccount
    }
  }
}
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/dynamodb/UserAccountRepositoryOnDynamoDB.scala:
--------------------------------------------------------------------------------
package com.github.j5ik2o.dddbase.example.repository.dynamodb

import com.github.j5ik2o.dddbase.dynamodb._
import com.github.j5ik2o.dddbase.example.dao.dynamodb.UserAccountComponent
import com.github.j5ik2o.dddbase.example.model._
import com.github.j5ik2o.dddbase.example.repository.{ OnDynamoDB, UserAccountRepository }
import com.github.j5ik2o.reactive.aws.dynamodb.monix.DynamoDbMonixClient
import monix.eval.Task

/**
 * DynamoDB-backed implementation of UserAccountRepository. Converts between
 * the UserAccount aggregate and its UserAccountRecord item; record ids are the
 * string form of the numeric aggregate id.
 */
class UserAccountRepositoryOnDynamoDB(client: DynamoDbMonixClient)
    extends UserAccountRepository[OnDynamoDB]
    with AggregateSingleReadFeature
    with AggregateSingleWriteFeature
    with AggregateMultiReadFeature
    with AggregateMultiWriteFeature
    with AggregateSingleSoftDeleteFeature
    with AggregateMultiSoftDeleteFeature
    with UserAccountComponent {
  override type RecordIdType = String
  override type RecordType = UserAccountRecord
  override type DaoType = UserAccountDao
  override protected val dao = UserAccountDao(client)

  // Numeric aggregate id -> string record key.
  override protected def toRecordId(id: UserAccountId): String = id.value.toString

  // Record -> aggregate.
  override protected def convertToAggregate: UserAccountRecord => Task[UserAccount] = { rec =>
    val aggregate = UserAccount(
      id = UserAccountId(rec.id.toLong),
      status = Status.withName(rec.status),
      emailAddress = EmailAddress(rec.email),
      password = HashedPassword(rec.password),
      firstName = rec.firstName,
      lastName = rec.lastName,
      createdAt = rec.createdAt,
      updatedAt = rec.updatedAt
    )
    Task.pure(aggregate)
  }

  // Aggregate -> record.
  override protected def convertToRecord: UserAccount => Task[UserAccountRecord] = { agg =>
    val rec = UserAccountRecord(
      id = agg.id.value.toString,
      status = agg.status.entryName,
      email = agg.emailAddress.value,
      password = agg.password.value,
      firstName = agg.firstName,
      lastName = agg.lastName,
      createdAt = agg.createdAt,
      updatedAt = agg.updatedAt
    )
    Task.pure(rec)
  }
}
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/skinny/UserAccountRepositoryBySkinnyImplSpec.scala:
--------------------------------------------------------------------------------
package com.github.j5ik2o.dddbase.example.repository.skinny

import java.time.ZonedDateTime

import com.github.j5ik2o.dddbase.example.model._
import com.github.j5ik2o.dddbase.example.repository.{ IdGenerator, SpecSupport }
import com.github.j5ik2o.dddbase.example.repository.util.{ FlywayWithMySQLSpecSupport, SkinnySpecSupport }
import monix.execution.Scheduler.Implicits.global
import org.scalatest.{ FreeSpecLike, Matchers }
import scalikejdbc.AutoSession

/**
 * Round-trip tests for UserAccountRepositoryBySkinnyImpl against the
 * per-suite embedded MySQL.
 */
class UserAccountRepositoryBySkinnyImplSpec
    extends FreeSpecLike
    with FlywayWithMySQLSpecSupport
    with SkinnySpecSupport
    with Matchers
    with SpecSupport {

  val repository = new UserAccountRepositoryBySkinnyImpl
  override val tables: Seq[String] = Seq("user_account")

  // Single fixture aggregate.
  val userAccount = UserAccount(
    id = UserAccountId(IdGenerator.generateIdValue),
    status = Status.Active,
    emailAddress = EmailAddress("test@test.com"),
    password = HashedPassword("aaa"),
    firstName = "Junichi",
    lastName = "Kato",
    createdAt = ZonedDateTime.now,
    updatedAt = None
  )

  // Ten distinct fixture aggregates for the multi-store round trip.
  val userAccounts = (1L to 10L).map { idx =>
    UserAccount(
      id = UserAccountId(IdGenerator.generateIdValue),
      status = Status.Active,
      emailAddress = EmailAddress(s"user${idx}@gmail.com"),
      password = HashedPassword("aaa"),
      firstName = "Junichi",
      lastName = "Kato",
      createdAt = ZonedDateTime.now,
      updatedAt = None
    )
  }

  "UserAccountRepositoryBySkinny" - {
    "store" in {
      val fetched = repository
        .store(userAccount)
        .flatMap(_ => repository.resolveById(userAccount.id))
        .run(AutoSession)
        .runToFuture
        .futureValue

      fetched shouldBe userAccount
    }
    "storeMulti" in {
      val fetched = repository
        .storeMulti(userAccounts)
        .flatMap(_ => repository.resolveMulti(userAccounts.map(_.id)))
        .run(AutoSession)
        .runToFuture
        .futureValue

      sameAs(fetched, userAccounts) shouldBe true
    }

  }

}
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/slick/AbstractUserMessageRepositoryBySlick.scala:
--------------------------------------------------------------------------------
package com.github.j5ik2o.dddbase.example.repository.slick
import com.github.j5ik2o.dddbase.example.dao.slick.UserMessageComponent
import com.github.j5ik2o.dddbase.example.model.{ Status, UserMessage, UserMessageId }
import com.github.j5ik2o.dddbase.example.repository.{ BySlick, UserMessageRepository }
import com.github.j5ik2o.dddbase.slick._
import monix.eval.Task
import slick.jdbc.JdbcProfile
import slick.lifted.Rep

/**
 * Slick-backed base implementation of UserMessageRepository. Maps between the
 * UserMessage aggregate and its UserMessageRecord row; rows are addressed by
 * the composite key (userId, messageId).
 */
abstract class AbstractUserMessageRepositoryBySlick(val profile: JdbcProfile, val db: JdbcProfile#Backend#Database)
    extends UserMessageRepository[BySlick]
    with AggregateSingleReadFeature
    with AggregateMultiReadFeature
    with AggregateSingleWriteFeature
    with AggregateMultiWriteFeature
    with UserMessageComponent {

  override type RecordType = UserMessageRecord
  override type TableType = UserMessages
  override protected val dao = UserMessageDao

  // Row predicate for a single composite id.
  override protected def byCondition(id: IdType): TableType => Rep[Boolean] = { v =>
    import profile.api._
    v.userId === id.userId && v.messageId === id.messageId
  }

  // Row predicate for a set of composite ids: disjunction of the per-id
  // predicates. reduceLeftOption (instead of reduceLeft) makes an empty id
  // list yield a constant-false predicate rather than throwing
  // UnsupportedOperationException (e.g. resolveMulti(Nil)).
  override protected def byConditions(ids: Seq[IdType]): TableType => Rep[Boolean] = { v =>
    import profile.api._
    ids
      .map { id =>
        v.userId === id.userId && v.messageId === id.messageId
      }
      .reduceLeftOption(_ || _)
      .getOrElse(false: Rep[Boolean])
  }

  // Row -> aggregate.
  override protected def convertToAggregate: UserMessageRecord => Task[UserMessage] = { record =>
    Task.pure {
      UserMessage(
        id = UserMessageId(record.userId, record.messageId),
        status = Status.withName(record.status),
        message = record.message,
        createdAt = record.createdAt,
        updatedAt = record.updatedAt
      )
    }
  }

  // Aggregate -> row.
  override protected def convertToRecord: UserMessage => Task[UserMessageRecord] = { aggregate =>
    Task.pure {
      UserMessageRecord(
        messageId = aggregate.id.messageId,
        userId = aggregate.id.userId,
        status = aggregate.status.entryName,
        message = aggregate.message,
        createdAt = aggregate.createdAt,
        updatedAt = aggregate.updatedAt
      )
    }
  }

}
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/slick/UserAccountRepositoryBySlickImplSpec.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.slick
2 |
3 | import java.time.ZonedDateTime
4 |
5 | import com.github.j5ik2o.dddbase.example.model._
6 | import com.github.j5ik2o.dddbase.example.repository.{ IdGenerator, SpecSupport, UserAccountRepository }
7 | import com.github.j5ik2o.dddbase.example.repository.util.{ FlywayWithMySQLSpecSupport, Slick3SpecSupport }
8 | import monix.execution.Scheduler.Implicits.global
9 | import org.scalatest.{ FreeSpecLike, Matchers }
10 |
/** Integration spec for [[UserAccountRepositoryBySlickImpl]], run against a
  * Flyway-migrated MySQL instance provided by the test support traits.
  */
class UserAccountRepositoryBySlickImplSpec
    extends FreeSpecLike
    with FlywayWithMySQLSpecSupport
    with Slick3SpecSupport
    with Matchers
    with SpecSupport {

  override val tables: Seq[String] = Seq("user_account")

  // Single fixture aggregate for the store/resolve round trip.
  val userAccount = UserAccount(
    id = UserAccountId(IdGenerator.generateIdValue),
    status = Status.Active,
    emailAddress = EmailAddress("test@test.com"),
    password = HashedPassword("aaa"),
    firstName = "Junichi",
    lastName = "Kato",
    createdAt = ZonedDateTime.now,
    updatedAt = None
  )

  // Ten distinct fixture aggregates for the multi-store round trip.
  val userAccounts = (1L to 10L).map { idx =>
    UserAccount(
      id = UserAccountId(IdGenerator.generateIdValue),
      status = Status.Active,
      emailAddress = EmailAddress(s"user${idx}@gmail.com"),
      password = HashedPassword("aaa"),
      firstName = "Junichi",
      lastName = "Kato",
      createdAt = ZonedDateTime.now,
      updatedAt = None
    )
  }

  "UserAccountRepositoryBySlickImpl" - {
    "store" in {
      val repository = new UserAccountRepositoryBySlickImpl(dbConfig.profile, dbConfig.db)
      val program = for {
        _ <- repository.store(userAccount)
        r <- repository.resolveById(userAccount.id)
      } yield r

      program.runToFuture.futureValue shouldBe userAccount
    }
    "storeMulti" in {
      val repository = new UserAccountRepositoryBySlickImpl(dbConfig.profile, dbConfig.db)
      val program = for {
        _ <- repository.storeMulti(userAccounts)
        r <- repository.resolveMulti(userAccounts.map(_.id))
      } yield r

      sameAs(program.runToFuture.futureValue, userAccounts) shouldBe true
    }
  }
}
65 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/redis/UserAccountRepositoryOnRedis.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.redis
2 |
3 | import akka.actor.ActorSystem
4 | import cats.data.ReaderT
5 | import com.github.j5ik2o.dddbase.example.dao.redis.UserAccountComponent
6 | import com.github.j5ik2o.dddbase.example.model._
7 | import com.github.j5ik2o.dddbase.example.repository.{ OnRedis, UserAccountRepository }
8 | import com.github.j5ik2o.dddbase.redis._
9 | import com.github.j5ik2o.reactive.redis.RedisConnection
10 | import monix.eval.Task
11 |
12 | import scala.concurrent.duration.Duration
13 |
/** Redis-backed implementation of [[UserAccountRepository]].
  *
  * @param expireDuration expiry applied by the write features
  * @param system         actor system required by the reactive-redis client
  */
class UserAccountRepositoryOnRedis(val expireDuration: Duration)(implicit system: ActorSystem)
    extends UserAccountRepository[OnRedis]
    with AggregateSingleReadFeature
    with AggregateSingleWriteFeature
    with AggregateMultiReadFeature
    with AggregateMultiWriteFeature
    with AggregateSingleSoftDeleteFeature
    with AggregateMultiSoftDeleteFeature
    with UserAccountComponent {

  override type RecordType = UserAccountRecord
  override type DaoType    = UserAccountDao

  override protected val dao = UserAccountDao()

  // Record -> aggregate; ids are persisted as strings, so parse back to Long.
  override protected def convertToAggregate: UserAccountRecord => ReaderT[Task, RedisConnection, UserAccount] = {
    rec =>
      ReaderT { _ =>
        val aggregate = UserAccount(
          id = UserAccountId(rec.id.toLong),
          status = Status.withName(rec.status),
          emailAddress = EmailAddress(rec.email),
          password = HashedPassword(rec.password),
          firstName = rec.firstName,
          lastName = rec.lastName,
          createdAt = rec.createdAt,
          updatedAt = rec.updatedAt
        )
        Task.pure(aggregate)
      }
  }

  // Aggregate -> record (inverse of convertToAggregate).
  override protected def convertToRecord: UserAccount => ReaderT[Task, RedisConnection, UserAccountRecord] = {
    agg =>
      ReaderT { _ =>
        val record = UserAccountRecord(
          id = agg.id.value.toString,
          status = agg.status.entryName,
          email = agg.emailAddress.value,
          password = agg.password.value,
          firstName = agg.firstName,
          lastName = agg.lastName,
          createdAt = agg.createdAt,
          updatedAt = agg.updatedAt
        )
        Task.pure(record)
      }
  }

}
66 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/slick/AbstractUserAccountRepositoryBySlick.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.slick
2 |
3 | import com.github.j5ik2o.dddbase.example.dao.slick.UserAccountComponent
4 | import com.github.j5ik2o.dddbase.example.model._
5 | import com.github.j5ik2o.dddbase.example.repository.{ BySlick, UserAccountRepository }
6 | import com.github.j5ik2o.dddbase.slick.{
7 | AggregateMultiReadFeature,
8 | AggregateMultiWriteFeature,
9 | AggregateSingleReadFeature,
10 | AggregateSingleWriteFeature
11 | }
12 | import monix.eval.Task
13 | import slick.jdbc.JdbcProfile
14 | import slick.lifted.Rep
15 |
/** Slick-backed base implementation of [[UserAccountRepository]].
  *
  * @param profile the Slick JDBC profile used to generate SQL
  * @param db      the database the queries run against
  */
abstract class AbstractUserAccountRepositoryBySlick(val profile: JdbcProfile, val db: JdbcProfile#Backend#Database)
    extends UserAccountRepository[BySlick]
    with AggregateSingleReadFeature
    with AggregateMultiReadFeature
    with AggregateSingleWriteFeature
    with AggregateMultiWriteFeature
    with UserAccountComponent {
  override type RecordType = UserAccountRecord
  override type TableType  = UserAccounts
  override protected val dao = UserAccountDao

  // Predicate selecting the row with the given primary key.
  override protected def byCondition(id: IdType): TableType => Rep[Boolean] = { table =>
    import profile.api._
    table.id === id.value
  }

  // Predicate selecting every row whose primary key appears in `ids`.
  override protected def byConditions(ids: Seq[IdType]): TableType => Rep[Boolean] = { table =>
    import profile.api._
    table.id.inSet(ids.map(_.value))
  }

  // Record -> aggregate; `status` is stored as the enum entry name.
  override protected def convertToAggregate: UserAccountRecord => Task[UserAccount] = { rec =>
    val aggregate = UserAccount(
      id = UserAccountId(rec.id),
      status = Status.withName(rec.status),
      emailAddress = EmailAddress(rec.email),
      password = HashedPassword(rec.password),
      firstName = rec.firstName,
      lastName = rec.lastName,
      createdAt = rec.createdAt,
      updatedAt = rec.updatedAt
    )
    Task.pure(aggregate)
  }

  // Aggregate -> record (inverse of convertToAggregate).
  override protected def convertToRecord: UserAccount => Task[UserAccountRecord] = { agg =>
    val record = UserAccountRecord(
      id = agg.id.value,
      status = agg.status.entryName,
      email = agg.emailAddress.value,
      password = agg.password.value,
      firstName = agg.firstName,
      lastName = agg.lastName,
      createdAt = agg.createdAt,
      updatedAt = agg.updatedAt
    )
    Task.pure(record)
  }

}
68 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/memcached/UserAccountRepositoryOnMemcached.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.memcached
2 |
3 | import akka.actor.ActorSystem
4 | import cats.data.ReaderT
5 | import com.github.j5ik2o.dddbase.example.dao.memcached.UserAccountComponent
6 | import com.github.j5ik2o.dddbase.example.model._
7 | import com.github.j5ik2o.dddbase.example.repository.{ OnMemcached, UserAccountRepository }
8 | import com.github.j5ik2o.dddbase.memcached._
9 | import com.github.j5ik2o.reactive.memcached.MemcachedConnection
10 | import monix.eval.Task
11 |
12 | import scala.concurrent.duration._
13 |
/** Memcached-backed implementation of [[UserAccountRepository]].
  *
  * @param expireDuration expiry applied by the write features; must be at
  *                       least one second (NOTE(review): presumably because
  *                       memcached TTLs have one-second resolution — confirm)
  * @param system         actor system required by the reactive-memcached client
  */
class UserAccountRepositoryOnMemcached(val expireDuration: Duration)(implicit system: ActorSystem)
    extends UserAccountRepository[OnMemcached]
    with AggregateSingleReadFeature
    with AggregateSingleWriteFeature
    with AggregateMultiReadFeature
    with AggregateMultiWriteFeature
    with AggregateSingleSoftDeleteFeature
    with AggregateMultiSoftDeleteFeature
    with UserAccountComponent {

  // `>=` with `1.second` replaces the deprecated postfix form `1 seconds`,
  // and the message makes the rejected value visible to the caller.
  require(expireDuration >= 1.second, s"expireDuration must be at least 1 second, but was: $expireDuration")

  override type RecordType = UserAccountRecord
  override type DaoType = UserAccountDao

  override protected val dao: UserAccountDao = UserAccountDao()

  // Record -> aggregate; ids are persisted as strings, so parse back to Long.
  override protected def convertToAggregate: UserAccountRecord => ReaderT[Task, MemcachedConnection, UserAccount] = {
    record =>
      ReaderT { _ =>
        Task.pure {
          UserAccount(
            id = UserAccountId(record.id.toLong),
            status = Status.withName(record.status),
            emailAddress = EmailAddress(record.email),
            password = HashedPassword(record.password),
            firstName = record.firstName,
            lastName = record.lastName,
            createdAt = record.createdAt,
            updatedAt = record.updatedAt
          )
        }
      }
  }

  // Aggregate -> record (inverse of convertToAggregate).
  override protected def convertToRecord: UserAccount => ReaderT[Task, MemcachedConnection, UserAccountRecord] = {
    aggregate =>
      ReaderT { _ =>
        Task.pure {
          UserAccountRecord(
            id = aggregate.id.value.toString,
            status = aggregate.status.entryName,
            email = aggregate.emailAddress.value,
            password = aggregate.password.value,
            firstName = aggregate.firstName,
            lastName = aggregate.lastName,
            createdAt = aggregate.createdAt,
            updatedAt = aggregate.updatedAt
          )
        }
      }
  }

}
68 |
--------------------------------------------------------------------------------
/jdbc/skinny/src/main/scala/com/github/j5ik2o/dddbase/skinny/SkinnyDaoSupport.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.skinny
2 |
3 | import scalikejdbc._
4 | import skinny.orm.feature.{ NoIdCUDFeature, NoIdFinderFeature }
5 | import skinny.orm.{ SkinnyCRUDMapperWithId, SkinnyNoIdCRUDMapper }
6 |
/** Base support for Skinny-ORM backed DAOs.
  *
  * Defines the record abstractions and a shared CRUD [[Dao]] contract, with
  * concrete mixins for single-column keys ([[DaoWithId]]) and composite keys
  * ([[DaoWithCompositeId]]).
  */
trait SkinnyDaoSupport {

  // A persisted row; `id` is the (possibly composite) primary-key value.
  trait Record[ID] extends Product {
    val id: ID
  }

  // A record carrying a `status` column, used for logical (soft) deletion.
  trait SoftDeletableRecord[ID] extends Record[ID] {
    val status: String
  }

  trait Dao[ID, R <: Record[ID]] extends NoIdCUDFeature[R] with NoIdFinderFeature[R] {

    // Named column parameters for the record's primary-key part(s).
    protected def toNamedIds(record: R): Seq[(Symbol, Any)]
    // Named column parameters for the record's non-key attributes.
    protected def toNamedValues(record: R): Seq[(Symbol, Any)]
    // WHERE fragment selecting the row(s) identified by `id`.
    protected def byCondition(id: ID): SQLSyntax

    // Upsert inside one local transaction: updates when exactly one matching
    // row exists, otherwise inserts.
    // NOTE(review): a count greater than 1 falls through to `create` — confirm
    // that duplicates cannot occur for the key condition.
    def createOrUpdate(record: R): Long =
      DB localTx { implicit dbSession =>
        if (countBy(byCondition(record.id)) == 1)
          update(record)
        else
          create(record)
      }

    // Inserts `record`; always reports 1 affected row regardless of the
    // value returned by `createWithAttributes`.
    def create(record: R)(implicit session: DBSession): Long = {
      createWithAttributes(toNamedIds(record) ++ toNamedValues(record): _*)
      1L
    }

    // Inserts each record in turn, returning the per-record results.
    def createAll(records: Seq[R])(implicit session: DBSession): Seq[Long] =
      records.map(create)

    // Updates the row keyed by `record.id` with the record's attributes.
    def update(record: R)(implicit session: DBSession): Long =
      updateById(record.id).withAttributes(toNamedValues(record): _*).toLong

    // Updates each record in turn, returning the per-record results.
    def updateAll(records: Seq[R])(implicit session: DBSession): Seq[Long] =
      records.map(update)

    // Builder for an update targeting the row(s) identified by `id`.
    def updateById(id: ID): UpdateOperationBuilder

    // Physically deletes the row(s) identified by `id`.
    def deleteById(id: ID)(implicit s: DBSession = autoSession): Int
  }

  /** DAO for tables with a single-column primary key supplied externally
    * (no auto-increment; the id is generated by the application).
    */
  trait DaoWithId[ID, R <: Record[ID]] extends Dao[ID, R] with SkinnyCRUDMapperWithId[ID, R] {

    // Binder for the id type, required by scalikejdbc to interpolate ids.
    implicit def pbf: ParameterBinderFactory[ID]

    override def useAutoIncrementPrimaryKey: Boolean = false
    override def useExternalIdGenerator: Boolean = true

    def rawValueToId(value: Any): ID
    def idToRawValue(id: ID): Any

    protected def toNamedIds(record: R): Seq[(Symbol, Any)] = Seq('id -> idToRawValue(record.id))

    protected def toNamedValues(record: R): Seq[(Symbol, Any)]

    protected def byCondition(id: ID): SQLSyntax =
      sqls.eq(column.id, id)

  }

  /** DAO for tables with a composite primary key; id-based operations are
    * expressed through the subclass-supplied `byCondition` fragment.
    */
  trait DaoWithCompositeId[ID, R <: Record[ID]] extends Dao[ID, R] with SkinnyNoIdCRUDMapper[R] {

    override def updateById(id: ID): UpdateOperationBuilder =
      updateBy(byCondition(id))

    override def deleteById(id: ID)(implicit s: DBSession): Int =
      deleteBy(byCondition(id))

  }

}
80 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/skinny/UserMessageRepositoryBySkinny.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.skinny
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.example.dao.skinny.UserMessageComponent
5 | import com.github.j5ik2o.dddbase.example.model._
6 | import com.github.j5ik2o.dddbase.example.repository.{ BySkinny, UserMessageRepository }
7 | import com.github.j5ik2o.dddbase.skinny._
8 | import monix.eval.Task
9 | import scalikejdbc.{ sqls, DBSession, SQLSyntax }
10 |
/** Skinny-ORM implementation of [[UserMessageRepository]], keyed by the
  * composite id (messageId, userId).
  */
trait UserMessageRepositoryBySkinny
    extends UserMessageRepository[BySkinny]
    with AggregateSingleReadFeature
    with AggregateSingleWriteFeature
    with AggregateMultiReadFeature
    with AggregateMultiWriteFeature
    with UserMessageComponent {
  override type RecordIdType = UserMessageRecordId
  override type RecordType = UserMessageRecord
  override type DaoType = UserMessageDao.type
  override protected val dao: UserMessageDao.type = UserMessageDao

  // Aggregate id -> record id (note the shared (messageId, userId) ordering).
  override protected def toRecordId(id: UserMessageId): UserMessageRecordId =
    UserMessageRecordId(id.messageId, id.userId)

  // WHERE fragment for a single composite key.
  override protected def byCondition(id: IdType): SQLSyntax =
    sqls.eq(dao.column.messageId, id.messageId).and.eq(dao.column.userId, id.userId)

  // WHERE fragment matching any of the given composite keys via a tuple IN.
  override protected def byConditions(ids: Seq[IdType]): SQLSyntax =
    sqls.in((dao.column.messageId, dao.column.userId), ids.map(v => (v.messageId, v.userId)))

  // Record -> aggregate; the DBSession from the ReaderT context is unused
  // because the conversion itself is pure.
  override protected def convertToAggregate: UserMessageRecord => ReaderT[Task, DBSession, UserMessage] = { rec =>
    ReaderT { _ =>
      val aggregate = UserMessage(
        id = UserMessageId(rec.userId, rec.messageId),
        status = Status.withName(rec.status),
        message = rec.message,
        createdAt = rec.createdAt,
        updatedAt = rec.updatedAt
      )
      Task.pure(aggregate)
    }
  }

  // Aggregate -> record (inverse of convertToAggregate).
  override protected def convertToRecord: UserMessage => ReaderT[Task, DBSession, UserMessageRecord] = { agg =>
    ReaderT { _ =>
      val record = UserMessageRecord(
        messageId = agg.id.messageId,
        userId = agg.id.userId,
        status = agg.status.entryName,
        message = agg.message,
        createdAt = agg.createdAt,
        updatedAt = agg.updatedAt
      )
      Task.pure(record)
    }
  }

}
62 |
// Concrete repository: the Skinny base behaviour plus soft-delete support.
class UserMessageRepositoryBySkinnyImpl
    extends UserMessageRepositoryBySkinny
    with AggregateSingleSoftDeleteFeature
    with AggregateMultiSoftDeleteFeature
67 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/skinny/UserAccountRepositoryBySkinny.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.skinny
2 |
3 | import cats.data.ReaderT
4 | import com.github.j5ik2o.dddbase.example.dao.skinny.UserAccountComponent
5 | import com.github.j5ik2o.dddbase.example.model._
6 | import com.github.j5ik2o.dddbase.example.repository.{ BySkinny, UserAccountRepository }
7 | import com.github.j5ik2o.dddbase.skinny._
8 | import monix.eval.Task
9 | import scalikejdbc._
10 |
/** Skinny-ORM implementation of [[UserAccountRepository]], keyed by a single
  * numeric id.
  */
trait UserAccountRepositoryBySkinny
    extends UserAccountRepository[BySkinny]
    with AggregateSingleReadFeature
    with AggregateSingleWriteFeature
    with AggregateMultiReadFeature
    with AggregateMultiWriteFeature
    with UserAccountComponent {

  override type RecordIdType = Long
  override type RecordType = UserAccountRecord
  override type DaoType = UserAccountDao.type
  override protected val dao: UserAccountDao.type = UserAccountDao

  // The aggregate id maps directly onto the record's primary-key value.
  override protected def toRecordId(id: UserAccountId): Long = id.value

  // WHERE fragments over the default alias for single and bulk lookups.
  override protected def byCondition(id: IdType): SQLSyntax = sqls.eq(dao.defaultAlias.id, id.value)
  override protected def byConditions(ids: Seq[IdType]): SQLSyntax = sqls.in(dao.defaultAlias.id, ids.map(_.value))

  // Record -> aggregate; the DBSession from the ReaderT context is unused
  // because the conversion itself is pure.
  override protected def convertToAggregate: UserAccountRecord => ReaderT[Task, DBSession, UserAccount] = { rec =>
    ReaderT { _ =>
      val aggregate = UserAccount(
        id = UserAccountId(rec.id),
        status = Status.withName(rec.status),
        emailAddress = EmailAddress(rec.email),
        password = HashedPassword(rec.password),
        firstName = rec.firstName,
        lastName = rec.lastName,
        createdAt = rec.createdAt,
        updatedAt = rec.updatedAt
      )
      Task.pure(aggregate)
    }
  }

  // Aggregate -> record (inverse of convertToAggregate).
  override protected def convertToRecord: UserAccount => ReaderT[Task, DBSession, UserAccountRecord] = { agg =>
    ReaderT { _ =>
      val record = UserAccountRecord(
        id = agg.id.value,
        status = agg.status.entryName,
        email = agg.emailAddress.value,
        password = agg.password.value,
        firstName = agg.firstName,
        lastName = agg.lastName,
        createdAt = agg.createdAt,
        updatedAt = agg.updatedAt
      )
      Task.pure(record)
    }
  }
}
63 |
// Concrete repository: the Skinny base behaviour plus soft-delete support.
class UserAccountRepositoryBySkinnyImpl
    extends UserAccountRepositoryBySkinny
    with AggregateSingleSoftDeleteFeature
    with AggregateMultiSoftDeleteFeature
68 |
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/memory/UserAccountRepositoryOnMemorySpec.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.memory
2 |
3 | import java.time.ZonedDateTime
4 |
5 | import com.github.j5ik2o.dddbase.AggregateNotFoundException
6 | import com.github.j5ik2o.dddbase.example.model._
7 | import com.github.j5ik2o.dddbase.example.repository.{ IdGenerator, SpecSupport, UserAccountRepository }
8 | import com.github.j5ik2o.dddbase.example.repository.util.ScalaFuturesSupportSpec
9 | import monix.eval.Task
10 | import monix.execution.Scheduler.Implicits.global
11 | import org.scalatest.concurrent.ScalaFutures
12 | import org.scalatest.{ FreeSpec, Matchers }
13 |
14 | import scala.concurrent.duration._
15 | import scala.concurrent.{ Await, Future }
16 |
/** Spec for the in-memory (Guava-cache backed) [[UserAccountRepository]]. */
class UserAccountRepositoryOnMemorySpec
    extends FreeSpec
    with ScalaFutures
    with ScalaFuturesSupportSpec
    with Matchers
    with SpecSupport {

  // Single fixture aggregate for the store/resolve round trip.
  val userAccount = UserAccount(
    id = UserAccountId(IdGenerator.generateIdValue),
    status = Status.Active,
    emailAddress = EmailAddress("test@test.com"),
    password = HashedPassword("aaa"),
    firstName = "Junichi",
    lastName = "Kato",
    createdAt = ZonedDateTime.now,
    updatedAt = None
  )

  // Ten distinct fixture aggregates for the multi-store round trip.
  val userAccounts = for (idx <- 1L to 10L)
    yield
      UserAccount(
        id = UserAccountId(IdGenerator.generateIdValue),
        status = Status.Active,
        emailAddress = EmailAddress(s"user${idx}@gmail.com"),
        password = HashedPassword("aaa"),
        firstName = "Junichi",
        lastName = "Kato",
        createdAt = ZonedDateTime.now,
        updatedAt = None
      )

  "UserAccountRepositoryOnMemory" - {
    "store" in {
      val repository = UserAccountRepository.onMemory()
      val result: UserAccount = (for {
        _ <- repository.store(userAccount)
        r <- repository.resolveById(userAccount.id)
      } yield r).runToFuture.futureValue

      result shouldBe userAccount
    }
    "storeMulti" in {
      val repository = UserAccountRepository.onMemory()
      val result: Seq[UserAccount] = (for {
        _ <- repository.storeMulti(userAccounts)
        r <- repository.resolveMulti(userAccounts.map(_.id))
      } yield r).runToFuture.futureValue

      sameAs(result, userAccounts) shouldBe true
    }
    "store then expired" in {
      val repository = UserAccountRepository.onMemory(expireAfterWrite = Some(1.second))
      // The previous `Task.pure(Thread.sleep(1000))` was wrong on two counts:
      // Task.pure evaluates its argument eagerly (the sleep ran at Task
      // construction, blocking a thread), and sleeping exactly the TTL raced
      // the cache expiry. Task.sleep is non-blocking, and 1100 ms lands
      // safely past the 1 s expireAfterWrite.
      val resultFuture: Future[UserAccount] = (for {
        _ <- repository.store(userAccount)
        _ <- Task.sleep(1100.millis)
        r <- repository.resolveById(userAccount.id)
      } yield r).runToFuture

      an[AggregateNotFoundException] should be thrownBy {
        Await.result(resultFuture, Duration.Inf)
      }
    }
  }

}
83 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/memory/UserAccountRepositoryOnMemory.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.memory
2 |
3 | import com.github.j5ik2o.dddbase.example.dao.memory.UserAccountComponent
4 | import com.github.j5ik2o.dddbase.example.model._
5 | import com.github.j5ik2o.dddbase.example.repository.{ OnMemory, UserAccountRepository }
6 | import com.github.j5ik2o.dddbase.memory._
7 | import com.google.common.base.Ticker
8 | import monix.eval.Task
9 |
10 | import scala.concurrent.duration.Duration
11 |
/** In-memory implementation of [[UserAccountRepository]] backed by a
  * configurable cache; every constructor parameter is forwarded verbatim to
  * [[UserAccountDao]] (all default to "unset").
  */
class UserAccountRepositoryOnMemory(
    concurrencyLevel: Option[Int] = None,
    expireAfterAccess: Option[Duration] = None,
    expireAfterWrite: Option[Duration] = None,
    initialCapacity: Option[Int] = None,
    maximumSize: Option[Int] = None,
    maximumWeight: Option[Int] = None,
    recordStats: Option[Boolean] = None,
    refreshAfterWrite: Option[Duration] = None,
    softValues: Option[Boolean] = None,
    ticker: Option[Ticker] = None,
    weakKeys: Option[Boolean] = None,
    weakValues: Option[Boolean] = None
) extends UserAccountRepository[OnMemory]
    with AggregateSingleReadFeature
    with AggregateSingleWriteFeature
    with AggregateMultiWriteFeature
    with AggregateMultiReadFeature
    with AggregateSingleSoftDeleteFeature
    with AggregateMultiSoftDeleteFeature
    with UserAccountComponent {

  override type RecordType = UserAccountRecord
  override type DaoType = UserAccountDao

  override protected val dao: UserAccountDao =
    new UserAccountDao(
      concurrencyLevel = concurrencyLevel,
      expireAfterAccess = expireAfterAccess,
      expireAfterWrite = expireAfterWrite,
      initialCapacity = initialCapacity,
      maximumSize = maximumSize,
      maximumWeight = maximumWeight,
      recordStats = recordStats,
      refreshAfterWrite = refreshAfterWrite,
      softValues = softValues,
      ticker = ticker,
      weakKeys = weakKeys,
      weakValues = weakValues
    )

  // Record -> aggregate; ids are stored as strings, so parse back to Long.
  override protected def convertToAggregate: UserAccountRecord => Task[UserAccount] = { rec =>
    val aggregate = UserAccount(
      id = UserAccountId(rec.id.toLong),
      status = Status.withName(rec.status),
      emailAddress = EmailAddress(rec.email),
      password = HashedPassword(rec.password),
      firstName = rec.firstName,
      lastName = rec.lastName,
      createdAt = rec.createdAt,
      updatedAt = rec.updatedAt
    )
    Task.pure(aggregate)
  }

  // Aggregate -> record (inverse of convertToAggregate).
  override protected def convertToRecord: UserAccount => Task[UserAccountRecord] = { agg =>
    val record = UserAccountRecord(
      id = agg.id.value.toString,
      status = agg.status.entryName,
      email = agg.emailAddress.value,
      password = agg.password.value,
      firstName = agg.firstName,
      lastName = agg.lastName,
      createdAt = agg.createdAt,
      updatedAt = agg.updatedAt
    )
    Task.pure(record)
  }

}
84 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/UserAccountRepository.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository
2 |
3 | import _root_.slick.jdbc.JdbcProfile
4 | import akka.actor.ActorSystem
5 | import com.github.j5ik2o.dddbase._
6 | import com.github.j5ik2o.dddbase.example.model._
7 | import com.github.j5ik2o.dddbase.example.repository.dynamodb.UserAccountRepositoryOnDynamoDB
8 | import com.github.j5ik2o.dddbase.example.repository.memcached.UserAccountRepositoryOnMemcached
9 | import com.github.j5ik2o.dddbase.example.repository.memory.UserAccountRepositoryOnMemory
10 | import com.github.j5ik2o.dddbase.example.repository.redis.UserAccountRepositoryOnRedis
11 | import com.github.j5ik2o.dddbase.example.repository.skinny.UserAccountRepositoryBySkinnyImpl
12 | import com.github.j5ik2o.dddbase.example.repository.slick.UserAccountRepositoryBySlickImpl
13 | import com.github.j5ik2o.reactive.aws.dynamodb.monix.DynamoDbMonixClient
14 | import com.google.common.base.Ticker
15 |
16 | import scala.concurrent.duration.Duration
17 |
/** Repository contract for the `UserAccount` aggregate, abstracted over the
  * effect/context type `M` (instantiated per backend by the companion's
  * factory methods, e.g. Task-based or ReaderT-based contexts).
  */
trait UserAccountRepository[M[_]]
    extends AggregateSingleReader[M]
    with AggregateSingleWriter[M]
    with AggregateMultiReader[M]
    with AggregateMultiWriter[M]
    with AggregateSingleSoftDeletable[M]
    with AggregateMultiSoftDeletable[M] {
  override type IdType = UserAccountId
  override type AggregateType = UserAccount

}
29 |
/** Factory methods producing a [[UserAccountRepository]] for each supported
  * backend.
  */
object UserAccountRepository {

  /** Slick (JDBC) backed repository. */
  def bySlick(profile: JdbcProfile, db: JdbcProfile#Backend#Database): UserAccountRepository[BySlick] =
    new UserAccountRepositoryBySlickImpl(profile, db)

  /** Skinny-ORM backed repository. */
  def bySkinny: UserAccountRepository[BySkinny] = new UserAccountRepositoryBySkinnyImpl

  /** DynamoDB backed repository. */
  def onDynamoDB(client: DynamoDbMonixClient): UserAccountRepository[OnDynamoDB] =
    new UserAccountRepositoryOnDynamoDB(client)

  /** Redis backed repository; entries expire after `expireDuration`. */
  def onRedis(
      expireDuration: Duration
  )(implicit actorSystem: ActorSystem): UserAccountRepository[OnRedis] =
    new UserAccountRepositoryOnRedis(expireDuration)

  /** Memcached backed repository; entries expire after `expireDuration`. */
  def onMemcached(
      expireDuration: Duration
  )(implicit actorSystem: ActorSystem): UserAccountRepository[OnMemcached] =
    new UserAccountRepositoryOnMemcached(expireDuration)

  /** In-memory (cache backed) repository; all tuning knobs are optional. */
  def onMemory(
      concurrencyLevel: Option[Int] = None,
      expireAfterAccess: Option[Duration] = None,
      expireAfterWrite: Option[Duration] = None,
      initialCapacity: Option[Int] = None,
      maximumSize: Option[Int] = None,
      maximumWeight: Option[Int] = None,
      recordStats: Option[Boolean] = None,
      refreshAfterWrite: Option[Duration] = None,
      softValues: Option[Boolean] = None,
      ticker: Option[Ticker] = None,
      weakKeys: Option[Boolean] = None,
      weakValues: Option[Boolean] = None
  ): UserAccountRepository[OnMemory] =
    new UserAccountRepositoryOnMemory(
      concurrencyLevel = concurrencyLevel,
      expireAfterAccess = expireAfterAccess,
      expireAfterWrite = expireAfterWrite,
      initialCapacity = initialCapacity,
      maximumSize = maximumSize,
      maximumWeight = maximumWeight,
      recordStats = recordStats,
      refreshAfterWrite = refreshAfterWrite,
      softValues = softValues,
      ticker = ticker,
      weakKeys = weakKeys,
      weakValues = weakValues
    )

}
80 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/dao/UserMessage.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.dao
2 |
3 | package slick {
4 | import com.github.j5ik2o.dddbase.slick.SlickDaoSupport
5 |
  /** Slick table/record mapping for the `user_message` table. */
  trait UserMessageComponent extends SlickDaoSupport {
    import profile.api._

    // Database row for a user message; (messageId, userId) forms the
    // composite primary key and `status` backs soft deletion.
    case class UserMessageRecord(
      messageId: Long,
      userId: Long,
      status: String,
      message: String,
      createdAt: java.time.ZonedDateTime,
      updatedAt: Option[java.time.ZonedDateTime]
    ) extends SoftDeletableRecord

    // Lifted table definition; column order in `*` must mirror the record's
    // constructor order.
    case class UserMessages(tag: Tag)
        extends TableBase[UserMessageRecord](tag, "user_message")
        with SoftDeletableTableSupport[UserMessageRecord] {
      def messageId: Rep[Long] = column[Long]("message_id")
      def userId: Rep[Long] = column[Long]("user_id")
      def status: Rep[String] = column[String]("status")
      def message: Rep[String] = column[String]("message")
      def createdAt: Rep[java.time.ZonedDateTime] = column[java.time.ZonedDateTime]("created_at")
      def updatedAt: Rep[Option[java.time.ZonedDateTime]] = column[Option[java.time.ZonedDateTime]]("updated_at")
      def pk = primaryKey("pk", (messageId, userId))
      override def * =
        (messageId, userId, status, message, createdAt, updatedAt) <> (UserMessageRecord.tupled, UserMessageRecord.unapply)
    }

    // Query entry point for the table.
    object UserMessageDao extends TableQuery(UserMessages)

  }
35 |
36 | }
37 |
38 | package skinny {
39 |
40 | import com.github.j5ik2o.dddbase.skinny.SkinnyDaoSupport
41 | import scalikejdbc._
42 | import _root_.skinny.orm._
43 |
  /** Skinny-ORM record/DAO mapping for the `user_message` table, keyed by
    * the composite id (messageId, userId).
    */
  trait UserMessageComponent extends SkinnyDaoSupport {

    // Composite primary key of a user message row.
    case class UserMessageRecordId(messageId: Long, userId: Long)

    case class UserMessageRecord(
      messageId: Long,
      userId: Long,
      status: String,
      message: String,
      createdAt: java.time.ZonedDateTime,
      updatedAt: Option[java.time.ZonedDateTime]
    ) extends Record[UserMessageRecordId] {
      // The composite id is derived from the two key columns.
      override val id: UserMessageRecordId = UserMessageRecordId(messageId, userId)
    }

    object UserMessageDao extends DaoWithCompositeId[UserMessageRecordId, UserMessageRecord] {

      //import ParameterBinderFactory._

      override val tableName: String = "user_message"

      // Key columns as named parameters for inserts.
      override protected def toNamedIds(
        record: UserMessageRecord
      ): Seq[(Symbol, Any)] = Seq(
        'messageId -> record.id.messageId,
        'userId -> record.id.userId
      )

      // Non-key columns as named parameters.
      // NOTE(review): these symbols mix camelCase ('messageId above) with
      // snake_case ('created_at / 'updated_at) — both appear to resolve to
      // the same snake_case column names via the ORM's name conversion, but
      // confirm before normalizing.
      override protected def toNamedValues(record: UserMessageRecord): Seq[(Symbol, Any)] = Seq(
        'status -> record.status,
        'message -> record.message,
        'created_at -> record.createdAt,
        'updated_at -> record.updatedAt
      )

      override def defaultAlias: Alias[UserMessageRecord] = createAlias("u")

      // Builds a record from a result set by matching column/field names.
      override def extract(rs: WrappedResultSet, s: ResultName[UserMessageRecord]): UserMessageRecord =
        autoConstruct(rs, s)

      // WHERE fragment selecting the row with the given composite key.
      override protected def byCondition(id: UserMessageRecordId): scalikejdbc.SQLSyntax =
        sqls.eq(column.messageId, id.messageId).and.eq(column.userId, id.userId)

    }

  }
90 |
91 | }
92 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/dao/UserAccount.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.dao
2 |
3 | package slick {
4 | import com.github.j5ik2o.dddbase.slick.SlickDaoSupport
5 |
  /** Slick table/record mapping for the `user_account` table. */
  trait UserAccountComponent extends SlickDaoSupport {
    import profile.api._

    // Database row for a user account; `id` is the primary key and
    // `status` backs soft deletion.
    case class UserAccountRecord(
      id: Long,
      status: String,
      email: String,
      password: String,
      firstName: String,
      lastName: String,
      createdAt: java.time.ZonedDateTime,
      updatedAt: Option[java.time.ZonedDateTime]
    ) extends SoftDeletableRecord

    // Lifted table definition; column order in `*` must mirror the record's
    // constructor order.
    case class UserAccounts(tag: Tag)
        extends TableBase[UserAccountRecord](tag, "user_account")
        with SoftDeletableTableSupport[UserAccountRecord] {
      def id: Rep[Long] = column[Long]("id")
      def status: Rep[String] = column[String]("status")
      def email: Rep[String] = column[String]("email")
      def password: Rep[String] = column[String]("password")
      def firstName: Rep[String] = column[String]("first_name")
      def lastName: Rep[String] = column[String]("last_name")
      def createdAt: Rep[java.time.ZonedDateTime] = column[java.time.ZonedDateTime]("created_at")
      def updatedAt: Rep[Option[java.time.ZonedDateTime]] = column[Option[java.time.ZonedDateTime]]("updated_at")
      def pk = primaryKey("pk", (id))
      override def * =
        (id, status, email, password, firstName, lastName, createdAt, updatedAt) <> (UserAccountRecord.tupled, UserAccountRecord.unapply)
    }

    // Query entry point for the table.
    object UserAccountDao extends TableQuery(UserAccounts)

  }
39 |
40 | }
41 |
42 | package skinny {
43 |
44 | import com.github.j5ik2o.dddbase.skinny.SkinnyDaoSupport
45 | import scalikejdbc._
46 | import _root_.skinny.orm._
47 |
48 | trait UserAccountComponent extends SkinnyDaoSupport {
49 |
50 | case class UserAccountRecord(
51 | id: Long,
52 | status: String,
53 | email: String,
54 | password: String,
55 | firstName: String,
56 | lastName: String,
57 | createdAt: java.time.ZonedDateTime,
58 | updatedAt: Option[java.time.ZonedDateTime]
59 | ) extends Record[Long]
60 |
61 | object UserAccountDao extends DaoWithId[Long, UserAccountRecord] {
62 |
63 | override implicit def pbf: ParameterBinderFactory[Long] = ParameterBinderFactory.longParameterBinderFactory
64 |
65 | override def useAutoIncrementPrimaryKey: Boolean = false
66 |
67 | override val tableName: String = "user_account"
68 |
69 | override protected def toNamedValues(record: UserAccountRecord): Seq[(Symbol, Any)] = Seq(
70 | 'status -> record.status,
71 | 'email -> record.email,
72 | 'password -> record.password,
73 | 'first_name -> record.firstName,
74 | 'last_name -> record.lastName,
75 | 'created_at -> record.createdAt,
76 | 'updated_at -> record.updatedAt
77 | )
78 |
79 | override def defaultAlias: Alias[UserAccountRecord] = createAlias("u")
80 |
81 | override def extract(rs: WrappedResultSet, s: ResultName[UserAccountRecord]): UserAccountRecord =
82 | autoConstruct(rs, s)
83 |
84 | override def rawValueToId(value: Any): Long = value.toString.toLong
85 |
86 | override def idToRawValue(id: Long): Any = id
87 |
88 | }
89 |
90 | }
91 |
92 | }
93 |
--------------------------------------------------------------------------------
/example/templates/UserAccount_template.ftl:
--------------------------------------------------------------------------------
<#-- Code-generation template for single-primary-key DAO components (Slick + Skinny).
     FIX: restored FreeMarker end tags (</#if>, </#list>) which were garbled as
     `#if>` / `#list>` and would not parse. -->
package com.github.j5ik2o.dddbase.example.dao

package slick {
  import com.github.j5ik2o.dddbase.slick.SlickDaoSupport

<#assign softDelete=false>
  trait ${className}Component extends SlickDaoSupport {
    import profile.api._

    case class ${className}Record(
<#list primaryKeys as primaryKey>
      ${primaryKey.propertyName}: ${primaryKey.propertyTypeName}<#if primaryKey_has_next>,</#if></#list><#if primaryKeys?has_content>,</#if>
<#list columns as column>
<#if column.columnName == "status">
<#assign softDelete=true>
</#if>
<#if column.nullable>      ${column.propertyName}: Option[${column.propertyTypeName}]<#if column_has_next>,</#if>
<#else>      ${column.propertyName}: ${column.propertyTypeName}<#if column_has_next>,</#if>
</#if>
</#list>
    ) extends <#if softDelete == false>Record<#else>SoftDeletableRecord</#if>

    case class ${className}s(tag: Tag) extends TableBase[${className}Record](tag, "${tableName}")<#if softDelete == true> with SoftDeletableTableSupport[${className}Record]</#if> {
<#list primaryKeys as primaryKey>
      def ${primaryKey.propertyName}: Rep[${primaryKey.propertyTypeName}] = column[${primaryKey.propertyTypeName}]("${primaryKey.columnName}")
</#list>
<#list columns as column>
<#if column.nullable>
      def ${column.propertyName}: Rep[Option[${column.propertyTypeName}]] = column[Option[${column.propertyTypeName}]]("${column.columnName}")
<#else>
      def ${column.propertyName}: Rep[${column.propertyTypeName}] = column[${column.propertyTypeName}]("${column.columnName}")
</#if>
</#list>
      def pk = primaryKey("pk", (<#list primaryKeys as primaryKey>${primaryKey.propertyName}<#if primaryKey_has_next>,</#if></#list>))
      override def * = (<#list primaryKeys as primaryKey>${primaryKey.propertyName}<#if primaryKey_has_next>,</#if></#list><#if primaryKeys?has_content>,</#if><#list columns as column>${column.propertyName}<#if column_has_next>,</#if></#list>) <> (${className}Record.tupled, ${className}Record.unapply)
    }

    object ${className}Dao extends TableQuery(${className}s)

  }

}

package skinny {

  import com.github.j5ik2o.dddbase.skinny.SkinnyDaoSupport
  import scalikejdbc._
  import _root_.skinny.orm._

  trait ${className}Component extends SkinnyDaoSupport {

    case class ${className}Record(
<#list primaryKeys as primaryKey>
      ${primaryKey.propertyName}: ${primaryKey.propertyTypeName}<#if primaryKey_has_next>,</#if></#list><#if primaryKeys?has_content>,</#if>
<#list columns as column>
<#if column.columnName == "status">
<#assign softDelete=true>
</#if>
<#if column.nullable>      ${column.propertyName}: Option[${column.propertyTypeName}]<#if column_has_next>,</#if>
<#else>      ${column.propertyName}: ${column.propertyTypeName}<#if column_has_next>,</#if>
</#if>
</#list>
    ) extends Record[Long]

    object ${className}Dao extends DaoWithId[Long, ${className}Record] {

      override implicit def pbf: ParameterBinderFactory[Long] = ParameterBinderFactory.longParameterBinderFactory

      override def useAutoIncrementPrimaryKey: Boolean = false

      override val tableName: String = "${tableName}"

      override protected def toNamedValues(record: ${className}Record): Seq[(Symbol, Any)] = Seq(
<#list columns as column>        '${column.name} -> record.${column.propertyName}<#if column.name?ends_with("id") || column.name?ends_with("Id")>.value</#if><#if column_has_next>,</#if>
</#list>
      )

      override def defaultAlias: Alias[${className}Record] = createAlias("${className[0]?lower_case}")

      override def extract(rs: WrappedResultSet, s: ResultName[${className}Record]): ${className}Record = autoConstruct(rs, s)

      override def rawValueToId(value: Any): Long = value.toString.toLong

      override def idToRawValue(id: Long): Any = id

    }

  }

}
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/memcached/UserAccountRepositoryOnMemcachedSpec.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.memcached
2 |
3 | import java.net.InetSocketAddress
4 | import java.time.ZonedDateTime
5 |
6 | import akka.actor.ActorSystem
7 | import akka.routing.DefaultResizer
8 | import akka.testkit.TestKit
9 | import com.github.j5ik2o.dddbase.AggregateNotFoundException
10 | import com.github.j5ik2o.dddbase.example.model._
11 | import com.github.j5ik2o.dddbase.example.repository.util.ScalaFuturesSupportSpec
12 | import com.github.j5ik2o.dddbase.example.repository.{ IdGenerator, SpecSupport, UserAccountRepository }
13 | import com.github.j5ik2o.reactive.memcached._
14 | import monix.eval.Task
15 | import monix.execution.Scheduler.Implicits.global
16 | import org.scalatest.concurrent.ScalaFutures
17 | import org.scalatest.{ FreeSpecLike, Matchers }
18 |
19 | import scala.concurrent.Await
20 | import scala.concurrent.duration._
21 |
// Integration spec for the memcached-backed UserAccountRepository.
// Uses the embedded memcached server from MemcachedSpecSupport and an
// ActorSystem for the reactive-memcached connection pool.
class UserAccountRepositoryOnMemcachedSpec
    extends TestKit(ActorSystem("UserAccountRepositoryOnMemcachedSpec"))
    with FreeSpecLike
    with MemcachedSpecSupport
    with ScalaFutures
    with ScalaFuturesSupportSpec
    with Matchers
    with SpecSupport {

  // Initialized in beforeAll, once the embedded server's port is known.
  var connectionPool: MemcachedConnectionPool[Task] = _

  protected override def beforeAll(): Unit = {
    super.beforeAll() // lets MemcachedSpecSupport start the embedded server first
    val peerConfig = PeerConfig(new InetSocketAddress("127.0.0.1", memcachedTestServer.getPort))
    // Round-robin pool of 3 connections per peer, resizable between 1 and 5.
    connectionPool = MemcachedConnectionPool.ofSingleRoundRobin(
      sizePerPeer = 3,
      peerConfig,
      MemcachedConnection(_, _),
      reSizer = Some(DefaultResizer(lowerBound = 1, upperBound = 5))
    )
  }

  protected override def afterAll(): Unit = {
    super.afterAll()
    TestKit.shutdownActorSystem(system)
  }

  // Single fixture account; a fresh random id is generated per run.
  val userAccount: UserAccount = UserAccount(
    id = UserAccountId(IdGenerator.generateIdValue),
    status = Status.Active,
    emailAddress = EmailAddress("test@test.com"),
    password = HashedPassword("aaa"),
    firstName = "Junichi",
    lastName = "Kato",
    createdAt = ZonedDateTime.now,
    updatedAt = None
  )

  // Ten distinct fixture accounts for the multi-record tests.
  val userAccounts: Seq[UserAccount] = for (idx <- 1L to 10L)
    yield
      UserAccount(
        id = UserAccountId(IdGenerator.generateIdValue),
        status = Status.Active,
        emailAddress = EmailAddress(s"user${idx}@gmail.com"),
        password = HashedPassword("aaa"),
        firstName = "Junichi",
        lastName = "Kato",
        createdAt = ZonedDateTime.now,
        updatedAt = None
      )

  "UserAccountRepositoryOnMemcached" - {
    // store + resolveById round-trip; Duration.Inf means the entry never expires.
    "store" in {
      val repository = UserAccountRepository.onMemcached(expireDuration = Duration.Inf)
      val result = connectionPool
        .withConnectionF { con =>
          (for {
            _ <- repository.store(userAccount)
            r <- repository.resolveById(userAccount.id)
          } yield r).run(con)
        }
        .runToFuture
        .futureValue

      result shouldBe userAccount
    }
    // Bulk round-trip; sameAs (from SpecSupport) compares the two sequences.
    "storeMulti" in {
      val repository = UserAccountRepository.onMemcached(expireDuration = Duration.Inf)
      val result = connectionPool
        .withConnectionF { con =>
          (for {
            _ <- repository.storeMulti(userAccounts)
            r <- repository.resolveMulti(userAccounts.map(_.id))
          } yield r).run(con)
        }
        .runToFuture
        .futureValue

      sameAs(result, userAccounts) shouldBe true
    }
    // Stores with a 1.5s TTL, sleeps 3s inside the program, then expects
    // resolveById to fail. Timing-sensitive: relies on the 3s sleep comfortably
    // exceeding the TTL.
    "store then expired" in {
      val repository = UserAccountRepository.onMemcached(expireDuration = 1.5 seconds)
      val resultFuture = connectionPool.withConnectionF { con =>
        (for {
          _ <- repository.store(userAccount)
          _ <- ReaderTTask.pure(Thread.sleep(3000))
          r <- repository.resolveById(userAccount.id)
        } yield r).run(con)
      }.runToFuture

      an[AggregateNotFoundException] should be thrownBy {
        Await.result(resultFuture, Duration.Inf)
      }
    }
  }

}
119 |
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/dynamodb/UserAccountRepositoryOnDynamoDBSpec.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.dynamodb
2 |
3 | import java.net.URI
4 | import java.time.ZonedDateTime
5 |
6 | import com.github.j5ik2o.dddbase.example.model._
7 | import com.github.j5ik2o.dddbase.example.repository.{ IdGenerator, SpecSupport }
8 | import com.github.j5ik2o.reactive.aws.dynamodb.implicits._
9 | import com.github.j5ik2o.reactive.aws.dynamodb.monix.DynamoDbMonixClient
10 | import com.github.j5ik2o.reactive.aws.dynamodb.{ DynamoDBEmbeddedSpecSupport, DynamoDbAsyncClient }
11 | import monix.execution.Scheduler.Implicits.global
12 | import org.scalatest.concurrent.ScalaFutures
13 | import org.scalatest.{ FreeSpec, Matchers }
14 | import software.amazon.awssdk.auth.credentials.{ AwsBasicCredentials, StaticCredentialsProvider }
15 | import software.amazon.awssdk.services.dynamodb.model._
16 | import software.amazon.awssdk.services.dynamodb.{ DynamoDbAsyncClient => JavaDynamoDbAsyncClient }
17 |
18 | import scala.concurrent.duration._
19 |
// Integration spec for the DynamoDB-backed UserAccountRepository, run against
// the embedded DynamoDB from DynamoDBEmbeddedSpecSupport.
class UserAccountRepositoryOnDynamoDBSpec
    extends FreeSpec
    with Matchers
    with ScalaFutures
    with DynamoDBEmbeddedSpecSupport
    with SpecSupport {
  // Generous timeout: embedded DynamoDB calls can be slow on first use.
  implicit val pc: PatienceConfig = PatienceConfig(20 seconds, 1 seconds)

  // Raw AWS SDK client pointed at the embedded endpoint with static test credentials.
  val underlying: JavaDynamoDbAsyncClient = JavaDynamoDbAsyncClient
    .builder()
    .credentialsProvider(
      StaticCredentialsProvider.create(AwsBasicCredentials.create(accessKeyId, secretAccessKey))
    )
    .endpointOverride(URI.create(dynamoDBEndpoint))
    .build()

  // Single fixture account; a fresh random id is generated per run.
  val userAccount = UserAccount(
    id = UserAccountId(IdGenerator.generateIdValue),
    status = Status.Active,
    emailAddress = EmailAddress("test@test.com"),
    password = HashedPassword("aaa"),
    firstName = "Junichi",
    lastName = "Kato",
    createdAt = ZonedDateTime.now,
    updatedAt = None
  )

  // Ten distinct fixture accounts for the multi-record test.
  val userAccounts = for (idx <- 1L to 10L)
    yield
      UserAccount(
        id = UserAccountId(IdGenerator.generateIdValue),
        status = Status.Active,
        emailAddress = EmailAddress(s"user${idx}@gmail.com"),
        password = HashedPassword("aaa"),
        firstName = "Junichi",
        lastName = "Kato",
        createdAt = ZonedDateTime.now,
        updatedAt = Some(ZonedDateTime.now)
      )

  val client: DynamoDbMonixClient = DynamoDbMonixClient(DynamoDbAsyncClient(underlying))

  "UserAccountRepositoryOnDynamoDB" - {
    // Creates the table, then does a store + resolveById round-trip.
    "store" in {
      createTable("UserAccount")
      val repository = new UserAccountRepositoryOnDynamoDB(client)
      val result =
        (for {
          _ <- repository.store(userAccount)
          r <- repository.resolveById(userAccount.id)
        } yield r).runToFuture.futureValue

      result shouldBe userAccount
    }
    // NOTE(review): this test does not call createTable itself, so it appears to
    // depend on "store" having run first to create the UserAccount table —
    // confirm test execution order is guaranteed.
    "storeMulti" in {
      val repository = new UserAccountRepositoryOnDynamoDB(client)
      val result =
        (for {
          _ <- repository.storeMulti(userAccounts)
          r <- repository.resolveMulti(userAccounts.map(_.id))
        } yield r).runToFuture.futureValue

      sameAs(result, userAccounts) shouldBe true
    }
  }

  // Creates a table with a single string hash key "Id" and fixed throughput,
  // blocking until the create call completes. Returns the name and response.
  private def createTable(
    tableName: String
  ): (String, CreateTableResponse) = {
    val createRequest = CreateTableRequest
      .builder()
      .attributeDefinitionsAsScala(
        Seq(
          AttributeDefinition
            .builder()
            .attributeName("Id")
            .attributeType(ScalarAttributeType.S).build()
        )
      )
      .keySchemaAsScala(
        Seq(
          KeySchemaElement
            .builder()
            .attributeName("Id")
            .keyType(KeyType.HASH).build()
        )
      )
      .provisionedThroughput(
        ProvisionedThroughput
          .builder()
          .readCapacityUnits(10L)
          .writeCapacityUnits(10L).build()
      )
      .tableName(tableName).build()
    val createResponse = client
      .createTable(createRequest)
      .runToFuture
      .futureValue
    (tableName, createResponse)
  }
}
121 |
--------------------------------------------------------------------------------
/example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/redis/UserAccountRepositoryOnRedisSpec.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.repository.redis
2 |
3 | import java.net.InetSocketAddress
4 | import java.time.ZonedDateTime
5 |
6 | import akka.actor.ActorSystem
7 | import akka.routing.DefaultResizer
8 | import akka.testkit.TestKit
9 | import com.github.j5ik2o.dddbase.AggregateNotFoundException
10 | import com.github.j5ik2o.dddbase.example.model._
11 | import com.github.j5ik2o.dddbase.example.repository.{ IdGenerator, SpecSupport, UserAccountRepository }
12 | import com.github.j5ik2o.dddbase.example.repository.util.ScalaFuturesSupportSpec
13 | import com.github.j5ik2o.reactive.redis._
14 | import monix.eval.Task
15 | import monix.execution.Scheduler.Implicits.global
16 | import org.scalatest.concurrent.ScalaFutures
17 | import org.scalatest.{ FreeSpecLike, Matchers }
18 |
19 | import scala.concurrent.Await
20 | import scala.concurrent.duration._
21 |
// Integration spec for the Redis-backed UserAccountRepository.
// Uses the embedded Redis master from RedisSpecSupport and an ActorSystem for
// the reactive-redis connection pool.
class UserAccountRepositoryOnRedisSpec
    extends TestKit(ActorSystem("UserAccountRepositoryOnRedisSpec"))
    with FreeSpecLike
    with RedisSpecSupport
    with ScalaFutures
    with ScalaFuturesSupportSpec
    with Matchers
    with SpecSupport {

  // Initialized in beforeAll, once the embedded server's port is known.
  var connectionPool: RedisConnectionPool[Task] = _

  // Fixed settle delay, scalable via the SBT_TEST_TIME_FACTOR env var for slow CI.
  override def waitFor(): Unit = {
    Thread.sleep(500 * sys.env.get("SBT_TEST_TIME_FACTOR").map(_.toLong).getOrElse(1L))
  }

  protected override def beforeAll(): Unit = {
    super.beforeAll() // lets RedisSpecSupport start the embedded server first
    val peerConfig = PeerConfig(new InetSocketAddress("127.0.0.1", redisMasterServer.getPort))
    // Round-robin pool of 3 connections per peer, resizable between 1 and 5.
    connectionPool = RedisConnectionPool.ofSingleRoundRobin(
      sizePerPeer = 3,
      peerConfig,
      RedisConnection.apply,
      reSizer = Some(DefaultResizer(lowerBound = 1, upperBound = 5))
    )

  }

  protected override def afterAll(): Unit = {
    super.afterAll()
    waitFor()
    TestKit.shutdownActorSystem(system)
  }

  // Single fixture account; a fresh random id is generated per run.
  val userAccount = UserAccount(
    id = UserAccountId(IdGenerator.generateIdValue),
    status = Status.Active,
    emailAddress = EmailAddress("test@test.com"),
    password = HashedPassword("aaa"),
    firstName = "Junichi",
    lastName = "Kato",
    createdAt = ZonedDateTime.now,
    updatedAt = None
  )

  // Ten distinct fixture accounts for the multi-record test.
  val userAccounts = for (idx <- 1L to 10L)
    yield
      UserAccount(
        id = UserAccountId(IdGenerator.generateIdValue),
        status = Status.Active,
        emailAddress = EmailAddress(s"user${idx}@gmail.com"),
        password = HashedPassword("aaa"),
        firstName = "Junichi",
        lastName = "Kato",
        createdAt = ZonedDateTime.now,
        updatedAt = None
      )

  "UserAccountRepositoryOnRedis" - {
    // store + resolveById round-trip; Duration.Inf means the key never expires.
    "store" in {
      val repository = UserAccountRepository.onRedis(expireDuration = Duration.Inf)
      val result = connectionPool
        .withConnectionF { con =>
          (for {
            _ <- repository.store(userAccount)
            r <- repository.resolveById(userAccount.id)
          } yield r).run(con)
        }
        .runToFuture
        .futureValue

      result shouldBe userAccount
    }
    // Bulk round-trip; sameAs (from SpecSupport) compares the two sequences.
    "storeMulti" in {
      val repository = UserAccountRepository.onRedis(expireDuration = Duration.Inf)
      val result = connectionPool
        .withConnectionF { con =>
          (for {
            _ <- repository.storeMulti(userAccounts)
            r <- repository.resolveMulti(userAccounts.map(_.id))
          } yield r).run(con)
        }
        .runToFuture
        .futureValue

      sameAs(result, userAccounts) shouldBe true
    }
    // Stores with a 1s TTL, sleeps 3s inside the program, then expects
    // resolveById to fail. Timing-sensitive: relies on the 3s sleep comfortably
    // exceeding the TTL.
    "store then expired" in {
      val repository = UserAccountRepository.onRedis(expireDuration = 1 seconds)
      val resultFuture = connectionPool.withConnectionF { con =>
        (for {
          _ <- repository.store(userAccount)
          _ <- ReaderTTask.pure(Thread.sleep(3000))
          r <- repository.resolveById(userAccount.id)
        } yield r).run(con)
      }.runToFuture

      an[AggregateNotFoundException] should be thrownBy {
        Await.result(resultFuture, Duration.Inf)
      }
    }
  }

}
125 |
--------------------------------------------------------------------------------
/example/templates/UserMessage_template.ftl:
--------------------------------------------------------------------------------
<#-- Code-generation template for composite-primary-key DAO components (Slick + Skinny).
     FIXES:
     - byCondition's parameter type was hard-coded to UserMessageRecordId, so the
       template only produced compiling code for className == "UserMessage"; it
       now uses ${className}RecordId.
     - Restored FreeMarker end tags (</#if>, </#list>) which were garbled as
       `#if>` / `#list>` and would not parse. -->
package com.github.j5ik2o.dddbase.example.dao

package slick {
  import com.github.j5ik2o.dddbase.slick.SlickDaoSupport

<#assign softDelete=false>
  trait ${className}Component extends SlickDaoSupport {
    import profile.api._

    case class ${className}Record(
<#list primaryKeys as primaryKey>
      ${primaryKey.propertyName}: ${primaryKey.propertyTypeName}<#if primaryKey_has_next>,</#if></#list><#if primaryKeys?has_content>,</#if>
<#list columns as column>
<#if column.columnName == "status">
<#assign softDelete=true>
</#if>
<#if column.nullable>      ${column.propertyName}: Option[${column.propertyTypeName}]<#if column_has_next>,</#if>
<#else>      ${column.propertyName}: ${column.propertyTypeName}<#if column_has_next>,</#if>
</#if>
</#list>
    ) extends <#if softDelete == false>Record<#else>SoftDeletableRecord</#if>

    case class ${className}s(tag: Tag) extends TableBase[${className}Record](tag, "${tableName}")<#if softDelete == true> with SoftDeletableTableSupport[${className}Record]</#if> {
<#list primaryKeys as primaryKey>
      def ${primaryKey.propertyName}: Rep[${primaryKey.propertyTypeName}] = column[${primaryKey.propertyTypeName}]("${primaryKey.columnName}")
</#list>
<#list columns as column>
<#if column.nullable>
      def ${column.propertyName}: Rep[Option[${column.propertyTypeName}]] = column[Option[${column.propertyTypeName}]]("${column.columnName}")
<#else>
      def ${column.propertyName}: Rep[${column.propertyTypeName}] = column[${column.propertyTypeName}]("${column.columnName}")
</#if>
</#list>
      def pk = primaryKey("pk", (<#list primaryKeys as primaryKey>${primaryKey.propertyName}<#if primaryKey_has_next>,</#if></#list>))
      override def * = (<#list primaryKeys as primaryKey>${primaryKey.propertyName}<#if primaryKey_has_next>,</#if></#list><#if primaryKeys?has_content>,</#if><#list columns as column>${column.propertyName}<#if column_has_next>,</#if></#list>) <> (${className}Record.tupled, ${className}Record.unapply)
    }

    object ${className}Dao extends TableQuery(${className}s)

  }

}

package skinny {

  import com.github.j5ik2o.dddbase.skinny.SkinnyDaoSupport
  import scalikejdbc._
  import _root_.skinny.orm._

  trait ${className}Component extends SkinnyDaoSupport {

    case class ${className}RecordId(<#list primaryKeys as primaryKey>
      ${primaryKey.propertyName}: ${primaryKey.propertyTypeName}<#if primaryKey_has_next>,</#if></#list>)

    case class ${className}Record(
<#list primaryKeys as primaryKey>
      ${primaryKey.propertyName}: ${primaryKey.propertyTypeName}<#if primaryKey_has_next>,</#if></#list><#if primaryKeys?has_content>,</#if>
<#list columns as column>
<#if column.columnName == "status">
<#assign softDelete=true>
</#if>
<#if column.nullable>      ${column.propertyName}: Option[${column.propertyTypeName}]<#if column_has_next>,</#if>
<#else>      ${column.propertyName}: ${column.propertyTypeName}<#if column_has_next>,</#if>
</#if>
</#list>
    ) extends Record[${className}RecordId] {
      override val id: ${className}RecordId = ${className}RecordId(<#list primaryKeys as primaryKey>${primaryKey.propertyName}<#if primaryKey_has_next>,</#if></#list>)
    }

    object ${className}Dao extends DaoWithCompositeId[${className}RecordId, ${className}Record] {

      //import ParameterBinderFactory._

      override val tableName: String = "${tableName}"

      override protected def toNamedIds(
        record: ${className}Record
      ): Seq[(Symbol, Any)] = Seq(
<#list primaryKeys as primaryKey>
        '${primaryKey.propertyName} -> record.id.${primaryKey.propertyName}<#if primaryKey_has_next>,</#if>
</#list>
      )

      override protected def toNamedValues(record: ${className}Record): Seq[(Symbol, Any)] = Seq(
<#list columns as column>        '${column.name} -> record.${column.propertyName}<#if column_has_next>,</#if>
</#list>
      )

      override def defaultAlias: Alias[${className}Record] = createAlias("${className[0]?lower_case}")

      override def extract(rs: WrappedResultSet, s: ResultName[${className}Record]): ${className}Record = autoConstruct(rs, s)

      override protected def byCondition(id: ${className}RecordId): scalikejdbc.SQLSyntax =
<#list primaryKeys as primaryKey>
<#if primaryKey_index == 0>
        sqls.eq(column.${primaryKey.propertyName}, id.${primaryKey.propertyName})
<#else>
          .and.eq(column.${primaryKey.propertyName}, id.${primaryKey.propertyName})
</#if>
</#list>

    }

  }

}
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/dao/memcached/UserAccountComponent.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.dao.memcached
2 |
3 | import java.time.{ Instant, ZoneId, ZonedDateTime }
4 |
5 | import akka.actor.ActorSystem
6 | import cats.data.ReaderT
7 | import cats.implicits._
8 | import com.github.j5ik2o.dddbase.memcached.MemcachedDaoSupport
9 | import com.github.j5ik2o.reactive.memcached.command.ValueDesc
10 | import com.github.j5ik2o.reactive.memcached.{ MemcachedConnection, ReaderTTask }
11 | import io.circe.generic.auto._
12 | import io.circe.parser.parse
13 | import io.circe.syntax._
14 | import io.circe.{ Decoder, Encoder }
15 | import monix.eval.Task
16 |
17 | import scala.concurrent.duration.Duration
18 |
// Memcached persistence for user accounts: records are stored as escaped JSON
// strings keyed by the account id. "Soft delete" is implemented by rewriting the
// record with a sentinel status rather than removing the key.
trait UserAccountComponent extends MemcachedDaoSupport {

  // ZonedDateTime <-> JSON as epoch milliseconds. Decoding applies the system
  // default zone, so the original zone/offset is not round-tripped.
  implicit val zonedDateTimeEncoder: Encoder[ZonedDateTime] = Encoder[Long].contramap(_.toInstant.toEpochMilli)
  implicit val zonedDateTimeDecoder: Decoder[ZonedDateTime] = Decoder[Long].map { ts =>
    ZonedDateTime.ofInstant(Instant.ofEpochMilli(ts), ZoneId.systemDefault())
  }

  // Memcached-persisted user account row. `status` supports soft deletion:
  // records whose status equals DELETED are treated as absent on read.
  case class UserAccountRecord(
    id: String,
    status: String,
    email: String,
    password: String,
    firstName: String,
    lastName: String,
    createdAt: java.time.ZonedDateTime,
    updatedAt: Option[java.time.ZonedDateTime]
  ) extends SoftDeletableRecord {
    override type This = UserAccountRecord
    // Copy with a new status (used to mark a record deleted).
    override def withStatus(value: String): UserAccountRecord =
      copy(status = value)
  }

  // DAO over a memcached connection; every operation is a ReaderT program that
  // is run against the connection supplied by the caller.
  case class UserAccountDao()(implicit val system: ActorSystem)
      extends Dao[ReaderT[Task, MemcachedConnection, ?], UserAccountRecord]
      with DaoSoftDeletable[ReaderT[Task, MemcachedConnection, ?], UserAccountRecord] {

    // Sentinel status marking a soft-deleted record.
    val DELETED: String = "deleted"

    // Serializes the record to JSON and escapes double quotes (" -> \") before
    // storing; internalGet reverses this escaping on read.
    private def internalSet(record: UserAccountRecord, expire: Duration): ReaderT[Task, MemcachedConnection, Int] =
      memcachedClient.set(record.id, record.asJson.noSpaces.replaceAll("\"", "\\\\\""), expire)

    // Stores each record in turn; returns the number of successful sets
    // (set always yields 1L, so this equals the number of records processed).
    override def setMulti(
      records: Seq[UserAccountRecord],
      expire: Duration
    ): ReaderT[Task, MemcachedConnection, Long] = ReaderT { con =>
      Task
        .traverse(records) { record =>
          set(record, expire).run(con)
        }
        .map(_.count(_ > 0))
    }

    // Stores one record; the memcached client's Int result is discarded and
    // mapped to a constant 1L.
    override def set(
      record: UserAccountRecord,
      expire: Duration
    ): ReaderT[Task, MemcachedConnection, Long] = ReaderT { con =>
      internalSet(record, expire).run(con).map(_ => 1L)
    }

    // Fetches each id in turn; ids that are missing (or soft-deleted, see
    // internalGet) are silently dropped from the result.
    override def getMulti(
      ids: Seq[String]
    ): ReaderT[Task, MemcachedConnection, Seq[UserAccountRecord]] = ReaderT { con =>
      Task
        .traverse(ids) { id =>
          get(id).run(con)
        }
        .map(_.foldLeft(Seq.empty[UserAccountRecord]) {
          case (result, e) =>
            result ++ e.map(Seq(_)).getOrElse(Seq.empty)
        })
    }

    // Reads and unescapes the stored JSON, decodes it, and maps soft-deleted
    // records (status == DELETED) to None. Parse/decode failures are rethrown
    // as exceptions inside the Task.
    private def internalGet(id: String): ReaderT[Task, MemcachedConnection, Option[UserAccountRecord]] =
      memcachedClient
        .get(id)
        .map {
          _.flatMap { v: ValueDesc =>
            val r = parse(v.value.replaceAll("\\\\\"", "\"")).leftMap(error => new Exception(error.message)).flatMap {
              json =>
                json.as[UserAccountRecord].leftMap(error => new Exception(error.message)).map { e =>
                  if (e.status == DELETED)
                    None
                  else
                    Some(e)
                }
            }
            r match {
              case Right(v) => v
              case Left(ex) => throw ex
            }
          }
        }

    override def get(
      id: String
    ): ReaderT[Task, MemcachedConnection, Option[UserAccountRecord]] = ReaderT { con =>
      internalGet(id).run(con)
    }

    // Marks the record deleted by rewriting it with the DELETED status.
    // NOTE(review): the rewrite uses Duration.Inf, so any TTL the record was
    // originally stored with is replaced by "never expire" — confirm intended.
    override def softDelete(id: String): ReaderT[Task, MemcachedConnection, Long] = {
      get(id).flatMap {
        case Some(v) =>
          set(v.withStatus(DELETED), Duration.Inf)
        case None =>
          ReaderTTask.pure(0L)
      }
    }

    // Soft-deletes each id; the count only includes ids that existed.
    override def softDeleteMulti(
      ids: Seq[String]
    ): ReaderT[Task, MemcachedConnection, Long] = ReaderT { con =>
      Task
        .traverse(ids) { id =>
          softDelete(id).run(con)
        }
        .map(_.count(_ > 0))
    }

    // Physical delete of the memcached key.
    override def delete(
      id: String
    ): ReaderT[Task, MemcachedConnection, Long] = ReaderT { con =>
      memcachedClient.delete(id).run(con).map { v =>
        v.toLong
      }
    }

    // Physically deletes each key; counts those reported deleted.
    override def deleteMulti(ids: Seq[String]): ReaderT[Task, MemcachedConnection, Long] = ReaderT { con =>
      Task
        .traverse(ids) { id =>
          delete(id).run(con)
        }
        .map(_.count(_ > 0))
    }
  }
}
144 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/dao/redis/UserAccountComponent.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.dao.redis
2 |
3 | import java.time.{ Instant, ZoneId, ZonedDateTime }
4 |
5 | import akka.actor.ActorSystem
6 | import cats.data.{ NonEmptyList, ReaderT }
7 | import cats.implicits._
8 | import com.github.j5ik2o.dddbase.redis.RedisDaoSupport
9 | import com.github.j5ik2o.reactive.redis.{ ReaderTTask, RedisConnection, Result }
10 | import io.circe.generic.auto._
11 | import io.circe.parser._
12 | import io.circe.syntax._
13 | import io.circe.{ Decoder, Encoder }
14 | import monix.eval.Task
15 |
16 | import scala.concurrent.duration._
17 |
18 | trait UserAccountComponent extends RedisDaoSupport {
19 |
20 | implicit val zonedDateTimeEncoder: Encoder[ZonedDateTime] = Encoder[Long].contramap(_.toInstant.toEpochMilli)
21 | implicit val zonedDateTimeDecoder: Decoder[ZonedDateTime] = Decoder[Long].map { ts =>
22 | ZonedDateTime.ofInstant(Instant.ofEpochMilli(ts), ZoneId.systemDefault())
23 | }
24 |
  // Redis-persisted user account row. `status` supports soft deletion: records
  // whose status equals DELETED are treated as absent on read.
  case class UserAccountRecord(
    id: String,
    status: String,
    email: String,
    password: String,
    firstName: String,
    lastName: String,
    createdAt: java.time.ZonedDateTime,
    updatedAt: Option[java.time.ZonedDateTime]
  ) extends SoftDeletableRecord {
    override type This = UserAccountRecord
    // Copy with a new status (used to mark a record deleted).
    override def withStatus(value: String): UserAccountRecord =
      copy(status = value)
  }
39 |
  // DAO over a Redis connection; every operation is a ReaderT program that is
  // run against the connection supplied by the caller. Records are stored as
  // JSON strings keyed by the account id.
  case class UserAccountDao()(implicit val system: ActorSystem)
      extends Dao[ReaderT[Task, RedisConnection, ?], UserAccountRecord]
      with DaoSoftDeletable[ReaderT[Task, RedisConnection, ?], UserAccountRecord] {

    // Sentinel status marking a soft-deleted record.
    val DELETED = "deleted"

    // Chooses the Redis set command by TTL:
    //  - finite and < 1s : PSETEX (millisecond resolution)
    //  - finite and >= 1s: SETEX  (second resolution)
    //  - infinite        : plain SET (no expiry)
    // `expire._1` / `expire._2` rely on the implicit Duration -> (length, unit)
    // pair conversion from scala.concurrent.duration — TODO confirm.
    private def internalSet(
      record: UserAccountRecord,
      expire: Duration
    ): ReaderT[Task, RedisConnection, Result[Unit]] = {
      expire match {
        case e if e.isFinite() && e.lt(1 seconds) =>
          redisClient.pSetEx(record.id, FiniteDuration(expire._1, expire._2), toJsonString(record))
        case e if e.isFinite() && !e.lt(1 seconds) =>
          redisClient.setEx(record.id, FiniteDuration(expire._1, expire._2), toJsonString(record))
        case e if !e.isFinite() =>
          redisClient.set(record.id, toJsonString(record))
      }
    }

    // Stores each record in turn; returns the number of successful sets
    // (set always yields 1L, so this equals the number of records processed).
    override def setMulti(
      records: Seq[UserAccountRecord],
      expire: Duration
    ): ReaderT[Task, RedisConnection, Long] = ReaderT { con =>
      Task
        .traverse(records) { record =>
          set(record, expire).run(con)
        }
        .map(_.count(_ > 0))
    }

    // Stores one record; the Redis client's Result is discarded and mapped to
    // a constant 1L.
    override def set(
      record: UserAccountRecord,
      expire: Duration
    ): ReaderT[Task, RedisConnection, Long] = ReaderT { con =>
      internalSet(record, expire).run(con).map(_ => 1L)
    }

    // Fetches each id in turn; ids that are missing (or soft-deleted, see
    // internalGet) are silently dropped from the result.
    override def getMulti(
      ids: Seq[String]
    ): ReaderT[Task, RedisConnection, Seq[UserAccountRecord]] = ReaderT { con =>
      Task
        .traverse(ids) { id =>
          get(id).run(con)
        }
        .map(_.foldLeft(Seq.empty[UserAccountRecord]) {
          case (result, e) =>
            result ++ e.map(Seq(_)).getOrElse(Seq.empty)
        })
    }

    // Reads and decodes the stored JSON, mapping soft-deleted records
    // (status == DELETED) to None. Parse/decode failures are rethrown as
    // exceptions inside the Task.
    private def internalGet(id: String): ReaderT[Task, RedisConnection, Option[UserAccountRecord]] =
      redisClient
        .get(id)
        .map {
          _.value.flatMap { v =>
            val r = parse(v).leftMap(error => new Exception(error.message)).flatMap { json =>
              json.as[UserAccountRecord].leftMap(error => new Exception(error.message)).map { v =>
                if (v.status == DELETED)
                  None
                else
                  Some(v)
              }
            }
            r match {
              case Right(v) =>
                v
              case Left(ex) =>
                throw ex
            }
          }
        }

    override def get(
      id: String
    ): ReaderT[Task, RedisConnection, Option[UserAccountRecord]] = ReaderT { con =>
      internalGet(id).run(con)
    }

    // Marks the record deleted by rewriting it with the DELETED status.
    // NOTE(review): the rewrite uses Duration.Inf, so any TTL the record was
    // originally stored with is replaced by "never expire" — confirm intended.
    override def softDelete(id: String): ReaderT[Task, RedisConnection, Long] = {
      get(id).flatMap {
        case Some(v) =>
          set(v.withStatus(DELETED), Duration.Inf)
        case None =>
          ReaderTTask.pure(0L)
      }
    }

    // Bulk soft delete: fetches the existing records, then rewrites them all
    // with the DELETED status; ids that were absent are skipped.
    override def softDeleteMulti(ids: Seq[String]): ReaderT[Task, RedisConnection, Long] = getMulti(ids).flatMap {
      values =>
        setMulti(values.map(_.withStatus(DELETED)), Duration.Inf)
    }

    // Physical delete of the Redis key (DEL); returns the deleted-key count.
    override def delete(
      id: String
    ): ReaderT[Task, RedisConnection, Long] = ReaderT { con =>
      redisClient.del(NonEmptyList.of(id)).run(con).map { _.value.toLong }
    }

    // Physically deletes each key; counts those reported deleted.
    override def deleteMulti(
      ids: Seq[String]
    ): ReaderT[Task, RedisConnection, Long] =
      ReaderT { con =>
        Task
          .traverse(ids) { id =>
            delete(id).run(con)
          }
          .map(_.count(_ > 0))
      }
  }
150 |
151 | private def toJsonString(record: UserAccountRecord) = {
152 | record.asJson.noSpaces
153 | }
154 | }
155 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/dao/memory/UserAccountComponent.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.dao.memory
2 |
3 | import com.github.j5ik2o.dddbase.memory.MemoryDaoSupport
4 | import com.google.common.base.Ticker
5 | import com.google.common.cache._
6 | import monix.eval.Task
7 |
8 | import scala.collection.JavaConverters._
9 | import scala.concurrent.duration.Duration
10 |
trait GuavaMemoryDaoSupport extends MemoryDaoSupport {

  /** Status value that marks a record as logically (soft) deleted. */
  val DELETED = "deleted"

  object GuavaCacheBuilder {

    /** Builds a Guava [[Cache]] from optional tuning parameters.
      *
      * Each `None` leaves the corresponding `CacheBuilder` default untouched.
      * `CacheBuilder.newBuilder()` is typed over `AnyRef`, so the listener and
      * weigher callbacks, and the final cache, are adapted via unchecked casts.
      *
      * Fixed defect: `initialCapacity` was previously ignored — the
      * `maximumSize` value was mistakenly passed to
      * `CacheBuilder.initialCapacity` as well as to `maximumSize`.
      */
    def build[K, V <: SoftDeletableRecord](
        concurrencyLevel: Option[Int] = None,
        expireAfterAccess: Option[Duration] = None,
        expireAfterWrite: Option[Duration] = None,
        initialCapacity: Option[Int] = None,
        maximumSize: Option[Int] = None,
        maximumWeight: Option[Int] = None,
        recordStats: Option[Boolean] = None,
        refreshAfterWrite: Option[Duration] = None,
        removalListener: Option[RemovalNotification[String, V] => Unit] = None,
        softValues: Option[Boolean] = None,
        ticker: Option[Ticker] = None,
        weakKeys: Option[Boolean] = None,
        weakValues: Option[Boolean] = None,
        weigher: Option[(String, V) => Int] = None
    ): Cache[K, V] = {
      {
        val b = CacheBuilder.newBuilder()
        // FIX: initialCapacity (not maximumSize) now drives initialCapacity.
        val b0 = initialCapacity.fold(b)(v => b.initialCapacity(v))
        val b1 = concurrencyLevel.fold(b0)(v => b0.concurrencyLevel(v))
        val b2 = maximumSize.fold(b1)(v => b1.maximumSize(v))
        val b3 = expireAfterWrite.fold(b2)(v => b2.expireAfterWrite(v.length, v.unit))
        val b4 = maximumWeight.fold(b3)(v => b3.maximumWeight(v))
        val b5 = expireAfterAccess.fold(b4)(v => b4.expireAfterAccess(v.length, v.unit))
        val b6 = refreshAfterWrite.fold(b5)(v => b5.refreshAfterWrite(v.length, v.unit))
        val b7 = removalListener.fold(b6)(
          v =>
            b6.removalListener(new RemovalListener[AnyRef, AnyRef] {
              override def onRemoval(
                  removalNotification: RemovalNotification[AnyRef, AnyRef]
              ): Unit = v(removalNotification.asInstanceOf[RemovalNotification[String, V]])
            })
        )
        val b8  = softValues.fold(b7)(v => if (v) b7.softValues() else b7)
        val b9  = weakValues.fold(b8)(v => if (v) b8.weakValues() else b8)
        val b10 = weakKeys.fold(b9)(v => if (v) b9.weakKeys() else b9)
        val b11 = weigher.fold(b10)(
          f =>
            b10.weigher(new Weigher[AnyRef, AnyRef] {
              override def weigh(k: AnyRef, v: AnyRef): Int = f(k.asInstanceOf[String], v.asInstanceOf[V])
            })
        )
        val b12 = recordStats.fold(b11)(v => if (v) b11.recordStats() else b11)
        val b13 = ticker.fold(b12)(v => b12.ticker(v))
        b13
      }.build().asInstanceOf[Cache[K, V]]
    }
  }

  /** DAO backed by a Guava [[Cache]] with soft-delete support.
    *
    * Soft-deleted entries stay in the cache with `status == DELETED` and are
    * filtered out of every read; `delete`/`deleteMulti` evict them physically.
    */
  abstract class GuavaCacheDao[K, V <: SoftDeletableRecord](
      cache: Cache[String, V]
  ) extends Dao[Task, V]
      with DaoSoftDeletable[Task, V] {

    /** Stores one record; always reports 1. */
    override def set(record: V): Task[Long] =
      Task {
        cache.put(record.id, record)
        1L
      }

    /** Stores all records; reports the number submitted. */
    override def setMulti(records: Seq[V]): Task[Long] = Task {
      cache.putAll(records.map(v => (v.id, v)).toMap.asJava)
      records.size.toLong
    }

    /** Looks up one id, hiding soft-deleted entries. */
    override def get(
        id: String
    ): Task[Option[V]] = Task {
      Option(cache.getIfPresent(id)).filterNot(_.status == DELETED)
    }

    /** All live (not soft-deleted) entries currently cached. */
    override def getAll: Task[Seq[V]] = Task {
      cache.asMap().asScala.values.filterNot(_.status == DELETED).toSeq
    }

    /** Bulk lookup, hiding soft-deleted entries. */
    override def getMulti(
        ids: Seq[String]
    ): Task[Seq[V]] = Task {
      cache.getAllPresent(ids.asJava).asScala.values.filterNot(_.status == DELETED).toSeq
    }

    /** Physically evicts the entry; reports 1 even when it was absent. */
    override def delete(id: String): Task[Long] = Task {
      cache.invalidate(id)
      1L
    }

    /** Physically evicts all ids; reports the number requested. */
    override def deleteMulti(ids: Seq[String]): Task[Long] = Task {
      cache.invalidateAll(ids.asJava)
      ids.size.toLong
    }

    /** Marks the entry deleted (status = DELETED); returns 0 when absent.
      * The cast is needed because `withStatus` returns the abstract `This`.
      */
    override def softDelete(id: String): Task[Long] = get(id).flatMap {
      case Some(v) =>
        set(v.withStatus(DELETED).asInstanceOf[V])
      case None =>
        Task.pure(0L)
    }

    /** Soft-deletes every id that currently resolves to a live entry. */
    override def softDeleteMulti(ids: Seq[String]): Task[Long] =
      getMulti(ids).flatMap { values =>
        setMulti(values.map(_.withStatus(DELETED).asInstanceOf[V]))
      }

  }

}
125 |
trait UserAccountComponent extends GuavaMemoryDaoSupport {

  /** In-memory (Guava cache) representation of a user account.
    * `status` carries the soft-delete marker (see `DELETED`).
    */
  case class UserAccountRecord(
      id: String,
      status: String,
      email: String,
      password: String,
      firstName: String,
      lastName: String,
      createdAt: java.time.ZonedDateTime,
      updatedAt: Option[java.time.ZonedDateTime]
  ) extends SoftDeletableRecord {
    override type This = UserAccountRecord
    // Copy with a replacement status; used by the soft-delete machinery.
    override def withStatus(value: String): UserAccountRecord =
      copy(status = value)
  }

  /** Guava-cache-backed DAO specialised to [[UserAccountRecord]]. */
  case class UserAccountDao(cache: Cache[String, UserAccountRecord])
      extends GuavaCacheDao[String, UserAccountRecord](cache) {
    // Auxiliary constructor: builds the backing cache from optional tuning
    // parameters (passed positionally to GuavaCacheBuilder.build — the order
    // here must match the build signature) and delegates to the primary ctor.
    // NOTE(review): overloading combined with default arguments can make call
    // resolution surprising — `new UserAccountDao()` uses this constructor,
    // while `UserAccountDao(cache)` uses the case-class apply.
    def this(
        concurrencyLevel: Option[Int] = None,
        expireAfterAccess: Option[Duration] = None,
        expireAfterWrite: Option[Duration] = None,
        initialCapacity: Option[Int] = None,
        maximumSize: Option[Int] = None,
        maximumWeight: Option[Int] = None,
        recordStats: Option[Boolean] = None,
        refreshAfterWrite: Option[Duration] = None,
        removalListener: Option[RemovalNotification[String, UserAccountRecord] => Unit] = None,
        softValues: Option[Boolean] = None,
        ticker: Option[Ticker] = None,
        weakKeys: Option[Boolean] = None,
        weakValues: Option[Boolean] = None,
        weigher: Option[(String, UserAccountRecord) => Int] = None
    ) = {
      this(
        GuavaCacheBuilder
          .build[String, UserAccountRecord](
            concurrencyLevel,
            expireAfterAccess,
            expireAfterWrite,
            initialCapacity,
            maximumSize,
            maximumWeight,
            recordStats,
            refreshAfterWrite,
            removalListener,
            softValues,
            ticker,
            weakKeys,
            weakValues,
            weigher
          )
      )
    }
  }

}
184 |
--------------------------------------------------------------------------------
/scalastyle-config.xml:
--------------------------------------------------------------------------------
1 |
2 | Scalastyle standard configuration
3 |
4 |
5 |
6 |
7 |
8 |
9 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
78 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/dao/dynamodb/UserAccountComponent.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.dao.dynamodb
2 | import java.time.{ Instant, ZoneId, ZonedDateTime }
3 |
4 | import com.github.j5ik2o.dddbase.dynamodb.DynamoDBDaoSupport
5 | import com.github.j5ik2o.reactive.aws.dynamodb.implicits._
6 | import com.github.j5ik2o.reactive.aws.dynamodb.monix.DynamoDbMonixClient
7 | import monix.eval.Task
8 | import software.amazon.awssdk.services.dynamodb.model._
9 |
10 | trait UserAccountComponent extends DynamoDBDaoSupport {
11 |
  /** DynamoDB-persisted representation of a user account.
    *
    * Timestamps are stored as epoch-millisecond number attributes;
    * `status` carries the soft-delete marker (see `UserAccountDao.DELETED`).
    */
  case class UserAccountRecord(
      id: String,
      status: String,
      email: String,
      password: String,
      firstName: String,
      lastName: String,
      createdAt: java.time.ZonedDateTime,
      updatedAt: Option[java.time.ZonedDateTime]
  ) extends SoftDeletableRecord[String] {
    override type This = UserAccountRecord
    // Copy with a replacement status; used by the soft-delete machinery.
    override def withStatus(value: String): UserAccountRecord =
      copy(status = value)
  }
26 |
27 | case class UserAccountDao(client: DynamoDbMonixClient)
28 | extends Dao[Task, String, UserAccountRecord]
29 | with DaoSoftDeletable[Task, String, UserAccountRecord] {
30 | val tableName = "UserAccount"
31 | val DELETED: String = "deleted"
32 |
33 | override def put(record: UserAccountRecord): Task[Long] = {
34 | client
35 | .putItem(
36 | tableName,
37 | Map(
38 | "Id" -> AttributeValue.builder().s(record.id).build(),
39 | "Status" -> AttributeValue.builder().s(record.status).build(),
40 | "Email" -> AttributeValue.builder().s(record.email).build(),
41 | "Password" -> AttributeValue.builder().s(record.password).build(),
42 | "FirstName" -> AttributeValue.builder().s(record.firstName).build(),
43 | "LastName" -> AttributeValue.builder().s(record.lastName).build(),
44 | "CreatedAt" -> AttributeValue.builder().n(record.createdAt.toInstant.toEpochMilli.toString).build()
45 | ) ++ record.updatedAt
46 | .map { s =>
47 | Map("UpdatedAt" -> AttributeValue.builder().n(s.toInstant.toEpochMilli.toString).build())
48 | }
49 | .getOrElse(Map.empty)
50 | )
51 | .flatMap { response =>
52 | if (response.sdkHttpResponse().isSuccessful)
53 | Task.pure(1L)
54 | else
55 | Task.raiseError(new Exception())
56 | }
57 | }
58 |
59 | override def putMulti(records: Seq[UserAccountRecord]): Task[Long] = {
60 | client
61 | .batchWriteItem(
62 | Map(
63 | tableName -> records.map { record =>
64 | WriteRequest
65 | .builder().putRequest(
66 | PutRequest
67 | .builder().itemAsScala(
68 | Map(
69 | "Id" -> AttributeValue.builder().s(record.id).build(),
70 | "Status" -> AttributeValue.builder().s(record.status).build(),
71 | "Email" -> AttributeValue.builder().s(record.email).build(),
72 | "Password" -> AttributeValue.builder().s(record.password).build(),
73 | "FirstName" -> AttributeValue.builder().s(record.firstName).build(),
74 | "LastName" -> AttributeValue.builder().s(record.lastName).build(),
75 | "CreatedAt" -> AttributeValue
76 | .builder()
77 | .n(record.createdAt.toInstant.toEpochMilli.toString).build()
78 | ) ++ record.updatedAt
79 | .map { s =>
80 | Map("UpdatedAt" -> AttributeValue.builder().n(s.toInstant.toEpochMilli.toString).build())
81 | }
82 | .getOrElse(Map.empty)
83 | ).build()
84 | ).build()
85 | }
86 | )
87 | )
88 | .flatMap { response =>
89 | if (response.sdkHttpResponse().isSuccessful) {
90 | Task.pure(records.size - response.unprocessedItems.size)
91 | } else
92 | Task.raiseError(new Exception())
93 | }
94 | }
95 |
96 | override def get(id: String): Task[Option[UserAccountRecord]] = {
97 | client.getItem(tableName, Map("Id" -> AttributeValue.builder().s(id).build())).flatMap { response =>
98 | if (response.sdkHttpResponse().isSuccessful) {
99 | Task.pure {
100 | response.itemAsScala.map { item =>
101 | UserAccountRecord(
102 | id = item("Id").s,
103 | status = item("Status").s,
104 | email = item("Email").s,
105 | password = item("Password").s,
106 | firstName = item("FirstName").s,
107 | lastName = item("LastName").s,
108 | createdAt = item("CreatedAt").nAsScala.map { v =>
109 | ZonedDateTime.ofInstant(Instant.ofEpochMilli(v.toLong), ZoneId.systemDefault())
110 | }.get,
111 | updatedAt = item
112 | .get("UpdatedAt")
113 | .flatMap(_.nAsScala.map { v =>
114 | ZonedDateTime.ofInstant(Instant.ofEpochMilli(v.toLong), ZoneId.systemDefault())
115 | })
116 | )
117 | }
118 | }
119 | } else
120 | Task.raiseError(new Exception())
121 | }
122 | }
123 |
124 | override def getMulti(ids: Seq[String]): Task[Seq[UserAccountRecord]] = {
125 | client
126 | .batchGetItem(
127 | Map(
128 | tableName -> KeysAndAttributes
129 | .builder()
130 | .keysAsScala(ids.map { id =>
131 | Map("Id" -> AttributeValue.builder().s(id).build())
132 | }).build()
133 | )
134 | )
135 | .flatMap { response =>
136 | if (response.sdkHttpResponse().isSuccessful) {
137 | Task.pure {
138 | response.responsesAsScala
139 | .map { records =>
140 | records.values.toSeq.flatMap { records =>
141 | records.map { item =>
142 | UserAccountRecord(
143 | id = item("Id").s,
144 | status = item("Status").s,
145 | email = item("Email").s,
146 | password = item("Password").s,
147 | firstName = item("FirstName").s,
148 | lastName = item("LastName").s,
149 | createdAt = item("CreatedAt").nAsScala.map { v =>
150 | ZonedDateTime.ofInstant(Instant.ofEpochMilli(v.toLong), ZoneId.systemDefault())
151 | }.get,
152 | updatedAt = item("UpdatedAt").nAsScala.map { v =>
153 | ZonedDateTime.ofInstant(Instant.ofEpochMilli(v.toLong), ZoneId.systemDefault())
154 | }
155 | )
156 | }
157 | }
158 | }
159 | .getOrElse(Seq.empty)
160 | }
161 | } else
162 | Task.raiseError(new Exception())
163 | }
164 |
165 | }
166 |
167 | override def delete(id: String): Task[Long] = {
168 | client.deleteItem(tableName, Map("Id" -> AttributeValue.builder().s(id).build())).flatMap { response =>
169 | if (response.sdkHttpResponse().isSuccessful)
170 | Task.pure(1L)
171 | else
172 | Task.raiseError(new Exception())
173 | }
174 | }
175 |
176 | override def deleteMulti(ids: Seq[String]): Task[Long] = {
177 | client
178 | .batchWriteItem(
179 | Map(
180 | tableName -> ids.map { id =>
181 | WriteRequest
182 | .builder().deleteRequest(
183 | DeleteRequest
184 | .builder().keyAsScala(
185 | Map(
186 | "Id" -> AttributeValue.builder().s(id).build()
187 | )
188 | ).build()
189 | ).build()
190 | }
191 | )
192 | )
193 | .flatMap { response =>
194 | if (response.sdkHttpResponse().isSuccessful) {
195 | Task.pure(ids.size - response.unprocessedItems.size)
196 | } else
197 | Task.raiseError(new Exception())
198 | }
199 | }
200 |
201 | override def softDelete(id: String): Task[Long] = {
202 | client
203 | .updateItem(
204 | tableName,
205 | Map("Id" -> AttributeValue.builder().s(id).build()),
206 | Map("Status" -> AttributeValueUpdate.builder().value(AttributeValue.builder().s(DELETED).build()).build())
207 | )
208 | .flatMap { response =>
209 | if (response.sdkHttpResponse().isSuccessful) {
210 | Task.pure(1L)
211 | } else
212 | Task.raiseError(new Exception)
213 | }
214 | }
215 |
216 | override def softDeleteMulti(ids: Seq[String]): Task[Long] = {
217 | Task
218 | .traverse(ids) { id =>
219 | delete(id)
220 | }
221 | .map(_.count(_ > 0))
222 | }
223 | }
224 |
225 | }
226 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # scala-ddd-base
2 |
3 | [](https://circleci.com/gh/j5ik2o/scala-ddd-base/tree/master)
4 | [](https://maven-badges.herokuapp.com/maven-central/com.github.j5ik2o/scala-ddd-base-core_2.12)
5 | [](http://javadoc-badge.appspot.com/com.github.j5ik2o/scala-ddd-base-core_2.12/com/github/j5ik2o/dddbase/index.html?javadocio=true)
6 | [](LICENSE)
7 |
8 | `scala-ddd-base` provides traits to support for ddd repositories and aggregates.
9 |
10 | ## Installation
11 |
12 |
13 | Add the following to your sbt build (Scala 2.11.x, 2.12.x):
14 |
15 | ```scala
16 | resolvers += "Sonatype OSS Release Repository" at "https://oss.sonatype.org/content/repositories/releases/"
17 |
18 | val scalaDddBaseVersion = "..."
19 |
20 | libraryDependencies ++= Seq(
21 | "com.github.j5ik2o" %% "scala-ddd-base-core" % scalaDddBaseVersion,
22 | // Please set as necessary
23 | // "com.github.j5ik2o" %% "scala-ddd-base-slick" % scalaDddBaseVersion,
24 | // "com.github.j5ik2o" %% "scala-ddd-base-skinny" % scalaDddBaseVersion,
25 | // "com.github.j5ik2o" %% "scala-ddd-base-redis" % scalaDddBaseVersion,
26 | // "com.github.j5ik2o" %% "scala-ddd-base-memcached" % scalaDddBaseVersion,
27 | // "com.github.j5ik2o" %% "scala-ddd-base-dynamodb" % scalaDddBaseVersion,
28 | // "com.github.j5ik2o" %% "scala-ddd-base-memory" % scalaDddBaseVersion
29 | )
30 | ```
31 |
32 | ## Core traits
33 |
34 | The following provides basic abstract methods.
35 |
36 | - AggregateSingleReader
37 | - AggregateSingleWriter
38 | - AggregateMultiReader
39 | - AggregateMultiWriter
40 | - AggregateSingleSoftDeletable
41 | - AggregateSingleHardDeletable
42 | - AggregateMultiSoftDeletable
43 | - AggregateMultiHardDeletable
44 |
45 | ## Support traits
46 |
47 | The following provides an implementation for each ORM/KVS.
48 |
49 | - AggregateSingleReadFeature
50 | - AggregateSingleWriteFeature
51 | - AggregateMultiReadFeature
52 | - AggregateMultiWriteFeature
53 | - AggregateSingleSoftDeleteFeature
54 | - AggregateSingleHardDeleteFeature
55 | - AggregateMultiSoftDeleteFeature
56 | - AggregateMultiHardDeleteFeature
57 |
58 | The supported ORM/KVS/Cache is below.
59 |
60 | - Slick(JDBC)
61 | - SkinnyORM(JDBC)
62 | - Redis([reactive-redis](https://github.com/j5ik2o/reactive-redis))
63 | - Memcached([reactive-memcached](https://github.com/j5ik2o/reactive-memcached))
64 | - DynamoDB([reactive-aws-dynamodb](https://github.com/j5ik2o/reactive-aws-client/tree/master/reactive-aws-dynamodb))
65 | - Memory([Guava Cache](https://github.com/google/guava))
66 |
67 | ## Example
68 |
69 | Please mix in the core and support traits to your implementation.
70 | Slick, SkinnyORM, Memcached, Redis, Memory etc. You can also choose the implementation as you like.
71 |
72 | ```scala
73 | trait UserAccountRepository[M[_]]
74 | extends AggregateSingleReader[M]
75 | with AggregateMultiReader[M]
76 | with AggregateSingleWriter[M]
77 | with AggregateMultiWriter[M]
78 | with AggregateSingleSoftDeletable[M]
79 | with AggregateMultiSoftDeletable[M] {
80 | override type IdType = UserAccountId
81 | override type AggregateType = UserAccount
82 | }
83 |
84 | object UserAccountRepository {
85 |
86 | type OnRedis[A] = ReaderT[Task, RedisConnection, A]
87 | type OnMemcached[A] = ReaderT[Task, MemcachedConnection, A]
88 | type OnDynamoDB[A] = Task[A]
89 | type OnMemory[A] = Task[A]
90 | type BySlick[A] = Task[A]
91 | type BySkinny[A] = ReaderT[Task, DBSession, A]
92 | type ByFree[A] = Free[UserRepositoryDSL, A]
93 |
94 | def bySlick(profile: JdbcProfile, db: JdbcProfile#Backend#Database): UserAccountRepository[BySlick] =
95 | new UserAccountRepositoryBySlick(profile, db)
96 |
97 | def bySkinny: UserAccountRepository[BySkinny] = new UserAccountRepositoryBySkinny
98 |
99 | def onRedis(
100 | expireDuration: Duration
101 | )(implicit actorSystem: ActorSystem): UserAccountRepository[OnRedis] =
102 | new UserAccountRepositoryOnRedis(expireDuration)
103 |
104 | def onMemcached(
105 | expireDuration: Duration
106 | )(implicit actorSystem: ActorSystem): UserAccountRepository[OnMemcached] =
107 | new UserAccountRepositoryOnMemcached(expireDuration)
108 |
109 | def onDynamoDB(dynamoDbAsyncClient: DynamoDbAsyncClient): UserAccountRepository[OnDynamoDB] =
110 |     new UserAccountRepositoryOnDynamoDB(DynamoDBTaskClientV2(DynamoDBAsyncClientV2(dynamoDbAsyncClient)))
111 |
112 | def onMemory(minSize: Option[Int] = None,
113 | maxSize: Option[Int] = None,
114 | expireDuration: Option[Duration] = None,
115 | concurrencyLevel: Option[Int] = None,
116 | maxWeight: Option[Int] = None): UserAccountRepository[OnMemory] =
117 | new UserAccountRepositoryOnMemory(minSize, maxSize, expireDuration, concurrencyLevel, maxWeight)
118 |
119 | }
120 | ```
121 |
122 | - [for Slick3](example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/slick/UserAccountRepositoryBySlick.scala)
123 | - [for SkinnyORM](example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/skinny/UserAccountRepositoryBySkinny.scala)
124 | - [for Memcached](example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/memcached/UserAccountRepositoryOnMemcached.scala)
125 | - [for Redis](example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/redis/UserAccountRepositoryOnRedis.scala)
126 | - [for DynamoDB](example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/dynamodb/UserAccountRepositoryOnDynamoDB.scala)
127 | - [for Memory(Guava Cache)](example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/memory/UserAccountRepositoryOnMemory.scala)
128 | - [for Free](example/src/main/scala/com/github/j5ik2o/dddbase/example/repository/free/UserAccountRepositoryByFree.scala)
129 |
130 | ### Usage
131 |
132 | - for Slick3
133 |
134 | ```scala
135 | val userAccountRepository: UserAccountRepository[BySlick] = UserAccountRepository.bySlick(dbConfig.profile, dbConfig.db)
136 | val resultTask: Task[UserAccount] = for {
137 | _ <- userAccountRepository.store(userAccount)
138 | result <- userAccountRepository.resolveBy(userAccount.id)
139 | } yield result
140 |
141 | val resultFuture: Future[UserAccount] = resultTask.runToFuture
142 | ```
143 |
144 | - for SkinnyORM
145 |
146 | ```scala
147 | val userAccountRepository: UserAccountRepository[BySkinny] = UserAccountRepository.bySkinny
148 | val resultTask: Task[UserAccount] = for {
149 | _ <- userAccountRepository.store(userAccount)
150 | result <- userAccountRepository.resolveBy(userAccount.id)
151 | } yield result
152 |
153 | val resultFuture: Future[UserAccount] = resultTask.run(AutoSession).runToFuture
154 | ```
155 |
156 | - for Memcached
157 |
158 | ```scala
159 | val userAccountRepository: UserAccountRepository[OnMemcached] = UserAccountRepository.onMemcached(expireDuration = 5 minutes)
160 | val resultFuture: Future[UserAccount] = connectionPool
161 | .withConnectionF { con =>
162 | (for {
163 | _ <- userAccountRepository.store(userAccount)
164 | r <- userAccountRepository.resolveById(userAccount.id)
165 | } yield r).run(con)
166 | }
167 | .runToFuture
168 | ```
169 |
170 | - for Redis
171 |
172 | ```scala
173 | val userAccountRepository: UserAccountRepository[OnRedis] = UserAccountRepository.onRedis(expireDuration = 5 minutes)
174 | val resultFuture: Future[UserAccount] = connectionPool
175 | .withConnectionF { con =>
176 | (for {
177 | _ <- userAccountRepository.store(userAccount)
178 | r <- userAccountRepository.resolveById(userAccount.id)
179 | } yield r).run(con)
180 | }
181 | .runToFuture
182 | ```
183 |
184 | - for DynamoDB
185 |
186 | ```scala
187 | val userAccountRepository: UserAccountRepository[OnDynamoDB] = UserAccountRepository.onDynamoDB(dynamoDbAsyncClient)
188 | val resultFuture: Future[UserAccount] = (for {
189 | _ <- userAccountRepository.store(userAccount)
190 | r <- userAccountRepository.resolveById(userAccount.id)
191 | } yield r).runToFuture
192 | ```
193 |
194 | - for Memory(Guava Cache)
195 |
196 | ```scala
197 | val userAccountRepository: UserAccountRepository[OnMemory] = UserAccountRepository.onMemory(expireDuration = Some(5 minutes))
198 | val resultFuture: Future[UserAccount] = (for {
199 |   _ <- userAccountRepository.store(userAccount)
200 |   r <- userAccountRepository.resolveById(userAccount.id)
201 | } yield r).runToFuture
202 | ```
203 |
204 | - for Free
205 |
206 | ```scala
207 | val free: UserAccountRepository[ByFree] = UserAccountRepositoryByFree
208 | val program: Free[UserRepositoryDSL, UserAccount] = for {
209 | _ <- free.store(userAccount)
210 | result <- free.resolveById(userAccount.id)
211 | } yield result
212 |
213 | val slick = UserAccountRepository.bySlick(dbConfig.profile, dbConfig.db)
214 | val resultTask: Task[UserAccount] = UserAccountRepositoryByFree.evaluate(slick)(program)
215 | val resultFuture: Future[UserAccount] = resultTask.runToFuture
216 |
217 | // if evaluation by skinny
218 | // val skinny = UserAccountRepository.bySkinny
219 | // val resultTask: Task[UserAccount] = UserAccountRepositoryByFree.evaluate(skinny)(program)
220 | // val resultFuture: Future[UserAccount] = resultTask.run(AutoSession).runToFuture
221 | ```
222 |
--------------------------------------------------------------------------------
/example/src/main/scala/com/github/j5ik2o/dddbase/example/dao/dynamodb/UserMesssageComponent.scala:
--------------------------------------------------------------------------------
1 | package com.github.j5ik2o.dddbase.example.dao.dynamodb
2 |
3 | import java.time.{ Instant, ZoneId, ZonedDateTime }
4 |
5 | import com.github.j5ik2o.dddbase.dynamodb.DynamoDBDaoSupport
6 | import com.github.j5ik2o.reactive.aws.dynamodb.implicits._
7 | import com.github.j5ik2o.reactive.aws.dynamodb.monix.DynamoDbMonixClient
8 | import monix.eval.Task
9 | import software.amazon.awssdk.services.dynamodb.model._
10 |
11 | trait UserMessageComponent extends DynamoDBDaoSupport {
12 |
  // Composite primary key for a user message.
  // NOTE(review): presumably UserId is the partition key and MessageId the
  // sort key — confirm against the table definition.
  case class UserMessageRecordId(userId: Long, messageId: Long)

  /** DynamoDB-persisted representation of a user message.
    *
    * Timestamps are stored as epoch-millisecond number attributes;
    * `status` carries the soft-delete marker (see `UserMessageDao.DELETED`).
    */
  case class UserMessageRecord(
      id: UserMessageRecordId,
      status: String,
      message: String,
      createdAt: java.time.ZonedDateTime,
      updatedAt: Option[java.time.ZonedDateTime]
  ) extends SoftDeletableRecord[UserMessageRecordId] {
    override type This = UserMessageRecord
    // Copy with a replacement status; used by the soft-delete machinery.
    override def withStatus(value: String): UserMessageRecord =
      copy(status = value)
  }
26 |
27 | case class UserMessageDao(client: DynamoDbMonixClient)
28 | extends Dao[Task, UserMessageRecordId, UserMessageRecord]
29 | with DaoSoftDeletable[Task, UserMessageRecordId, UserMessageRecord] {
30 | val tableName = "UserMessage"
31 | val DELETED: String = "deleted"
32 |
33 | override def put(record: UserMessageRecord): Task[Long] = {
34 | client
35 | .putItem(
36 | tableName,
37 | Map(
38 | "UserId" -> AttributeValue.builder().n(record.id.userId.toString).build(),
39 | "MessageId" -> AttributeValue.builder().n(record.id.messageId.toString).build(),
40 | "Status" -> AttributeValue.builder().s(record.status).build(),
41 | "Message" -> AttributeValue.builder().s(record.message).build(),
42 | "CreatedAt" -> AttributeValue.builder().n(record.createdAt.toInstant.toEpochMilli.toString).build()
43 | ) ++ record.updatedAt
44 | .map { s =>
45 | Map("UpdatedAt" -> AttributeValue.builder().n(s.toInstant.toEpochMilli.toString).build())
46 | }
47 | .getOrElse(Map.empty)
48 | )
49 | .flatMap { response =>
50 | if (response.sdkHttpResponse().isSuccessful)
51 | Task.pure(1L)
52 | else
53 | Task.raiseError(new Exception())
54 | }
55 | }
56 |
57 | override def putMulti(records: Seq[UserMessageRecord]): Task[Long] = {
58 | client
59 | .batchWriteItem(
60 | Map(
61 | tableName -> records.map { record =>
62 | WriteRequest
63 | .builder().putRequest(
64 | PutRequest
65 | .builder().itemAsScala(
66 | Map(
67 | "UserId" -> AttributeValue.builder().n(record.id.userId.toString).build(),
68 | "MessageId" -> AttributeValue.builder().n(record.id.messageId.toString).build(),
69 | "Status" -> AttributeValue.builder().s(record.status).build(),
70 | "Message" -> AttributeValue.builder().s(record.message).build(),
71 | "CreatedAt" -> AttributeValue
72 | .builder()
73 | .n(record.createdAt.toInstant.toEpochMilli.toString).build()
74 | ) ++ record.updatedAt
75 | .map { s =>
76 | Map("UpdatedAt" -> AttributeValue.builder().n(s.toInstant.toEpochMilli.toString).build())
77 | }
78 | .getOrElse(Map.empty)
79 | ).build()
80 | ).build()
81 | }
82 | )
83 | )
84 | .flatMap { response =>
85 | if (response.sdkHttpResponse.isSuccessful) {
86 | Task.pure(records.size - response.unprocessedItems.size)
87 | } else
88 | Task.raiseError(new Exception())
89 | }
90 | }
91 |
92 | override def get(id: UserMessageRecordId): Task[Option[UserMessageRecord]] = {
93 | client
94 | .getItem(
95 | tableName,
96 | Map(
97 | "UserId" -> AttributeValue.builder().n(id.userId.toString).build(),
98 | "MessageId" -> AttributeValue.builder().n(id.messageId.toString).build()
99 | )
100 | )
101 | .flatMap { response =>
102 | if (response.sdkHttpResponse().isSuccessful) {
103 | Task.pure {
104 | response.itemAsScala.map { item =>
105 | UserMessageRecord(
106 | id = UserMessageRecordId(item("UserId").n.toLong, item("MessageId").n.toLong),
107 | status = item("Status").s,
108 | message = item("Password").s,
109 | createdAt = item("CreatedAt").nAsScala.map { v =>
110 | ZonedDateTime.ofInstant(Instant.ofEpochMilli(v.toLong), ZoneId.systemDefault())
111 | }.get,
112 | updatedAt = item
113 | .get("UpdatedAt")
114 | .flatMap(_.nAsScala.map { v =>
115 | ZonedDateTime.ofInstant(Instant.ofEpochMilli(v.toLong), ZoneId.systemDefault())
116 | })
117 | )
118 | }
119 | }
120 | } else
121 | Task.raiseError(new Exception())
122 | }
123 |
124 | }
125 | override def getMulti(ids: Seq[UserMessageRecordId]): Task[Seq[UserMessageRecord]] = {
126 | client
127 | .batchGetItem(
128 | Map(
129 | tableName -> KeysAndAttributes
130 | .builder()
131 | .keysAsScala(ids.map { id =>
132 | Map(
133 | "UserId" -> AttributeValue.builder().n(id.userId.toString).build(),
134 | "MessageId" -> AttributeValue.builder().n(id.messageId.toString).build()
135 | )
136 | }).build()
137 | )
138 | )
139 | .flatMap { response =>
140 | if (response.sdkHttpResponse().isSuccessful) {
141 | Task.pure {
142 | response.responsesAsScala
143 | .map { records =>
144 | records.values.toSeq.flatMap { records =>
145 | records.map { item =>
146 | UserMessageRecord(
147 | id = UserMessageRecordId(item("UserId").n.toLong, item("MessageId").n.toLong),
148 | status = item("Status").s,
149 | message = item("Password").s,
150 | createdAt = item("CreatedAt").nAsScala.map { v =>
151 | ZonedDateTime.ofInstant(Instant.ofEpochMilli(v.toLong), ZoneId.systemDefault())
152 | }.get,
153 | updatedAt = item("UpdatedAt").nAsScala.map { v =>
154 | ZonedDateTime.ofInstant(Instant.ofEpochMilli(v.toLong), ZoneId.systemDefault())
155 | }
156 | )
157 | }
158 | }
159 | }
160 | .getOrElse(Seq.empty)
161 | }
162 | } else
163 | Task.raiseError(new Exception())
164 | }
165 |
166 | }
167 |
168 | override def delete(id: UserMessageRecordId): Task[Long] = {
169 | client
170 | .deleteItem(
171 | tableName,
172 | Map(
173 | "UserId" -> AttributeValue.builder().n(id.userId.toString).build(),
174 | "MessageId" -> AttributeValue.builder().n(id.messageId.toString).build()
175 | )
176 | )
177 | .flatMap { response =>
178 | if (response.sdkHttpResponse().isSuccessful)
179 | Task.pure(1L)
180 | else
181 | Task.raiseError(new Exception())
182 | }
183 | }
184 |
185 | override def deleteMulti(ids: Seq[UserMessageRecordId]): Task[Long] = {
186 | client
187 | .batchWriteItem(
188 | Map(
189 | tableName -> ids.map { id =>
190 | WriteRequest
191 | .builder().deleteRequest(
192 | DeleteRequest
193 | .builder().keyAsScala(
194 | Map(
195 | "UserId" -> AttributeValue.builder().n(id.userId.toString).build(),
196 | "MessageId" -> AttributeValue.builder().n(id.messageId.toString).build()
197 | )
198 | ).build()
199 | ).build()
200 | }
201 | )
202 | )
203 | .flatMap { response =>
204 | if (response.sdkHttpResponse().isSuccessful) {
205 | Task.pure(ids.size - response.unprocessedItems.size)
206 | } else
207 | Task.raiseError(new Exception())
208 | }
209 | }
210 |
211 | override def softDelete(id: UserMessageRecordId): Task[Long] = {
212 | client
213 | .updateItem(
214 | tableName,
215 | Map(
216 | "UserId" -> AttributeValue.builder().n(id.userId.toString).build(),
217 | "MessageId" -> AttributeValue.builder().n(id.messageId.toString).build()
218 | ),
219 | Map("Status" -> AttributeValueUpdate.builder().value(AttributeValue.builder().s(DELETED).build()).build())
220 | )
221 | .flatMap { response =>
222 | if (response.sdkHttpResponse().isSuccessful) {
223 | Task.pure(1L)
224 | } else
225 | Task.raiseError(new Exception)
226 | }
227 | }
228 |
229 | override def softDeleteMulti(ids: Seq[UserMessageRecordId]): Task[Long] = {
230 | Task
231 | .traverse(ids) { id =>
232 | delete(id)
233 | }
234 | .map(_.count(_ > 0))
235 | }
236 | }
237 |
238 | }
239 |
--------------------------------------------------------------------------------