├── project
├── build.properties
├── plugins.sbt
├── Settings.scala
├── DockerSettings.scala
├── AvroSupport.scala
└── Dependencies.scala
├── version.sbt
├── src
├── main
│ ├── resources
│ │ ├── avro
│ │ │ ├── avsc-history
│ │ │ │ └── history-avsc-placeholder
│ │ │ ├── avsc
│ │ │ │ ├── UpdatebleAvro.avsc
│ │ │ │ ├── NoteAvro.avsc
│ │ │ │ ├── PersistentEventMetadataAvro.avsc
│ │ │ │ ├── NoteDeletedAvro.avsc
│ │ │ │ ├── NoteCreatedAvro.avsc
│ │ │ │ └── NoteUpdatedAvro.avsc
│ │ │ ├── PersistentEventMetadata.avdl
│ │ │ └── NoteInternalEvents.avdl
│ │ ├── application.conf
│ │ ├── logback.xml
│ │ ├── environment.conf
│ │ ├── akka.conf
│ │ └── kamon.conf
│ ├── scala
│ │ └── com
│ │ │ └── ingenuiq
│ │ │ └── note
│ │ │ ├── settings
│ │ │ ├── HttpListenerSettings.scala
│ │ │ ├── TracingMonitoringSettings.scala
│ │ │ └── Settings.scala
│ │ │ ├── query
│ │ │ ├── dao
│ │ │ │ ├── model
│ │ │ │ │ ├── PersistenceOffset.scala
│ │ │ │ │ ├── NoteSql.scala
│ │ │ │ │ └── NoteEventSql.scala
│ │ │ │ ├── common
│ │ │ │ │ ├── DBComponent.scala
│ │ │ │ │ ├── QueryFilterOptions.scala
│ │ │ │ │ └── DbTypeMappers.scala
│ │ │ │ ├── schema
│ │ │ │ │ ├── PersistenceOffsetTableDefinition.scala
│ │ │ │ │ ├── NoteTableDefinition.scala
│ │ │ │ │ └── NoteEventTableDefinition.scala
│ │ │ │ ├── repos
│ │ │ │ │ ├── NoteEventRepo.scala
│ │ │ │ │ ├── PersistenceOffsetRepo.scala
│ │ │ │ │ └── NoteRepo.scala
│ │ │ │ └── TableDefinitionCreator.scala
│ │ │ ├── events
│ │ │ │ ├── NoteEventsQuery.scala
│ │ │ │ └── NoteEventsViewActor.scala
│ │ │ ├── note
│ │ │ │ ├── NoteQuery.scala
│ │ │ │ └── NoteViewActor.scala
│ │ │ ├── common
│ │ │ │ ├── Traceable.scala
│ │ │ │ ├── BaseViewActor.scala
│ │ │ │ ├── ResumableProjection.scala
│ │ │ │ └── ViewBuilderActor.scala
│ │ │ ├── QuerySupervisorActor.scala
│ │ │ └── model
│ │ │ │ └── ServiceResult.scala
│ │ │ ├── command
│ │ │ ├── note
│ │ │ │ ├── Note.scala
│ │ │ │ ├── NoteProtocol.scala
│ │ │ │ └── NoteAggregateActor.scala
│ │ │ ├── CommandSupervisorActor.scala
│ │ │ └── persistence
│ │ │ │ ├── TaggingEventAdapter.scala
│ │ │ │ ├── AvroConverters.scala
│ │ │ │ ├── StatementSchemaMap.scala
│ │ │ │ └── CommonPersistenceSerializer.scala
│ │ │ ├── common
│ │ │ ├── PredefinedTimeout.scala
│ │ │ └── valueClasses.scala
│ │ │ ├── http
│ │ │ ├── HealthCheckRoute.scala
│ │ │ ├── model
│ │ │ │ ├── ErrorMessageResponse.scala
│ │ │ │ └── CorrelationIdResponse.scala
│ │ │ ├── command
│ │ │ │ ├── CommandRequest.scala
│ │ │ │ ├── CommandResponse.scala
│ │ │ │ └── CommandRoutes.scala
│ │ │ ├── query
│ │ │ │ ├── QueryResponse.scala
│ │ │ │ └── QueryRoutes.scala
│ │ │ ├── RouteHelpers.scala
│ │ │ └── BaseRoutes.scala
│ │ │ ├── utils
│ │ │ ├── BackoffActorHelper.scala
│ │ │ └── package.scala
│ │ │ ├── Main.scala
│ │ │ └── serialization
│ │ │ └── PlayJsonSupport.scala
│ └── main.iml
├── test
│ ├── scala
│ │ └── com
│ │ │ └── ingenuiq
│ │ │ └── note
│ │ │ ├── base
│ │ │ ├── GetInternalStateActor.scala
│ │ │ ├── RestartableActor.scala
│ │ │ └── BaseRepoSpec.scala
│ │ │ ├── utils
│ │ │ ├── SchemaTestHelper.scala
│ │ │ ├── NoteModelsHelper.scala
│ │ │ ├── ClassUtils.scala
│ │ │ └── JournalSchemaEvolutionTest.scala
│ │ │ ├── http
│ │ │ └── HealthCheckRouteSpec.scala
│ │ │ ├── command
│ │ │ ├── InMemoryPersistenceBaseTrait.scala
│ │ │ └── note
│ │ │ │ └── NoteAggregateActorSpec.scala
│ │ │ └── query
│ │ │ └── dao
│ │ │ └── NoteRepoSpec.scala
│ ├── test.iml
│ └── resources
│ │ ├── application.conf
│ │ └── logback.xml
└── it
│ ├── resources
│ └── application.conf
│ └── scala
│ └── com
│ └── ingenuiq
│ └── note
│ └── integration
│ ├── base
│ ├── IntegrationConf.scala
│ ├── EmbeddedCassandra.scala
│ └── IntegrationBase.scala
│ ├── utils
│ ├── PlayJsonSupportWriters.scala
│ └── PlayJsonSupportReaders.scala
│ └── NoteIntegrationSpec.scala
├── .sbtopts
├── docker
├── docker-compose-essential.yml
├── elk.yml
├── filebeat.yml
└── docker-compose-dev.yml
├── .gitignore
├── .scalafmt.conf
└── README.md
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version = 1.5.2
2 |
--------------------------------------------------------------------------------
/version.sbt:
--------------------------------------------------------------------------------
1 | ThisBuild / version := "0.0.1-SNAPSHOT"
2 |
--------------------------------------------------------------------------------
/src/main/resources/avro/avsc-history/history-avsc-placeholder:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/settings/HttpListenerSettings.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.settings
2 |
3 | case class HttpListenerSettings(interface: String, port: Int)
4 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/dao/model/PersistenceOffset.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.dao.model
2 |
3 | case class PersistenceOffset(id: String, offset: Long)
4 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/settings/TracingMonitoringSettings.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.settings
2 |
3 | case class TracingMonitoringSettings(prometheusEnabled: Boolean, zipkinEnabled: Boolean)
4 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/command/note/Note.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.command.note
2 |
3 | import com.ingenuiq.note.common.NoteId
4 |
5 | case class Note(id: NoteId, title: Option[String], content: Option[String])
6 |
--------------------------------------------------------------------------------
/.sbtopts:
--------------------------------------------------------------------------------
1 | -J-XX:MinHeapFreeRatio=10
2 | -J-XX:MaxHeapFreeRatio=20
3 | -J-Xmx4g
4 | -J-Xms512m
5 | -J-Xss8m
6 | -J-XX:MaxMetaspaceSize=1g
7 | -J-XX:MetaspaceSize=200m
8 | -J-Dsbt.override.build.repos=false
9 | -Dsbt.override.build.repos=false
10 |
--------------------------------------------------------------------------------
/src/main/resources/avro/avsc/UpdatebleAvro.avsc:
--------------------------------------------------------------------------------
1 | {
2 | "type" : "record",
3 | "name" : "UpdatebleAvro",
4 | "namespace" : "com.ingenuiq.note.events",
5 | "fields" : [ {
6 | "name" : "value",
7 | "type" : [ "null", "string" ]
8 | } ]
9 | }
10 |
--------------------------------------------------------------------------------
/src/main/resources/avro/PersistentEventMetadata.avdl:
--------------------------------------------------------------------------------
1 | @namespace("com.ingenuiq.common")
2 | protocol Common {
3 |
4 | record PersistentEventMetadataAvro {
5 | string correlationId;
6 | string eventId;
7 | string userId;
8 | long created;
9 | string spanId;
10 | }
11 | }
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/common/PredefinedTimeout.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.common
2 |
3 | import akka.util.Timeout
4 |
5 | import scala.concurrent.duration._
6 | import scala.language.postfixOps
7 |
/** Mix-in supplying the default ask-pattern timeout shared by actors and routes. */
trait PredefinedTimeout {

  // 10-second implicit Timeout picked up by `?` (ask) call sites.
  implicit val timeout: Timeout = Timeout(10.seconds)
}
12 |
--------------------------------------------------------------------------------
/src/test/scala/com/ingenuiq/note/base/GetInternalStateActor.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.base
2 |
3 | import akka.actor.Actor.Receive
4 |
/** Test-only message protocol for inspecting an actor's internals. */
object GetInternalStateActor {
  // Ask an actor to reply with its current internal state.
  case object GetInternalState
  // Ask an actor to reply with its handler partial functions.
  case object GetPartialFunctions
  // Reply carrying the actor's command and recovery handlers.
  case class ActorPartialFunctions(command: Receive, recover: Receive)
}
10 |
--------------------------------------------------------------------------------
/src/main/resources/avro/avsc/NoteAvro.avsc:
--------------------------------------------------------------------------------
1 | {
2 | "type" : "record",
3 | "name" : "NoteAvro",
4 | "namespace" : "com.ingenuiq.note.events",
5 | "fields" : [ {
6 | "name" : "id",
7 | "type" : "string"
8 | }, {
9 | "name" : "title",
10 | "type" : [ "null", "string" ]
11 | }, {
12 | "name" : "content",
13 | "type" : [ "null", "string" ]
14 | } ]
15 | }
16 |
--------------------------------------------------------------------------------
/docker/docker-compose-essential.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 |
5 | postgres:
6 | image: postgres
7 | restart: unless-stopped
8 | environment:
9 | POSTGRES_DB: postgres
10 | POSTGRES_USER: postgres
11 | POSTGRES_PASSWORD: postgres
12 | ports:
13 | - 5434:5432
14 |
15 | cassandra:
16 | image: cassandra:latest
17 | ports:
18 | - "9042:9042" # native protocol clients
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/http/HealthCheckRoute.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.http
2 |
3 | import akka.http.scaladsl.model.StatusCodes.OK
4 | import akka.http.scaladsl.server.Directives.{ complete, path }
5 | import akka.http.scaladsl.server.Route
6 |
/** Route mix-in exposing the liveness endpoint. */
trait HealthCheckRoute {

  // GET /health-check -> 200 OK with plain body "ok".
  private[http] def healthCheckRoute: Route = path("health-check") {
    complete(OK -> "ok")
  }
}
14 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | build/
2 | .idea
3 | target
4 | project/target
5 | journal/
6 | log/
7 | src/main/resources/assets
8 | src/main/resources/app
9 | cassandra/
10 | data/
11 | file~/
12 | swagger-editor/
13 | *.tgz
14 | .bsp
15 |
16 | ######################
17 | # OS generated files #
18 | ######################
19 | */.DS_Store
20 | .DS_Store
21 | .DS_Store?
22 | ._*
23 | .Spotlight-V100
24 | .Trashes
25 | Icon?
26 | ehthumbs.db
27 | Thumbs.db
--------------------------------------------------------------------------------
/src/it/resources/application.conf:
--------------------------------------------------------------------------------
1 | rdbms = {
2 | dataSourceClass = "slick.jdbc.DatabaseUrlDataSource"
3 | properties {
4 | driver = "org.h2.Driver"
5 | url = "jdbc:h2:mem:alpha;MODE=PostgreSQL"
6 | user = "h2user"
7 | password = "h2password"
8 | connectionPool = disabled
9 | keepAliveConnection = true
10 | }
11 | }
12 |
13 | cassandra-query-journal {
14 | events-by-tag {
15 | eventual-consistency-delay = 100ms
16 | }
17 | }
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/http/model/ErrorMessageResponse.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.http.model
2 |
3 | import com.ingenuiq.note.utils
4 | import play.api.libs.json.{ Json, Writes }
5 |
6 | case class ErrorMessageResponse(errorMessage: String = "Error on our side, working on it", correlationId: String = utils.currentTraceId)
7 |
8 | object ErrorMessageResponse {
9 | implicit val writes: Writes[ErrorMessageResponse] = Json.writes[ErrorMessageResponse]
10 | }
11 |
--------------------------------------------------------------------------------
/src/test/test.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/http/model/CorrelationIdResponse.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.http.model
2 |
3 | import com.ingenuiq.note.common.CorrelationId
4 | import com.ingenuiq.note.utils
5 | import play.api.libs.json.{ Json, Writes }
6 |
7 | case class CorrelationIdResponse(correlationId: CorrelationId = CorrelationId(utils.currentTraceId))
8 |
9 | object CorrelationIdResponse {
10 | implicit val writes: Writes[CorrelationIdResponse] = Json.writes[CorrelationIdResponse]
11 | }
12 |
--------------------------------------------------------------------------------
/src/it/scala/com/ingenuiq/note/integration/base/IntegrationConf.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.integration.base
2 |
3 | import com.typesafe.config.{ Config, ConfigFactory, ConfigValueFactory }
4 |
5 | import scala.collection.JavaConverters._
6 |
object IntegrationConf {

  /** Loads the integration-test configuration from the default classpath
    * resources (reference.conf / application.conf).
    *
    * @param className the requesting test class; currently unused but kept in
    *                  the signature for call-site compatibility — a previous
    *                  revision sanitised it into a cluster name that was never
    *                  applied to the loaded config (dead code, now removed).
    * @return the resolved Typesafe config
    */
  def config(className: Class[_]): Config =
    ConfigFactory.load()

}
20 |
--------------------------------------------------------------------------------
/src/main/resources/avro/avsc/PersistentEventMetadataAvro.avsc:
--------------------------------------------------------------------------------
1 | {
2 | "type" : "record",
3 | "name" : "PersistentEventMetadataAvro",
4 | "namespace" : "com.ingenuiq.common",
5 | "fields" : [ {
6 | "name" : "correlationId",
7 | "type" : "string"
8 | }, {
9 | "name" : "eventId",
10 | "type" : "string"
11 | }, {
12 | "name" : "userId",
13 | "type" : "string"
14 | }, {
15 | "name" : "created",
16 | "type" : "long"
17 | }, {
18 | "name" : "spanId",
19 | "type" : "string"
20 | } ]
21 | }
22 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/dao/common/DBComponent.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.dao.common
2 |
3 | import com.typesafe.config.ConfigFactory
4 |
/** Central Slick profile selection and shared database handle.
  *
  * The JDBC driver class configured under `rdbms.properties.driver` decides
  * the Slick profile: H2 (used by tests) or PostgreSQL (everything else).
  */
object DBComponent {

  private val config = ConfigFactory.load()

  // NOTE(review): matches on the raw driver class name; any driver other than
  // H2 silently falls back to the Postgres profile — confirm that is intended.
  val driver = config.getString("rdbms.properties.driver") match {
    case "org.h2.Driver" => slick.jdbc.H2Profile
    case _               => slick.jdbc.PostgresProfile
  }

  import driver.api._

  // Shared Database built once at object initialisation from the same config block.
  val db: Database = Database.forConfig("rdbms.properties")

}
19 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/settings/Settings.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.settings
2 |
3 | import com.typesafe.config.ConfigFactory
4 | import pureconfig.ConfigSource
5 | import pureconfig.generic.auto._
6 |
/** Top-level typed application settings, bound from `application.conf` by
  * pureconfig (kebab-case keys map to these camelCase fields).
  */
case class Settings(rebuildReadside: Boolean,
                    tracingMonitoringSettings: TracingMonitoringSettings,
                    httpListenerSettings: HttpListenerSettings)

object Settings {

  // Loaded eagerly once; loadOrThrow fails fast at startup on any
  // missing or invalid key rather than at first use.
  val conf: Settings = ConfigSource.fromConfig(ConfigFactory.load).loadOrThrow[Settings]
}
15 |
--------------------------------------------------------------------------------
/src/main/resources/application.conf:
--------------------------------------------------------------------------------
1 | include "environment.conf"
2 | include "akka.conf"
3 | include "kamon.conf"
4 |
5 | http-settings {
6 | host-port = "localhost:9001"
7 | host-port = ${?HOST_PORT}
8 | public-dir = "public"
9 | app-dir = "app"
10 | default-page = "index.html"
11 | }
12 |
13 | http-listener-settings {
14 | interface = "0.0.0.0"
15 | interface = ${?HTTP_LISTEN_INTERFACE}
16 | port = 9001
17 | port = ${?HTTP_LISTEN_PORT}
18 | }
19 |
20 | cors.allowed-origin = "*"
21 |
22 | rebuild-readside = true
23 | rebuild-readside = ${?REBUILD_READSIDE}
--------------------------------------------------------------------------------
/src/main/main.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/src/test/scala/com/ingenuiq/note/utils/SchemaTestHelper.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.utils
2 |
3 | import com.ingenuiq.note.query.dao.TableDefinitionCreator
4 |
5 | import scala.concurrent.ExecutionContext.Implicits.global
6 |
/** Test helper for creating and clearing the query-side database schema. */
object SchemaTestHelper extends TableDefinitionCreator {

  import com.ingenuiq.note.query.dao.common.DBComponent._
  import com.ingenuiq.note.query.dao.common.DBComponent.driver.api._

  // Single attempt (retry count 1); retry logic lives in TableDefinitionCreator.
  def createQuerySchema(): Unit = createQuerySchemaWithRetry(1)

  // NOTE(review): fire-and-forget — the Futures returned by db.run are
  // discarded, so deletes may still be in flight when this returns and any
  // failure is silently swallowed. Callers must not assume tables are empty
  // immediately after the call — TODO confirm this is intentional.
  def deleteQueryContent(): Unit = tables.foreach(x => db.run(x.delete))

}
17 |
--------------------------------------------------------------------------------
/src/it/scala/com/ingenuiq/note/integration/base/EmbeddedCassandra.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.integration.base
2 |
3 | import java.io.File
4 |
5 | import akka.persistence.cassandra.testkit.CassandraLauncher
6 |
/** Starts and stops an embedded Cassandra instance for integration tests. */
object EmbeddedCassandra {
  private val directory      = new File("cassandra") // data dir, relative to the working directory
  private val configResource = "test-embedded-cassandra.yaml"
  private val clean          = true // wipe the data directory on each start
  private val port           = 9042 // Cassandra native-protocol port

  def startCassandra(): Unit = CassandraLauncher.start(directory, configResource, clean, port)

  def stopCassandra(): Unit = CassandraLauncher.stop()
}
17 |
--------------------------------------------------------------------------------
/src/test/scala/com/ingenuiq/note/base/RestartableActor.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.base
2 |
3 | import akka.persistence.PersistentActor
4 | import com.ingenuiq.note.base.RestartableActor.{ RestartActor, RestartActorException }
5 |
/** Stackable test mix-in for persistent actors: sending `RestartActor` makes
  * the actor throw, so supervision restarts it and recovery can be exercised.
  */
trait RestartableActor extends PersistentActor {

  // `abstract override` stacks on the concrete actor's handler: normal
  // commands go to super first, only RestartActor triggers the crash.
  abstract override def receiveCommand: Receive = super.receiveCommand orElse {
    case RestartActor => throw new RestartActorException("Test - Restarting with exception")
  }
}
12 |
object RestartableActor {
  // Message that triggers the deliberate crash in the mix-in above.
  case object RestartActor

  // Private so this test-only exception cannot be caught by name elsewhere.
  private class RestartActorException(message: String) extends Exception(message)
}
18 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/events/NoteEventsQuery.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.events
2 |
3 | import com.ingenuiq.note.common.UserId
4 | import com.ingenuiq.note.query.common.{ Event, Query }
5 | import com.ingenuiq.note.query.dao.model.NoteEventSql
6 |
7 | sealed trait NoteEventQuery extends Query
8 |
9 | object NoteEventQuery {
10 | case class GetNoteEvents(userId: UserId) extends NoteEventQuery
11 |
12 | }
13 |
14 | sealed trait NoteEventQueryResponse extends Event
15 |
16 | object NoteEventQueryResponse {
17 |
18 | case class NoteEventsFetchedResponse(notes: Iterable[NoteEventSql]) extends NoteEventQueryResponse
19 |
20 | }
21 |
--------------------------------------------------------------------------------
/.scalafmt.conf:
--------------------------------------------------------------------------------
1 | version = 2.4.0
2 | style = intellij
3 | maxColumn = 140
4 | lineEndings = unix
5 | align = most
6 | align.tokens = ["%", "%%", "=", ":", "=>", "<-", "~>", "<~"]
7 | align.arrowEnumeratorGenerator = true
8 | align.openParenCallSite = false
9 | align.openParenDefnSite = true
10 | newlines.sometimesBeforeColonInMethodReturnType = true
11 | newlines.alwaysBeforeTopLevelStatements = true
12 | newlines.alwaysBeforeElseAfterCurlyIf = true
13 | unindentTopLevelOperators = true
14 | spaces.inImportCurlyBraces = true
15 | danglingParentheses = true
16 | includeCurlyBraceInSelectChains = true
17 | project.git = true
18 | indentOperator.exclude = "^(&&|\\~|\\|\\|)$"
19 |
--------------------------------------------------------------------------------
/src/test/resources/application.conf:
--------------------------------------------------------------------------------
1 | rdbms = {
2 | dataSourceClass = "slick.jdbc.DatabaseUrlDataSource"
3 | properties {
4 | driver = "org.h2.Driver"
5 | url = "jdbc:h2:mem:alpha;MODE=PostgreSQL"
6 | user = "h2user"
7 | password = "h2password"
8 | connectionPool = disabled
9 | keepAliveConnection = true
10 | }
11 | }
12 |
13 | inmemory-journal {
14 | event-adapters {
15 | command-tagging = "com.ingenuiq.note.command.persistence.TaggingEventAdapter"
16 | }
17 |
18 | event-adapter-bindings {
19 | "org.apache.avro.specific.SpecificRecordBase" = [command-tagging]
20 | "com.ingenuiq.note.query.common.PersistentEvent" = [command-tagging]
21 | }
22 | }
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/utils/BackoffActorHelper.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.utils
2 |
3 | import akka.actor.Props
4 | import akka.pattern.{ BackoffOpts, BackoffSupervisor }
5 |
6 | import scala.concurrent.duration._
7 |
/** Helper for wrapping child actors in a stop-based backoff supervisor. */
trait BackoffActorHelper {

  /** Returns Props for a BackoffSupervisor that recreates `childName` from
    * `props` after it stops, waiting 3s initially and at most 30s between
    * restarts, with 20% random jitter on the intervals.
    */
  def backoffActor(childName: String, props: Props): Props = {
    val supervisionOpts =
      BackoffOpts
        .onStop(props, childName = childName, minBackoff = 3.seconds, maxBackoff = 30.seconds, randomFactor = 0.2)
        .withDefaultStoppingStrategy // stop the child on any thrown Exception
    BackoffSupervisor.props(supervisionOpts)
  }

}
24 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/note/NoteQuery.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.note
2 |
3 | import com.ingenuiq.note.common.{ NoteId, UserId }
4 | import com.ingenuiq.note.query.common.{ Event, Query }
5 | import com.ingenuiq.note.query.dao.model.NoteSql
6 |
7 | sealed trait NoteQuery extends Query
8 |
9 | object NoteQuery {
10 | case class GetNotes(userId: UserId) extends NoteQuery
11 |
12 | case class GetNote(userId: UserId, noteId: NoteId) extends NoteQuery
13 | }
14 |
15 | sealed trait NoteQueryResponse extends Event
16 |
17 | object NoteQueryResponse {
18 |
19 | case class NotesFetchedResponse(notes: Iterable[NoteSql]) extends NoteQueryResponse
20 | case class NoteFetchedResponse(note: Option[NoteSql]) extends NoteQueryResponse
21 |
22 | }
23 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/command/CommandSupervisorActor.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.command
2 |
3 | import akka.actor.{ Actor, ActorRef, Props }
4 | import com.ingenuiq.note.command.note._
5 | import com.ingenuiq.note.common.PredefinedTimeout
6 | import com.ingenuiq.note.utils.BackoffActorHelper
7 |
object CommandSupervisorActor {

  /** Props factory for the supervisor.
    *
    * Uses `Props(new ...)` inside the companion (safe: not within an actor)
    * instead of the reflective `Props(classOf[...])` form, so construction is
    * checked at compile time; also annotates the public return type.
    */
  def apply(): Props = Props(new CommandSupervisorActor)
}
11 |
/** Forwards every incoming `NoteCommand` to a single backoff-supervised
  * `NoteAggregateActor`, preserving the original sender.
  */
class CommandSupervisorActor extends Actor with PredefinedTimeout with BackoffActorHelper {

  // Created on the actor system (not this actor's context) and wrapped in the
  // on-stop backoff supervisor from BackoffActorHelper.
  val noteAggregateActor: ActorRef = context.system.actorOf(backoffActor("noteAggregateActor", NoteAggregateActor()))

  override def receive: Receive = {
    case command: NoteCommand => noteAggregateActor.forward(command) // forward keeps the original sender
  }

}
21 |
--------------------------------------------------------------------------------
/src/test/scala/com/ingenuiq/note/http/HealthCheckRouteSpec.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.http
2 |
3 | import akka.http.scaladsl.server.Directives
4 | import akka.http.scaladsl.testkit.{ RouteTest, ScalatestRouteTest }
5 | import org.scalatest.concurrent.Eventually
6 | import org.scalatest.matchers.should.Matchers
7 | import org.scalatest.wordspec.AnyWordSpec
8 |
9 | class HealthCheckRouteSpec
10 | extends AnyWordSpec
11 | with HealthCheckRoute
12 | with Matchers
13 | with Eventually
14 | with Directives
15 | with RouteTest
16 | with ScalatestRouteTest {
17 |
18 | "Health Check " should {
19 | "run health check is ok" in {
20 | Get(s"""/health-check""") ~> healthCheckRoute ~> check {
21 | handled shouldBe true
22 | responseAs[String] shouldBe "ok"
23 | }
24 | }
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/dao/common/QueryFilterOptions.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.dao.common
2 |
3 | import slick.lifted.CanBeQueryCondition
4 |
5 | import scala.language.higherKinds
6 |
/** Slick query extensions for optional and conditional filtering. */
trait QueryFilterOptions {

  import com.ingenuiq.note.query.dao.common.DBComponent.driver.api._

  implicit class ConditionalQueryFilter[A, B, C[_]](q: Query[A, B, C]) {

    /** Applies `f` as a filter only when `option` is defined; otherwise the
      * query is returned unchanged.
      */
    def filterOpt[D, T <: Rep[_]: CanBeQueryCondition](option: Option[D])(f: (A, D) => T): Query[A, B, C] =
      option.map(d => q.filter(a => f(a, d))).getOrElse(q)

    /** Applies the boolean filter only when `p` is true. */
    def filterIf(p: Boolean)(f: A => Rep[Boolean]): Query[A, B, C] =
      if (p) q.filter(f) else q

    /** Same as `filterIf` but for nullable (Option-valued) boolean columns. */
    def filterIfOptional(p: Boolean)(f: A => Rep[Option[Boolean]]): Query[A, B, C] =
      if (p) q.filter(f) else q

  }
}
24 |
--------------------------------------------------------------------------------
/src/main/resources/avro/avsc/NoteDeletedAvro.avsc:
--------------------------------------------------------------------------------
1 | {
2 | "type" : "record",
3 | "name" : "NoteDeletedAvro",
4 | "namespace" : "com.ingenuiq.note.events",
5 | "fields" : [ {
6 | "name" : "metadata",
7 | "type" : {
8 | "type" : "record",
9 | "name" : "PersistentEventMetadataAvro",
10 | "namespace" : "com.ingenuiq.common",
11 | "fields" : [ {
12 | "name" : "correlationId",
13 | "type" : "string"
14 | }, {
15 | "name" : "eventId",
16 | "type" : "string"
17 | }, {
18 | "name" : "userId",
19 | "type" : "string"
20 | }, {
21 | "name" : "created",
22 | "type" : "long"
23 | }, {
24 | "name" : "spanId",
25 | "type" : "string"
26 | } ]
27 | }
28 | }, {
29 | "name" : "noteId",
30 | "type" : "string"
31 | } ]
32 | }
33 |
--------------------------------------------------------------------------------
/src/it/scala/com/ingenuiq/note/integration/utils/PlayJsonSupportWriters.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.integration.utils
2 |
3 | import com.ingenuiq.note.command.note.Note
4 | import com.ingenuiq.note.command.note.NoteEvent.NoteCreated
5 | import com.ingenuiq.note.http.command.CommandRequest.NotePayload
6 | import com.ingenuiq.note.query.common.PersistentEventMetadata
7 | import play.api.libs.json._
8 |
9 | trait PlayJsonSupportWriters {
10 |
11 | implicit lazy val writes1: Writes[NotePayload] = Json.writes[NotePayload]
12 |
13 | implicit lazy val noteWrites: Writes[Note] = Json.writes[Note]
14 | implicit lazy val persistentEventMetadataWrites: Writes[PersistentEventMetadata] = Json.writes[PersistentEventMetadata]
15 | implicit lazy val noteCreatedWrites: Writes[NoteCreated] = Json.writes[NoteCreated]
16 | }
17 |
--------------------------------------------------------------------------------
/src/main/resources/avro/NoteInternalEvents.avdl:
--------------------------------------------------------------------------------
1 | @namespace("com.ingenuiq.note.events")
2 | protocol NotePersistentEvent {
3 |
4 | import idl "PersistentEventMetadata.avdl";
5 |
6 | record UpdatebleAvro {
7 | union { null, string } value;
8 | }
9 |
10 | record NoteAvro {
11 | string id;
12 | union { null, string } title;
13 | union { null, string } content;
14 | }
15 |
16 | record NoteCreatedAvro {
17 | com.ingenuiq.common.PersistentEventMetadataAvro metadata;
18 | NoteAvro note;
19 | }
20 |
21 | record NoteUpdatedAvro {
22 | com.ingenuiq.common.PersistentEventMetadataAvro metadata;
23 | string id;
24 | union { null, UpdatebleAvro } title;
25 | union { null, UpdatebleAvro } content;
26 | }
27 |
28 | record NoteDeletedAvro {
29 | com.ingenuiq.common.PersistentEventMetadataAvro metadata;
30 | string noteId;
31 | }
32 |
33 | }
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/dao/schema/PersistenceOffsetTableDefinition.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.dao.schema
2 |
3 | import com.ingenuiq.note.query.dao.common.DbTypeMappers
4 | import com.ingenuiq.note.query.dao.model.PersistenceOffset
5 | import slick.lifted.ProvenShape
6 |
7 | trait PersistenceOffsetTableDefinition extends DbTypeMappers {
8 |
9 | import com.ingenuiq.note.query.dao.common.DBComponent.driver.api._
10 |
11 | val persistenceOffsets = TableQuery[PersistenceOffsetTable](new PersistenceOffsetTable(_))
12 |
13 | final class PersistenceOffsetTable(tag: Tag) extends Table[PersistenceOffset](tag, "persistence_offset") {
14 |
15 | def persistenceId = column[String]("persistence_id", O.PrimaryKey)
16 | def offset = column[Long]("offset")
17 |
18 | def * : ProvenShape[PersistenceOffset] =
19 | (persistenceId, offset).mapTo[PersistenceOffset]
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2")
2 |
3 | addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.10")
4 |
5 | addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0")
6 |
7 | addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.3.15")
8 |
9 | addSbtPlugin("io.get-coursier" % "sbt-coursier" % "1.1.0-M7")
10 |
11 | addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1")
12 |
13 | addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.9.2")
14 |
15 | addSbtPlugin("com.julianpeeters" % "sbt-avrohugger" % "2.0.0-RC15")
16 |
17 | addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.4.1")
18 |
19 | resolvers += Resolver.bintrayRepo("kamon-io", "sbt-plugins")
20 | addSbtPlugin("io.kamon" % "sbt-aspectj-runner" % "1.1.0")
21 | addSbtPlugin("com.lightbend.sbt" % "sbt-javaagent" % "0.1.4")
22 |
23 | libraryDependencies += "org.apache.avro" % "avro-tools" % "1.8.2"
24 |
--------------------------------------------------------------------------------
/src/test/scala/com/ingenuiq/note/utils/NoteModelsHelper.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.utils
2 |
3 | import com.ingenuiq.note.command.note.Note
4 | import com.ingenuiq.note.common.NoteId
5 | import com.ingenuiq.note.http.command.CommandRequest.NotePayload
6 | import com.ingenuiq.note.http.query.QueryResponse.NoteResponse
7 |
8 | import scala.util.Random
9 |
10 | trait NoteModelsHelper {
11 |
12 | private def randomString: String = s"Note-${Random.alphanumeric.take(10).mkString}"
13 |
14 | def generateRandomNotePayload(title: Option[String] = Option(randomString), content: Option[String] = Option(randomString)): NotePayload =
15 | NotePayload(title = title, content = content)
16 |
17 | def generateRandomNote(id: NoteId = NoteId.generateNew,
18 | title: Option[String] = Option(randomString),
19 | content: Option[String] = Option(randomString)): Note =
20 | Note(id = id, title = title, content = content)
21 |
22 | }
23 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/dao/repos/NoteEventRepo.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.dao.repos
2 |
3 | import com.ingenuiq.note.query.dao.common.QueryFilterOptions
4 | import com.ingenuiq.note.query.dao.model.NoteEventSql
5 | import com.ingenuiq.note.query.dao.schema._
6 | import com.typesafe.scalalogging.LazyLogging
7 |
8 | import scala.concurrent.{ ExecutionContext, Future }
9 | import scala.language.higherKinds
10 |
/** Slick-backed repository for the `note_events` read-side table. */
class NoteEventRepo(implicit ec: ExecutionContext) extends NoteEventTableDefinition with LazyLogging with QueryFilterOptions {

  import com.ingenuiq.note.query.dao.common.DBComponent.db
  import com.ingenuiq.note.query.dao.common.DBComponent.driver.api._

  /** Inserts the row, or replaces it when the primary key already exists; yields the affected row count. */
  def upsertNoteEvent(ne: NoteEventSql): Future[Int] = {
    logger.trace("Upserting a note event")
    val action = noteEvents.insertOrUpdate(ne)
    db.run(action)
  }

  /** Fetches every stored note event. */
  def getNoteEvents: Future[Seq[NoteEventSql]] = {
    logger.trace("Querying note events")
    val action = noteEvents.result
    db.run(action)
  }

}
27 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/common/Traceable.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.common
2 |
3 | import java.time.LocalDateTime
4 |
5 | import com.ingenuiq.note.common.{ CorrelationId, EventId, UserId }
6 | import com.ingenuiq.note.utils
7 |
/** Marker for write-side commands; carries the acting user via [[WithMetadata]]. */
trait Command extends WithMetadata

/** Root marker for all events in the system. */
trait Event

/** Marker for read-side queries; carries the acting user via [[WithMetadata]]. */
trait Query extends WithMetadata

/** An event persisted to the journal together with its metadata envelope. */
trait PersistentEvent extends Event {
  def persistentEventMetadata: PersistentEventMetadata
}

/** Anything traceable across boundaries via a correlation id. */
trait Traceable {
  def correlationId: CorrelationId
}

/** Anything attributable to a user. */
trait WithMetadata {
  def userId: UserId
}

/**
 * Envelope stored with every persistent event: the triggering user, a unique event id,
 * the creation time (UTC by default, per utils.now), and tracing identifiers taken
 * from utils.currentTraceId / utils.currentSpanId.
 */
case class PersistentEventMetadata(userId: UserId,
                                   eventId: EventId = EventId.generateNew,
                                   created: LocalDateTime = utils.now(),
                                   correlationId: CorrelationId = CorrelationId(utils.currentTraceId),
                                   spanId: String = utils.currentSpanId)
    extends WithMetadata
32 |
--------------------------------------------------------------------------------
/src/test/scala/com/ingenuiq/note/base/BaseRepoSpec.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.base
2 |
3 | import com.ingenuiq.note.settings.Settings
4 | import com.ingenuiq.note.utils.{ NoteModelsHelper, SchemaTestHelper }
5 | import org.scalatest.concurrent.ScalaFutures
6 | import org.scalatest.matchers.should.Matchers
7 | import org.scalatest.time.{ Millis, Span }
8 | import org.scalatest.wordspec.AnyWordSpec
9 | import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach }
10 |
/**
 * Common base for repository specs: creates the query schema once before the suite
 * and wipes table content after each individual test.
 */
abstract class BaseRepoSpec
    extends AnyWordSpec
    with NoteModelsHelper
    with ScalaFutures
    with Matchers
    with BeforeAndAfterAll
    with BeforeAndAfterEach {

  // Futures are awaited for up to 1 second, polled every 10 ms.
  override implicit val patienceConfig: PatienceConfig =
    PatienceConfig(timeout = Span(1000, Millis), interval = Span(10, Millis))

  val settings: Settings = Settings.conf

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    SchemaTestHelper.createQuerySchema()
  }

  override protected def afterEach(): Unit =
    SchemaTestHelper.deleteQueryContent()
}
31 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/command/persistence/TaggingEventAdapter.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.command.persistence
2 |
3 | import akka.persistence.journal.{ EventAdapter, EventSeq }
4 | import com.ingenuiq.note.command.note.PersistentNoteEvent
5 | import com.typesafe.scalalogging.LazyLogging
6 | import org.apache.avro.specific.SpecificRecordBase
7 |
/** Converts note events to/from their Avro representation at the journal boundary. */
class TaggingEventAdapter extends EventAdapter with LazyLogging {

  /**
   * Serializes persistent note events to Avro before they hit the journal.
   *
   * Bug fix: the error branch previously returned the result of `logger.error`,
   * i.e. `Unit`, so an unexpected message caused `()` to be handed to the journal
   * instead of the event. Unexpected messages are now logged and passed through
   * unchanged, mirroring the fallback in `fromJournal`.
   */
  override def toJournal(event: Any): Any = event match {
    case e: PersistentNoteEvent => AvroConverters.from(e)
    case e =>
      logger.error(s"Received unexpected message to be written in journal, ${e.getClass.getSimpleName}, $e")
      e
  }

  /** Deserializes Avro records read from the journal back into domain events; non-Avro payloads pass through. */
  override def fromJournal(event: Any, manifest: String): EventSeq = EventSeq.single {
    event match {
      case e: SpecificRecordBase => AvroConverters.to(e)
      case e =>
        logger.error(s"Received unexpected message from journal, not avro, ${e.getClass.getSimpleName}, $e")
        e
    }
  }

  // No manifest needed: the Avro record class itself identifies the event type.
  override def manifest(event: Any): String = ""
}
26 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/http/command/CommandRequest.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.http.command
2 |
3 | import com.ingenuiq.note.command.note.Note
4 | import com.ingenuiq.note.command.note.NoteCommand.{ CreateNote, UpdateNote }
5 | import com.ingenuiq.note.common.{ NoteId, UserId }
6 | import com.ingenuiq.note.serialization.PlayJsonSupport
7 | import play.api.libs.json._
8 |
object CommandRequest extends PlayJsonSupport {

  /** JSON payload carrying the optional title and content of a note. */
  case class NotePayload(title: Option[String], content: Option[String]) {

    // Materializes this payload as a domain Note under the given id.
    private def asNote(noteId: NoteId): Note =
      Note(id = noteId, title = title, content = content)

    /** Creation command with a freshly generated note id. */
    def toCreateCommand(userId: UserId): CreateNote =
      CreateNote(userId = userId, note = asNote(NoteId.generateNew))

    /** Update command targeting an existing note id. */
    def toUpdateCommand(userId: UserId, noteId: NoteId): UpdateNote =
      UpdateNote(userId = userId, note = asNote(noteId))
  }

  object NotePayload {
    /** JSON deserializer derived from the case-class shape. */
    implicit val reads: Reads[NotePayload] = Json.reads[NotePayload]
  }

}
29 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/QuerySupervisorActor.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query
2 |
3 | import akka.actor.{ Actor, ActorRef, Props }
4 | import com.ingenuiq.note.query.events.{ NoteEventQuery, NoteEventView, NoteEventViewBuilder }
5 | import com.ingenuiq.note.query.note._
6 | import com.ingenuiq.note.utils.BackoffActorHelper
7 |
object QuerySupervisorActor {

  /** Props for the query-side supervisor (no-arg constructor, created reflectively). */
  def apply(): Props = Props[QuerySupervisorActor]()
}
13 |
/** Supervises the read side: starts the projection builders and routes queries to the views. */
class QuerySupervisorActor extends Actor with BackoffActorHelper {

  // View builders receive no messages from here; they are started for their side effects.
  context.actorOf(backoffActor(NoteViewBuilder.name, NoteViewBuilder()))
  context.actorOf(backoffActor(NoteEventViewBuilder.name, NoteEventViewBuilder()))

  // Query-answering views, each wrapped in a backoff supervisor.
  val noteHistoryViewActor: ActorRef      = context.actorOf(backoffActor(NoteView.name, NoteView()))
  val noteEventHistoryViewActor: ActorRef = context.actorOf(backoffActor(NoteEventView.name, NoteEventView()))

  /** Forwards each query to its view, preserving the original sender. */
  override def receive: Receive = {
    case q: NoteQuery      => noteHistoryViewActor.forward(q)
    case q: NoteEventQuery => noteEventHistoryViewActor.forward(q)
  }
}
27 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/dao/model/NoteSql.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.dao.model
2 |
3 | import java.time.LocalDateTime
4 |
5 | import com.ingenuiq.note.command.note.Note
6 | import com.ingenuiq.note.command.note.NoteEvent.NoteCreated
7 | import com.ingenuiq.note.common.{ CorrelationId, NoteId }
8 |
/** Read-model row for the `notes` table. */
case class NoteSql(id: NoteId, title: Option[String], content: Option[String], lastModified: LocalDateTime, correlationId: CorrelationId)

object NoteSql {

  /** Projects a `NoteCreated` event onto a fresh table row. */
  def fromCreatedToSql(noteCreated: NoteCreated): NoteSql = {
    val meta = noteCreated.persistentEventMetadata
    toSql(note = noteCreated.note, lastModified = meta.created, correlationId = meta.correlationId)
  }

  // Flattens a domain Note plus audit fields into the row shape.
  private def toSql(note: Note, lastModified: LocalDateTime, correlationId: CorrelationId): NoteSql =
    NoteSql(id = note.id, title = note.title, content = note.content, lastModified = lastModified, correlationId = correlationId)

  /** Tupled constructor, used by Slick projection mappings. */
  val tupled: ((NoteId, Option[String], Option[String], LocalDateTime, CorrelationId)) => NoteSql = (this.apply _).tupled

}
26 |
--------------------------------------------------------------------------------
/src/main/resources/logback.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | false
7 |
8 | %date{ISO8601} %-5level %logger %X{akkaSource} %X{sourceThread} - %msg%n
9 |
10 |
11 |
12 |
13 | false
14 | target/note.log
15 | true
16 |
17 | %date{ISO8601} %-5level %logger %X{akkaSource} %X{sourceThread} - %msg%n
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
--------------------------------------------------------------------------------
/src/test/scala/com/ingenuiq/note/command/InMemoryPersistenceBaseTrait.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.command
2 |
3 | import akka.actor.ActorSystem
4 | import akka.testkit.{ ImplicitSender, TestKit }
5 | import com.typesafe.config.{ ConfigFactory, ConfigValueFactory }
6 | import org.scalatest.BeforeAndAfterAll
7 | import org.scalatest.matchers.should.Matchers
8 | import org.scalatest.wordspec.AnyWordSpecLike
9 |
10 | import scala.concurrent.duration.{ FiniteDuration, SECONDS }
11 |
/**
 * Base for actor tests that need akka-persistence: swaps the configured journal and
 * snapshot store for in-memory implementations so no external store is required.
 */
abstract class InMemoryPersistenceBaseTrait
    extends TestKit(
      ActorSystem(
        "e-portal-test",
        ConfigFactory
          .load()
          .withValue("akka.persistence.journal.plugin", ConfigValueFactory.fromAnyRef("inmemory-journal"))
          .withValue("akka.persistence.snapshot-store.plugin", ConfigValueFactory.fromAnyRef("inmemory-snapshot-store"))
      )
    )
    with AnyWordSpecLike
    with Matchers
    with BeforeAndAfterAll
    with ImplicitSender {

  // Tear down the test actor system when the suite finishes.
  override def afterAll(): Unit =
    TestKit.shutdownActorSystem(system)

  // Upper bound used by specs when expecting messages.
  val waitToMessageTimeout = FiniteDuration(1, SECONDS)

}
33 |
--------------------------------------------------------------------------------
/src/main/resources/avro/avsc/NoteCreatedAvro.avsc:
--------------------------------------------------------------------------------
1 | {
2 | "type" : "record",
3 | "name" : "NoteCreatedAvro",
4 | "namespace" : "com.ingenuiq.note.events",
5 | "fields" : [ {
6 | "name" : "metadata",
7 | "type" : {
8 | "type" : "record",
9 | "name" : "PersistentEventMetadataAvro",
10 | "namespace" : "com.ingenuiq.common",
11 | "fields" : [ {
12 | "name" : "correlationId",
13 | "type" : "string"
14 | }, {
15 | "name" : "eventId",
16 | "type" : "string"
17 | }, {
18 | "name" : "userId",
19 | "type" : "string"
20 | }, {
21 | "name" : "created",
22 | "type" : "long"
23 | }, {
24 | "name" : "spanId",
25 | "type" : "string"
26 | } ]
27 | }
28 | }, {
29 | "name" : "note",
30 | "type" : {
31 | "type" : "record",
32 | "name" : "NoteAvro",
33 | "fields" : [ {
34 | "name" : "id",
35 | "type" : "string"
36 | }, {
37 | "name" : "title",
38 | "type" : [ "null", "string" ]
39 | }, {
40 | "name" : "content",
41 | "type" : [ "null", "string" ]
42 | } ]
43 | }
44 | } ]
45 | }
46 |
--------------------------------------------------------------------------------
/src/main/resources/avro/avsc/NoteUpdatedAvro.avsc:
--------------------------------------------------------------------------------
1 | {
2 | "type" : "record",
3 | "name" : "NoteUpdatedAvro",
4 | "namespace" : "com.ingenuiq.note.events",
5 | "fields" : [ {
6 | "name" : "metadata",
7 | "type" : {
8 | "type" : "record",
9 | "name" : "PersistentEventMetadataAvro",
10 | "namespace" : "com.ingenuiq.common",
11 | "fields" : [ {
12 | "name" : "correlationId",
13 | "type" : "string"
14 | }, {
15 | "name" : "eventId",
16 | "type" : "string"
17 | }, {
18 | "name" : "userId",
19 | "type" : "string"
20 | }, {
21 | "name" : "created",
22 | "type" : "long"
23 | }, {
24 | "name" : "spanId",
25 | "type" : "string"
26 | } ]
27 | }
28 | }, {
29 | "name" : "id",
30 | "type" : "string"
31 | }, {
32 | "name" : "title",
33 | "type" : [ "null", {
34 | "type" : "record",
35 | "name" : "UpdatebleAvro",
36 | "fields" : [ {
37 | "name" : "value",
38 | "type" : [ "null", "string" ]
39 | } ]
40 | } ]
41 | }, {
42 | "name" : "content",
43 | "type" : [ "null", "UpdatebleAvro" ]
44 | } ]
45 | }
46 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/http/command/CommandResponse.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.http.command
2 |
3 | import com.ingenuiq.note.common.{ CorrelationId, NoteId }
4 | import com.ingenuiq.note.utils
5 | import play.api.libs.json._
6 |
/** Marker for all responses returned by the command-side HTTP API. */
sealed trait CommandResponse

object CommandResponse {

  /** Acknowledges note creation; the correlation id defaults to the current trace id so clients can correlate. */
  case class NoteCreationResponse(noteId: NoteId, correlationId: CorrelationId = CorrelationId(utils.currentTraceId))
      extends CommandResponse

  object NoteCreationResponse {
    implicit val writes: Writes[NoteCreationResponse] = Json.writes[NoteCreationResponse]
  }

  /** Acknowledges a note update; correlation id defaults to the current trace id. */
  case class NoteUpdateResponse(noteId: NoteId, correlationId: CorrelationId = CorrelationId(utils.currentTraceId)) extends CommandResponse

  object NoteUpdateResponse {
    implicit val writes: Writes[NoteUpdateResponse] = Json.writes[NoteUpdateResponse]
  }

  /** Acknowledges a note deletion; correlation id defaults to the current trace id. */
  case class NoteDeletionResponse(noteId: NoteId, correlationId: CorrelationId = CorrelationId(utils.currentTraceId))
      extends CommandResponse

  object NoteDeletionResponse {
    implicit val writes: Writes[NoteDeletionResponse] = Json.writes[NoteDeletionResponse]
  }

}
32 |
--------------------------------------------------------------------------------
/project/Settings.scala:
--------------------------------------------------------------------------------
1 | import org.scalafmt.sbt.ScalafmtPlugin.autoImport.{ scalafmtOnCompile, _ }
2 | import sbt.{ Compile, Resolver, Setting, URL }
3 | import sbt.Keys._
4 |
object Settings {

  /** Compiler flags shared by all modules; disabled flags are kept commented for reference. */
  private val compilerFlags: Seq[String] = Seq(
    "-unchecked",
    "-feature",
    "-deprecation",
    "-encoding",
    "utf8",
    // "-Xfatal-warnings", // Fail the compilation if there are any warnings.
    "-Xlint:infer-any", // Warn when a type argument is inferred to be `Any`.
    "-Xlint:missing-interpolator", // A string literal appears to be missing an interpolator id.
    "-Xlint:package-object-classes", // Class or object defined in package object.
    "-Xlint:adapted-args" // Warn if an argument list is modified to match the receiver.
    // "-Ywarn-value-discard" // Warn when non-Unit expression results are unused.
  )

  val settings: Seq[Setting[_]] = Seq(
    scalaVersion := "2.13.6",
    scalacOptions := compilerFlags,
    // The REPL trips over unused-import / fatal-warning flags, so strip them for `console`.
    Compile / console / scalacOptions ~= (_.filterNot(Set("-Ywarn-unused:imports", "-Xfatal-warnings"))),
    Compile / scalafmtOnCompile := true,
    publishMavenStyle := false
  )
}
27 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/dao/schema/NoteTableDefinition.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.dao.schema
2 |
3 | import java.time.LocalDateTime
4 |
5 | import com.ingenuiq.note.common.{ CorrelationId, NoteId }
6 | import com.ingenuiq.note.query.dao.common.DbTypeMappers
7 | import com.ingenuiq.note.query.dao.model.NoteSql
8 | import slick.lifted.ProvenShape
9 |
10 | import scala.language.higherKinds
11 |
/** Slick schema definition for the `notes` read-side table. */
trait NoteTableDefinition extends DbTypeMappers {

  import com.ingenuiq.note.query.dao.common.DBComponent.driver.api._

  // Query entry point for the `notes` table.
  val notes = TableQuery[NoteTable]

  final class NoteTable(slickTag: Tag) extends Table[NoteSql](slickTag, _tableName = "notes") {

    // Column mappings; custom id/correlation types are handled by DbTypeMappers.
    val id:            Rep[NoteId]         = column[NoteId]("id", O.PrimaryKey)
    val title:         Rep[Option[String]] = column[Option[String]]("title")
    val content:       Rep[Option[String]] = column[Option[String]]("content")
    val lastModified:  Rep[LocalDateTime]  = column[LocalDateTime]("last_modified")
    val correlationId: Rep[CorrelationId]  = column[CorrelationId]("correlation_id")

    // Default projection onto the NoteSql case class (field order must match).
    def * = (id, title, content, lastModified, correlationId).mapTo[NoteSql]

  }

}
31 |
--------------------------------------------------------------------------------
/docker/elk.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 |
5 | elasticsearch:
6 | environment:
7 | http.host: 0.0.0.0
8 | transport.host: 127.0.0.1
9 | image: docker.elastic.co/elasticsearch/elasticsearch:7.13.1
10 | ports:
11 | - 9200:9200
12 | restart: unless-stopped
13 | volumes:
14 | - elasticsearch:/usr/share/elasticsearch/data:rw
15 |
16 | filebeat:
17 | image: docker.elastic.co/beats/filebeat:7.13.1
18 | user: root
19 | depends_on:
20 | - elasticsearch
21 | command: filebeat -e -E output.elasticsearch.username=elastic -E output.elasticsearch.password=changeme -strict.perms=false
22 | hostname: filebeat
23 | restart: unless-stopped
24 | volumes:
25 | - ./filebeat.yml:/usr/share/filebeat/filebeat.yml
26 | - ../log:/opt/docker/log
27 |
28 | kibana:
29 | depends_on:
30 | - elasticsearch
31 | environment:
32 | ELASTICSEARCH_PASSWORD: changeme
33 | ELASTICSEARCH_URL: http://elasticsearch:9200
34 | ELASTICSEARCH_USERNAME: elastic
35 | image: docker.elastic.co/kibana/kibana:7.13.1
36 | ports:
37 | - 5601:5601
38 | restart: unless-stopped
39 |
40 | volumes:
41 | elasticsearch:
42 | driver: local
43 | app-logs:
44 |
--------------------------------------------------------------------------------
/docker/filebeat.yml:
--------------------------------------------------------------------------------
1 | filebeat.inputs:
2 | - type: log
3 | enabled: true
4 | paths:
5 | - /opt/docker/log/*.log
6 | json.keys_under_root: true
7 | exclude_lines: [ "^\\s+[\\-`('.|_]" ] # Drop ASCII Art
8 | json.add_error_key: true
9 | json.ignore_decoding_error: true
10 | json.message_key: message
11 | json.overwrite_keys: false
12 | combine_partial: true
13 | processors:
14 | - timestamp:
15 | field: timestamp
16 | ignore_missing: true
17 | ignore_failure: true
18 | layouts:
19 | - '2006-01-02T15:04:05.000-0700'
20 | test:
21 | - '2021-06-04T15:01:50.382+0200'
22 |
23 | setup.template:
24 | enabled: true
25 | overwrite: true
26 | name: "filebeat-template"
27 | pattern: "filebeat-*"
28 | settings:
29 | # A dictionary of settings to place into the settings.index dictionary
30 | # of the Elasticsearch template. For more details, please check
31 | # https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping.html
32 | index:
33 | number_of_shards: 1
34 | refresh_interval: 10s
35 | number_of_replicas: 1
36 | codec: best_compression
37 |
38 | output.elasticsearch:
39 | hosts: ['elasticsearch:9200']
40 | index: "note-%{+yyyy.MM}"
41 | username: elastic
42 | password: changeme
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/dao/repos/PersistenceOffsetRepo.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.dao.repos
2 |
3 | import com.ingenuiq.note.query.dao.model.PersistenceOffset
4 | import com.ingenuiq.note.query.dao.schema.PersistenceOffsetTableDefinition
5 | import com.typesafe.scalalogging.LazyLogging
6 |
7 | import scala.concurrent.{ ExecutionContext, Future }
8 |
/** Slick-backed storage for per-projection resume offsets. */
class PersistenceOffsetRepo(implicit ec: ExecutionContext) extends PersistenceOffsetTableDefinition with LazyLogging {

  import com.ingenuiq.note.query.dao.common.DBComponent.db
  import com.ingenuiq.note.query.dao.common.DBComponent.driver.api._

  /** Inserts or replaces the stored offset row; yields the affected row count. */
  def upsert(po: PersistenceOffset): Future[Int] = {
    logger.trace(s"Updating persistence offset ${po.id} to ${po.offset}")
    val action = persistenceOffsets.insertOrUpdate(po)
    db.run(action)
  }

  /** Looks up the offset for a persistence id, defaulting to offset 0 when none is stored yet. */
  def getByPersistenceId(persistenceId: String): Future[PersistenceOffset] = {
    logger.trace(s"Get offset for $persistenceId")
    val rows = db.run(persistenceOffsets.filter(_.persistenceId === persistenceId).result)
    rows.map(_.headOption.getOrElse(PersistenceOffset(persistenceId, 0)))
  }

}

object PersistenceOffsetRepo {
  /** Convenience constructor threading the caller's execution context. */
  def apply()(implicit ec: ExecutionContext): PersistenceOffsetRepo = new PersistenceOffsetRepo
}
30 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/dao/schema/NoteEventTableDefinition.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.dao.schema
2 |
3 | import java.time.LocalDateTime
4 |
5 | import com.ingenuiq.note.common.{ CorrelationId, EventId, NoteId, UserId }
6 | import com.ingenuiq.note.query.dao.common.DbTypeMappers
7 | import com.ingenuiq.note.query.dao.model.NoteEventSql
8 |
9 | import scala.language.higherKinds
10 |
/** Slick schema definition for the `note_events` read-side table. */
trait NoteEventTableDefinition extends DbTypeMappers {

  import com.ingenuiq.note.query.dao.common.DBComponent.driver.api._

  // Query entry point for the `note_events` table.
  val noteEvents = TableQuery[NoteEventTable]

  final class NoteEventTable(slickTag: Tag) extends Table[NoteEventSql](slickTag, _tableName = "note_events") {

    // Column mappings; custom id types are handled by DbTypeMappers.
    val eventId:       Rep[EventId]       = column[EventId]("event_id", O.PrimaryKey)
    val userId:        Rep[UserId]        = column[UserId]("user_id")
    val noteId:        Rep[NoteId]        = column[NoteId]("note_id")
    val eventName:     Rep[String]        = column[String]("event_name")
    val lastModified:  Rep[LocalDateTime] = column[LocalDateTime]("last_modified")
    val correlationId: Rep[CorrelationId] = column[CorrelationId]("correlation_id")

    // Default projection onto the NoteEventSql case class (field order must match).
    def * = (eventId, userId, noteId, eventName, lastModified, correlationId).mapTo[NoteEventSql]

  }

}
31 |
--------------------------------------------------------------------------------
/src/main/resources/environment.conf:
--------------------------------------------------------------------------------
1 | services {
2 |
3 | default-host = "127.0.0.1"
4 | default-host = ${?SERVICES_HOST}
5 |
6 | cassandra {
7 | default-port = 9042
8 | contact-points = ["localhost"]
9 | contact-points = ${?CASSANDRA_CONTACT_POINTS}
10 | username = cassandra
11 | username = ${?CASSANDRA_USERNAME}
12 | password = cassandra
13 | password = ${?CASSANDRA_PASSWORD}
14 | keyspace-prefix = "local"
15 | keyspace-prefix = ${?CASSANDRA_KEYSPACE_PREFIX}
16 | replication-factor = 1
17 | replication-factor = ${?CASSANDRA_REPLICATION_FACTOR}
18 | }
19 |
20 | }
21 |
22 | rdbms = {
23 | keepAliveConnection = true
24 | dataSourceClass = "slick.jdbc.DatabaseUrlDataSource"
25 | properties {
26 | driver = "org.postgresql.Driver"
27 | url = "jdbc:postgresql://localhost:5434/postgres"
28 | url = ${?POSTGRES_URL}
29 | user = "postgres"
30 | user = ${?POSTGRES_USER}
31 | password = "postgres"
32 | password = ${?POSTGRES_PW}
33 |
34 |
35 | # https://github.com/slick/slick/blob/3.2.3/slick/src/main/scala/slick/util/AsyncExecutor.scala#L105
36 | numThreads = 5
37 | numThreads = ${?POSTGRES_NUM_THREADS}
38 | minConnections = 0
39 | minConnections = ${?POSTGRES_MIN_CONNECTIONS}
40 | maxConnections = 5
41 | maxConnections = ${?POSTGRES_MAX_CONNECTIONS}
42 | }
43 |
44 | }
45 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/dao/model/NoteEventSql.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.dao.model
2 |
3 | import java.time.LocalDateTime
4 |
5 | import com.ingenuiq.note.command.note.NoteEvent.{ NoteCreated, NoteDeleted, NoteUpdated }
6 | import com.ingenuiq.note.command.note.PersistentNoteEvent
7 | import com.ingenuiq.note.common.{ CorrelationId, EventId, NoteId, UserId }
8 |
/** Read-model row for the `note_events` table. */
case class NoteEventSql(eventId: EventId,
                        userId: UserId,
                        noteId: NoteId,
                        eventName: String,
                        lastModified: LocalDateTime,
                        correlationId: CorrelationId)

object NoteEventSql {

  /** Flattens a persistent note event into a table row with a human-readable event name. */
  def toSql(pne: PersistentNoteEvent): NoteEventSql = {
    val meta = pne.persistentEventMetadata
    val name = pne match {
      case _: NoteCreated => "Note created"
      case _: NoteUpdated => "Note updated"
      case _: NoteDeleted => "Note deleted"
    }
    NoteEventSql(meta.eventId, meta.userId, pne.noteId, name, meta.created, meta.correlationId)
  }

  /** Tupled constructor, used by Slick projection mappings. */
  val tupled: ((EventId, UserId, NoteId, String, LocalDateTime, CorrelationId)) => NoteEventSql = (this.apply _).tupled

}
38 |
--------------------------------------------------------------------------------
/src/test/scala/com/ingenuiq/note/utils/ClassUtils.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.utils
2 |
3 | import java.io.File
4 | import java.net.URLClassLoader
5 |
6 | import org.clapper.classutil.{ ClassFinder, ClassInfo }
7 |
/** Reflection helpers for discovering implementations of a type on the classpath (test-side use). */
trait ClassUtils {

  // Loads the JVM Class for a discovered ClassInfo entry.
  def toClass: ClassInfo => Class[_] = impl => Class.forName(impl.name)

  // Drops the trailing character of the runtime class name (e.g. the '$' of a companion object).
  def removeLastSymbolFromName(o: Any): String = o.getClass.getName.dropRight(1)

  /**
   * Scans the current classpath for concrete implementations of `clazz`, optionally
   * restricting the scan to classpath entries whose path contains `filter`.
   *
   * NOTE(review): the fallback branch casts the parent loader to URLClassLoader; on
   * JDK 9+ the application class loader is no longer a URLClassLoader, so this cast
   * would throw — confirm the JDK this test helper is expected to run on.
   */
  def implementationsOf(clazz: Class[_], filter: Option[String] = None): List[ClassInfo] = {
    val classFiles = ClassFinder(
      (Thread.currentThread().getContextClassLoader match {
        case classLoader: URLClassLoader => classLoader.getURLs
        case classLoader => classLoader.getParent.asInstanceOf[URLClassLoader].getURLs
      }).toList
        .filter(x => filter.forall(x.getPath.contains))
        // Classpath entries may be URL-encoded; undo encoded spaces before touching the filesystem.
        .map(x => new File(x.getPath.replaceAll("%20", " ")))
    )

    findImplementations(clazz.getName, ClassFinder.classInfoMap(classFiles.getClasses().iterator))
  }

  // Walks the superclass/interface graph of each concrete class to decide whether it descends from `ancestor`.
  private def findImplementations(ancestor: String, classes: Map[String, ClassInfo]): List[ClassInfo] =
    classes.get(ancestor).fold(List.empty[ClassInfo]) { ancestorInfo =>
      def compare(info: ClassInfo): Boolean =
        info.name == ancestorInfo.name || (info.superClassName :: info.interfaces).exists(n => classes.get(n).exists(compare))

      classes.valuesIterator.filter(info => info.isConcrete && compare(info)).toList
    }
}
35 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/common/BaseViewActor.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.common
2 |
3 | import akka.actor._
4 | import com.ingenuiq.note.common.PredefinedTimeout
5 | import com.ingenuiq.note.query.model.{ FailureResult, FailureType, FullResult, ServiceResult }
6 | import com.typesafe.scalalogging.LazyLogging
7 |
8 | import scala.concurrent.Future
9 |
10 | /**
11 | * Base actor definition for other actors in the note app to extend from
12 | */
/**
 * Base actor definition for other actors in the note app to extend from
 */
trait BaseViewActor extends Actor with LazyLogging with PredefinedTimeout {
  import akka.pattern.pipe
  import context.dispatcher

  // Maps any exception from a failed Future into a service-level FailureResult.
  private val asFailure: PartialFunction[Throwable, ServiceResult[Nothing]] = {
    case ex => FailureResult(FailureType.Service, ServiceResult.UnexpectedFailure, Some(ex))
  }

  /**
   * Pipes the response from a request to a service actor back to the sender, first
   * converting to a ServiceResult per the contract of communicating with a note service
   * @param f The Future to map the result from into a ServiceResult
   */
  def pipeResponse[T](f: Future[T], msgSender: ActorRef = sender()): Unit = {
    val normalized = f.map {
      case opt: Option[_]      => ServiceResult.fromOption(opt)
      case fail: FailureResult => fail
      case other               => FullResult(other)
    }
    normalized.recover(asFailure).pipeTo(msgSender)
    ()
  }
}
37 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/common/ResumableProjection.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.common
2 |
3 | import akka.actor.{ ActorSystem, ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider }
4 | import akka.persistence.query.Offset
5 | import com.ingenuiq.note.query.dao.model.PersistenceOffset
6 | import com.ingenuiq.note.query.dao.repos.PersistenceOffsetRepo
7 |
8 | import scala.concurrent.Future
9 |
/**
 * Contract for a projection that persists its journal offset so it can resume
 * where it left off after a restart.
 *
 * NOTE(review): `identifier` is never referenced in this file and the companion
 * `apply` ignores it too — confirm whether the parameter is still needed.
 */
abstract class ResumableProjection(identifier: String) {
  def storeLatestOffset(offset: Offset): Future[Boolean]
  def fetchLatestOffset: Future[Offset]
}
14 |
object ResumableProjection {

  // NOTE(review): `identifier` is accepted but unused, and the returned
  // DBProjectionStorageExt does not extend ResumableProjection — callers receive the
  // extension's own API (updateOffset/fetchLatestOffset(identifier)). Confirm intended.
  def apply(identifier: String, system: ActorSystem) =
    new DBProjectionStorageExt(system)
}
20 |
/** Akka extension persisting projection offsets through PersistenceOffsetRepo. */
class DBProjectionStorageExt(system: ActorSystem) extends Extension {

  import system.dispatcher

  // Repository holding one offset row per projection identifier.
  val persistenceSequenceNrRepo: PersistenceOffsetRepo = PersistenceOffsetRepo()

  /** Upserts the latest processed offset; true when at least one row was affected. */
  def updateOffset(identifier: String, offset: Long): Future[Boolean] =
    persistenceSequenceNrRepo.upsert(PersistenceOffset(identifier, offset)).map(_ > 0)

  /** Reads the stored offset; the repo defaults to 0 when the projection has never stored one. */
  def fetchLatestOffset(identifier: String): Future[Long] =
    persistenceSequenceNrRepo.getByPersistenceId(identifier).map(_.offset)
}
33 |
/** ExtensionId giving one DBProjectionStorageExt instance per actor system. */
object DBProjectionStorage extends ExtensionId[DBProjectionStorageExt] with ExtensionIdProvider {
  override def lookup: DBProjectionStorage.type = DBProjectionStorage

  override def createExtension(system: ExtendedActorSystem) =
    new DBProjectionStorageExt(system)
}
40 |
--------------------------------------------------------------------------------
/project/DockerSettings.scala:
--------------------------------------------------------------------------------
1 | import com.typesafe.sbt.GitPlugin.autoImport._
2 | import com.typesafe.sbt.SbtNativePackager.Docker
3 | import com.typesafe.sbt.SbtNativePackager.autoImport._
4 | import com.typesafe.sbt.packager.docker.Cmd
5 | import com.typesafe.sbt.packager.docker.DockerPlugin.autoImport._
6 | import com.typesafe.sbt.packager.linux.LinuxPlugin.autoImport._
7 | import java.time.Clock
8 | import sbt.Def._
9 | import sbt.Keys._
10 |
/** sbt-native-packager Docker image configuration. */
object DockerSettings {

  lazy val settings: Seq[Setting[_]] = Seq(
    // Run as the unprivileged uid 65534 ("nobody").
    Docker / daemonUser := "65534",
    // Tag priority: DOCKER_IMAGE_TAG env var, then `git describe`, then the sbt version.
    dockerAlias := DockerAlias(
      dockerRepository.value,
      dockerUsername.value,
      packageName.value,
      Option(sys.env.getOrElse("DOCKER_IMAGE_TAG", git.gitDescribedVersion.value.getOrElse(version.value)))
    ),
    dockerBaseImage := "openjdk:8-jre-alpine",
    // Always pull the latest base image when building.
    dockerBuildOptions ++= Seq("--pull"),
    // Inject a `RUN apk add tini` right after the EXPOSE instruction in the generated Dockerfile.
    dockerCommands := {
      dockerCommands.value.flatMap {
        case eq @ Cmd("EXPOSE", _) =>
          Seq(eq, Cmd("RUN", "apk add --no-cache tini"))
        case other => Seq(other)
      }
    },
    // Wrap the entrypoint in tini so signals/zombies are handled correctly (PID 1).
    dockerEntrypoint := Seq("tini") ++ dockerEntrypoint.value,
    dockerExposedPorts := Seq(8080),
    // Build provenance labels baked into the image.
    dockerLabels := Map(
      "BUILD_BRANCH" -> git.gitCurrentBranch.value,
      "BUILD_COMMIT" -> git.gitHeadCommit.value.getOrElse(""),
      "BUILD_TIME" -> Clock.systemUTC().instant().toString,
      "SERVICE_NAME" -> sys.env.getOrElse("SERVICE_NAME", name.value)
    )
  )
}
40 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/command/note/NoteProtocol.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.command.note
2 |
3 | import com.ingenuiq.note.common.{ EventId, NoteId, UserId }
4 | import com.ingenuiq.note.query.common.{ Command, Event, PersistentEvent, PersistentEventMetadata }
5 |
// Commands accepted by the note aggregate (write side of CQRS).
sealed trait NoteCommand extends Command

object NoteCommand {

  // Request to create `note` on behalf of `userId`.
  case class CreateNote(userId: UserId, note: Note) extends NoteCommand

  // Request to apply the field values of `note` to an existing note.
  case class UpdateNote(userId: UserId, note: Note) extends NoteCommand

  // Request to delete the note identified by `noteId`.
  case class DeleteNote(userId: UserId, noteId: NoteId) extends NoteCommand

}
17 |
// Events produced by the note aggregate.
sealed trait NoteEvent extends Event

// Transient outcome events replied to the caller (not persisted).
sealed trait NoteQueryEvent extends NoteEvent

// Events written to the journal; each carries the id of the affected note.
sealed trait PersistentNoteEvent extends NoteEvent with PersistentEvent {
  def noteId: NoteId
}

object NoteEvent {

  case class NoteCreated(persistentEventMetadata: PersistentEventMetadata, note: Note) extends PersistentNoteEvent {
    override def noteId: NoteId = note.id
  }

  case class NoteDeleted(persistentEventMetadata: PersistentEventMetadata, noteId: NoteId) extends PersistentNoteEvent

  // NOTE(review): the nested Option presumably encodes "field untouched" (None)
  // vs "field set to a value / cleared" (Some(Some(v)) / Some(None)) — confirm
  // against the update handling in NoteAggregateActor.
  case class NoteUpdated(persistentEventMetadata: PersistentEventMetadata,
                         noteId: NoteId,
                         title: Option[Option[String]],
                         content: Option[Option[String]])
      extends PersistentNoteEvent

  // Update was a no-op: nothing differed from current state.
  case object NoteNoChangesToUpdateFound extends NoteQueryEvent
  // Creation rejected: a note with that id already exists.
  case object NoteAlreadyExists extends NoteQueryEvent
  // Update/delete rejected: no note with that id.
  case object NoteNotFound extends NoteQueryEvent

}
45 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/utils/package.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note
2 |
3 | import java.time._
4 |
package object utils {

  /** Time zone used for all service timestamps. */
  val defaultZone: ZoneId = ZoneId.of("UTC")

  /** Current wall-clock time in the given zone (UTC unless overridden). */
  def now(zoneId: ZoneId = defaultZone): LocalDateTime = LocalDateTime.now(zoneId)

  // Tracing accessors are stubbed: each returns the would-be Kamon expression
  // as a literal string. The commented lines below hold the real calls.
  def currentSpanId: String       = "Kamon.currentSpan().context().spanID.string"
  def currentParentSpanId: String = "Kamon.currentSpan().context().parentID.string"
  def currentTraceId: String      = "Kamon.currentSpan().context().traceID.string"
  // def currentSpanId: String = Kamon.currentSpan().context().spanID.string
  // def currentParentSpanId: String = Kamon.currentSpan().context().parentID.string
  // def currentTraceId: String = Kamon.currentSpan().context().traceID.string

  /** Epoch milliseconds of `date`, interpreting it as UTC. */
  def localDateTimeToLong(date: LocalDateTime): Long = date.toInstant(ZoneOffset.UTC).toEpochMilli

  /** UTC date-time for the given epoch milliseconds. */
  def longToLocalDateTime(date: Long): LocalDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(date), ZoneOffset.UTC)

  /** Epoch milliseconds of midnight (UTC) on the given date. */
  def localDateToLong(date: LocalDate): Long = localDateTimeToLong(date.atStartOfDay)

  /** UTC calendar date for the given epoch milliseconds. */
  def longToLocalDate(date: Long): LocalDate = longToLocalDateTime(date).toLocalDate

  def bigDecimalToString(value: BigDecimal): String = value.toString
  def stringToBigDecimal(value: String): BigDecimal = BigDecimal(value)

  /** Merges pairs sharing a key into one entry per key; for duplicate keys the
    * values of later pairs are placed before the previously accumulated ones.
    */
  def coalesce[B, A](kv: Seq[(B, Seq[A])]): Seq[(B, Seq[A])] = {
    val merged = kv.foldLeft(Map.empty[B, Seq[A]]) { (acc, pair) =>
      val (key, values) = pair
      acc.updated(key, values ++ acc.getOrElse(key, Seq.empty[A]))
    }
    merged.toVector
  }

}
37 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/dao/common/DbTypeMappers.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.dao.common
2 |
3 | import java.time._
4 | import java.util.UUID
5 |
6 | import com.ingenuiq.note.common.{ CorrelationId, EventId, NoteId, UserId }
7 | import slick.jdbc.JdbcType
8 |
/** Slick column mappers for the project's value classes and java.time types.
  * Dates are stored as epoch milliseconds (UTC) in BIGINT columns.
  */
trait DbTypeMappers {
  import DBComponent.driver.api._

  private def localDateTimeToLong(date: LocalDateTime): Long  = date.toInstant(ZoneOffset.UTC).toEpochMilli
  private def localDateToLong(date: LocalDate): Long          = date.atStartOfDay.toInstant(ZoneOffset.UTC).toEpochMilli
  private def longToLocalDateTime(millis: Long): LocalDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC)
  private def longToLocalDate(millis: Long): LocalDate         = longToLocalDateTime(millis).toLocalDate

  /** LocalDateTime <-> epoch millis. */
  implicit val dateTimeConverter: JdbcType[LocalDateTime] =
    MappedColumnType.base[LocalDateTime, Long](localDateTimeToLong, longToLocalDateTime)

  /** LocalDate <-> epoch millis at UTC midnight. */
  implicit val dateConverter: JdbcType[LocalDate] =
    MappedColumnType.base[LocalDate, Long](localDateToLong, longToLocalDate)

  /** NoteId value class <-> its underlying UUID column. */
  implicit val noteIdConverter: JdbcType[NoteId] =
    MappedColumnType.base[NoteId, UUID](_.value, NoteId(_))

  /** UserId value class <-> its underlying String column. */
  implicit val userIdConverter: JdbcType[UserId] =
    MappedColumnType.base[UserId, String](_.value, UserId(_))

  /** EventId value class <-> its underlying UUID column. */
  implicit val eventIdConverter: JdbcType[EventId] =
    MappedColumnType.base[EventId, UUID](_.value, EventId(_))

  /** CorrelationId value class <-> its underlying String column. */
  implicit val correlationIdConverter: JdbcType[CorrelationId] =
    MappedColumnType.base[CorrelationId, String](_.value, CorrelationId(_))

}
38 |
--------------------------------------------------------------------------------
/docker/docker-compose-dev.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 |
5 | postgres:
6 | image: postgres
7 | restart: unless-stopped
8 | environment:
9 | POSTGRES_DB: postgres
10 | POSTGRES_USER: postgres
11 | POSTGRES_PASSWORD: postgres
12 | ports:
13 | - 5434:5432
14 |
15 | cassandra:
16 | image: cassandra:latest
17 | ports:
18 | - "9042:9042" # native protocol clients
19 |
20 | zipkin:
21 | image: openzipkin/zipkin
22 | environment:
23 | - STORAGE_TYPE=elasticsearch
24 | - ES_HOSTS=elasticsearch
25 | # Uncomment to see requests to and from elasticsearch
26 | # - ES_HTTP_LOGGING=BODY
27 | ports:
28 | - 9411:9411
29 |
30 | elasticsearch:
31 | environment:
32 | http.host: 0.0.0.0
33 | transport.host: 127.0.0.1
34 | image: docker.elastic.co/elasticsearch/elasticsearch:6.2.4
35 | ports:
36 | - 9200:9200
37 | restart: unless-stopped
38 |
39 | filebeat:
40 | image: docker.elastic.co/beats/filebeat:6.2.4
41 | user: root
42 | depends_on:
43 | - elasticsearch
44 | command: filebeat -e -E output.elasticsearch.username=elastic -E output.elasticsearch.password=changeme -strict.perms=false
45 | hostname: filebeat
46 | restart: unless-stopped
47 | volumes:
48 | - ./filebeat.yml:/usr/share/filebeat/filebeat.yml
49 | - ../log:/opt/docker/log
50 |
51 | kibana:
52 | depends_on:
53 | - elasticsearch
54 | environment:
55 | ELASTICSEARCH_PASSWORD: changeme
56 | ELASTICSEARCH_URL: http://elasticsearch:9200
57 | ELASTICSEARCH_USERNAME: elastic
58 | image: docker.elastic.co/kibana/kibana:6.2.4
59 | ports:
60 | - 5601:5601
61 | restart: unless-stopped
62 |
63 | volumes:
64 | grafana_data:
65 | assetserver-images:
66 | assetserver-backgrounds:
67 |
68 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/events/NoteEventsViewActor.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.events
2 |
3 | import akka.actor.Props
4 | import com.ingenuiq.note.command.note.{ NoteAggregateActor, PersistentNoteEvent }
5 | import com.ingenuiq.note.query.common.ViewBuilderActor.Action
6 | import com.ingenuiq.note.query.common.{ BaseViewActor, PersistedEventEnvelope, ViewBuilderActor }
7 | import com.ingenuiq.note.query.dao.model.NoteEventSql
8 | import com.ingenuiq.note.query.dao.repos.NoteEventRepo
9 | import com.ingenuiq.note.query.events.NoteEventQuery.GetNoteEvents
10 | import com.ingenuiq.note.query.events.NoteEventQueryResponse.NoteEventsFetchedResponse
11 |
object NoteEventViewBuilder {
  // Actor name used when spawning this view builder.
  val name = "noteEventViewBuilder"
  def apply() = Props(classOf[NoteEventViewBuilder])
}
16 |
// Projects persisted note events from the journal into the note-event read-side table.
class NoteEventViewBuilder extends ViewBuilderActor {
  // Journal stream of the note aggregate to project from.
  override def persistenceId: String = NoteAggregateActor.persistenceId

  // Identifier under which this projection's resume offset is stored.
  override def identifier: String = "NoteEventViewBuilder"

  import context.dispatcher
  val noteEventRepo: NoteEventRepo = new NoteEventRepo

  // NOTE(review): the match covers only PersistentNoteEvent; any other event
  // type appearing under this persistenceId would raise a MatchError —
  // presumably the journal contains only PersistentNoteEvent here; confirm
  // against NoteAggregateActor.
  override def actionFor(env: PersistedEventEnvelope): Action[_] =
    env.event match {
      case pne: PersistentNoteEvent =>
        () => noteEventRepo.upsertNoteEvent(NoteEventSql.toSql(pne))
    }
}
31 |
object NoteEventView {

  // Actor name used when spawning this view.
  val name = "noteEventView"

  def apply() = Props(classOf[NoteEventView])
}
38 |
/** Read-side actor answering note-event queries from the projection table. */
class NoteEventView extends BaseViewActor {

  import context.dispatcher

  val noteEventRepo: NoteEventRepo = new NoteEventRepo

  override def receive: Receive = {
    case _: GetNoteEvents =>
      logger.trace("Received request to fetch note events")
      val response = noteEventRepo.getNoteEvents.map { events =>
        logger.trace("Received response from repo to fetch note events")
        NoteEventsFetchedResponse(events)
      }
      pipeResponse(response)

  }
}
58 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/Main.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note
2 |
3 | import akka.actor.ActorSystem
4 | import akka.http.scaladsl.Http
5 | import com.ingenuiq.note.command.CommandSupervisorActor
6 | import com.ingenuiq.note.http.BaseRoutes
7 | import com.ingenuiq.note.query.QuerySupervisorActor
8 | import com.ingenuiq.note.query.dao.TableDefinitionCreator
9 | import com.ingenuiq.note.settings.Settings
10 | import com.typesafe.scalalogging.LazyLogging
11 |
12 | import scala.concurrent.{ ExecutionContext, Future }
13 | import scala.util.{ Failure, Success }
14 |
// Service entry point: boots the actor system, the read-side schema, the
// command/query actor hierarchies, and the HTTP listener.
// NOTE(review): extends App relies on DelayedInit initialization order; be
// careful when adding vals that other threads may observe before init completes.
object Main extends App with KamonInit with LazyLogging with BaseRoutes {

  implicit val system: ActorSystem = ActorSystem("note-actor-system")
  implicit val executionContext: ExecutionContext = system.dispatcher

  override val settings: Settings = Settings.conf

  // (Re)create the read-side SQL schema before the projections start.
  new TableDefinitionCreator().rebuildSchema(settings.rebuildReadside)

  // Write side (commands) and read side (queries) supervisor actors.
  override val commandActor = system.actorOf(CommandSupervisorActor(), name = "commandActor")
  override val queryActor = system.actorOf(QuerySupervisorActor(), name = "queryActor")

  // Zipkin reporting itself is currently disabled (reporter commented out);
  // this branch only logs which mode is configured.
  if (settings.tracingMonitoringSettings.zipkinEnabled) {
    logger.info("Zipkin tracing enabled")
    // Kamon.addReporter(new ZipkinReporter)
  }
  else
    logger.info("Zipkin tracing disabled")

  // Bind the HTTP listener with the combined command + query routes.
  private val bindingFutureHttp: Future[Http.ServerBinding] =
    Http()
      .newServerAt(settings.httpListenerSettings.interface, settings.httpListenerSettings.port)
      .bindFlow(routes(commandActor, queryActor))

  bindingFutureHttp.onComplete {
    case Success(_) =>
      logger.info(s"Server started on [${settings.httpListenerSettings.interface}:${settings.httpListenerSettings.port}]")
    case Failure(error) => logger.error(s"Error binding HTTP listener: $error")
  }

  // Graceful shutdown: unbind the listener first, then terminate the actor system.
  sys.addShutdownHook {
    bindingFutureHttp.flatMap(_.unbind()).onComplete(_ => system.terminate())
  }
}
49 |
// Placeholder for Kamon monitoring initialization; the actual Kamon.init()
// call is currently disabled (commented out below).
trait KamonInit {
  // Kamon.init()
}
53 |
--------------------------------------------------------------------------------
/src/main/resources/akka.conf:
--------------------------------------------------------------------------------
1 | include "environment.conf"
2 |
3 | akka {
4 | loggers = ["akka.event.slf4j.Slf4jLogger"]
5 | logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
6 | loglevel = INFO
7 |
8 | actor {
9 |
10 | serializers {
11 | common = "com.ingenuiq.note.command.persistence.CommonPersistenceSerializer"
12 | }
13 |
14 | serialization-bindings {
15 | "java.io.Serializable" = none
16 | "org.apache.avro.specific.SpecificRecordBase" = common
17 | "com.ingenuiq.note.query.common.PersistentEvent" = common
18 | }
19 | }
20 |
21 | persistence {
22 | journal.plugin = "cassandra-journal"
23 | snapshot-store.plugin = "cassandra-snapshot-store"
24 | }
25 |
26 | http.parsing.illegal-response-header-value-processing-mode = warn
27 | }
28 |
29 | cassandra-journal {
30 | contact-points = ${services.cassandra.contact-points}
31 | default-port = ${services.cassandra.default-port}
32 | keyspace = ${services.cassandra.keyspace-prefix}_note_akka_journal
33 | log-queries = on
34 | slow-query-latency-threshold-millis = 3000
35 | authentication {
36 | username = ${services.cassandra.username}
37 | password = ${services.cassandra.password}
38 | }
39 | replication-factor = ${services.cassandra.replication-factor}
40 |
41 | event-adapters {
42 | command-tagging = "com.ingenuiq.note.command.persistence.TaggingEventAdapter"
43 | }
44 |
45 | event-adapter-bindings {
46 | "org.apache.avro.specific.SpecificRecordBase" = [command-tagging]
47 | "com.ingenuiq.note.query.common.PersistentEvent" = [command-tagging]
48 | }
49 | }
50 |
51 | cassandra-snapshot-store {
52 | contact-points = ${services.cassandra.contact-points}
53 | default-port = ${services.cassandra.default-port}
54 | keyspace = ${services.cassandra.keyspace-prefix}_note_akka_snapshot
55 | authentication {
56 | username = ${services.cassandra.username}
57 | password = ${services.cassandra.password}
58 | }
59 | replication-factor = ${services.cassandra.replication-factor}
60 | }
61 |
62 | cassandra-query-journal {
63 | refresh-interval = 250ms
64 | }
--------------------------------------------------------------------------------
/project/AvroSupport.scala:
--------------------------------------------------------------------------------
1 | import java.util
2 |
3 | import org.apache.avro.tool.IdlToSchemataTool
4 | import sbt.Keys._
5 | import sbt._
6 | import sbtavrohugger.SbtAvrohugger.autoImport.{ avroScalaGenerateSpecific, avroScalaSpecificCustomTypes, avroSpecificSourceDirectories }
7 |
object AvroSupport {

  /** Sbt task that regenerates .avsc schema files from the .avdl definitions in
    * src/main/resources/avro and stages the results in git. The .avsc files are
    * the journal model for Akka persistence, so they are kept under version control.
    */
  val generateInternalAvsc = Def.task {
    import scala.util.control.NonFatal

    val s: TaskStreams = streams.value
    s.log.info("Generating .avsc schema files for Avro, used as journal model for Akka persistence...")

    val basePath = new java.io.File("").getAbsolutePath
    val avdlPath = new java.io.File(basePath + "/src/main/resources/avro")
    val avscPath = new java.io.File(basePath + "/src/main/resources/avro/avsc")

    // listFiles() returns null when the directory is missing or unreadable;
    // guard it instead of failing with a NullPointerException.
    val avdlFiles = Option(avdlPath.listFiles()).map(_.toList).getOrElse(Nil)
    avdlFiles.withFilter(_.getName.endsWith(".avdl")).foreach { file =>
      val sourceFilePath = avdlPath.getPath + "/" + file.getName
      val arglist        = util.Arrays.asList(sourceFilePath, avscPath.getPath)
      try {
        s.log.info(s"Generating .avsc files in ${avscPath.getPath} from $sourceFilePath")
        new IdlToSchemataTool().run(null, null, null, arglist)
      }
      catch {
        // NonFatal lets fatal VM errors (OOM, etc.) propagate untouched.
        case NonFatal(e) =>
          s.log.error("Exception during avsc generation " + e.getLocalizedMessage)
          throw e
      }
    }

    // Stage the generated schemas so schema history stays version-controlled.
    val gitAddCmd = sys.env.getOrElse("GIT_ADD_CMD", "git add *.avsc")
    s.log.info(s"Adding .avsc files to git")
    import scala.sys.process._
    s"$gitAddCmd $avscPath".!!
  }

  val avroSettings = Seq(
    // Regenerate the .avsc files before every compile.
    Compile / compile := (Compile / compile).dependsOn(generateInternalAvsc).value,
    // Generate Scala case classes from the .avsc schemas via sbt-avrohugger.
    Compile / sourceGenerators += (Compile / avroScalaGenerateSpecific).taskValue,
    Compile / avroSpecificSourceDirectories += (Compile / sourceDirectory).value / "resources" / "avro" / "avsc",
    // Represent Avro protocols as sealed ADTs and Avro arrays as Scala Lists.
    Compile / avroScalaSpecificCustomTypes := {
      avrohugger.format.SpecificRecord.defaultTypes.copy(protocol = avrohugger.types.ScalaADT, array = avrohugger.types.ScalaList)
    }
  )
}
48 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/serialization/PlayJsonSupport.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.serialization
2 |
3 | import akka.http.scaladsl.marshalling.{ Marshaller, ToEntityMarshaller }
4 | import akka.http.scaladsl.model.ContentTypeRange
5 | import akka.http.scaladsl.model.MediaType
6 | import akka.http.scaladsl.model.MediaTypes.`application/json`
7 | import akka.http.scaladsl.server.{ RejectionError, ValidationRejection }
8 | import akka.http.scaladsl.unmarshalling.{ FromEntityUnmarshaller, Unmarshaller }
9 | import akka.util.ByteString
10 | import play.api.libs.json.{ JsError, JsValue, Json, Reads, Writes }
11 | import scala.collection.immutable.Seq
12 |
object PlayJsonSupport extends PlayJsonSupport {

  /** Wraps a Play JSON `JsError` so it can travel as an exception through
    * Akka HTTP's rejection handling; the message is the error rendered as JSON.
    */
  final case class PlayJsonError(error: JsError) extends RuntimeException {

    override def getMessage: String = {
      val rendered = JsError.toJson(error)
      rendered.toString()
    }
  }
}
21 |
// Akka HTTP (un)marshalling support backed by Play JSON Reads/Writes instances.
trait PlayJsonSupport {
  import PlayJsonSupport._

  /** Content-type ranges accepted when unmarshalling request entities. */
  def unmarshallerContentTypes: Seq[ContentTypeRange] =
    mediaTypes.map(ContentTypeRange.apply)

  /** Media types offered when marshalling responses (application/json only). */
  def mediaTypes: Seq[MediaType.WithFixedCharset] =
    List(`application/json`)

  // Decodes entity bytes to a String using the entity's charset; an empty
  // entity is surfaced as NoContentException rather than a JSON parse error.
  private val jsonStringUnmarshaller =
    Unmarshaller.byteStringUnmarshaller
      .forContentTypes(unmarshallerContentTypes: _*)
      .mapWithCharset {
        case (ByteString.empty, _) => throw Unmarshaller.NoContentException
        case (data, charset) => data.decodeString(charset.nioCharset.name)
      }

  private val jsonStringMarshaller =
    Marshaller.oneOf(mediaTypes: _*)(Marshaller.stringMarshaller)

  /** Unmarshals a JSON entity to `A`; Reads failures surface as a
    * ValidationRejection carrying the underlying JsError.
    */
  implicit def unmarshaller[A: Reads]: FromEntityUnmarshaller[A] = {
    def read(json: JsValue) =
      implicitly[Reads[A]]
        .reads(json)
        .recoverTotal(e => throw RejectionError(ValidationRejection(JsError.toJson(e).toString, Some(PlayJsonError(e)))))
    jsonStringUnmarshaller.map(data => read(Json.parse(data)))
  }

  /** Marshals `A` to a JSON entity; the printer defaults to pretty-printing. */
  implicit def marshaller[A](implicit writes: Writes[A], printer: JsValue => String = Json.prettyPrint): ToEntityMarshaller[A] =
    jsonStringMarshaller.compose(printer).compose(writes.writes)
}
53 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/note/NoteViewActor.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.note
2 |
3 | import akka.actor.Props
4 | import com.ingenuiq.note.command.note.NoteAggregateActor
5 | import com.ingenuiq.note.command.note.NoteEvent._
6 | import com.ingenuiq.note.query.common.ViewBuilderActor.Action
7 | import com.ingenuiq.note.query.common.{ BaseViewActor, PersistedEventEnvelope, ViewBuilderActor }
8 | import com.ingenuiq.note.query.dao.repos.NoteRepo
9 | import com.ingenuiq.note.query.note.NoteQuery.{ GetNote, GetNotes }
10 | import com.ingenuiq.note.query.note.NoteQueryResponse.{ NoteFetchedResponse, NotesFetchedResponse }
11 |
object NoteViewBuilder {
  // Actor name used when spawning this view builder.
  val name = "noteViewBuilder"
  def apply() = Props(classOf[NoteViewBuilder])
}
16 |
// Projects persisted note events from the journal into the note read-side table.
class NoteViewBuilder extends ViewBuilderActor {
  // Journal stream of the note aggregate to project from.
  override def persistenceId: String = NoteAggregateActor.persistenceId

  // Identifier under which this projection's resume offset is stored.
  override def identifier: String = "NoteViewBuilder"

  import context.dispatcher
  val noteRepo: NoteRepo = new NoteRepo

  // NOTE(review): the match covers only the three persistent note events; any
  // other event type appearing in this stream would raise a MatchError —
  // presumably the journal contains only these; confirm in NoteAggregateActor.
  override def actionFor(env: PersistedEventEnvelope): Action[_] =
    env.event match {
      case ur: NoteCreated =>
        () => noteRepo.insertNote(ur)
      case ur: NoteUpdated =>
        () => noteRepo.updateNote(ur)
      case ur: NoteDeleted =>
        () => noteRepo.removeNote(ur)
    }
}
35 |
object NoteView {

  // NOTE(review): actor name is "noteHistoryView" while the class is NoteView —
  // possibly a leftover; confirm before renaming, it is part of the actor path.
  val name = "noteHistoryView"

  def apply() = Props(classOf[NoteView])
}
42 |
/** Read-side actor answering note queries backed by the note projection table. */
class NoteView extends BaseViewActor {

  import context.dispatcher

  val noteRepo: NoteRepo = new NoteRepo

  override def receive: Receive = {
    case _: GetNotes =>
      logger.trace("Received request to fetch notes")
      val listResponse = noteRepo.getNotes.map { rows =>
        logger.trace("Received response from repo to fetch notes")
        NotesFetchedResponse(rows)
      }
      pipeResponse(listResponse)

    case query: GetNote =>
      logger.trace("Received request to fetch note")
      val singleResponse = noteRepo.getNote(query.noteId).map { row =>
        logger.trace("Received response from repo to fetch note")
        NoteFetchedResponse(row)
      }
      pipeResponse(singleResponse)
  }
}
68 |
--------------------------------------------------------------------------------
/src/it/scala/com/ingenuiq/note/integration/utils/PlayJsonSupportReaders.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.integration.utils
2 |
3 | import java.util.UUID
4 |
5 | import com.ingenuiq.note.common.{ CorrelationId, NoteId, UserId }
6 | import com.ingenuiq.note.http.command.CommandRequest.NotePayload
7 | import com.ingenuiq.note.http.command.CommandResponse._
8 | import com.ingenuiq.note.http.model.{ CorrelationIdResponse, ErrorMessageResponse }
9 | import com.ingenuiq.note.http.query.QueryResponse._
10 | import play.api.libs.json._
11 |
/** Play JSON Reads instances used by the integration tests to decode API responses. */
trait PlayJsonSupportReaders {

  // Reads a NoteId from a JSON string. A malformed UUID now yields a JsError
  // instead of letting UUID.fromString's IllegalArgumentException escape the
  // Reads (Reads should report failures as JsError, not throw).
  implicit lazy val treads: Reads[NoteId] = Reads[NoteId] {
    case JsString(str) =>
      try JsSuccess(NoteId(UUID.fromString(str)))
      catch { case _: IllegalArgumentException => JsError("Unparsable UUID") }
    case _ => JsError("Unparsable UUID")
  }

  implicit lazy val creads: Reads[CorrelationId] = Reads[CorrelationId] {
    case JsString(str) => JsSuccess(CorrelationId(str))
    case _             => JsError("Not JsString for CorrelationId")
  }

  // UserId wraps a plain string; the previous error message "Unparsable UUID"
  // was a copy-paste mistake from the NoteId reader.
  implicit lazy val ureads: Reads[UserId] = Reads[UserId] {
    case JsString(str) => JsSuccess(UserId(str))
    case _             => JsError("Not JsString for UserId")
  }

  implicit lazy val readsNoteEventResponse: Reads[NoteEventResponse] = Json.reads[NoteEventResponse]
  implicit lazy val readsNoteEventsResponse: Reads[NoteEventsResponse] = Json.reads[NoteEventsResponse]
  implicit lazy val readsNoteDetailsResponse: Reads[NoteResponse] = Json.reads[NoteResponse]
  implicit lazy val readsNotesTableRowsResponse: Reads[NotesResponse] = Json.reads[NotesResponse]
  implicit lazy val readsNoteCreationResponse: Reads[NoteCreationResponse] = Json.reads[NoteCreationResponse]
  implicit lazy val readsNoteUpdateResponse: Reads[NoteUpdateResponse] = Json.reads[NoteUpdateResponse]
  implicit lazy val readsNoteDeletionResponse: Reads[NoteDeletionResponse] = Json.reads[NoteDeletionResponse]
  implicit lazy val readsErrorMessageResponse: Reads[ErrorMessageResponse] = Json.reads[ErrorMessageResponse]
  implicit lazy val readsCorrelationIdResponse: Reads[CorrelationIdResponse] = Json.reads[CorrelationIdResponse]
  implicit lazy val readsNotePayload: Reads[NotePayload] = Json.reads[NotePayload]
}
40 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/http/query/QueryResponse.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.http.query
2 |
3 | import com.ingenuiq.note.common.{ CorrelationId, NoteId, UserId }
4 | import com.ingenuiq.note.http.PredefinedRoutePaths
5 | import com.ingenuiq.note.query.common.Traceable
6 | import com.ingenuiq.note.query.dao.model.{ NoteEventSql, NoteSql }
7 | import com.ingenuiq.note.query.events.NoteEventQueryResponse
8 | import com.ingenuiq.note.query.note.NoteQueryResponse
9 | import com.ingenuiq.note.utils
10 | import play.api.libs.json._
11 |
// HTTP response models for the query (read) side.
sealed trait QueryResponse extends Traceable

object QueryResponse extends PredefinedRoutePaths {

  /** Envelope for a list of notes; correlationId defaults to the current trace id. */
  case class NotesResponse(notes: Iterable[NoteResponse], correlationId: CorrelationId = CorrelationId(utils.currentTraceId))
      extends QueryResponse

  object NotesResponse {
    implicit val writes: Writes[NotesResponse] = Json.writes[NotesResponse]

    /** Maps the internal query-actor reply onto the HTTP response model. */
    def toResponse(ohf: NoteQueryResponse.NotesFetchedResponse): NotesResponse =
      NotesResponse(ohf.notes.map(NoteResponse.toResponse))
  }

  /** HTTP representation of a single note row. */
  case class NoteResponse(id: NoteId, title: Option[String], content: Option[String])

  object NoteResponse {
    implicit val writes: Writes[NoteResponse] = Json.writes[NoteResponse]

    def toResponse(n: NoteSql): NoteResponse =
      NoteResponse(id = n.id, title = n.title, content = n.content)
  }

  /** Envelope for a list of note events; correlationId defaults to the current trace id. */
  case class NoteEventsResponse(noteEvents: Iterable[NoteEventResponse], correlationId: CorrelationId = CorrelationId(utils.currentTraceId))
      extends QueryResponse

  object NoteEventsResponse {
    implicit val writes: Writes[NoteEventsResponse] = Json.writes[NoteEventsResponse]

    def toResponse(ohf: NoteEventQueryResponse.NoteEventsFetchedResponse): NoteEventsResponse =
      NoteEventsResponse(ohf.notes.map(NoteEventResponse.toResponse))
  }

  /** HTTP representation of a single audit event on a note. */
  case class NoteEventResponse(userId: UserId, noteId: NoteId, eventName: String)

  object NoteEventResponse {
    implicit val writes: Writes[NoteEventResponse] = Json.writes[NoteEventResponse]

    def toResponse(n: NoteEventSql): NoteEventResponse =
      NoteEventResponse(userId = n.userId, noteId = n.noteId, eventName = n.eventName)
  }
}
54 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/common/valueClasses.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.common
2 |
3 | import java.util.UUID
4 |
5 | import play.api.libs.json._
6 |
/** Identifier of the acting user. Value class: usually no runtime allocation. */
case class UserId(value: String) extends AnyVal {
  // `value` is already a String; the previous `.toString` call was redundant.
  override def toString: String = value
}

/** Correlation id tying log entries and responses to one request. */
case class CorrelationId(value: String) extends AnyVal {
  // `value` is already a String; the previous `.toString` call was redundant.
  override def toString: String = value
}

/** Unique identifier of a note. */
case class NoteId(value: UUID) extends AnyVal {
  override def toString: String = value.toString
}

/** Unique identifier of a single domain event. */
case class EventId(value: UUID) extends AnyVal {
  override def toString: String = value.toString
}
22 |
object EventId {

  /** Fresh random event id. */
  def generateNew = EventId(UUID.randomUUID)

  implicit val eventIdWrites: Writes[EventId] = (t: EventId) => JsString(t.value.toString)

  // Reads an EventId from a JSON string. A malformed UUID now yields a JsError
  // instead of letting UUID.fromString's IllegalArgumentException escape the
  // Reads (Reads should report failures as JsError, not throw).
  implicit val eventIdReads: Reads[EventId] = {
    case e: JsString =>
      try JsSuccess(EventId(UUID.fromString(e.value)))
      catch { case _: IllegalArgumentException => JsError(s"Unparsable UUID in EventId reads: ${e.value}") }
    case e => JsError(s"Expecting JsString in EventId reads but got $e")
  }
}
33 |
object UserId {
  // Sentinel users: actions by the system itself, by no user, and by an
  // unresolved/unknown user.
  val system = UserId("99999999-9999-9999-9999-999999999999")
  val noUserId = UserId("00000000-0000-0000-0000-000000000000")
  val unknownUserId = UserId("00000000-6666-0000-0000-000000000000")

  /** Fresh random user id (a UUID rendered as a string). */
  def generateNew = UserId(UUID.randomUUID.toString)

  // `value` is already a String; the previous `.toString` call was redundant.
  implicit val userIdWrites: Writes[UserId] = (t: UserId) => JsString(t.value)

  implicit val userIdReads: Reads[UserId] = {
    case e: JsString => JsSuccess(UserId(e.value))
    case e => JsError(s"Expecting JsString in UserId reads but got $e")
  }
}
47 |
object CorrelationId {
  // Sentinel used when no correlation id was propagated with the request.
  val noCorrelationId: CorrelationId = CorrelationId("00000000-0000-0000-0000-000000000000")

  // `value` is already a String; the previous `.toString` call was redundant.
  implicit val correlationIdWrites: Writes[CorrelationId] = (t: CorrelationId) => JsString(t.value)

  implicit val correlationIdReads: Reads[CorrelationId] = {
    case e: JsString => JsSuccess(CorrelationId(e.value))
    case e => JsError(s"Expecting JsString in correlationId reads but got $e")
  }
}
58 |
object NoteId {
  // NOTE(review): this is an implicit UUID => NoteId conversion; implicit
  // conversions can hide coercions at call sites — consider making callers
  // explicit before relying on it further.
  implicit val fromUUID: UUID => NoteId = uuid => NoteId(uuid)
  def generateNew: NoteId = NoteId(UUID.randomUUID)
  implicit val noteIdWrites: Writes[NoteId] = (t: NoteId) => JsString(t.value.toString)
}
64 |
--------------------------------------------------------------------------------
/src/it/scala/com/ingenuiq/note/integration/base/IntegrationBase.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.integration.base
2 |
3 | import akka.actor.ActorRef
4 | import akka.http.scaladsl.server.{ Directives, Route }
5 | import akka.http.scaladsl.testkit.{ RouteTest, RouteTestTimeout, ScalatestRouteTest }
6 | import com.ingenuiq.note.command.CommandSupervisorActor
7 | import com.ingenuiq.note.common.{ PredefinedTimeout, UserId }
8 | import com.ingenuiq.note.http.PredefinedRoutePaths
9 | import com.ingenuiq.note.http.command.CommandRoutes
10 | import com.ingenuiq.note.http.query.QueryRoutes
11 | import com.ingenuiq.note.query.QuerySupervisorActor
12 | import com.ingenuiq.note.query.dao.TableDefinitionCreator
13 | import com.ingenuiq.note.serialization.PlayJsonSupport
14 | import com.ingenuiq.note.settings.Settings
15 | import org.scalatest.BeforeAndAfterAll
16 | import org.scalatest.concurrent.Eventually
17 | import org.scalatest.matchers.should.Matchers
18 | import org.scalatest.time.{ Millis, Seconds, Span }
19 | import org.scalatest.wordspec.AnyWordSpec
20 |
21 | import scala.concurrent.duration.DurationInt
22 |
/** Shared base for HTTP integration tests: boots embedded Cassandra, creates
  * the read-side schema, and exposes the full command + query route tree
  * through the Akka HTTP route test kit.
  */
abstract class IntegrationBase
    extends AnyWordSpec
    with Matchers
    with Eventually
    with BeforeAndAfterAll
    with PredefinedTimeout
    with Directives
    with RouteTest
    with ScalatestRouteTest
    with PlayJsonSupport
    with PredefinedRoutePaths {

  // Per-request timeout for routes executed via the test kit.
  implicit def default: RouteTestTimeout = RouteTestTimeout(new DurationInt(10).second)

  // eventually(...) polls for up to 10s at 200ms intervals.
  override implicit val patienceConfig: PatienceConfig = PatienceConfig(Span(10, Seconds), Span(200, Millis))

  // Lazy so the actors are only created once a test first touches the routes.
  lazy val commandActor: ActorRef = system.actorOf(CommandSupervisorActor(), "commandActor")
  lazy val queryActor: ActorRef = system.actorOf(QuerySupervisorActor(), "queryActor")

  val settings: Settings = Settings.conf

  lazy val commandRoutes: CommandRoutes = new CommandRoutes(commandActor, settings)
  lazy val queryRoutes: QueryRoutes = new QueryRoutes(queryActor, settings)

  // Default identity for requests issued by tests.
  val userId: UserId = UserId.generateNew

  // NOTE(review): the implicit parameter shadows the outer `userId` val while
  // its default expression resolves to that outer val — confirm this shadowing
  // is intentional.
  def baseTestRoute(implicit userId: UserId = userId): Route =
    Route.seal(commandRoutes.routes ~ queryRoutes.routes)

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    // Start an embedded Cassandra for the journal, then build the SQL read-side schema.
    EmbeddedCassandra.startCassandra()
    new TableDefinitionCreator().createQuerySchemaWithRetry(1)
  }

}
59 |
--------------------------------------------------------------------------------
/src/test/scala/com/ingenuiq/note/utils/JournalSchemaEvolutionTest.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.utils
2 |
3 | import com.ingenuiq.note.command.persistence.{ SchemaInfo, StatementSchemaMap }
4 | import org.apache.avro.SchemaCompatibility
5 | import org.apache.avro.SchemaCompatibility.SchemaCompatibilityType
6 | import org.scalatest.freespec.AnyFreeSpec
7 | import org.scalatest.matchers.should.Matchers
8 |
/** Verifies Avro journal schema evolution: every current schema must remain
  * forward- and backward-compatible with all historical versions of the same
  * record, and no schema may be registered twice.
  */
class JournalSchemaEvolutionTest extends AnyFreeSpec with Matchers {

  // currentSchemaPairs: schemas in active use; allSchemaPairs: current plus archived history.
  val (currentSchemaPairs: List[SchemaInfo], allSchemaPairs: List[SchemaInfo]) = StatementSchemaMap()

  "Schema compatibility" - {
    // Only records that actually have more than one historical version need checking.
    val groupedSchemasByName: Map[String, List[SchemaInfo]] = allSchemaPairs.groupBy(_.schema.getName)
    val schemasWithMultipleVersions = currentSchemaPairs.filter(x => groupedSchemasByName(x.schema.getName).size > 1)

    "be forward compatibility" in {

      schemasWithMultipleVersions.foreach { currentSchemaInfo =>
        val allSchemasForSpecificEventName = groupedSchemasByName(currentSchemaInfo.schema.getName)

        // Old schema as reader, current schema as writer: historical readers
        // must be able to decode data written with the current schema.
        allSchemasForSpecificEventName.foreach { oldSchema =>
          withClue(
            s"Event name: ${currentSchemaInfo.schema.getName}, incompatible manifestHash: ${oldSchema.manifestHash}, reader filename: ${oldSchema.filePath}, writer filename: ${currentSchemaInfo.filePath}"
          ) {
            SchemaCompatibility
              .checkReaderWriterCompatibility(oldSchema.schema, currentSchemaInfo.schema)
              .getType shouldBe SchemaCompatibilityType.COMPATIBLE
          }
        }
      }
    }

    "be backward compatibility" in {
      schemasWithMultipleVersions.foreach { currentSchemaInfo =>
        val allSchemasForSpecificEventName = groupedSchemasByName(currentSchemaInfo.schema.getName)

        // Current schema as reader, old schema as writer: the current reader
        // must be able to decode data written with any historical schema.
        allSchemasForSpecificEventName.foreach { newSchema =>
          withClue(
            s"Event name: ${currentSchemaInfo.schema.getName}, incompatible manifestHash: ${newSchema.manifestHash}, reader filename: ${currentSchemaInfo.filePath}, writer filename: ${newSchema.filePath}"
          ) {
            SchemaCompatibility
              .checkReaderWriterCompatibility(currentSchemaInfo.schema, newSchema.schema)
              .getType shouldBe SchemaCompatibilityType.COMPATIBLE
          }
        }
      }
    }
  }

  // Guards against registering the exact same schema under two manifest hashes.
  "Schemas should not be duplicated" in {
    allSchemaPairs.groupBy(_.manifestHash).exists(_._2.size > 1) shouldBe false
  }
}
55 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/dao/repos/NoteRepo.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.dao.repos
2 |
3 | import com.ingenuiq.note.command.note.NoteEvent._
4 | import com.ingenuiq.note.common.NoteId
5 | import com.ingenuiq.note.query.dao.common.QueryFilterOptions
6 | import com.ingenuiq.note.query.dao.model.NoteSql
7 | import com.ingenuiq.note.query.dao.schema._
8 | import com.typesafe.scalalogging.LazyLogging
9 |
10 | import scala.concurrent.{ ExecutionContext, Future }
11 | import scala.language.higherKinds
12 |
class NoteRepo(implicit ec: ExecutionContext) extends NoteTableDefinition with LazyLogging with QueryFilterOptions {

  import com.ingenuiq.note.query.dao.common.DBComponent.db
  import com.ingenuiq.note.query.dao.common.DBComponent.driver.api._

  /** Inserts the note carried by a NoteCreated event. Returns the inserted row count. */
  def insertNote(po: NoteCreated): Future[Int] = {
    logger.trace("Inserting a note")
    db.run(notes += NoteSql.fromCreatedToSql(po))
  }

  /**
    * Applies a partial update from a NoteUpdated event.
    *
    * Slick's `update` requires a statically-known column tuple, so each combination of
    * present fields needs its own branch; when neither title nor content changed we skip
    * the database round-trip entirely and report 0 affected rows.
    */
  def updateNote(po: NoteUpdated): Future[Int] = {
    logger.trace("Updating note")

    val meta = po.persistentEventMetadata
    (po.title, po.content) match {
      case (Some(title), Some(content)) =>
        db.run(
          notes
            .filter(_.id === po.noteId)
            .map(x => (x.title, x.content, x.lastModified, x.correlationId))
            .update((title, content, meta.created, meta.correlationId))
        )
      case (Some(title), None) =>
        db.run(
          notes
            .filter(_.id === po.noteId)
            .map(x => (x.title, x.lastModified, x.correlationId))
            .update((title, meta.created, meta.correlationId))
        )
      case (None, Some(content)) =>
        db.run(
          notes
            .filter(_.id === po.noteId)
            .map(x => (x.content, x.lastModified, x.correlationId))
            .update((content, meta.created, meta.correlationId))
        )
      case _ =>
        Future.successful(0)
    }
  }

  /** Deletes the note referenced by a NoteDeleted event. Returns the deleted row count. */
  def removeNote(po: NoteDeleted): Future[Int] = {
    // Fixed copy-paste bug: previously logged "Inserting a note" on the delete path.
    logger.trace("Removing a note")
    db.run(notes.filter(note => note.id === po.noteId).delete)
  }

  /** Fetches all notes from the view store. */
  def getNotes: Future[Seq[NoteSql]] = {
    logger.trace("Getting notes from repo")

    db.run(notes.result)
  }

  /** Fetches a single note by id, if present. */
  def getNote(noteId: NoteId): Future[Option[NoteSql]] = {
    logger.trace("Get a note")

    // result.headOption pushes the single-row limit into the query instead of
    // materialising the whole result set and discarding all but the first row.
    db.run(notes.filter(x => x.id === noteId).result.headOption)
  }

}
72 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/http/RouteHelpers.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.http
2 |
3 | import java.util.UUID
4 |
5 | import akka.http.scaladsl.model.StatusCodes.BadRequest
6 | import akka.http.scaladsl.model.Uri.Path
7 | import akka.http.scaladsl.server.Directives._
8 | import akka.http.scaladsl.server.PathMatcher.{ Matched, Matching, Unmatched }
9 | import akka.http.scaladsl.server._
10 | import com.ingenuiq.note.http.model.ErrorMessageResponse
11 | import com.ingenuiq.note.serialization.PlayJsonSupport
12 | import com.typesafe.scalalogging.LazyLogging
13 |
14 | import scala.util.{ Failure, Success, Try }
15 |
/**
  * Helpers for building akka-http routes that extract UUIDs from path segments.
  * Malformed UUIDs are turned into 400 Bad Request responses instead of rejections.
  */
trait RouteHelpers extends PlayJsonSupport with LazyLogging with PredefinedRoutePaths {

  // Aliases for the supported arities of UUID-extracting path matchers.
  private type Match0 = PathMatcher0
  private type Match1 = PathMatcher1[Try[UUID]]
  private type Match2 = PathMatcher[(Try[UUID], Try[UUID])]
  private type Match3 = PathMatcher[(Try[UUID], Try[UUID], Try[UUID])]

  /** Matches one path segment and parses it as a UUID; the parse failure, if any, is carried in the Try. */
  object ID extends PathMatcher1[Try[UUID]] {

    def apply(path: Path): Matching[Tuple1[Try[UUID]]] = path match {
      case Path.Segment(segment, tail) => Matched(tail, Tuple1(parse(segment)))
      case _ => Unmatched
    }
  }

  /** Requires the matcher to consume the remaining path, tolerating one trailing slash. */
  def end[L](pm: PathMatcher[L]): PathMatcher[L] = pm ~ Slash.? ~ PathEnd

  /** Matches a static tail (no extractions), e.g. pathTail(NotePath). */
  def pathTail(pm: Match0): Directive0 = pathPrefix(end(pm))

  // The implicit UUID => T conversions below let callers receive their own id wrapper
  // types (e.g. NoteId) directly instead of raw UUIDs.
  def pathTail[T](pm: Match1)(route: T => Route)(implicit t: UUID => T): Route =
    pathPrefix(end(pm)) { id: Try[UUID] => extractTry(for (u <- id) yield route(t(u))) }

  def pathTail[S, T](pm: Match2)(route: (S, T) => Route)(implicit s: UUID => S, t: UUID => T): Route =
    pathPrefix(end(pm))((id1: Try[UUID], id2: Try[UUID]) => extractTry(for { u1 <- id1; u2 <- id2 } yield route(s(u1), t(u2))))

  def pathTail[S, T, U](pm: Match3)(route: (S, T, U) => Route)(implicit s: UUID => S, t: UUID => T, u: UUID => U): Route =
    pathPrefix(end(pm)) { (id1: Try[UUID], id2: Try[UUID], id3: Try[UUID]) =>
      extractTry(for { u1 <- id1; u2 <- id2; u3 <- id3 } yield route(s(u1), t(u2), u(u3)))
    }

  /** Unwraps the route, or completes with 400 when any UUID in the path failed to parse. */
  private def extractTry(t: Try[Route]): Route = t match {
    case Success(x) => x
    case Failure(e) =>
      logger.error("extract try error", e)
      complete(BadRequest -> ErrorMessageResponse(errorMessage = e.getLocalizedMessage))
  }

  // UUID.fromString throws IllegalArgumentException on bad input; captured by Try.
  private def parse(id: String): Try[UUID] = Try(UUID.fromString(id))
}
55 |
/** Centralised URL segment names so command and query routes stay consistent. */
trait PredefinedRoutePaths {
  /** Root prefix for every API route. */
  val BasePath: String = "api"
  /** Prefix for the read-side (CQRS query) routes. */
  val QueryPath: String = "query"
  /** Segment addressing the note resource. */
  val NotePath: String = "note"
  /** Segment addressing note events. */
  val EventPath: String = "event"
}

/** Default instance for call sites that prefer not to mix the trait in. */
object PredefinedRoutePaths extends PredefinedRoutePaths
64 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/http/command/CommandRoutes.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.http.command
2 |
3 | import akka.actor.ActorRef
4 | import akka.http.scaladsl.model.StatusCodes
5 | import akka.http.scaladsl.server.Directives._
6 | import akka.http.scaladsl.server.Route
7 | import akka.pattern.ask
8 | import com.ingenuiq.note.command.note.NoteCommand._
9 | import com.ingenuiq.note.command.note.NoteEvent._
10 | import com.ingenuiq.note.common.{ NoteId, PredefinedTimeout, UserId }
11 | import com.ingenuiq.note.http.RouteHelpers
12 | import com.ingenuiq.note.http.model.ErrorMessageResponse
13 | import com.ingenuiq.note.serialization.PlayJsonSupport
14 | import com.ingenuiq.note.settings.Settings
15 | import com.typesafe.scalalogging.LazyLogging
16 |
/**
  * Write-side HTTP routes: note creation, update and deletion.
  * Commands are sent to the command supervisor actor via ask; responses are mapped to HTTP codes.
  */
class CommandRoutes(commandActor: ActorRef, val settings: Settings)
    extends PlayJsonSupport
    with LazyLogging
    with PredefinedTimeout
    with RouteHelpers {

  /** POST /note creates; PUT/DELETE /note/{id} update and delete. */
  def routes(implicit userId: UserId): Route =
    pathTail(NotePath) {
      post(createNote)
    } ~
      pathTail(NotePath / ID) { id: NoteId => put(updateNote(id)) ~ delete(deleteNote(id)) }

  /** Creates a note from the request payload and replies 201 with the new note's id. */
  def createNote(implicit userId: UserId): Route =
    decodeRequest {
      entity(as[CommandRequest.NotePayload]) { payload =>
        // NOTE(review): any actor reply other than NoteCreated falls outside this partial
        // function and surfaces as a 500 — confirm the actor can only reply NoteCreated here.
        onSuccess(commandActor ? payload.toCreateCommand(userId)) {
          case e: NoteCreated =>
            // Fixed: this trace line was accidentally duplicated.
            logger.trace("Note created response")
            complete(StatusCodes.Created -> CommandResponse.NoteCreationResponse(e.note.id))
        }
      }
    }

  /** Updates a note's title/content; 200 on success, 404 when the note does not exist. */
  def updateNote(noteId: NoteId)(implicit userId: UserId): Route =
    pathEndOrSingleSlash {
      // The redundant inner put directive was removed: routes already wraps this in put(...).
      decodeRequest {
        entity(as[CommandRequest.NotePayload]) { payload =>
          onSuccess(commandActor ? payload.toUpdateCommand(userId, noteId)) {
            case _: NoteUpdated =>
              logger.trace("Note updated response")
              complete(StatusCodes.OK -> CommandResponse.NoteUpdateResponse(noteId))
            case NoteNotFound =>
              logger.trace("Note not found")
              complete(StatusCodes.NotFound -> ErrorMessageResponse("Note not found"))
          }
        }
      }
    }

  /** Deletes a note; 200 on success, 404 when the note does not exist. */
  def deleteNote(noteId: NoteId)(implicit userId: UserId): Route =
    decodeRequest {
      onSuccess(commandActor ? DeleteNote(userId, noteId)) {
        case e: NoteDeleted =>
          logger.trace("Note deleted response")
          complete(StatusCodes.OK -> CommandResponse.NoteDeletionResponse(e.noteId))
        case NoteNotFound =>
          logger.trace("Note not found")
          complete(StatusCodes.NotFound -> ErrorMessageResponse("Note not found"))
      }
    }

}
72 |
--------------------------------------------------------------------------------
/src/main/resources/kamon.conf:
--------------------------------------------------------------------------------
1 | tracing-monitoring-settings {
2 | zipkin-enabled = true
3 | zipkin-enabled = ${?ZIPKIN_ENABLED}
4 |
5 | prometheus-enabled = true
6 | prometheus-enabled = ${?PROMETHEUS_ENABLED}
7 | }
8 |
9 | kamon {
10 | environment {
11 | service = "inote"
12 | host = "localhost"
13 | host = ${?KAMON_ENV_HOST}
14 | instance = "note@localhost"
15 | instance = ${?KAMON_ENV_INSTANCE}
16 |
17 | tags {
18 | service = "note"
19 | env = local
20 | env = ${?ENV}
21 | }
22 | }
23 |
24 | metric {
25 | # Interval at which metric snapshots will be collected and sent to all metric reporters.
26 | # Should match prometheus scraping interval
27 | tick-interval = 15 seconds
28 | }
29 |
30 | akka-http {
31 | add-http-status-code-as-metric-tag = true
32 | }
33 |
34 | system-metrics {
35 | host.enabled = false
36 | jmx.enabled = true
37 | }
38 |
39 | trace {
40 | sampler = always
41 | join-remote-parents-with-same-span-id = true
42 | }
43 |
44 | zipkin {
45 | host = 127.0.0.1
46 | host = ${?ZIPKIN_HTTP_HOST}
47 | port = 9411
48 | }
49 |
50 | prometheus {
51 | start-embedded-http-server = false
52 | include-environment-tags = yes
53 |
54 | buckets {
55 |
56 | default-buckets = [
57 | 11,
58 | 30,
59 | 100,
60 | 300,
61 | 1000,
62 | 3000,
63 | 10000,
64 | 30000,
65 | 100000
66 | ]
67 |
68 | time-buckets = [
69 | 0.005,
70 | 0.01,
71 | 0.025,
72 | 0.05,
73 | 0.075,
74 | 0.1,
75 | 0.25,
76 | 0.5,
77 | 0.75,
78 | 1,
79 | 2.5,
80 | 5,
81 | 7.5,
82 | 10
83 | ]
84 |
85 | information-buckets = [
86 | 512,
87 | 1024,
88 | 2048,
89 | 4096,
90 | 16384,
91 | 65536,
92 | 524288,
93 | 1048576
94 | ]
95 | }
96 | }
97 |
98 | util.filters {
99 |
100 | "akka.tracked-dispatcher" {
101 | includes = ["**"]
102 | }
103 |
104 |
105 | "akka.traced-actor" {
106 | includes = ["*/system/sharding/**", "note-actor-system/user/**"]
107 | excludes = [
108 | "*/user/IO-**",
109 | "*/system/sharding/**Coordinator",
110 | "note-actor-system/user/queryActor/*/*/StreamSupervisor-**"
111 | ]
112 | }
113 |
114 | "akka.tracked-actor" {
115 | includes = ["*/system/sharding/**", "note-actor-system/user/**"]
116 | excludes = [
117 | "*/user/IO-**",
118 | "*/system/sharding/**Coordinator",
119 | "note-actor-system/user/queryActor/*/*/StreamSupervisor-**"
120 | ]
121 | }
122 |
123 | "akka.tracked-router" {
124 | includes = [ "**" ],
125 | excludes = [
126 | "note-actor-system/system/**"
127 | ]
128 | }
129 |
130 | }
131 | }
132 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/http/BaseRoutes.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.http
2 |
3 | import akka.actor.ActorRef
4 | import akka.event.Logging
5 | import akka.http.scaladsl.model._
6 | import akka.http.scaladsl.server.Directives._
7 | import akka.http.scaladsl.server._
8 | import akka.http.scaladsl.server.directives.DebuggingDirectives
9 | import ch.megard.akka.http.cors.scaladsl.CorsDirectives._
10 | import ch.megard.akka.http.cors.scaladsl.model.HttpOriginMatcher
11 | import ch.megard.akka.http.cors.scaladsl.settings.CorsSettings
12 | import com.ingenuiq.note.common.{ PredefinedTimeout, UserId }
13 | import com.ingenuiq.note.http.command.CommandRoutes
14 | import com.ingenuiq.note.http.model.ErrorMessageResponse
15 | import com.ingenuiq.note.http.query.QueryRoutes
16 | import com.ingenuiq.note.serialization.PlayJsonSupport
17 | import com.ingenuiq.note.settings.Settings
18 | import com.typesafe.scalalogging.LazyLogging
19 |
/**
  * Top-level route assembly: CORS, rejection/exception handling, a public health check
  * and the secured command/query API under the "api" prefix.
  */
trait BaseRoutes extends LazyLogging with PlayJsonSupport with HealthCheckRoute with PredefinedTimeout {

  // Wiring supplied by the concrete server implementation.
  val commandActor: ActorRef
  val queryActor: ActorRef
  val settings: Settings

  lazy val commandRoutes: CommandRoutes = new CommandRoutes(commandActor, settings)
  lazy val queryRoutes: QueryRoutes = new QueryRoutes(queryActor, settings)

  // Wraps Akka's default rejection responses in a {"rejection": "..."} JSON envelope.
  private val rejectionHandler: RejectionHandler =
    RejectionHandler
      .newBuilder()
      .result()
      .withFallback(RejectionHandler.default)
      .mapRejectionResponse {
        case res @ HttpResponse(_, _, ent: HttpEntity.Strict, _) =>
          // since all Akka default rejection responses are Strict this will handle all rejections
          val message = ent.data.utf8String.replaceAll("\"", """\"""")
          res.withEntity(entity = HttpEntity(ContentTypes.`application/json`, s"""{"rejection": "$message"}"""))

        case x => x // pass through all other types of responses
      }

  // Logs the failing URI and hides internals behind a generic 500 body.
  private val exceptionHandler: ExceptionHandler = ExceptionHandler {
    case e =>
      extractUri { uri =>
        logger.error(s"Exception while handling request $uri", e)
        complete(StatusCodes.InternalServerError -> ErrorMessageResponse())
      }

  }

  private val handleErrors: Directive[Unit] = handleRejections(rejectionHandler) & handleExceptions(exceptionHandler)

  // Permissive CORS: any origin, standard verb set.
  private val corsSettings: CorsSettings = CorsSettings.defaultSettings
    .withAllowedOrigins(HttpOriginMatcher.*)
    .withAllowedMethods(List(HttpMethods.PUT, HttpMethods.GET, HttpMethods.POST, HttpMethods.OPTIONS, HttpMethods.DELETE))

  private def publicRoutes: Route = healthCheckRoute

  // NOTE(review): the commandActor/queryActor parameters here are unused — commandRoutes and
  // queryRoutes are built from the trait's vals above. Confirm and either use the params or drop them.
  private def securedRoutes(commandActor: ActorRef, queryActor: ActorRef): Route =
    pathPrefix(PredefinedRoutePaths.BasePath) {
      DebuggingDirectives.logRequest(("", Logging.DebugLevel)) {
        implicit val userId: UserId = UserId.generateNew // login missing
        commandRoutes.routes ~ queryRoutes.routes
      }
    }

  /** Complete route tree: CORS and error handling around public + secured routes. */
  def routes(commandActor: ActorRef, queryActor: ActorRef): Route =
    cors(corsSettings) {
      handleErrors {
        publicRoutes ~ securedRoutes(commandActor, queryActor)
      }
    }

}
76 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/http/query/QueryRoutes.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.http.query
2 |
3 | import akka.actor.ActorRef
4 | import akka.http.scaladsl.model.StatusCodes
5 | import akka.http.scaladsl.server.Directives._
6 | import akka.http.scaladsl.server.Route
7 | import akka.pattern.ask
8 | import com.ingenuiq.note.common._
9 | import com.ingenuiq.note.http.RouteHelpers
10 | import com.ingenuiq.note.http.model._
11 | import com.ingenuiq.note.query.events.NoteEventQuery.GetNoteEvents
12 | import com.ingenuiq.note.query.events.NoteEventQueryResponse
13 | import com.ingenuiq.note.query.model.{ FailureResult, FullResult }
14 | import com.ingenuiq.note.query.note.NoteQuery.GetNotes
15 | import com.ingenuiq.note.query.note.{ NoteQuery, NoteQueryResponse }
16 | import com.ingenuiq.note.serialization.PlayJsonSupport
17 | import com.ingenuiq.note.settings.Settings
18 | import com.typesafe.scalalogging.LazyLogging
19 |
/**
  * Read-side HTTP routes: fetch notes, a single note, or the note event stream.
  * Queries go to the query supervisor actor via ask; ServiceResult replies map to HTTP codes.
  */
class QueryRoutes(queryActor: ActorRef, settings: Settings)
    extends PlayJsonSupport
    with PredefinedTimeout
    with RouteHelpers
    with LazyLogging {

  /** GET /query/note, /query/note/event and /query/note/{id}. */
  def routes(implicit userId: UserId): Route =
    pathPrefix(QueryPath) {
      pathTail(NotePath) {
        get(getNotes)
      } ~
        pathTail(NotePath / EventPath) {
          get(getNoteEvents)
        } ~
        pathTail(NotePath / ID) { id: NoteId => get(getNoteById(id)) }
    }

  /** Returns all note events; 500 with a generic body on failure. */
  def getNoteEvents(implicit userId: UserId): Route =
    decodeRequest {
      onSuccess(queryActor ? GetNoteEvents(userId)) {
        case FullResult(e: NoteEventQueryResponse.NoteEventsFetchedResponse) =>
          logger.info("Note events response")
          complete(StatusCodes.OK -> QueryResponse.NoteEventsResponse.toResponse(e))
        case FailureResult(t, m, e) =>
          logger.error(s"Type $t, message: $m, exception ${e.map(_.getLocalizedMessage)}")
          complete(StatusCodes.InternalServerError -> ErrorMessageResponse())
      }
    }

  /** Returns all notes from the view store; 500 with a generic body on failure. */
  def getNotes(implicit userId: UserId): Route =
    decodeRequest {
      onSuccess(queryActor ? GetNotes(userId)) {
        case FullResult(e: NoteQueryResponse.NotesFetchedResponse) =>
          logger.info("Notes response")
          complete(StatusCodes.OK -> QueryResponse.NotesResponse.toResponse(e))
        case FailureResult(t, m, e) =>
          logger.error(s"Type $t, message: $m, exception ${e.map(_.getLocalizedMessage)}")
          complete(StatusCodes.InternalServerError -> ErrorMessageResponse())
      }
    }

  /** Returns one note by id; 404 when absent, 500 on failure. */
  def getNoteById(id: NoteId)(implicit userId: UserId): Route =
    decodeRequest {
      onSuccess(queryActor ? NoteQuery.GetNote(userId, id)) {
        case FullResult(NoteQueryResponse.NoteFetchedResponse(Some(note))) =>
          logger.info("Note response")
          complete(StatusCodes.OK -> QueryResponse.NoteResponse.toResponse(note))
        case FullResult(NoteQueryResponse.NoteFetchedResponse(None)) =>
          logger.info("Note not found response")
          // NOTE(review): 404 here carries CorrelationIdResponse while other routes use
          // ErrorMessageResponse — confirm the inconsistency is intentional.
          complete(StatusCodes.NotFound -> CorrelationIdResponse())
        case FailureResult(t, m, e) =>
          logger.error(s"Type $t, message: $m, exception ${e.map(_.getLocalizedMessage)}")
          complete(StatusCodes.InternalServerError -> ErrorMessageResponse())
      }
    }

}
77 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # More detailed architecture explained at
2 |
3 |
4 | [ES/CQRS the Akka way](https://medium.com/@reeebuuk/es-cqrs-the-akka-way-abbb3ffb89a1)
5 |
6 | # Event sourcing / CQRS Note example Service
7 |
8 | Application is built in CQRS (Command Query Responsibility Segregation) fashion:
9 | - Command side
10 | - Akka persistence
11 | - Cassandra as event store
12 | - Query side
13 | - Akka persistence query for event store to view store transfer
14 | - Slick
15 | - Postgres as a view store
16 |
17 | The application constantly reads from the event store and keeps the query side in sync. This model is eventually consistent, which means that once a command is executed the view will eventually be updated.
18 |
19 | ## How to build and run
20 |
21 | Dependencies: Java 8 & sbt
22 |
23 | ### Running with full authorization service (being able to login as different user)
24 |
25 | - Run `docker-compose -f docker-compose-essential.yml up`
26 | - From the root directory run `sbt run`
27 | - Go to http://localhost:9001/ to test the application
28 |
29 | ### Example of API usage
30 |
31 | ##### Creation of new note
32 | Request:
33 |
34 | curl -X POST \
35 | http://localhost:9001/api/note \
36 | -H 'Content-Type: application/json' \
37 | -d '{
38 | "title": "Some title",
39 | "published": false,
40 | "subTitle": "Some subtitle"
41 | }'
42 |
43 | Response:
44 |
45 | {
46 | "noteId": "af8a7f3b-67ca-45cd-87d1-44438f1d3f58",
47 | "correlationId": "6238f89e05c205be"
48 | }
49 |
50 | #### Fetch individual note
51 | Request:
52 |
53 | curl -X GET http://localhost:9001/api/query/note/af8a7f3b-67ca-45cd-87d1-44438f1d3f58
54 |
55 | Response:
56 |
57 | {
58 | "note": {
59 | "id": "af8a7f3b-67ca-45cd-87d1-44438f1d3f58",
60 | "title": "Some title",
61 | "subTitle": "Some subtitle",
62 | "published": false
63 | },
64 | "correlationId": "aa1c3e6e633ee8a5"
65 | }
66 |
67 |
68 | ### Schema evolution!!!
69 |
70 | All Akka persistence events are generated from .avsc files located in `src/main/resources/avro/avsc`.
71 |
72 | All events that change in the `avsc` folder need to be versioned properly if we want to avoid breaking changes across environments.
73 |
74 | The tricky part is to take the version that existed before the change and copy it into the `avsc-history` folder.
75 |
76 | ##### Example
77 |
78 | You need to make a change in Note object which is within NoteCreated event.
79 |
80 | 1. Go to `src/main/resources/avro` folder and find in which .avdl file is this event modelled
81 | 2. Change the record by adding the new field `WITH a default value`. If a default value isn't added, old events won't be able to be replayed. Existing fields and types must not be changed — only additions are allowed.
82 | 3. Execute command `sbt generateAvsc`
83 | 4. Notice that in `src/main/resources/avro/avsc` there are multiple files changed NoteAvro, NoteCreatedAvro and NoteUpdatedAvro. NoteAvro.avsc isn't really used as standalone object so we leave it as is. NoteCreatedAvro and NoteUpdatedAvro both use it internally so we need to version these events.
84 | 5. Copy the previous versions (the ones from before you ran the command in step 3 — use version-control history to fetch them) into `src/main/resources/avro/avsc-history`, in a folder named after the JIRA ticket, e.g. EP-1234. Just copy the previous versions there. Nothing else.
85 | 6. Execute command `sbt compile` and boom, new version of Avro events is generated.
86 |
--------------------------------------------------------------------------------
/src/test/resources/logback.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 | [%blue(%date{HH:mm:ss.SSS})][%highlight(%-5level)][%magenta(%replace(%logger{0}.%M){'\.\?+|\$*\.\$.+\$+\d*|\$',''})] - %msg %blue(%marker) [%cyan(%thread)] %n
16 |
17 |
18 |
19 |
20 | ${HOME}/log/${NAME_PREFIX}.log
21 | true
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 | {
30 | "timestamp": "%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ}",
31 | "level": "%level",
32 | "class": "%replace(%logger.%M){'\\.\\?+|\\$*\\.\\$.+\\$+\\d*|\\$',''}",
33 | "short-class": "%replace(%logger{0}.%M){'\\.\\?+|\\$*\\.\\$.+\\$+\\d*|\\$',''}",
34 | "thread": "%thread"
35 | }
36 |
37 |
38 |
39 |
40 |
41 | 30
42 | 2048
43 | 20
44 | true
45 |
46 |
47 |
48 |
49 |
50 | ${HOME}/log/${NAME_PREFIX}.log.%d{yyyy-MM-dd}.%i.gz
51 |
52 | ${MAX_FILE_SIZE}
53 | ${MAX_DAYS_KEPT}
54 | ${TOTAL_SIZE_CAP}
55 | true
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/command/persistence/AvroConverters.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.command.persistence
2 |
3 | import java.time._
4 | import java.util.UUID
5 |
6 | import com.ingenuiq.common.PersistentEventMetadataAvro
7 | import com.ingenuiq.note.events._
8 | import com.ingenuiq.note.command.note.NoteEvent._
9 | import com.ingenuiq.note.command.note.{ Note, PersistentNoteEvent }
10 | import com.ingenuiq.note.common._
11 | import com.ingenuiq.note.query.common.{ PersistentEvent, PersistentEventMetadata }
12 | import org.apache.avro.specific.SpecificRecordBase
13 |
/**
  * Bidirectional converters between domain note events and their Avro wire records,
  * used by the persistence serializer. Timestamps are stored as epoch milliseconds in UTC.
  */
object AvroConverters {

  def from(e: Note): NoteAvro =
    new NoteAvro(id = e.id.value.toString, title = e.title, content = e.content)

  def to(e: NoteAvro): Note = Note(id = NoteId(UUID.fromString(e.id)), title = e.title, content = e.content)

  // NOTE(review): both dispatch matches below are non-exhaustive over SpecificRecordBase /
  // PersistentNoteEvent subtypes known at this site — an unknown record type throws MatchError.
  // Presumably only note events ever reach here; confirm when adding new event types.
  def from(e: PersistentNoteEvent): SpecificRecordBase = e match {
    case e: NoteCreated => from(e)
    case e: NoteUpdated => from(e)
    case e: NoteDeleted => from(e)
  }

  def to(e: SpecificRecordBase): PersistentEvent = e match {
    case e: NoteCreatedAvro => to(e)
    case e: NoteUpdatedAvro => to(e)
    case e: NoteDeletedAvro => to(e)
  }

  // Metadata round-trip: UUID/typed ids flattened to strings/longs on the wire.
  def from(metadata: PersistentEventMetadata): PersistentEventMetadataAvro =
    PersistentEventMetadataAvro(
      correlationId = metadata.correlationId.value,
      eventId = metadata.eventId.value.toString,
      userId = metadata.userId.value,
      created = localDateTimeToLong(metadata.created),
      spanId = metadata.spanId
    )

  def to(metadata: PersistentEventMetadataAvro): PersistentEventMetadata =
    PersistentEventMetadata(
      correlationId = CorrelationId(metadata.correlationId),
      eventId = EventId(UUID.fromString(metadata.eventId)),
      userId = UserId(metadata.userId),
      created = longToLocalDateTime(metadata.created),
      spanId = metadata.spanId
    )

  def from(e: NoteCreated): NoteCreatedAvro =
    new NoteCreatedAvro(metadata = from(e.persistentEventMetadata), note = from(e.note))

  def to(e: NoteCreatedAvro): NoteCreated =
    NoteCreated(persistentEventMetadata = to(e.metadata), note = to(e.note))

  // Optional title/content are wrapped in UpdatebleAvro so "field absent" survives the round-trip.
  def from(e: NoteUpdated): NoteUpdatedAvro =
    new NoteUpdatedAvro(
      metadata = from(e.persistentEventMetadata),
      id = e.noteId.value.toString,
      title = e.title.map(UpdatebleAvro.apply),
      content = e.content.map(UpdatebleAvro.apply)
    )

  def to(e: NoteUpdatedAvro): NoteUpdated =
    NoteUpdated(
      persistentEventMetadata = to(e.metadata),
      noteId = NoteId(UUID.fromString(e.id)),
      title = e.title.map(_.value),
      content = e.content.map(_.value)
    )

  def from(e: NoteDeleted): NoteDeletedAvro =
    new NoteDeletedAvro(metadata = from(e.persistentEventMetadata), noteId = e.noteId.value.toString)

  def to(e: NoteDeletedAvro): NoteDeleted =
    NoteDeleted(persistentEventMetadata = to(e.metadata), noteId = NoteId(UUID.fromString(e.noteId)))

  // LocalDateTime <-> epoch millis, pinned to UTC so serialization is host-timezone independent.
  private def localDateTimeToLong(date: LocalDateTime): Long = date.toInstant(ZoneOffset.UTC).toEpochMilli
  private def longToLocalDateTime(date: Long): LocalDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(date), ZoneOffset.UTC)
}
82 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/model/ServiceResult.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.model
2 |
/**
  * Outcome of a call to a service actor: FullResult (Some-like), EmptyResult (None-like)
  * or a FailureResult. Provides map, flatMap, filter and getOrElse so results compose
  * like a monad.
  */
sealed abstract class ServiceResult[+A] {
  def isEmpty: Boolean
  def isValid: Boolean

  /** Falls back to `default` unless this is a FullResult. */
  def getOrElse[B >: A](default: => B): B = default

  /** Empty results stay empty under map. */
  def map[B](f: A => B): ServiceResult[B] = EmptyResult

  /** Empty results stay empty under flatMap. */
  def flatMap[B](f: A => ServiceResult[B]): ServiceResult[B] = EmptyResult

  /** Filtering a non-full result is a no-op. */
  def filter(p: A => Boolean): ServiceResult[A] = this

  /** Bridge to the standard library: Some for FullResult, None for everything else. */
  def toOption: Option[A] =
    this match {
      case FullResult(a) => Some(a)
      case _             => None
    }
}

/**
  * Companion to ServiceResult
  */
object ServiceResult {
  val UnexpectedFailure = ErrorMessage("common.unexpect", Some("An unexpected exception has occurred"))

  /** Lifts an Option: Some becomes FullResult, None becomes EmptyResult. */
  def fromOption[A](opt: Option[A]): ServiceResult[A] =
    opt.fold[ServiceResult[A]](EmptyResult)(FullResult(_))
}

/**
  * Empty (negative) representation of a service call result, e.g. an entity
  * looked up by id that wasn't there.
  */
sealed abstract class Empty extends ServiceResult[Nothing] {
  def isValid: Boolean = false
  def isEmpty: Boolean = true
}

case object EmptyResult extends Empty

/**
  * Full (positive) representation of a service call result, wrapping the value
  * produced by a successful call.
  */
final case class FullResult[+A](value: A) extends ServiceResult[A] {
  def isValid: Boolean = true
  def isEmpty: Boolean = false

  override def getOrElse[B >: A](default: => B): B = value
  override def map[B](f: A => B): ServiceResult[B] = FullResult(f(value))
  override def flatMap[B](f: A => ServiceResult[B]): ServiceResult[B] = f(value)
  override def filter(p: A => Boolean): ServiceResult[A] = if (p(value)) this else EmptyResult
}

/**
  * Represents the type of failure encountered by the app
  */
object FailureType extends Enumeration {
  val Validation, Service = Value
}

/**
  * Error from a failed service call: a machine-readable code, an optional short
  * description and optional parameters.
  */
case class ErrorMessage(code: String, shortText: Option[String] = None, params: Option[Map[String, String]] = None)

/**
  * Companion to ErrorMessage
  */
object ErrorMessage {

  /** Common error where an operation is requested on an entity that does not exist. */
  val InvalidEntityId = ErrorMessage("invalid.entity.id", Some("No matching entity found"))
}

/**
  * Failed (negative) result of a service call: the failure type, the error message
  * and optionally the underlying exception. Behaves as an Empty under map/flatMap.
  */
sealed case class FailureResult(failType: FailureType.Value, message: ErrorMessage, exception: Option[Throwable] = None) extends Empty {
  type A = Nothing
  override def map[B](f: A => B): ServiceResult[B] = this
  override def flatMap[B](f: A => ServiceResult[B]): ServiceResult[B] = this
}
89 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/common/ViewBuilderActor.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.common
2 |
3 | import akka.NotUsed
4 | import akka.actor.{ Actor, PoisonPill }
5 | import akka.pattern.pipe
6 | import akka.persistence.cassandra.query.scaladsl.CassandraReadJournal
7 | import akka.persistence.query.{ EventEnvelope, PersistenceQuery }
8 | import akka.stream.Supervision
9 | import akka.stream.scaladsl.{ Flow, Sink, Source }
10 | import com.typesafe.scalalogging.LazyLogging
11 |
12 | import scala.concurrent.Future
13 | import scala.concurrent.duration._
14 | import scala.util.control.NonFatal
15 | import scala.util.{ Failure, Success }
16 |
object ViewBuilderActor {

  /** Deferred side-effecting action produced per event; executed by the projection stream. */
  type Action[O] = () => Future[O]

  // Pairs an event envelope with the action that applies it to the view store.
  case class EnvelopeAndFunction(env: PersistedEventEnvelope, action: Action[_])
  // Reply message carrying the last stored offset for this projection.
  case class LatestOffsetResult(offset: Long)
}

/** Journal event plus its offset and owning persistenceId, as consumed by view builders. */
case class PersistedEventEnvelope(offset: Long, persistenceId: String, event: PersistentEvent)
26 |
/**
  * Base actor for view builders: streams events for one persistenceId from the Cassandra
  * read journal, applies each event to the view store via actionFor, and records the
  * processed offset so the projection can resume after a restart.
  */
abstract class ViewBuilderActor extends Actor with LazyLogging {

  import ViewBuilderActor._
  import context.dispatcher
  import context.system

  /** Stream supervision: keep consuming on non-fatal errors, stop the stream on fatal ones. */
  val decider: Supervision.Decider = {
    case NonFatal(ex) =>
      logger.error(s"Got non fatal exception in ViewBuilder $identifier flow", ex)
      Supervision.Resume
    case ex =>
      logger.error(s"Got fatal exception in ViewBuilder $identifier flow, stream will be stopped", ex)
      Supervision.Stop
  }

  // Stores/fetches the last processed offset for this projection identifier.
  val resumableProjection = ResumableProjection(identifier, context.system)

  val eventsFlow: Flow[EventEnvelope, Unit, NotUsed] =
    Flow[EventEnvelope]
      .collect {
        case EventEnvelope(_, persistenceId, sequenceNr, event: PersistentEvent) =>
          PersistedEventEnvelope(sequenceNr, persistenceId, event)
        case x =>
          // Anything that is not a PersistentEvent means the journal is corrupt for this id.
          throw new RuntimeException(s"Invalid event in the journal! $x")
      }
      .map(env => EnvelopeAndFunction(env, actionFor(env)))
      // parallelism = 1 keeps strict event ordering: apply the action, then persist the offset.
      .mapAsync(parallelism = 1) { case EnvelopeAndFunction(env, f) => f.apply().map(_ => env) }
      .mapAsync(parallelism = 1)(env => resumableProjection.updateOffset(identifier, env.offset).map(_ => ()))

  val journal: CassandraReadJournal = PersistenceQuery(context.system).readJournalFor[CassandraReadJournal](CassandraReadJournal.Identifier)

  // Kick off the offset fetch as soon as the actor starts.
  self ! "start"

  /** PersistenceId whose events this builder projects. */
  def persistenceId: String

  /** Unique projection identifier used for offset bookkeeping. */
  def identifier: String

  /** Builds the view-store action for one event. */
  def actionFor(env: PersistedEventEnvelope): Action[_]

  def receive: PartialFunction[Any, Unit] = {
    case "start" =>
      resumableProjection.fetchLatestOffset(identifier).map(LatestOffsetResult).pipeTo(self)
    case LatestOffsetResult(offset) =>
      logger.info(s"Starting up view builder for entity $identifier, with persistenceId $persistenceId with offset of $offset")
      // NOTE(review): resuming from the stored offset may re-deliver the last processed event
      // unless updateOffset stores offset + 1 — confirm against ResumableProjection.
      val eventsSource: Source[EventEnvelope, NotUsed] = journal.eventsByPersistenceId(persistenceId, offset, Long.MaxValue)

      eventsSource
        .via(eventsFlow)
        .runWith(Sink.ignore)
        .onComplete {
          case Failure(err) =>
            logger.error(s"Persistence query $identifier ended with failure: ${err.getMessage}")
            self ! PoisonPill
          case Success(_) =>
            // Fixed: successful completion was logged at error level.
            logger.info(s"Persistence query $identifier ended successfully")
            context.system.scheduler.scheduleOnce(1.second, self, "start")
        }
    case x =>
      // Unexpected reply (e.g. a Status.Failure from the offset fetch): give up and stop.
      logger.error(s"Failed to obtain offset for $identifier, got this message ${x.toString}")
      self ! PoisonPill
  }

}
90 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/command/persistence/StatementSchemaMap.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.command.persistence
2 |
3 | import java.io.File
4 | import java.util.jar.JarFile
5 |
6 | import com.ingenuiq.note.command.persistence.StatementSchemaMap.HashFingerprint
7 | import com.typesafe.scalalogging.LazyLogging
8 | import org.apache.avro.Schema
9 | import org.apache.commons.codec.digest.DigestUtils
10 |
11 | import scala.jdk.CollectionConverters._
12 | import scala.io.Source
13 |
// A parsed Avro schema together with the MD5 fingerprint of its source file and its classpath resource path.
case class SchemaInfo(manifestHash: HashFingerprint, schema: Schema, filePath: String)
15 |
object StatementSchemaMap extends LazyLogging {

  /** File suffix identifying Avro schema files. */
  private val CUT_LINE: String = ".avsc"

  type HashFingerprint = String
  type FullPath        = String

  private val activeSchemasPath:         String = "avro/avsc"
  private val historyVersionSchemasPath: String = "avro/avsc-history"

  private lazy val activeSchemaFilenames:         List[String] = getFolderFiles(activeSchemasPath)
  private lazy val historyVersionSchemaFilenames: List[String] = getFolderFiles(historyVersionSchemasPath)

  /** Lists resource files under `path`, whether running from a packaged JAR or from exploded classes. */
  private def getFolderFiles(path: String): List[String] = {
    val jarFile = new File(getClass.getProtectionDomain.getCodeSource.getLocation.getPath)
    if (jarFile.isFile) readJarFile(jarFile, path) // Run with JAR file
    else readPath(path)                            // Run with IDE
  }

  /** Reads entry names under `path` from the given JAR; the JAR is always closed, even on failure. */
  private[persistence] def readJarFile(jarFile: File, path: String): List[String] = {
    val jar = new JarFile(jarFile)
    val entries =
      try jar.entries.asScala.toList // gives ALL entries in jar
      finally jar.close()            // fix: close even if enumeration throws, avoiding a file-handle leak
    entries.flatMap { entry =>
      val name = entry.getName
      if (name.startsWith(path + "/") && !name.endsWith("/"))
        Option(name.replace(s"$path/", ""))
      else None
    }.sorted
  }

  /** Recursively lists files under the classpath directory `path` (exploded-classes / IDE mode). */
  private[persistence] def readPath(path: String): List[String] = {
    val basePath = getClass.getResource("/").toURI.getPath
    val files    = new File(getClass.getResource("/" + path).toURI).listFiles.toList
    files.flatMap { entry =>
      if (entry.isFile) List(entry.getName)
      else if (entry.isDirectory)
        getFolderFiles(entry.getAbsolutePath.replace(basePath, "")).map(f => s"${entry.getName}/$f")
      else List.empty
    }.sorted
  }

  // Active (current) schemas, each fingerprinted with the MD5 of its file content.
  private lazy val currentSchemaPairs: List[SchemaInfo] =
    activeSchemaFilenames.collect {
      case filename if filename.endsWith(CUT_LINE) =>
        // `schemaText` is the whole schema file flattened to one line (was misleadingly named `inputStream`).
        val schemaText                   = Source.fromResource(activeSchemasPath + "/" + filename).getLines().mkString(" ")
        val fingerprint: HashFingerprint = getMD5FromUrl(schemaText)
        val schema:      Schema          = getSchemaFromUrl(schemaText)
        SchemaInfo(fingerprint, schema, activeSchemasPath + "/" + filename)
    }

  // Historical schema versions kept so events written by older releases can still be decoded.
  private lazy val historySchemaPairs: List[SchemaInfo] =
    historyVersionSchemaFilenames.collect {
      case filename if filename.endsWith(CUT_LINE) =>
        val schemaText                   = Source.fromResource(historyVersionSchemasPath + "/" + filename).getLines().mkString(" ")
        val fingerprint: HashFingerprint = getMD5FromUrl(schemaText)
        val schema:      Schema          = getSchemaFromUrl(schemaText)
        SchemaInfo(fingerprint, schema, historyVersionSchemasPath + "/" + filename)
    }

  final lazy val allSchemaPairs: List[SchemaInfo] = currentSchemaPairs ++ historySchemaPairs

  /** MD5 hex fingerprint of the schema file content (used as the serializer manifest hash). */
  def getMD5FromUrl(fileStream: String): HashFingerprint = DigestUtils.md5Hex(fileStream)

  /** Parses an Avro [[Schema]] from the given schema file content. */
  def getSchemaFromUrl(fileStream: String): Schema =
    new Schema.Parser().parse(Source.fromString(fileStream).getLines().mkString)

  /** Returns (active schemas, active ++ historical schemas). */
  def apply(): (List[SchemaInfo], List[SchemaInfo]) = (currentSchemaPairs, allSchemaPairs)
}
85 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/command/persistence/CommonPersistenceSerializer.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.command.persistence
2 |
3 | import java.io.{ ByteArrayInputStream, ByteArrayOutputStream }
4 |
5 | import akka.serialization.SerializerWithStringManifest
6 | import com.ingenuiq.note.command.persistence.StatementSchemaMap.HashFingerprint
7 | import com.typesafe.scalalogging.LazyLogging
8 | import org.apache.avro.Schema
9 | import org.apache.avro.io.{ DecoderFactory, EncoderFactory }
10 | import org.apache.avro.specific.{ SpecificDatumReader, SpecificDatumWriter, SpecificRecordBase }
11 |
class CommonPersistenceSerializer extends SerializerWithStringManifest with LazyLogging {

  // Separates the event class name from the schema fingerprint inside a manifest string.
  private val DISCRIMINATOR: String = "-|-"

  val (currentSchemaPairs: List[SchemaInfo], allSchemaPairs: List[SchemaInfo]) =
    StatementSchemaMap()

  def identifier = 885242445

  /** Manifest format: "<EventClassName>-|-<md5OfActiveSchema>". */
  override def manifest(obj: AnyRef): HashFingerprint = {
    val avroEventName = obj.getClass.getSimpleName
    currentSchemaPairs.find(_.schema.getName == avroEventName) match {
      case Some(schemaPair) => avroEventName + DISCRIMINATOR + schemaPair.manifestHash
      case None =>
        logger.error(s"Could not find a schema pair for $avroEventName")
        throw new NoSuchElementException(s"Could not find a schema pair for $avroEventName")
    }
  }

  override def toBinary(obj: AnyRef): Array[Byte] = obj match {
    case x: SpecificRecordBase =>
      val writer  = new SpecificDatumWriter[SpecificRecordBase](x.getSchema)
      val out     = new ByteArrayOutputStream()
      val encoder = EncoderFactory.get().binaryEncoder(out, null)
      writer.write(x, encoder)
      encoder.flush()
      out.toByteArray
    case other =>
      // Fix: the match was non-exhaustive and raised an opaque MatchError for non-Avro payloads.
      throw new IllegalArgumentException(s"Cannot serialize ${other.getClass.getName}: not a SpecificRecordBase")
  }

  override def fromBinary(bytes: Array[Byte], manifest: HashFingerprint): AnyRef = {

    // Fix: String.split treats its argument as a REGEX, and "-|-" is the alternation '-'|'-',
    // i.e. it split on every single dash. Quote the discriminator and split exactly once.
    val parts = manifest.split(java.util.regex.Pattern.quote(DISCRIMINATOR), 2)

    val decodedEvent =
      if (parts.length != 2) None
      else {
        val avroEventName = parts(0)
        val manifestHash  = parts(1)

        // Writer schema: exact fingerprint match first, then fall back to the active schema by name.
        val writerSchema: SchemaInfo =
          allSchemaPairs.find(_.manifestHash == manifestHash) match {
            case Some(schemaPair) => schemaPair
            case None =>
              currentSchemaPairs.find(_.schema.getName == avroEventName) match {
                case Some(schemaPair) =>
                  logger.trace(
                    s"Found older version of writer for schema manifestHash: ${schemaPair.manifestHash}, className: $avroEventName"
                  )
                  schemaPair
                case None =>
                  throw new NoSuchElementException(s"No history schema found for manifest hash $manifest.")
              }
          }

        // Reader schema: always the currently-active schema for this event type.
        val readerSchema: Schema = currentSchemaPairs.find(_.schema.getName == avroEventName) match {
          case Some(schemaPair) => schemaPair.schema
          case None =>
            logger.error(s"Cannot find a reader schema className: $avroEventName")
            throw new NoSuchElementException(s"No active schema pair found for event $avroEventName.")
        }

        val reader  = new SpecificDatumReader[SpecificRecordBase](writerSchema.schema, readerSchema)
        val in      = new ByteArrayInputStream(bytes)
        val decoder = DecoderFactory.get().binaryDecoder(in, null)
        val decoded = reader.read(null, decoder)

        if (writerSchema.manifestHash != manifestHash) {
          logger.trace(
            s"Event ${decoded.getClass} was decoded with older reader schema. ${writerSchema.manifestHash} instead of $manifestHash"
          )
        }
        Some(decoded)
      }

    decodedEvent.getOrElse {
      logger.error(s"Manifest $manifest couldn't be split into event name and manifest hash with discriminator $DISCRIMINATOR")
      throw new IllegalArgumentException(
        s"Manifest $manifest couldn't be split into event name and manifest hash with discriminator $DISCRIMINATOR"
      )
    }
  }

}
93 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/command/note/NoteAggregateActor.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.command.note
2 |
3 | import akka.actor.{ ActorLogging, Props }
4 | import akka.persistence.{ PersistentActor, RecoveryCompleted }
5 | import com.ingenuiq.note.command.note.NoteCommand._
6 | import com.ingenuiq.note.command.note.NoteEvent._
7 | import com.ingenuiq.note.common.{ NoteId, UserId }
8 | import com.ingenuiq.note.query.common.PersistentEventMetadata
9 |
object NoteAggregateActor {

  /** Props for the note aggregate actor; all notes share this single persistenceId. */
  def apply(): Props = Props(classOf[NoteAggregateActor], NoteAggregateActor.persistenceId)

  val persistenceId: String = "NoteAggregateActor"

}
17 |
class NoteAggregateActor(override val persistenceId: String) extends PersistentActor with ActorLogging {

  // In-memory aggregate state, rebuilt from the journal during recovery.
  var notes: Map[NoteId, Note] = Map.empty

  override def receiveRecover: Receive = {
    case e: PersistentNoteEvent => changeState(e)
    case RecoveryCompleted      =>
  }

  override def receiveCommand: Receive = {
    case CreateNote(userId, note) =>
      notes.get(note.id) match {
        case Some(_) =>
          log.info(s"Received create note with note id that already exists, ${note.id}")
          sender() ! NoteAlreadyExists
        case None =>
          // sender() is safe inside the persist callback: akka-persistence preserves it.
          persist(NoteCreated(PersistentEventMetadata(userId), note)) { e =>
            log.debug("Persisted note creation")
            changeState(e)
            sender() ! e
          }
      }

    case UpdateNote(userId, updatedNote) =>
      notes.get(updatedNote.id) match {
        case None =>
          log.info("Received update for note that doesn't exist") // fix typo: "exit" -> "exist"
          sender() ! NoteNotFound
        case Some(existingNote) =>
          calculateUpdate(userId, existingNote, updatedNote) match {
            case None => sender() ! NoteNoChangesToUpdateFound
            case Some(noteUpdated) =>
              persist(noteUpdated) { e =>
                log.debug("Persisted note updated")
                changeState(e)
                sender() ! e
              }
          }
      }

    case DeleteNote(userId, noteId) =>
      notes.get(noteId) match {
        case Some(_) =>
          persist(NoteDeleted(PersistentEventMetadata(userId), noteId)) { e =>
            log.debug("Persisted note deletion")
            changeState(e)
            sender() ! e
          }
        case None =>
          log.info("Received deletion of a note that doesn't exist") // fix typo: "exit" -> "exist"
          sender() ! NoteNotFound
      }
  }

  /** Diffs current against updated note; None when neither title nor content changed. */
  def calculateUpdate(userId: UserId, currentNote: Note, updatedNote: Note): Option[NoteUpdated] = {
    val titleUpdate =
      if (currentNote.title != updatedNote.title)
        Some(updatedNote.title)
      else None
    val contentUpdate =
      if (currentNote.content != updatedNote.content)
        Some(updatedNote.content)
      else None

    (titleUpdate, contentUpdate) match {
      case (None, None) => None
      case _            => Some(NoteUpdated(PersistentEventMetadata(userId), currentNote.id, titleUpdate, contentUpdate))
    }
  }

  /** Applies a persisted event to the in-memory state (used both live and during recovery). */
  def changeState: PartialFunction[PersistentNoteEvent, Unit] = {
    case e: NoteCreated =>
      notes += e.note.id -> e.note
    case e: NoteUpdated =>
      notes.get(e.noteId) match {
        case Some(note) =>
          val updatedNote = (e.title, e.content) match {
            case (Some(updateTitle), Some(updateContent)) => note.copy(title = updateTitle, content = updateContent)
            case (Some(updateTitle), None)                => note.copy(title = updateTitle)
            case (None, Some(updateContent))              => note.copy(content = updateContent)
            case _ =>
              log.warning(s"Received event $e but there is no updates")
              note
          }

          notes += e.noteId -> updatedNote
        case None =>
          log.warning(s"Received event $e but there is no note")
      }
    case e: NoteDeleted =>
      // Direct key removal instead of filtering the whole map.
      notes -= e.noteId

  }

}
114 |
--------------------------------------------------------------------------------
/src/test/scala/com/ingenuiq/note/query/dao/NoteRepoSpec.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.dao
2 |
3 | import com.ingenuiq.note.base.BaseRepoSpec
4 | import com.ingenuiq.note.command.note.NoteEvent._
5 | import com.ingenuiq.note.common.{ NoteId, UserId }
6 | import com.ingenuiq.note.query.common.PersistentEventMetadata
7 | import com.ingenuiq.note.query.dao.repos.NoteRepo
8 | import org.scalatest.concurrent.Eventually
9 |
10 | import scala.concurrent.ExecutionContext.Implicits.global
11 |
class NoteRepoSpec extends BaseRepoSpec with Eventually {

  // Repo under test; backed by whatever database BaseRepoSpec wires up.
  val noteRepo: NoteRepo = new NoteRepo

  val userId: UserId = UserId.generateNew

  "insert" should {
    "create new note" in {

      val noteCreated = NoteCreated(PersistentEventMetadata(userId), generateRandomNote())

      noteRepo.insertNote(noteCreated).futureValue

      val res = noteRepo.getNotes.futureValue
      res.map(_.id) should contain(noteCreated.note.id)
    }
  }

  "update" should {
    "update existing note title" in {
      val noteCreated = NoteCreated(PersistentEventMetadata(userId), generateRandomNote())

      noteRepo.insertNote(noteCreated).futureValue

      // NoteUpdated fields are Option[Option[_]]: outer Option = "did the field change",
      // inner Option = the new (possibly cleared) value.
      val noteUpdated = NoteUpdated(PersistentEventMetadata(userId), noteCreated.note.id, Some(Some("title")), None)

      noteRepo.updateNote(noteUpdated).futureValue

      val res = noteRepo.getNotes.futureValue.filter(_.id == noteCreated.note.id)
      res.head.title shouldBe Some("title")
      res.head.content shouldBe noteCreated.note.content
    }

    "update existing note content" in {
      val noteCreated = NoteCreated(PersistentEventMetadata(userId), generateRandomNote())

      noteRepo.insertNote(noteCreated).futureValue

      val noteUpdated = NoteUpdated(PersistentEventMetadata(userId), noteCreated.note.id, None, Some(Some("content")))

      noteRepo.updateNote(noteUpdated).futureValue

      val res = noteRepo.getNotes.futureValue.filter(_.id == noteCreated.note.id)
      res.head.title shouldBe noteCreated.note.title
      res.head.content shouldBe Some("content")
    }

    "update existing note title and content" in {
      val noteCreated = NoteCreated(PersistentEventMetadata(userId), generateRandomNote())

      noteRepo.insertNote(noteCreated).futureValue

      val noteUpdated = NoteUpdated(PersistentEventMetadata(userId), noteCreated.note.id, Some(Some("title")), Some(Some("content")))

      noteRepo.updateNote(noteUpdated).futureValue

      val res = noteRepo.getNotes.futureValue.filter(_.id == noteCreated.note.id)
      res.head.title shouldBe Some("title")
      res.head.content shouldBe Some("content")
    }

    "not update title if note doesn't exist" in {
      val noteUpdated = NoteUpdated(PersistentEventMetadata(userId), NoteId.generateNew, Some(Some("title")), None)

      noteRepo.updateNote(noteUpdated).futureValue

      val res = noteRepo.getNotes.futureValue
      res.map(_.id) should not contain noteUpdated.noteId
    }

  }

  "delete" should {
    "delete an existing note" in {
      val noteCreated = NoteCreated(PersistentEventMetadata(userId), generateRandomNote())
      // insertNote returns the affected row count.
      noteRepo.insertNote(noteCreated).futureValue shouldBe 1

      val res1 = noteRepo.getNotes.futureValue
      res1.map(_.id) should contain(noteCreated.note.id)

      val noteDeleted = NoteDeleted(PersistentEventMetadata(userId), noteCreated.note.id)
      noteRepo.removeNote(noteDeleted).futureValue shouldBe 1

      val res2 = noteRepo.getNotes.futureValue
      res2.map(_.id) shouldNot contain(noteCreated.note.id)
    }

    "delete nothing if note do not exists" in {
      val noteDeleted = NoteDeleted(PersistentEventMetadata(userId), NoteId.generateNew)
      noteRepo.removeNote(noteDeleted).futureValue shouldBe 0
    }
  }

  "getNotes" should {
    "get all notes" in {
      val noteCreated1 = NoteCreated(PersistentEventMetadata(userId), generateRandomNote())
      val noteCreated2 = NoteCreated(PersistentEventMetadata(userId), generateRandomNote())

      noteRepo.insertNote(noteCreated1).futureValue
      noteRepo.insertNote(noteCreated2).futureValue

      val res = noteRepo.getNotes.futureValue

      // NOTE(review): exact size 2 assumes the store is empty when this test starts,
      // i.e. BaseRepoSpec resets the database between tests — confirm against BaseRepoSpec.
      res should have size 2

    }
  }

}
121 |
--------------------------------------------------------------------------------
/src/main/scala/com/ingenuiq/note/query/dao/TableDefinitionCreator.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.query.dao
2 |
3 | import com.ingenuiq.note.query.dao.schema._
4 | import com.typesafe.scalalogging.LazyLogging
5 | import slick.jdbc.meta.MTable
6 |
7 | import scala.annotation.tailrec
8 | import scala.concurrent.duration._
9 | import scala.concurrent.{ Await, ExecutionContext, Future }
10 | import scala.util.{ Failure, Success, Try }
11 |
12 | //TODO move to tests or even better remove completely when schema gets stable. Flyway
// Creates/drops the read-side SQL schema at startup, retrying with fibonacci backoff.
class TableDefinitionCreator(implicit ec: ExecutionContext)
  extends PersistenceOffsetTableDefinition
  with NoteTableDefinition
  with NoteEventTableDefinition
  with LazyLogging {

  import com.ingenuiq.note.query.dao.common.DBComponent._
  import com.ingenuiq.note.query.dao.common.DBComponent.driver.api._

  // All read-side tables this creator manages.
  protected val tables = List(notes, noteEvents, persistenceOffsets)

  // Retry backoff multipliers: sleep time grows along the fibonacci sequence (see nextSleep).
  private val fibonacci: LazyList[Int] = 0 #:: 1 #:: (fibonacci zip fibonacci.tail).map(t => t._1 + t._2)

  /** Optionally drops and then (re)creates the read-side schema. */
  def rebuildSchema(rebuildReadside: Boolean): Unit = {
    if (rebuildReadside) {
      dropQuerySchemaWithRetry()
    }
    createQuerySchemaWithRetry()
  }

  def createQuerySchemaWithRetry(retries: Int = 10): Unit = {
    logger.info(s"Generating SQL schema. Max retries is set to $retries")
    createSchema(0, retries)
  }

  // Creates only the tables that don't already exist. Blocking Await is acceptable here:
  // this runs once during application startup, before serving traffic.
  // Exits the JVM if all retries are exhausted.
  @tailrec private def createSchema(last: Int, retries: Int): Unit =
    if (last < retries) {
      val futureCreation: Future[List[Unit]] = db
        .run(MTable.getTables)
        .flatMap { v =>
          val names = v.map(mt => mt.name.name)
          val createIfNotExist = tables.filter(table => !names.contains(table.baseTableRow.tableName)).map(_.schema.create)
          db.run(DBIO.sequence(createIfNotExist))
        }

      Try(Await.result(futureCreation, 5.seconds)) match {
        case Success(_) => logger.info("Schema successfully generated")
        case Failure(e) =>
          logger.error(s"Failed to generate query side schema", e)
          logger.info(s"Attempt n ${last + 1} for creation of tables has failed. Max retries: $retries")
          Thread.sleep(nextSleep(last))
          createSchema(last + 1, retries)
      }
    }
    else {
      logger.error(s"Failed to create the schema after $retries retries. Shutting down system..")
      System.exit(-1)
    }

  /** Backoff in milliseconds for retry attempt `index`: fibonacci(index) * 1.5s. */
  protected def nextSleep(index: Int): Int = {
    require(index <= 100, "Too high index for the fibonacci sequence")
    require(index >= 0, "Invalid negative index for the fibonacci sequence")
    fibonacci(index) * 1500
  }

  def dropQuerySchemaWithRetry(retries: Int = 10): Unit = {
    logger.info(s"Dropping SQL schema. Max retries is set to $retries")
    dropSchema(0, retries)
  }

  // Drops only this creator's tables found in the "public" schema.
  // NOTE(review): the "public" schema filter looks Postgres-specific — confirm behavior on other DBs.
  @tailrec private def dropSchema(last: Int, retries: Int): Unit =
    if (last < retries) {
      val futureGetTables = db.run(driver.defaultTables)

      //fix drop
      Try(Await.result(futureGetTables, 5.seconds)) match {
        case Success(tableList) =>
          val existingTableNames = tables.map(_.baseTableRow.tableName)
          val tableNames = tableList
            .withFilter(table => table.name.schema.contains("public") && existingTableNames.contains(table.name.name))
            .map(_.name.name)
          logger.info(s"Found ${tableNames.size} tables to drop, tables: ${tableNames.mkString("[", ", ", "]")}")
          val futureDropTables: Future[Seq[Int]] = Future.sequence(tableNames.map(table => db.run(sqlu"""DROP TABLE "#$table"""")))

          Try(Await.result(futureDropTables, 5.seconds)) match {
            case Success(_) =>
              logger.info("All tables successfully dropped")
            case Failure(e) =>
              logger.error(s"Failed to drop all tables on query side", e)
              logger.info(s"Attempt n ${last + 1} for dropping query schema has failed. Max retries: $retries")
              Thread.sleep(nextSleep(last))
              dropSchema(last + 1, retries)
          }
        case Failure(e) =>
          logger.error(s"Failed to drop query side schema", e)
          logger.info(s"Attempt n ${last + 1} for dropping query schema has failed. Max retries: $retries")
          Thread.sleep(nextSleep(last))
          dropSchema(last + 1, retries)
      }
    }
    else {
      logger.error(s"Failed to drop the schema after $retries retries. Shutting down system..")
      System.exit(-1)
    }
}
108 |
--------------------------------------------------------------------------------
/project/Dependencies.scala:
--------------------------------------------------------------------------------
1 | import sbt._
2 |
// Central sbt dependency catalog: all library versions and module groups in one place.
object Dependencies {

  private[this] object Versions {
    val akka:                     String = "2.6.14"
    val akkaHttp:                 String = "10.2.4"
    val akkaHttpCors:             String = "1.1.1"
    val akkaPersistenceInmemory:  String = "2.5.15.2"
    // val akkaPersistencePostgres: String = "0.5.0-M1"
    val akkaPersistenceCassandra: String = "1.0.5"
    val enumeratum:               String = "1.6.1"
    val logbackClassic:           String = "1.2.3"
    val playJson:                 String = "2.9.2"
    val pureConfig:               String = "0.15.0"
    val scalaLogging:             String = "3.9.3"
    val scalaTest:                String = "3.2.9"
    val avro4s:                   String = "4.0.9"
    val slick:                    String = "3.3.3"
    val postgres:                 String = "42.2.20"
    val h2:                       String = "1.4.200"
    val embeddedPostgres:         String = "1.2.6"
    val classutil:                String = "1.5.1"
    val mockitoCore:              String = "3.10.0"
    val commonsCodec:             String = "1.15"
    val slf4j:                    String = "1.7.30"
    val kamon:                    String = "2.1.18"
    val flyway:                   String = "7.9.1"
  }

  /** Everything the build needs: production plus test/IT dependencies. */
  val all: Seq[ModuleID] = ProductionDependencies.values ++ TestDependencies.values

  private[this] object ProductionDependencies {

    val values: Seq[ModuleID] =
      akka ++ avro4s ++ playJson ++ pureConfig ++ logging ++ query ++ postgres ++ enumeratum ++ kamon ++ commonCodec ++ flyway

    private lazy val akka: Seq[ModuleID] = Seq(
      "com.typesafe.akka" %% "akka-actor" % Versions.akka,
      "com.typesafe.akka" %% "akka-stream" % Versions.akka,
      "com.typesafe.akka" %% "akka-cluster" % Versions.akka,
      "com.typesafe.akka" %% "akka-cluster-sharding" % Versions.akka,
      "com.typesafe.akka" %% "akka-persistence-query" % Versions.akka,
      "com.typesafe.akka" %% "akka-http-core" % Versions.akkaHttp,
      "com.typesafe.akka" %% "akka-persistence-cassandra" % Versions.akkaPersistenceCassandra,
      // "com.swissborg" %% "akka-persistence-postgres" % Versions.akkaPersistencePostgres,
      "ch.megard" %% "akka-http-cors" % Versions.akkaHttpCors,
      "org.scala-lang.modules" %% "scala-xml" % "2.0.0"
    )

    private lazy val avro4s: Seq[ModuleID] = Seq("com.sksamuel.avro4s" %% "avro4s-core" % Versions.avro4s)

    // log4j-over-slf4j routes legacy log4j calls into the slf4j/logback pipeline.
    private lazy val logging: Seq[ModuleID] = Seq(
      "ch.qos.logback" % "logback-classic" % Versions.logbackClassic,
      "com.typesafe.scala-logging" %% "scala-logging" % Versions.scalaLogging,
      "org.slf4j" % "log4j-over-slf4j" % Versions.slf4j,
      "com.typesafe.akka" %% "akka-slf4j" % Versions.akka
    )

    private lazy val pureConfig: Seq[ModuleID] = Seq("com.github.pureconfig" %% "pureconfig" % Versions.pureConfig)

    private lazy val flyway: Seq[ModuleID] = Seq("org.flywaydb" % "flyway-core" % Versions.flyway)

    // Read-side persistence: slick plus the HikariCP connection pool.
    private lazy val query: Seq[ModuleID] =
      Seq("com.typesafe.slick" %% "slick" % Versions.slick, "com.typesafe.slick" %% "slick-hikaricp" % Versions.slick)

    private lazy val enumeratum: Seq[ModuleID] = Seq("com.beachape" %% "enumeratum" % Versions.enumeratum)

    private lazy val postgres: Seq[ModuleID] = Seq("org.postgresql" % "postgresql" % Versions.postgres)

    private lazy val playJson: Seq[ModuleID] = Seq("com.typesafe.play" %% "play-json" % Versions.playJson)

    private lazy val kamon: Seq[ModuleID] = Seq("io.kamon" %% "kamon-bundle" % Versions.kamon)

    private lazy val commonCodec: Seq[ModuleID] = Seq("commons-codec" % "commons-codec" % Versions.commonsCodec)
  }

  private[this] object TestDependencies {

    // Scoped to both the test and integration-test configurations.
    private val TestAndITs = "test;it"

    lazy val values: Seq[ModuleID] =
      (akkaTest ++ scalaTest ++ otherDepsTest ++ embeddedCassandra ++ h2 ++ slickTest).map(_ % TestAndITs)

    private lazy val akkaTest: Seq[ModuleID] = Seq(
      "com.typesafe.akka" %% "akka-testkit" % Versions.akka,
      "com.typesafe.akka" %% "akka-stream-testkit" % Versions.akka,
      "com.typesafe.akka" %% "akka-http-testkit" % Versions.akkaHttp,
      "com.github.dnvriend" %% "akka-persistence-inmemory" % Versions.akkaPersistenceInmemory
    )

    private lazy val scalaTest: Seq[ModuleID] = Seq("org.scalatest" %% "scalatest" % Versions.scalaTest)

    private lazy val embeddedCassandra: Seq[ModuleID] = Seq(
      "com.typesafe.akka" %% "akka-persistence-cassandra-launcher" % Versions.akkaPersistenceCassandra
    )

    private lazy val h2: Seq[ModuleID] = Seq("com.h2database" % "h2" % Versions.h2)

    // private lazy val postgres: Seq[ModuleID] = Seq("io.zonky.test" % "embedded-postgres" % Versions.embeddedPostgres)

    private lazy val otherDepsTest: Seq[ModuleID] =
      Seq("org.mockito" % "mockito-core" % Versions.mockitoCore, "org.clapper" %% "classutil" % Versions.classutil)

    private lazy val slickTest: Seq[ModuleID] = Seq("com.typesafe.slick" %% "slick-testkit" % Versions.slick)

  }
}
109 |
--------------------------------------------------------------------------------
/src/it/scala/com/ingenuiq/note/integration/NoteIntegrationSpec.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.integration
2 |
3 | import java.util.UUID
4 |
5 | import akka.http.scaladsl.model.StatusCodes._
6 | import akka.http.scaladsl.unmarshalling.Unmarshaller._
7 | import com.ingenuiq.note.http.command.CommandResponse._
8 | import com.ingenuiq.note.http.query.QueryResponse._
9 | import com.ingenuiq.note.integration.base.IntegrationBase
10 | import com.ingenuiq.note.integration.utils.{ PlayJsonSupportReaders, PlayJsonSupportWriters }
11 | import com.ingenuiq.note.utils.NoteModelsHelper
12 |
// End-to-end HTTP tests: drive the command routes, then poll the query side with
// `eventually` until the asynchronous projection catches up.
class NoteIntegrationSpec extends IntegrationBase with NoteModelsHelper with PlayJsonSupportWriters with PlayJsonSupportReaders {

  "note get all" should {
    "Submit new note and it should appear in the view" in {
      val newNote = generateRandomNotePayload()

      Post(s"/$NotePath", newNote) ~> baseTestRoute ~> check {
        status should be(Created)

        eventually {
          Get(s"/$QueryPath/$NotePath") ~> baseTestRoute ~> check {
            status should be(OK)
            val notes = responseAs[NotesResponse].notes
            // NOTE(review): exact size 1 assumes an empty read side when this test runs,
            // i.e. IntegrationBase isolates state per test — confirm against IntegrationBase.
            notes should have size 1
            notes.head.content shouldBe newNote.content
            notes.head.title shouldBe newNote.title
          }
        }
      }
    }
  }

  "GET /note/{uuid}" should {

    "get existing note" in {
      val newNote = generateRandomNotePayload()

      Post(s"/$NotePath", newNote) ~> baseTestRoute ~> check {
        status should be(Created)
        val noteId = responseAs[NoteCreationResponse].noteId

        eventually {
          Get(s"/$QueryPath/$NotePath/$noteId") ~> baseTestRoute ~> check {
            val result = responseAs[NoteResponse]
            result.content shouldBe newNote.content
            result.title shouldBe newNote.title
            result.id shouldBe noteId
          }
        }
      }
    }

    "get 404 for non-exiting note id" in {
      Get(s"/$QueryPath/$NotePath/${UUID.randomUUID().toString}") ~> baseTestRoute ~> check {
        status should be(NotFound)
      }
    }

    "get 400 for invalid note id" in {
      Get(s"/$QueryPath/$NotePath/blabla") ~> baseTestRoute ~> check {
        status should be(BadRequest)
      }
    }
  }

  "PUT /note/{uuid}" should {

    "update an existing note" in {
      val newNote = generateRandomNotePayload()

      Post(s"/$NotePath", newNote) ~> baseTestRoute ~> check {
        status should be(Created)
        val noteId = responseAs[NoteCreationResponse].noteId

        // copy(Some("new title")) updates the first field of the payload (the title).
        Put(s"/$NotePath/$noteId", newNote.copy(Some("new title"))) ~> baseTestRoute ~> check {
          status shouldBe OK
          responseAs[NoteUpdateResponse].noteId shouldBe noteId
        }

        eventually {
          Get(s"/$QueryPath/$NotePath/$noteId") ~> baseTestRoute ~> check {
            val result = responseAs[NoteResponse]
            result.content shouldBe newNote.content
            result.title shouldBe Some("new title")
            result.id shouldBe noteId
          }
        }

      }
    }

    "get 400 for malformed note id" in {
      val newNote = generateRandomNotePayload()
      Put(s"/$NotePath/wrongUuid", newNote) ~> baseTestRoute ~> check {
        status shouldBe BadRequest
      }
    }

    "get 404 for non-existing note id" in {
      val newNote = generateRandomNotePayload()
      Put(s"/$NotePath/${UUID.randomUUID().toString}", newNote) ~> baseTestRoute ~> check {
        status shouldBe NotFound
      }
    }
  }

  "DELETE /note/{uuid}" should {

    "delete an existing note" in {
      val newNote = generateRandomNotePayload()

      Post(s"/$NotePath", newNote) ~> baseTestRoute ~> check {
        status should be(Created)
        val noteId = responseAs[NoteCreationResponse].noteId

        Delete(s"/$NotePath/$noteId") ~> baseTestRoute ~> check {
          status shouldBe OK
          responseAs[NoteDeletionResponse].noteId shouldBe noteId
        }
      }
    }

    "get 400 for malformed note id" in {
      Delete(s"/$NotePath/wrongUuid") ~> baseTestRoute ~> check {
        status shouldBe BadRequest
      }
    }

    "get 404 for non-existing note id" in {
      Delete(s"/$NotePath/${UUID.randomUUID().toString}") ~> baseTestRoute ~> check {
        status shouldBe NotFound
      }
    }
  }

  "GET /note/event" should {

    "return created events" in {
      val newNote = generateRandomNotePayload()

      Post(s"/$NotePath", newNote) ~> baseTestRoute ~> check {
        status should be(Created)
        val noteId = responseAs[NoteCreationResponse].noteId

        Put(s"/$NotePath/$noteId", newNote.copy(Some("new title"))) ~> baseTestRoute ~> check {
          status shouldBe OK
          responseAs[NoteUpdateResponse].noteId shouldBe noteId
        }

        eventually {
          Get(s"/$QueryPath/$NotePath/$EventPath") ~> baseTestRoute ~> check {
            val result = responseAs[NoteEventsResponse]
            // One NoteCreated plus one NoteUpdated for this noteId.
            result.noteEvents.filter(_.noteId == noteId) should have size 2
          }
        }
      }
    }
  }

}
162 | }
163 |
--------------------------------------------------------------------------------
/src/test/scala/com/ingenuiq/note/command/note/NoteAggregateActorSpec.scala:
--------------------------------------------------------------------------------
1 | package com.ingenuiq.note.command.note
2 |
3 | import java.util.UUID
4 |
5 | import akka.actor.{ ActorRef, Props }
6 | import akka.persistence.PersistentActor
7 | import com.ingenuiq.note.base.GetInternalStateActor.{ ActorPartialFunctions, GetInternalState, GetPartialFunctions }
8 | import com.ingenuiq.note.base.RestartableActor
9 | import com.ingenuiq.note.base.RestartableActor.RestartActor
10 | import com.ingenuiq.note.command.InMemoryPersistenceBaseTrait
11 | import com.ingenuiq.note.command.note.NoteCommand._
12 | import com.ingenuiq.note.command.note.NoteEvent._
13 | import com.ingenuiq.note.common._
14 | import com.ingenuiq.note.utils.{ ClassUtils, NoteModelsHelper }
15 | import org.mockito.Mockito.mock
16 |
class NoteAggregateActorSpec extends InMemoryPersistenceBaseTrait with NoteModelsHelper with ClassUtils {

  /** Props for the aggregate under test, mixed with restart and state-introspection helpers.
    * A fresh random persistenceId per call keeps each test's journal isolated.
    */
  def productAggregateActorProps(persistenceId: String = UUID.randomUUID().toString) =
    Props(new NoteAggregateActor(persistenceId) with RestartableActor with GetNoteInternalStateActor)

  val userId: UserId = UserId.generateNew

  // Shared fixture: one random note reused across tests (each test spins up its own actor).
  val randomNote: Note = generateRandomNote()

  "CreateNote" should {
    "create new note" in {
      val aggregate: ActorRef = system.actorOf(productAggregateActorProps())

      aggregate ! CreateNote(userId, randomNote)
      expectMsgClass(classOf[NoteCreated])

      aggregate ! GetInternalState
      expectMsg(Map(randomNote.id -> randomNote))

      // Restart forces recovery from the journal; in-memory state must be rebuilt identically.
      aggregate ! RestartActor

      aggregate ! GetInternalState
      expectMsg(Map(randomNote.id -> randomNote))
    }

    // Fixed description: the error is returned because the note ALREADY exists.
    "get error if note already exists" in {
      val aggregate: ActorRef = system.actorOf(productAggregateActorProps())

      aggregate ! CreateNote(userId, randomNote)
      expectMsgClass(classOf[NoteCreated])

      aggregate ! CreateNote(userId, randomNote)
      expectMsg(NoteAlreadyExists)

      // The duplicate create must not have altered state.
      aggregate ! GetInternalState
      expectMsg(Map(randomNote.id -> randomNote))

      aggregate ! RestartActor

      aggregate ! GetInternalState
      expectMsg(Map(randomNote.id -> randomNote))
    }
  }

  "UpdateNote" should {
    // Fixed typo in description: "exit" -> "exist".
    "get error if note doesn't exist" in {
      val aggregate: ActorRef = system.actorOf(productAggregateActorProps())

      aggregate ! UpdateNote(userId, randomNote)
      expectMsg(NoteNotFound)
    }

    "update note title" in {
      val aggregate: ActorRef = system.actorOf(productAggregateActorProps())

      aggregate ! CreateNote(userId, randomNote)
      expectMsgClass(classOf[NoteCreated])

      aggregate ! UpdateNote(userId, randomNote.copy(title = Some("new title")))
      expectMsgClass(classOf[NoteUpdated])

      aggregate ! GetInternalState
      expectMsg(Map(randomNote.id -> randomNote.copy(title = Some("new title"))))

      // Updated state must survive a journal replay.
      aggregate ! RestartActor

      aggregate ! GetInternalState
      expectMsg(Map(randomNote.id -> randomNote.copy(title = Some("new title"))))
    }

    "update note content" in {
      val aggregate: ActorRef = system.actorOf(productAggregateActorProps())

      aggregate ! CreateNote(userId, randomNote)
      expectMsgClass(classOf[NoteCreated])

      aggregate ! UpdateNote(userId, randomNote.copy(content = Some("new content")))
      expectMsgClass(classOf[NoteUpdated])

      aggregate ! GetInternalState
      expectMsg(Map(randomNote.id -> randomNote.copy(content = Some("new content"))))

      aggregate ! RestartActor

      aggregate ! GetInternalState
      expectMsg(Map(randomNote.id -> randomNote.copy(content = Some("new content"))))
    }

    "update note title and content" in {
      val aggregate: ActorRef = system.actorOf(productAggregateActorProps())

      aggregate ! CreateNote(userId, randomNote)
      expectMsgClass(classOf[NoteCreated])

      aggregate ! UpdateNote(userId, randomNote.copy(title = Some("new title"), content = Some("new content")))
      expectMsgClass(classOf[NoteUpdated])

      aggregate ! GetInternalState
      expectMsg(Map(randomNote.id -> Note(randomNote.id, Some("new title"), Some("new content"))))

      aggregate ! RestartActor

      aggregate ! GetInternalState
      expectMsg(Map(randomNote.id -> Note(randomNote.id, Some("new title"), Some("new content"))))
    }

  }

  "DeleteNote" should {
    // Fixed typo in description: "exit" -> "exist".
    "get error if note doesn't exist" in {
      val aggregate: ActorRef = system.actorOf(productAggregateActorProps())

      aggregate ! DeleteNote(userId, randomNote.id)
      expectMsg(NoteNotFound)
    }

    "delete existing note" in {
      val aggregate: ActorRef = system.actorOf(productAggregateActorProps())

      aggregate ! CreateNote(userId, randomNote)
      expectMsgClass(classOf[NoteCreated])

      aggregate ! DeleteNote(userId, randomNote.id)
      expectMsgClass(classOf[NoteDeleted])

      aggregate ! GetInternalState
      expectMsg(Map.empty)

      // Deletion must also hold after recovery from the journal.
      aggregate ! RestartActor

      aggregate ! GetInternalState
      expectMsg(Map.empty)
    }
  }

  // Reflectively discovers every command/event implementation on the classpath and
  // verifies the actor's receive/recover partial functions can match each of them,
  // guarding against a new message type being added without a handler.
  "check handling of all incoming commands and recovery events" should {
    import org.scalatest.prop.TableDrivenPropertyChecks._
    import org.scalatest.prop.TableFor1

    val commands: TableFor1[Class[_]] =
      Table("Command classes", implementationsOf(classOf[NoteCommand], Option("target")).map(toClass): _*)
    val events: TableFor1[Class[_]] =
      Table("Persistent classes", implementationsOf(classOf[PersistentNoteEvent], Option("target")).map(toClass): _*)

    system.actorOf(productAggregateActorProps()) ! GetPartialFunctions
    val pfs: ActorPartialFunctions = expectMsgType[ActorPartialFunctions]

    forAll(commands) { clazz: Class[_] =>
      s"""be able to match $clazz in "receive" function""" in {
        pfs.command.isDefinedAt(mock(clazz)) should be(true)
      }
    }

    forAll(events) { clazz: Class[_] =>
      // Could be useful to enrich the TableFor1 with the other possible events that an Actor can handle,
      // e.g. SnapshotOffer, RecoveryCompleted, etc.
      s"""be able to match $clazz in "receive" function""" in {
        pfs.recover.isDefinedAt(mock(clazz)) should be(true)
      }
    }
  }
}
179 |
/** Stackable mixin that lets tests peek at a note aggregate's internals.
  *
  * Intercepts GetInternalState (replies with the current notes map) and
  * GetPartialFunctions (replies with the actor's command/recover handlers),
  * delegating every other message to the underlying actor's receiveCommand.
  */
trait GetNoteInternalStateActor extends PersistentActor {

  // Supplied by the concrete aggregate this trait is mixed into.
  var notes: Map[NoteId, Note]

  private def introspectionHandler: Receive = {
    case GetInternalState    => sender() ! notes
    case GetPartialFunctions => sender() ! ActorPartialFunctions(receiveCommand, receiveRecover)
  }

  abstract override def receiveCommand: Receive =
    introspectionHandler.orElse(super.receiveCommand)
}
192 |
--------------------------------------------------------------------------------