├── .github ├── release-drafter.yml └── workflows │ ├── ci.yml │ ├── release-drafter.yml │ └── release.yml ├── .gitignore ├── .scalafmt.conf ├── Migration-6_1-8.md ├── Migration-8-9.md ├── README.md ├── api ├── build.gradle └── src │ └── main │ └── scala │ └── com │ └── avast │ └── clients │ └── rabbitmq │ └── api │ ├── CorrelationIdStrategy.scala │ ├── Delivery.scala │ ├── DeliveryResult.scala │ ├── MessageProperties.scala │ ├── RabbitMQConsumer.scala │ ├── RabbitMQProducer.scala │ ├── RabbitMQPullConsumer.scala │ ├── RabbitMQStreamingConsumer.scala │ └── exceptions.scala ├── build.gradle ├── core ├── build.gradle ├── docker-compose.override.yml ├── docker-compose.yml └── src │ ├── main │ └── scala │ │ └── com │ │ └── avast │ │ └── clients │ │ └── rabbitmq │ │ ├── ChannelListener.scala │ │ ├── ConfirmedDeliveryResult.scala │ │ ├── ConnectionListener.scala │ │ ├── ConsumerBase.scala │ │ ├── ConsumerChannelOps.scala │ │ ├── ConsumerListener.scala │ │ ├── ConsumerWithCallbackBase.scala │ │ ├── DefaultRabbitMQClientFactory.scala │ │ ├── DefaultRabbitMQConnection.scala │ │ ├── DefaultRabbitMQConsumer.scala │ │ ├── DefaultRabbitMQPullConsumer.scala │ │ ├── DefaultRabbitMQStreamingConsumer.scala │ │ ├── DeliveryContext.scala │ │ ├── ExponentialDelay.scala │ │ ├── JavaConverters.scala │ │ ├── MultiFormatConsumer.scala │ │ ├── PoisonedMessageHandler.scala │ │ ├── RabbitMQConnection.scala │ │ ├── RecoveryDelayHandlers.scala │ │ ├── RepublishStrategy.scala │ │ ├── configuration.scala │ │ ├── converters.scala │ │ ├── logging │ │ ├── ImplicitContextLogger.scala │ │ ├── LoggingContext.scala │ │ └── package.scala │ │ ├── publisher │ │ ├── BaseRabbitMQProducer.scala │ │ ├── DefaultRabbitMQProducer.scala │ │ └── PublishConfirmsRabbitMQProducer.scala │ │ └── rabbitmq.scala │ └── test │ ├── proto │ └── ExampleEvents.proto │ ├── resources │ ├── application.conf │ └── logback.xml │ └── scala │ └── com │ └── avast │ └── clients │ └── rabbitmq │ ├── BasicLiveTest.scala │ ├── ByteUtils.java │ ├── DefaultRabbitMQConsumerTest.scala │ ├── DefaultRabbitMQProducerTest.scala │ ├── DefaultRabbitMQPullConsumerTest.scala │ ├── MultiFormatConsumerTest.scala │ ├── PackageTest.scala │ ├── PoisonedMessageHandlerLiveTest.scala │ ├── PoisonedMessageHandlerTest.scala │ ├── PublisherConfirmsRabbitMQProducerTest.scala │ ├── RecoveryDelayHandlersTest.scala │ ├── RepublishStrategyTest.scala │ ├── StreamingConsumerLiveTest.scala │ ├── TestBase.scala │ ├── TestDeliveryContext.scala │ ├── TestHelper.scala │ └── TestMonitor.scala ├── extras-circe ├── README.md ├── build.gradle └── src │ └── main │ └── scala │ └── com │ └── avast │ └── clients │ └── rabbitmq │ └── extras │ └── format │ ├── JsonDeliveryConverter.scala │ └── JsonProductConverter.scala ├── extras-protobuf ├── README.md ├── build.gradle └── src │ ├── main │ └── scala │ │ └── com │ │ └── avast │ │ └── clients │ │ └── rabbitmq │ │ └── extras │ │ └── format │ │ ├── ProtobufAsBinaryDeliveryConverter.scala │ │ ├── ProtobufAsBinaryProductConverter.scala │ │ ├── ProtobufAsJsonDeliveryConverter.scala │ │ ├── ProtobufAsJsonProductConverter.scala │ │ └── ProtobufConsumer.scala │ └── test │ ├── proto │ └── ExampleEvents.proto │ └── scala │ └── com │ └── avast │ └── clients │ └── rabbitmq │ └── extras │ └── format │ ├── ProtobufConsumerTest.scala │ └── ProtobufConvertersTest.scala ├── extras-scalapb ├── README.md ├── build.gradle └── src │ ├── main │ └── scala │ │ └── com │ │ └── avast │ │ └── clients │ │ └── rabbitmq │ │ └── extras │ │ └── format │ │ ├── 
ScalaPBAsBinaryDeliveryConverter.scala │ │ ├── ScalaPBAsBinaryProductConverter.scala │ │ ├── ScalaPBAsJsonDeliveryConverter.scala │ │ ├── ScalaPBAsJsonProductConverter.scala │ │ └── ScalaPBConsumer.scala │ └── test │ ├── proto │ └── ExampleEvents.proto │ └── scala │ └── com │ └── avast │ └── clients │ └── rabbitmq │ └── extras │ └── format │ ├── ScalaPBConsumerTest.scala │ └── ScalaPBConvertersTest.scala ├── extras ├── README.md ├── build.gradle └── src │ ├── main │ └── scala │ │ └── com │ │ └── avast │ │ └── clients │ │ └── rabbitmq │ │ └── extras │ │ └── HealthCheck.scala │ └── test │ ├── resources │ ├── application.conf │ └── logback.xml │ └── scala │ └── com │ └── avast │ └── clients │ └── rabbitmq │ └── extras │ ├── TestBase.scala │ ├── TestHelper.scala │ └── TestMonitor.scala ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── pureconfig ├── README.md ├── build.gradle └── src │ └── main │ └── scala │ └── com │ └── avast │ └── clients │ └── rabbitmq │ └── pureconfig │ ├── ConfigRabbitMQConnection.scala │ ├── PureconfigImplicits.scala │ └── pureconfig.scala ├── recovery-testing ├── build.gradle ├── docker-compose.yml ├── gradle.properties └── src │ └── test │ ├── resources │ └── reference.conf │ └── scala │ └── testing │ └── RecoveryTest.scala ├── renovate.json ├── scalastyle_config.xml └── settings.gradle /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | template: | 2 | ## Pull Requests 3 | 4 | $CHANGES 5 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: 3 | pull_request: 4 | jobs: 5 | ci: 6 | runs-on: ubuntu-latest 7 | strategy: 8 | fail-fast: false 9 | matrix: 10 | scala-version: [ 2.12.13, 2.13.8 ] 11 | steps: 12 | - uses: actions/checkout@v4.2.2 13 | with: 14 | fetch-depth: 100 15 | - name: Fetch tags 16 | run: git fetch --depth=100 origin +refs/tags/*:refs/tags/* 17 | - uses: olafurpg/setup-scala@v14 18 | with: 19 | java-version: adopt@1.11 20 | - name: gradle test ${{ github.ref }} 21 | run: ./gradlew check 22 | env: 23 | SCALA_VERSION: ${{ matrix.scala-version }} 24 | -------------------------------------------------------------------------------- /.github/workflows/release-drafter.yml: -------------------------------------------------------------------------------- 1 | name: Release Drafter 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | 8 | jobs: 9 | update_release_draft: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: release-drafter/release-drafter@v6 13 | env: 14 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 15 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | on: 3 | release: 4 | types: [ published ] 5 | jobs: 6 | release: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | matrix: 10 | scala-version: [ 2.12.13, 2.13.8 ] 11 | steps: 12 | - uses: actions/checkout@v4.2.2 13 | with: 14 | fetch-depth: 100 15 | - name: Fetch tags 16 | run: git fetch --depth=100 origin +refs/tags/*:refs/tags/* 17 | - uses: olafurpg/setup-scala@v14 18 | with: 19 | java-version: adopt@1.11 20 | - name: Sonatype publishing ${{ github.ref }} 21 | run: ./gradlew publishToSonatype closeAndReleaseSonatypeStagingRepository -Pversion="${{ 
github.event.release.tag_name }}" --info 22 | env: 23 | SCALA_VERSION: ${{ matrix.scala-version }} 24 | SIGNING_KEY: ${{ secrets.SIGNING_KEY }} 25 | SIGNING_PASSWORD: ${{ secrets.SIGNING_PASSWORD }} 26 | ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USERNAME }} 27 | ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }} 28 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | tmp 3 | .idea/ 4 | *.iml 5 | *.uml 6 | *.log 7 | *.png 8 | *.txt 9 | *.pem 10 | *.xlsx 11 | Test.java 12 | .gradle 13 | build/ 14 | out 15 | -------------------------------------------------------------------------------- /.scalafmt.conf: -------------------------------------------------------------------------------- 1 | maxColumn = 140 2 | assumeStandardLibraryStripMargin = true 3 | rewrite.rules = [RedundantParens, SortImports, PreferCurlyFors] 4 | align.tokens = [] 5 | lineEndings = preserve -------------------------------------------------------------------------------- /Migration-6_1-8.md: -------------------------------------------------------------------------------- 1 | ## Migration from 6.1.x to 8.0.x 2 | 3 | 4 | The Java API was **COMPLETELY REMOVED** and won't be supported anymore. Check out version 7.x for the last version supporting the Java API. 5 | 6 | --- 7 | 8 | 1. The API is fully effectful. 9 | * Init methods return [`cats.effect.Resource`](https://typelevel.org/cats-effect/datatypes/resource.html) for easy resource management. 10 | * Declaration methods return `F[Unit]`. 11 | 1. The library can be configured with case classes - this is the default way. See [`RabbitMQConnection.make`](core/src/main/scala/com/avast/clients/rabbitmq/RabbitMQConnection.scala#L73). 12 | 1. Configuration from [Lightbend `Config`](https://github.com/lightbend/config) was separated into the [`pureconfig`](pureconfig) module. Please read 13 | its docs. Its behavior remained backward compatible, with these exceptions: 14 | * The SSL context is now passed explicitly and is **ignored** in `Config` (see more below). 15 | * **You have to use `import com.avast.clients.rabbitmq.pureconfig._` to be able to use the `fromConfig` method!** 16 | * **The required structure of the config has changed** - all consumers must be in the `consumers` block, producers in the `producers` block and additional 17 | declarations in the `declarations` block. See more info in the [`pureconfig module docs`](pureconfig). 18 | * The config must not contain anything other than the configuration of the client. The presence of unknown keys will cause configuration 19 | parsing to fail. 20 | 1. SSL is now configured explicitly in the `make`/`fromConfig` method by passing `Option[SSLContext]`. Use `Some(SSLContext.getDefault)` to enable 21 | SSL without a custom (key|trust)store; it's now disabled by default (`None`)! See the sketch at the end of this document. 22 | 1. You are now able to specify a network recovery strategy. 23 | 1. You are now able to specify the timeout log level. 24 | 1. Additional declarations: `bindQueue` now has a more consistent API. The only change is that `bindArguments` was renamed to `arguments`. 25 | 1. You are now able to configure a custom exchange for republishing. 26 | 27 | --- 28 | 29 | The client now uses circe 0.13 (only the [extras-circe module](extras-circe)), cactus 0.17 (only the [extras-cactus module](extras-cactus)), 30 | cats 2+ and cats-effect 2+ - it doesn't use Monix anymore!
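The sketch below is not part of the original migration guide; it only illustrates the points above - `Resource`-based initialization, the mandatory pureconfig import for `fromConfig`, and the explicitly passed `Option[SSLContext]`. The exact `fromConfig` parameter list is approximated and the surrounding wiring (the `Config` value, the `Blocker`) is assumed; consult `RabbitMQConnection.scala` and the [pureconfig module docs](pureconfig) for the authoritative API.

```scala
import cats.effect.{Blocker, ContextShift, IO, Resource, Timer}
import com.avast.clients.rabbitmq.RabbitMQConnection
import com.avast.clients.rabbitmq.pureconfig._ // without this import, `fromConfig` is not available
import com.typesafe.config.Config
import javax.net.ssl.SSLContext

// Hypothetical wiring: `config` must contain nothing but the client configuration
// (`consumers`/`producers`/`declarations` blocks); `blocker` is a pool for blocking I/O.
def connect(config: Config, blocker: Blocker)(implicit cs: ContextShift[IO],
                                              timer: Timer[IO]): Resource[IO, RabbitMQConnection[IO]] =
  RabbitMQConnection.fromConfig[IO](
    config,
    blocker,
    Some(SSLContext.getDefault) // SSL is explicit now; the default `None` keeps it disabled (parameter position approximated)
  ) // signature approximated - see RabbitMQConnection.scala
```

The returned `Resource` manages the connection lifecycle; consumers and producers created from it are `Resource`s as well.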
31 | -------------------------------------------------------------------------------- /Migration-8-9.md: -------------------------------------------------------------------------------- 1 | ## Migration from 8.x.x to 9.0.x 2 | 3 | In general, there are MANY changes under the hood, but only a few changes in the API. The most relevant changes are: 4 | 5 | 1. The `extras-cactus` module was removed. 6 | 2. `StreamedDelivery` now has only the new `handleWith` method, steering the user towards correct behavior and enabling much more efficient task 7 | processing and cancellation. 8 | 3. The client now explicitly supports CorrelationId handling. 9 | The producer has an `implicit cidStrategy: CorrelationIdStrategy` parameter (with `CorrelationIdStrategy.FromPropertiesOrRandomNew` as the 10 | default) which enables you to configure how the CorrelationId should be derived/generated. You can implement your own _strategy_ to suit 11 | your needs. 12 | You can also easily get the CorrelationId from message properties on the consumer side. The CID (on the consumer side) is taken from both 13 | the AMQP properties and the `X-Correlation-Id` header (where the property has precedence and the header is just a fallback). It's propagated to 14 | logs via the logging context (not put into the message directly). See the sketch at the end of this document. 15 | 4. [`PoisonedMessageHandler`](README.md#poisoned-message-handler) is now a part of the core. It no longer wraps your _action_; instead, 16 | there exist only a few options, all describable in configuration. 17 | 5. `DeliveryResult.Republish` now has a `countAsPoisoned` parameter (defaults to `true`) determining whether the PMH (if configured) should 18 | count the attempt or not. 19 | 6. Logging in general was somewhat _tuned_. 20 | 7. The client uses the Cats-Effect version of `Monitor` from the [Metrics library](https://github.com/avast/metrics). 21 | 22 | As there are only very minor API changes between versions 8 and 9, version 8 won't be supported anymore (unless necessary). 23 | 24 | Some additional notes, mainly for library developers: 25 | 26 | 1. The library is still cross-compiled to Scala 2.12 and 2.13. 27 | 2. All consumers are now free of boilerplate code - the new `ConsumerBase` class is a dependency of all consumers. 28 | 3. `DefaultRabbitMQClientFactory` was redesigned and is now a class. 29 | 4. "Live" tests were split into `BasicLiveTest`, `StreamingConsumerLiveTest` and `PoisonedMessageHandlerLiveTest`. 30 | 31 | --- 32 | 33 | The client now (9.0.0) uses circe 0.14.1 (only the [extras-circe module](extras-circe)), pureconfig 0.17.1 ( 34 | only the [pureconfig module](pureconfig)), still cats 2 and cats-effect 2. 
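The API types referenced above appear later in this dump (`RabbitMQProducer.scala`, `CorrelationIdStrategy.scala`, `RabbitMQStreamingConsumer.scala` and `DeliveryResult.scala` under `api/`). The sketch below is not part of the original guide; it only shows how points 2, 3 and 5 fit together, with the wiring that produces the `producer`/`consumer` instances assumed rather than shown.

```scala
import cats.effect.IO
import com.avast.clients.rabbitmq.api._

// Point 3: the producer's `send` takes an implicit CorrelationIdStrategy;
// FromPropertiesOrRandomNew(properties) is the default, an explicit strategy overrides it.
def publish(producer: RabbitMQProducer[IO, String]): IO[Unit] = {
  implicit val cidStrategy: CorrelationIdStrategy = CorrelationIdStrategy.Fixed("my-corr-id")
  producer.send("myRoutingKey", "hello")
}

// Points 2 and 5: streamed deliveries are handled via `handleWith`, and Republish
// can opt out of the PoisonedMessageHandler counting via `countAsPoisoned = false`.
def consume(consumer: RabbitMQStreamingConsumer[IO, String]): fs2.Stream[IO, Unit] =
  consumer.deliveryStream.evalMap { streamedDelivery =>
    streamedDelivery.handleWith {
      case Delivery.Ok(body, _, _) if body.nonEmpty => IO.pure(DeliveryResult.Ack)
      case Delivery.Ok(_, _, _)                     => IO.pure(DeliveryResult.Republish(countAsPoisoned = false))
      case _: Delivery.MalformedContent             => IO.pure(DeliveryResult.Reject)
    }
  }
```

With no explicit strategy in scope, `FromPropertiesOrRandomNew(properties)` is used, so an incoming CorrelationId is kept and a random one is generated only as a last resort.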
35 | -------------------------------------------------------------------------------- /api/build.gradle: -------------------------------------------------------------------------------- 1 | archivesBaseName = "rabbitmq-client-api_$scalaVersion" 2 | 3 | dependencies { 4 | api "com.avast.bytes:bytes-core:${bytesVersion}" 5 | api "co.fs2:fs2-core_$scalaVersion:$fs2Version" 6 | } 7 | -------------------------------------------------------------------------------- /api/src/main/scala/com/avast/clients/rabbitmq/api/CorrelationIdStrategy.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.api 2 | 3 | import scala.annotation.implicitNotFound 4 | import scala.util.Random 5 | 6 | @implicitNotFound("You have to specify the CorrelationIdStrategy to proceed") 7 | trait CorrelationIdStrategy { 8 | def toCIDValue: String 9 | } 10 | 11 | object CorrelationIdStrategy { 12 | final val CorrelationIdKeyName: String = "X-Correlation-Id" 13 | 14 | /** 15 | * Generate a new, random CorrelationId. 16 | * 17 | * Example: 18 | * {{{ 19 | * implicit val cidStrategy: CorrelationIdStrategy = CorrelationIdStrategy.RandomNew 20 | * }}} 21 | */ 22 | case object RandomNew extends CorrelationIdStrategy { 23 | override def toCIDValue: String = randomValue 24 | } 25 | 26 | /** 27 | * Always provide same value. 28 | * 29 | * Example: 30 | * {{{ 31 | * implicit val cidStrategy: CorrelationIdStrategy = CorrelationIdStrategy.Fixed("my-corr-id") 32 | * }}} 33 | */ 34 | case class Fixed(value: String) extends CorrelationIdStrategy { 35 | override val toCIDValue: String = value 36 | } 37 | 38 | /** 39 | * Try to find the CID in properties (or `X-Correlation-Id` header as a fallback). Generate a new, random one, if nothing was found. 40 | * 41 | * Example: 42 | * {{{ 43 | * val mp = MessageProperties(correlationId = Some(cid)) 44 | * ... 45 | * implicit val cidStrategy: CorrelationIdStrategy = CorrelationIdStrategy.FromPropertiesOrRandomNew(Some(mp)) 46 | * }}} 47 | */ 48 | case class FromPropertiesOrRandomNew(mp: Option[MessageProperties]) extends CorrelationIdStrategy { 49 | override lazy val toCIDValue: String = { 50 | // take it from properties or from header (as a fallback)... 
if still empty, generate new 51 | mp.flatMap(p => p.correlationId.orElse(p.headers.get(CorrelationIdKeyName).map(_.toString))).getOrElse(randomValue) 52 | } 53 | } 54 | 55 | private def randomValue: String = { 56 | Random.alphanumeric.take(20).mkString 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /api/src/main/scala/com/avast/clients/rabbitmq/api/Delivery.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.api 2 | 3 | import com.avast.bytes.Bytes 4 | 5 | sealed trait Delivery[+A] { 6 | def properties: MessageProperties 7 | 8 | def routingKey: String 9 | } 10 | 11 | object Delivery { 12 | 13 | case class Ok[+A](body: A, properties: MessageProperties, routingKey: String) extends Delivery[A] 14 | 15 | case class MalformedContent(body: Bytes, properties: MessageProperties, routingKey: String, ce: ConversionException) 16 | extends Delivery[Nothing] 17 | 18 | def apply[A](body: A, properties: MessageProperties, routingKey: String): Delivery.Ok[A] = { 19 | Delivery.Ok(body, properties, routingKey) 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /api/src/main/scala/com/avast/clients/rabbitmq/api/DeliveryResult.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.api 2 | 3 | sealed trait DeliveryResult extends Product with Serializable 4 | 5 | object DeliveryResult { 6 | 7 | /** Success while processing the message - it will be removed from the queue. */ 8 | case object Ack extends DeliveryResult 9 | 10 | /** Reject the message from processing - it will be removed (discarded). */ 11 | case object Reject extends DeliveryResult 12 | 13 | /** The message cannot be processed but is worth retrying - it will be requeued to the top of the queue. */ 14 | case object Retry extends DeliveryResult 15 | 16 | /** The message cannot be processed but is worth retrying - it will be requeued to the bottom of the queue. 17 | * 18 | * @param countAsPoisoned Determines whether the PoisonedMessageHandler should count this republish and throw the delivery away if it's reached the limit. 19 | * @param newHeaders Headers to be added to the delivery before requeueing. 20 | * 21 | * */ 22 | case class Republish(countAsPoisoned: Boolean = true, newHeaders: Map[String, AnyRef] = Map.empty) extends DeliveryResult 23 | 24 | /** The message cannot be processed and should be considered as _poisoned_ even without further retrying. 
*/ 25 | case object DirectlyPoison extends DeliveryResult 26 | 27 | } 28 | -------------------------------------------------------------------------------- /api/src/main/scala/com/avast/clients/rabbitmq/api/MessageProperties.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.api 2 | 3 | import java.time.Instant 4 | 5 | case class MessageProperties(contentType: Option[String] = None, 6 | contentEncoding: Option[String] = None, 7 | headers: Map[String, AnyRef] = Map.empty, 8 | deliveryMode: Option[DeliveryMode] = None, 9 | priority: Option[Integer] = None, 10 | correlationId: Option[String] = None, 11 | replyTo: Option[String] = None, 12 | expiration: Option[String] = None, 13 | messageId: Option[String] = None, 14 | timestamp: Option[Instant] = None, 15 | `type`: Option[String] = None, 16 | userId: Option[String] = None, 17 | appId: Option[String] = None, 18 | clusterId: Option[String] = None) 19 | 20 | object MessageProperties { 21 | val empty: MessageProperties = MessageProperties() 22 | } 23 | 24 | sealed trait DeliveryMode { 25 | def code: Integer 26 | } 27 | object DeliveryMode { 28 | case object NonPersistent extends DeliveryMode { 29 | override def code: Integer = 1 30 | } 31 | case object Persistent extends DeliveryMode { 32 | override def code: Integer = 2 33 | } 34 | def fromCode(code: Integer): Option[DeliveryMode] = code.toInt match { 35 | case 0 => None 36 | case 1 => Some(NonPersistent) 37 | case 2 => Some(Persistent) 38 | case _ => throw new IllegalArgumentException(s"Unknown delivery mode: $code") 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /api/src/main/scala/com/avast/clients/rabbitmq/api/RabbitMQConsumer.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.api 2 | 3 | trait RabbitMQConsumer[F[_]] 4 | -------------------------------------------------------------------------------- /api/src/main/scala/com/avast/clients/rabbitmq/api/RabbitMQProducer.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.api 2 | 3 | import com.avast.clients.rabbitmq.api.CorrelationIdStrategy.FromPropertiesOrRandomNew 4 | 5 | trait RabbitMQProducer[F[_], A] { 6 | def send(routingKey: String, body: A, properties: Option[MessageProperties] = None)(implicit cidStrategy: CorrelationIdStrategy = 7 | FromPropertiesOrRandomNew(properties)): F[Unit] 8 | } 9 | -------------------------------------------------------------------------------- /api/src/main/scala/com/avast/clients/rabbitmq/api/RabbitMQPullConsumer.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.api 2 | 3 | trait RabbitMQPullConsumer[F[_], A] { 4 | 5 | /** Retrieves one message from the queue, if there is any. 6 | */ 7 | def pull(): F[PullResult[F, A]] 8 | } 9 | 10 | /** Trait which contains `Delivery` and its _handle_ through which it can be *acked*, *rejected* etc. 
11 | */ 12 | trait DeliveryWithHandle[+F[_], +A] { 13 | def delivery: Delivery[A] 14 | 15 | def handle(result: DeliveryResult): F[Unit] 16 | } 17 | 18 | sealed trait PullResult[+F[_], +A] { 19 | def toOption: Option[DeliveryWithHandle[F, A]] 20 | } 21 | 22 | object PullResult { 23 | 24 | case class Ok[F[_], A](deliveryWithHandle: DeliveryWithHandle[F, A]) extends PullResult[F, A] { 25 | override def toOption: Option[DeliveryWithHandle[F, A]] = Some(deliveryWithHandle) 26 | } 27 | 28 | case object EmptyQueue extends PullResult[Nothing, Nothing] { 29 | override def toOption: Option[DeliveryWithHandle[Nothing, Nothing]] = None 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /api/src/main/scala/com/avast/clients/rabbitmq/api/RabbitMQStreamingConsumer.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.api 2 | 3 | trait RabbitMQStreamingConsumer[F[_], A] { 4 | def deliveryStream: fs2.Stream[F, StreamedDelivery[F, A]] 5 | } 6 | 7 | trait StreamedDelivery[F[_], A] { 8 | def handleWith(f: Delivery[A] => F[DeliveryResult]): F[Unit] 9 | } 10 | -------------------------------------------------------------------------------- /api/src/main/scala/com/avast/clients/rabbitmq/api/exceptions.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.api 2 | import java.io.IOException 3 | 4 | case class ConversionException(desc: String, cause: Throwable = null) extends RuntimeException(desc, cause) 5 | 6 | case class ChannelNotRecoveredException(desc: String, cause: Throwable = null) extends IOException(desc, cause) 7 | 8 | case class TooBigMessage(desc: String, cause: Throwable = null) extends IllegalArgumentException(desc, cause) 9 | 10 | case class MaxAttemptsReached(desc: String, cause: Throwable = null) extends RuntimeException(desc, cause) 11 | 12 | case class NotAcknowledgedPublish(desc: String, cause: Throwable = null, messageId: Long) extends RuntimeException(desc, cause) 13 | -------------------------------------------------------------------------------- /build.gradle: -------------------------------------------------------------------------------- 1 | buildscript { 2 | repositories { 3 | mavenCentral() 4 | } 5 | dependencies { 6 | classpath 'com.google.protobuf:protobuf-gradle-plugin:0.9.5' 7 | classpath 'com.avast.gradle:gradle-docker-compose-plugin:0.17.12' 8 | classpath 'com.github.ben-manes:gradle-versions-plugin:0.39.0' 9 | } 10 | } 11 | 12 | plugins { 13 | id "io.github.gradle-nexus.publish-plugin" version '2.0.0' 14 | } 15 | 16 | nexusPublishing { 17 | repositories { 18 | sonatype() 19 | } 20 | 21 | transitionCheckOptions { 22 | maxRetries.set(180) // 30 minutes 23 | } 24 | } 25 | 26 | allprojects { 27 | apply plugin: 'idea' 28 | group = 'com.avast.clients.rabbitmq' 29 | version = version == 'unspecified' ? 'DEVELOPER-SNAPSHOT' : version 30 | ext { 31 | def scalaVersionEnv = System.getenv("SCALA_VERSION") 32 | 33 | scalaVersionFull = scalaVersionEnv != null ? 
scalaVersionEnv : "2.12.13" 34 | scalaVersion = "${scalaVersionFull}".split("\\.").dropRight(1).join(".") 35 | 36 | bytesVersion = "2.2.0" 37 | circeVersion = "0.14.1" 38 | catsVersion = "2.7.0" 39 | catsEffectVersion = "2.5.4" 40 | fs2Version = "2.5.3" 41 | metricsVersion = "3.0.4" 42 | protobufVersion = "3.25.8" 43 | pureconfigVersion = "0.17.8" 44 | scalapbVersion = "0.11.17" 45 | scalapbJson4sVersion = "0.11.1" 46 | typesafeConfigVersion = "1.4.3" 47 | monixVersion = "3.4.0" // just for tests! 48 | } 49 | 50 | /* 51 | * Report failed test at the end of the run: 52 | * */ 53 | 54 | // add a collection to track failedTests 55 | ext.failedTests = [] 56 | 57 | // add a testlistener to all tasks of type Test 58 | tasks.withType(Test) { 59 | afterTest { TestDescriptor descriptor, TestResult result -> 60 | if (result.resultType == TestResult.ResultType.FAILURE) { 61 | failedTests << ["${descriptor.className}::${descriptor.name}"] 62 | } 63 | } 64 | } 65 | 66 | // print out tracked failed tests when the build has finished 67 | gradle.buildFinished { 68 | if (!failedTests.empty) { 69 | println "\n\n----------------------------------------" 70 | println "Failed tests for ${project.name}:" 71 | failedTests.each { failedTest -> 72 | println " - ${failedTest}" 73 | } 74 | println "----------------------------------------" 75 | println "\n" 76 | } 77 | } 78 | } 79 | 80 | println("Scala version: ${scalaVersion} (${scalaVersionFull})") // will display the version once, when configuring root 81 | 82 | subprojects { 83 | apply plugin: 'java' 84 | apply plugin: 'scala' 85 | apply plugin: 'java-library' 86 | apply plugin: 'com.github.ben-manes.versions' 87 | 88 | if (!project.hasProperty("notPublished")) { 89 | apply plugin: 'maven-publish' 90 | apply plugin: 'signing' 91 | } 92 | 93 | group = "com.avast.clients.rabbitmq" 94 | 95 | java { 96 | withJavadocJar() 97 | withSourcesJar() 98 | } 99 | 100 | if (!project.hasProperty("notPublished")) { 101 | afterEvaluate { 102 | publishing { 103 | publications { 104 | mavenJava(MavenPublication) { 105 | from(components.java) 106 | artifactId = archivesBaseName 107 | pom { 108 | name = 'RabbitMQ client for Scala' 109 | description = 'Scala wrapper over standard RabbitMQ Java client library' 110 | url = 'https://github.com/avast/rabbitmq-scala-client' 111 | licenses { 112 | license { 113 | name = 'The MIT License' 114 | url = 'http://www.opensource.org/licenses/mit-license.php' 115 | } 116 | } 117 | developers { 118 | developer { 119 | id = 'jendakol' 120 | name = 'Jenda Kolena' 121 | email = 'jan.kolena@avast.com' 122 | } 123 | } 124 | scm { 125 | connection = 'scm:git:git://github.com/avast/rabbitmq-scala-client.git' 126 | developerConnection = 'scm:git:ssh://github.com/avast/rabbitmq-scala-client.git' 127 | url = 'https://github.com/avast/rabbitmq-scala-client' 128 | } 129 | } 130 | } 131 | } 132 | } 133 | } 134 | 135 | signing { 136 | String base64Key = System.getenv('SIGNING_KEY') 137 | if (base64Key) { 138 | useInMemoryPgpKeys(new String(Base64.decoder.decode(base64Key)), System.getenv('SIGNING_PASSWORD')) 139 | sign publishing.publications 140 | } 141 | } 142 | } 143 | 144 | configurations { 145 | scalaCompilerPlugin 146 | } 147 | 148 | compileScala { 149 | scalaCompileOptions.additionalParameters = ['-target:jvm-1.8'] 150 | } 151 | 152 | test { 153 | testLogging { 154 | showStandardStreams = true 155 | } 156 | } 157 | 158 | sourceCompatibility = '1.8' 159 | targetCompatibility = '1.8' 160 | 161 | repositories { 162 | mavenCentral() 163 | } 164 | 165 | 
dependencies { 166 | scalaCompilerPlugin "org.typelevel:kind-projector_$scalaVersionFull:0.13.2" 167 | 168 | api "org.scala-lang.modules:scala-collection-compat_$scalaVersion:2.6.0" 169 | 170 | testImplementation "io.monix:monix_$scalaVersion:$monixVersion" 171 | 172 | testImplementation "com.avast.metrics:metrics-jmx:${metricsVersion}" 173 | 174 | testImplementation "org.scalaj:scalaj-http_$scalaVersion:2.4.2" 175 | testImplementation "io.circe:circe-core_$scalaVersion:$circeVersion" 176 | testImplementation "io.circe:circe-generic_$scalaVersion:$circeVersion" 177 | testImplementation "io.circe:circe-parser_$scalaVersion:$circeVersion" 178 | 179 | testImplementation 'junit:junit:4.13.2' 180 | testImplementation "org.scalatest:scalatest_$scalaVersion:3.0.8" 181 | testImplementation 'org.mockito:mockito-all:1.10.19' 182 | testImplementation "com.typesafe.scala-logging:scala-logging_$scalaVersion:3.9.4" 183 | testImplementation 'ch.qos.logback:logback-classic:1.5.18' 184 | 185 | testImplementation 'org.pegdown:pegdown:1.6.0' 186 | } 187 | 188 | tasks.withType(ScalaCompile) { 189 | List plugins = configurations.scalaCompilerPlugin.files.collect { "-Xplugin:${it.getAbsolutePath()}".toString() } 190 | if (scalaVersion.startsWith("2.12")) { 191 | logger.info("Using -Ypartial-unification for Scala ${scalaVersion}") 192 | plugins.add("-Ypartial-unification") 193 | } 194 | scalaCompileOptions.additionalParameters = plugins 195 | } 196 | 197 | test { 198 | testLogging { 199 | events 'failed' 200 | exceptionFormat 'full' 201 | } 202 | } 203 | } 204 | 205 | 206 | allprojects { 207 | //noinspection UnnecessaryQualifiedReference 208 | tasks.withType(org.gradle.api.tasks.scala.ScalaDoc) { 209 | enabled = false 210 | } 211 | } 212 | -------------------------------------------------------------------------------- /core/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'docker-compose' 2 | apply plugin: 'com.google.protobuf' 3 | 4 | archivesBaseName = "rabbitmq-client-core_$scalaVersion" 5 | 6 | protobuf { 7 | protoc { 8 | artifact = "com.google.protobuf:protoc:$protobufVersion" 9 | } 10 | } 11 | 12 | dockerCompose.isRequiredBy(test) 13 | 14 | test.doFirst { 15 | dockerCompose.exposeAsSystemProperties(test) 16 | } 17 | 18 | dependencies { 19 | api project(":api") 20 | 21 | api "com.avast.metrics:metrics-cats-effect-2_$scalaVersion:${metricsVersion}" 22 | api "org.typelevel:log4cats-slf4j_$scalaVersion:1.4.0" 23 | 24 | api "com.avast.bytes:bytes-core:${bytesVersion}" 25 | 26 | api 'com.rabbitmq:amqp-client:5.25.0' 27 | 28 | api "org.typelevel:cats-core_$scalaVersion:$catsVersion" 29 | api "org.typelevel:cats-effect_$scalaVersion:$catsEffectVersion" 30 | 31 | api 'org.xbib:jsr-305:1.0.0' 32 | api "org.scala-lang:scala-reflect:$scalaVersionFull" 33 | 34 | testImplementation project(":extras") 35 | testImplementation project(":extras-circe") 36 | 37 | testImplementation project(":extras") 38 | testImplementation project(":pureconfig") 39 | 40 | testImplementation "io.circe:circe-generic_$scalaVersion:$circeVersion" 41 | testImplementation "io.circe:circe-generic-extras_$scalaVersion:$circeVersion" 42 | testImplementation "com.google.protobuf:protobuf-java:$protobufVersion" 43 | } 44 | -------------------------------------------------------------------------------- /core/docker-compose.override.yml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | services: 3 | rabbit: 4 | ports: 5 | - "5672:5672" 
6 | - "15672:15672" 7 | -------------------------------------------------------------------------------- /core/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | services: 3 | rabbit: 4 | image: rabbitmq:3.13.7-management 5 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/ChannelListener.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import cats.effect.Sync 4 | import com.rabbitmq.client.impl.recovery.RecoveryAwareChannelN 5 | import com.rabbitmq.client.{Channel, ShutdownSignalException} 6 | import org.typelevel.log4cats.slf4j.Slf4jLogger 7 | 8 | trait ChannelListener[F[_]] { 9 | def onShutdown(cause: ShutdownSignalException, channel: Channel): F[Unit] 10 | 11 | def onCreate(channel: Channel): F[Unit] 12 | 13 | def onCreateFailure(failure: Throwable): F[Unit] 14 | 15 | def onRecoveryStarted(channel: Channel): F[Unit] 16 | 17 | def onRecoveryCompleted(channel: Channel): F[Unit] 18 | 19 | def onRecoveryFailure(channel: Channel, failure: Throwable): F[Unit] 20 | } 21 | 22 | object ChannelListener { 23 | def default[F[_]: Sync]: ChannelListener[F] = new ChannelListener[F] { 24 | private val logger = Slf4jLogger.getLoggerFromClass[F](this.getClass) 25 | 26 | override def onCreate(channel: Channel): F[Unit] = { 27 | logger.info(s"Channel created: $channel") 28 | } 29 | 30 | override def onCreateFailure(failure: Throwable): F[Unit] = { 31 | logger.warn(failure)(s"Channel was NOT created") 32 | } 33 | 34 | override def onRecoveryCompleted(channel: Channel): F[Unit] = { 35 | logger.info(s"Channel recovered: $channel") 36 | } 37 | 38 | override def onRecoveryStarted(channel: Channel): F[Unit] = { 39 | logger.debug(s"Channel recovery started: $channel") 40 | } 41 | 42 | override def onRecoveryFailure(channel: Channel, failure: Throwable): F[Unit] = { 43 | channel match { 44 | case ch: RecoveryAwareChannelN if !ch.isOpen => 45 | val initByApp = Option(ch.getCloseReason).map(_.isInitiatedByApplication).exists(identity) 46 | 47 | if (initByApp) { 48 | logger.debug(failure)(s"Channel could not be recovered, because it was manually closed: $channel") 49 | } else logger.warn(failure)(s"Channel recovery failed: $channel") 50 | 51 | case _ => logger.warn(failure)(s"Channel recovery failed: $channel") 52 | } 53 | } 54 | 55 | override def onShutdown(cause: ShutdownSignalException, channel: Channel): F[Unit] = { 56 | if (cause.isInitiatedByApplication) { 57 | logger.debug(s"Channel shutdown: $channel") 58 | } else { 59 | logger.warn(cause)(s"Channel shutdown: $channel") 60 | } 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/ConfirmedDeliveryResult.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import cats.effect.Concurrent 4 | import cats.effect.concurrent.Deferred 5 | import com.avast.clients.rabbitmq.api.DeliveryResult 6 | 7 | private[rabbitmq] trait ConfirmedDeliveryResult[F[_]] { 8 | def deliveryResult: DeliveryResult 9 | def confirm: F[Unit] 10 | def awaitConfirmation: F[Unit] 11 | } 12 | 13 | private[rabbitmq] object ConfirmedDeliveryResult { 14 | def apply[F[_]: Concurrent](dr: DeliveryResult): ConfirmedDeliveryResult[F] = { 15 | new ConfirmedDeliveryResult[F] { 16 | private 
val deff = Deferred.unsafe[F, Unit] 17 | 18 | override val deliveryResult: DeliveryResult = dr 19 | 20 | override def confirm: F[Unit] = deff.complete(()) 21 | override def awaitConfirmation: F[Unit] = deff.get 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/ConnectionListener.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import cats.effect.Sync 4 | import com.rabbitmq.client.{Connection, ShutdownSignalException} 5 | import org.typelevel.log4cats.slf4j.Slf4jLogger 6 | 7 | trait ConnectionListener[F[_]] { 8 | def onCreate(connection: Connection): F[Unit] 9 | 10 | def onCreateFailure(failure: Throwable): F[Unit] 11 | 12 | def onRecoveryStarted(connection: Connection): F[Unit] 13 | 14 | def onRecoveryCompleted(connection: Connection): F[Unit] 15 | 16 | def onRecoveryFailure(connection: Connection, failure: Throwable): F[Unit] 17 | 18 | def onShutdown(connection: Connection, cause: ShutdownSignalException): F[Unit] 19 | } 20 | 21 | object ConnectionListener { 22 | def default[F[_]: Sync]: ConnectionListener[F] = new ConnectionListener[F] { 23 | private val logger = Slf4jLogger.getLoggerFromClass[F](this.getClass) 24 | 25 | override def onCreate(connection: Connection): F[Unit] = { 26 | logger.info(s"Connection created: $connection (name ${connection.getClientProvidedName})") 27 | } 28 | 29 | override def onCreateFailure(failure: Throwable): F[Unit] = { 30 | logger.warn(failure)(s"Connection NOT created") 31 | } 32 | 33 | override def onRecoveryStarted(connection: Connection): F[Unit] = { 34 | logger.info(s"Connection recovery started: $connection (name ${connection.getClientProvidedName})") 35 | } 36 | 37 | override def onRecoveryCompleted(connection: Connection): F[Unit] = { 38 | logger.info(s"Connection recovery completed: $connection (name ${connection.getClientProvidedName})") 39 | } 40 | 41 | override def onRecoveryFailure(connection: Connection, failure: Throwable): F[Unit] = { 42 | logger.warn(failure)(s"Connection recovery failed: $connection (name ${connection.getClientProvidedName})") 43 | } 44 | 45 | override def onShutdown(connection: Connection, cause: ShutdownSignalException): F[Unit] = { 46 | if (cause.isInitiatedByApplication) { 47 | logger.debug(s"Connection shutdown: $connection (name ${connection.getClientProvidedName})") 48 | } else { 49 | logger.warn(cause)(s"Connection shutdown: $connection (name ${connection.getClientProvidedName})") 50 | } 51 | } 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/ConsumerBase.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import cats.effect.{Blocker, Concurrent, ConcurrentEffect, ContextShift, Timer} 4 | import cats.implicits.{catsSyntaxApplicativeError, toFunctorOps} 5 | import cats.syntax.flatMap._ 6 | import com.avast.bytes.Bytes 7 | import com.avast.clients.rabbitmq.JavaConverters.AmqpPropertiesConversions 8 | import com.avast.clients.rabbitmq.api._ 9 | import com.avast.clients.rabbitmq.logging.ImplicitContextLogger 10 | import com.avast.metrics.scalaeffectapi.Monitor 11 | import com.rabbitmq.client.{AMQP, Envelope} 12 | import org.slf4j.event.Level 13 | 14 | import scala.concurrent.TimeoutException 15 | import scala.concurrent.duration.{Duration, 
FiniteDuration} 16 | import scala.util._ 17 | 18 | // it's case-class to have `copy` method for free.... 19 | final private[rabbitmq] case class ConsumerBase[F[_]: ConcurrentEffect: Timer, A]( 20 | consumerName: String, 21 | queueName: String, 22 | redactPayload: Boolean, 23 | blocker: Blocker, 24 | consumerLogger: ImplicitContextLogger[F], 25 | consumerRootMonitor: Monitor[F])(implicit val contextShift: ContextShift[F], implicit val deliveryConverter: DeliveryConverter[A]) { 26 | 27 | val F: ConcurrentEffect[F] = ConcurrentEffect[F] // scalastyle:ignore 28 | 29 | private val timeoutsMeter = consumerRootMonitor.meter("timeouts") 30 | 31 | def parseDelivery(envelope: Envelope, rawBody: Bytes, properties: AMQP.BasicProperties): F[DeliveryWithContext[A]] = { 32 | implicit val dctx: DeliveryContext = DeliveryContext.from(envelope, properties) 33 | import dctx.fixedProperties 34 | 35 | blocker 36 | .delay(Try(deliveryConverter.convert(rawBody))) 37 | .flatMap[Delivery[A]] { 38 | case Success(Right(a)) => 39 | val delivery = Delivery(a, fixedProperties.asScala, dctx.routingKey.value) 40 | 41 | consumerLogger 42 | .trace(s"[$consumerName] Received delivery from queue '$queueName': ${redactIfConfigured(delivery)}") 43 | .as(delivery) 44 | 45 | case Success(Left(ce)) => 46 | val delivery = Delivery.MalformedContent(rawBody, fixedProperties.asScala, dctx.routingKey.value, ce) 47 | 48 | consumerLogger 49 | .trace( 50 | s"[$consumerName] Received delivery from queue '$queueName' but could not convert it: ${redactIfConfigured(delivery)}" 51 | ) 52 | .as(delivery) 53 | 54 | case Failure(ce) => 55 | val ex = ConversionException("Unexpected failure", ce) 56 | val delivery = Delivery.MalformedContent(rawBody, fixedProperties.asScala, dctx.routingKey.value, ex) 57 | 58 | consumerLogger 59 | .trace(s"[$consumerName] Received delivery from queue '$queueName' but " + 60 | s"could not convert it as the convertor has failed: ${redactIfConfigured(delivery)}") 61 | .as(delivery) 62 | } 63 | .map(DeliveryWithContext(_, dctx)) 64 | } 65 | 66 | def watchForTimeoutIfConfigured(processTimeout: FiniteDuration, timeoutAction: DeliveryResult, timeoutLogLevel: Level)( 67 | delivery: Delivery[A], 68 | result: F[ConfirmedDeliveryResult[F]])( 69 | customTimeoutAction: F[Unit], 70 | )(implicit dctx: DeliveryContext): F[ConfirmedDeliveryResult[F]] = { 71 | import dctx._ 72 | 73 | if (processTimeout != Duration.Zero) { 74 | Concurrent 75 | .timeout(result, processTimeout) 76 | .recoverWith { 77 | case e: TimeoutException => 78 | customTimeoutAction >> 79 | consumerLogger.trace(e)(s"[$consumerName] Timeout for $messageId") >> 80 | timeoutsMeter.mark >> { 81 | 82 | lazy val msg = s"[$consumerName] Task timed-out after $processTimeout of processing delivery $messageId " + 83 | s"with routing key ${delivery.routingKey}, applying DeliveryResult.$timeoutAction. 
" + 84 | s"Delivery was:\n${redactIfConfigured(delivery)}" 85 | 86 | (timeoutLogLevel match { 87 | case Level.ERROR => consumerLogger.error(msg) 88 | case Level.WARN => consumerLogger.warn(msg) 89 | case Level.INFO => consumerLogger.info(msg) 90 | case Level.DEBUG => consumerLogger.debug(msg) 91 | case Level.TRACE => consumerLogger.trace(msg) 92 | }).as { 93 | ConfirmedDeliveryResult[F](timeoutAction) 94 | } 95 | } 96 | } 97 | } else result 98 | } 99 | 100 | def redactIfConfigured(delivery: Delivery[_]): String = { 101 | (if (!redactPayload) delivery else delivery.withRedactedBody).toString 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/ConsumerChannelOps.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import cats.effect.{Blocker, ConcurrentEffect, ContextShift, Resource, Timer} 4 | import cats.implicits.catsSyntaxApplicativeError 5 | import cats.syntax.flatMap._ 6 | import com.avast.bytes.Bytes 7 | import com.avast.clients.rabbitmq.DefaultRabbitMQConsumer._ 8 | import com.avast.clients.rabbitmq.api._ 9 | import com.avast.clients.rabbitmq.logging.ImplicitContextLogger 10 | import com.avast.metrics.scalaeffectapi.Monitor 11 | import com.rabbitmq.client.AMQP.BasicProperties 12 | 13 | import scala.jdk.CollectionConverters._ 14 | import scala.util._ 15 | 16 | // it's case-class to have `copy` method for free.... 17 | final private[rabbitmq] case class ConsumerChannelOps[F[_]: ConcurrentEffect: Timer: ContextShift, A]( 18 | private val consumerName: String, 19 | private val queueName: String, 20 | channel: ServerChannel, 21 | private val blocker: Blocker, 22 | republishStrategy: RepublishStrategy[F], 23 | poisonedMessageHandler: PoisonedMessageHandler[F, A], 24 | connectionInfo: RabbitMQConnectionInfo, 25 | private val consumerLogger: ImplicitContextLogger[F], 26 | private val consumerRootMonitor: Monitor[F]) { 27 | 28 | val resultsMonitor: Monitor[F] = consumerRootMonitor.named("results") 29 | private val resultAckMeter = resultsMonitor.meter("ack") 30 | private val resultRejectMeter = resultsMonitor.meter("reject") 31 | private val resultRetryMeter = resultsMonitor.meter("retry") 32 | private val resultRepublishMeter = resultsMonitor.meter("republish") 33 | 34 | def handleResult(rawBody: Bytes, delivery: Delivery[A])(res: DeliveryResult)(implicit dctx: DeliveryContext): F[Unit] = { 35 | import DeliveryResult._ 36 | import dctx._ 37 | 38 | poisonedMessageHandler.interceptResult(delivery, messageId, rawBody)(res).flatMap { 39 | case Ack => ack() 40 | case Reject => reject() 41 | case Retry => retry() 42 | case Republish(_, newHeaders) => republish(createPropertiesForRepublish(newHeaders, fixedProperties, routingKey), rawBody) 43 | case DirectlyPoison => throw new IllegalStateException("Poison state should be handled by PMH, this is most probably a BUG") 44 | } 45 | } 46 | 47 | protected def ack()(implicit dctx: DeliveryContext): F[Unit] = { 48 | import dctx._ 49 | 50 | consumerLogger.debug(s"[$consumerName] ACK delivery $messageId, $deliveryTag") >> 51 | blocker 52 | .delay { 53 | if (!channel.isOpen) throw new IllegalStateException("Cannot ack delivery on closed channel") 54 | channel.basicAck(deliveryTag.value, false) 55 | } 56 | .attempt 57 | .flatMap { 58 | case Right(()) => resultAckMeter.mark 59 | case Left(e) => consumerLogger.warn(e)(s"[$consumerName] Error while confirming the delivery $messageId") 
60 | } 61 | } 62 | 63 | protected def reject()(implicit dctx: DeliveryContext): F[Unit] = { 64 | import dctx._ 65 | 66 | consumerLogger.debug(s"[$consumerName] REJECT delivery $messageId, $deliveryTag") >> 67 | blocker 68 | .delay { 69 | if (!channel.isOpen) throw new IllegalStateException("Cannot reject delivery on closed channel") 70 | channel.basicReject(deliveryTag.value, false) 71 | } 72 | .attempt 73 | .flatMap { 74 | case Right(()) => resultRejectMeter.mark 75 | case Left(e) => consumerLogger.warn(e)(s"[$consumerName] Error while rejecting the delivery $messageId") 76 | } 77 | } 78 | 79 | protected def retry()(implicit dctx: DeliveryContext): F[Unit] = { 80 | import dctx._ 81 | 82 | consumerLogger.debug(s"[$consumerName] REJECT (with requeue) delivery $messageId, $deliveryTag") >> 83 | blocker 84 | .delay { 85 | if (!channel.isOpen) throw new IllegalStateException("Cannot retry delivery on closed channel") 86 | channel.basicReject(deliveryTag.value, true) 87 | } 88 | .attempt 89 | .flatMap { 90 | case Right(()) => resultRetryMeter.mark 91 | case Left(e) => consumerLogger.warn(e)(s"[$consumerName] Error while rejecting (with requeue) the delivery $messageId") 92 | } 93 | } 94 | 95 | protected def republish(properties: BasicProperties, rawBody: Bytes)(implicit dctx: DeliveryContext): F[Unit] = { 96 | 97 | republishStrategy.republish(blocker, channel, consumerName)(queueName, properties, rawBody) >> 98 | resultRepublishMeter.mark 99 | } 100 | 101 | protected def createPropertiesForRepublish(newHeaders: Map[String, AnyRef], 102 | properties: BasicProperties, 103 | routingKey: RoutingKey): BasicProperties = { 104 | // values in newHeaders will overwrite values in original headers 105 | // we must also ensure that UserID will be the same as current username (or nothing): https://www.rabbitmq.com/validated-user-id.html 106 | val originalUserId = Option(properties.getUserId).filter(_.nonEmpty) 107 | val h = originalUserId match { 108 | case Some(uid) => newHeaders + (RepublishOriginalRoutingKeyHeaderName -> routingKey.value) + (RepublishOriginalUserId -> uid) 109 | case None => newHeaders + (RepublishOriginalRoutingKeyHeaderName -> routingKey.value) 110 | } 111 | val headers = Option(properties.getHeaders).map(_.asScala ++ h).getOrElse(h) 112 | val newUserId = originalUserId match { 113 | case Some(_) => connectionInfo.username.orNull 114 | case None => null 115 | } 116 | properties.builder().headers(headers.asJava).userId(newUserId).build() 117 | } 118 | } 119 | 120 | class ConsumerChannelOpsFactory[F[_]: ConcurrentEffect: Timer: ContextShift, A: DeliveryConverter]( 121 | consumerName: String, 122 | queueName: String, 123 | blocker: Blocker, 124 | republishStrategy: RepublishStrategy[F], 125 | poisonedMessageHandler: PoisonedMessageHandler[F, A], 126 | connectionInfo: RabbitMQConnectionInfo, 127 | consumerLogger: ImplicitContextLogger[F], 128 | consumerRootMonitor: Monitor[F], 129 | newChannel: Resource[F, ServerChannel]) { 130 | 131 | val create: Resource[F, ConsumerChannelOps[F, A]] = { 132 | newChannel.map { channel => 133 | new ConsumerChannelOps[F, A](consumerName, 134 | queueName, 135 | channel, 136 | blocker, 137 | republishStrategy, 138 | poisonedMessageHandler, 139 | connectionInfo, 140 | consumerLogger, 141 | consumerRootMonitor) 142 | } 143 | } 144 | } 145 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/ConsumerListener.scala: 
-------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import cats.effect.Sync 4 | import com.rabbitmq.client._ 5 | import org.typelevel.log4cats.slf4j.Slf4jLogger 6 | 7 | trait ConsumerListener[F[_]] { 8 | def onError(consumer: Consumer, consumerName: String, channel: Channel, failure: Throwable): F[Unit] 9 | 10 | def onShutdown(consumer: Consumer, channel: Channel, consumerName: String, consumerTag: String, sig: ShutdownSignalException): F[Unit] 11 | } 12 | 13 | object ConsumerListener { 14 | def default[F[_]: Sync]: ConsumerListener[F] = new ConsumerListener[F] { 15 | private val logger = Slf4jLogger.getLoggerFromClass[F](this.getClass) 16 | 17 | override def onError(consumer: Consumer, consumerName: String, channel: Channel, failure: Throwable): F[Unit] = { 18 | logger.warn(failure)(s"[$consumerName] Error in consumer on channel $channel") 19 | } 20 | 21 | override def onShutdown(consumer: Consumer, 22 | channel: Channel, 23 | consumerName: String, 24 | consumerTag: String, 25 | cause: ShutdownSignalException): F[Unit] = { 26 | if (cause.isInitiatedByApplication) { 27 | logger.debug(s"[$consumerName] Shutdown of consumer on channel $channel") 28 | } else { 29 | logger.warn(cause)(s"[$consumerName] Shutdown of consumer on channel $channel") 30 | } 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/ConsumerWithCallbackBase.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import cats.effect.{ConcurrentEffect, Timer => CatsTimer} 4 | import cats.syntax.all._ 5 | import com.avast.bytes.Bytes 6 | import com.avast.clients.rabbitmq.api.DeliveryResult 7 | import com.avast.metrics.scalaeffectapi._ 8 | import com.rabbitmq.client.AMQP.BasicProperties 9 | import com.rabbitmq.client._ 10 | 11 | import java.time.{Duration, Instant} 12 | import java.util.concurrent.RejectedExecutionException 13 | import scala.util.control.NonFatal 14 | 15 | abstract class ConsumerWithCallbackBase[F[_]: ConcurrentEffect: CatsTimer, A: DeliveryConverter](base: ConsumerBase[F, A], 16 | channelOps: ConsumerChannelOps[F, A], 17 | failureAction: DeliveryResult, 18 | consumerListener: ConsumerListener[F]) 19 | extends DefaultConsumer(channelOps.channel) { 20 | import base._ 21 | import channelOps._ 22 | 23 | protected val readMeter: Meter[F] = consumerRootMonitor.meter("read") 24 | 25 | protected val processingFailedMeter: Meter[F] = resultsMonitor.meter("processingFailed") 26 | 27 | protected val tasksMonitor: Monitor[F] = consumerRootMonitor.named("tasks") 28 | 29 | protected val processingCount: SettableGauge[F, Long] = tasksMonitor.gauge.settableLong("processing", replaceExisting = true) 30 | 31 | protected val processedTimer: TimerPair[F] = tasksMonitor.timerPair("processed") 32 | 33 | override def handleShutdownSignal(consumerTag: String, sig: ShutdownSignalException): Unit = { 34 | consumerListener.onShutdown(this, channel, consumerName, consumerTag, sig).unsafeStartAndForget() 35 | } 36 | 37 | protected def handleNewDelivery(d: DeliveryWithContext[A]): F[Option[ConfirmedDeliveryResult[F]]] 38 | 39 | override final def handleDelivery(consumerTag: String, envelope: Envelope, properties: BasicProperties, body: Array[Byte]): Unit = { 40 | val action = processingCount.inc >> { 41 | val rawBody = Bytes.copyFrom(body) 42 | 43 | base 44 | .parseDelivery(envelope, 
rawBody, properties) 45 | .flatMap { d => 46 | import d.context 47 | import context._ 48 | 49 | consumerLogger.debug(s"[$consumerName] Read delivery with $messageId, $deliveryTag") >> 50 | consumerLogger.trace(s"[$consumerName] Received delivery: ${d.delivery}") >> 51 | readMeter.mark >> { 52 | 53 | val st = Instant.now() 54 | val taskDuration = F.delay(Duration.between(st, Instant.now())) 55 | 56 | unsafeExecuteReadAction(d, rawBody, taskDuration) 57 | .recoverWith { 58 | case e: RejectedExecutionException => 59 | consumerLogger.debug(e)(s"[$consumerName] Executor was unable to plan the handling task for $messageId, $deliveryTag") >> 60 | handleFailure(d, rawBody, e) 61 | } 62 | } 63 | } 64 | .recoverWith { 65 | case e => 66 | processingCount.dec >> 67 | processingFailedMeter.mark >> 68 | consumerLogger.plainDebug(e)(s"Could not process delivery with delivery tag ${envelope.getDeliveryTag}") >> 69 | F.raiseError[Unit](e) 70 | } 71 | } 72 | 73 | // Actually start the processing. This is the barrier between synchronous and asynchronous world. 74 | startAndForget { 75 | action 76 | } 77 | } 78 | 79 | private def unsafeExecuteReadAction(delivery: DeliveryWithContext[A], rawBody: Bytes, taskDuration: F[Duration]): F[Unit] = { 80 | import delivery.context 81 | import context._ 82 | 83 | handleNewDelivery(delivery) 84 | .flatMap { 85 | case Some(dr) => 86 | handleResult(rawBody, delivery.delivery)(dr.deliveryResult) >> dr.confirm 87 | 88 | case None => 89 | consumerLogger.trace(s"[$consumerName] Delivery result for $messageId ignored") 90 | } 91 | .flatTap { _ => 92 | taskDuration.flatMap { duration => 93 | consumerLogger.debug(s"[$consumerName] Delivery $messageId handling succeeded in $duration") >> 94 | processedTimer.update(duration) >> 95 | processingCount.dec 96 | } 97 | } 98 | .recoverWith { 99 | case NonFatal(t) => 100 | taskDuration.flatMap { duration => 101 | consumerLogger.debug(t)(s"[$consumerName] Delivery $messageId handling failed in $duration") >> 102 | processedTimer.updateFailure(duration) >> 103 | handleFailure(delivery, rawBody, t) 104 | } 105 | } 106 | } 107 | 108 | private def handleFailure(delivery: DeliveryWithContext[A], rawBody: Bytes, t: Throwable)(implicit dctx: DeliveryContext): F[Unit] = { 109 | processingFailedMeter.mark >> 110 | processingCount.dec >> 111 | consumerListener.onError(this, consumerName, channel, t) >> 112 | executeFailureAction(delivery, rawBody) 113 | } 114 | 115 | private def executeFailureAction(d: DeliveryWithContext[A], rawBody: Bytes): F[Unit] = { 116 | import d._ 117 | handleResult(rawBody, delivery)(failureAction) 118 | } 119 | } 120 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/DefaultRabbitMQConnection.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import cats.effect._ 4 | import cats.syntax.flatMap._ 5 | import com.avast.clients.rabbitmq.api._ 6 | import com.avast.clients.rabbitmq.logging.ImplicitContextLogger 7 | import com.avast.metrics.scalaeffectapi.Monitor 8 | import com.rabbitmq.client.ShutdownSignalException 9 | 10 | import scala.util.control.NonFatal 11 | 12 | class DefaultRabbitMQConnection[F[_]] private (connection: ServerConnection, 13 | info: RabbitMQConnectionInfo, 14 | republishStrategy: RepublishStrategyConfig, 15 | override val connectionListener: ConnectionListener[F], 16 | override val channelListener: ChannelListener[F], 17 | override val 
consumerListener: ConsumerListener[F], 18 | blocker: Blocker)(implicit F: ConcurrentEffect[F], timer: Timer[F], cs: ContextShift[F]) 19 | extends RabbitMQConnection[F] { 20 | 21 | private val logger = ImplicitContextLogger.createLogger[F, DefaultRabbitMQConnection[F]] 22 | 23 | private val factory = new DefaultRabbitMQClientFactory[F](this, info, blocker, republishStrategy) 24 | 25 | def newChannel(): Resource[F, ServerChannel] = { 26 | createChannel() 27 | } 28 | 29 | private val createChannelF: F[ServerChannel] = { 30 | F.delay { 31 | try { 32 | connection.createChannel() match { 33 | case channel: ServerChannel => 34 | channel.addShutdownListener { (cause: ShutdownSignalException) => 35 | channelListener.onShutdown(cause, channel).unsafeStartAndForget() 36 | } 37 | channelListener.onCreate(channel).unsafeStartAndForget() 38 | channel 39 | 40 | case null => throw new IllegalStateException(s"New channel could not be created, maybe the max. count limit was reached?") 41 | 42 | // since the connection is `Recoverable`, the channel should always be `Recoverable` too (based on docs), so the exception will never be thrown 43 | case n => throw new IllegalStateException(s"Required Recoverable Channel") 44 | } 45 | } catch { 46 | case NonFatal(e) => 47 | channelListener.onCreateFailure(e).unsafeStartAndForget() 48 | throw e 49 | } 50 | } 51 | .flatTap(channel => logger.plainDebug(s"Created channel: $channel ${channel.hashCode()}")) 52 | } 53 | 54 | override def newStreamingConsumer[A: DeliveryConverter]( 55 | consumerConfig: StreamingConsumerConfig, 56 | monitor: Monitor[F], 57 | ): Resource[F, RabbitMQStreamingConsumer[F, A]] = { 58 | factory.StreamingConsumer 59 | .create[A](consumerConfig, monitor, consumerListener) 60 | .map(identity[RabbitMQStreamingConsumer[F, A]]) // type inference... 
:-( 61 | } 62 | 63 | def newConsumer[A: DeliveryConverter](consumerConfig: ConsumerConfig, monitor: Monitor[F])( 64 | readAction: DeliveryReadAction[F, A]): Resource[F, RabbitMQConsumer[F]] = { 65 | factory.Consumer.create[A](consumerConfig, consumerListener, readAction, monitor) 66 | } 67 | 68 | def newPullConsumer[A: DeliveryConverter](pullConsumerConfig: PullConsumerConfig, 69 | monitor: Monitor[F]): Resource[F, RabbitMQPullConsumer[F, A]] = { 70 | factory.PullConsumer.create[A](pullConsumerConfig, monitor) 71 | } 72 | 73 | private def createChannel(): Resource[F, ServerChannel] = 74 | Resource.make(createChannelF)( 75 | channel => 76 | logger.plainDebug(s"Closing channel: $channel ${channel.hashCode()}") >> 77 | F.delay { 78 | channel.close() 79 | }) 80 | 81 | override def newProducer[A: ProductConverter](producerConfig: ProducerConfig, 82 | monitor: Monitor[F]): Resource[F, RabbitMQProducer[F, A]] = { 83 | factory.Producer 84 | .create[A](producerConfig, monitor) 85 | } 86 | 87 | override def declareExchange(config: DeclareExchangeConfig): F[Unit] = withChannel { ch => 88 | factory.Declarations.declareExchange(config, ch) 89 | } 90 | 91 | override def declareQueue(config: DeclareQueueConfig): F[Unit] = withChannel { ch => 92 | factory.Declarations.declareQueue(config, ch) 93 | } 94 | 95 | override def bindExchange(config: BindExchangeConfig): F[Unit] = withChannel { ch => 96 | factory.Declarations.bindExchange(config, ch) 97 | } 98 | 99 | override def bindQueue(config: BindQueueConfig): F[Unit] = withChannel { ch => 100 | factory.Declarations.bindQueue(config, ch) 101 | } 102 | 103 | def withChannel[A](f: ServerChannel => F[A]): F[A] = { 104 | createChannel().use(f) 105 | } 106 | 107 | // prepare exchange for republishing 108 | private[rabbitmq] val setUpRepublishing: F[Unit] = { 109 | withChannel { channel => 110 | republishStrategy match { 111 | case RepublishStrategyConfig.CustomExchange(exchangeName, exchangeDeclare, _) if exchangeDeclare => 112 | factory.declareExchange( 113 | name = exchangeName, 114 | `type` = ExchangeType.Direct, 115 | durable = true, 116 | autoDelete = false, 117 | arguments = DeclareArgumentsConfig(), 118 | channel = channel, 119 | )(logger) 120 | 121 | case _ => F.unit // no-op 122 | } 123 | } 124 | } 125 | } 126 | 127 | object DefaultRabbitMQConnection { 128 | def make[F[_]](connection: ServerConnection, 129 | info: RabbitMQConnectionInfo, 130 | republishStrategy: RepublishStrategyConfig, 131 | connectionListener: ConnectionListener[F], 132 | channelListener: ChannelListener[F], 133 | consumerListener: ConsumerListener[F], 134 | blocker: Blocker)(implicit F: ConcurrentEffect[F], timer: Timer[F], cs: ContextShift[F]): F[DefaultRabbitMQConnection[F]] = 135 | F.delay { 136 | new DefaultRabbitMQConnection(connection, info, republishStrategy, connectionListener, channelListener, consumerListener, blocker) 137 | } 138 | .flatTap { _.setUpRepublishing } 139 | 140 | } 141 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/DefaultRabbitMQConsumer.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import cats.effect._ 4 | import cats.implicits._ 5 | import com.avast.clients.rabbitmq.api._ 6 | import com.rabbitmq.client.{Delivery => _} 7 | import org.slf4j.event.Level 8 | 9 | import scala.concurrent.duration.FiniteDuration 10 | 11 | class DefaultRabbitMQConsumer[F[_]: ConcurrentEffect: Timer, A: 
DeliveryConverter]( 12 | private[rabbitmq] val base: ConsumerBase[F, A], 13 | channelOps: ConsumerChannelOps[F, A], 14 | processTimeout: FiniteDuration, 15 | timeoutAction: DeliveryResult, 16 | timeoutLogLevel: Level, 17 | failureAction: DeliveryResult, 18 | consumerListener: ConsumerListener[F])(userAction: DeliveryReadAction[F, A]) 19 | extends ConsumerWithCallbackBase(base, channelOps, failureAction, consumerListener) 20 | with RabbitMQConsumer[F] { 21 | import base._ 22 | 23 | override protected def handleNewDelivery(d: DeliveryWithContext[A]): F[Option[ConfirmedDeliveryResult[F]]] = { 24 | import d._ 25 | 26 | // we try to catch also long-lasting synchronous work on the thread 27 | val resultAction = blocker.delay { userAction(delivery).map(ConfirmedDeliveryResult(_)) }.flatten 28 | 29 | watchForTimeoutIfConfigured(processTimeout, timeoutAction, timeoutLogLevel)(delivery, resultAction)(F.unit).map(Some(_)) 30 | } 31 | 32 | } 33 | 34 | object DefaultRabbitMQConsumer { 35 | final val RepublishOriginalRoutingKeyHeaderName = "X-Original-Routing-Key" 36 | final val RepublishOriginalUserId = "X-Original-User-Id" 37 | final val FederationOriginalRoutingKeyHeaderName = "x-original-routing-key" 38 | final val CorrelationIdHeaderName = CorrelationIdStrategy.CorrelationIdKeyName 39 | } 40 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/DefaultRabbitMQPullConsumer.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import cats.effect._ 4 | import cats.implicits._ 5 | import com.avast.bytes.Bytes 6 | import com.avast.clients.rabbitmq.api._ 7 | import com.avast.metrics.scalaeffectapi.SettableGauge 8 | 9 | import java.util.concurrent.atomic.AtomicInteger 10 | 11 | class DefaultRabbitMQPullConsumer[F[_]: ConcurrentEffect, A: DeliveryConverter](base: ConsumerBase[F, A], 12 | channelOps: ConsumerChannelOps[F, A]) 13 | extends RabbitMQPullConsumer[F, A] { 14 | import base._ 15 | import channelOps._ 16 | 17 | protected val processingCount: SettableGauge[F, Long] = consumerRootMonitor.gauge.settableLong("processing", replaceExisting = true) 18 | 19 | override def pull(): F[PullResult[F, A]] = { 20 | blocker 21 | .delay { 22 | Option(channel.basicGet(queueName, false)) 23 | } 24 | .flatMap { 25 | case Some(response) => 26 | val rawBody = Bytes.copyFrom(response.getBody) 27 | 28 | (processingCount.inc >> parseDelivery(response.getEnvelope, rawBody, response.getProps)).flatMap { d => 29 | import d._ 30 | import context._ 31 | 32 | val dwh = createDeliveryWithHandle(delivery) { result => 33 | handleResult(rawBody, delivery)(result) >> 34 | processingCount.dec.void 35 | } 36 | 37 | consumerLogger.debug(s"[$consumerName] Read delivery with $messageId $deliveryTag").as { 38 | PullResult.Ok(dwh) 39 | } 40 | } 41 | 42 | case None => 43 | Effect[F].pure { 44 | PullResult.EmptyQueue.asInstanceOf[PullResult[F, A]] 45 | } 46 | } 47 | } 48 | 49 | private def createDeliveryWithHandle[B](d: Delivery[B])(handleResult: DeliveryResult => F[Unit]): DeliveryWithHandle[F, B] = { 50 | new DeliveryWithHandle[F, B] { 51 | override val delivery: Delivery[B] = d 52 | 53 | override def handle(result: DeliveryResult): F[Unit] = handleResult(result) 54 | } 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/DeliveryContext.scala: 
-------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import com.avast.clients.rabbitmq.DefaultRabbitMQConsumer.CorrelationIdHeaderName 4 | import com.avast.clients.rabbitmq.api.CorrelationIdStrategy.CorrelationIdKeyName 5 | import com.avast.clients.rabbitmq.api.Delivery 6 | import com.avast.clients.rabbitmq.logging.LoggingContext 7 | import com.rabbitmq.client.AMQP.BasicProperties 8 | import com.rabbitmq.client.Envelope 9 | 10 | private[rabbitmq] case class DeliveryWithContext[A](delivery: Delivery[A], implicit val context: DeliveryContext) 11 | 12 | private[rabbitmq] case class DeliveryContext(messageId: MessageId, 13 | correlationId: Option[CorrelationId], 14 | deliveryTag: DeliveryTag, 15 | routingKey: RoutingKey, 16 | fixedProperties: BasicProperties) 17 | extends LoggingContext { 18 | 19 | override lazy val asContextMap: Map[String, String] = { 20 | correlationId.map(_.asContextMap).getOrElse(Map.empty) 21 | } 22 | } 23 | 24 | private[rabbitmq] object DeliveryContext { 25 | def from(envelope: Envelope, properties: BasicProperties): DeliveryContext = { 26 | val correlationIdRaw = Option(properties.getCorrelationId).orElse { 27 | Option(properties.getHeaders).flatMap(h => Option(h.get(CorrelationIdHeaderName))).map(_.toString) 28 | } 29 | 30 | val fixedProperties = properties.builder().correlationId(correlationIdRaw.orNull).build() 31 | 32 | val correlationId = correlationIdRaw.map(CorrelationId) 33 | val messageId = MessageId(Option(fixedProperties.getMessageId).getOrElse("-none-")) 34 | 35 | val deliveryTag = DeliveryTag(envelope.getDeliveryTag) 36 | val routingKey = RoutingKey(fixedProperties.getOriginalRoutingKey.getOrElse(envelope.getRoutingKey)) 37 | 38 | DeliveryContext(messageId, correlationId, deliveryTag, routingKey, fixedProperties) 39 | } 40 | } 41 | 42 | private[rabbitmq] final case class CorrelationId(value: String) extends LoggingContext { 43 | def asContextMap: Map[String, String] = Map(CorrelationIdKeyName -> value) 44 | } 45 | 46 | private[rabbitmq] final case class MessageId(value: String) extends AnyVal 47 | private[rabbitmq] final case class RoutingKey(value: String) extends AnyVal 48 | private[rabbitmq] final case class DeliveryTag(value: Long) extends AnyVal 49 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/ExponentialDelay.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import java.util.concurrent.TimeUnit 4 | import scala.concurrent.duration.{Duration, FiniteDuration} 5 | 6 | class ExponentialDelay(val initialDelay: Duration, val period: Duration, val factor: Double, val maxLength: Duration) { 7 | private val maxMillis = maxLength.toMillis 8 | 9 | def getExponentialDelay(attempt: Int): FiniteDuration = { 10 | if (attempt == 0) FiniteDuration(initialDelay._1, initialDelay._2) 11 | else { 12 | val millis = math.min( 13 | maxMillis, 14 | (period.toMillis * math.pow(factor, attempt - 1)).toLong 15 | ) 16 | FiniteDuration(millis, TimeUnit.MILLISECONDS) 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/JavaConverters.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import java.util.concurrent.{CompletableFuture, Executor} 4 | import 
java.util.{Date, Optional} 5 | 6 | import com.avast.clients.rabbitmq.api.{DeliveryMode, MessageProperties => ScalaProperties} 7 | import com.rabbitmq.client.AMQP 8 | import com.rabbitmq.client.AMQP.BasicProperties 9 | 10 | import scala.jdk.CollectionConverters._ 11 | import scala.concurrent._ 12 | import scala.util.{Failure, Success} 13 | 14 | private[rabbitmq] object JavaConverters { 15 | 16 | implicit class ScalaFuture2JavaCompletableFuture[A](val f: Future[A]) extends AnyVal { 17 | def asJava(implicit executor: ExecutionContext): CompletableFuture[A] = { 18 | val promise = new CompletableFuture[A]() 19 | 20 | f onComplete { 21 | case Success(result) => promise.complete(result) 22 | case Failure(ex) => promise.completeExceptionally(ex) 23 | } 24 | 25 | promise 26 | } 27 | } 28 | 29 | implicit class CompletableFuture2ScalaFuture[A](val f: CompletableFuture[A]) extends AnyVal { 30 | def asScala(implicit executor: Executor): Future[A] = { 31 | val promise = Promise[A]() 32 | 33 | f.whenCompleteAsync((result: A, ex: Throwable) => { 34 | if (ex == null) { 35 | promise.success(result) 36 | } else { 37 | promise.failure(ex) 38 | } 39 | () 40 | }, executor) 41 | 42 | promise.future 43 | } 44 | } 45 | 46 | implicit class ScalaOption2JavaOptional[T](val o: Option[T]) extends AnyVal { 47 | def asJava: Optional[T] = o match { 48 | case Some(value) => Optional.of(value) 49 | case None => Optional.empty() 50 | } 51 | } 52 | 53 | implicit class ScalaPropertiesConversions(val messageProperties: ScalaProperties) extends AnyVal { 54 | def asAMQP: AMQP.BasicProperties = { 55 | val builder = new BasicProperties.Builder() 56 | 57 | if (messageProperties.headers.nonEmpty) { 58 | builder.headers(messageProperties.headers.asJava) 59 | } 60 | messageProperties.contentType.foreach(builder.contentType) 61 | messageProperties.contentEncoding.foreach(builder.contentEncoding) 62 | messageProperties.deliveryMode.foreach(dm => builder.deliveryMode(dm.code)) 63 | messageProperties.priority.foreach(builder.priority) 64 | messageProperties.correlationId.foreach(builder.correlationId) 65 | messageProperties.replyTo.foreach(builder.replyTo) 66 | messageProperties.expiration.foreach(builder.expiration) 67 | messageProperties.messageId.foreach(builder.messageId) 68 | messageProperties.timestamp.map(i => new Date(i.toEpochMilli)).foreach(builder.timestamp) 69 | messageProperties.`type`.foreach(builder.`type`) 70 | messageProperties.userId.foreach(builder.userId) 71 | messageProperties.appId.foreach(builder.appId) 72 | messageProperties.clusterId.foreach(builder.clusterId) 73 | 74 | builder.build() 75 | } 76 | } 77 | 78 | implicit class AmqpPropertiesConversions(val properties: AMQP.BasicProperties) extends AnyVal { 79 | def asScala: ScalaProperties = { 80 | ScalaProperties( 81 | Option(properties.getContentType), 82 | Option(properties.getContentEncoding), 83 | Option(properties.getHeaders).map(_.asScala.toMap).getOrElse(Map.empty), 84 | DeliveryMode.fromCode(properties.getDeliveryMode), 85 | Option(properties.getPriority), 86 | Option(properties.getCorrelationId), 87 | Option(properties.getReplyTo), 88 | Option(properties.getExpiration), 89 | Option(properties.getMessageId), 90 | Option(properties.getTimestamp).map(_.toInstant), 91 | Option(properties.getType), 92 | Option(properties.getUserId), 93 | Option(properties.getAppId), 94 | Option(properties.getClusterId) 95 | ) 96 | } 97 | } 98 | } 99 | -------------------------------------------------------------------------------- 
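A minimal sketch of using the JavaConverters syntax above, assuming code that lives inside the com.avast.clients.rabbitmq package (the helpers are private[rabbitmq]); the object name PropertiesRoundTripExample and the concrete property values are illustrative only:

package com.avast.clients.rabbitmq

import com.avast.clients.rabbitmq.JavaConverters._
import com.avast.clients.rabbitmq.api.MessageProperties
import com.rabbitmq.client.AMQP

object PropertiesRoundTripExample {
  // Scala-side properties are converted to AMQP.BasicProperties right before basicPublish...
  val scalaProps: MessageProperties = MessageProperties(contentType = Some("text/plain; charset=utf-8"))
  val amqpProps: AMQP.BasicProperties = scalaProps.asAMQP

  // ...and incoming AMQP properties are mapped back to the Scala representation for consumers.
  val parsedBack: MessageProperties = amqpProps.asScala
}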
/core/src/main/scala/com/avast/clients/rabbitmq/MultiFormatConsumer.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import cats.effect.Sync 4 | import cats.implicits.{catsSyntaxApplicativeError, catsSyntaxFlatMapOps, toFunctorOps} 5 | import com.avast.bytes.Bytes 6 | import com.avast.clients.rabbitmq.api._ 7 | import com.avast.clients.rabbitmq.logging.ImplicitContextLogger 8 | 9 | import scala.collection.immutable 10 | 11 | class MultiFormatConsumer[F[_], A] private (supportedConverters: immutable.Seq[CheckedDeliveryConverter[A]], 12 | action: Delivery[A] => F[DeliveryResult])(implicit F: Sync[F]) 13 | extends (Delivery[Bytes] => F[DeliveryResult]) { 14 | private val logger = ImplicitContextLogger.createLogger[F, MultiFormatConsumer[F, A]] 15 | 16 | override def apply(delivery: Delivery[Bytes]): F[DeliveryResult] = { 17 | implicit val correlationId: CorrelationId = CorrelationId(delivery.properties.correlationId.getOrElse("unknown")) 18 | 19 | F.delay { 20 | supportedConverters 21 | .collectFirst { 22 | case c if c.canConvert(delivery) => 23 | delivery.flatMap[A] { d => 24 | c.convert(d.body) match { 25 | case Right(a) => d.copy(body = a) 26 | case Left(ce) => d.toMalformed(ce) 27 | } 28 | } 29 | } 30 | .getOrElse { 31 | delivery.toMalformed(ConversionException(s"Could not find suitable converter for $delivery")) 32 | } 33 | } 34 | .recoverWith { 35 | case e => 36 | logger.debug(e)("Error while converting").as { 37 | delivery.toMalformed(ConversionException("Error while converting", e)) 38 | } 39 | } >>= action 40 | } 41 | } 42 | 43 | object MultiFormatConsumer { 44 | def forType[F[_]: Sync, A](supportedConverters: CheckedDeliveryConverter[A]*)( 45 | action: Delivery[A] => F[DeliveryResult]): MultiFormatConsumer[F, A] = { 46 | new MultiFormatConsumer[F, A](supportedConverters.toList, action) 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/RecoveryDelayHandlers.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import com.rabbitmq.client.RecoveryDelayHandler 4 | 5 | import scala.concurrent.duration._ 6 | 7 | // Delay intervals should avoid values that are too low (lower than 2 seconds). 
8 | // https://www.rabbitmq.com/api-guide.html#automatic-recovery-limitations 9 | object RecoveryDelayHandlers { 10 | case class Linear(initialDelay: Duration = 5.second, period: Duration = 5.seconds) extends RecoveryDelayHandler { 11 | override def getDelay(recoveryAttempts: Int): Long = { 12 | if (recoveryAttempts == 0) initialDelay.toMillis else period.toMillis 13 | } 14 | } 15 | 16 | case class Exponential(override val initialDelay: Duration = 2.second, 17 | override val period: Duration = 2.seconds, 18 | override val factor: Double = 2.0, 19 | override val maxLength: Duration = 32.seconds) 20 | extends ExponentialDelay(initialDelay, period, factor, maxLength) with RecoveryDelayHandler { 21 | override def getDelay(recoveryAttempts: Int): Long = { 22 | getExponentialDelay(recoveryAttempts).toMillis 23 | } 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/RepublishStrategy.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import cats.effect.{Blocker, ContextShift, Sync} 4 | import cats.implicits.{catsSyntaxApplicativeError, catsSyntaxFlatMapOps, toFlatMapOps} 5 | import com.avast.bytes.Bytes 6 | import com.avast.clients.rabbitmq.logging.ImplicitContextLogger 7 | import com.rabbitmq.client.AMQP.BasicProperties 8 | 9 | import scala.util.{Left, Right} 10 | 11 | trait RepublishStrategy[F[_]] { 12 | def republish(blocker: Blocker, channel: ServerChannel, consumerName: String)(originalQueueName: String, 13 | properties: BasicProperties, 14 | rawBody: Bytes)(implicit dctx: DeliveryContext): F[Unit] 15 | } 16 | 17 | object RepublishStrategy { 18 | 19 | case class CustomExchange[F[_]: Sync: ContextShift](exchangeName: String) extends RepublishStrategy[F] { 20 | private val logger = ImplicitContextLogger.createLogger[F, CustomExchange[F]] 21 | 22 | def republish(blocker: Blocker, channel: ServerChannel, consumerName: String)( 23 | originalQueueName: String, 24 | properties: BasicProperties, 25 | rawBody: Bytes)(implicit dctx: DeliveryContext): F[Unit] = { 26 | import dctx._ 27 | 28 | logger.debug { 29 | s"[$consumerName] Republishing delivery ($messageId, $deliveryTag) to end of queue '$originalQueueName' through '$exchangeName'($originalQueueName)" 30 | } >> 31 | blocker 32 | .delay { 33 | if (!channel.isOpen) throw new IllegalStateException("Cannot republish delivery on closed channel") 34 | channel.basicPublish(exchangeName, originalQueueName, properties, rawBody.toByteArray) 35 | channel.basicAck(deliveryTag.value, false) 36 | } 37 | .attempt 38 | .flatMap { 39 | case Right(()) => Sync[F].unit 40 | case Left(e) => logger.warn(e)(s"[$consumerName] Error while republishing the delivery $messageId") 41 | } 42 | } 43 | } 44 | 45 | case class DefaultExchange[F[_]: Sync: ContextShift]() extends RepublishStrategy[F] { 46 | private val logger = ImplicitContextLogger.createLogger[F, DefaultExchange[F]] 47 | 48 | def republish(blocker: Blocker, channel: ServerChannel, consumerName: String)( 49 | originalQueueName: String, 50 | properties: BasicProperties, 51 | rawBody: Bytes)(implicit dctx: DeliveryContext): F[Unit] = { 52 | import dctx._ 53 | 54 | logger.debug { 55 | s"[$consumerName] Republishing delivery ($messageId, $deliveryTag) to end of queue '$originalQueueName' (through default exchange)" 56 | } >> 57 | blocker 58 | .delay { 59 | if (!channel.isOpen) throw new IllegalStateException("Cannot republish delivery on 
closed channel") 60 | channel.basicPublish("", originalQueueName, properties, rawBody.toByteArray) 61 | channel.basicAck(deliveryTag.value, false) 62 | } 63 | .attempt 64 | .flatMap { 65 | case Right(()) => Sync[F].unit 66 | case Left(e) => logger.warn(e)(s"[$consumerName] Error while republishing the delivery $messageId") 67 | } 68 | } 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/converters.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.clients.rabbitmq.api.{ConversionException, Delivery, MessageProperties} 5 | 6 | import scala.annotation.implicitNotFound 7 | 8 | /** Tries to convert `Delivery[Bytes]` to `Delivery[A]`. 9 | */ 10 | @implicitNotFound("Could not find DeliveryConverter for ${A}, try to import or define some") 11 | trait DeliveryConverter[A] { 12 | def convert(d: Bytes): Either[ConversionException, A] 13 | } 14 | 15 | /** Tries to convert `Delivery[Bytes]` to `Delivery[A]`. Contains check whether the `Delivery[Bytes]` fits for the conversion. 16 | */ 17 | @implicitNotFound("Could not find CheckedDeliveryConverter for ${A}, try to import or define some") 18 | trait CheckedDeliveryConverter[A] extends DeliveryConverter[A] { 19 | def canConvert(d: Delivery[Bytes]): Boolean 20 | } 21 | 22 | object DeliveryConverter { 23 | implicit val identity: DeliveryConverter[Bytes] = (b: Bytes) => Right(b) 24 | implicit val bytesArray: DeliveryConverter[Array[Byte]] = (b: Bytes) => Right(b.toByteArray) 25 | implicit val utf8Text: CheckedDeliveryConverter[String] = new CheckedDeliveryConverter[String] { 26 | override def canConvert(d: Delivery[Bytes]): Boolean = d.properties.contentType match { 27 | case Some(contentType) => 28 | val ct = contentType.toLowerCase 29 | ct.startsWith("text") && (ct.contains("charset=utf-8") || ct.contains("charset=\"utf-8\"")) 30 | case None => true 31 | } 32 | override def convert(b: Bytes): Either[ConversionException, String] = Right(b.toStringUtf8) 33 | } 34 | } 35 | 36 | @implicitNotFound("Could not find ProductConverter for ${A}, try to import or define some") 37 | trait ProductConverter[A] { 38 | def convert(p: A): Either[ConversionException, Bytes] 39 | 40 | /** Fills some properties for the producer's message. It's usually just Content-Type but that is implementation-specific. 
41 | */ 42 | def fillProperties(properties: MessageProperties): MessageProperties 43 | } 44 | 45 | object ProductConverter { 46 | implicit val identity: ProductConverter[Bytes] = new ProductConverter[Bytes] { 47 | override def convert(p: Bytes): Either[ConversionException, Bytes] = Right(p) 48 | override def fillProperties(properties: MessageProperties): MessageProperties = properties.contentType match { 49 | case None => properties.copy(contentType = Some("application/octet-stream")) 50 | case _ => properties 51 | } 52 | } 53 | implicit val bytesArray: ProductConverter[Array[Byte]] = new ProductConverter[Array[Byte]] { 54 | override def convert(p: Array[Byte]): Either[ConversionException, Bytes] = Right(Bytes.copyFrom(p)) 55 | override def fillProperties(properties: MessageProperties): MessageProperties = properties.contentType match { 56 | case None => properties.copy(contentType = Some("application/octet-stream")) 57 | case _ => properties 58 | } 59 | } 60 | implicit val utf8Text: ProductConverter[String] = new ProductConverter[String] { 61 | override def convert(p: String): Either[ConversionException, Bytes] = Right(Bytes.copyFromUtf8(p)) 62 | override def fillProperties(properties: MessageProperties): MessageProperties = properties.contentType match { 63 | case None => properties.copy(contentType = Some("text/plain; charset=utf-8")) 64 | case _ => properties 65 | } 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/logging/ImplicitContextLogger.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.logging 2 | 3 | import cats.effect.Sync 4 | 5 | import scala.reflect.ClassTag 6 | 7 | private[rabbitmq] class ImplicitContextLogger[F[_]](private val contextLessLogger: GenericPlainLogger[F]) { 8 | 9 | // contextLessLogger.info() 10 | def info[Ctx <: LoggingContext](msg: => String)(implicit ctx: Ctx): F[Unit] = contextLessLogger.info(ctx.asContextMap)(msg) 11 | def info[Ctx <: LoggingContext](t: Throwable)(msg: => String)(implicit ctx: Ctx): F[Unit] = 12 | contextLessLogger.info(ctx.asContextMap, t)(msg) 13 | def plainInfo(msg: => String): F[Unit] = contextLessLogger.info(msg) 14 | def plainInfo(t: Throwable)(msg: => String): F[Unit] = contextLessLogger.info(t)(msg) 15 | 16 | // contextLessLogger.warn() 17 | def warn[Ctx <: LoggingContext](msg: => String)(implicit ctx: Ctx): F[Unit] = contextLessLogger.warn(ctx.asContextMap)(msg) 18 | def warn[Ctx <: LoggingContext](t: Throwable)(msg: => String)(implicit ctx: Ctx): F[Unit] = 19 | contextLessLogger.warn(ctx.asContextMap, t)(msg) 20 | def plainWarn(msg: => String): F[Unit] = contextLessLogger.warn(msg) 21 | def plainWarn(t: Throwable)(msg: => String): F[Unit] = contextLessLogger.warn(t)(msg) 22 | 23 | // contextLessLogger.error() 24 | def error[Ctx <: LoggingContext](msg: => String)(implicit ctx: Ctx): F[Unit] = contextLessLogger.error(ctx.asContextMap)(msg) 25 | def error[Ctx <: LoggingContext](t: Throwable)(msg: => String)(implicit ctx: Ctx): F[Unit] = 26 | contextLessLogger.error(ctx.asContextMap, t)(msg) 27 | def plainError(msg: => String): F[Unit] = contextLessLogger.error(msg) 28 | def plainError(t: Throwable)(msg: => String): F[Unit] = contextLessLogger.error(t)(msg) 29 | 30 | // contextLessLogger.debug() 31 | def debug[Ctx <: LoggingContext](msg: => String)(implicit ctx: Ctx): F[Unit] = contextLessLogger.debug(ctx.asContextMap)(msg) 32 | def debug[Ctx <: 
LoggingContext](t: Throwable)(msg: => String)(implicit ctx: Ctx): F[Unit] = 33 | contextLessLogger.debug(ctx.asContextMap, t)(msg) 34 | def plainDebug(msg: => String): F[Unit] = contextLessLogger.debug(msg) 35 | def plainDebug(t: Throwable)(msg: => String): F[Unit] = contextLessLogger.debug(t)(msg) 36 | 37 | // contextLessLogger.trace() 38 | def trace[Ctx <: LoggingContext](msg: => String)(implicit ctx: Ctx): F[Unit] = contextLessLogger.trace(ctx.asContextMap)(msg) 39 | def trace[Ctx <: LoggingContext](t: Throwable)(msg: => String)(implicit ctx: Ctx): F[Unit] = 40 | contextLessLogger.trace(ctx.asContextMap, t)(msg) 41 | def plainTrace(msg: => String): F[Unit] = contextLessLogger.trace(msg) 42 | def plainTrace(t: Throwable)(msg: => String): F[Unit] = contextLessLogger.trace(t)(msg) 43 | } 44 | 45 | private[rabbitmq] object ImplicitContextLogger { 46 | def createLogger[F[_]: Sync, A](implicit ct: ClassTag[A]): ImplicitContextLogger[F] = 47 | new ImplicitContextLogger(createPlainLogger[F, A]) 48 | } 49 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/logging/LoggingContext.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.logging 2 | 3 | private[rabbitmq] trait LoggingContext { 4 | def asContextMap: Map[String, String] 5 | } 6 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/logging/package.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import cats.effect.Sync 4 | import org.typelevel.log4cats.SelfAwareStructuredLogger 5 | import org.typelevel.log4cats.slf4j.Slf4jLogger 6 | 7 | import scala.reflect.ClassTag 8 | 9 | package object logging { 10 | type GenericPlainLogger[F[_]] = SelfAwareStructuredLogger[F] 11 | 12 | private[rabbitmq] def createPlainLogger[F[_]: Sync, A](implicit ct: ClassTag[A]): GenericPlainLogger[F] = 13 | Slf4jLogger.getLoggerFromClass(ct.runtimeClass) 14 | } 15 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/publisher/BaseRabbitMQProducer.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.publisher 2 | 3 | import cats.effect.{Blocker, ContextShift, Effect, Sync} 4 | import cats.syntax.applicativeError._ 5 | import cats.syntax.flatMap._ 6 | import cats.syntax.functor._ 7 | import com.avast.bytes.Bytes 8 | import com.avast.clients.rabbitmq.JavaConverters._ 9 | import com.avast.clients.rabbitmq.api.CorrelationIdStrategy.FromPropertiesOrRandomNew 10 | import com.avast.clients.rabbitmq.api._ 11 | import com.avast.clients.rabbitmq.logging.ImplicitContextLogger 12 | import com.avast.clients.rabbitmq.{startAndForget, CorrelationId, ProductConverter, ServerChannel} 13 | import com.avast.metrics.scalaeffectapi.Monitor 14 | import com.rabbitmq.client.AMQP.BasicProperties 15 | import com.rabbitmq.client.{AlreadyClosedException, ReturnListener} 16 | 17 | import java.util.UUID 18 | import scala.util.control.NonFatal 19 | 20 | abstract class BaseRabbitMQProducer[F[_], A: ProductConverter](name: String, 21 | exchangeName: String, 22 | channel: ServerChannel, 23 | defaultProperties: MessageProperties, 24 | reportUnroutable: Boolean, 25 | sizeLimitBytes: Option[Int], 26 | blocker: Blocker, 27 | logger: 
ImplicitContextLogger[F], 28 | monitor: Monitor[F])(implicit F: Effect[F], cs: ContextShift[F]) 29 | extends RabbitMQProducer[F, A] { 30 | 31 | private val sentMeter = monitor.meter("sent") 32 | private val sentFailedMeter = monitor.meter("sentFailed") 33 | private val unroutableMeter = monitor.meter("unroutable") 34 | 35 | private val converter = implicitly[ProductConverter[A]] 36 | 37 | private val sendLock = new Object 38 | 39 | channel.addReturnListener(if (reportUnroutable) LoggingReturnListener else NoOpReturnListener) 40 | 41 | def sendMessage(routingKey: String, body: Bytes, properties: MessageProperties)(implicit correlationId: CorrelationId): F[Unit] 42 | 43 | override def send(routingKey: String, body: A, properties: Option[MessageProperties] = None)( 44 | implicit cidStrategy: CorrelationIdStrategy = FromPropertiesOrRandomNew(properties)): F[Unit] = { 45 | implicit val correlationId: CorrelationId = CorrelationId(cidStrategy.toCIDValue) 46 | 47 | val finalProperties = converter.fillProperties { 48 | val initialProperties = properties.getOrElse(defaultProperties) 49 | 50 | initialProperties.copy( 51 | messageId = initialProperties.messageId.orElse(Some(UUID.randomUUID().toString)), 52 | correlationId = Some(correlationId.value) // it was taken from it if it was there 53 | ) 54 | } 55 | 56 | converter.convert(body) match { 57 | case Right(convertedBody) => 58 | for { 59 | _ <- checkSize(convertedBody, routingKey) 60 | _ <- processErrors(sendMessage(routingKey, convertedBody, finalProperties), routingKey) 61 | } yield () 62 | case Left(ce) => Sync[F].raiseError(ce) 63 | } 64 | } 65 | 66 | protected def basicSend(routingKey: String, body: Bytes, properties: MessageProperties, preSendAction: Long => Unit = (_: Long) => ())( 67 | implicit correlationId: CorrelationId): F[Long] = { 68 | for { 69 | _ <- logger.debug(s"Sending message with ${body.size()} B to exchange $exchangeName with routing key '$routingKey' and $properties") 70 | sequenceNumber <- blocker.delay { 71 | sendLock.synchronized { 72 | // see https://www.rabbitmq.com/api-guide.html#channel-threads 73 | val sequenceNumber = channel.getNextPublishSeqNo 74 | preSendAction(sequenceNumber) 75 | channel.basicPublish(exchangeName, routingKey, properties.asAMQP, body.toByteArray) 76 | sequenceNumber 77 | } 78 | } 79 | _ <- sentMeter.mark 80 | } yield sequenceNumber 81 | } 82 | 83 | private def processErrors(from: F[Unit], routingKey: String)(implicit correlationId: CorrelationId): F[Unit] = { 84 | from.recoverWith { 85 | case ce: AlreadyClosedException => 86 | logger.debug(ce)(s"[$name] Failed to send message with routing key '$routingKey' to exchange '$exchangeName'") >> 87 | sentFailedMeter.mark >> 88 | F.raiseError[Unit](ChannelNotRecoveredException("Channel closed, wait for recovery", ce)) 89 | 90 | case NonFatal(e) => 91 | logger.debug(e)(s"[$name] Failed to send message with routing key '$routingKey' to exchange '$exchangeName'") >> 92 | sentFailedMeter.mark >> 93 | F.raiseError[Unit](e) 94 | } 95 | } 96 | 97 | private def checkSize(bytes: Bytes, routingKey: String)(implicit correlationId: CorrelationId): F[Unit] = { 98 | sizeLimitBytes match { 99 | case Some(limit) => 100 | val size = bytes.size() 101 | if (size >= limit) { 102 | logger.warn { 103 | s"[$name] Will not send message with $size B to exchange $exchangeName with routing key '$routingKey' as it is over the limit $limit B" 104 | } >> F.raiseError[Unit](TooBigMessage(s"Message too big ($size/$limit)")) 105 | } else F.unit 106 | 107 | case None => F.unit 108 | } 
109 | } 110 | 111 | // scalastyle:off 112 | private object LoggingReturnListener extends ReturnListener { 113 | override def handleReturn(replyCode: Int, 114 | replyText: String, 115 | exchange: String, 116 | routingKey: String, 117 | properties: BasicProperties, 118 | body: Array[Byte]): Unit = { 119 | startAndForget { 120 | unroutableMeter.mark >> 121 | logger.plainWarn( 122 | s"[$name] Message sent with routingKey '$routingKey' to exchange '$exchange' (message ID '${properties.getMessageId}', body size ${body.length} B) is unroutable ($replyCode: $replyText)" 123 | ) 124 | } 125 | } 126 | } 127 | 128 | private object NoOpReturnListener extends ReturnListener { 129 | override def handleReturn(replyCode: Int, 130 | replyText: String, 131 | exchange: String, 132 | routingKey: String, 133 | properties: BasicProperties, 134 | body: Array[Byte]): Unit = { 135 | startAndForget { 136 | unroutableMeter.mark 137 | } 138 | } 139 | } 140 | 141 | } 142 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/publisher/DefaultRabbitMQProducer.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.publisher 2 | 3 | import cats.effect.{Blocker, ConcurrentEffect, ContextShift} 4 | import cats.implicits.toFunctorOps 5 | import com.avast.bytes.Bytes 6 | import com.avast.clients.rabbitmq.api.MessageProperties 7 | import com.avast.clients.rabbitmq.logging.ImplicitContextLogger 8 | import com.avast.clients.rabbitmq.{CorrelationId, ProductConverter, ServerChannel} 9 | import com.avast.metrics.scalaeffectapi.Monitor 10 | 11 | class DefaultRabbitMQProducer[F[_], A: ProductConverter](name: String, 12 | exchangeName: String, 13 | channel: ServerChannel, 14 | defaultProperties: MessageProperties, 15 | reportUnroutable: Boolean, 16 | sizeLimitBytes: Option[Int], 17 | blocker: Blocker, 18 | logger: ImplicitContextLogger[F], 19 | monitor: Monitor[F])(implicit F: ConcurrentEffect[F], cs: ContextShift[F]) 20 | extends BaseRabbitMQProducer[F, A](name, 21 | exchangeName, 22 | channel, 23 | defaultProperties, 24 | reportUnroutable, 25 | sizeLimitBytes, 26 | blocker, 27 | logger, 28 | monitor) { 29 | override def sendMessage(routingKey: String, body: Bytes, properties: MessageProperties)(implicit correlationId: CorrelationId): F[Unit] = 30 | basicSend(routingKey, body, properties).void 31 | } 32 | -------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/publisher/PublishConfirmsRabbitMQProducer.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.publisher 2 | 3 | import cats.effect.Concurrent.ops.toAllConcurrentOps 4 | import cats.effect.concurrent.Deferred 5 | import cats.effect.{Blocker, ConcurrentEffect, ContextShift} 6 | import cats.implicits._ 7 | import com.avast.bytes.Bytes 8 | import com.avast.clients.rabbitmq.api.{MaxAttemptsReached, MessageProperties, NotAcknowledgedPublish} 9 | import com.avast.clients.rabbitmq.logging.ImplicitContextLogger 10 | import com.avast.clients.rabbitmq.{CorrelationId, ProductConverter, ServerChannel, startAndForget} 11 | import com.avast.metrics.scalaeffectapi.Monitor 12 | import com.rabbitmq.client.ConfirmListener 13 | 14 | import java.util.concurrent.ConcurrentHashMap 15 | import scala.jdk.CollectionConverters._ 16 | class PublishConfirmsRabbitMQProducer[F[_], A: ProductConverter](name: String, 
17 | exchangeName: String, 18 | channel: ServerChannel, 19 | defaultProperties: MessageProperties, 20 | sendAttempts: Int, 21 | reportUnroutable: Boolean, 22 | sizeLimitBytes: Option[Int], 23 | blocker: Blocker, 24 | logger: ImplicitContextLogger[F], 25 | monitor: Monitor[F])(implicit F: ConcurrentEffect[F], cs: ContextShift[F]) 26 | extends BaseRabbitMQProducer[F, A](name, 27 | exchangeName, 28 | channel, 29 | defaultProperties, 30 | reportUnroutable, 31 | sizeLimitBytes, 32 | blocker, 33 | logger, 34 | monitor) { 35 | 36 | channel.confirmSelect() 37 | channel.addConfirmListener(DefaultConfirmListener) 38 | 39 | private val acked = monitor.meter("acked") 40 | private val nacked = monitor.meter("nacked") 41 | 42 | private[rabbitmq] val confirmationCallbacks = { 43 | new ConcurrentHashMap[Long, Deferred[F, Either[NotAcknowledgedPublish, Unit]]]().asScala 44 | } 45 | 46 | override def sendMessage(routingKey: String, body: Bytes, properties: MessageProperties)(implicit correlationId: CorrelationId): F[Unit] = 47 | sendWithAck(routingKey, body, properties, 1) 48 | 49 | private def sendWithAck(routingKey: String, body: Bytes, properties: MessageProperties, attemptCount: Int)( 50 | implicit correlationId: CorrelationId): F[Unit] = { 51 | 52 | if (attemptCount > sendAttempts) { 53 | F.raiseError(MaxAttemptsReached("Exhausted max number of attempts")) 54 | } else { 55 | for { 56 | confirmationCallback <- Deferred.apply[F, Either[NotAcknowledgedPublish, Unit]] 57 | sequenceNumber <- basicSend(routingKey, body, properties, (sequenceNumber: Long) => { 58 | confirmationCallbacks += sequenceNumber -> confirmationCallback 59 | }) 60 | result <- confirmationCallback.get 61 | _ <- F.delay(confirmationCallbacks -= sequenceNumber) 62 | _ <- result match { 63 | case Left(err) => 64 | val sendResult = if (sendAttempts > 1) { 65 | logger.plainTrace(s"Republishing nacked message with sequenceNumber $sequenceNumber") >> 66 | sendWithAck(routingKey, body, properties, attemptCount + 1) 67 | } else { 68 | F.raiseError(err) 69 | } 70 | nacked.mark >> sendResult 71 | case Right(_) => 72 | acked.mark 73 | } 74 | } yield () 75 | } 76 | } 77 | 78 | private[rabbitmq] object DefaultConfirmListener extends ConfirmListener { 79 | 80 | override def handleAck(deliveryTag: Long, multiple: Boolean): Unit = { 81 | startAndForget { 82 | logger.plainTrace(s"Acked $deliveryTag, multiple: $multiple") >> completeDefer(deliveryTag, multiple, Right(())) 83 | } 84 | } 85 | 86 | override def handleNack(deliveryTag: Long, multiple: Boolean): Unit = { 87 | startAndForget { 88 | logger.plainTrace(s"Nacked $deliveryTag, multiple: $multiple") >> completeDefer( 89 | deliveryTag, 90 | multiple, 91 | Left(NotAcknowledgedPublish(s"Broker was unable to process the message", messageId = deliveryTag))) 92 | } 93 | } 94 | 95 | private def completeDefer(deliveryTag: Long, multiple: Boolean, result: Either[NotAcknowledgedPublish, Unit]): F[Unit] = { 96 | if (multiple) { 97 | confirmationCallbacks 98 | .filter { 99 | case (sequenceNumber, _) => sequenceNumber <= deliveryTag 100 | } 101 | .values 102 | .toList 103 | .traverse { callback => 104 | callback.complete(result).start 105 | } 106 | .void 107 | } else { 108 | confirmationCallbacks.get(deliveryTag) match { 109 | case Some(callback) => callback.complete(result) 110 | case None => 111 | logger.plainDebug(s"Received confirmation for unknown delivery tag $deliveryTag with result $result.") 112 | } 113 | } 114 | } 115 | } 116 | } 117 | 
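A hedged sketch of wiring the publisher-confirms producer above, modeled on PublisherConfirmsRabbitMQProducerTest further below; the exchange name, the sendAttempts value, Monitor.noOp() and the enclosing object are assumptions, and the code has to sit in the com.avast.clients.rabbitmq package because ServerChannel and ImplicitContextLogger are package-private:

package com.avast.clients.rabbitmq

import cats.effect.Blocker
import com.avast.bytes.Bytes
import com.avast.clients.rabbitmq.api.MessageProperties
import com.avast.clients.rabbitmq.logging.ImplicitContextLogger
import com.avast.clients.rabbitmq.publisher.PublishConfirmsRabbitMQProducer
import com.avast.metrics.scalaeffectapi.Monitor
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

object PublishConfirmsExample {
  // Builds a producer which waits for a broker confirm for every publish; a nacked message
  // is republished until the attempt budget is exhausted, then MaxAttemptsReached is raised.
  def confirmedProducer(channel: ServerChannel, blocker: Blocker): PublishConfirmsRabbitMQProducer[Task, Bytes] =
    new PublishConfirmsRabbitMQProducer[Task, Bytes](
      name = "exampleProducer",
      exchangeName = "exampleExchange",
      channel = channel,
      defaultProperties = MessageProperties.empty,
      sendAttempts = 2, // the message is published at most twice; persistent nacks end with MaxAttemptsReached
      reportUnroutable = true,
      sizeLimitBytes = None,
      blocker = blocker,
      logger = ImplicitContextLogger.createLogger,
      monitor = Monitor.noOp()
    )
}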
-------------------------------------------------------------------------------- /core/src/main/scala/com/avast/clients/rabbitmq/rabbitmq.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients 2 | 3 | import cats.effect.concurrent.Ref 4 | import cats.effect.{Effect, Sync} 5 | import cats.implicits.catsSyntaxFlatMapOps 6 | import com.avast.bytes.Bytes 7 | import com.avast.clients.rabbitmq.DefaultRabbitMQConsumer.{FederationOriginalRoutingKeyHeaderName, RepublishOriginalRoutingKeyHeaderName} 8 | import com.avast.clients.rabbitmq.api._ 9 | import com.rabbitmq.client.{RecoverableChannel, RecoverableConnection} 10 | import fs2.RaiseThrowable 11 | 12 | import java.util.concurrent.Executors 13 | import scala.concurrent.Future 14 | import scala.language.implicitConversions 15 | 16 | package object rabbitmq { 17 | private[rabbitmq] type ServerConnection = RecoverableConnection 18 | private[rabbitmq] type ServerChannel = RecoverableChannel 19 | 20 | private[rabbitmq] val ses = Executors.newScheduledThreadPool(2) 21 | 22 | type DeliveryReadAction[F[_], -A] = Delivery[A] => F[DeliveryResult] 23 | type ParsingFailureAction[F[_]] = (String, Delivery[Bytes], ConversionException) => F[DeliveryResult] 24 | 25 | implicit class StreamOps[F[_], A](val stream: fs2.Stream[F, A]) { 26 | def makeResilient(maxErrors: Int)(logError: Throwable => F[Unit])(implicit F: Sync[F], rt: RaiseThrowable[F]): fs2.Stream[F, A] = { 27 | fs2.Stream.eval(Ref[F].of(maxErrors)).flatMap { failureCounter => 28 | lazy val resilientStream: fs2.Stream[F, A] = stream.handleErrorWith { err => 29 | fs2.Stream.eval(logError(err) >> failureCounter.modify(a => (a - 1, a - 1))).flatMap { attemptsRest => 30 | if (attemptsRest <= 0) fs2.Stream.raiseError[F](err) else resilientStream 31 | } 32 | } 33 | 34 | resilientStream 35 | } 36 | } 37 | } 38 | 39 | private[rabbitmq] implicit class DeliveryOps[A](val d: Delivery[A]) extends AnyVal { 40 | def mapBody[B](f: A => B): Delivery[B] = d match { 41 | case ok: Delivery.Ok[A] => ok.copy(body = f(ok.body)) 42 | case m: Delivery.MalformedContent => m 43 | } 44 | 45 | def flatMap[B](f: Delivery.Ok[A] => Delivery[B]): Delivery[B] = d match { 46 | case ok: Delivery.Ok[A] => f(ok) 47 | case m: Delivery.MalformedContent => m 48 | } 49 | 50 | def withRedactedBody: Delivery[String] = mapBody(_ => "--redacted--") 51 | } 52 | 53 | private[rabbitmq] implicit class DeliveryBytesOps(val d: Delivery[Bytes]) extends AnyVal { 54 | 55 | def toMalformed(ce: ConversionException): Delivery.MalformedContent = d match { 56 | case ok: Delivery.Ok[Bytes] => Delivery.MalformedContent(ok.body, ok.properties, ok.routingKey, ce) 57 | case m: Delivery.MalformedContent => m.copy(ce = ce) 58 | } 59 | } 60 | 61 | private[rabbitmq] implicit class BasicPropertiesOps(val props: com.rabbitmq.client.AMQP.BasicProperties) extends AnyVal { 62 | def getOriginalRoutingKey: Option[String] = { 63 | for { 64 | headers <- Option(props.getHeaders) 65 | ork <- { 66 | Option(headers.get(RepublishOriginalRoutingKeyHeaderName)) 67 | .orElse(Option(headers.get(FederationOriginalRoutingKeyHeaderName))) 68 | } 69 | } yield ork.toString 70 | } 71 | } 72 | 73 | private[rabbitmq] implicit class RunningF[F[_], A](val f: F[A]) extends AnyVal { 74 | def unsafeStartAndForget()(implicit F: Effect[F]): Unit = { 75 | F.toIO(f).unsafeToFuture() 76 | () 77 | } 78 | } 79 | 80 | private[rabbitmq] def startAndForget[F[_]: Effect](f: F[Unit]): Unit = { 81 | f.unsafeStartAndForget 82 | } 83 | 84 | } 85 | 
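A short usage sketch for StreamOps.makeResilient from the package object above (PackageTest below exercises the same behaviour); the names and the error budget of 3 are illustrative:

import monix.eval.Task
import com.avast.clients.rabbitmq._

object MakeResilientExample {
  // Restarts the stream after each failure and logs it; once `maxErrors` failures have been
  // consumed, the last error is re-raised to the caller.
  def resilient[A](deliveries: fs2.Stream[Task, A]): fs2.Stream[Task, A] =
    deliveries.makeResilient(maxErrors = 3) { err =>
      Task.delay(println(s"Stream failed, restarting: ${err.getMessage}"))
    }
}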
-------------------------------------------------------------------------------- /core/src/test/proto/ExampleEvents.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package com.avast.clients.rabbitmq.test; 4 | 5 | message FileSource { 6 | bytes file_id = 1; 7 | string source = 2; 8 | } 9 | 10 | message NewFileSourceAdded { 11 | repeated FileSource file_sources = 1; 12 | } 13 | -------------------------------------------------------------------------------- /core/src/test/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | System.out 5 | 6 | 7 | %d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%thread] %-35logger{35}: %msg\(%file:%line\)%n%xThrowable{full} 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /core/src/test/scala/com/avast/clients/rabbitmq/ByteUtils.java: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq; 2 | 3 | import com.avast.bytes.Bytes; 4 | 5 | import java.time.Duration; 6 | import java.util.Base64; 7 | import java.util.Locale; 8 | 9 | public final class ByteUtils { 10 | private static final char[] HEX_CHARS = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'}; 11 | private static final Base64.Encoder BASE_64_ENCODER = Base64.getEncoder(); 12 | private static final Base64.Decoder BASE_64_DECODER = Base64.getDecoder(); 13 | 14 | private ByteUtils() { 15 | } 16 | 17 | /** 18 | * Converts an integer to an array of bytes. 19 | */ 20 | public static byte[] intToBytes(int value) { 21 | return intToBytes(value, new byte[4]); 22 | } 23 | 24 | /** 25 | * Fills in the array with bytes of the integer. 26 | */ 27 | public static byte[] intToBytes(int value, byte[] arr) { 28 | arr[0] = (byte) ((value >> 24) & 0xff); 29 | arr[1] = (byte) ((value >> 16) & 0xff); 30 | arr[2] = (byte) ((value >> 8) & 0xff); 31 | arr[3] = (byte) (value & 0xff); 32 | return arr; 33 | } 34 | 35 | /** 36 | * Converts an array of bytes to integer. 37 | *
38 | * The input array must have at least 1 and maximum 4 bytes in big endian. 39 | * 40 | * @param bytes 1-4 bytes of integer (BIG ENDIAN) 41 | * @return resulting integer 42 | */ 43 | public static int bytesToInt(byte[] bytes) { 44 | int i = 0; 45 | int shift = (bytes.length - 1) * 8; 46 | int result = 0; 47 | while (i < bytes.length) { 48 | result |= (((int) bytes[i]) & 0xff) << shift; 49 | i += 1; 50 | shift -= 8; 51 | } 52 | return result; 53 | } 54 | 55 | /** 56 | * Converts a long to an array of bytes. 57 | */ 58 | public static byte[] longToBytes(long value) { 59 | return longToBytes(value, new byte[8]); 60 | } 61 | 62 | /** 63 | * Fills in the array with bytes of the long. 64 | */ 65 | public static byte[] longToBytes(long value, byte[] arr) { 66 | arr[0] = (byte) ((value >> 56) & 0xff); 67 | arr[1] = (byte) ((value >> 48) & 0xff); 68 | arr[2] = (byte) ((value >> 40) & 0xff); 69 | arr[3] = (byte) ((value >> 32) & 0xff); 70 | arr[4] = (byte) ((value >> 24) & 0xff); 71 | arr[5] = (byte) ((value >> 16) & 0xff); 72 | arr[6] = (byte) ((value >> 8) & 0xff); 73 | arr[7] = (byte) (value & 0xff); 74 | return arr; 75 | } 76 | 77 | /** 78 | * Converts an array of bytes to long. 79 | *
80 | * The input array must have at least 1 and maximum 8 bytes in big endian. 81 | * 82 | * @param bytes 1-8 bytes of long (BIG ENDIAN) 83 | * @return resulting long 84 | */ 85 | public static long bytesToLong(byte[] bytes) { 86 | int i = 0; 87 | int shift = (bytes.length - 1) * 8; 88 | long result = 0L; 89 | while (i < bytes.length) { 90 | result |= (((long) bytes[i]) & 0xff) << shift; 91 | i += 1; 92 | shift -= 8; 93 | } 94 | return result; 95 | } 96 | 97 | /** 98 | * Converts an array of bytes into hexadecimal string. 99 | * 100 | * @return hex string representation with lower case characters 101 | */ 102 | public static String bytesToHex(byte[] arr) { 103 | char[] result = new char[arr.length * 2]; 104 | 105 | for (int i = 0; i < arr.length; ++i) { 106 | result[i * 2] = HEX_CHARS[(arr[i] >> 4) & 0xF]; 107 | result[i * 2 + 1] = HEX_CHARS[(arr[i] & 0xF)]; 108 | } 109 | 110 | return new String(result); 111 | } 112 | 113 | /** 114 | * Converts {@link Bytes} into hexadecimal lowercase string. 115 | * 116 | * @return hex string representation with lower case characters 117 | */ 118 | public static String bytesToHex(Bytes bytes) { 119 | int size = bytes.size(); 120 | char[] result = new char[size * 2]; 121 | 122 | for (int i = 0; i < size; ++i) { 123 | result[i * 2] = HEX_CHARS[(bytes.byteAt(i) >> 4) & 0xF]; 124 | result[i * 2 + 1] = HEX_CHARS[(bytes.byteAt(i) & 0xF)]; 125 | } 126 | 127 | return new String(result); 128 | } 129 | 130 | /** 131 | * Converts a hexadecimal string into an array of bytes. 132 | *
133 | * The string must have even number of characters. 134 | * 135 | * @param hex hexadecimal string with even number of characters 136 | */ 137 | public static byte[] hexToBytes(String hex) { 138 | final int length = hex.length(); 139 | if (length % 2 != 0) { 140 | throw new IllegalArgumentException("HexString needs to be even-length: " + hex); 141 | } 142 | byte[] result = new byte[length / 2]; 143 | for (int i = 0; i < length; i += 2) { 144 | int high = Character.getNumericValue(hex.charAt(i)); 145 | int low = Character.getNumericValue(hex.charAt(i + 1)); 146 | if (high == -1 || low == -1) { 147 | throw new IllegalArgumentException("HexString contains illegal characters: " + hex); 148 | } 149 | result[i / 2] = (byte) (high * 16 + low); 150 | } 151 | return result; 152 | } 153 | 154 | /** 155 | * Converts a hexadecimal string into {@link Bytes} 156 | *
157 | * The string must have even number of characters. 158 | * 159 | * @param hex hexadecimal string with even number of characters 160 | */ 161 | public static Bytes hexToBytesImmutable(String hex) { 162 | return Bytes.copyFromHex(hex); 163 | } 164 | 165 | /** 166 | * Converts an array of bytes into Base64 string. 167 | */ 168 | public static String bytesToBase64(byte[] arr) { 169 | return BASE_64_ENCODER.encodeToString(arr); 170 | } 171 | 172 | /** 173 | * Converts {@link Bytes} into Base64 string. 174 | */ 175 | public static String bytesToBase64(Bytes bytes) { 176 | return bytesToBase64(bytes.toByteArray()); 177 | } 178 | 179 | /** 180 | * Converts a Base64 string into an array of bytes. 181 | */ 182 | public static byte[] base64ToBytes(String base64) { 183 | return BASE_64_DECODER.decode(base64); 184 | } 185 | 186 | /** 187 | * Converts a Base64 string into {@link Bytes}. 188 | */ 189 | public static Bytes base64ToBytesImmutable(String base64) { 190 | return Bytes.copyFrom(base64ToBytes(base64)); 191 | } 192 | 193 | /** 194 | * Converts bytes to a human-readable string. 195 | */ 196 | public static String bytesToHumanReadableFormat(long bytes) { 197 | return bytesToHumanReadableFormat(bytes, false); 198 | } 199 | 200 | /** 201 | * Converts bytes to a human-readable string. 202 | * 203 | * @param si whether to use SI units (multiply by 1000 instead of 1024) 204 | */ 205 | public static String bytesToHumanReadableFormat(long bytes, boolean si) { 206 | int unit = si ? 1000 : 1024; 207 | if (bytes < unit) { 208 | return bytes + " B"; 209 | } 210 | int exp = (int) (Math.log(bytes) / Math.log(unit)); 211 | String pre = (si ? "kMGTPE" : "KMGTPE").charAt(exp - 1) + (si ? "" : "i"); 212 | return String.format(Locale.ENGLISH, "%.1f %sB", bytes / Math.pow(unit, exp), pre); 213 | } 214 | 215 | /** 216 | * Converts byte and duration into data-transfer rate human-readable format. 217 | */ 218 | public static String transferRateToHumanReadableFormat(long bytes, Duration duration) { 219 | return transferRateToHumanReadableFormat(bytes, duration, false); 220 | } 221 | 222 | /** 223 | * Converts byte and duration into data-transfer rate human-readable format. 
224 | * 225 | * @param si whether to use SI units (multiply by 1000 instead of 1024) 226 | */ 227 | public static String transferRateToHumanReadableFormat(long bytes, Duration duration, boolean si) { 228 | long seconds = Math.max(duration.toMillis() / 1000, 1); 229 | return bytesToHumanReadableFormat(bytes / seconds, si) + "/s"; 230 | } 231 | 232 | } 233 | -------------------------------------------------------------------------------- /core/src/test/scala/com/avast/clients/rabbitmq/DefaultRabbitMQProducerTest.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import cats.effect.ConcurrentEffect 4 | import com.avast.bytes.Bytes 5 | import com.avast.clients.rabbitmq.api._ 6 | import com.avast.clients.rabbitmq.logging.ImplicitContextLogger 7 | import com.avast.clients.rabbitmq.publisher.DefaultRabbitMQProducer 8 | import com.avast.metrics.scalaeffectapi.Monitor 9 | import com.rabbitmq.client.AMQP 10 | import com.rabbitmq.client.impl.recovery.AutorecoveringChannel 11 | import monix.eval.Task 12 | import monix.execution.Scheduler.Implicits.global 13 | import org.mockito.Mockito._ 14 | import org.mockito.{ArgumentCaptor, Matchers} 15 | 16 | import scala.util.Random 17 | 18 | class DefaultRabbitMQProducerTest extends TestBase { 19 | 20 | // default, is overridden in some tests 21 | implicit val cidStrategy: CorrelationIdStrategy = CorrelationIdStrategy.RandomNew 22 | 23 | test("basic") { 24 | val exchangeName = Random.nextString(10) 25 | val routingKey = Random.nextString(10) 26 | 27 | val channel = mock[AutorecoveringChannel] 28 | 29 | val producer = new DefaultRabbitMQProducer[Task, Bytes]( 30 | name = "test", 31 | exchangeName = exchangeName, 32 | channel = channel, 33 | monitor = Monitor.noOp(), 34 | defaultProperties = MessageProperties.empty, 35 | reportUnroutable = false, 36 | sizeLimitBytes = None, 37 | blocker = TestBase.testBlocker, 38 | logger = ImplicitContextLogger.createLogger 39 | ) 40 | 41 | val properties = new AMQP.BasicProperties.Builder() 42 | .contentType("application/octet-stream") 43 | .build() 44 | 45 | val body = Bytes.copyFromUtf8(Random.nextString(10)) 46 | 47 | producer.send(routingKey, body, Some(MessageProperties.empty)).await 48 | 49 | val captor = ArgumentCaptor.forClass(classOf[AMQP.BasicProperties]) 50 | 51 | verify(channel, times(1)).basicPublish(Matchers.eq(exchangeName), 52 | Matchers.eq(routingKey), 53 | captor.capture(), 54 | Matchers.eq(body.toByteArray)) 55 | 56 | val caughtProperties = captor.getValue 57 | 58 | assert(caughtProperties.getCorrelationId != null) 59 | 60 | val caughtWithoutIds = caughtProperties.builder().messageId(null).correlationId(null).build() 61 | 62 | assertResult(properties.toString)(caughtWithoutIds.toString) // AMQP.BasicProperties doesn't have `equals` method :-/ 63 | } 64 | 65 | test("correlation id is taken from properties") { 66 | val exchangeName = Random.nextString(10) 67 | val routingKey = Random.nextString(10) 68 | 69 | val channel = mock[AutorecoveringChannel] 70 | 71 | val producer = new DefaultRabbitMQProducer[Task, Bytes]( 72 | name = "test", 73 | exchangeName = exchangeName, 74 | channel = channel, 75 | monitor = Monitor.noOp(), 76 | defaultProperties = MessageProperties.empty, 77 | reportUnroutable = false, 78 | sizeLimitBytes = None, 79 | blocker = TestBase.testBlocker, 80 | logger = ImplicitContextLogger.createLogger 81 | ) 82 | 83 | val cid = Random.nextString(10) 84 | val cid2 = Random.nextString(10) 85 | 86 | val body = 
Bytes.copyFromUtf8(Random.nextString(10)) 87 | 88 | val mp = Some( 89 | MessageProperties(correlationId = Some(cid), headers = Map(CorrelationIdStrategy.CorrelationIdKeyName -> cid2.asInstanceOf[AnyRef])) 90 | ) 91 | 92 | implicit val cidStrategy: CorrelationIdStrategy = CorrelationIdStrategy.FromPropertiesOrRandomNew(mp) 93 | 94 | producer.send(routingKey, body, mp).await 95 | 96 | val captor = ArgumentCaptor.forClass(classOf[AMQP.BasicProperties]) 97 | 98 | verify(channel, times(1)).basicPublish(Matchers.eq(exchangeName), 99 | Matchers.eq(routingKey), 100 | captor.capture(), 101 | Matchers.eq(body.toByteArray)) 102 | 103 | // check that the one from properties was used 104 | assertResult(cid)(captor.getValue.getCorrelationId) 105 | } 106 | 107 | test("correlation id is taken from header if not in properties") { 108 | val exchangeName = Random.nextString(10) 109 | val routingKey = Random.nextString(10) 110 | 111 | val channel = mock[AutorecoveringChannel] 112 | 113 | val producer = new DefaultRabbitMQProducer[Task, Bytes]( 114 | name = "test", 115 | exchangeName = exchangeName, 116 | channel = channel, 117 | monitor = Monitor.noOp(), 118 | defaultProperties = MessageProperties.empty, 119 | reportUnroutable = false, 120 | sizeLimitBytes = None, 121 | blocker = TestBase.testBlocker, 122 | logger = ImplicitContextLogger.createLogger 123 | ) 124 | 125 | val cid = Random.nextString(10) 126 | 127 | val body = Bytes.copyFromUtf8(Random.nextString(10)) 128 | 129 | val mp = Some(MessageProperties(headers = Map(CorrelationIdStrategy.CorrelationIdKeyName -> cid.asInstanceOf[AnyRef]))) 130 | 131 | implicit val cidStrategy: CorrelationIdStrategy = CorrelationIdStrategy.FromPropertiesOrRandomNew(mp) 132 | 133 | producer.send(routingKey, body, mp).await 134 | 135 | val captor = ArgumentCaptor.forClass(classOf[AMQP.BasicProperties]) 136 | 137 | verify(channel, times(1)).basicPublish(Matchers.eq(exchangeName), 138 | Matchers.eq(routingKey), 139 | captor.capture(), 140 | Matchers.eq(body.toByteArray)) 141 | 142 | // check that the one from headers was used 143 | assertResult(cid)(captor.getValue.getCorrelationId) 144 | } 145 | 146 | test("correlation id is generated if not in header nor properties") { 147 | val exchangeName = Random.nextString(10) 148 | val routingKey = Random.nextString(10) 149 | 150 | val channel = mock[AutorecoveringChannel] 151 | 152 | val producer = new DefaultRabbitMQProducer[Task, Bytes]( 153 | name = "test", 154 | exchangeName = exchangeName, 155 | channel = channel, 156 | monitor = Monitor.noOp(), 157 | defaultProperties = MessageProperties.empty, 158 | reportUnroutable = false, 159 | sizeLimitBytes = None, 160 | blocker = TestBase.testBlocker, 161 | logger = ImplicitContextLogger.createLogger 162 | ) 163 | 164 | val body = Bytes.copyFromUtf8(Random.nextString(10)) 165 | 166 | producer.send(routingKey, body).await 167 | 168 | val captor = ArgumentCaptor.forClass(classOf[AMQP.BasicProperties]) 169 | 170 | verify(channel, times(1)).basicPublish(Matchers.eq(exchangeName), 171 | Matchers.eq(routingKey), 172 | captor.capture(), 173 | Matchers.eq(body.toByteArray)) 174 | 175 | // check that some CID was generated 176 | assert(captor.getValue.getCorrelationId != null) 177 | } 178 | 179 | test("too big message is denied") { 180 | val exchangeName = Random.nextString(10) 181 | val routingKey = Random.nextString(10) 182 | 183 | val limit = 500 184 | 185 | val channel = mock[AutorecoveringChannel] 186 | 187 | val producer = new DefaultRabbitMQProducer[Task, Bytes]( 188 | name = "test", 189 | 
exchangeName = exchangeName, 190 | channel = channel, 191 | monitor = Monitor.noOp(), 192 | defaultProperties = MessageProperties.empty, 193 | reportUnroutable = false, 194 | sizeLimitBytes = Some(limit), 195 | blocker = TestBase.testBlocker, 196 | logger = ImplicitContextLogger.createLogger 197 | ) 198 | 199 | // don't test anything except it doesn't fail 200 | producer.send(routingKey, Bytes.copyFrom(Array.fill(499)(32.toByte))).await 201 | 202 | assertThrows[TooBigMessage] { 203 | producer.send(routingKey, Bytes.copyFrom(Array.fill(501)(32.toByte))).await 204 | } 205 | 206 | assertThrows[TooBigMessage] { 207 | producer.send(routingKey, Bytes.copyFrom(Array.fill(Random.nextInt(1000) + 500)(32.toByte))).await 208 | } 209 | } 210 | } 211 | -------------------------------------------------------------------------------- /core/src/test/scala/com/avast/clients/rabbitmq/MultiFormatConsumerTest.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.clients.rabbitmq.api._ 5 | import com.avast.clients.rabbitmq.extras.format._ 6 | import io.circe.Decoder 7 | import io.circe.generic.extras.Configuration 8 | import io.circe.generic.extras.auto._ 9 | import monix.eval.Task 10 | import monix.execution.Scheduler.Implicits.global 11 | 12 | class MultiFormatConsumerTest extends TestBase { 13 | 14 | val StringDeliveryConverter: CheckedDeliveryConverter[String] = new CheckedDeliveryConverter[String] { 15 | override def canConvert(d: Delivery[Bytes]): Boolean = d.properties.contentType.contains("text/plain") 16 | 17 | override def convert(b: Bytes): Either[ConversionException, String] = Right(b.toStringUtf8) 18 | } 19 | 20 | private implicit val c: Configuration = Configuration.default.withSnakeCaseMemberNames 21 | private implicit val d: Decoder[Bytes] = Decoder.decodeString.map(ByteUtils.hexToBytesImmutable) 22 | 23 | case class FileSource(fileId: Bytes, source: String) 24 | 25 | case class NewFileSourceAdded(fileSources: Seq[FileSource]) 26 | 27 | test("basic") { 28 | val consumer = MultiFormatConsumer.forType[Task, String](StringDeliveryConverter) { 29 | case d: Delivery.Ok[String] => 30 | assertResult("abc321")(d.body) 31 | Task.now(DeliveryResult.Ack) 32 | 33 | case _ => fail() 34 | } 35 | 36 | val delivery = Delivery( 37 | body = Bytes.copyFromUtf8("abc321"), 38 | properties = MessageProperties(contentType = Some("text/plain")), 39 | routingKey = "" 40 | ) 41 | 42 | val result = consumer.apply(delivery).await 43 | 44 | assertResult(DeliveryResult.Ack)(result) 45 | } 46 | 47 | test("non-supported content-type") { 48 | val consumer = MultiFormatConsumer.forType[Task, String](StringDeliveryConverter) { 49 | case _: Delivery.Ok[String] => 50 | Task.now(DeliveryResult.Ack) 51 | case _ => 52 | Task.now(DeliveryResult.Reject) 53 | } 54 | 55 | val delivery = Delivery( 56 | body = Bytes.copyFromUtf8("abc321"), 57 | properties = MessageProperties(contentType = Some("text/javascript")), 58 | routingKey = "" 59 | ) 60 | 61 | val result = consumer.apply(delivery).await 62 | 63 | assertResult(DeliveryResult.Reject)(result) 64 | } 65 | 66 | test("json") { 67 | val consumer = MultiFormatConsumer.forType[Task, NewFileSourceAdded](JsonDeliveryConverter.derive()) { 68 | case d: Delivery.Ok[NewFileSourceAdded] => 69 | assertResult( 70 | NewFileSourceAdded( 71 | Seq( 72 | FileSource(Bytes.copyFromUtf8("abc"), "theSource"), 73 | FileSource(Bytes.copyFromUtf8("def"), "theSource") 74 | 
)))(d.body) 75 | 76 | Task.now(DeliveryResult.Ack) 77 | 78 | case _ => Task.now(DeliveryResult.Reject) 79 | } 80 | 81 | val delivery = Delivery( 82 | body = Bytes.copyFromUtf8(s""" 83 | | { "file_sources": [ 84 | | { "file_id": "${ByteUtils.bytesToHex("abc".getBytes)}", "source": "theSource" }, 85 | | { "file_id": "${ByteUtils.bytesToHex("def".getBytes)}", "source": "theSource" } 86 | | ]} 87 | """.stripMargin), 88 | properties = MessageProperties(contentType = Some("application/json")), 89 | routingKey = "" 90 | ) 91 | 92 | val result = consumer.apply(delivery).await 93 | 94 | assertResult(DeliveryResult.Ack)(result) 95 | } 96 | 97 | } 98 | -------------------------------------------------------------------------------- /core/src/test/scala/com/avast/clients/rabbitmq/PackageTest.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import monix.eval.Task 4 | import monix.execution.Scheduler.Implicits.global 5 | 6 | import java.util.concurrent.atomic.AtomicInteger 7 | import scala.concurrent.duration._ 8 | 9 | class PackageTest extends TestBase { 10 | test("StreamOps.makeResilient") { 11 | val c = new AtomicInteger(0) 12 | 13 | val stream = fs2.Stream.eval(Task.now(42)) 14 | val failing = stream.flatMap(_ => fs2.Stream.raiseError[Task](new RuntimeException("my exception"))) 15 | val resilient = failing.makeResilient(3) { _ => 16 | Task.delay(c.incrementAndGet()) 17 | } 18 | 19 | assertThrows[RuntimeException] { 20 | resilient.compile.drain.runSyncUnsafe(1.second) 21 | } 22 | 23 | assertResult(3)(c.get()) 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /core/src/test/scala/com/avast/clients/rabbitmq/PublisherConfirmsRabbitMQProducerTest.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.clients.rabbitmq.api.{MessageProperties, NotAcknowledgedPublish} 5 | import com.avast.clients.rabbitmq.logging.ImplicitContextLogger 6 | import com.avast.clients.rabbitmq.publisher.PublishConfirmsRabbitMQProducer 7 | import com.avast.metrics.scalaeffectapi.Monitor 8 | import com.rabbitmq.client.impl.recovery.AutorecoveringChannel 9 | import monix.eval.Task 10 | import monix.execution.Scheduler.Implicits.global 11 | import monix.execution.atomic.AtomicInt 12 | import org.mockito.Matchers 13 | import org.mockito.Matchers.any 14 | import org.mockito.Mockito.{times, verify, when} 15 | 16 | import scala.concurrent.Await 17 | import scala.concurrent.duration.DurationInt 18 | import scala.util.Random 19 | 20 | class PublisherConfirmsRabbitMQProducerTest extends TestBase { 21 | 22 | test("Message is acked after one retry") { 23 | val exchangeName = Random.nextString(10) 24 | val routingKey = Random.nextString(10) 25 | 26 | val nextSeqNumber = AtomicInt(0) 27 | val channel = mock[AutorecoveringChannel] 28 | when(channel.getNextPublishSeqNo).thenAnswer(_ => nextSeqNumber.get()) 29 | when(channel.basicPublish(any(), any(), any(), any())).thenAnswer(_ => { 30 | nextSeqNumber.increment() 31 | }) 32 | 33 | val producer = new PublishConfirmsRabbitMQProducer[Task, Bytes]( 34 | name = "test", 35 | exchangeName = exchangeName, 36 | channel = channel, 37 | monitor = Monitor.noOp(), 38 | defaultProperties = MessageProperties.empty, 39 | reportUnroutable = false, 40 | sizeLimitBytes = None, 41 | blocker = TestBase.testBlocker, 42 | logger = 
ImplicitContextLogger.createLogger, 43 | sendAttempts = 2 44 | ) 45 | 46 | val body = Bytes.copyFrom(Array.fill(499)(32.toByte)) 47 | 48 | val publishFuture = producer.send(routingKey, body).runToFuture 49 | 50 | while (nextSeqNumber.get() < 1) { Thread.sleep(5) } 51 | producer.DefaultConfirmListener.handleNack(0, multiple = false) 52 | 53 | while (nextSeqNumber.get() < 2) { Thread.sleep(5) } 54 | producer.DefaultConfirmListener.handleAck(1, multiple = false) 55 | 56 | Await.result(publishFuture, 10.seconds) 57 | 58 | verify(channel, times(2)) 59 | .basicPublish(Matchers.eq(exchangeName), Matchers.eq(routingKey), any(), Matchers.eq(body.toByteArray)) 60 | } 61 | 62 | test("Message not acked returned if number of attempts exhausted") { 63 | val exchangeName = Random.nextString(10) 64 | val routingKey = Random.nextString(10) 65 | 66 | val nextSeqNumber = AtomicInt(0) 67 | val channel = mock[AutorecoveringChannel] 68 | when(channel.getNextPublishSeqNo).thenAnswer(_ => nextSeqNumber.get()) 69 | when(channel.basicPublish(any(), any(), any(), any())).thenAnswer(_ => { 70 | nextSeqNumber.increment() 71 | }) 72 | 73 | val producer = new PublishConfirmsRabbitMQProducer[Task, Bytes]( 74 | name = "test", 75 | exchangeName = exchangeName, 76 | channel = channel, 77 | monitor = Monitor.noOp(), 78 | defaultProperties = MessageProperties.empty, 79 | reportUnroutable = false, 80 | sizeLimitBytes = None, 81 | blocker = TestBase.testBlocker, 82 | logger = ImplicitContextLogger.createLogger, 83 | sendAttempts = 1 84 | ) 85 | 86 | val body = Bytes.copyFrom(Array.fill(499)(64.toByte)) 87 | 88 | val publishTask = producer.send(routingKey, body).runToFuture 89 | 90 | while (nextSeqNumber.get() < 1) { 91 | Thread.sleep(5) 92 | } 93 | 94 | producer.DefaultConfirmListener.handleNack(0, multiple = false) 95 | 96 | assertThrows[NotAcknowledgedPublish] { 97 | Await.result(publishTask, 1.seconds) 98 | } 99 | 100 | verify(channel).basicPublish(Matchers.eq(exchangeName), Matchers.eq(routingKey), any(), Matchers.eq(body.toByteArray)) 101 | } 102 | 103 | test("Multiple messages are fully acked one by one") { 104 | val exchangeName = Random.nextString(10) 105 | val routingKey = Random.nextString(10) 106 | 107 | val seqNumbers = (0 to 499).toList 108 | val nextSeqNumber = AtomicInt(0) 109 | 110 | val channel = mock[AutorecoveringChannel] 111 | when(channel.getNextPublishSeqNo).thenAnswer(_ => nextSeqNumber.get()) 112 | when(channel.basicPublish(any(), any(), any(), any())).thenAnswer(_ => { 113 | nextSeqNumber.increment() 114 | }) 115 | 116 | val producer = new PublishConfirmsRabbitMQProducer[Task, Bytes]( 117 | name = "test", 118 | exchangeName = exchangeName, 119 | channel = channel, 120 | monitor = Monitor.noOp(), 121 | defaultProperties = MessageProperties.empty, 122 | reportUnroutable = false, 123 | sizeLimitBytes = None, 124 | blocker = TestBase.testBlocker, 125 | logger = ImplicitContextLogger.createLogger, 126 | sendAttempts = 2 127 | ) 128 | 129 | val body = Bytes.copyFrom(Array.fill(499)(Random.nextInt(255).toByte)) 130 | 131 | val publishFuture = Task.parSequence { 132 | seqNumbers.map(_ => producer.send(routingKey, body)) 133 | }.runToFuture 134 | 135 | seqNumbers.foreach { seqNumber => 136 | while (nextSeqNumber.get() <= seqNumber) { Thread.sleep(5) } 137 | producer.DefaultConfirmListener.handleAck(seqNumber, multiple = false) 138 | } 139 | 140 | Await.result(publishFuture, 15.seconds) 141 | 142 | assertResult(seqNumbers.length)(nextSeqNumber.get()) 143 | verify(channel, times(seqNumbers.length)) 144 | 
.basicPublish(Matchers.eq(exchangeName), Matchers.eq(routingKey), any(), Matchers.eq(body.toByteArray)) 145 | } 146 | 147 | test("Multiple messages are fully acked at once") { 148 | val exchangeName = Random.nextString(10) 149 | val routingKey = Random.nextString(10) 150 | 151 | val messageCount = 500 152 | val seqNumbers = (0 until messageCount).toList 153 | val nextSeqNumber = AtomicInt(0) 154 | 155 | val channel = mock[AutorecoveringChannel] 156 | when(channel.getNextPublishSeqNo).thenAnswer(_ => nextSeqNumber.get()) 157 | when(channel.basicPublish(any(), any(), any(), any())).thenAnswer(_ => { 158 | nextSeqNumber.increment() 159 | }) 160 | 161 | val producer = new PublishConfirmsRabbitMQProducer[Task, Bytes]( 162 | name = "test", 163 | exchangeName = exchangeName, 164 | channel = channel, 165 | monitor = Monitor.noOp(), 166 | defaultProperties = MessageProperties.empty, 167 | reportUnroutable = false, 168 | sizeLimitBytes = None, 169 | blocker = TestBase.testBlocker, 170 | logger = ImplicitContextLogger.createLogger, 171 | sendAttempts = 2 172 | ) 173 | 174 | val body = Bytes.copyFrom(Array.fill(499)(Random.nextInt(255).toByte)) 175 | 176 | val publishFuture = Task.parSequence { 177 | seqNumbers.map(_ => producer.send(routingKey, body)) 178 | }.runToFuture 179 | 180 | while (nextSeqNumber.get() < messageCount) { Thread.sleep(5) } 181 | producer.DefaultConfirmListener.handleAck(messageCount, multiple = true) 182 | 183 | Await.result(publishFuture, 15.seconds) 184 | 185 | assertResult(seqNumbers.length)(nextSeqNumber.get()) 186 | verify(channel, times(seqNumbers.length)) 187 | .basicPublish(Matchers.eq(exchangeName), Matchers.eq(routingKey), any(), Matchers.eq(body.toByteArray)) 188 | } 189 | } 190 | -------------------------------------------------------------------------------- /core/src/test/scala/com/avast/clients/rabbitmq/RecoveryDelayHandlersTest.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import scala.concurrent.duration._ 4 | import scala.util.Random 5 | 6 | class RecoveryDelayHandlersTest extends TestBase { 7 | test("linear") { 8 | val rdh = RecoveryDelayHandlers.Linear(10.millis, 42.millis) 9 | 10 | assertResult(10)(rdh.getDelay(0)) 11 | 12 | for (_ <- 1 to 200) { 13 | assertResult(42)(rdh.getDelay(Random.nextInt() + 1)) 14 | } 15 | } 16 | 17 | test("exponential") { 18 | val rdh = RecoveryDelayHandlers.Exponential(1.millis, 5.millis, 2.0, 42.millis) 19 | 20 | assertResult(1)(rdh.getDelay(0)) 21 | 22 | assertResult(5)(rdh.getDelay(1)) 23 | assertResult(10)(rdh.getDelay(2)) 24 | assertResult(20)(rdh.getDelay(3)) 25 | assertResult(40)(rdh.getDelay(4)) 26 | assertResult(42)(rdh.getDelay(5)) 27 | assertResult(42)(rdh.getDelay(6)) 28 | assertResult(42)(rdh.getDelay(7)) 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /core/src/test/scala/com/avast/clients/rabbitmq/RepublishStrategyTest.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.clients.rabbitmq.RabbitMQConnection.DefaultListeners 5 | import com.avast.clients.rabbitmq.api.DeliveryResult 6 | import com.avast.clients.rabbitmq.api.DeliveryResult.Republish 7 | import com.avast.clients.rabbitmq.logging.ImplicitContextLogger 8 | import com.avast.metrics.scalaeffectapi.Monitor 9 | import com.rabbitmq.client.AMQP.BasicProperties 10 | import 
com.rabbitmq.client.Envelope 11 | import com.rabbitmq.client.impl.recovery.AutorecoveringChannel 12 | import monix.eval.Task 13 | import monix.execution.Scheduler.Implicits.global 14 | import org.mockito.Mockito._ 15 | import org.mockito.{ArgumentCaptor, Matchers} 16 | import org.scalatest.time.{Seconds, Span} 17 | import org.slf4j.event.Level 18 | 19 | import java.util.UUID 20 | import scala.collection.immutable 21 | import scala.concurrent.duration.DurationInt 22 | import scala.jdk.CollectionConverters._ 23 | import scala.util.Random 24 | 25 | class RepublishStrategyTest extends TestBase { 26 | 27 | private val connectionInfo = RabbitMQConnectionInfo(immutable.Seq("localhost"), "/", None) 28 | private val pmhHelper = PoisonedMessageHandlerHelper[Task, RepublishStrategyTest](Monitor.noOp(), redactPayload = false) 29 | 30 | test("default exchange") { 31 | val messageId = UUID.randomUUID().toString 32 | 33 | val deliveryTag = Random.nextInt(1000) 34 | 35 | val envelope = mock[Envelope] 36 | when(envelope.getDeliveryTag).thenReturn(deliveryTag) 37 | 38 | val originalUserId = "OriginalUserId" 39 | val properties = new BasicProperties.Builder().messageId(messageId).userId(originalUserId).build() 40 | 41 | val channel = mock[AutorecoveringChannel] 42 | when(channel.isOpen).thenReturn(true) 43 | 44 | val consumer = newConsumer(channel, RepublishStrategy.DefaultExchange[Task]()) { delivery => 45 | assertResult(Some(messageId))(delivery.properties.messageId) 46 | 47 | Task.now(DeliveryResult.Republish()) 48 | } 49 | 50 | val body = Random.nextString(5).getBytes 51 | consumer.handleDelivery("abcd", envelope, properties, body) 52 | 53 | eventually(timeout(Span(1, Seconds)), interval(Span(0.1, Seconds))) { 54 | verify(channel, times(1)).basicAck(deliveryTag, false) 55 | verify(channel, times(0)).basicReject(deliveryTag, true) 56 | verify(channel, times(0)).basicReject(deliveryTag, false) 57 | 58 | val propertiesCaptor = ArgumentCaptor.forClass(classOf[BasicProperties]) 59 | verify(channel, times(1)).basicPublish(Matchers.eq(""), Matchers.eq("queueName"), propertiesCaptor.capture(), Matchers.eq(body)) 60 | assertResult(Some(originalUserId))(propertiesCaptor.getValue.getHeaders.asScala.get(DefaultRabbitMQConsumer.RepublishOriginalUserId)) 61 | } 62 | } 63 | 64 | test("custom exchange") { 65 | val messageId = UUID.randomUUID().toString 66 | 67 | val deliveryTag = Random.nextInt(1000) 68 | 69 | val envelope = mock[Envelope] 70 | when(envelope.getDeliveryTag).thenReturn(deliveryTag) 71 | 72 | val originalUserId = "OriginalUserId" 73 | val properties = new BasicProperties.Builder().messageId(messageId).userId(originalUserId).build() 74 | 75 | val channel = mock[AutorecoveringChannel] 76 | when(channel.isOpen).thenReturn(true) 77 | 78 | val consumer = newConsumer(channel, RepublishStrategy.CustomExchange("myCustomExchange")) { delivery => 79 | assertResult(Some(messageId))(delivery.properties.messageId) 80 | 81 | Task.now(DeliveryResult.Republish()) 82 | } 83 | 84 | val body = Random.nextString(5).getBytes 85 | consumer.handleDelivery("abcd", envelope, properties, body) 86 | 87 | eventually(timeout(Span(1, Seconds)), interval(Span(0.1, Seconds))) { 88 | verify(channel, times(1)).basicAck(deliveryTag, false) 89 | verify(channel, times(0)).basicReject(deliveryTag, true) 90 | verify(channel, times(0)).basicReject(deliveryTag, false) 91 | 92 | val propertiesCaptor = ArgumentCaptor.forClass(classOf[BasicProperties]) 93 | verify(channel, times(1)).basicPublish(Matchers.eq("myCustomExchange"), 94 | 
Matchers.eq("queueName"), 95 | propertiesCaptor.capture(), 96 | Matchers.eq(body)) 97 | assertResult(Some(originalUserId))(propertiesCaptor.getValue.getHeaders.asScala.get(DefaultRabbitMQConsumer.RepublishOriginalUserId)) 98 | } 99 | } 100 | 101 | private def newConsumer(channel: ServerChannel, republishStrategy: RepublishStrategy[Task])( 102 | userAction: DeliveryReadAction[Task, Bytes]): DefaultRabbitMQConsumer[Task, Bytes] = { 103 | val base = 104 | new ConsumerBase[Task, Bytes]("test", "queueName", false, TestBase.testBlocker, ImplicitContextLogger.createLogger, Monitor.noOp()) 105 | 106 | val channelOps = new ConsumerChannelOps[Task, Bytes]( 107 | "test", 108 | "queueName", 109 | channel, 110 | TestBase.testBlocker, 111 | republishStrategy, 112 | PMH, 113 | connectionInfo, 114 | ImplicitContextLogger.createLogger, 115 | Monitor.noOp() 116 | ) 117 | 118 | new DefaultRabbitMQConsumer[Task, Bytes]( 119 | base, 120 | channelOps, 121 | 10.seconds, 122 | DeliveryResult.Republish(), 123 | Level.ERROR, 124 | Republish(), 125 | DefaultListeners.defaultConsumerListener, 126 | )(userAction) 127 | } 128 | 129 | object PMH extends LoggingPoisonedMessageHandler[Task, Bytes](3, None, pmhHelper) 130 | } 131 | -------------------------------------------------------------------------------- /core/src/test/scala/com/avast/clients/rabbitmq/TestBase.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | import cats.effect.{Blocker, ContextShift, IO, Resource, Timer} 3 | import cats.implicits.catsSyntaxFlatMapOps 4 | import com.typesafe.scalalogging.StrictLogging 5 | import monix.eval.Task 6 | import monix.execution.Scheduler 7 | import monix.execution.Scheduler.Implicits.global 8 | import monix.execution.schedulers.CanBlock 9 | import org.junit.runner.RunWith 10 | import org.scalatest.FunSuite 11 | import org.scalatest.concurrent.Eventually 12 | import org.scalatestplus.junit.JUnitRunner 13 | import org.scalatestplus.mockito.MockitoSugar 14 | 15 | import java.util.concurrent.Executors 16 | import scala.concurrent.duration._ 17 | import scala.concurrent.{ExecutionContext, TimeoutException} 18 | import scala.language.implicitConversions 19 | 20 | @RunWith(classOf[JUnitRunner]) 21 | class TestBase extends FunSuite with MockitoSugar with Eventually with StrictLogging { 22 | protected implicit def taskToOps[A](t: Task[A]): TaskOps[A] = new TaskOps[A](t) 23 | protected implicit def IOToOps[A](t: IO[A]): IOOps[A] = new IOOps[A](t) 24 | protected implicit def resourceToIOOps[A](t: Resource[IO, A]): ResourceIOOps[A] = new ResourceIOOps[A](t) 25 | protected implicit def resourceToTaskOps[A](t: Resource[Task, A]): ResourceTaskOps[A] = new ResourceTaskOps[A](t) 26 | } 27 | 28 | object TestBase { 29 | val testBlockingScheduler: Scheduler = Scheduler.io(name = "test-blocking") 30 | val testBlocker: Blocker = Blocker.liftExecutorService(Executors.newCachedThreadPool()) 31 | } 32 | 33 | class TaskOps[A](t: Task[A]) { 34 | def await(duration: Duration): A = t.runSyncUnsafe(duration) 35 | def await: A = await(10.seconds) 36 | } 37 | 38 | class IOOps[A](t: IO[A]) { 39 | private implicit val cs: ContextShift[IO] = IO.contextShift(TestBase.testBlockingScheduler) 40 | private implicit val timer: Timer[IO] = IO.timer(ExecutionContext.global) 41 | 42 | def await(duration: FiniteDuration): A = (cs.shift >> t.timeout(duration)).unsafeRunSync() 43 | def await: A = await(10.seconds) 44 | } 45 | 46 | class ResourceIOOps[A](val r: Resource[IO, A]) extends 
AnyVal { 47 | def withResource[B](f: A => B): B = { 48 | withResource(f, Duration.Inf) 49 | } 50 | 51 | def withResource[B](f: A => B, timeout: Duration): B = { 52 | r.use(a => IO(f).map(_(a))).unsafeRunTimed(timeout).getOrElse(throw new TimeoutException("Timeout has occurred")) 53 | } 54 | } 55 | 56 | class ResourceTaskOps[A](val r: Resource[Task, A]) extends AnyVal { 57 | def withResource[B](f: A => B): B = { 58 | withResource(f, Duration.Inf) 59 | } 60 | 61 | def withResource[B](f: A => B, timeout: Duration): B = { 62 | r.use(a => Task.delay(f(a))).runSyncUnsafe(timeout)(TestBase.testBlockingScheduler, CanBlock.permit) 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /core/src/test/scala/com/avast/clients/rabbitmq/TestDeliveryContext.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import com.rabbitmq.client.AMQP.BasicProperties 4 | 5 | import scala.util.Random 6 | 7 | object TestDeliveryContext { 8 | private def randomString(length: Int): String = { 9 | Random.alphanumeric.take(length).mkString("") 10 | } 11 | 12 | def create(): DeliveryContext = { 13 | DeliveryContext( 14 | messageId = MessageId("test-message-id"), 15 | correlationId = Some(CorrelationId("test-corr-id")), 16 | deliveryTag = DeliveryTag(42), 17 | routingKey = RoutingKey(randomString(10)), 18 | fixedProperties = new BasicProperties() 19 | ) 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /core/src/test/scala/com/avast/clients/rabbitmq/TestHelper.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import java.net.URLEncoder 4 | import java.nio.charset.StandardCharsets 5 | 6 | import io.circe.Decoder 7 | import io.circe.generic.auto._ 8 | import io.circe.parser._ 9 | import scalaj.http.Http 10 | 11 | import scala.util.Success 12 | 13 | //noinspection ScalaStyle 14 | class TestHelper(host: String, port: Int) { 15 | 16 | final val RootUri = s"http://$host:$port/api" 17 | 18 | object queue { 19 | 20 | def getMessagesCount(queueName: String): Int = { 21 | val encoded = URLEncoder.encode(queueName, StandardCharsets.UTF_8.toString) 22 | 23 | val resp = Http(s"$RootUri/queues/%2f/$encoded").auth("guest", "guest").asString.body 24 | 25 | decode[QueueProperties](resp) match { 26 | case Right(p) => p.messages 27 | case r => throw new IllegalStateException(s"Wrong response $r") 28 | } 29 | } 30 | 31 | def getPublishedCount(queueName: String): Int = { 32 | val encoded = URLEncoder.encode(queueName, StandardCharsets.UTF_8.toString) 33 | 34 | val resp = Http(s"$RootUri/queues/%2f/$encoded").auth("guest", "guest").asString.body 35 | 36 | decode[QueueProperties](resp) match { 37 | case Right(p) => 38 | p.message_stats.map(_.publish).getOrElse { 39 | Console.err.println(s"Could not extract published_count for $queueName!") 40 | 0 41 | } 42 | case r => throw new IllegalStateException(s"Wrong response $r") 43 | } 44 | } 45 | 46 | def getArguments(queueName: String): Map[String, Any] = { 47 | val encoded = URLEncoder.encode(queueName, StandardCharsets.UTF_8.toString) 48 | 49 | val resp = Http(s"$RootUri/queues/%2f/$encoded").auth("guest", "guest").asString.body 50 | 51 | decode[QueueProperties](resp) match { 52 | case Right(p) => 53 | p.arguments.getOrElse { 54 | Console.err.println(s"Could not extract arguments for $queueName!") 55 | Map.empty 56 | } 57 | case r => throw 
new IllegalStateException(s"Wrong response $r") 58 | } 59 | } 60 | 61 | def delete(queueName: String, ifEmpty: Boolean = false, ifUnused: Boolean = false): Unit = { 62 | println(s"Deleting queue: $queueName") 63 | val encoded = URLEncoder.encode(queueName, StandardCharsets.UTF_8.toString) 64 | 65 | val resp = 66 | Http(s"$RootUri/queues/%2f/$encoded?if-empty=$ifEmpty&if-unused=$ifUnused").method("DELETE").auth("guest", "guest").asString 67 | 68 | val content = resp.body 69 | 70 | val message = s"Delete queue response ${resp.statusLine}: '$content'" 71 | println(message) 72 | 73 | if (!resp.isSuccess && resp.code != 404) { 74 | throw new IllegalStateException(message) 75 | } 76 | } 77 | 78 | private implicit val anyDecoder: Decoder[Any] = Decoder.decodeJson.emapTry { json => 79 | // we are in test, it's enough to support just Int and String here 80 | if (json.isNumber) { 81 | json.as[Int].toTry 82 | } else if (json.isString) { 83 | json.as[String].toTry 84 | } else Success(null) 85 | } 86 | 87 | private case class QueueProperties(messages: Int, message_stats: Option[MessagesStats], arguments: Option[Map[String, Any]]) 88 | private case class MessagesStats(publish: Int, ack: Option[Int]) 89 | } 90 | 91 | object exchange { 92 | 93 | def getPublishedCount(exchangeName: String): Int = { 94 | val encoded = URLEncoder.encode(exchangeName, StandardCharsets.UTF_8.toString) 95 | 96 | val resp = Http(s"$RootUri/exchanges/%2f/$encoded").auth("guest", "guest").asString.body 97 | 98 | decode[ExchangeProperties](resp) match { 99 | case Right(p) => 100 | p.message_stats.map(_.publish_in).getOrElse { 101 | Console.err.println(s"Could not extract published_count for $exchangeName!") 102 | 0 103 | } 104 | case r => throw new IllegalStateException(s"Wrong response $r") 105 | } 106 | } 107 | 108 | private case class ExchangeProperties(message_stats: Option[MessagesStats]) 109 | private case class MessagesStats(publish_in: Int, publish_out: Int) 110 | } 111 | 112 | } 113 | -------------------------------------------------------------------------------- /core/src/test/scala/com/avast/clients/rabbitmq/TestMonitor.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import cats.effect.Sync 4 | import com.avast.metrics.api.Naming 5 | import com.avast.metrics.dropwizard.MetricsMonitor 6 | import com.avast.metrics.scalaapi 7 | import com.avast.metrics.scalaeffectapi._ 8 | import com.codahale.metrics.MetricRegistry 9 | 10 | import scala.jdk.CollectionConverters._ 11 | 12 | class TestMonitor[F[_]: Sync] extends Monitor[F] { 13 | 14 | private val metricsRegistry = new MetricRegistry 15 | val underlying: Monitor[F] = Monitor.wrapJava[F](new MetricsMonitor(metricsRegistry, Naming.defaultNaming())) 16 | 17 | override def named(name: String): Monitor[F] = underlying.named(name) 18 | override def named(name1: String, name2: String, restOfNames: String*): Monitor[F] = underlying.named(name1, name2, restOfNames: _*) 19 | override def meter(name: String): Meter[F] = underlying.meter(name) 20 | override def counter(name: String): Counter[F] = underlying.counter(name) 21 | override def histogram(name: String): Histogram[F] = underlying.histogram(name) 22 | override def timer(name: String): Timer[F] = underlying.timer(name) 23 | override def timerPair(name: String): TimerPair[F] = underlying.timerPair(name) 24 | override def gauge: GaugeFactory[F] = underlying.gauge 25 | override def asPlainScala: scalaapi.Monitor = underlying.asPlainScala 26 | 
override def asJava: com.avast.metrics.api.Monitor = underlying.asJava 27 | override def getName: String = underlying.getName 28 | 29 | override def close(): Unit = underlying.close() 30 | 31 | val registry: Registry = new Registry(metricsRegistry) 32 | } 33 | 34 | class Registry(registry: MetricRegistry) { 35 | def meterCount(path: String): Long = registry.getMeters.asScala(path.replace('.', '/')).getCount 36 | def timerCount(path: String): Long = registry.getTimers.asScala(path.replace('.', '/')).getCount 37 | def timerPairCountSuccesses(path: String): Long = registry.getTimers.asScala(path.replace('.', '/') + "Successes").getCount 38 | def timerPairCountFailures(path: String): Long = registry.getTimers.asScala(path.replace('.', '/') + "Failures").getCount 39 | def gaugeLongValue(path: String): Long = registry.getGauges.asScala(path.replace('.', '/')).getValue.asInstanceOf[Long] 40 | } 41 | -------------------------------------------------------------------------------- /extras-circe/README.md: -------------------------------------------------------------------------------- 1 | # RabbitMQ client extras - Circe 2 | 3 | This is an extra module with some optional functionality dependent on [Circe](https://github.com/circe/circe) (a library for decoding JSON into 4 | Scala case classes). 5 | ```groovy 6 | compile 'com.avast.clients.rabbitmq:rabbitmq-client-extras-circe_$scalaVersion:x.x.x' 7 | ``` 8 | 9 | ## JsonDeliveryConverter 10 | 11 | This is an implementation of [DeliveryConverter](../core/src/main/scala/com/avast/clients/rabbitmq/DeliveryConverter.scala) which adds support 12 | for JSON decoding done by [Circe](https://github.com/circe/circe). 13 | 14 | Whether the converter is suitable for a concrete message is decided based on the Content-Type property - only `application/json` is supported. 15 | 16 | See the [Providing converters](../README.md#providing-converters-for-producer/consumer) and [MultiFormatConsumer](../README.md#multiformatconsumer) 17 | descriptions for usage.
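For instance, consumer-side wiring could look like the following sketch (the `FileSourceAdded` case class, the handler body and the use of `io.circe.generic.auto._` are illustrative assumptions, not part of this module):

```scala
import com.avast.bytes.Bytes
import com.avast.clients.rabbitmq.MultiFormatConsumer
import com.avast.clients.rabbitmq.api.{Delivery, DeliveryResult}
import com.avast.clients.rabbitmq.extras.format.JsonDeliveryConverter
import io.circe.generic.auto._ // or any other way of providing a Decoder[FileSourceAdded]
import monix.eval.Task

// Illustrative payload type; any case class with a circe Decoder in scope will do.
case class FileSourceAdded(fileId: String, source: String)

// Only deliveries with Content-Type `application/json` are converted; anything else
// falls through to the second case and is rejected here.
val readAction: Delivery[Bytes] => Task[DeliveryResult] =
  MultiFormatConsumer.forType[Task, FileSourceAdded](JsonDeliveryConverter.derive()) {
    case d: Delivery.Ok[FileSourceAdded] =>
      Task.delay(println(d.body)).map(_ => DeliveryResult.Ack)
    case _ =>
      Task.now(DeliveryResult.Reject)
  }
```

The producer-side `JsonProductConverter` is derived the same way, from an implicit circe `Encoder`.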
18 | -------------------------------------------------------------------------------- /extras-circe/build.gradle: -------------------------------------------------------------------------------- 1 | archivesBaseName = "rabbitmq-client-extras-circe_$scalaVersion" 2 | 3 | dependencies { 4 | api project(":core") 5 | api project(":extras") 6 | 7 | api "io.circe:circe-core_$scalaVersion:$circeVersion" 8 | api "io.circe:circe-parser_$scalaVersion:$circeVersion" 9 | } 10 | -------------------------------------------------------------------------------- /extras-circe/src/main/scala/com/avast/clients/rabbitmq/extras/format/JsonDeliveryConverter.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras.format 2 | 3 | import cats.syntax.either._ 4 | import com.avast.bytes.Bytes 5 | import com.avast.clients.rabbitmq.CheckedDeliveryConverter 6 | import com.avast.clients.rabbitmq.api.{ConversionException, Delivery} 7 | import io.circe.Decoder 8 | import io.circe.parser.decode 9 | 10 | import scala.annotation.implicitNotFound 11 | import scala.reflect.ClassTag 12 | 13 | @implicitNotFound( 14 | "Could not generate JsonDeliveryConverter for ${A}, try to import or define some\nMaybe you're missing some circe imports?") 15 | trait JsonDeliveryConverter[A] extends CheckedDeliveryConverter[A] 16 | 17 | object JsonDeliveryConverter { 18 | def derive[A: JsonDeliveryConverter](): JsonDeliveryConverter[A] = implicitly[JsonDeliveryConverter[A]] 19 | 20 | implicit def createJsonDeliveryConverter[A: Decoder: ClassTag]: JsonDeliveryConverter[A] = new JsonDeliveryConverter[A] { 21 | override def convert(body: Bytes): Either[ConversionException, A] = { 22 | decode[A](body.toStringUtf8) 23 | .leftMap { 24 | ConversionException(s"Could not decode class ${implicitly[ClassTag[A]].runtimeClass.getName} from json", _) 25 | } 26 | } 27 | 28 | override def canConvert(d: Delivery[Bytes]): Boolean = d.properties.contentType.map(_.toLowerCase).contains("application/json") 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /extras-circe/src/main/scala/com/avast/clients/rabbitmq/extras/format/JsonProductConverter.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras.format 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.clients.rabbitmq.ProductConverter 5 | import com.avast.clients.rabbitmq.api.{ConversionException, MessageProperties} 6 | import io.circe.Encoder 7 | import io.circe.syntax._ 8 | 9 | import scala.annotation.implicitNotFound 10 | import scala.reflect.ClassTag 11 | import scala.util.control.NonFatal 12 | 13 | @implicitNotFound( 14 | "Could not generate JsonProductConverter for ${A}, try to import or define some\nMaybe you're missing some circe imports?") 15 | trait JsonProductConverter[A] extends ProductConverter[A] 16 | 17 | object JsonProductConverter { 18 | def derive[A: JsonProductConverter](): JsonProductConverter[A] = implicitly[JsonProductConverter[A]] 19 | 20 | implicit def createJsonProductConverter[A: Encoder: ClassTag]: JsonProductConverter[A] = new JsonProductConverter[A] { 21 | override def convert(p: A): Either[ConversionException, Bytes] = { 22 | try { 23 | Right(Bytes.copyFromUtf8(p.asJson.noSpaces)) 24 | } catch { 25 | case NonFatal(e) => 26 | Left { 27 | ConversionException(s"Could not encode class ${implicitly[ClassTag[A]].runtimeClass.getName} to json", e) 28 | } 29 | } 30 | } 31 | 32 
| override def fillProperties(properties: MessageProperties): MessageProperties = { 33 | properties.copy( 34 | contentType = Some("application/json") 35 | ) 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /extras-protobuf/README.md: -------------------------------------------------------------------------------- 1 | # RabbitMQ client extras - Protobuf 2 | 3 | This is an extra module that allows to publish and consume events defined as [Google Protocol Buffers](https://developers.google.com/protocol-buffers/) messages. 4 | ```groovy 5 | compile 'com.avast.clients.rabbitmq:rabbitmq-client-extras-protobuf_$scalaVersion:x.x.x' 6 | ``` 7 | 8 | ## Converters 9 | * `ProtobufAsBinaryDeliveryConverter` 10 | * `ProtobufAsBinaryProductConverter` 11 | * `ProtobufAsJsonDeliveryConverter` 12 | * `ProtobufAsJsonProductConverter` 13 | 14 | ## Consumers 15 | There is `ProtobufConsumer` consumer that is able to consume both binary and JSON events defined as a Protobuf message. 16 | -------------------------------------------------------------------------------- /extras-protobuf/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'com.google.protobuf' 2 | 3 | archivesBaseName = "rabbitmq-client-extras-protobuf_$scalaVersion" 4 | 5 | dependencies { 6 | api project(":core") 7 | 8 | api "com.avast.bytes:bytes-gpb:$bytesVersion" 9 | 10 | api "com.google.protobuf:protobuf-java:$protobufVersion" 11 | api "com.google.protobuf:protobuf-java-util:$protobufVersion" 12 | } 13 | 14 | protobuf { 15 | protoc { 16 | artifact = "com.google.protobuf:protoc:$protobufVersion" 17 | } 18 | } 19 | 20 | -------------------------------------------------------------------------------- /extras-protobuf/src/main/scala/com/avast/clients/rabbitmq/extras/format/ProtobufAsBinaryDeliveryConverter.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras.format 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.clients.rabbitmq.CheckedDeliveryConverter 5 | import com.avast.clients.rabbitmq.api.{ConversionException, Delivery} 6 | import com.google.protobuf.{GeneratedMessageV3, Message} 7 | 8 | import scala.annotation.implicitNotFound 9 | import scala.reflect.ClassTag 10 | import scala.util.control.NonFatal 11 | 12 | @implicitNotFound("Could not generate ProtobufAsBinaryDeliveryConverter for ${A}, try to import or define some") 13 | trait ProtobufAsBinaryDeliveryConverter[A <: GeneratedMessageV3] extends CheckedDeliveryConverter[A] 14 | 15 | object ProtobufAsBinaryDeliveryConverter { 16 | def derive[A <: GeneratedMessageV3: ProtobufAsBinaryDeliveryConverter](): ProtobufAsBinaryDeliveryConverter[A] = 17 | implicitly[ProtobufAsBinaryDeliveryConverter[A]] 18 | 19 | implicit def createBinaryDeliveryConverter[A <: GeneratedMessageV3: ClassTag]: ProtobufAsBinaryDeliveryConverter[A] = 20 | new ProtobufAsBinaryDeliveryConverter[A] { 21 | 22 | private val newBuilderMethod = implicitly[ClassTag[A]].runtimeClass.getMethod("newBuilder") 23 | 24 | override def convert(body: Bytes): Either[ConversionException, A] = 25 | try { 26 | val builder = newBuilderMethod.invoke(null).asInstanceOf[Message.Builder] 27 | Right(builder.mergeFrom(body.toByteArray).build().asInstanceOf[A]) 28 | } catch { 29 | case NonFatal(e) => 30 | Left { 31 | ConversionException(s"Could not decode class ${implicitly[ClassTag[A]].runtimeClass.getName} from binary", e) 32 | } 33 | } 34 | 
35 | override def canConvert(d: Delivery[Bytes]): Boolean = d.properties.contentType.map(_.toLowerCase).exists(ct => ct == "application/protobuf" || ct == "application/x-protobuf") 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /extras-protobuf/src/main/scala/com/avast/clients/rabbitmq/extras/format/ProtobufAsBinaryProductConverter.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras.format 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.bytes.gpb.ByteStringBytes 5 | import com.avast.clients.rabbitmq.ProductConverter 6 | import com.avast.clients.rabbitmq.api.{ConversionException, MessageProperties} 7 | import com.google.protobuf.MessageLite 8 | 9 | import scala.annotation.implicitNotFound 10 | import scala.reflect.ClassTag 11 | import scala.util.control.NonFatal 12 | 13 | @implicitNotFound("Could not generate ProtobufAsBinaryProductConverter for ${A}, try to import or define some") 14 | trait ProtobufAsBinaryProductConverter[A <: MessageLite] extends ProductConverter[A] 15 | 16 | object ProtobufAsBinaryProductConverter { 17 | def derive[A <: MessageLite: ProtobufAsBinaryProductConverter](): ProtobufAsBinaryProductConverter[A] = 18 | implicitly[ProtobufAsBinaryProductConverter[A]] 19 | 20 | implicit def createBinaryProductConverter[A <: MessageLite: ClassTag]: ProtobufAsBinaryProductConverter[A] = 21 | new ProtobufAsBinaryProductConverter[A] { 22 | override def convert(p: A): Either[ConversionException, Bytes] = 23 | try { 24 | Right(ByteStringBytes.wrap(p.toByteString)) 25 | } catch { 26 | case NonFatal(e) => 27 | Left { 28 | ConversionException(s"Could not encode class ${implicitly[ClassTag[A]].runtimeClass.getName} to binary", e) 29 | } 30 | } 31 | 32 | override def fillProperties(properties: MessageProperties): MessageProperties = { 33 | properties.copy( 34 | contentType = Some("application/protobuf") 35 | ) 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /extras-protobuf/src/main/scala/com/avast/clients/rabbitmq/extras/format/ProtobufAsJsonDeliveryConverter.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras.format 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.clients.rabbitmq.CheckedDeliveryConverter 5 | import com.avast.clients.rabbitmq.api.{ConversionException, Delivery} 6 | import com.google.protobuf.util.JsonFormat 7 | import com.google.protobuf.{GeneratedMessageV3, Message} 8 | 9 | import scala.annotation.implicitNotFound 10 | import scala.reflect.ClassTag 11 | import scala.util.control.NonFatal 12 | 13 | @implicitNotFound("Could not generate ProtobufDeliveryConverter for ${A}, try to import or define some") 14 | trait ProtobufAsJsonDeliveryConverter[A <: GeneratedMessageV3] extends CheckedDeliveryConverter[A] 15 | 16 | object ProtobufAsJsonDeliveryConverter { 17 | def derive[A <: GeneratedMessageV3: ProtobufAsJsonDeliveryConverter](): ProtobufAsJsonDeliveryConverter[A] = 18 | implicitly[ProtobufAsJsonDeliveryConverter[A]] 19 | 20 | private val jsonParser = JsonFormat.parser().ignoringUnknownFields() 21 | 22 | implicit def createJsonDeliveryConverter[A <: GeneratedMessageV3: ClassTag]: ProtobufAsJsonDeliveryConverter[A] = 23 | new ProtobufAsJsonDeliveryConverter[A] { 24 | 25 | private val newBuilderMethod = implicitly[ClassTag[A]].runtimeClass.getMethod("newBuilder") 26 | 27 | 
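// The builder is obtained via reflection: `newBuilder` is a static method generated on each concrete
// message class and is not reachable through the `GeneratedMessageV3` upper bound itself.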
override def convert(body: Bytes): Either[ConversionException, A] = 28 | try { 29 | val builder = newBuilderMethod.invoke(null).asInstanceOf[Message.Builder] 30 | jsonParser.merge(body.toStringUtf8, builder) 31 | Right(builder.build().asInstanceOf[A]) 32 | } catch { 33 | case NonFatal(e) => 34 | Left { 35 | ConversionException(s"Could not decode class ${implicitly[ClassTag[A]].runtimeClass.getName} from json", e) 36 | } 37 | } 38 | 39 | override def canConvert(d: Delivery[Bytes]): Boolean = d.properties.contentType.map(_.toLowerCase).contains("application/json") 40 | } 41 | 42 | } 43 | -------------------------------------------------------------------------------- /extras-protobuf/src/main/scala/com/avast/clients/rabbitmq/extras/format/ProtobufAsJsonProductConverter.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras.format 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.clients.rabbitmq.ProductConverter 5 | import com.avast.clients.rabbitmq.api.{ConversionException, MessageProperties} 6 | import com.google.protobuf.MessageOrBuilder 7 | import com.google.protobuf.util.JsonFormat 8 | 9 | import scala.annotation.implicitNotFound 10 | import scala.reflect.ClassTag 11 | import scala.util.control.NonFatal 12 | 13 | @implicitNotFound("Could not generate ProtobufAsJsonProductConverter for ${A}, try to import or define some") 14 | trait ProtobufAsJsonProductConverter[A <: MessageOrBuilder] extends ProductConverter[A] 15 | 16 | object ProtobufAsJsonProductConverter { 17 | def derive[A <: MessageOrBuilder: ProtobufAsJsonProductConverter](): ProtobufAsJsonProductConverter[A] = 18 | implicitly[ProtobufAsJsonProductConverter[A]] 19 | 20 | // Printing is intentionally configured to use integer for enums 21 | // because then the serialized data is valid even after values renaming. 
22 | private val jsonPrinter = JsonFormat.printer().printingEnumsAsInts().includingDefaultValueFields() 23 | 24 | implicit def createJsonProductConverter[A <: MessageOrBuilder: ClassTag]: ProtobufAsJsonProductConverter[A] = 25 | new ProtobufAsJsonProductConverter[A] { 26 | override def convert(p: A): Either[ConversionException, Bytes] = 27 | try { 28 | Right(Bytes.copyFromUtf8(jsonPrinter.print(p))) 29 | } catch { 30 | case NonFatal(e) => 31 | Left { 32 | ConversionException(s"Could not encode class ${implicitly[ClassTag[A]].runtimeClass.getName} to json", e) 33 | } 34 | } 35 | 36 | override def fillProperties(properties: MessageProperties): MessageProperties = { 37 | properties.copy( 38 | contentType = Some("application/json") 39 | ) 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /extras-protobuf/src/main/scala/com/avast/clients/rabbitmq/extras/format/ProtobufConsumer.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras.format 2 | 3 | import cats.effect.Sync 4 | import com.avast.bytes.Bytes 5 | import com.avast.clients.rabbitmq.MultiFormatConsumer 6 | import com.avast.clients.rabbitmq.api.{Delivery, DeliveryResult} 7 | import com.google.protobuf.GeneratedMessageV3 8 | 9 | import scala.reflect.ClassTag 10 | 11 | object ProtobufConsumer { 12 | def create[F[_]: Sync, A <: GeneratedMessageV3: ClassTag]( 13 | action: Delivery[A] => F[DeliveryResult]): (Delivery[Bytes] => F[DeliveryResult]) = 14 | MultiFormatConsumer.forType[F, A](ProtobufAsJsonDeliveryConverter.derive(), ProtobufAsBinaryDeliveryConverter.derive())(action) 15 | } 16 | -------------------------------------------------------------------------------- /extras-protobuf/src/test/proto/ExampleEvents.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package com.avast.clients.rabbitmq.test; 4 | 5 | message FileSource { 6 | string file_id = 1; 7 | string source = 2; 8 | } 9 | -------------------------------------------------------------------------------- /extras-protobuf/src/test/scala/com/avast/clients/rabbitmq/extras/format/ProtobufConsumerTest.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras.format 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.clients.rabbitmq.api._ 5 | import com.avast.clients.rabbitmq.test.ExampleEvents 6 | import com.google.protobuf.util.JsonFormat 7 | import monix.eval.Task 8 | import monix.execution.Scheduler.Implicits.global 9 | import org.junit.runner.RunWith 10 | import org.scalatest.{FlatSpec, Matchers} 11 | import org.scalatestplus.junit.JUnitRunner 12 | 13 | @RunWith(classOf[JUnitRunner]) 14 | class ProtobufConsumerTest extends FlatSpec with Matchers { 15 | private val event = ExampleEvents.FileSource.newBuilder().setFileId("fileId").setSource("source").build() 16 | 17 | it must "consume JSON and binary events" in { 18 | val consumer = ProtobufConsumer.create[Task, ExampleEvents.FileSource] { 19 | case Delivery.Ok(actual, _, _) => 20 | actual shouldBe event 21 | Task.pure(DeliveryResult.Ack) 22 | 23 | case _ => fail("malformed") 24 | } 25 | consumer(Delivery.Ok(Bytes.copyFrom(event.toByteArray), MessageProperties.empty.copy(contentType = Some("application/protobuf")), "")) 26 | .runSyncUnsafe() shouldBe DeliveryResult.Ack 27 | consumer( 28 | 
Delivery.Ok(Bytes.copyFromUtf8(JsonFormat.printer().print(event)), 29 | MessageProperties.empty.copy(contentType = Some("application/json")), 30 | "")).runSyncUnsafe() shouldBe DeliveryResult.Ack 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /extras-protobuf/src/test/scala/com/avast/clients/rabbitmq/extras/format/ProtobufConvertersTest.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras.format 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.clients.rabbitmq.test.ExampleEvents 5 | import com.google.protobuf.util.JsonFormat 6 | import org.junit.runner.RunWith 7 | import org.scalatest.{FlatSpec, Matchers} 8 | import org.scalatestplus.junit.JUnitRunner 9 | 10 | @RunWith(classOf[JUnitRunner]) 11 | class ProtobufConvertersTest extends FlatSpec with Matchers { 12 | private val event = ExampleEvents.FileSource.newBuilder().setFileId("fileId").setSource("source").build() 13 | 14 | it must "deserialize binary event" in { 15 | val target = ProtobufAsBinaryDeliveryConverter.derive[ExampleEvents.FileSource]() 16 | val Right(actual) = target.convert(Bytes.copyFrom(event.toByteArray)) 17 | actual shouldBe event 18 | } 19 | 20 | it must "deserialize json event" in { 21 | val target = ProtobufAsJsonDeliveryConverter.derive[ExampleEvents.FileSource]() 22 | val Right(actual) = target.convert(Bytes.copyFromUtf8(JsonFormat.printer().print(event))) 23 | actual shouldBe event 24 | } 25 | 26 | it must "serialize binary event" in { 27 | val target = ProtobufAsBinaryProductConverter.derive[ExampleEvents.FileSource]() 28 | val Right(actual) = target.convert(event) 29 | actual.toByteArray shouldBe event.toByteArray 30 | } 31 | 32 | it must "serialize json event" in { 33 | val target = ProtobufAsJsonProductConverter.derive[ExampleEvents.FileSource]() 34 | val Right(actual) = target.convert(event) 35 | actual shouldBe Bytes.copyFromUtf8(JsonFormat.printer().print(event)) 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /extras-scalapb/README.md: -------------------------------------------------------------------------------- 1 | # RabbitMQ client extras - ScalaPB 2 | 3 | This is an extra module that allows to publish and consume events defined as [Google Protocol Buffers](https://developers.google.com/protocol-buffers/) messages, generated to Scala using [ScalaPB](https://scalapb.github.io/). 4 | ```groovy 5 | compile 'com.avast.clients.rabbitmq:rabbitmq-client-extras-scalapb_$scalaVersion:x.x.x' 6 | ``` 7 | 8 | ## Converters 9 | * `ScalaPBAsBinaryDeliveryConverter` 10 | * `ScalaPBAsBinaryProductConverter` 11 | * `ScalaPBAsJsonDeliveryConverter` 12 | * `ScalaPBAsJsonProductConverter` 13 | 14 | ## Consumers 15 | There is `ScalaPBConsumer` consumer that is able to consume both binary and JSON events defined as a Protobuf message. 
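For instance (a sketch only - `FileSource` is the ScalaPB-generated message used in this module's tests, and the handler body is illustrative):

```scala
import com.avast.bytes.Bytes
import com.avast.clients.rabbitmq.api.{Delivery, DeliveryResult}
import com.avast.clients.rabbitmq.extras.format.ScalaPBConsumer
import com.avast.clients.rabbitmq.test.FileSource // stands for any ScalaPB-generated message
import monix.eval.Task

// Accepts deliveries with Content-Type `application/protobuf`/`application/x-protobuf` (binary)
// as well as `application/json`, converting both into the same message type.
val readAction: Delivery[Bytes] => Task[DeliveryResult] =
  ScalaPBConsumer.create[Task, FileSource] {
    case Delivery.Ok(event, _, _) =>
      Task.delay(println(s"Received $event")).map(_ => DeliveryResult.Ack)
    case _ =>
      Task.now(DeliveryResult.Reject)
  }
```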
16 | -------------------------------------------------------------------------------- /extras-scalapb/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'com.google.protobuf' 2 | 3 | archivesBaseName = "rabbitmq-client-extras-scalapb_$scalaVersion" 4 | 5 | dependencies { 6 | api project(":core") 7 | 8 | api "com.avast.bytes:bytes-gpb:${bytesVersion}" 9 | 10 | api "com.google.protobuf:protobuf-java:$protobufVersion" 11 | api "com.thesamet.scalapb:scalapb-runtime_$scalaVersion:$scalapbVersion" 12 | api "com.thesamet.scalapb:scalapb-json4s_$scalaVersion:$scalapbJson4sVersion" 13 | } 14 | 15 | protobuf { 16 | protoc { 17 | artifact = "com.google.protobuf:protoc:$protobufVersion" 18 | } 19 | plugins { 20 | scalapb { 21 | artifact = (org.gradle.nativeplatform.platform.internal.DefaultNativePlatform.getCurrentOperatingSystem().isWindows()) ? 22 | "com.thesamet.scalapb:protoc-gen-scala:${scalapbVersion}:windows@bat" : 23 | "com.thesamet.scalapb:protoc-gen-scala:${scalapbVersion}:unix@sh" 24 | } 25 | } 26 | generateProtoTasks { 27 | all().each { task -> 28 | task.plugins { 29 | scalapb { 30 | option 'flat_package' 31 | } 32 | } 33 | } 34 | } 35 | } 36 | 37 | sourceSets { 38 | test { 39 | scala { 40 | srcDir "${protobuf.generatedFilesBaseDir}/test/scalapb" 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /extras-scalapb/src/main/scala/com/avast/clients/rabbitmq/extras/format/ScalaPBAsBinaryDeliveryConverter.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras.format 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.clients.rabbitmq.CheckedDeliveryConverter 5 | import com.avast.clients.rabbitmq.api.{ConversionException, Delivery} 6 | import scalapb.{GeneratedMessage, GeneratedMessageCompanion} 7 | 8 | import scala.annotation.implicitNotFound 9 | import scala.reflect.ClassTag 10 | import scala.util.control.NonFatal 11 | 12 | @implicitNotFound("Could not generate ScalaPBAsBinaryDeliveryConverter for ${A}, try to import or define some") 13 | trait ScalaPBAsBinaryDeliveryConverter[A <: GeneratedMessage] extends CheckedDeliveryConverter[A] 14 | 15 | object ScalaPBAsBinaryDeliveryConverter { 16 | def derive[A <: GeneratedMessage: GeneratedMessageCompanion: ScalaPBAsBinaryDeliveryConverter](): ScalaPBAsBinaryDeliveryConverter[A] = 17 | implicitly[ScalaPBAsBinaryDeliveryConverter[A]] 18 | 19 | implicit def createBinaryDeliveryConverter[A <: GeneratedMessage: GeneratedMessageCompanion: ClassTag]: ScalaPBAsBinaryDeliveryConverter[A] = 20 | new ScalaPBAsBinaryDeliveryConverter[A] { 21 | 22 | override def convert(body: Bytes): Either[ConversionException, A] = 23 | try { 24 | Right(implicitly[GeneratedMessageCompanion[A]].parseFrom(body.toByteArray)) 25 | } catch { 26 | case NonFatal(e) => 27 | Left { 28 | ConversionException(s"Could not decode class ${implicitly[ClassTag[A]].runtimeClass.getName} from binary", e) 29 | } 30 | } 31 | 32 | override def canConvert(d: Delivery[Bytes]): Boolean = d.properties.contentType.map(_.toLowerCase).exists(ct => ct == "application/protobuf" || ct == "application/x-protobuf") 33 | } 34 | 35 | } 36 | -------------------------------------------------------------------------------- /extras-scalapb/src/main/scala/com/avast/clients/rabbitmq/extras/format/ScalaPBAsBinaryProductConverter.scala: -------------------------------------------------------------------------------- 1 | 
package com.avast.clients.rabbitmq.extras.format 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.bytes.gpb.ByteStringBytes 5 | import com.avast.clients.rabbitmq.ProductConverter 6 | import com.avast.clients.rabbitmq.api.{ConversionException, MessageProperties} 7 | import scalapb.GeneratedMessage 8 | 9 | import scala.annotation.implicitNotFound 10 | import scala.reflect.ClassTag 11 | import scala.util.control.NonFatal 12 | 13 | @implicitNotFound("Could not generate ScalaPBAsBinaryProductConverter for ${A}, try to import or define some") 14 | trait ScalaPBAsBinaryProductConverter[A <: GeneratedMessage] extends ProductConverter[A] 15 | 16 | object ScalaPBAsBinaryProductConverter { 17 | def derive[A <: GeneratedMessage: ScalaPBAsBinaryProductConverter](): ScalaPBAsBinaryProductConverter[A] = 18 | implicitly[ScalaPBAsBinaryProductConverter[A]] 19 | 20 | implicit def createBinaryProductConverter[A <: GeneratedMessage: ClassTag]: ScalaPBAsBinaryProductConverter[A] = 21 | new ScalaPBAsBinaryProductConverter[A] { 22 | override def convert(p: A): Either[ConversionException, Bytes] = 23 | try { 24 | Right(ByteStringBytes.wrap(p.toByteString)) 25 | } catch { 26 | case NonFatal(e) => 27 | Left { 28 | ConversionException(s"Could not encode class ${implicitly[ClassTag[A]].runtimeClass.getName} to binary", e) 29 | } 30 | } 31 | 32 | override def fillProperties(properties: MessageProperties): MessageProperties = { 33 | properties.copy( 34 | contentType = Some("application/protobuf") 35 | ) 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /extras-scalapb/src/main/scala/com/avast/clients/rabbitmq/extras/format/ScalaPBAsJsonDeliveryConverter.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras.format 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.clients.rabbitmq.CheckedDeliveryConverter 5 | import com.avast.clients.rabbitmq.api.{ConversionException, Delivery} 6 | import com.google.protobuf.Message 7 | import scalapb.{GeneratedMessage, GeneratedMessageCompanion} 8 | import scalapb.json4s.Parser 9 | 10 | import scala.annotation.implicitNotFound 11 | import scala.reflect.ClassTag 12 | import scala.util.control.NonFatal 13 | 14 | @implicitNotFound("Could not generate ScalaPBAsJsonDeliveryConverter for ${A}, try to import or define some") 15 | trait ScalaPBAsJsonDeliveryConverter[A <: GeneratedMessage] extends CheckedDeliveryConverter[A] 16 | 17 | object ScalaPBAsJsonDeliveryConverter { 18 | def derive[A <: GeneratedMessage: GeneratedMessageCompanion: ScalaPBAsJsonDeliveryConverter](): ScalaPBAsJsonDeliveryConverter[A] = 19 | implicitly[ScalaPBAsJsonDeliveryConverter[A]] 20 | 21 | private val jsonParser = new Parser().ignoringUnknownFields 22 | 23 | implicit def createJsonDeliveryConverter[A <: GeneratedMessage: GeneratedMessageCompanion: ClassTag]: ScalaPBAsJsonDeliveryConverter[A] = 24 | new ScalaPBAsJsonDeliveryConverter[A] { 25 | 26 | override def convert(body: Bytes): Either[ConversionException, A] = 27 | try { 28 | Right(jsonParser.fromJsonString[A](body.toStringUtf8)) 29 | } catch { 30 | case NonFatal(e) => 31 | Left { 32 | ConversionException(s"Could not decode class ${implicitly[ClassTag[A]].runtimeClass.getName} from json", e) 33 | } 34 | } 35 | 36 | override def canConvert(d: Delivery[Bytes]): Boolean = d.properties.contentType.map(_.toLowerCase).contains("application/json") 37 | } 38 | 39 | } 40 | 
-------------------------------------------------------------------------------- /extras-scalapb/src/main/scala/com/avast/clients/rabbitmq/extras/format/ScalaPBAsJsonProductConverter.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras.format 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.clients.rabbitmq.ProductConverter 5 | import com.avast.clients.rabbitmq.api.{ConversionException, MessageProperties} 6 | import scalapb.GeneratedMessage 7 | import scalapb.json4s.Printer 8 | 9 | import scala.annotation.implicitNotFound 10 | import scala.reflect.ClassTag 11 | import scala.util.control.NonFatal 12 | 13 | @implicitNotFound("Could not generate ScalaPBAsJsonProductConverter for ${A}, try to import or define some") 14 | trait ScalaPBAsJsonProductConverter[A <: GeneratedMessage] extends ProductConverter[A] 15 | 16 | object ScalaPBAsJsonProductConverter { 17 | def derive[A <: GeneratedMessage: ScalaPBAsJsonProductConverter](): ScalaPBAsJsonProductConverter[A] = 18 | implicitly[ScalaPBAsJsonProductConverter[A]] 19 | 20 | // Printing is intentionally configured to use integer for enums 21 | // because then the serialized data is valid even after values renaming. 22 | private val jsonPrinter = new Printer().formattingEnumsAsNumber.includingDefaultValueFields 23 | 24 | implicit def createJsonProductConverter[A <: GeneratedMessage: ClassTag]: ScalaPBAsJsonProductConverter[A] = 25 | new ScalaPBAsJsonProductConverter[A] { 26 | override def convert(p: A): Either[ConversionException, Bytes] = 27 | try { 28 | Right(Bytes.copyFromUtf8(jsonPrinter.print(p))) 29 | } catch { 30 | case NonFatal(e) => 31 | Left { 32 | ConversionException(s"Could not encode class ${implicitly[ClassTag[A]].runtimeClass.getName} to json", e) 33 | } 34 | } 35 | 36 | override def fillProperties(properties: MessageProperties): MessageProperties = { 37 | properties.copy( 38 | contentType = Some("application/json") 39 | ) 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /extras-scalapb/src/main/scala/com/avast/clients/rabbitmq/extras/format/ScalaPBConsumer.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras.format 2 | 3 | import cats.effect.Sync 4 | import com.avast.bytes.Bytes 5 | import com.avast.clients.rabbitmq.MultiFormatConsumer 6 | import com.avast.clients.rabbitmq.api.{Delivery, DeliveryResult} 7 | import scalapb.{GeneratedMessage, GeneratedMessageCompanion} 8 | 9 | import scala.reflect.ClassTag 10 | 11 | object ScalaPBConsumer { 12 | def create[F[_]: Sync, A <: GeneratedMessage: GeneratedMessageCompanion: ClassTag]( 13 | action: Delivery[A] => F[DeliveryResult]): (Delivery[Bytes] => F[DeliveryResult]) = 14 | MultiFormatConsumer.forType[F, A](ScalaPBAsJsonDeliveryConverter.derive(), ScalaPBAsBinaryDeliveryConverter.derive())(action) 15 | } 16 | -------------------------------------------------------------------------------- /extras-scalapb/src/test/proto/ExampleEvents.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package com.avast.clients.rabbitmq.test; 4 | 5 | message FileSource { 6 | string file_id = 1; 7 | string source = 2; 8 | } 9 | -------------------------------------------------------------------------------- /extras-scalapb/src/test/scala/com/avast/clients/rabbitmq/extras/format/ScalaPBConsumerTest.scala: 
-------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras.format 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.clients.rabbitmq.api._ 5 | import com.avast.clients.rabbitmq.test.FileSource 6 | import monix.eval.Task 7 | import monix.execution.Scheduler.Implicits.global 8 | import org.junit.runner.RunWith 9 | import org.scalatest.{FlatSpec, Matchers} 10 | import org.scalatestplus.junit.JUnitRunner 11 | import scalapb.json4s.Printer 12 | 13 | @RunWith(classOf[JUnitRunner]) 14 | class ScalaPBConsumerTest extends FlatSpec with Matchers { 15 | private val event = FileSource("fileId", "source") 16 | 17 | it must "consume JSON and binary events" in { 18 | val consumer = ScalaPBConsumer.create[Task, FileSource] { 19 | case Delivery.Ok(actual, _, _) => 20 | actual shouldBe event 21 | Task.pure(DeliveryResult.Ack) 22 | 23 | case _ => fail("malformed") 24 | } 25 | consumer(Delivery.Ok(Bytes.copyFrom(event.toByteArray), MessageProperties.empty.copy(contentType = Some("application/protobuf")), "")) 26 | .runSyncUnsafe() shouldBe DeliveryResult.Ack 27 | consumer( 28 | Delivery.Ok(Bytes.copyFromUtf8(new Printer().print(event)), MessageProperties.empty.copy(contentType = Some("application/json")), "")) 29 | .runSyncUnsafe() shouldBe DeliveryResult.Ack 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /extras-scalapb/src/test/scala/com/avast/clients/rabbitmq/extras/format/ScalaPBConvertersTest.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras.format 2 | 3 | import com.avast.bytes.Bytes 4 | import com.avast.clients.rabbitmq.test.FileSource 5 | import org.junit.runner.RunWith 6 | import org.scalatest.{FlatSpec, Matchers} 7 | import org.scalatestplus.junit.JUnitRunner 8 | import scalapb.json4s.Printer 9 | 10 | @RunWith(classOf[JUnitRunner]) 11 | class ScalaPBConvertersTest extends FlatSpec with Matchers { 12 | private val event = FileSource("fileId", "source") 13 | 14 | it must "deserialize binary event" in { 15 | val target = ScalaPBAsBinaryDeliveryConverter.derive[FileSource]() 16 | val Right(actual) = target.convert(Bytes.copyFrom(event.toByteArray)) 17 | actual shouldBe event 18 | } 19 | 20 | it must "deserialize json event" in { 21 | val target = ScalaPBAsJsonDeliveryConverter.derive[FileSource]() 22 | val Right(actual) = target.convert(Bytes.copyFromUtf8(new Printer().print(event))) 23 | actual shouldBe event 24 | } 25 | 26 | it must "serialize binary event" in { 27 | val target = ScalaPBAsBinaryProductConverter.derive[FileSource]() 28 | val Right(actual) = target.convert(event) 29 | actual.toByteArray shouldBe event.toByteArray 30 | } 31 | 32 | it must "serialize json event" in { 33 | val target = ScalaPBAsJsonProductConverter.derive[FileSource]() 34 | val Right(actual) = target.convert(event) 35 | actual shouldBe Bytes.copyFromUtf8(new Printer().print(event)) 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /extras/README.md: -------------------------------------------------------------------------------- 1 | # RabbitMQ client extras 2 | 3 | This is an extra module with some optional functionality. 
4 | 5 | ```groovy 6 | compile 'com.avast.clients.rabbitmq:rabbitmq-client-extras_$scalaVersion:x.x.x' 7 | ``` 8 | 9 | ## HealthCheck 10 | 11 | The library is not able to recover from all failures, so it 12 | provides the [HealthCheck class](src/main/scala/com/avast/clients/rabbitmq/extras/HealthCheck.scala) 13 | which indicates whether the application is healthy or needs to be restarted. To use it, pass the `rabbitExceptionHandler` 14 | as a listener when constructing the RabbitMQ classes; afterwards you can call the `getStatus` method. 15 | -------------------------------------------------------------------------------- /extras/build.gradle: -------------------------------------------------------------------------------- 1 | archivesBaseName = "rabbitmq-client-extras_$scalaVersion" 2 | 3 | dependencies { 4 | api project(":core") 5 | 6 | testImplementation project(":pureconfig") 7 | } 8 | -------------------------------------------------------------------------------- /extras/src/main/scala/com/avast/clients/rabbitmq/extras/HealthCheck.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras 2 | 3 | import cats.effect.Sync 4 | import cats.effect.concurrent.Ref 5 | import cats.implicits.catsSyntaxFlatMapOps 6 | import com.avast.clients.rabbitmq.extras.HealthCheckStatus.{Failure, Ok} 7 | import com.avast.clients.rabbitmq.logging.ImplicitContextLogger 8 | import com.avast.clients.rabbitmq.{ChannelListener, ConnectionListener, ConsumerListener} 9 | import com.rabbitmq.client._ 10 | 11 | sealed trait HealthCheckStatus 12 | 13 | object HealthCheckStatus { 14 | 15 | case object Ok extends HealthCheckStatus 16 | 17 | case class Failure(msg: String, f: Throwable) extends HealthCheckStatus 18 | 19 | } 20 | 21 | /* 22 | * The library is not able to recover from all failures, so it provides this class which indicates whether the application 23 | * is healthy or needs to be restarted. 24 | * To use this class, pass the `rabbitExceptionHandler` as a listener when constructing the RabbitMQ classes. 25 | * Then you can call the `getStatus` method.
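 *
 * A usage sketch (added here for illustration, not part of the original sources; it assumes the pureconfig module with
 * `import com.avast.clients.rabbitmq.pureconfig._`, a Monix `Task` with a `Scheduler` in scope, and existing `config`
 * and `blockingExecutor` values):
 * {{{
 *   val healthCheck = new HealthCheck[Task]
 *   val listener = healthCheck.rabbitExceptionHandler
 *
 *   RabbitMQConnection
 *     .fromConfig[Task](config, blockingExecutor,
 *       connectionListener = Some(listener),
 *       channelListener = Some(listener),
 *       consumerListener = Some(listener))
 *     .use { connection =>
 *       // build consumers/producers here; expose healthCheck.getStatus (an F[HealthCheckStatus]) via a readiness endpoint
 *       ???
 *     }
 * }}}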
26 | */ 27 | class HealthCheck[F[_]: Sync] { 28 | private val F: Sync[F] = Sync[F] // scalastyle:ignore 29 | 30 | private val logger = ImplicitContextLogger.createLogger[F, HealthCheck[F]] 31 | 32 | private val ref = Ref.unsafe[F, HealthCheckStatus](Ok) 33 | 34 | def getStatus: F[HealthCheckStatus] = ref.get 35 | 36 | def fail(e: Throwable): F[Unit] = { 37 | val s = Failure(e.getClass.getName + ": " + e.getMessage, e) 38 | ref.set(s) >> 39 | logger.plainWarn(e)(s"Failing HealthCheck with '${s.msg}'") 40 | } 41 | 42 | def rabbitExceptionHandler: ConnectionListener[F] with ChannelListener[F] with ConsumerListener[F] = 43 | new ConnectionListener[F] with ChannelListener[F] with ConsumerListener[F] { 44 | override def onRecoveryCompleted(connection: Connection): F[Unit] = F.unit 45 | 46 | override def onRecoveryStarted(connection: Connection): F[Unit] = F.unit 47 | 48 | override def onRecoveryFailure(connection: Connection, failure: Throwable): F[Unit] = fail(failure) 49 | 50 | override def onCreate(connection: Connection): F[Unit] = F.unit 51 | 52 | override def onCreateFailure(failure: Throwable): F[Unit] = fail(failure) 53 | 54 | override def onRecoveryCompleted(channel: Channel): F[Unit] = F.unit 55 | 56 | override def onRecoveryStarted(channel: Channel): F[Unit] = F.unit 57 | 58 | override def onRecoveryFailure(channel: Channel, failure: Throwable): F[Unit] = fail(failure) 59 | 60 | override def onCreate(channel: Channel): F[Unit] = F.unit 61 | 62 | override def onError(consumer: Consumer, consumerName: String, channel: Channel, failure: Throwable): F[Unit] = fail(failure) 63 | 64 | override def onShutdown(consumer: Consumer, 65 | channel: Channel, 66 | consumerName: String, 67 | consumerTag: String, 68 | cause: ShutdownSignalException): F[Unit] = fail(cause) 69 | 70 | override def onShutdown(connection: Connection, cause: ShutdownSignalException): F[Unit] = fail(cause) 71 | 72 | override def onShutdown(cause: ShutdownSignalException, channel: Channel): F[Unit] = fail(cause) 73 | } 74 | 75 | } 76 | -------------------------------------------------------------------------------- /extras/src/test/resources/application.conf: -------------------------------------------------------------------------------- 1 | myConfig { 2 | hosts = [${rabbit.host}":"${rabbit.tcp.5672}] 3 | virtualHost = "/" 4 | 5 | name = "TestConnection" 6 | 7 | credentials { 8 | enabled = true 9 | 10 | username = "guest" 11 | password = "guest" 12 | } 13 | 14 | connectionTimeout = 5s 15 | 16 | republishStrategy { 17 | type = CustomExchange 18 | 19 | exchangeName = "EXCHANGE5" 20 | 21 | exchangeDeclare = true 22 | exchangeAutoBind = true 23 | } 24 | 25 | consumers { 26 | testing { 27 | name = "Testing" 28 | 29 | queueName = "QUEUE1" 30 | 31 | processTimeout = 500 ms 32 | 33 | declare { 34 | enabled = true 35 | } 36 | 37 | bindings = [ 38 | { 39 | routingKeys = ["test"] 40 | 41 | exchange { 42 | name = "EXCHANGE1" 43 | 44 | declare { 45 | enabled = true 46 | 47 | type = "direct" 48 | } 49 | } 50 | }, { 51 | routingKeys = ["test2"] 52 | 53 | exchange { 54 | name = "EXCHANGE2" 55 | 56 | declare { 57 | enabled = true 58 | 59 | type = "direct" 60 | } 61 | } 62 | } 63 | ] 64 | } 65 | 66 | testingPull { 67 | name = "Testing" 68 | 69 | queueName = "QUEUE1" 70 | 71 | declare { 72 | enabled = true 73 | } 74 | 75 | bindings = [ 76 | { 77 | routingKeys = ["test"] 78 | 79 | exchange { 80 | name = "EXCHANGE1" 81 | 82 | declare { 83 | enabled = true 84 | 85 | type = "direct" 86 | } 87 | } 88 | }, { 89 | routingKeys = ["test2"] 90 | 91 | 
exchange { 92 | name = "EXCHANGE2" 93 | 94 | declare { 95 | enabled = true 96 | 97 | type = "direct" 98 | } 99 | } 100 | } 101 | ] 102 | } 103 | 104 | testingStreaming { 105 | name = "Testing" 106 | 107 | queueName = "QUEUE1" 108 | 109 | declare { 110 | enabled = true 111 | } 112 | 113 | prefetchCount = 500 114 | 115 | bindings = [ 116 | { 117 | routingKeys = ["test"] 118 | 119 | exchange { 120 | name = "EXCHANGE1" 121 | 122 | declare { 123 | enabled = true 124 | 125 | type = "direct" 126 | } 127 | } 128 | }, { 129 | routingKeys = ["test2"] 130 | 131 | exchange { 132 | name = "EXCHANGE2" 133 | 134 | declare { 135 | enabled = true 136 | 137 | type = "direct" 138 | } 139 | } 140 | } 141 | ] 142 | } 143 | 144 | testingStreamingWithTimeout { 145 | name = "Testing" 146 | 147 | queueName = "QUEUE1" 148 | 149 | declare { 150 | enabled = true 151 | } 152 | 153 | prefetchCount = 100 154 | queueBufferSize = 2 155 | 156 | processTimeout = 500 ms 157 | 158 | bindings = [ 159 | { 160 | routingKeys = ["test"] 161 | 162 | exchange { 163 | name = "EXCHANGE1" 164 | 165 | declare { 166 | enabled = true 167 | 168 | type = "direct" 169 | } 170 | } 171 | }, { 172 | routingKeys = ["test2"] 173 | 174 | exchange { 175 | name = "EXCHANGE2" 176 | 177 | declare { 178 | enabled = true 179 | 180 | type = "direct" 181 | } 182 | } 183 | } 184 | ] 185 | } 186 | } 187 | 188 | producers { 189 | testing { 190 | name = "Testing" 191 | 192 | exchange = "EXCHANGE1" 193 | 194 | declare { 195 | enabled = true 196 | 197 | type = "direct" 198 | } 199 | } 200 | 201 | testing2 { 202 | name = "Testing2" 203 | 204 | exchange = "EXCHANGE2" 205 | 206 | declare { 207 | enabled = true 208 | 209 | type = "direct" 210 | } 211 | } 212 | 213 | testing3 { 214 | name = "Testing3" 215 | 216 | exchange = "EXCHANGE4" 217 | 218 | declare { 219 | enabled = true 220 | 221 | type = "direct" 222 | } 223 | } 224 | } 225 | 226 | declarations { 227 | foo { 228 | declareExchange { 229 | name = "EXCHANGE3" 230 | type = "direct" 231 | } 232 | } 233 | 234 | bindExchange1 { 235 | sourceExchangeName = "EXCHANGE4" 236 | routingKeys = ["test"] 237 | destExchangeName = "EXCHANGE3" 238 | } 239 | 240 | bindExchange2 { 241 | sourceExchangeName = "EXCHANGE4" 242 | routingKeys = ["test"] 243 | destExchangeName = "EXCHANGE1" 244 | } 245 | 246 | declareQueue { 247 | name = "QUEUE2" 248 | 249 | arguments = {"x-max-length": 10000} 250 | } 251 | 252 | bindQueue { 253 | queueName = "QUEUE2" 254 | routingKeys = ["test"] 255 | exchangeName = "EXCHANGE3" 256 | } 257 | } 258 | } 259 | -------------------------------------------------------------------------------- /extras/src/test/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | System.out 5 | 6 | 7 | %d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%thread] %-35logger{35}: %msg\(%file:%line\)%n%xThrowable{full} 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /extras/src/test/scala/com/avast/clients/rabbitmq/extras/TestBase.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras 2 | 3 | import cats.effect.{Blocker, IO, Resource} 4 | import com.typesafe.scalalogging.StrictLogging 5 | import monix.eval.Task 6 | import monix.execution.Scheduler 7 | import monix.execution.Scheduler.Implicits.global 8 | import monix.execution.schedulers.CanBlock 9 | import org.junit.runner.RunWith 10 | import 
org.scalatest.FunSuite 11 | import org.scalatest.concurrent.Eventually 12 | import org.scalatestplus.junit.JUnitRunner 13 | import org.scalatestplus.mockito.MockitoSugar 14 | 15 | import java.util.concurrent.Executors 16 | import scala.concurrent.TimeoutException 17 | import scala.concurrent.duration._ 18 | import scala.language.implicitConversions 19 | 20 | @RunWith(classOf[JUnitRunner]) 21 | class TestBase extends FunSuite with MockitoSugar with Eventually with StrictLogging { 22 | protected implicit def taskToOps[A](t: Task[A]): TaskOps[A] = new TaskOps[A](t) 23 | protected implicit def resourceToIOOps[A](t: Resource[IO, A]): ResourceIOOps[A] = new ResourceIOOps[A](t) 24 | protected implicit def resourceToTaskOps[A](t: Resource[Task, A]): ResourceTaskOps[A] = new ResourceTaskOps[A](t) 25 | } 26 | 27 | object TestBase { 28 | val testBlockingScheduler: Scheduler = Scheduler.io(name = "test-blocking") 29 | val testBlocker: Blocker = Blocker.liftExecutorService(Executors.newCachedThreadPool()) 30 | } 31 | 32 | class TaskOps[A](t: Task[A]) { 33 | def await(duration: Duration): A = t.runSyncUnsafe(duration) 34 | def await: A = await(10.seconds) 35 | } 36 | 37 | class ResourceIOOps[A](val r: Resource[IO, A]) extends AnyVal { 38 | def withResource[B](f: A => B): B = { 39 | withResource(f, Duration.Inf) 40 | } 41 | 42 | def withResource[B](f: A => B, timeout: Duration): B = { 43 | r.use(a => IO(f).map(_(a))).unsafeRunTimed(timeout).getOrElse(throw new TimeoutException("Timeout has occurred")) 44 | } 45 | } 46 | 47 | class ResourceTaskOps[A](val r: Resource[Task, A]) extends AnyVal { 48 | def withResource[B](f: A => B): B = { 49 | withResource(f, Duration.Inf) 50 | } 51 | 52 | def withResource[B](f: A => B, timeout: Duration): B = { 53 | r.use(a => Task.delay(f(a))).runSyncUnsafe(timeout)(TestBase.testBlockingScheduler, CanBlock.permit) 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /extras/src/test/scala/com/avast/clients/rabbitmq/extras/TestHelper.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras 2 | 3 | import io.circe.Decoder 4 | import io.circe.generic.auto._ 5 | import io.circe.parser._ 6 | import scalaj.http.Http 7 | 8 | import java.net.URLEncoder 9 | import java.nio.charset.StandardCharsets 10 | import scala.util.Success 11 | 12 | //noinspection ScalaStyle 13 | class TestHelper(host: String, port: Int) { 14 | 15 | final val RootUri = s"http://$host:$port/api" 16 | 17 | object queue { 18 | 19 | def getMessagesCount(queueName: String): Int = { 20 | val encoded = URLEncoder.encode(queueName, StandardCharsets.UTF_8.toString) 21 | 22 | val resp = Http(s"$RootUri/queues/%2f/$encoded").auth("guest", "guest").asString.body 23 | 24 | println("MESSAGES COUNT:") 25 | println(resp) 26 | 27 | decode[QueueProperties](resp) match { 28 | case Right(p) => p.messages 29 | case r => throw new IllegalStateException(s"Wrong response $r") 30 | } 31 | } 32 | 33 | def getPublishedCount(queueName: String): Int = { 34 | val encoded = URLEncoder.encode(queueName, StandardCharsets.UTF_8.toString) 35 | 36 | val resp = Http(s"$RootUri/queues/%2f/$encoded").auth("guest", "guest").asString.body 37 | 38 | println("PUBLISHED COUNT:") 39 | println(resp) 40 | 41 | decode[QueueProperties](resp) match { 42 | case Right(p) => 43 | p.message_stats.map(_.publish).getOrElse { 44 | Console.err.println(s"Could not extract published_count for $queueName!") 45 | 0 46 | } 47 | case r => throw new 
IllegalStateException(s"Wrong response $r") 48 | } 49 | } 50 | 51 | def getArguments(queueName: String): Map[String, Any] = { 52 | val encoded = URLEncoder.encode(queueName, StandardCharsets.UTF_8.toString) 53 | 54 | val resp = Http(s"$RootUri/queues/%2f/$encoded").auth("guest", "guest").asString.body 55 | 56 | decode[QueueProperties](resp) match { 57 | case Right(p) => 58 | p.arguments.getOrElse { 59 | Console.err.println(s"Could not extract arguments for $queueName!") 60 | Map.empty 61 | } 62 | case r => throw new IllegalStateException(s"Wrong response $r") 63 | } 64 | } 65 | 66 | def delete(queueName: String, ifEmpty: Boolean = false, ifUnused: Boolean = false): Unit = { 67 | println(s"Deleting queue: $queueName") 68 | val encoded = URLEncoder.encode(queueName, StandardCharsets.UTF_8.toString) 69 | 70 | val resp = 71 | Http(s"$RootUri/queues/%2f/$encoded?if-empty=$ifEmpty&if-unused=$ifUnused").method("DELETE").auth("guest", "guest").asString 72 | 73 | val content = resp.body 74 | 75 | val message = s"Delete queue response ${resp.statusLine}: '$content'" 76 | println(message) 77 | 78 | if (!resp.isSuccess && resp.code != 404) { 79 | throw new IllegalStateException(message) 80 | } 81 | } 82 | 83 | private implicit val anyDecoder: Decoder[Any] = Decoder.decodeJson.emapTry { json => 84 | // we are in test, it's enough to support just Int and String here 85 | if (json.isNumber) { 86 | json.as[Int].toTry 87 | } else if (json.isString) { 88 | json.as[String].toTry 89 | } else Success(null) 90 | } 91 | 92 | private case class QueueProperties(messages: Int, message_stats: Option[MessagesStats], arguments: Option[Map[String, Any]]) 93 | private case class MessagesStats(publish: Int, ack: Option[Int]) 94 | } 95 | 96 | object exchange { 97 | 98 | def getPublishedCount(exchangeName: String): Int = { 99 | val encoded = URLEncoder.encode(exchangeName, StandardCharsets.UTF_8.toString) 100 | 101 | val resp = Http(s"$RootUri/exchanges/%2f/$encoded").auth("guest", "guest").asString.body 102 | 103 | println("PUBLISHED COUNT:") 104 | println(resp) 105 | 106 | decode[ExchangeProperties](resp) match { 107 | case Right(p) => 108 | p.message_stats.map(_.publish_in).getOrElse { 109 | Console.err.println(s"Could not extract published_count for $exchangeName!") 110 | 0 111 | } 112 | case r => throw new IllegalStateException(s"Wrong response $r") 113 | } 114 | } 115 | 116 | private case class ExchangeProperties(message_stats: Option[MessagesStats]) 117 | private case class MessagesStats(publish_in: Int, publish_out: Int) 118 | } 119 | 120 | } 121 | -------------------------------------------------------------------------------- /extras/src/test/scala/com/avast/clients/rabbitmq/extras/TestMonitor.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.extras 2 | 3 | import cats.effect.Sync 4 | import com.avast.metrics.api.Naming 5 | import com.avast.metrics.dropwizard.MetricsMonitor 6 | import com.avast.metrics.scalaapi 7 | import com.avast.metrics.scalaeffectapi._ 8 | import com.codahale.metrics.MetricRegistry 9 | 10 | import scala.jdk.CollectionConverters._ 11 | 12 | class TestMonitor[F[_]: Sync] extends Monitor[F] { 13 | 14 | private val metricsRegistry = new MetricRegistry 15 | val underlying: Monitor[F] = Monitor.wrapJava[F](new MetricsMonitor(metricsRegistry, Naming.defaultNaming())) 16 | 17 | override def named(name: String): Monitor[F] = underlying.named(name) 18 | override def named(name1: String, name2: String, restOfNames: String*): 
Monitor[F] = underlying.named(name1, name2, restOfNames: _*) 19 | override def meter(name: String): Meter[F] = underlying.meter(name) 20 | override def counter(name: String): Counter[F] = underlying.counter(name) 21 | override def histogram(name: String): Histogram[F] = underlying.histogram(name) 22 | override def timer(name: String): Timer[F] = underlying.timer(name) 23 | override def timerPair(name: String): TimerPair[F] = underlying.timerPair(name) 24 | override def gauge: GaugeFactory[F] = underlying.gauge 25 | override def asPlainScala: scalaapi.Monitor = underlying.asPlainScala 26 | override def asJava: com.avast.metrics.api.Monitor = underlying.asJava 27 | override def getName: String = underlying.getName 28 | 29 | override def close(): Unit = underlying.close() 30 | 31 | val registry: Registry = new Registry(metricsRegistry) 32 | } 33 | 34 | class Registry(registry: MetricRegistry) { 35 | def meterCount(path: String): Long = registry.getMeters.asScala(path.replace('.', '/')).getCount 36 | def timerCount(path: String): Long = registry.getTimers.asScala(path.replace('.', '/')).getCount 37 | def timerPairCountSuccesses(path: String): Long = registry.getTimers.asScala(path.replace('.', '/') + "Successes").getCount 38 | def timerPairCountFailures(path: String): Long = registry.getTimers.asScala(path.replace('.', '/') + "Failures").getCount 39 | } 40 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/avast/rabbitmq-scala-client/e28eda3788a728ace68d61ada9c5838d994f4b29/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.2-all.zip 4 | networkTimeout=10000 5 | validateDistributionUrl=true 6 | zipStoreBase=GRADLE_USER_HOME 7 | zipStorePath=wrapper/dists 8 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 
15 | @rem 16 | @rem SPDX-License-Identifier: Apache-2.0 17 | @rem 18 | 19 | @if "%DEBUG%"=="" @echo off 20 | @rem ########################################################################## 21 | @rem 22 | @rem Gradle startup script for Windows 23 | @rem 24 | @rem ########################################################################## 25 | 26 | @rem Set local scope for the variables with windows NT shell 27 | if "%OS%"=="Windows_NT" setlocal 28 | 29 | set DIRNAME=%~dp0 30 | if "%DIRNAME%"=="" set DIRNAME=. 31 | @rem This is normally unused 32 | set APP_BASE_NAME=%~n0 33 | set APP_HOME=%DIRNAME% 34 | 35 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 36 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 37 | 38 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 39 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 40 | 41 | @rem Find java.exe 42 | if defined JAVA_HOME goto findJavaFromJavaHome 43 | 44 | set JAVA_EXE=java.exe 45 | %JAVA_EXE% -version >NUL 2>&1 46 | if %ERRORLEVEL% equ 0 goto execute 47 | 48 | echo. 1>&2 49 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 50 | echo. 1>&2 51 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2 52 | echo location of your Java installation. 1>&2 53 | 54 | goto fail 55 | 56 | :findJavaFromJavaHome 57 | set JAVA_HOME=%JAVA_HOME:"=% 58 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 59 | 60 | if exist "%JAVA_EXE%" goto execute 61 | 62 | echo. 1>&2 63 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 64 | echo. 1>&2 65 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2 66 | echo location of your Java installation. 1>&2 67 | 68 | goto fail 69 | 70 | :execute 71 | @rem Setup the command line 72 | 73 | set CLASSPATH= 74 | 75 | 76 | @rem Execute Gradle 77 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" -jar "%APP_HOME%\gradle\wrapper\gradle-wrapper.jar" %* 78 | 79 | :end 80 | @rem End local scope for the variables with windows NT shell 81 | if %ERRORLEVEL% equ 0 goto mainEnd 82 | 83 | :fail 84 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 85 | rem the _cmd.exe /c_ return code! 
86 | set EXIT_CODE=%ERRORLEVEL% 87 | if %EXIT_CODE% equ 0 set EXIT_CODE=1 88 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% 89 | exit /b %EXIT_CODE% 90 | 91 | :mainEnd 92 | if "%OS%"=="Windows_NT" endlocal 93 | 94 | :omega 95 | -------------------------------------------------------------------------------- /pureconfig/build.gradle: -------------------------------------------------------------------------------- 1 | archivesBaseName = "rabbitmq-client-pureconfig_$scalaVersion" 2 | 3 | dependencies { 4 | api project(":core") 5 | 6 | api "com.typesafe:config:$typesafeConfigVersion" 7 | api "com.github.pureconfig:pureconfig_${scalaVersion}:$pureconfigVersion" 8 | } 9 | -------------------------------------------------------------------------------- /pureconfig/src/main/scala/com/avast/clients/rabbitmq/pureconfig/ConfigRabbitMQConnection.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq.pureconfig 2 | 3 | import _root_.pureconfig._ 4 | import _root_.pureconfig.error.ConfigReaderException 5 | import cats.effect.{ConcurrentEffect, Resource} 6 | import com.avast.clients.rabbitmq.api._ 7 | import com.avast.clients.rabbitmq.{ 8 | BindExchangeConfig, 9 | BindQueueConfig, 10 | ChannelListener, 11 | ConnectionListener, 12 | ConsumerConfig, 13 | ConsumerListener, 14 | DeclareExchangeConfig, 15 | DeclareQueueConfig, 16 | DeliveryConverter, 17 | DeliveryReadAction, 18 | ProducerConfig, 19 | ProductConverter, 20 | PullConsumerConfig, 21 | RabbitMQConnection, 22 | ServerChannel, 23 | StreamingConsumerConfig 24 | } 25 | import com.avast.metrics.scalaeffectapi.Monitor 26 | 27 | import scala.reflect.ClassTag 28 | 29 | trait ConfigRabbitMQConnection[F[_]] { 30 | 31 | def newChannel(): Resource[F, ServerChannel] 32 | 33 | /** Creates new instance of consumer, using the TypeSafe configuration passed to the factory and consumer name. 34 | * 35 | * @param configName Name of configuration of the consumer. 36 | * @param monitor Monitor[F] for metrics. 37 | * @param readAction Action executed for each delivered message. You should never return a failed F. 38 | */ 39 | def newConsumer[A: DeliveryConverter](configName: String, monitor: Monitor[F])( 40 | readAction: DeliveryReadAction[F, A]): Resource[F, RabbitMQConsumer[F]] 41 | 42 | /** Creates new instance of producer, using the TypeSafe configuration passed to the factory and producer name. 43 | * 44 | * @param configName Name of configuration of the producer. 45 | * @param monitor Monitor[F] for metrics. 46 | */ 47 | def newProducer[A: ProductConverter](configName: String, monitor: Monitor[F]): Resource[F, RabbitMQProducer[F, A]] 48 | 49 | /** Creates new instance of pull consumer, using the TypeSafe configuration passed to the factory and consumer name. 50 | * 51 | * @param configName Name of configuration of the consumer. 52 | * @param monitor Monitor[F] for metrics. 53 | */ 54 | def newPullConsumer[A: DeliveryConverter](configName: String, monitor: Monitor[F]): Resource[F, RabbitMQPullConsumer[F, A]] 55 | 56 | /** Creates new instance of streaming consumer, using the TypeSafe configuration passed to the factory and consumer name. 57 | * 58 | * @param configName Name of configuration of the consumer. 59 | * @param monitor Monitor[F] for metrics. 
60 | */ 61 | def newStreamingConsumer[A: DeliveryConverter](configName: String, monitor: Monitor[F]): Resource[F, RabbitMQStreamingConsumer[F, A]] 62 | 63 | /** 64 | * Declares an additional exchange, using the TypeSafe configuration passed to the factory and config name. 65 | */ 66 | def declareExchange(configName: String): F[Unit] 67 | 68 | /** 69 | * Declares an additional queue, using the TypeSafe configuration passed to the factory and config name. 70 | */ 71 | def declareQueue(configName: String): F[Unit] 72 | 73 | /** 74 | * Binds a queue to an exchange, using the TypeSafe configuration passed to the factory and config name.
75 | * Failure indicates that the binding has failed for AT LEAST one routing key. 76 | */ 77 | def bindQueue(configName: String): F[Unit] 78 | 79 | /** 80 | * Binds an exchange to another exchange, using the TypeSafe configuration passed to the factory and config name.
81 | * Failure indicates that the binding has failed for AT LEAST one routing key. 82 | */ 83 | def bindExchange(configName: String): F[Unit] 84 | 85 | /** Executes a specified action with newly created [[ServerChannel]] which is then closed. 86 | * 87 | * @see #newChannel() 88 | * @return Result of performed action. 89 | */ 90 | def withChannel[A](f: ServerChannel => F[A]): F[A] 91 | 92 | def connectionListener: ConnectionListener[F] 93 | def channelListener: ChannelListener[F] 94 | def consumerListener: ConsumerListener[F] 95 | } 96 | 97 | class DefaultConfigRabbitMQConnection[F[_]](config: ConfigCursor, wrapped: RabbitMQConnection[F])( 98 | implicit F: ConcurrentEffect[F], 99 | consumerConfigReader: ConfigReader[ConsumerConfig], 100 | producerConfigReader: ConfigReader[ProducerConfig], 101 | pullConsumerConfigReader: ConfigReader[PullConsumerConfig], 102 | streamingConsumerConfigReader: ConfigReader[StreamingConsumerConfig], 103 | declareExchangeConfigReader: ConfigReader[DeclareExchangeConfig], 104 | declareQueueConfigReader: ConfigReader[DeclareQueueConfig], 105 | bindQueueConfigReader: ConfigReader[BindQueueConfig], 106 | bindExchangeConfigReader: ConfigReader[BindExchangeConfig] 107 | ) extends ConfigRabbitMQConnection[F] { 108 | import cats.syntax.flatMap._ 109 | 110 | override def newChannel(): Resource[F, ServerChannel] = wrapped.newChannel() 111 | 112 | override def withChannel[A](f: ServerChannel => F[A]): F[A] = wrapped.withChannel(f) 113 | 114 | override val connectionListener: ConnectionListener[F] = wrapped.connectionListener 115 | 116 | override val channelListener: ChannelListener[F] = wrapped.channelListener 117 | 118 | override val consumerListener: ConsumerListener[F] = wrapped.consumerListener 119 | 120 | override def newConsumer[A: DeliveryConverter](configName: String, monitor: Monitor[F])( 121 | readAction: DeliveryReadAction[F, A]): Resource[F, RabbitMQConsumer[F]] = { 122 | Resource.eval(loadConfig[ConsumerConfig](ConsumersRootName, configName)) >>= (wrapped.newConsumer(_, monitor)(readAction)) 123 | } 124 | 125 | override def newProducer[A: ProductConverter](configName: String, monitor: Monitor[F]): Resource[F, RabbitMQProducer[F, A]] = { 126 | Resource.eval(loadConfig[ProducerConfig](ProducersRootName, configName)) >>= (wrapped.newProducer(_, monitor)) 127 | } 128 | 129 | override def newPullConsumer[A: DeliveryConverter](configName: String, monitor: Monitor[F]): Resource[F, RabbitMQPullConsumer[F, A]] = { 130 | Resource.eval(loadConfig[PullConsumerConfig](ConsumersRootName, configName)) >>= (wrapped.newPullConsumer(_, monitor)) 131 | } 132 | 133 | override def newStreamingConsumer[A: DeliveryConverter](configName: String, 134 | monitor: Monitor[F]): Resource[F, RabbitMQStreamingConsumer[F, A]] = { 135 | Resource.eval(loadConfig[StreamingConsumerConfig](ConsumersRootName, configName)) >>= (wrapped.newStreamingConsumer(_, monitor)) 136 | } 137 | 138 | override def declareExchange(configName: String): F[Unit] = { 139 | loadConfig[DeclareExchangeConfig](DeclarationsRootName, configName) >>= wrapped.declareExchange 140 | } 141 | 142 | override def declareQueue(configName: String): F[Unit] = { 143 | loadConfig[DeclareQueueConfig](DeclarationsRootName, configName) >>= wrapped.declareQueue 144 | } 145 | 146 | override def bindQueue(configName: String): F[Unit] = { 147 | loadConfig[BindQueueConfig](DeclarationsRootName, configName) >>= wrapped.bindQueue 148 | } 149 | 150 | override def bindExchange(configName: String): F[Unit] = { 151 | 
loadConfig[BindExchangeConfig](DeclarationsRootName, configName) >>= wrapped.bindExchange 152 | } 153 | 154 | private def loadConfig[C](section: String, name: String)(implicit ct: ClassTag[C], reader: ConfigReader[C]): F[C] = { 155 | F.delay { 156 | val segments: Seq[PathSegment.Key] = (section +: name.split('.').toSeq).map(PathSegment.Key) 157 | 158 | config.fluent 159 | .at(segments: _*) 160 | .cursor 161 | .flatMap(reader.from) 162 | .fold(errs => throw ConfigReaderException(errs), identity) 163 | } 164 | } 165 | } 166 | -------------------------------------------------------------------------------- /pureconfig/src/main/scala/com/avast/clients/rabbitmq/pureconfig/pureconfig.scala: -------------------------------------------------------------------------------- 1 | package com.avast.clients.rabbitmq 2 | 3 | import _root_.pureconfig._ 4 | import _root_.pureconfig.error.ConfigReaderException 5 | import cats.effect.{ConcurrentEffect, ContextShift, Resource, Sync, Timer} 6 | import com.typesafe.config.Config 7 | 8 | import java.util.concurrent.ExecutorService 9 | import javax.net.ssl.SSLContext 10 | import scala.language.implicitConversions 11 | 12 | package object pureconfig { 13 | 14 | private[pureconfig] val ConsumersRootName = "consumers" 15 | private[pureconfig] val ProducersRootName = "producers" 16 | private[pureconfig] val DeclarationsRootName = "declarations" 17 | 18 | implicit class RabbitMQConnectionOps(val f: RabbitMQConnection.type) extends AnyVal { 19 | def fromConfig[F[_]: ConcurrentEffect: Timer: ContextShift](config: Config, 20 | blockingExecutor: ExecutorService, 21 | sslContext: Option[SSLContext] = None, 22 | connectionListener: Option[ConnectionListener[F]] = None, 23 | channelListener: Option[ChannelListener[F]] = None, 24 | consumerListener: Option[ConsumerListener[F]] = None)( 25 | implicit connectionConfigReader: ConfigReader[RabbitMQConnectionConfig] = implicits.CamelCase.connectionConfigReader, 26 | consumerConfigReader: ConfigReader[ConsumerConfig] = implicits.CamelCase.consumerConfigReader, 27 | producerConfigReader: ConfigReader[ProducerConfig] = implicits.CamelCase.producerConfigReader, 28 | pullConsumerConfigReader: ConfigReader[PullConsumerConfig] = implicits.CamelCase.pullConsumerConfigReader, 29 | streamingConsumerConfigReader: ConfigReader[StreamingConsumerConfig] = implicits.CamelCase.streamingConsumerConfigReader, 30 | declareExchangeConfigReader: ConfigReader[DeclareExchangeConfig] = implicits.CamelCase.declareExchangeConfigReader, 31 | declareQueueConfigReader: ConfigReader[DeclareQueueConfig] = implicits.CamelCase.declareQueueConfigReader, 32 | bindQueueConfigReader: ConfigReader[BindQueueConfig] = implicits.CamelCase.bindQueueConfigReader, 33 | bindExchangeConfigReader: ConfigReader[BindExchangeConfig] = implicits.CamelCase.bindExchangeConfigReader) 34 | : Resource[F, ConfigRabbitMQConnection[F]] = { 35 | val configSource = ConfigSource.fromConfig(config) 36 | 37 | for { 38 | connectionConfig <- Resource.eval(Sync[F].delay { configSource.loadOrThrow[RabbitMQConnectionConfig] }) 39 | connection <- RabbitMQConnection.make(connectionConfig, 40 | blockingExecutor, 41 | sslContext, 42 | connectionListener, 43 | channelListener, 44 | consumerListener) 45 | } yield 46 | new DefaultConfigRabbitMQConnection[F]( 47 | config = configSource.cursor().fold(errs => throw ConfigReaderException(errs), identity), 48 | wrapped = connection 49 | ) 50 | } 51 | } 52 | } 53 | -------------------------------------------------------------------------------- 
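To make the config-driven API above concrete, here is a short, hypothetical wiring sketch using Monix `Task` (it is not part of the repository). It assumes a Typesafe config block shaped like the test `application.conf` shown earlier, i.e. `consumers.testing` and `producers.testing` sections under a `myConfig` root; the object name, routing key and message body are made up for illustration:

```scala
import cats.effect.Resource
import com.avast.clients.rabbitmq.RabbitMQConnection
import com.avast.clients.rabbitmq.api.{Delivery, DeliveryResult}
import com.avast.clients.rabbitmq.pureconfig._
import com.avast.metrics.scalaeffectapi.Monitor
import com.typesafe.config.ConfigFactory
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global

import java.util.concurrent.Executors

object PureconfigExample {
  private val blockingExecutor = Executors.newCachedThreadPool()
  private val config = ConfigFactory.load().getConfig("myConfig")

  // Connection, consumer and producer are all resolved by their config names.
  val app: Resource[Task, Unit] = for {
    connection <- RabbitMQConnection.fromConfig[Task](config, blockingExecutor)
    _ <- connection.newConsumer[String]("testing", Monitor.noOp()) {
      case Delivery.Ok(body, _, _) => Task(println(s"Received: $body")).map(_ => DeliveryResult.Ack)
      case _ => Task.pure(DeliveryResult.Reject)
    }
    producer <- connection.newProducer[String]("testing", Monitor.noOp())
    _ <- Resource.eval(producer.send("test", "hello", None))
  } yield ()
}
```

In an application the resulting `Resource` is kept open for the service lifetime, e.g. `app.use(_ => Task.never)`.
--------------------------------------------------------------------------------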
/recovery-testing/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'docker-compose' 2 | 3 | archivesBaseName = "rabbitmq-client-core_$scalaVersion" 4 | 5 | dockerCompose.isRequiredBy(test) 6 | 7 | test.doFirst { 8 | dockerCompose.exposeAsSystemProperties(test) 9 | } 10 | 11 | dependencies { 12 | api project(":pureconfig") 13 | 14 | api "com.spotify:docker-client:8.16.0" 15 | } 16 | -------------------------------------------------------------------------------- /recovery-testing/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | services: 3 | cluster_rabbit1: 4 | image: lucifer8591/rabbitmq-server:3.7.17 5 | hostname: cluster_rabbit1 6 | ports: 7 | - "20001:5672" 8 | - "15672:15672" 9 | environment: 10 | - RABBITMQ_DEFAULT_USER=${RABBITMQ_DEFAULT_USER:-admin} 11 | - RABBITMQ_DEFAULT_PASS=${RABBITMQ_DEFAULT_PASS:-admin} 12 | cluster_rabbit2: 13 | image: lucifer8591/rabbitmq-server:3.7.17 14 | hostname: cluster_rabbit2 15 | links: 16 | - cluster_rabbit1 17 | environment: 18 | - CLUSTERED=true 19 | - CLUSTER_WITH=cluster_rabbit1 20 | - RAM_NODE=true 21 | ports: 22 | - "20002:5672" 23 | - "15673:15672" 24 | cluster_rabbit3: 25 | image: lucifer8591/rabbitmq-server:3.7.17 26 | hostname: cluster_rabbit3 27 | links: 28 | - cluster_rabbit1 29 | - cluster_rabbit2 30 | environment: 31 | - CLUSTERED=true 32 | - CLUSTER_WITH=cluster_rabbit1 33 | ports: 34 | - "20003:5672" 35 | -------------------------------------------------------------------------------- /recovery-testing/gradle.properties: -------------------------------------------------------------------------------- 1 | notPublished=true 2 | -------------------------------------------------------------------------------- /recovery-testing/src/test/resources/reference.conf: -------------------------------------------------------------------------------- 1 | recoveryTesting { 2 | hosts = ["127.0.0.1:20001", "127.0.0.1:20002", "127.0.0.1:20003"] // the order is irrelevant 3 | virtualHost = "/" 4 | 5 | name = "TestingConnection" 6 | 7 | credentials { 8 | enabled = true 9 | 10 | username = "admin" 11 | password = "admin" 12 | } 13 | 14 | connectionTimeout = 1s 15 | 16 | networkRecovery { 17 | enabled = true // enabled by default 18 | 19 | handler { 20 | type = "exponential" // exponential, linear 21 | 22 | initialDelay = 10s 23 | period = 2s 24 | maxLength = 32s 25 | } 26 | } 27 | 28 | 29 | consumers { 30 | consumer { 31 | name = "TestingConsumer" 32 | 33 | queueName = "RecoveryTesting" 34 | 35 | prefetchCount = 5 36 | processTimeout = 0 37 | 38 | declare { 39 | enabled = true 40 | 41 | durable = true 42 | autoDelete = false 43 | exclusive = false 44 | } 45 | 46 | bindings = [] 47 | } 48 | } 49 | 50 | producers { 51 | producer { 52 | name = "TestingProducer" 53 | exchange = "" 54 | } 55 | } 56 | 57 | } 58 | -------------------------------------------------------------------------------- /recovery-testing/src/test/scala/testing/RecoveryTest.scala: -------------------------------------------------------------------------------- 1 | package testing 2 | 3 | import cats.effect.Blocker 4 | import cats.effect.concurrent.Ref 5 | import com.avast.clients.rabbitmq.api._ 6 | import com.avast.clients.rabbitmq.pureconfig._ 7 | import com.avast.clients.rabbitmq.{ConnectionListener, RabbitMQConnection} 8 | import com.avast.metrics.scalaeffectapi.Monitor 9 | import com.rabbitmq.client.{Connection, ShutdownSignalException} 10 | import 
com.spotify.docker.client.DefaultDockerClient 11 | import com.typesafe.config.ConfigFactory 12 | import com.typesafe.scalalogging.StrictLogging 13 | import monix.eval.Task 14 | import monix.execution.Scheduler.Implicits.global 15 | import org.junit.runner.RunWith 16 | import org.scalatest.{FunSuite, Ignore} 17 | import org.scalatestplus.junit.JUnitRunner 18 | import org.typelevel.log4cats.slf4j.Slf4jLogger 19 | 20 | import java.util.concurrent.Executors 21 | import scala.concurrent.duration.DurationInt 22 | import scala.jdk.CollectionConverters._ 23 | import scala.util.Random 24 | 25 | @Ignore // we don't want to run this test during every build 26 | @RunWith(classOf[JUnitRunner]) 27 | class RecoveryTest extends FunSuite with StrictLogging { 28 | private lazy val config = ConfigFactory.load().getConfig("recoveryTesting") 29 | private lazy val queueName = config.getString("consumers.consumer.queueName") 30 | private lazy val blockingExecutor = Executors.newCachedThreadPool() 31 | private lazy val blocker = Blocker.liftExecutorService(blockingExecutor) 32 | 33 | private lazy val dockerClient = new DefaultDockerClient("unix:///var/run/docker.sock") 34 | 35 | val run: Task[Unit] = { 36 | val messagesReceived = Ref.unsafe[Task, Set[String]](Set.empty) 37 | val messagesSent = Ref.unsafe[Task, Set[String]](Set.empty) 38 | 39 | val messagesBatchSize = 200 40 | 41 | val connectionListener = new RecoveryAwareListener 42 | 43 | RabbitMQConnection 44 | .fromConfig[Task](config, blockingExecutor, connectionListener = Some(connectionListener)) 45 | .use { conn => 46 | val consumer = conn.newConsumer[String]("consumer", Monitor.noOp()) { 47 | case Delivery.Ok(body, props, _) if body == "ahoj" => 48 | Task.sleep(150.millis) >> 49 | messagesReceived.update(_ + props.messageId.getOrElse(sys.error("no MSG ID?"))).as(DeliveryResult.Ack) 50 | 51 | case d => 52 | Task { 53 | println(s"Delivery failure! $d") 54 | DeliveryResult.Reject 55 | } // what?? 56 | } 57 | 58 | val producer = conn.newProducer[String]("producer", Monitor.noOp()) 59 | 60 | consumer 61 | .use { _ => 62 | producer 63 | .use { producer => 64 | // Sends $messagesBatchSize messages, tries to retry in case of failure. 65 | val sendMessages = fs2.Stream 66 | .range[Task](0, messagesBatchSize) 67 | .evalMap { _ => 68 | Task { 69 | Random.alphanumeric.take(10).mkString 70 | }.flatMap { id => 71 | val props = MessageProperties(messageId = Some(id)) 72 | 73 | Task.sleep(100.millis) >> 74 | retryExponentially(producer.send(queueName, "ahoj", Some(props)), 2) >> 75 | messagesSent.update(_ + id) 76 | } 77 | } 78 | .compile 79 | .drain 80 | 81 | // Phase 1: Sends & receives $messagesBatchSize messages. Restart nodes on background. 82 | val phase1 = sendMessages.start 83 | .flatMap { producerFiber => 84 | val containers = dockerClient.listContainers().asScala.toVector 85 | 86 | def restart(i: Int): Task[Unit] = { 87 | val container = containers(i) 88 | Task { println(s"#######\n#######\nRestarting ${container.names().asScala.mkString(" ")}\n#######\n#######") } >> 89 | blocker.delay[Task, Unit] { dockerClient.restartContainer(container.id(), 0) } 90 | } 91 | 92 | val restartAll = (0 to 2).foldLeft(Task.unit) { case (p, n) => p >> Task.sleep(20.seconds) >> restart(n) } 93 | 94 | restartAll >> 95 | Task.sleep(20.seconds) >> 96 | producerFiber.join 97 | } 98 | 99 | // Phase 2: Sends & receives $messagesBatchSize messages. To verify everything was recovered successfully. 
100 | val phase2 = sendMessages 101 | 102 | Task(println("Phase 1 START")) >> 103 | phase1 >> 104 | Task(println("Phase 2 START")) >> 105 | phase2 >> 106 | Task(println("Wait for consumer to finish")) >> 107 | Task.sleep(10.seconds) // let the consumer finish the job 108 | } 109 | } >> { // Verify the behavior: 110 | for { 111 | sent <- messagesSent.get 112 | received <- messagesReceived.get 113 | recStarted <- connectionListener.recoveryStarted.get 114 | recCompleted <- connectionListener.recoveryCompleted.get 115 | } yield { 116 | val inFlight = sent.size - received.size 117 | println(s"Sent ${sent.size}, received (unique) ${received.size}, in-flight: $inFlight") 118 | println(s"Recovery: started ${recStarted}x, completed ${recCompleted}X") 119 | 120 | assertResult(sent)(received) // ensure everything was transferred (note: at least once) 121 | assertResult(messagesBatchSize * 2)(received.size) 122 | 123 | assertResult(recStarted)(recCompleted) 124 | assert(recStarted > 0) // ensure there was at least one recovery, might be more 125 | } 126 | } 127 | } 128 | .void 129 | } 130 | 131 | test("run") { 132 | run.runSyncUnsafe(5.minutes) 133 | } 134 | 135 | private def retryExponentially[A](t: Task[A], maxRetries: Int): Task[A] = { 136 | t.onErrorRestartLoop(maxRetries) { 137 | case (err, counter, f) => 138 | (err, counter) match { 139 | case (t, n) if n > 0 => 140 | val backOffTime = 2 << (maxRetries - counter) // backOffTimes 2, 4 & 8 seconds respectively 141 | Task(logger.error(s"${t.getClass.getName} encountered - going to do one more attempt after $backOffTime seconds")) >> 142 | f(counter - 1).delayExecution(backOffTime.seconds) 143 | 144 | case (err, _) => Task.raiseError(err) 145 | } 146 | } 147 | } 148 | } 149 | 150 | class RecoveryAwareListener extends ConnectionListener[Task] { 151 | val recoveryStarted: Ref[Task, Int] = Ref.unsafe[Task, Int](0) 152 | val recoveryCompleted: Ref[Task, Int] = Ref.unsafe[Task, Int](0) 153 | 154 | private val logger = Slf4jLogger.getLoggerFromClass[Task](this.getClass) 155 | 156 | override def onCreate(connection: Connection): Task[Unit] = { 157 | logger.info(s"Connection created: $connection (name ${connection.getClientProvidedName})") 158 | } 159 | 160 | override def onCreateFailure(failure: Throwable): Task[Unit] = { 161 | logger.warn(failure)(s"Connection NOT created") 162 | } 163 | 164 | override def onRecoveryStarted(connection: Connection): Task[Unit] = { 165 | recoveryStarted.update(_ + 1) >> 166 | logger.info(s"Connection recovery started: $connection (name ${connection.getClientProvidedName})") 167 | } 168 | 169 | override def onRecoveryCompleted(connection: Connection): Task[Unit] = { 170 | recoveryCompleted.update(_ + 1) >> 171 | logger.info(s"Connection recovery completed: $connection (name ${connection.getClientProvidedName})") 172 | } 173 | 174 | override def onRecoveryFailure(connection: Connection, failure: Throwable): Task[Unit] = { 175 | logger.warn(failure)(s"Connection recovery failed: $connection (name ${connection.getClientProvidedName})") 176 | } 177 | 178 | override def onShutdown(connection: Connection, cause: ShutdownSignalException): Task[Unit] = { 179 | if (cause.isInitiatedByApplication) { 180 | logger.info(s"App-initiated connection shutdown: $connection (name ${connection.getClientProvidedName})") 181 | } else { 182 | logger.warn(cause)(s"Server-initiated connection shutdown: $connection (name ${connection.getClientProvidedName})") 183 | } 184 | } 185 | } 186 | 
-------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "config:base" 4 | ] 5 | } 6 | -------------------------------------------------------------------------------- /settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name="rabbitmq-scala-client" 2 | 3 | include 'api' 4 | include 'core' 5 | include 'pureconfig' 6 | include 'extras' 7 | include 'extras-circe' 8 | include 'extras-protobuf' 9 | include 'extras-scalapb' 10 | include 'recovery-testing' 11 | --------------------------------------------------------------------------------