├── .circleci └── config.yml ├── .gitignore ├── .haskell-ghc-mod.json ├── LICENSE ├── README.md ├── Setup.hs ├── docker-compose.yaml ├── eta-kafka-client.cabal ├── example └── Main.hs ├── scripts └── eta-install.sh └── src ├── Kafka.hs └── Kafka ├── Consumer.hs ├── Consumer ├── Bindings.hs ├── ConsumerProperties.hs └── Types.hs ├── Internal └── JPattern.hs ├── Producer.hs ├── Producer ├── Bindings.hs ├── ProducerProperties.hs └── Types.hs └── Types.hs /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2.0 2 | jobs: 3 | build: 4 | working_directory: ~/eta-kafka-client 5 | docker: 6 | - image: typelead/eta:latest 7 | 8 | steps: 9 | - checkout 10 | 11 | - restore_cache: 12 | keys: 13 | - dot-eta-{{ checksum "eta-kafka-client.cabal" }} 14 | - dot-eta 15 | 16 | - restore_cache: 17 | keys: 18 | - dist-{{ checksum "eta-kafka-client.cabal" }} 19 | - dist 20 | 21 | - run: etlas update 22 | - run: etlas install --dependencies-only 23 | - run: etlas build 24 | 25 | - save_cache: 26 | key: dot-eta 27 | paths: 28 | - ~/.eta 29 | - ~/.etlas 30 | - ~/.coursier 31 | 32 | - save_cache: 33 | key: dot-eta-{{ checksum "eta-kafka-client.cabal" }} 34 | paths: 35 | - ~/.eta 36 | - ~/.etlas 37 | - ~/.coursier 38 | 39 | - save_cache: 40 | key: dist 41 | paths: 42 | - ./dist 43 | 44 | - save_cache: 45 | key: dist-{{ checksum "eta-kafka-client.cabal" }} 46 | paths: 47 | - ./dist -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | dist 2 | dist-* 3 | cabal-dev 4 | *.o 5 | *.hi 6 | *.chi 7 | *.chs.h 8 | *.dyn_o 9 | *.dyn_hi 10 | .hpc 11 | .hsenv 12 | .cabal-sandbox/ 13 | cabal.sandbox.config 14 | *.prof 15 | *.aux 16 | *.hp 17 | *.eventlog 18 | .stack-work/ 19 | cabal.project.local 20 | .HTF/ 21 | -------------------------------------------------------------------------------- /.haskell-ghc-mod.json: -------------------------------------------------------------------------------- 1 | { 2 | "disable": true 3 | } 4 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2017, Alexey Raga 2 | 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | * Redistributions in binary form must reproduce the above 12 | copyright notice, this list of conditions and the following 13 | disclaimer in the documentation and/or other materials provided 14 | with the distribution. 15 | 16 | * Neither the name of Alexey Raga nor the names of other 17 | contributors may be used to endorse or promote products derived 18 | from this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 21 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 22 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 23 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 24 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 26 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 27 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 28 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 31 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Kafka bindings for ETA 2 | [![CircleCI](https://circleci.com/gh/haskell-works/eta-kafka-client.svg?style=svg&circle-token=f2664b3602a45dedc11f48a3b9fa35753a91fa8e)](https://circleci.com/gh/haskell-works/eta-kafka-client) 3 | 4 | ## Example 5 | An example can be found in the [example](example/Main.hs) project. 6 | 7 | ### Running the example 8 | Running the example requires Kafka to be available at `localhost`. 9 | 10 | #### Run Kafka inside `docker-compose` 11 | If you already have Kafka accessible at `localhost:9092` skip this section. 12 | 13 | ``` 14 | $ export DOCKER_IP=your_ip_address 15 | $ docker-compose up 16 | ``` 17 | 18 | **Note** `DOCKER_IP` should be a real IP address of your machine, not `127.0.0.1`. 19 | The following script can be used as a helper (MacOS): 20 | ``` 21 | export DOCKER_IP=$(ifconfig | grep -Eo 'inet (addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){3}[0-9]*' | grep -v '127.0.0.1' | head -n 1) 22 | ``` 23 | 24 | #### Execute the example 25 | ``` 26 | $ etlas update 27 | $ etlas install --dependencies-only 28 | $ etlas run 29 | "Running producer..." 30 | "Running consumer..." 31 | "one" 32 | "two" 33 | "three" 34 | ``` 35 | -------------------------------------------------------------------------------- /Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | main = defaultMain 3 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | services: 3 | 4 | zookeeper: 5 | image: confluentinc/cp-zookeeper 6 | hostname: zookeeper 7 | ports: 8 | - 2181:2181 9 | environment: 10 | SERVICE_NAME: zookeeper 11 | ZOOKEEPER_CLIENT_PORT: 2181 12 | 13 | 14 | kafka: 15 | image: confluentinc/cp-kafka 16 | hostname: kafka 17 | ports: 18 | - 9092:9092 19 | links: 20 | - zookeeper:zookeeper 21 | environment: 22 | KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181" 23 | KAFKA_ADVERTISED_LISTENERS: "PLAINTEXT://$DOCKER_IP:9092" 24 | KAFKA_CREATE_TOPICS: 25 | 26 | schema-registry: 27 | image: confluentinc/cp-schema-registry 28 | hostname: schema-registry 29 | ports: 30 | - 8081:8081 31 | links: 32 | - zookeeper:zookeeper 33 | environment: 34 | SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper:2181 35 | SCHEMA_REGISTRY_HOST_NAME: schema_registry 36 | depends_on: 37 | - zookeeper 38 | - kafka 39 | -------------------------------------------------------------------------------- /eta-kafka-client.cabal: -------------------------------------------------------------------------------- 1 | name: eta-kafka-client 2 | version: 0.5.1 3 | synopsis: Eta bindings to Kafka 4 | -- description: 5 | homepage: http://github.com/haskell-works/eta-kafka-client 6 | license: BSD3 7 | license-file: LICENSE 8 | author: Alexey Raga 9 | maintainer: alexey.raga@gmail.com 10 | -- copyright: 11 | category: Database, Eta, Experimental 12 | build-type: Simple 13 | -- extra-source-files: 14 | cabal-version: >=1.10 15 | description: Kafka bingings for Eta (http://eta-lang.org/) 16 | . 17 | Features include: 18 | . 19 | * Consumer groups: auto-rebalancing consumers 20 | . 21 | * Keyed and keyless messages producing/consuming 22 | 23 | 24 | source-repository head 25 | type: git 26 | location: https://github.com/haskell-works/eta-kafka-client 27 | tag: 0.5.1 28 | 29 | executable kafka-client-example 30 | main-is: Main.hs 31 | -- other-modules: 32 | -- other-extensions: 33 | build-depends: base >=4.8 && <4.9, 34 | ghc-prim, 35 | bifunctors, 36 | bytestring, 37 | containers, 38 | transformers, 39 | eta-kafka-client 40 | hs-source-dirs: example 41 | default-language: Haskell2010 42 | maven-depends: org.apache.kafka:kafka-clients:0.10.1.1 43 | maven-repos: http://packages.confluent.io/maven/ 44 | 45 | library 46 | exposed-modules: Kafka 47 | , Kafka.Consumer 48 | , Kafka.Producer 49 | , Kafka.Types 50 | , Kafka.Consumer.Types 51 | , Kafka.Consumer.ConsumerProperties 52 | , Kafka.Producer.Types 53 | , Kafka.Producer.ProducerProperties 54 | 55 | other-modules: Kafka.Consumer.Bindings 56 | , Kafka.Producer.Bindings 57 | , Kafka.Internal.JPattern 58 | -- other-extensions: 59 | build-depends: base >=4.8 && <4.9, 60 | bifunctors, 61 | bytestring, 62 | transformers, 63 | containers 64 | hs-source-dirs: src 65 | default-language: Haskell2010 66 | maven-depends: org.apache.kafka:kafka-clients:0.10.1.1 67 | maven-repos: http://packages.confluent.io/maven/ 68 | -------------------------------------------------------------------------------- /example/Main.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE MagicHash, BangPatterns, FlexibleContexts, DataKinds, TypeFamilies, OverloadedStrings, ScopedTypeVariables #-} 2 | module Main where 3 | 4 | import GHC.Base 5 | 6 | import Control.Monad(forM_) 7 | import Data.ByteString 8 | import Data.Monoid 9 | import Data.Maybe (maybeToList) 10 | 11 | import Kafka.Consumer 12 | import Kafka.Producer 13 | 14 | consumerConf :: ConsumerProperties 15 | consumerConf = consumerBrokersList [BrokerAddress "localhost:9092"] 16 | <> groupId (ConsumerGroupId "test-group-1") 17 | <> offsetReset Earliest 18 | <> noAutoCommit 19 | 20 | producerConf :: ProducerProperties 21 | producerConf = producerBrokersList [BrokerAddress "localhost:9092"] 22 | 23 | testTopic = TopicName "kafka-example-topic" 24 | 25 | main :: IO () 26 | main = do 27 | print "Running producer..." 28 | runProducer testTopic ["one", "two", "three"] 29 | 30 | print "Running consumer..." 31 | received <- runConsumer testTopic 32 | 33 | forM_ received print 34 | print "Ok." 35 | 36 | runProducer :: TopicName -> [ByteString] -> IO () 37 | runProducer t msgs = do 38 | prod <- newProducer producerConf 39 | let items = mkProdRecord t <$> msgs 40 | forM_ items (send prod) 41 | closeProducer prod 42 | where 43 | mkProdRecord t v = ProducerRecord t Nothing (Just v) (Just v) 44 | 45 | 46 | runConsumer :: TopicName -> IO [ByteString] 47 | runConsumer t = do 48 | cons <- newConsumer consumerConf 49 | subscribeTo cons [t] 50 | msgs <- poll cons (Millis 3000) 51 | closeConsumer cons 52 | return $ msgs >>= maybeToList . crValue 53 | 54 | -------------------------------------------------------------------------------- /scripts/eta-install.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | if [ ! -d "${HOME}/eta" ]; then 3 | echo "Installing ETA" 4 | cd ${HOME} 5 | git clone --recursive https://github.com/typelead/eta 6 | cd eta 7 | ./install.sh 8 | else 9 | echo "Updating ETA" 10 | cd ${HOME}/eta 11 | _pull=$(git pull) 12 | _subs=$(git submodule update --recursive) 13 | 14 | if [ "$_pull" == "Already up-to-date." ] && [ -z "$_subs" ]; then 15 | echo "Already up-to-date." 16 | exit 0 17 | fi 18 | 19 | ./cleaninstall.sh 20 | epm update 21 | fi 22 | -------------------------------------------------------------------------------- /src/Kafka.hs: -------------------------------------------------------------------------------- 1 | module Kafka 2 | ( module X 3 | ) where 4 | 5 | import Kafka.Consumer as X 6 | import Kafka.Producer as X 7 | -------------------------------------------------------------------------------- /src/Kafka/Consumer.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | module Kafka.Consumer 3 | ( module X 4 | , KafkaConsumer 5 | , newConsumer 6 | , closeConsumer 7 | , subscribeTo, unsubscribe 8 | , commitSync, commitAsync 9 | , poll 10 | ) where 11 | 12 | -- 13 | import Java 14 | import qualified Java.Array as A 15 | import Java.Collections as J 16 | 17 | import Control.Monad (forM_) 18 | import Control.Monad.IO.Class 19 | import Data.Bifunctor 20 | import Data.ByteString as BS 21 | import Data.Map (Map) 22 | import qualified Data.Map as M 23 | import Data.Monoid 24 | import Data.String 25 | 26 | import Kafka.Consumer.Bindings 27 | 28 | import Kafka.Consumer.ConsumerProperties as X 29 | import Kafka.Consumer.Types as X 30 | import Kafka.Types as X 31 | 32 | newtype KafkaConsumer = KafkaConsumer (JKafkaConsumer JByteArray JByteArray) 33 | 34 | fixedProps :: ConsumerProperties 35 | fixedProps = extraConsumerProps $ M.fromList 36 | [ ("key.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer") 37 | , ("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer") 38 | ] 39 | 40 | -- | Creates a new Kafka consumer 41 | newConsumer :: MonadIO m => ConsumerProperties -> m KafkaConsumer 42 | newConsumer props = 43 | let bsProps = fixedProps <> props 44 | cons = mkRawConsumer (mkConsumerProps bsProps) 45 | in liftIO $ KafkaConsumer <$> cons 46 | 47 | -- | Subscribes an existing kafka consumer to the specified topics 48 | subscribeTo :: MonadIO m => KafkaConsumer -> [TopicName] -> m () 49 | subscribeTo (KafkaConsumer kc) ts = 50 | let rawTopics = toJava $ (\(TopicName t) -> (toJString t)) <$> ts :: J.List JString 51 | in liftIO $ rawSubscribe kc rawTopics 52 | 53 | poll :: MonadIO m => KafkaConsumer -> Millis -> m [ConsumerRecord (Maybe ByteString) (Maybe ByteString)] 54 | poll (KafkaConsumer kc) (Millis t) = liftIO $ do 55 | res <- listRecords <$> rawPoll kc t 56 | return $ mkConsumerRecord <$> res 57 | 58 | commitSync :: MonadIO m => KafkaConsumer -> m () 59 | commitSync (KafkaConsumer kc) = liftIO $ rawCommitSync kc 60 | 61 | commitAsync :: MonadIO m => KafkaConsumer -> m () 62 | commitAsync (KafkaConsumer kc) = liftIO $ rawCommitAsync kc 63 | 64 | unsubscribe :: MonadIO m => KafkaConsumer -> m () 65 | unsubscribe (KafkaConsumer kc) = liftIO $ rawUnsubscribe kc 66 | 67 | closeConsumer :: MonadIO m => KafkaConsumer -> m () 68 | closeConsumer (KafkaConsumer kc) = liftIO $ rawCloseConsumer kc 69 | 70 | mkConsumerRecord :: JConsumerRecord JByteArray JByteArray -> ConsumerRecord (Maybe ByteString) (Maybe ByteString) 71 | mkConsumerRecord jcr = 72 | ConsumerRecord 73 | { crTopic = TopicName . fromJString $ crTopic' jcr 74 | , crPartition = PartitionId (crPartition' jcr) 75 | , crOffset = Offset (crOffset' jcr) 76 | , crChecksum = Checksum (crChecksum' jcr) 77 | , crKey = (BS.pack . fromJava) <$> crKey' jcr 78 | , crValue = (BS.pack . fromJava) <$> crValue' jcr 79 | } 80 | 81 | mkConsumerProps :: ConsumerProperties -> J.Map JString JString 82 | mkConsumerProps (ConsumerProperties m) = 83 | toJava $ bimap toJString toJString <$> M.toList m 84 | -------------------------------------------------------------------------------- /src/Kafka/Consumer/Bindings.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE MagicHash, FlexibleContexts, DataKinds, TypeFamilies, ScopedTypeVariables #-} 2 | module Kafka.Consumer.Bindings 3 | where 4 | 5 | import Java 6 | import Java.Collections as J 7 | import Control.Monad(forM_) 8 | import Data.Map (Map) 9 | import qualified Data.Map as M 10 | 11 | -- TopicPartition 12 | data {-# CLASS "org.apache.kafka.common.TopicPartition" #-} JTopicPartition = 13 | JTopicPartition (Object# JTopicPartition) 14 | deriving (Class, Show) 15 | 16 | foreign import java unsafe "@new" newTopicPartition :: JString -> Int -> JTopicPartition 17 | foreign import java unsafe "topic" tpTopic :: JTopicPartition -> JString 18 | foreign import java unsafe "partition" tpPartition :: JTopicPartition -> Int 19 | 20 | -- JConsumerRecords 21 | data {-# CLASS "org.apache.kafka.clients.consumer.ConsumerRecords" #-} JConsumerRecords k v = 22 | JConsumerRecords (Object# (JConsumerRecords k v)) 23 | deriving (Class, Show) 24 | 25 | type instance Inherits (JConsumerRecords k v) = '[Iterable (JConsumerRecord k v)] 26 | 27 | -- JConsumerRecord 28 | data {-# CLASS "org.apache.kafka.clients.consumer.ConsumerRecord" #-} JConsumerRecord k v = 29 | JConsumerRecord (Object# (JConsumerRecord k v)) 30 | deriving (Class, Show) 31 | 32 | foreign import java unsafe "topic" crTopic' :: JConsumerRecord k v -> JString 33 | foreign import java unsafe "partition" crPartition' :: JConsumerRecord k v -> Int 34 | foreign import java unsafe "key" crKey' :: (Extends k Object) => JConsumerRecord k v -> Maybe k 35 | foreign import java unsafe "value" crValue' :: (Extends v Object) => JConsumerRecord k v -> Maybe v 36 | foreign import java unsafe "offset" crOffset' :: JConsumerRecord k v -> Int64 37 | foreign import java unsafe "checksum" crChecksum' :: JConsumerRecord k v -> Int64 38 | 39 | -- Consumer 40 | data {-# CLASS "org.apache.kafka.clients.consumer.KafkaConsumer" #-} JKafkaConsumer k v = 41 | JKafkaConsumer (Object# (JKafkaConsumer k v)) 42 | deriving Class 43 | 44 | 45 | foreign import java unsafe "@new" mkRawConsumer :: J.Map JString JString -> IO (JKafkaConsumer k v) 46 | foreign import java unsafe "close" rawCloseConsumer :: JKafkaConsumer k v -> IO () 47 | foreign import java unsafe "subscribe" rawSubscribe :: (Extends b (Collection JString)) => JKafkaConsumer k v -> b -> IO () 48 | foreign import java unsafe "unsubscribe" rawUnsubscribe :: JKafkaConsumer k v -> IO () 49 | foreign import java unsafe "commitSync" rawCommitSync :: JKafkaConsumer k v -> IO () 50 | foreign import java unsafe "commitAsync" rawCommitAsync :: JKafkaConsumer k v -> IO () 51 | 52 | foreign import java unsafe "poll" rawPoll :: JKafkaConsumer k v -> Int64 -> IO (JConsumerRecords k v) 53 | 54 | listRecords :: forall k v. JConsumerRecords k v -> [JConsumerRecord k v] 55 | listRecords rs = fromJava (superCast rs :: Iterable (JConsumerRecord k v)) 56 | -------------------------------------------------------------------------------- /src/Kafka/Consumer/ConsumerProperties.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 2 | module Kafka.Consumer.ConsumerProperties 3 | where 4 | 5 | -- 6 | import Data.Map (Map) 7 | import Data.Bifunctor 8 | import Kafka.Types 9 | import qualified Data.Map as M 10 | import qualified Data.List as L 11 | import Data.Monoid 12 | import Kafka.Consumer.Types 13 | 14 | newtype ConsumerProperties = ConsumerProperties (Map String String) 15 | deriving (Show) 16 | 17 | instance Monoid ConsumerProperties where 18 | mempty = ConsumerProperties M.empty 19 | mappend (ConsumerProperties m1) (ConsumerProperties m2) = ConsumerProperties (M.union m1 m2) 20 | 21 | consumerBrokersList :: [BrokerAddress] -> ConsumerProperties 22 | consumerBrokersList bs = 23 | let bs' = L.intercalate "," ((\(BrokerAddress x) -> x) <$> bs) 24 | in ConsumerProperties $ M.fromList [("bootstrap.servers", bs')] 25 | 26 | groupId :: ConsumerGroupId -> ConsumerProperties 27 | groupId (ConsumerGroupId cid) = 28 | ConsumerProperties $ M.fromList [("group.id", cid)] 29 | 30 | offsetReset :: OffsetReset -> ConsumerProperties 31 | offsetReset o = 32 | let o' = case o of 33 | Earliest -> "earliest" 34 | Latest -> "latest" 35 | in ConsumerProperties $ M.fromList [("auto.offset.reset", o')] 36 | 37 | noAutoCommit :: ConsumerProperties 38 | noAutoCommit = 39 | ConsumerProperties $ M.fromList [("enable.auto.commit", "false")] 40 | 41 | autoCommit :: Millis -> ConsumerProperties 42 | autoCommit (Millis ms) = ConsumerProperties $ 43 | M.fromList 44 | [ ("enable.auto.commit", "true") 45 | , ("auto.commit.interval.ms", show ms) 46 | ] 47 | 48 | clientId :: ClientId -> ConsumerProperties 49 | clientId (ClientId cid) = 50 | ConsumerProperties $ M.fromList [("client.id", cid)] 51 | 52 | extraConsumerProps :: Map String String -> ConsumerProperties 53 | extraConsumerProps = ConsumerProperties 54 | -------------------------------------------------------------------------------- /src/Kafka/Consumer/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 2 | module Kafka.Consumer.Types 3 | where 4 | 5 | import Java 6 | import Data.Map (Map) 7 | import Data.Bifunctor 8 | import Data.Bifoldable 9 | import Data.Bitraversable 10 | import Kafka.Types 11 | import qualified Data.Map as M 12 | import qualified Data.List as L 13 | import Data.Monoid 14 | 15 | 16 | -- 17 | newtype ConsumerGroupId = ConsumerGroupId String deriving (Show, Eq, Ord) 18 | newtype ClientId = ClientId String deriving (Show, Eq, Ord) 19 | newtype Offset = Offset Int64 deriving (Show, Eq, Ord, Num) 20 | newtype Checksum = Checksum Int64 deriving (Show, Eq, Ord, Num) 21 | newtype Millis = Millis Int64 deriving (Show, Eq, Ord, Num) 22 | data OffsetReset = Earliest | Latest deriving (Show, Eq) 23 | 24 | data ConsumerRecord k v = 25 | ConsumerRecord { 26 | crTopic :: TopicName, 27 | crPartition :: PartitionId, 28 | crOffset :: Offset, 29 | crChecksum :: Checksum, 30 | crKey :: k, 31 | crValue :: v 32 | } deriving (Show) 33 | 34 | -- 35 | instance Bifunctor ConsumerRecord where 36 | bimap f g (ConsumerRecord t p o c k v) = ConsumerRecord t p o c (f k) (g v) 37 | {-# INLINE bimap #-} 38 | 39 | instance Functor (ConsumerRecord k) where 40 | fmap = second 41 | {-# INLINE fmap #-} 42 | 43 | instance Foldable (ConsumerRecord k) where 44 | foldMap f r = f (crValue r) 45 | {-# INLINE foldMap #-} 46 | 47 | instance Traversable (ConsumerRecord k) where 48 | traverse f r = (\v -> crMapValue (const v) r) <$> f (crValue r) 49 | {-# INLINE traverse #-} 50 | 51 | instance Bifoldable ConsumerRecord where 52 | bifoldMap f g r = f (crKey r) `mappend` g (crValue r) 53 | {-# INLINE bifoldMap #-} 54 | 55 | instance Bitraversable ConsumerRecord where 56 | bitraverse f g r = (\k v -> bimap (const k) (const v) r) <$> f (crKey r) <*> g (crValue r) 57 | {-# INLINE bitraverse #-} 58 | 59 | crMapKey :: (k -> k') -> ConsumerRecord k v -> ConsumerRecord k' v 60 | crMapKey = first 61 | {-# INLINE crMapKey #-} 62 | 63 | crMapValue :: (v -> v') -> ConsumerRecord k v -> ConsumerRecord k v' 64 | crMapValue = second 65 | {-# INLINE crMapValue #-} 66 | 67 | crMapKV :: (k -> k') -> (v -> v') -> ConsumerRecord k v -> ConsumerRecord k' v' 68 | crMapKV = bimap 69 | {-# INLINE crMapKV #-} 70 | 71 | sequenceFirst :: (Bitraversable t, Applicative f) => t (f k) v -> f (t k v) 72 | sequenceFirst = bitraverse id pure 73 | {-# INLINE sequenceFirst #-} 74 | 75 | traverseFirst :: (Bitraversable t, Applicative f) 76 | => (k -> f k') 77 | -> t k v 78 | -> f (t k' v) 79 | traverseFirst f = bitraverse f pure 80 | {-# INLINE traverseFirst #-} 81 | 82 | traverseFirstM :: (Bitraversable t, Applicative f, Monad m) 83 | => (k -> m (f k')) 84 | -> t k v 85 | -> m (f (t k' v)) 86 | traverseFirstM f r = bitraverse id pure <$> bitraverse f pure r 87 | {-# INLINE traverseFirstM #-} 88 | 89 | traverseM :: (Traversable t, Applicative f, Monad m) 90 | => (v -> m (f v')) 91 | -> t v 92 | -> m (f (t v')) 93 | traverseM f r = sequenceA <$> traverse f r 94 | {-# INLINE traverseM #-} 95 | 96 | bitraverseM :: (Bitraversable t, Applicative f, Monad m) 97 | => (k -> m (f k')) 98 | -> (v -> m (f v')) 99 | -> t k v 100 | -> m (f (t k' v')) 101 | bitraverseM f g r = bisequenceA <$> bimapM f g r 102 | {-# INLINE bitraverseM #-} 103 | -------------------------------------------------------------------------------- /src/Kafka/Internal/JPattern.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE MagicHash, FlexibleContexts, DataKinds, TypeFamilies #-} 2 | module Kafka.Internal.JPattern 3 | where 4 | 5 | -- 6 | import Java 7 | 8 | data {-# CLASS "java.util.regex.Pattern" #-} JPattern = JPattern (Object# JPattern) 9 | deriving Class 10 | 11 | foreign import java unsafe "@static java.util.regex.Pattern.compile" 12 | newPattern :: JString -> Java a JPattern 13 | -------------------------------------------------------------------------------- /src/Kafka/Producer.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | module Kafka.Producer 3 | ( module X 4 | , KafkaProducer, JFuture, JRecordMetadata 5 | , newProducer 6 | , send 7 | , closeProducer 8 | , mkJProducerRecord 9 | ) where 10 | 11 | import Java 12 | import Java.Collections as J 13 | 14 | import Control.Monad.IO.Class 15 | import Data.Bifunctor 16 | import Data.ByteString as BS 17 | import Data.Map (Map) 18 | import qualified Data.Map as M 19 | import Data.Monoid 20 | 21 | import Kafka.Producer.Bindings 22 | 23 | import Kafka.Producer.ProducerProperties as X 24 | import Kafka.Producer.Types as X 25 | import Kafka.Types as X 26 | 27 | newtype KafkaProducer = KafkaProducer (JKafkaProducer JByteArray JByteArray) 28 | 29 | fixedProps :: ProducerProperties 30 | fixedProps = extraProducerProps $ M.fromList 31 | [ ("key.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer") 32 | , ("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer") 33 | ] 34 | 35 | newProducer :: MonadIO m 36 | => ProducerProperties 37 | -> m KafkaProducer 38 | newProducer props = 39 | let bsProps = fixedProps <> props 40 | prod = mkRawProducer (mkProducerProps bsProps) 41 | in liftIO $ KafkaProducer <$> prod 42 | 43 | send :: MonadIO m 44 | => KafkaProducer 45 | -> ProducerRecord 46 | -> m (JFuture JRecordMetadata) 47 | send (KafkaProducer kp) r = liftIO $ rawSend kp (mkJProducerRecord r) 48 | 49 | mkJProducerRecord :: ProducerRecord -> JProducerRecord JByteArray JByteArray 50 | mkJProducerRecord (ProducerRecord t p k v) = 51 | let TopicName t' = t 52 | p' = (\(PartitionId x) -> x) <$> p 53 | k' = toJava . BS.unpack <$> k 54 | v' = toJava . BS.unpack <$> v 55 | in newJProducerRecord (toJString t') (toJava <$> p') Nothing k' v' 56 | 57 | closeProducer :: MonadIO m => KafkaProducer -> m () 58 | closeProducer (KafkaProducer kp) = liftIO $ flushProducer kp >> destroyProducer kp 59 | 60 | mkProducerProps :: ProducerProperties -> J.Map JString JString 61 | mkProducerProps (ProducerProperties m) = 62 | toJava $ bimap toJString toJString <$> M.toList m 63 | -------------------------------------------------------------------------------- /src/Kafka/Producer/Bindings.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE MagicHash, FlexibleContexts, DataKinds, TypeFamilies #-} 2 | module Kafka.Producer.Bindings 3 | where 4 | 5 | import Java 6 | import Java.Collections as J 7 | import Control.Monad(forM_) 8 | import Data.Map (Map) 9 | import qualified Data.Map as M 10 | 11 | data {-# CLASS "java.util.concurrent.Future" #-} JFuture a = JFuture (Object# (JFuture a)) 12 | deriving (Class) 13 | 14 | data {-# CLASS "org.apache.kafka.clients.producer.RecordMetadata" #-} JRecordMetadata = JRecordMetadata (Object# JRecordMetadata) 15 | deriving (Class) 16 | 17 | foreign import java unsafe "offset" rmOffset :: JRecordMetadata -> Int64 18 | foreign import java unsafe "topic" rmTopic :: JRecordMetadata -> JString 19 | foreign import java unsafe "partition" rmPartition :: JRecordMetadata -> Int 20 | 21 | -- JProducerRecord 22 | data {-# CLASS "org.apache.kafka.clients.producer.ProducerRecord" #-} JProducerRecord k v = 23 | JProducerRecord (Object# (JProducerRecord k v)) 24 | deriving (Class, Show) 25 | 26 | foreign import java unsafe "@new" newJProducerRecord :: 27 | (Extends k Object, Extends v Object) => JString -> Maybe JInteger -> Maybe JLong -> Maybe k -> Maybe v -> JProducerRecord k v 28 | 29 | -- Producer 30 | data {-# CLASS "org.apache.kafka.clients.producer.KafkaProducer" #-} JKafkaProducer k v = 31 | JKafkaProducer (Object# (JKafkaProducer k v)) 32 | deriving Class 33 | 34 | foreign import java unsafe "@new" mkRawProducer :: J.Map JString JString -> IO (JKafkaProducer k v) 35 | foreign import java unsafe "close" destroyProducer :: JKafkaProducer k v -> IO () 36 | foreign import java unsafe "flush" flushProducer :: JKafkaProducer k v -> IO () 37 | foreign import java unsafe "send" rawSend :: JKafkaProducer k v -> JProducerRecord k v -> IO (JFuture JRecordMetadata) 38 | -------------------------------------------------------------------------------- /src/Kafka/Producer/ProducerProperties.hs: -------------------------------------------------------------------------------- 1 | module Kafka.Producer.ProducerProperties 2 | where 3 | 4 | -- 5 | import Data.Map (Map) 6 | import Data.Bifunctor 7 | import Kafka.Types 8 | import qualified Data.Map as M 9 | import qualified Data.List as L 10 | import Data.Monoid 11 | 12 | newtype ProducerProperties = ProducerProperties (Map String String) 13 | deriving (Show) 14 | 15 | instance Monoid ProducerProperties where 16 | mempty = ProducerProperties M.empty 17 | mappend (ProducerProperties m1) (ProducerProperties m2) = ProducerProperties (M.union m1 m2) 18 | 19 | producerBrokersList :: [BrokerAddress] -> ProducerProperties 20 | producerBrokersList bs = 21 | let bs' = L.intercalate "," ((\(BrokerAddress x) -> x) <$> bs) 22 | in ProducerProperties $ M.fromList [("bootstrap.servers", bs')] 23 | 24 | extraProducerProps :: Map String String -> ProducerProperties 25 | extraProducerProps = ProducerProperties 26 | -------------------------------------------------------------------------------- /src/Kafka/Producer/Types.hs: -------------------------------------------------------------------------------- 1 | module Kafka.Producer.Types 2 | where 3 | 4 | -- 5 | import Data.ByteString 6 | import Kafka.Types 7 | 8 | data ProducerRecord = ProducerRecord 9 | { prTopic :: !TopicName 10 | , prPartition :: Maybe PartitionId 11 | , prKey :: Maybe ByteString 12 | , prValue :: Maybe ByteString 13 | } deriving (Eq, Show) 14 | -------------------------------------------------------------------------------- /src/Kafka/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 2 | module Kafka.Types 3 | where 4 | 5 | import Java 6 | 7 | newtype TopicName = TopicName String deriving (Show, Eq, Ord) 8 | newtype PartitionId = PartitionId Int deriving (Show, Eq, Ord, Num) 9 | newtype Timestamp = Timestamp Int64 deriving (Show, Eq, Ord) 10 | 11 | data BrokerAddress = BrokerAddress String deriving (Show, Eq, Ord) 12 | --------------------------------------------------------------------------------