├── .gitignore ├── docker-run.sh ├── dockerfile-kafka ├── usr │ └── local │ │ └── bin │ │ └── run.sh └── Dockerfile ├── Makefile ├── sync-producer └── main.go ├── LICENSE ├── consumer └── main.go ├── README.md ├── async-producer └── main.go └── http-log-producer └── main.go /.gitignore: -------------------------------------------------------------------------------- 1 | async-producer/async-producer 2 | sync-producer/sync-producer 3 | http-log-producer/http-log-producer 4 | consumer/consumer -------------------------------------------------------------------------------- /docker-run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | docker rm --force kafka 4 | docker run --rm -it --name kafka -p 8080:8080 -p 9092:9092 -p 2181:2181 -v $(pwd):/work tcnksm/kafka 5 | -------------------------------------------------------------------------------- /dockerfile-kafka/usr/local/bin/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Start to run zookeeper as background process 4 | bin/zookeeper-server-start.sh config/zookeeper.properties & 5 | 6 | # Start kafka server 7 | bin/kafka-server-start.sh config/server.properties 8 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | build: 2 | cd sync-producer; GOOS=linux GOARCH=amd64 go build 3 | cd async-producer; GOOS=linux GOARCH=amd64 go build 4 | cd http-log-producer; GOOS=linux GOARCH=amd64 go build 5 | cd consumer; GOOS=linux GOARCH=amd64 go build 6 | 7 | docker-build: 8 | cd dockerfile-kafka; docker build -t tcnksm/kafka . 
9 | -------------------------------------------------------------------------------- /dockerfile-kafka/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM java:openjdk-8-jre 2 | 3 | ENV DEBIAN_FRONTEND noninteractive 4 | 5 | RUN apt-get update && apt-get install -y \ 6 | zookeeper \ 7 | wget \ 8 | dnsutils \ 9 | vim \ 10 | && rm -rf /var/lib/apt/lists/* 11 | 12 | ENV KAFKA_VERSION 0.8.2.1 13 | ENV SCALA_VERSION 2.10 14 | RUN wget -q \ 15 | http://apache.mirrors.spacedump.net/kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz \ 16 | -O /tmp/kafka.tgz \ 17 | && tar xfz /tmp/kafka.tgz -C /opt \ 18 | && rm /tmp/kafka.tgz \ 19 | && mv /opt/kafka_${SCALA_VERSION}-${KAFKA_VERSION} /opt/kafka 20 | 21 | 22 | ADD usr/local/bin/run.sh /usr/local/bin/run.sh 23 | 24 | WORKDIR /opt/kafka 25 | CMD ["/usr/local/bin/run.sh"] 26 | 27 | -------------------------------------------------------------------------------- /sync-producer/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | 6 | "github.com/Shopify/sarama" 7 | ) 8 | 9 | func main() { 10 | 11 | config := sarama.NewConfig() 12 | config.Producer.RequiredAcks = sarama.WaitForAll 13 | config.Producer.Retry.Max = 5 14 | 15 | // brokers := []string{"192.168.59.103:9092"} 16 | brokers := []string{"localhost:9092"} 17 | producer, err := sarama.NewSyncProducer(brokers, config) 18 | if err != nil { 19 | // Should not reach here 20 | panic(err) 21 | } 22 | 23 | defer func() { 24 | if err := producer.Close(); err != nil { 25 | // Should not reach here 26 | panic(err) 27 | } 28 | }() 29 | 30 | topic := "important" 31 | msg := &sarama.ProducerMessage{ 32 | Topic: topic, 33 | Value: sarama.StringEncoder("Something Cool"), 34 | } 35 | 36 | partition, offset, err := producer.SendMessage(msg) 37 | if err != nil { 38 | panic(err) 39 | } 40 | 41 | fmt.Printf("Message is stored in 
topic(%s)/partition(%d)/offset(%d)\n", topic, partition, offset) 42 | } 43 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015 Taichi Nakashima 2 | 3 | MIT License 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining 6 | a copy of this software and associated documentation files (the 7 | "Software"), to deal in the Software without restriction, including 8 | without limitation the rights to use, copy, modify, merge, publish, 9 | distribute, sublicense, and/or sell copies of the Software, and to 10 | permit persons to whom the Software is furnished to do so, subject to 11 | the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be 14 | included in all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 19 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 20 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 22 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 23 | -------------------------------------------------------------------------------- /consumer/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "os" 6 | "os/signal" 7 | 8 | "github.com/Shopify/sarama" 9 | ) 10 | 11 | func main() { 12 | 13 | config := sarama.NewConfig() 14 | config.Consumer.Return.Errors = true 15 | 16 | // Specify brokers address. 
This is the default one 17 | brokers := []string{"localhost:9092"} 18 | 19 | // Create new consumer 20 | master, err := sarama.NewConsumer(brokers, config) 21 | if err != nil { 22 | panic(err) 23 | } 24 | 25 | defer func() { 26 | if err := master.Close(); err != nil { 27 | panic(err) 28 | } 29 | }() 30 | 31 | topic := "important" 32 | // How to decide partition, is it fixed value...? 33 | consumer, err := master.ConsumePartition(topic, 0, sarama.OffsetOldest) 34 | if err != nil { 35 | panic(err) 36 | } 37 | 38 | signals := make(chan os.Signal, 1) 39 | signal.Notify(signals, os.Interrupt) 40 | 41 | // Count how many messages were processed 42 | msgCount := 0 43 | 44 | // Get signal for finish 45 | doneCh := make(chan struct{}) 46 | go func() { 47 | for { 48 | select { 49 | case err := <-consumer.Errors(): 50 | fmt.Println(err) 51 | case msg := <-consumer.Messages(): 52 | msgCount++ 53 | fmt.Println("Received messages", string(msg.Key), string(msg.Value)) 54 | case <-signals: 55 | fmt.Println("Interrupt is detected") 56 | doneCh <- struct{}{} 57 | } 58 | } 59 | }() 60 | 61 | <-doneCh 62 | fmt.Println("Processed", msgCount, "messages") 63 | } 64 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # sarama 2 | 3 | This is a sample project that uses [sarama](https://godoc.org/github.com/Shopify/sarama), a Go client library for Apache Kafka. This repository contains a simple producer and consumer. To use it you need a running Kafka (or you can run one via the Docker image below). 4 | 5 | 6 | ## Install 7 | 8 | To get sarama, 9 | 10 | ```bash 11 | $ go get github.com/Shopify/sarama 12 | ``` 13 | 14 | ## Build 15 | 16 | To build the producer & consumer, run the command below. A binary is generated in each directory: 17 | 18 | ```bash 19 | $ make build 20 | ``` 21 | 22 | ## Usage 23 | 24 | Ensure Kafka is running where you expect it. 25 | 26 | To execute the consumer, run the command below. 
The consumer keeps fetching from the topic until it receives an `os.Interrupt` signal. 27 | 28 | ```bash 29 | $ ./consumer/consumer 30 | ``` 31 | 32 | To execute the async-producer, run the command below. The producer keeps publishing messages until it receives an `os.Interrupt` signal. 33 | 34 | ```bash 35 | $ ./async-producer/async-producer 36 | ``` 37 | 38 | ## Kafka on Docker 39 | 40 | You can run Kafka on Docker, and you can also run the producer & consumer on Docker. 41 | 42 | To build the Docker image of Kafka, 43 | 44 | ```bash 45 | $ make docker-build 46 | ``` 47 | 48 | To run the Kafka container, 49 | 50 | ```bash 51 | $ ./docker-run.sh 52 | ``` 53 | 54 | To execute the consumer, 55 | 56 | ```bash 57 | $ docker exec -it kafka /work/consumer/consumer 58 | ``` 59 | 60 | To execute the async-producer, 61 | 62 | ```bash 63 | $ docker exec -it kafka /work/async-producer/async-producer 64 | ``` 65 | 66 | To execute the http-log-producer, 67 | 68 | ```bash 69 | $ docker exec -it kafka /work/http-log-producer/http-log-producer 70 | ``` 71 | 72 | Then access it (e.g., when Docker runs on boot2docker), 73 | 74 | ```bash 75 | $ curl $(boot2docker ip):8080 76 | ``` 77 | 78 | ## Author 79 | 80 | [Taichi Nakashima](https://github.com/tcnksm) 81 | 82 | 83 | -------------------------------------------------------------------------------- /async-producer/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "log" 6 | "os" 7 | "os/signal" 8 | "time" 9 | 10 | "strconv" 11 | 12 | "github.com/Shopify/sarama" 13 | ) 14 | 15 | func main() { 16 | 17 | // Setup configuration 18 | config := sarama.NewConfig() 19 | // Return specifies what channels will be populated. 20 | // If they are set to true, you must read from the respective channels to prevent deadlock, e.g.: 21 | // config.Producer.Return.Successes = true 22 | // The total number of times to retry sending a message (default 3). 23 | config.Producer.Retry.Max = 5 24 | // The level of acknowledgement reliability needed from the broker. 
25 | config.Producer.RequiredAcks = sarama.WaitForAll 26 | brokers := []string{"localhost:9092"} 27 | producer, err := sarama.NewAsyncProducer(brokers, config) 28 | if err != nil { 29 | // Should not reach here 30 | panic(err) 31 | } 32 | 33 | defer func() { 34 | if err := producer.Close(); err != nil { 35 | // Should not reach here 36 | panic(err) 37 | } 38 | }() 39 | 40 | signals := make(chan os.Signal, 1) 41 | signal.Notify(signals, os.Interrupt) 42 | 43 | var enqueued, errors int 44 | doneCh := make(chan struct{}) 45 | go func() { 46 | for { 47 | 48 | time.Sleep(500 * time.Millisecond) 49 | 50 | strTime := strconv.Itoa(int(time.Now().Unix())) 51 | msg := &sarama.ProducerMessage{ 52 | Topic: "important", 53 | Key: sarama.StringEncoder(strTime), 54 | Value: sarama.StringEncoder("Something Cool"), 55 | } 56 | select { 57 | case producer.Input() <- msg: 58 | enqueued++ 59 | fmt.Println("Produce message") 60 | case err := <-producer.Errors(): 61 | errors++ 62 | fmt.Println("Failed to produce message:", err) 63 | case <-signals: 64 | doneCh <- struct{}{} 65 | } 66 | } 67 | }() 68 | 69 | <-doneCh 70 | log.Printf("Enqueued: %d; errors: %d\n", enqueued, errors) 71 | } 72 | -------------------------------------------------------------------------------- /http-log-producer/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "encoding/json" 5 | "log" 6 | "net/http" 7 | "os" 8 | "strconv" 9 | "time" 10 | 11 | "github.com/Shopify/sarama" 12 | ) 13 | 14 | func main() { 15 | os.Exit(realMain()) 16 | } 17 | 18 | func realMain() int { 19 | 20 | brokers := []string{"localhost:9092"} 21 | 22 | config := sarama.NewConfig() 23 | config.Producer.RequiredAcks = sarama.WaitForLocal 24 | config.Producer.Compression = sarama.CompressionSnappy 25 | config.Producer.Flush.Frequency = 500 * time.Millisecond 26 | 27 | producer, err := sarama.NewAsyncProducer(brokers, config) 28 | if err != nil { 29 | // Should not 
reach here 30 | panic(err) 31 | } 32 | 33 | defer func() { 34 | if err := producer.Close(); err != nil { 35 | // Should not reach here 36 | panic(err) 37 | } 38 | }() 39 | 40 | http.HandleFunc("/", KafkaLogProducerWrapper(producer, HelloHandler)) 41 | 42 | log.Printf("[INFO] start server on :8080") 43 | http.ListenAndServe(":8080", nil) 44 | return 0 45 | } 46 | 47 | func HelloHandler(w http.ResponseWriter, r *http.Request) { 48 | w.WriteHeader(http.StatusOK) 49 | w.Write([]byte("Hello, world!\n")) 50 | } 51 | 52 | // KafkaLogProducerWrapper is an http.HandlerFunc wrapper that sends an access log to Apache Kafka 53 | func KafkaLogProducerWrapper(producer sarama.AsyncProducer, fn http.HandlerFunc) http.HandlerFunc { 54 | return func(w http.ResponseWriter, r *http.Request) { 55 | log.Printf("[INFO] %s %s %s%s", r.UserAgent(), r.Method, r.URL.Host, r.URL.Path) 56 | 57 | // Should execute the wrapped handler first 58 | fn(w, r) 59 | 60 | logEncoder := &AccessLogEncoder{ 61 | Time: strconv.Itoa(int(time.Now().Unix())), 62 | Method: r.Method, 63 | Host: r.Host, 64 | Path: r.RequestURI, 65 | IP: r.RemoteAddr, 66 | } 67 | 68 | msg := &sarama.ProducerMessage{ 69 | Topic: "important", 70 | Key: sarama.StringEncoder("app_hello"), 71 | Value: logEncoder, 72 | } 73 | 74 | producer.Input() <- msg 75 | } 76 | } 77 | 78 | // AccessLogEncoder is an access-log entry for the Kafka log producer 79 | // It implements the sarama.Encoder interface 80 | type AccessLogEncoder struct { 81 | Time string `json:"time"` 82 | Method string `json:"method"` 83 | Host string `json:"host"` 84 | Path string `json:"path"` 85 | IP string `json:"ip"` 86 | } 87 | 88 | func (a *AccessLogEncoder) Encode() ([]byte, error) { 89 | return json.Marshal(a) 90 | } 91 | 92 | func (a *AccessLogEncoder) Length() int { 93 | encoded, _ := json.Marshal(a) 94 | return len(encoded) 95 | } 96 | --------------------------------------------------------------------------------