├── .gitignore
├── Dockerfile
├── cmd
│   ├── consumer
│   │   └── main.go
│   └── producer
│       └── main.go
├── docker-compose.yaml
├── go.mod
└── go.sum

/.gitignore:
--------------------------------------------------------------------------------
.idea/
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
FROM golang:1.16

WORKDIR /go/src
ENV PATH="/go/bin:${PATH}"

# librdkafka is required by confluent-kafka-go's cgo bindings.
RUN apt-get update && \
    apt-get install -y build-essential librdkafka-dev

# Keep the container running so we can exec into it during development.
CMD ["tail", "-f", "/dev/null"]
--------------------------------------------------------------------------------
/cmd/consumer/main.go:
--------------------------------------------------------------------------------
package main

import (
	"fmt"
	"log"

	"github.com/confluentinc/confluent-kafka-go/kafka"
)

func main() {
	configMap := &kafka.ConfigMap{
		// The Kafka broker is reached by its docker-compose container name.
		"bootstrap.servers": "gokafka_kafka_1:9092",
		"client.id":         "goapp-consumer",
		"group.id":          "goapp-group2",
		"auto.offset.reset": "earliest",
	}
	c, err := kafka.NewConsumer(configMap)
	if err != nil {
		log.Fatalln("error creating consumer:", err.Error())
	}
	defer c.Close()

	topics := []string{"teste"}
	if err := c.SubscribeTopics(topics, nil); err != nil {
		log.Fatalln("error subscribing to topics:", err.Error())
	}

	// Block indefinitely for each message and print it as it arrives.
	for {
		msg, err := c.ReadMessage(-1)
		if err != nil {
			log.Println("error reading message:", err.Error())
			continue
		}
		fmt.Println(string(msg.Value), msg.TopicPartition)
	}
}
--------------------------------------------------------------------------------
/cmd/producer/main.go:
--------------------------------------------------------------------------------
package main

import (
	"fmt"
	"log"

	"github.com/confluentinc/confluent-kafka-go/kafka"
)

func main() {
	deliveryChan := make(chan kafka.Event)
	producer := NewKafkaProducer()
	if err := Publish("transferiu", "teste", producer, []byte("transferecia2"), deliveryChan); err != nil {
		log.Fatalln("error publishing message:", err.Error())
	}
	// Blocks handling delivery reports; the confirmation arrives asynchronously.
	DeliveryReport(deliveryChan)

	// Synchronous alternative: wait for a single delivery report on deliveryChan.
	//e := <-deliveryChan
	//msg := e.(*kafka.Message)
	//if msg.TopicPartition.Error != nil {
	//	fmt.Println("error delivering message")
	//} else {
	//	fmt.Println("message delivered:", msg.TopicPartition)
	//}
}

func NewKafkaProducer() *kafka.Producer {
	configMap := &kafka.ConfigMap{
		"bootstrap.servers":   "gokafka_kafka_1:9092",
		"delivery.timeout.ms": "0",
		// Idempotent producer: requires acks=all so every in-sync replica acknowledges the write.
		"acks":               "all",
		"enable.idempotence": "true",
	}
	p, err := kafka.NewProducer(configMap)
	if err != nil {
		log.Fatalln("error creating producer:", err.Error())
	}
	return p
}

func Publish(msg string, topic string, producer *kafka.Producer, key []byte, deliveryChan chan kafka.Event) error {
	message := &kafka.Message{
		Value:          []byte(msg),
		TopicPartition: kafka.TopicPartition{Topic: &topic, Partition: kafka.PartitionAny},
		Key:            key,
	}
	return producer.Produce(message, deliveryChan)
}

func DeliveryReport(deliveryChan chan kafka.Event) {
	for e := range deliveryChan {
		switch ev := e.(type) {
		case *kafka.Message:
			if ev.TopicPartition.Error != nil {
				fmt.Println("error delivering message")
			} else {
				fmt.Println("message delivered:", ev.TopicPartition)
				// Record in the database that the message was processed,
				// e.g. confirm that a bank transfer took place.
			}
		}
	}
}
--------------------------------------------------------------------------------
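Note: the producer above uses a dedicated delivery channel and a blocking DeliveryReport loop. For comparison, here is a minimal sketch (not part of the repository) of the other pattern confluent-kafka-go supports: passing nil to Produce so delivery reports arrive on the producer's default Events() channel, and calling Flush before exit so the process does not terminate with messages still in flight. Broker address, topic and payload are the same values used above.

package main

import (
	"fmt"
	"log"

	"github.com/confluentinc/confluent-kafka-go/kafka"
)

func main() {
	p, err := kafka.NewProducer(&kafka.ConfigMap{"bootstrap.servers": "gokafka_kafka_1:9092"})
	if err != nil {
		log.Fatalln("error creating producer:", err.Error())
	}
	defer p.Close()

	// Delivery reports arrive on the producer's default Events() channel
	// because Produce is called with a nil delivery channel.
	go func() {
		for e := range p.Events() {
			switch ev := e.(type) {
			case *kafka.Message:
				if ev.TopicPartition.Error != nil {
					fmt.Println("delivery failed:", ev.TopicPartition.Error)
				} else {
					fmt.Println("delivered to:", ev.TopicPartition)
				}
			}
		}
	}()

	topic := "teste"
	if err := p.Produce(&kafka.Message{
		TopicPartition: kafka.TopicPartition{Topic: &topic, Partition: kafka.PartitionAny},
		Value:          []byte("transferiu"),
	}, nil); err != nil {
		log.Fatalln("error producing message:", err.Error())
	}

	// Wait up to 15 seconds so in-flight messages are delivered before exit.
	p.Flush(15 * 1000)
}
--------------------------------------------------------------------------------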
/docker-compose.yaml:
--------------------------------------------------------------------------------
version: "3"

services:
  app:
    build: .
    container_name: gokafka
    volumes:
      - .:/go/src/
    extra_hosts:
      - "host.docker.internal:172.17.0.1"

  zookeeper:
    image: confluentinc/cp-zookeeper:latest
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
    extra_hosts:
      - "host.docker.internal:172.17.0.1"

  kafka:
    image: confluentinc/cp-kafka:latest
    depends_on:
      - zookeeper
    ports:
      - "9092:9092"
      - "9094:9094"
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
      KAFKA_LISTENERS: INTERNAL://:9092,OUTSIDE://:9094
      KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka:9092,OUTSIDE://host.docker.internal:9094
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT,OUTSIDE:PLAINTEXT
    extra_hosts:
      - "host.docker.internal:172.17.0.1"

  control-center:
    image: confluentinc/cp-enterprise-control-center:6.0.1
    hostname: control-center
    depends_on:
      - kafka
    ports:
      - "9021:9021"
    environment:
      CONTROL_CENTER_BOOTSTRAP_SERVERS: 'kafka:9092'
      CONTROL_CENTER_REPLICATION_FACTOR: 1
      CONTROL_CENTER_CONNECT_CLUSTER: http://app-connect:8083
      PORT: 9021
    extra_hosts:
      - "host.docker.internal:172.17.0.1"
--------------------------------------------------------------------------------
/go.mod:
--------------------------------------------------------------------------------
module github.com/codeedu/fc2-gokafka

go 1.16

require github.com/confluentinc/confluent-kafka-go v1.7.0
--------------------------------------------------------------------------------
/go.sum:
--------------------------------------------------------------------------------
github.com/confluentinc/confluent-kafka-go v1.7.0 h1:tXh3LWb2Ne0WiU3ng4h5qiGA9XV61rz46w60O+cq8bM=
github.com/confluentinc/confluent-kafka-go v1.7.0/go.mod h1:u2zNLny2xq+5rWeTQjFHbDzzNuba4P1vo31r9r4uAdg=
--------------------------------------------------------------------------------
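Note: the consumer in cmd/consumer/main.go blocks forever in ReadMessage and never closes the consumer, so the group only notices it is gone after the session timeout. Below is a minimal sketch (not part of the repository) of a graceful-shutdown variant: poll with a short timeout, trap SIGINT/SIGTERM, and Close() the consumer on the way out. Broker, group and topic values are the same ones used above.

package main

import (
	"fmt"
	"log"
	"os"
	"os/signal"
	"syscall"

	"github.com/confluentinc/confluent-kafka-go/kafka"
)

func main() {
	c, err := kafka.NewConsumer(&kafka.ConfigMap{
		"bootstrap.servers": "gokafka_kafka_1:9092",
		"group.id":          "goapp-group2",
		"auto.offset.reset": "earliest",
	})
	if err != nil {
		log.Fatalln("error creating consumer:", err.Error())
	}
	// Close() leaves the group cleanly so a rebalance happens right away.
	defer c.Close()

	if err := c.SubscribeTopics([]string{"teste"}, nil); err != nil {
		log.Fatalln("error subscribing to topics:", err.Error())
	}

	sigs := make(chan os.Signal, 1)
	signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)

	for {
		select {
		case sig := <-sigs:
			fmt.Println("terminating on signal:", sig)
			return
		default:
			// Poll with a short timeout so the signal check above runs regularly.
			ev := c.Poll(100)
			switch e := ev.(type) {
			case *kafka.Message:
				fmt.Println(string(e.Value), e.TopicPartition)
			case kafka.Error:
				fmt.Println("consumer error:", e)
			}
		}
	}
}
--------------------------------------------------------------------------------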