├── .gitignore ├── src ├── kafka-message.adb ├── wrapper.c ├── kafka-topic.adb ├── kafka-config.adb ├── kafka-topic-config.adb ├── kafka-consumer.adb ├── kafka-message.ads ├── kafka-topic-partition.adb ├── kafka-config.ads ├── kafka-topic-config.ads ├── kafka-topic.ads ├── kafka-consumer.ads ├── kafka-topic-partition.ads ├── kafka.adb └── kafka.ads ├── examples ├── getcommandargument.ads ├── signal.ads ├── signal.adb ├── getcommandargument.adb ├── simple_producer.adb └── simple_consumer.adb ├── kafkaada_examples.gpr ├── kafkaada.gpr ├── LICENSE ├── .gitlab-ci.yml └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | obj/ 2 | bin/ 3 | .clang-format 4 | .idea/ 5 | *.kate-swp 6 | *.fuse_hidden* 7 | tmp/ -------------------------------------------------------------------------------- /src/kafka-message.adb: -------------------------------------------------------------------------------- 1 | package body Kafka.Message is 2 | function Get_Error(Message : access constant Message_Type) return String is 3 | begin 4 | return Interfaces.C.Strings.Value(rd_kafka_message_errstr(Message)); 5 | end Get_Error; 6 | end Kafka.Message; 7 | -------------------------------------------------------------------------------- /examples/getcommandargument.ads: -------------------------------------------------------------------------------- 1 | generic 2 | Switch_Arg : in String; -- ex: "-t:" 3 | Long_Switch_Arg: in String; -- ex: "--topic:" 4 | Help_Text : in String; -- ex: "Topic name to use" 5 | 6 | package GetCommandArgument is 7 | 8 | function Parse_Command_Line (DefaultValue : in String) return String; 9 | 10 | end GetCommandArgument; -------------------------------------------------------------------------------- /src/wrapper.c: -------------------------------------------------------------------------------- 1 | 2 | #include 3 | 4 | // We have to do this because of https://github.com/edenhill/librdkafka/issues/2822 5 | // to 
support older versions of librdkafka (e.g. Ubuntu 20) 6 | RD_EXPORT const char* rd_kafka_message_errstr_wrapper(const rd_kafka_message_t* rkmessage) 7 | { 8 | return rd_kafka_message_errstr(rkmessage); 9 | } 10 | -------------------------------------------------------------------------------- /kafkaada_examples.gpr: -------------------------------------------------------------------------------- 1 | project KafkaAda_Examples is 2 | for Languages use ("Ada", "C"); 3 | for Source_Dirs use ("src/**", "examples/**"); 4 | for Object_Dir use "obj"; 5 | for Exec_Dir use "bin"; 6 | 7 | for Main use ("simple_producer.adb", "simple_consumer.adb"); 8 | 9 | package Compiler is 10 | for Switches ("Ada") use ("-gnatX"); 11 | end Compiler; 12 | package Linker is 13 | for Switches ("Ada") use ("-lrdkafka"); 14 | end Linker; 15 | end KafkaAda_Examples; 16 | -------------------------------------------------------------------------------- /examples/signal.ads: -------------------------------------------------------------------------------- 1 | with Ada.Interrupts.Names; 2 | 3 | package Signal is 4 | 5 | protected type Handler is 6 | function Triggered return Boolean; 7 | private 8 | Signal_Received : Boolean := False; 9 | 10 | pragma Unreserve_All_Interrupts; 11 | procedure Handle_Int with Attach_Handler => Ada.Interrupts.Names.SIGINT; 12 | procedure Handle_Quit with Attach_Handler => Ada.Interrupts.Names.SIGQUIT; 13 | procedure Handle_Term with Attach_Handler => Ada.Interrupts.Names.SIGTERM; 14 | end Handler; 15 | 16 | end Signal; 17 | -------------------------------------------------------------------------------- /examples/signal.adb: -------------------------------------------------------------------------------- 1 | package body Signal is 2 | 3 | protected body Handler is 4 | 5 | function Triggered return Boolean is (Signal_Received); 6 | 7 | procedure Handle_Int is 8 | begin 9 | Signal_Received := True; 10 | end Handle_Int; 11 | 12 | procedure Handle_Quit is 13 | begin 14 | 
Signal_Received := True; 15 | end Handle_Quit; 16 | 17 | procedure Handle_Term is 18 | begin 19 | Signal_Received := True; 20 | end Handle_Term; 21 | 22 | end Handler; 23 | 24 | end Signal; 25 | -------------------------------------------------------------------------------- /kafkaada.gpr: -------------------------------------------------------------------------------- 1 | project KafkaAda is 2 | for Languages use ("Ada", "C"); 3 | for Source_Dirs use ("src/**"); 4 | for Object_Dir use "obj"; 5 | 6 | for Interfaces use ("kafka.ads", 7 | "kafka-config.ads", 8 | "kafka-consumer.ads", 9 | "kafka-message.ads", 10 | "kafka-topic.ads", 11 | "kafka-topic-partition.ads"); 12 | 13 | for Library_Name use "kafka"; 14 | for Library_Dir use "bin"; 15 | for Library_Kind use "dynamic"; 16 | 17 | package Compiler is 18 | for Switches ("Ada") use ("-gnatX"); 19 | end Compiler; 20 | end KafkaAda; 21 | -------------------------------------------------------------------------------- /examples/getcommandargument.adb: -------------------------------------------------------------------------------- 1 | with GNAT.Command_Line; use GNAT.Command_Line; 2 | with GNAT.Strings; 3 | 4 | package body GetCommandArgument is 5 | 6 | function Parse_Command_Line (DefaultValue : in String) return String is 7 | VAR : aliased GNAT.Strings.String_Access; 8 | Cmd_Line_Setup : Command_Line_Configuration; 9 | begin 10 | Define_Switch(Config => Cmd_Line_Setup, 11 | Output => VAR'Access, 12 | Switch => Switch_Arg, 13 | Long_Switch => Long_Switch_Arg, 14 | Help => Help_Text); 15 | Getopt(Cmd_Line_Setup); 16 | GNAT.Command_Line.Free(Cmd_Line_Setup); 17 | return (if VAR.all = "" then DefaultValue else VAR.all); 18 | exception 19 | when Exit_From_Command_Line => 20 | GNAT.Command_Line.Free(Cmd_Line_Setup); 21 | raise Exit_From_Command_Line; 22 | end; 23 | 24 | end GetCommandArgument; -------------------------------------------------------------------------------- /src/kafka-topic.adb: 
-------------------------------------------------------------------------------- 1 | package body Kafka.Topic is 2 | function Create_Topic_Handle(Handle : Handle_Type; 3 | Topic : String; 4 | Config : Topic_Config_Type) return Topic_Type is 5 | C_Topic : chars_ptr := New_String(Topic); 6 | Topic_Handle : Topic_Type; 7 | begin 8 | Topic_Handle := rd_kafka_topic_new(Handle, C_Topic, Config); 9 | Free(C_Topic); 10 | return Topic_Handle; 11 | end; 12 | 13 | function Create_Topic_Handle(Handle : Handle_Type; 14 | Topic : String) return Topic_Type is 15 | begin 16 | return Create_Topic_Handle(Handle, Topic, Topic_Config_Type(System.Null_Address)); 17 | end; 18 | 19 | function Get_Name(Topic : Topic_Type) return String is 20 | begin 21 | return Interfaces.C.Strings.Value(rd_kafka_topic_name(Topic)); 22 | end; 23 | end Kafka.Topic; 24 | -------------------------------------------------------------------------------- /src/kafka-config.adb: -------------------------------------------------------------------------------- 1 | package body Kafka.Config is 2 | Error_Buffer_Size : constant size_t := 512; 3 | RD_Kafka_Conf_OK : constant Integer := 0; 4 | 5 | procedure Set(Config : Config_Type; 6 | Name : String; 7 | Value : String) is 8 | C_Name : chars_ptr := New_String(Name); 9 | C_Value : chars_ptr := New_String(Value); 10 | C_Err : chars_ptr := Alloc(Error_Buffer_Size); 11 | Result : Integer; 12 | begin 13 | Result := rd_kafka_conf_set(Config, C_Name, C_Value, C_Err, Error_Buffer_Size); 14 | 15 | if Result /= RD_Kafka_Conf_OK then 16 | declare 17 | Error : String := Interfaces.C.Strings.Value(C_Err); 18 | begin 19 | Free(C_Name); 20 | Free(C_Value); 21 | Free(C_Err); 22 | raise Kafka_Error with Error; 23 | end; 24 | end if; 25 | 26 | Free(C_Name); 27 | Free(C_Value); 28 | Free(C_Err); 29 | end Set; 30 | end Kafka.Config; 31 | -------------------------------------------------------------------------------- /src/kafka-topic-config.adb: 
-------------------------------------------------------------------------------- 1 | package body Kafka.Topic.Config is 2 | Error_Buffer_Size : constant size_t := 512; 3 | RD_Kafka_Conf_OK : constant Integer := 0; 4 | 5 | procedure Set(Config : Topic_Config_Type; 6 | Name : String; 7 | Value : String) is 8 | C_Name : chars_ptr := New_String(Name); 9 | C_Value : chars_ptr := New_String(Value); 10 | C_Err : chars_ptr := Alloc(Error_Buffer_Size); 11 | Result : Integer; 12 | begin 13 | Result := rd_kafka_topic_conf_set(Config, C_Name, C_Value, C_Err, Error_Buffer_Size); 14 | 15 | if Result /= RD_Kafka_Conf_OK then 16 | declare 17 | Error : String := Interfaces.C.Strings.Value(C_Err); 18 | begin 19 | Free(C_Name); 20 | Free(C_Value); 21 | Free(C_Err); 22 | raise Kafka_Error with Error; 23 | end; 24 | end if; 25 | 26 | Free(C_Name); 27 | Free(C_Value); 28 | Free(C_Err); 29 | end Set; 30 | end Kafka.Topic.Config; -------------------------------------------------------------------------------- /src/kafka-consumer.adb: -------------------------------------------------------------------------------- 1 | package body Kafka.Consumer is 2 | 3 | procedure Poll_Set_Consumer(Handle : Handle_Type) is 4 | Response : Kafka_Response_Error_Type; 5 | begin 6 | Response := rd_kafka_poll_set_consumer(Handle); 7 | if Response /= RD_KAFKA_RESP_ERR_NO_ERROR then 8 | raise Kafka_Error with "Error returned by rd_kafka_poll_set_consumer: " & Kafka.Get_Error_Name(Response); 9 | end if; 10 | end Poll_Set_Consumer; 11 | 12 | function Poll(Handle : Handle_Type; 13 | Timeout : Duration) return access Message_Type is 14 | begin 15 | return rd_kafka_consumer_poll(Handle, int(Timeout * 1000)); 16 | end Poll; 17 | 18 | procedure Close(Handle : Handle_Type) is 19 | Response : Kafka_Response_Error_Type; 20 | begin 21 | Response := rd_kafka_consumer_close(Handle); 22 | if Response /= RD_KAFKA_RESP_ERR_NO_ERROR then 23 | raise Kafka_Error with "Error returned by rd_kafka_consumer_close: " & 
Kafka.Get_Error_Name(Response); 24 | end if; 25 | end Close; 26 | end Kafka.Consumer; 27 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Latence Technologies 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /src/kafka-message.ads: -------------------------------------------------------------------------------- 1 | -- 2 | -- Provides Kafka functionality to manage messages 3 | -- 4 | package Kafka.Message is 5 | -- 6 | -- Frees resources for the specified Message and hands ownership back to 7 | -- rdkafka. 
8 | -- 9 | -- librdkafka equivalent: rd_kafka_message_destroy 10 | -- 11 | procedure Destroy(Message : access Message_Type) 12 | with Import => True, 13 | Convention => C, 14 | External_Name => "rd_kafka_message_destroy"; 15 | 16 | -- 17 | -- Returns the error string for an errored Message or empty string if there 18 | -- was no error. 19 | -- 20 | -- This function MUST NOT be used with the producer. 21 | -- 22 | -- librdkafka equivalent: rd_kafka_message_errstr 23 | -- 24 | -- @param Message message to get the error of 25 | -- @returns string describing the error 26 | -- 27 | function Get_Error(Message : access constant Message_Type) return String; 28 | private 29 | 30 | function rd_kafka_message_errstr(Message : access constant Message_Type) return chars_ptr 31 | with Import => True, 32 | Convention => C, 33 | External_Name => "rd_kafka_message_errstr_wrapper"; 34 | 35 | end Kafka.Message; 36 | -------------------------------------------------------------------------------- /src/kafka-topic-partition.adb: -------------------------------------------------------------------------------- 1 | package body Kafka.Topic.Partition is 2 | 3 | procedure List_Add(List : Partition_List_Type; 4 | Topic : String; 5 | Partition : Integer_32) is 6 | C_Topic : chars_ptr := New_String(Topic); 7 | Unused : System.Address; 8 | begin 9 | Unused := rd_kafka_topic_partition_list_add(List, C_Topic, Partition); 10 | Free(C_Topic); 11 | end List_Add; 12 | 13 | procedure List_Add_Range(List : Partition_List_Type; 14 | Topic : String; 15 | Start : Integer_32; 16 | Stop : Integer_32) is 17 | C_Topic : chars_ptr := New_String(Topic); 18 | begin 19 | rd_kafka_topic_partition_list_add_range(List, C_Topic, Start, Stop); 20 | Free(C_Topic); 21 | end List_Add_Range; 22 | 23 | 24 | function List_Delete(List : Partition_List_Type; 25 | Topic : String; 26 | Partition : Integer_32) return Boolean is 27 | C_Topic : chars_ptr := New_String(Topic); 28 | Result : int; 29 | begin 30 | Result := 
rd_kafka_topic_partition_list_del(List, C_Topic, Partition); 31 | Free(C_Topic); 32 | return Result = 1; 33 | end List_Delete; 34 | 35 | end Kafka.Topic.Partition; 36 | -------------------------------------------------------------------------------- /.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | # 2 | # Welcome to the Gitlab CI for Kafka Ada 3 | # 4 | # The CI pulls docker images from https://hub.docker.com/r/alexanderlatence/gprbuild 5 | # to get gprbuild and builds the project in its intended way, using the following command: 6 | # 7 | # gprbuild kafka.gpr 8 | # 9 | # Then it retrieves the build Kafka Ada library that is in the root folder of the project 10 | # 11 | 12 | 13 | stages: 14 | - build # every task only has a build stage for now 15 | 16 | gcc-9-job: # task to compile with gcc-9 and gcc-ada-9, uses Ubuntu 20 17 | stage: build 18 | script: 19 | - mkdir -p obj bin 20 | - docker pull alexanderlatence/gprbuild:9 # pulls the image (gprbuild:9 for gcc-9) 21 | # runs the container, creates obj/ folder to compile, runs the gprbuild command (from /build9 inside the container) and then deletes the obj/ folder 22 | - docker run --rm -v $(pwd):/build9 alexanderlatence/gprbuild:9 gprbuild -d /build9/kafkaada.gpr 23 | - rm -rf obj 24 | artifacts: 25 | paths: 26 | - bin/libkafka.a 27 | 28 | gcc-10-job: # task to compile with gcc-10 and gcc-ada-10, uses Ubuntu 20 29 | stage: build 30 | script: 31 | - mkdir -p obj bin 32 | - docker pull alexanderlatence/gprbuild:10 33 | - docker run --rm -v $(pwd):/build10 alexanderlatence/gprbuild:10 gprbuild -d /build10/kafkaada.gpr 34 | - rm -rf obj 35 | artifacts: 36 | paths: 37 | - bin/libkafka.a 38 | 39 | gcc-11-job: # task to compile with gcc-11 and gcc-ada-11, uses Ubuntu 21 40 | stage: build 41 | script: 42 | - mkdir -p obj bin 43 | - docker pull alexanderlatence/gprbuild:11 44 | - docker run --rm -v $(pwd):/build11 alexanderlatence/gprbuild:11 gprbuild -d 
/build11/kafkaada.gpr 45 | - rm -rf obj 46 | artifacts: 47 | paths: 48 | - bin/libkafka.a 49 | 50 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Kafka Ada 2 | 3 | This project offers Ada bindings for the C librdkafka library. You can use it to send and receive from a Kafka bus from Ada. Currently work in progress, it does not offer all functionalities of librdkafka yet. If what you are looking for is simply producing data onto a kafka bus or consuming data, this library likely already supports the features you need. 4 | 5 | Supported functionalities: 6 | 7 | - Basic error handling 8 | - Kafka handle creation and destruction 9 | - Topic handling 10 | - Message handling 11 | - Basic producer functionalities 12 | - Basic consumer functionalities 13 | - Kafka configuration 14 | - Topic configuration 15 | 16 | Not yet supported: 17 | 18 | - Retrieving debug contexts 19 | - Advanced error handling (Description, fatal errors, retriable errors etc.) 20 | - Partition handling 21 | - Message headers 22 | - Message status 23 | - Event sourcing and event handling 24 | - Callbacks 25 | - Broker handling 26 | - Broker metadata 27 | - Queue consumer 28 | - Batch consumer 29 | - Atomic assignment of partitions 30 | - Kafka logging 31 | - Topic creation and deletion and other admin operations 32 | - Various other advanced functionalities offered by librdkafka 33 | 34 | If you need some of those not supported features, feel free to open a pull request. 35 | 36 | ## Building and installing from source 37 | 38 | To install this library on your system, run the following command: 39 | 40 | ```bash 41 | gprbuild -p kafkaada.gpr 42 | sudo gprinstall -p -f kafkaada.gpr 43 | ``` 44 | 45 | Note: You need to have librdkafka already installed. 
46 | 47 | ## Usage 48 | 49 | See the `examples/` folder for code examples showcasing a basic consumer and a 50 | basic producer using Kafka Ada. 51 | 52 | You can use the `-t` (or `--topic`) argument to change the name of the Kafka Topic when using either example. 53 | 54 | ## Latence Technologies 55 | 56 | This Ada binding is offered by LatenceTech, a Montreal based startup specialized in low-latency optimization. Our website is https://latencetech.com/ 57 | -------------------------------------------------------------------------------- /src/kafka-config.ads: -------------------------------------------------------------------------------- 1 | -- 2 | -- Provides the ability to create a configuration to be used when creating a 3 | -- Kafka handle 4 | -- 5 | package Kafka.Config is 6 | 7 | -- 8 | -- Creates a new kafka config object 9 | -- 10 | -- librdkafka equivalent: rd_kafka_conf_new 11 | -- 12 | function Create return Config_Type 13 | with Import => True, 14 | Convention => C, 15 | External_Name => "rd_kafka_conf_new"; 16 | 17 | -- 18 | -- Destroys a kafka config object 19 | -- 20 | -- librdkafka equivalent: rd_kafka_conf_destroy 21 | -- 22 | -- @param Config configuration to destroy 23 | -- 24 | procedure Destroy(Config : Config_Type) 25 | with Import => True, 26 | Convention => C, 27 | External_Name => "rd_kafka_conf_destroy"; 28 | 29 | -- 30 | -- Duplicates a kafka config object 31 | -- 32 | -- librdkafka equivalent: rd_kafka_conf_dup 33 | -- 34 | -- @param Config configuration to duplicate 35 | -- 36 | function Duplicate(Config : Config_Type) return Config_Type 37 | with Import => True, 38 | Convention => C, 39 | External_Name => "rd_kafka_conf_dup"; 40 | 41 | -- 42 | -- Sets a kafka config property for a given kafka config. 
43 | -- 44 | -- librdkafka equivalent: rd_kafka_conf_set 45 | -- 46 | -- @param Config configuration to set the property in 47 | -- @param Name name of property to set 48 | -- @param Value value of property to set 49 | -- @raises Kafka_Error on error 50 | -- 51 | procedure Set(Config : Config_Type; 52 | Name : String; 53 | Value : String); 54 | private 55 | 56 | function rd_kafka_conf_set(conf : Config_Type; 57 | name : chars_ptr; 58 | value : chars_ptr; 59 | errstr : chars_ptr; 60 | errstr_size : size_t) return Integer 61 | with Import => True, 62 | Convention => C, 63 | External_Name => "rd_kafka_conf_set"; 64 | end Kafka.Config; 65 | -------------------------------------------------------------------------------- /src/kafka-topic-config.ads: -------------------------------------------------------------------------------- 1 | 2 | 3 | package Kafka.Topic.Config is 4 | 5 | -- 6 | -- Creates a new kafka topic config object 7 | -- 8 | -- librdkafka equivalent: rd_kafka_topic_conf_new 9 | -- 10 | function Create return Topic_Config_Type 11 | with Import => True, 12 | Convention => C, 13 | External_Name => "rd_kafka_topic_conf_new"; 14 | 15 | -- 16 | -- Destroys a kafka topic config object 17 | -- 18 | -- librdkafka equivalent: rd_kafka_topic_conf_destroy 19 | -- 20 | -- @param Config configuration to destroy 21 | -- 22 | procedure Destroy(Config : Topic_Config_Type) 23 | with Import => True, 24 | Convention => C, 25 | External_Name => "rd_kafka_topic_conf_destroy"; 26 | 27 | -- 28 | -- Duplicates a kafka topic config object 29 | -- 30 | -- librdkafka equivalent: rd_kafka_topic_conf_dup 31 | -- 32 | -- @param Config configuration to duplicate 33 | -- 34 | function Duplicate(Config : Topic_Config_Type) return Topic_Config_Type 35 | with Import => True, 36 | Convention => C, 37 | External_Name => "rd_kafka_topic_conf_dup"; 38 | 39 | -- 40 | -- Sets a kafka topic config property for a given kafka topic config. 
41 | -- 42 | -- librdkafka equivalent: rd_kafka_topic_conf_set 43 | -- 44 | -- @param Config configuration to set the property in 45 | -- @param Name name of property to set 46 | -- @param Value value of property to set 47 | -- @raises Kafka_Error on error 48 | -- 49 | procedure Set(Config : Topic_Config_Type; 50 | Name : String; 51 | Value : String); 52 | private 53 | 54 | function rd_kafka_topic_conf_set(conf : Topic_Config_Type; 55 | name : chars_ptr; 56 | value : chars_ptr; 57 | errstr : chars_ptr; 58 | errstr_size : size_t) return Integer 59 | with Import => True, 60 | Convention => C, 61 | External_Name => "rd_kafka_topic_conf_set"; 62 | 63 | end Kafka.Topic.Config; -------------------------------------------------------------------------------- /src/kafka-topic.ads: -------------------------------------------------------------------------------- 1 | -- 2 | -- Provides kafka functionality to interact with Topics 3 | -- 4 | package Kafka.Topic is 5 | 6 | -- 7 | -- Creates a handle for a given topic. Does not perform the admin command 8 | -- to create a topic 9 | -- 10 | -- librdkafka equivalent: rd_kafka_topic_new 11 | -- 12 | function Create_Topic_Handle(Handle : Handle_Type; 13 | Topic : String; 14 | Config : Topic_Config_Type) return Topic_Type; 15 | 16 | 17 | -- 18 | -- Creates a handle for a given topic. 
Does not perform the admin command 19 | -- to create a topic 20 | -- 21 | -- librdkafka equivalent: rd_kafka_topic_new 22 | -- 23 | function Create_Topic_Handle(Handle : Handle_Type; 24 | Topic : String) return Topic_Type; 25 | 26 | -- 27 | -- Destroys the specified topic handle 28 | -- 29 | -- librdkafka equivalent: rd_kafka_topic_destroy 30 | -- 31 | procedure Destroy_Topic_Handle(Topic : Topic_Type) 32 | with Import => True, 33 | Convention => C, 34 | External_Name => "rd_kafka_topic_destroy"; 35 | 36 | -- 37 | -- Returns the name of a given topic 38 | -- 39 | -- librdkafka equivalent: rd_kafka_topic_name 40 | -- 41 | function Get_Name(Topic : Topic_Type) return String; 42 | 43 | -- 44 | -- Returns the opaque for a given topic 45 | -- 46 | -- librdkafka equivalent: rd_kafka_topic_opaque 47 | -- 48 | function Get_Opaque(Topic : Topic_Type) return System.Address 49 | with Import => True, 50 | Convention => C, 51 | External_Name => "rd_kafka_topic_opaque"; 52 | 53 | private 54 | 55 | function rd_kafka_topic_new(Handle : Handle_Type; 56 | Topic : chars_ptr; 57 | Config : Topic_Config_Type) return Topic_Type 58 | with Import => True, 59 | Convention => C, 60 | External_Name => "rd_kafka_topic_new"; 61 | 62 | 63 | function rd_kafka_topic_name(Topic : Topic_Type) return chars_ptr 64 | with Import => True, 65 | Convention => C, 66 | External_Name => "rd_kafka_topic_name"; 67 | end Kafka.Topic; 68 | -------------------------------------------------------------------------------- /examples/simple_producer.adb: -------------------------------------------------------------------------------- 1 | with Ada.Text_IO; 2 | with GNAT.Sockets; 3 | with Interfaces; 4 | with Kafka; 5 | with Kafka.Config; 6 | with Kafka.Topic; 7 | with System; 8 | with GetCommandArgument; 9 | 10 | -- 11 | -- Basic Kafka producer 12 | -- 13 | procedure Simple_Producer is 14 | Config : Kafka.Config_Type; 15 | Handle : Kafka.Handle_Type; 16 | Topic : Kafka.Topic_Type; 17 | 18 | package CommandTopic is 
new GetCommandArgument ("-t:", "--topic:", "Topic name to use"); 19 | begin 20 | Ada.Text_IO.Put_Line("Kafka version: " & Kafka.Version); 21 | 22 | -- Create a new config object 23 | Config := Kafka.Config.Create; 24 | 25 | -- Configure your properties 26 | Kafka.Config.Set(Config, "client.id", GNAT.Sockets.Host_name); 27 | Kafka.Config.Set(Config, "bootstrap.servers", "localhost:9092"); 28 | 29 | -- Create handle 30 | Handle := Kafka.Create_Handle(Kafka.RD_KAFKA_PRODUCER, Config); 31 | 32 | -- Create topic handle 33 | Topic := Kafka.Topic.Create_Topic_Handle(Handle, 34 | CommandTopic.Parse_Command_Line("test_topic")); -- topic must already exist 35 | 36 | -- Producing a String 37 | Kafka.Produce(Topic, 38 | Kafka.RD_KAFKA_PARTITION_UA, 39 | "World", -- payload 40 | "Hello", -- key 41 | System.Null_Address); 42 | 43 | -- Producing binary data 44 | declare 45 | type Some_Message is record 46 | A : Interfaces.Unsigned_32; 47 | B : Interfaces.Unsigned_64; 48 | C : Interfaces.Unsigned_8; 49 | end record 50 | with Convention => C; 51 | 52 | for Some_Message use record 53 | A at 0 range 0 .. 31; 54 | B at 4 range 0 .. 63; 55 | C at 12 range 0 .. 
7; 56 | end record; 57 | 58 | for Some_Message'Bit_Order use System.High_Order_First; 59 | for Some_Message'Scalar_Storage_Order use System.High_Order_First; 60 | 61 | Message : Some_Message := (A => 55, B => 40002, C => 13); 62 | begin 63 | Kafka.Produce(Topic, 64 | Kafka.RD_KAFKA_PARTITION_UA, 65 | Kafka.RD_KAFKA_MSG_F_COPY, 66 | Message'Address, 67 | 13, 68 | System.Null_Address, -- key is optional 69 | 0, 70 | System.Null_Address); 71 | end; 72 | 73 | Kafka.Poll(Handle, 0.0); 74 | Kafka.Flush(Handle, 15.0); 75 | 76 | Kafka.Destroy_Handle(Handle); 77 | end Simple_Producer; -------------------------------------------------------------------------------- /src/kafka-consumer.ads: -------------------------------------------------------------------------------- 1 | -- 2 | -- Provides Kafka functionality to consume data 3 | -- 4 | package Kafka.Consumer is 5 | 6 | -- 7 | -- Redirect the main poll queue to the Kafka Consumer queue. It is not 8 | -- permitted to call Kafka.Poll after directing the main queue with Poll_Set_Consumer 9 | -- 10 | -- librdkafka equivalent: rd_kafka_poll_set_consumer 11 | -- 12 | -- @param Handle Kafka handle 13 | -- @raises Kafka_Error if an error happens 14 | -- 15 | procedure Poll_Set_Consumer(Handle : Handle_Type); 16 | 17 | -- 18 | -- Polls the consumer for messages or events. Will block for at most the 19 | -- Timeout specified as parameter. 20 | -- 21 | -- An application should make sure to call Kafka.Consumer.Poll() at regular 22 | -- intervals, even if no messages are expected, to serve any queued 23 | -- callbacks waiting to be called. This is especially important when a 24 | -- rebalance callback has been registered as it needs to be called and 25 | -- handled properly to synchronize internal consumer state. 
26 | -- 27 | -- librdkafka equivalent: rd_kafka_consumer_poll 28 | -- 29 | -- @param Handle Kafka handle 30 | -- @returns A message object which is a proper message if its field Error is 0 31 | -- @raises Kafka_Error if an error happens 32 | -- 33 | function Poll(Handle : Handle_Type; 34 | Timeout : Duration) return access Message_Type; 35 | 36 | -- 37 | -- Closes down the Kafka Consumer. 38 | -- 39 | -- This call will block until the consumer has revoked its assignment, 40 | -- calling the rebalance callback if it is configured, committed offsets 41 | -- to broker, and left the consumer group. 42 | -- The maximum blocking time is roughly limited to session.timeout.ms. 43 | -- 44 | -- The application still needs to call rd_kafka_destroy() after 45 | -- this call finishes to clean up the underlying handle resources. 46 | -- 47 | -- librdkafka equivalent: rd_kafka_consumer_close 48 | -- 49 | -- @raises Kafka_Error if an error happens 50 | procedure Close(Handle : Handle_Type); 51 | private 52 | 53 | function rd_kafka_poll_set_consumer(rk : Handle_Type) return Kafka_Response_Error_Type 54 | with Import => True, 55 | Convention => C, 56 | External_Name => "rd_kafka_poll_set_consumer"; 57 | 58 | function rd_kafka_consumer_poll(rk : Handle_Type; 59 | timeout_ms : int) return access Message_Type 60 | with Import => True, 61 | Convention => C, 62 | External_Name => "rd_kafka_consumer_poll"; 63 | 64 | function rd_kafka_consumer_close(rk : Handle_Type) return Kafka_Response_Error_Type 65 | with Import => True, 66 | Convention => C, 67 | External_Name => "rd_kafka_consumer_close"; 68 | end Kafka.Consumer; 69 | -------------------------------------------------------------------------------- /src/kafka-topic-partition.ads: -------------------------------------------------------------------------------- 1 | 2 | -- 3 | -- Provides Kafka functionality to create partition lists for a kafka consumer 4 | -- to subscribe to 5 | -- 6 | package Kafka.Topic.Partition is 7 | 8 | -- 9 | -- 
Creates a new list/vector Topic+Partition container. 10 | -- 11 | -- librdkafka equivalent: rd_kafka_topic_partition_list_new 12 | -- 13 | -- @param Size Initial allocated size used when the expected number of 14 | -- elements is known or can be estimated. 15 | -- Avoids reallocation and possibly relocation of the 16 | -- elems array. 17 | -- @returns A newly allocated Topic+Partition list. 18 | -- 19 | function Create_List(Size : Integer) return Partition_List_Type 20 | with Import => True, 21 | Convention => C, 22 | External_Name => "rd_kafka_topic_partition_list_new"; 23 | 24 | -- 25 | -- Free all resources used by the list and the list itself. 26 | -- 27 | -- librdkafka equivalent: rd_kafka_topic_partition_list_destroy 28 | -- 29 | procedure Destroy_List(List : Partition_List_Type) 30 | with Import => True, 31 | Convention => C, 32 | External_Name => "rd_kafka_topic_partition_list_destroy"; 33 | 34 | -- 35 | -- librdkafka equivalent: rd_kafka_topic_partition_list_add 36 | -- 37 | procedure List_Add(List : Partition_List_Type; 38 | Topic : String; 39 | Partition : Integer_32); 40 | 41 | -- 42 | -- librdkafka equivalent: rd_kafka_topic_partition_list_add_range 43 | -- 44 | -- @param List List to extend 45 | -- @param Topic Topic name 46 | -- @param Start Start partition of range 47 | -- @param Stop Last partition of range (inclusive) 48 | -- 49 | procedure List_Add_Range(List : Partition_List_Type; 50 | Topic : String; 51 | Start : Integer_32; 52 | Stop : Integer_32); 53 | 54 | -- 55 | -- Deletes the specified partition from the list 56 | -- 57 | -- librdkafka equivalent: rd_kafka_topic_partition_list_del 58 | -- 59 | -- @param List List to modify 60 | -- @param Topic Topic name to match 61 | -- @param Partition Partition to match 62 | -- @return True if the partition was found and removed, otherwise False 63 | -- 64 | function List_Delete(List : Partition_List_Type; 65 | Topic : String; 66 | Partition : Integer_32) return Boolean; 67 | private 68 | function 
rd_kafka_topic_partition_list_add(rktparlist : Partition_List_Type;
                                              topic      : chars_ptr;
                                              partition  : Integer_32) return System.Address
        with Import => True,
             Convention => C,
             External_Name => "rd_kafka_topic_partition_list_add";


    procedure rd_kafka_topic_partition_list_add_range(rktparlist : Partition_List_Type;
                                                      topic      : chars_ptr;
                                                      start      : Integer_32;
                                                      stop       : Integer_32)
        with Import => True,
             Convention => C,
             External_Name => "rd_kafka_topic_partition_list_add_range";

    function rd_kafka_topic_partition_list_del(rktparlist : Partition_List_Type;
                                               topic      : chars_ptr;
                                               partition  : Integer_32) return int
        with Import => True,
             Convention => C,
             External_Name => "rd_kafka_topic_partition_list_del";

end Kafka.Topic.Partition;
--------------------------------------------------------------------------------
/examples/simple_consumer.adb:
--------------------------------------------------------------------------------
with Ada.Text_IO;
with GNAT.Sockets;
with Interfaces;
with Interfaces.C;
with Kafka;
with Kafka.Config;
with Kafka.Consumer;
with Kafka.Message;
with Kafka.Topic;
with Kafka.Topic.Partition;
with Signal;
with GetCommandArgument;
with System;

--
-- Basic Kafka consumer: subscribes to one topic and prints the key and
-- payload of every received message until SIGINT/SIGQUIT/SIGTERM arrives.
--
procedure Simple_Consumer is
    use type System.Address;
    use type Interfaces.C.size_t;

    Config : Kafka.Config_Type;
    Handle : Kafka.Handle_Type;

    Handler : Signal.Handler; -- basic Signal handler to stop on CTRL + C
    package KafkaTopic is new GetCommandArgument ("-t:", "--topic:", "Topic name to use");
begin
    -- Create configuration
    Config := Kafka.Config.Create;

    -- Configure
    -- see librdkafka documentation on how to configure your Kafka consumer
    -- Kafka-Ada does not add any configuration entries of its own
    Kafka.Config.Set(Config, "group.id", GNAT.Sockets.Host_Name);
    Kafka.Config.Set(Config, "bootstrap.servers", "localhost:9092");
    Kafka.Config.Set(Config, "auto.offset.reset", "earliest");

    Handle := Kafka.Create_Handle(Kafka.RD_KAFKA_CONSUMER, Config);

    Kafka.Consumer.Poll_Set_Consumer(Handle);

    -- Subscribe to the requested topic; the partition list is only needed
    -- for the duration of the Subscribe call, so destroy it right after.
    declare
        Partition_List : Kafka.Partition_List_Type;
    begin
        Partition_List := Kafka.Topic.Partition.Create_List(1);
        Kafka.Topic.Partition.List_Add(Partition_List,
            KafkaTopic.Parse_Command_Line("test_topic"), Kafka.RD_KAFKA_PARTITION_UA);

        Kafka.Subscribe(Handle, Partition_List);
        Kafka.Topic.Partition.Destroy_List(Partition_List);
    end;

    while not Handler.Triggered loop
        declare
            Message : access Kafka.Message_Type;
        begin
            Message := Kafka.Consumer.Poll(Handle, Duration(0.1)); -- 100ms
            if Message = null then
                goto Continue;
            end if;

            -- Consistent with the named constant rather than a bare 0.
            if Message.Error /= Kafka.RD_KAFKA_RESP_ERR_NO_ERROR then
                Ada.Text_IO.Put_Line("Consumer error: " & Kafka.Message.Get_Error(Message));
                Kafka.Message.Destroy(Message);
                goto Continue;
            end if;

            Ada.Text_IO.Put_Line("A message was received");
            Ada.Text_IO.Put_Line("Topic: " & Kafka.Topic.Get_Name(Message.Topic));

            -- "and then" short-circuits so the address overlay below is only
            -- reached when the C side actually provided a key.
            if Message.Key /= System.Null_Address and then Message.Key_Length > 0 then
                declare
                    Key : aliased String(1 .. Integer(Message.Key_Length));
                    for Key'Address use Message.Key;
                begin
                    Ada.Text_IO.Put_Line("Key:");
                    Ada.Text_IO.Put_Line(Key);
                end;
            else
                Ada.Text_IO.Put_Line("Key is null");
            end if;

            if Message.Payload /= System.Null_Address and then Message.Payload_Length > 0 then
                declare
                    Payload : aliased String(1 .. Integer(Message.Payload_Length));
                    for Payload'Address use Message.Payload;
                begin
                    Ada.Text_IO.Put_Line("Payload:");
                    Ada.Text_IO.Put_Line(Payload);
                end;
            else
                Ada.Text_IO.Put_Line("Payload is null");
            end if;

            Kafka.Message.Destroy(Message);
        end;
        -- Ada 2022 allows a goto target at the end of a sequence of
        -- statements (enabled via -gnatX in the project file).
        <<Continue>>
    end loop;

    Kafka.Consumer.Close(Handle);
    Kafka.Destroy_Handle(Handle);
end Simple_Consumer;
--------------------------------------------------------------------------------
/src/kafka.adb:
--------------------------------------------------------------------------------
with System.Address_To_Access_Conversions;

package body Kafka is
    Error_Buffer_Size : constant size_t := 512;

    function Version return String is
    begin
        return Interfaces.C.Strings.Value(rd_kafka_version_str);
    end;

    function Get_Error_Name(Error_Code: Kafka_Response_Error_Type) return String is
    begin
        return Interfaces.C.Strings.Value(rd_kafka_err2name(Error_Code));
    end;

    function Create_Handle(HandleType : Kafka_Handle_Type;
                           Config     : Config_Type) return Handle_Type is
        C_Err  : chars_ptr := Alloc(Error_Buffer_Size);
        Handle : Handle_Type;
    begin
        Handle := rd_kafka_new(HandleType, Config, C_Err, Error_Buffer_Size);
        if Handle = Handle_Type(System.Null_Address) then
            declare
                Error : String := Interfaces.C.Strings.Value(C_Err);
            begin
                Free(C_Err);
                raise Kafka_Error with Error;
            end;
        end if;

        Free(C_Err);
        return Handle;
    end Create_Handle;

    procedure Flush(Handle  : Handle_Type;
                    Timeout : Duration) is
        Response : Kafka_Response_Error_Type;
    begin
        Response := rd_kafka_flush(Handle, int(Timeout * 1000));

        if Response = RD_KAFKA_RESP_ERR_u_TIMED_OUT then
            raise Timeout_Reached;
        elsif Response /= RD_KAFKA_RESP_ERR_NO_ERROR then
            raise Kafka_Error with "Unknown error returned by rd_kafka_flush: " &
Kafka.Get_Error_Name(Response);
        end if;
    end Flush;

    --
    -- Polls the provided kafka handle for events and returns the number of
    -- events served. Events cause application-provided callbacks to fire.
    --
    function Poll(Handle  : Handle_Type;
                  Timeout : Duration) return Integer is
    begin
        return Integer(rd_kafka_poll(Handle, int(Timeout * 1000)));
    end Poll;

    --
    -- Same as the Poll function, but deliberately discards the event count.
    --
    procedure Poll(Handle  : Handle_Type;
                   Timeout : Duration) is
        -- Discard idiom: the count is intentionally unused, and Unreferenced
        -- documents that and silences the "assigned but never read" warning.
        Ignored : constant Integer := Poll(Handle, Timeout)
            with Unreferenced;
    begin
        null;
    end Poll;

    --
    -- Produce and send a single message to the broker.
    --
    -- @raises Kafka_Error if rd_kafka_produce reports a failure; the message
    --         carries the name of the thread-local last error.
    --
    procedure Produce(Topic          : Topic_Type;
                      Partition      : Integer_32;
                      Message_Flags  : Kafka_Message_Flag_Type;
                      Payload        : System.Address;
                      Payload_Length : size_t;
                      Key            : System.Address;
                      Key_Length     : size_t;
                      Message_Opaque : System.Address) is
        Result : int;
    begin
        Result := rd_kafka_produce(Topic, Partition, Message_Flags, Payload, Payload_Length, Key, Key_Length, Message_Opaque);

        if Result /= 0 then
            raise Kafka_Error with Get_Error_Name(Get_Last_Error);
        end if;
    end Produce;

    --
    -- Produce and send a single message whose payload and key are Strings.
    -- Both are copied into byte buffers and handed to librdkafka with
    -- RD_KAFKA_MSG_F_COPY, so the buffers may be freed on return.
    --
    procedure Produce(Topic          : Topic_Type;
                      Partition      : Integer_32;
                      Payload        : String;
                      Key            : String;
                      Message_Opaque : System.Address) is
        type Byte_Array is array (Positive range <>) of aliased Interfaces.Unsigned_8;

        -- Does not matter since we are passing length to the C function, specifying the bound
        pragma Warnings (Off, "To_Pointer results may not have bounds");
        package Byte_Conv is new System.Address_To_Access_Conversions(Byte_Array);
        pragma Warnings (On);

        -- No zero-initialization needed: the loops below assign every element.
        Payload_Bytes : aliased Byte_Array(1 .. Payload'Length);
        Key_Bytes     : aliased Byte_Array(1 .. Key'Length);
    begin
        for Index in Payload_Bytes'Range loop
            Payload_Bytes(Index) := Character'Pos(Payload(Payload'First + Index - 1));
        end loop;

        for Index in Key_Bytes'Range loop
            Key_Bytes(Index) := Character'Pos(Key(Key'First + Index - 1));
        end loop;

        Produce(Topic,
                Partition,
                RD_KAFKA_MSG_F_COPY,
                Byte_Conv.To_Address(Payload_Bytes'Access),
                Payload_Bytes'Length,
                Byte_Conv.To_Address(Key_Bytes'Access),
                Key_Bytes'Length,
                Message_Opaque);
    end Produce;

    --
    -- Subscribes the handle to the topics named in Partition_List.
    --
    -- @raises Kafka_Error if rd_kafka_subscribe returns an error code.
    --
    procedure Subscribe(Handle         : Handle_Type;
                        Partition_List : Partition_List_Type) is
        Response : Kafka_Response_Error_Type;
    begin
        Response := rd_kafka_subscribe(Handle, Partition_List);

        if Response /= RD_KAFKA_RESP_ERR_NO_ERROR then
            raise Kafka_Error with "Error returned by rd_kafka_subscribe: " & Kafka.Get_Error_Name(Response);
        end if;
    end Subscribe;

    --
    -- Removes the handle's current subscription.
    --
    -- @raises Kafka_Error if rd_kafka_unsubscribe returns an error code.
    --
    procedure Unsubscribe(Handle : Handle_Type) is
        Response : Kafka_Response_Error_Type;
    begin
        Response := rd_kafka_unsubscribe(Handle);

        if Response /= RD_KAFKA_RESP_ERR_NO_ERROR then
            raise Kafka_Error with "Error returned by rd_kafka_unsubscribe: " & Kafka.Get_Error_Name(Response);
        end if;
    end Unsubscribe;
end Kafka;
--------------------------------------------------------------------------------
/src/kafka.ads:
--------------------------------------------------------------------------------
pragma Warnings (Off, "use of this unit is non-portable and version-dependent");
private with System.Parameters;
pragma Warnings (On);

with System;
with Interfaces; use Interfaces;
with Interfaces.C; use Interfaces.C;
with Interfaces.C.Strings; use Interfaces.C.Strings;

--
-- Provides functionality to create Kafka handles for a connection instance,
-- as well as Poll, Flush, Produce and Subscribe to topics
--
package Kafka is

    subtype Kafka_Response_Error_Type is Integer;
    RD_KAFKA_RESP_ERR_u_BEGIN : constant Kafka_Response_Error_Type := -200;
    RD_KAFKA_RESP_ERR_u_BAD_MSG 
: constant Kafka_Response_Error_Type := -199; 19 | RD_KAFKA_RESP_ERR_u_BAD_COMPRESSION : constant Kafka_Response_Error_Type := -198; 20 | RD_KAFKA_RESP_ERR_u_DESTROY : constant Kafka_Response_Error_Type := -197; 21 | RD_KAFKA_RESP_ERR_u_FAIL : constant Kafka_Response_Error_Type := -196; 22 | RD_KAFKA_RESP_ERR_u_TRANSPORT : constant Kafka_Response_Error_Type := -195; 23 | RD_KAFKA_RESP_ERR_u_CRIT_SYS_RESOURCE : constant Kafka_Response_Error_Type := -194; 24 | RD_KAFKA_RESP_ERR_u_RESOLVE : constant Kafka_Response_Error_Type := -193; 25 | RD_KAFKA_RESP_ERR_u_MSG_TIMED_OUT : constant Kafka_Response_Error_Type := -192; 26 | RD_KAFKA_RESP_ERR_u_PARTITION_EOF : constant Kafka_Response_Error_Type := -191; 27 | RD_KAFKA_RESP_ERR_u_UNKNOWN_PARTITION : constant Kafka_Response_Error_Type := -190; 28 | RD_KAFKA_RESP_ERR_u_FS : constant Kafka_Response_Error_Type := -189; 29 | RD_KAFKA_RESP_ERR_u_UNKNOWN_TOPIC : constant Kafka_Response_Error_Type := -188; 30 | RD_KAFKA_RESP_ERR_u_ALL_BROKERS_DOWN : constant Kafka_Response_Error_Type := -187; 31 | RD_KAFKA_RESP_ERR_u_INVALID_ARG : constant Kafka_Response_Error_Type := -186; 32 | RD_KAFKA_RESP_ERR_u_TIMED_OUT : constant Kafka_Response_Error_Type := -185; 33 | RD_KAFKA_RESP_ERR_u_QUEUE_FULL : constant Kafka_Response_Error_Type := -184; 34 | RD_KAFKA_RESP_ERR_u_ISR_INSUFF : constant Kafka_Response_Error_Type := -183; 35 | RD_KAFKA_RESP_ERR_u_NODE_UPDATE : constant Kafka_Response_Error_Type := -182; 36 | RD_KAFKA_RESP_ERR_u_SSL : constant Kafka_Response_Error_Type := -181; 37 | RD_KAFKA_RESP_ERR_u_WAIT_COORD : constant Kafka_Response_Error_Type := -180; 38 | RD_KAFKA_RESP_ERR_u_UNKNOWN_GROUP : constant Kafka_Response_Error_Type := -179; 39 | RD_KAFKA_RESP_ERR_u_IN_PROGRESS : constant Kafka_Response_Error_Type := -178; 40 | RD_KAFKA_RESP_ERR_u_PREV_IN_PROGRESS : constant Kafka_Response_Error_Type := -177; 41 | RD_KAFKA_RESP_ERR_u_EXISTING_SUBSCRIPTION : constant Kafka_Response_Error_Type := -176; 42 | 
RD_KAFKA_RESP_ERR_u_ASSIGN_PARTITIONS : constant Kafka_Response_Error_Type := -175; 43 | RD_KAFKA_RESP_ERR_u_REVOKE_PARTITIONS : constant Kafka_Response_Error_Type := -174; 44 | RD_KAFKA_RESP_ERR_u_CONFLICT : constant Kafka_Response_Error_Type := -173; 45 | RD_KAFKA_RESP_ERR_u_STATE : constant Kafka_Response_Error_Type := -172; 46 | RD_KAFKA_RESP_ERR_u_UNKNOWN_PROTOCOL : constant Kafka_Response_Error_Type := -171; 47 | RD_KAFKA_RESP_ERR_u_NOT_IMPLEMENTED : constant Kafka_Response_Error_Type := -170; 48 | RD_KAFKA_RESP_ERR_u_AUTHENTICATION : constant Kafka_Response_Error_Type := -169; 49 | RD_KAFKA_RESP_ERR_u_NO_OFFSET : constant Kafka_Response_Error_Type := -168; 50 | RD_KAFKA_RESP_ERR_u_OUTDATED : constant Kafka_Response_Error_Type := -167; 51 | RD_KAFKA_RESP_ERR_u_TIMED_OUT_QUEUE : constant Kafka_Response_Error_Type := -166; 52 | RD_KAFKA_RESP_ERR_u_UNSUPPORTED_FEATURE : constant Kafka_Response_Error_Type := -165; 53 | RD_KAFKA_RESP_ERR_u_WAIT_CACHE : constant Kafka_Response_Error_Type := -164; 54 | RD_KAFKA_RESP_ERR_u_INTR : constant Kafka_Response_Error_Type := -163; 55 | RD_KAFKA_RESP_ERR_u_KEY_SERIALIZATION : constant Kafka_Response_Error_Type := -162; 56 | RD_KAFKA_RESP_ERR_u_VALUE_SERIALIZATION : constant Kafka_Response_Error_Type := -161; 57 | RD_KAFKA_RESP_ERR_u_KEY_DESERIALIZATION : constant Kafka_Response_Error_Type := -160; 58 | RD_KAFKA_RESP_ERR_u_VALUE_DESERIALIZATION : constant Kafka_Response_Error_Type := -159; 59 | RD_KAFKA_RESP_ERR_u_PARTIAL : constant Kafka_Response_Error_Type := -158; 60 | RD_KAFKA_RESP_ERR_u_READ_ONLY : constant Kafka_Response_Error_Type := -157; 61 | RD_KAFKA_RESP_ERR_u_NOENT : constant Kafka_Response_Error_Type := -156; 62 | RD_KAFKA_RESP_ERR_u_UNDERFLOW : constant Kafka_Response_Error_Type := -155; 63 | RD_KAFKA_RESP_ERR_u_INVALID_TYPE : constant Kafka_Response_Error_Type := -154; 64 | RD_KAFKA_RESP_ERR_u_RETRY : constant Kafka_Response_Error_Type := -153; 65 | RD_KAFKA_RESP_ERR_u_PURGE_QUEUE : constant 
Kafka_Response_Error_Type := -152; 66 | RD_KAFKA_RESP_ERR_u_PURGE_INFLIGHT : constant Kafka_Response_Error_Type := -151; 67 | RD_KAFKA_RESP_ERR_u_FATAL : constant Kafka_Response_Error_Type := -150; 68 | RD_KAFKA_RESP_ERR_u_INCONSISTENT : constant Kafka_Response_Error_Type := -149; 69 | RD_KAFKA_RESP_ERR_u_GAPLESS_GUARANTEE : constant Kafka_Response_Error_Type := -148; 70 | RD_KAFKA_RESP_ERR_u_MAX_POLL_EXCEEDED : constant Kafka_Response_Error_Type := -147; 71 | RD_KAFKA_RESP_ERR_u_UNKNOWN_BROKER : constant Kafka_Response_Error_Type := -146; 72 | RD_KAFKA_RESP_ERR_u_NOT_CONFIGURED : constant Kafka_Response_Error_Type := -145; 73 | RD_KAFKA_RESP_ERR_u_FENCED : constant Kafka_Response_Error_Type := -144; 74 | RD_KAFKA_RESP_ERR_u_APPLICATION : constant Kafka_Response_Error_Type := -143; 75 | RD_KAFKA_RESP_ERR_u_ASSIGNMENT_LOST : constant Kafka_Response_Error_Type := -142; 76 | RD_KAFKA_RESP_ERR_u_NOOP : constant Kafka_Response_Error_Type := -141; 77 | RD_KAFKA_RESP_ERR_u_AUTO_OFFSET_RESET : constant Kafka_Response_Error_Type := -140; 78 | RD_KAFKA_RESP_ERR_u_END : constant Kafka_Response_Error_Type := -100; 79 | RD_KAFKA_RESP_ERR_UNKNOWN : constant Kafka_Response_Error_Type := -1; 80 | RD_KAFKA_RESP_ERR_NO_ERROR : constant Kafka_Response_Error_Type := 0; 81 | RD_KAFKA_RESP_ERR_OFFSET_OUT_OF_RANGE : constant Kafka_Response_Error_Type := 1; 82 | RD_KAFKA_RESP_ERR_INVALID_MSG : constant Kafka_Response_Error_Type := 2; 83 | RD_KAFKA_RESP_ERR_UNKNOWN_TOPIC_OR_PART : constant Kafka_Response_Error_Type := 3; 84 | RD_KAFKA_RESP_ERR_INVALID_MSG_SIZE : constant Kafka_Response_Error_Type := 4; 85 | RD_KAFKA_RESP_ERR_LEADER_NOT_AVAILABLE : constant Kafka_Response_Error_Type := 5; 86 | RD_KAFKA_RESP_ERR_NOT_LEADER_FOR_PARTITION : constant Kafka_Response_Error_Type := 6; 87 | RD_KAFKA_RESP_ERR_REQUEST_TIMED_OUT : constant Kafka_Response_Error_Type := 7; 88 | RD_KAFKA_RESP_ERR_BROKER_NOT_AVAILABLE : constant Kafka_Response_Error_Type := 8; 89 | RD_KAFKA_RESP_ERR_REPLICA_NOT_AVAILABLE 
: constant Kafka_Response_Error_Type := 9; 90 | RD_KAFKA_RESP_ERR_MSG_SIZE_TOO_LARGE : constant Kafka_Response_Error_Type := 10; 91 | RD_KAFKA_RESP_ERR_STALE_CTRL_EPOCH : constant Kafka_Response_Error_Type := 11; 92 | RD_KAFKA_RESP_ERR_OFFSET_METADATA_TOO_LARGE : constant Kafka_Response_Error_Type := 12; 93 | RD_KAFKA_RESP_ERR_NETWORK_EXCEPTION : constant Kafka_Response_Error_Type := 13; 94 | RD_KAFKA_RESP_ERR_COORDINATOR_LOAD_IN_PROGRESS : constant Kafka_Response_Error_Type := 14; 95 | RD_KAFKA_RESP_ERR_COORDINATOR_NOT_AVAILABLE : constant Kafka_Response_Error_Type := 15; 96 | RD_KAFKA_RESP_ERR_NOT_COORDINATOR : constant Kafka_Response_Error_Type := 16; 97 | RD_KAFKA_RESP_ERR_TOPIC_EXCEPTION : constant Kafka_Response_Error_Type := 17; 98 | RD_KAFKA_RESP_ERR_RECORD_LIST_TOO_LARGE : constant Kafka_Response_Error_Type := 18; 99 | RD_KAFKA_RESP_ERR_NOT_ENOUGH_REPLICAS : constant Kafka_Response_Error_Type := 19; 100 | RD_KAFKA_RESP_ERR_NOT_ENOUGH_REPLICAS_AFTER_APPEND : constant Kafka_Response_Error_Type := 20; 101 | RD_KAFKA_RESP_ERR_INVALID_REQUIRED_ACKS : constant Kafka_Response_Error_Type := 21; 102 | RD_KAFKA_RESP_ERR_ILLEGAL_GENERATION : constant Kafka_Response_Error_Type := 22; 103 | RD_KAFKA_RESP_ERR_INCONSISTENT_GROUP_PROTOCOL : constant Kafka_Response_Error_Type := 23; 104 | RD_KAFKA_RESP_ERR_INVALID_GROUP_ID : constant Kafka_Response_Error_Type := 24; 105 | RD_KAFKA_RESP_ERR_UNKNOWN_MEMBER_ID : constant Kafka_Response_Error_Type := 25; 106 | RD_KAFKA_RESP_ERR_INVALID_SESSION_TIMEOUT : constant Kafka_Response_Error_Type := 26; 107 | RD_KAFKA_RESP_ERR_REBALANCE_IN_PROGRESS : constant Kafka_Response_Error_Type := 27; 108 | RD_KAFKA_RESP_ERR_INVALID_COMMIT_OFFSET_SIZE : constant Kafka_Response_Error_Type := 28; 109 | RD_KAFKA_RESP_ERR_TOPIC_AUTHORIZATION_FAILED : constant Kafka_Response_Error_Type := 29; 110 | RD_KAFKA_RESP_ERR_GROUP_AUTHORIZATION_FAILED : constant Kafka_Response_Error_Type := 30; 111 | RD_KAFKA_RESP_ERR_CLUSTER_AUTHORIZATION_FAILED : constant 
Kafka_Response_Error_Type := 31; 112 | RD_KAFKA_RESP_ERR_INVALID_TIMESTAMP : constant Kafka_Response_Error_Type := 32; 113 | RD_KAFKA_RESP_ERR_UNSUPPORTED_SASL_MECHANISM : constant Kafka_Response_Error_Type := 33; 114 | RD_KAFKA_RESP_ERR_ILLEGAL_SASL_STATE : constant Kafka_Response_Error_Type := 34; 115 | RD_KAFKA_RESP_ERR_UNSUPPORTED_VERSION : constant Kafka_Response_Error_Type := 35; 116 | RD_KAFKA_RESP_ERR_TOPIC_ALREADY_EXISTS : constant Kafka_Response_Error_Type := 36; 117 | RD_KAFKA_RESP_ERR_INVALID_PARTITIONS : constant Kafka_Response_Error_Type := 37; 118 | RD_KAFKA_RESP_ERR_INVALID_REPLICATION_FACTOR : constant Kafka_Response_Error_Type := 38; 119 | RD_KAFKA_RESP_ERR_INVALID_REPLICA_ASSIGNMENT : constant Kafka_Response_Error_Type := 39; 120 | RD_KAFKA_RESP_ERR_INVALID_CONFIG : constant Kafka_Response_Error_Type := 40; 121 | RD_KAFKA_RESP_ERR_NOT_CONTROLLER : constant Kafka_Response_Error_Type := 41; 122 | RD_KAFKA_RESP_ERR_INVALID_REQUEST : constant Kafka_Response_Error_Type := 42; 123 | RD_KAFKA_RESP_ERR_UNSUPPORTED_FOR_MESSAGE_FORMAT : constant Kafka_Response_Error_Type := 43; 124 | RD_KAFKA_RESP_ERR_POLICY_VIOLATION : constant Kafka_Response_Error_Type := 44; 125 | RD_KAFKA_RESP_ERR_OUT_OF_ORDER_SEQUENCE_NUMBER : constant Kafka_Response_Error_Type := 45; 126 | RD_KAFKA_RESP_ERR_DUPLICATE_SEQUENCE_NUMBER : constant Kafka_Response_Error_Type := 46; 127 | RD_KAFKA_RESP_ERR_INVALID_PRODUCER_EPOCH : constant Kafka_Response_Error_Type := 47; 128 | RD_KAFKA_RESP_ERR_INVALID_TXN_STATE : constant Kafka_Response_Error_Type := 48; 129 | RD_KAFKA_RESP_ERR_INVALID_PRODUCER_ID_MAPPING : constant Kafka_Response_Error_Type := 49; 130 | RD_KAFKA_RESP_ERR_INVALID_TRANSACTION_TIMEOUT : constant Kafka_Response_Error_Type := 50; 131 | RD_KAFKA_RESP_ERR_CONCURRENT_TRANSACTIONS : constant Kafka_Response_Error_Type := 51; 132 | RD_KAFKA_RESP_ERR_TRANSACTION_COORDINATOR_FENCED : constant Kafka_Response_Error_Type := 52; 133 | 
RD_KAFKA_RESP_ERR_TRANSACTIONAL_ID_AUTHORIZATION_FAILED : constant Kafka_Response_Error_Type := 53; 134 | RD_KAFKA_RESP_ERR_SECURITY_DISABLED : constant Kafka_Response_Error_Type := 54; 135 | RD_KAFKA_RESP_ERR_OPERATION_NOT_ATTEMPTED : constant Kafka_Response_Error_Type := 55; 136 | RD_KAFKA_RESP_ERR_KAFKA_STORAGE_ERROR : constant Kafka_Response_Error_Type := 56; 137 | RD_KAFKA_RESP_ERR_LOG_DIR_NOT_FOUND : constant Kafka_Response_Error_Type := 57; 138 | RD_KAFKA_RESP_ERR_SASL_AUTHENTICATION_FAILED : constant Kafka_Response_Error_Type := 58; 139 | RD_KAFKA_RESP_ERR_UNKNOWN_PRODUCER_ID : constant Kafka_Response_Error_Type := 59; 140 | RD_KAFKA_RESP_ERR_REASSIGNMENT_IN_PROGRESS : constant Kafka_Response_Error_Type := 60; 141 | RD_KAFKA_RESP_ERR_DELEGATION_TOKEN_AUTH_DISABLED : constant Kafka_Response_Error_Type := 61; 142 | RD_KAFKA_RESP_ERR_DELEGATION_TOKEN_NOT_FOUND : constant Kafka_Response_Error_Type := 62; 143 | RD_KAFKA_RESP_ERR_DELEGATION_TOKEN_OWNER_MISMATCH : constant Kafka_Response_Error_Type := 63; 144 | RD_KAFKA_RESP_ERR_DELEGATION_TOKEN_REQUEST_NOT_ALLOWED : constant Kafka_Response_Error_Type := 64; 145 | RD_KAFKA_RESP_ERR_DELEGATION_TOKEN_AUTHORIZATION_FAILED : constant Kafka_Response_Error_Type := 65; 146 | RD_KAFKA_RESP_ERR_DELEGATION_TOKEN_EXPIRED : constant Kafka_Response_Error_Type := 66; 147 | RD_KAFKA_RESP_ERR_INVALID_PRINCIPAL_TYPE : constant Kafka_Response_Error_Type := 67; 148 | RD_KAFKA_RESP_ERR_NON_EMPTY_GROUP : constant Kafka_Response_Error_Type := 68; 149 | RD_KAFKA_RESP_ERR_GROUP_ID_NOT_FOUND : constant Kafka_Response_Error_Type := 69; 150 | RD_KAFKA_RESP_ERR_FETCH_SESSION_ID_NOT_FOUND : constant Kafka_Response_Error_Type := 70; 151 | RD_KAFKA_RESP_ERR_INVALID_FETCH_SESSION_EPOCH : constant Kafka_Response_Error_Type := 71; 152 | RD_KAFKA_RESP_ERR_LISTENER_NOT_FOUND : constant Kafka_Response_Error_Type := 72; 153 | RD_KAFKA_RESP_ERR_TOPIC_DELETION_DISABLED : constant Kafka_Response_Error_Type := 73; 154 | 
RD_KAFKA_RESP_ERR_FENCED_LEADER_EPOCH : constant Kafka_Response_Error_Type := 74; 155 | RD_KAFKA_RESP_ERR_UNKNOWN_LEADER_EPOCH : constant Kafka_Response_Error_Type := 75; 156 | RD_KAFKA_RESP_ERR_UNSUPPORTED_COMPRESSION_TYPE : constant Kafka_Response_Error_Type := 76; 157 | RD_KAFKA_RESP_ERR_STALE_BROKER_EPOCH : constant Kafka_Response_Error_Type := 77; 158 | RD_KAFKA_RESP_ERR_OFFSET_NOT_AVAILABLE : constant Kafka_Response_Error_Type := 78; 159 | RD_KAFKA_RESP_ERR_MEMBER_ID_REQUIRED : constant Kafka_Response_Error_Type := 79; 160 | RD_KAFKA_RESP_ERR_PREFERRED_LEADER_NOT_AVAILABLE : constant Kafka_Response_Error_Type := 80; 161 | RD_KAFKA_RESP_ERR_GROUP_MAX_SIZE_REACHED : constant Kafka_Response_Error_Type := 81; 162 | RD_KAFKA_RESP_ERR_FENCED_INSTANCE_ID : constant Kafka_Response_Error_Type := 82; 163 | RD_KAFKA_RESP_ERR_ELIGIBLE_LEADERS_NOT_AVAILABLE : constant Kafka_Response_Error_Type := 83; 164 | RD_KAFKA_RESP_ERR_ELECTION_NOT_NEEDED : constant Kafka_Response_Error_Type := 84; 165 | RD_KAFKA_RESP_ERR_NO_REASSIGNMENT_IN_PROGRESS : constant Kafka_Response_Error_Type := 85; 166 | RD_KAFKA_RESP_ERR_GROUP_SUBSCRIBED_TO_TOPIC : constant Kafka_Response_Error_Type := 86; 167 | RD_KAFKA_RESP_ERR_INVALID_RECORD : constant Kafka_Response_Error_Type := 87; 168 | RD_KAFKA_RESP_ERR_UNSTABLE_OFFSET_COMMIT : constant Kafka_Response_Error_Type := 88; 169 | RD_KAFKA_RESP_ERR_THROTTLING_QUOTA_EXCEEDED : constant Kafka_Response_Error_Type := 89; 170 | RD_KAFKA_RESP_ERR_PRODUCER_FENCED : constant Kafka_Response_Error_Type := 90; 171 | RD_KAFKA_RESP_ERR_RESOURCE_NOT_FOUND : constant Kafka_Response_Error_Type := 91; 172 | RD_KAFKA_RESP_ERR_DUPLICATE_RESOURCE : constant Kafka_Response_Error_Type := 92; 173 | RD_KAFKA_RESP_ERR_UNACCEPTABLE_CREDENTIAL : constant Kafka_Response_Error_Type := 93; 174 | RD_KAFKA_RESP_ERR_INCONSISTENT_VOTER_SET : constant Kafka_Response_Error_Type := 94; 175 | RD_KAFKA_RESP_ERR_INVALID_UPDATE_VERSION : constant Kafka_Response_Error_Type := 95; 176 | 
RD_KAFKA_RESP_ERR_FEATURE_UPDATE_FAILED : constant Kafka_Response_Error_Type := 96; 177 | RD_KAFKA_RESP_ERR_PRINCIPAL_DESERIALIZATION_FAILURE : constant Kafka_Response_Error_Type := 97; 178 | RD_KAFKA_RESP_ERR_END_ALL : constant Kafka_Response_Error_Type := 98; 179 | 180 | subtype Kafka_Message_Flag_Type is Integer; 181 | RD_KAFKA_MSG_F_FREE : constant Kafka_Message_Flag_Type := 1; 182 | RD_KAFKA_MSG_F_COPY : constant Kafka_Message_Flag_Type := 2; 183 | RD_KAFKA_MSG_F_BLOCK : constant Kafka_Message_Flag_Type := 4; 184 | RD_KAFKA_MSG_F_PARTITION : constant Kafka_Message_Flag_Type := 8; 185 | 186 | RD_KAFKA_PARTITION_UA : constant Integer_32 := -1; 187 | 188 | Kafka_Error : exception; 189 | Timeout_Reached : exception; 190 | 191 | type Handle_Type is new System.Address; 192 | type Topic_Type is new System.Address; 193 | type Config_Type is new System.Address; 194 | type Topic_Config_Type is new System.Address; 195 | type Partition_List_Type is new System.Address; 196 | 197 | type Kafka_Handle_Type is (RD_KAFKA_PRODUCER, RD_KAFKA_CONSUMER) 198 | with Convention => C; 199 | 200 | type Message_Type is record 201 | Error : aliased Kafka_Response_Error_Type; 202 | Topic : Topic_Type; 203 | Partition : aliased Integer_32; 204 | Payload : System.Address; 205 | Payload_Length : aliased size_t; 206 | Key : System.Address; 207 | Key_Length : aliased size_t; 208 | Offset : aliased Integer_64; 209 | Opaque : System.Address; 210 | end record 211 | with Convention => C_Pass_By_Copy; 212 | 213 | type Delivery_Report_Callback is access procedure (Kafka : Handle_Type; 214 | Message : access constant Message_Type; 215 | Opaque : System.Address) 216 | with Convention => C; 217 | 218 | -- 219 | -- Returns the version of librdkafka as an Integer. 
220 | -- 221 | -- librdkafka equivalent: rd_kafka_version 222 | -- 223 | function Version return Integer 224 | with Import => True, 225 | Convention => C, 226 | External_Name => "rd_kafka_version"; 227 | 228 | -- 229 | -- Returns the version of librdkafka as a String 230 | -- 231 | -- librdkafka equivalent: rd_kafka_version_str 232 | -- 233 | function Version return String; 234 | 235 | -- 236 | -- Return the last error encountered by Kafka. The last error is stored 237 | -- per thread. 238 | -- 239 | -- librdkafka equivalent: rd_kafka_last_error 240 | -- 241 | function Get_Last_Error return Kafka_Response_Error_Type 242 | with Import => True, 243 | Convention => C, 244 | External_Name => "rd_kafka_last_error"; 245 | 246 | 247 | -- 248 | -- Returns the name of a kafka error given an error code 249 | -- 250 | -- librdkafka equivalent: rd_kafka_err2name 251 | -- 252 | function Get_Error_Name(Error_Code: Kafka_Response_Error_Type) return String; 253 | 254 | -- 255 | -- Sets the callback for listening to messages that are being produced, for 256 | -- the provided configuration object 257 | -- 258 | -- librdkafka equivalent: rd_kafka_conf_set_dr_msg_cb 259 | -- 260 | procedure Set_Delivery_Report_Callback(Config : Config_Type; 261 | Callback : Delivery_Report_Callback) 262 | with Import => True, 263 | Convention => C, 264 | External_Name => "rd_kafka_conf_set_dr_msg_cb"; 265 | 266 | 267 | -- 268 | -- Creates a kafka handle 269 | -- 270 | -- librdkafka equivalent: rd_kafka_new 271 | -- 272 | function Create_Handle(HandleType : Kafka_Handle_Type; 273 | Config : Config_Type) return Handle_Type; 274 | 275 | -- 276 | -- Destroys the specified kafka handle 277 | -- 278 | -- librdkafka equivalent: rd_kafka_destroy 279 | -- 280 | procedure Destroy_Handle(Handle : Handle_Type) 281 | with Import => True, 282 | Convention => C, 283 | External_Name => "rd_kafka_destroy"; 284 | 285 | -- 286 | -- Wait until all outstanding produce requests, et.al, are completed. 
287 | -- This should typically be done prior to destroying a producer instance 288 | -- to make sure all queued and in-flight produce requests are completed 289 | -- before terminating. 290 | -- 291 | -- @raises Timeout_Reached if Timeout was reached before all outstanding requests were completed. 292 | -- @raises Kafka_Error if an error occurs 293 | -- 294 | procedure Flush(Handle : Handle_Type; 295 | Timeout : Duration); 296 | 297 | -- 298 | -- Polls the provided kafka handle for events. Events will cause application 299 | -- provided callbacks to be called. 300 | -- 301 | -- librdkafka equivalent: rd_kafka_poll 302 | -- 303 | -- @params Handle Kafka handle 304 | -- @params Timeout timeout for polling events 305 | -- @returns the number of events served. 306 | -- 307 | function Poll(Handle : Handle_Type; 308 | Timeout : Duration) return Integer; 309 | 310 | -- 311 | -- Polls the provided kafka handle for events. Events will cause application 312 | -- provided callbacks to be called. 313 | -- 314 | -- librdkafka equivalent: rd_kafka_poll 315 | -- 316 | -- @params Handle Kafka handle 317 | -- @params Timeout timeout for polling events 318 | -- 319 | procedure Poll(Handle : Handle_Type; 320 | Timeout : Duration); 321 | 322 | -- 323 | -- Produce and send a single message to broker. 
324 | -- 325 | -- librdkafka equivalent: rd_kafka_produce 326 | -- 327 | procedure Produce(Topic : Topic_Type; 328 | Partition : Integer_32; 329 | Message_Flags : Kafka_Message_Flag_Type; 330 | Payload : System.Address; 331 | Payload_Length : size_t; 332 | Key : System.Address; 333 | Key_Length : size_t; 334 | Message_Opaque : System.Address); 335 | 336 | -- 337 | -- Produce and send a single message to broker where both the payload and 338 | -- key are strings that will be copied 339 | -- 340 | -- librdkafka equivalent: rd_kafka_produce 341 | -- 342 | procedure Produce(Topic : Topic_Type; 343 | Partition : Integer_32; 344 | Payload : String; 345 | Key : String; 346 | Message_Opaque : System.Address); 347 | 348 | -- 349 | -- librdkafka equivalent: rd_kafka_subscribe 350 | -- 351 | procedure Subscribe(Handle : Handle_Type; 352 | Partition_List : Partition_List_Type); 353 | 354 | -- 355 | -- librdkafka equivalent: rd_kafka_unsubscribe 356 | -- 357 | procedure Unsubscribe(Handle : Handle_Type); 358 | private 359 | 360 | -- 361 | -- The following functions are used by wrapper functions due to the lack of 362 | -- convenience (either because of chars_ptr or their error handling) 363 | -- 364 | 365 | function rd_kafka_version_str return Interfaces.C.Strings.chars_ptr 366 | with Import => True, 367 | Convention => C, 368 | External_Name => "rd_kafka_version_str"; 369 | 370 | function rd_kafka_err2name(Error_Code: Kafka_Response_Error_Type) return Interfaces.C.Strings.chars_ptr 371 | with Import => True, 372 | Convention => C, 373 | External_Name => "rd_kafka_err2name"; 374 | 375 | function rd_kafka_new(c_type : Kafka_Handle_Type; 376 | conf : Config_Type; 377 | errstr : chars_ptr; 378 | errstr_size : size_t) return Handle_Type 379 | with Import => True, 380 | Convention => C, 381 | External_Name => "rd_kafka_new"; 382 | 383 | function rd_kafka_flush(rk : Handle_Type; 384 | timeout_ms : int) return Kafka_Response_Error_Type 385 | with Import => True, 386 | Convention => C, 
387 | External_Name => "rd_kafka_flush"; 388 | 389 | 390 | function rd_kafka_poll(rk : Handle_Type; 391 | timeout_ms : int) return int 392 | with Import => True, 393 | Convention => C, 394 | External_Name => "rd_kafka_poll"; 395 | 396 | function rd_kafka_produce(Topic : Topic_Type; 397 | Partition : Integer_32; 398 | Message_Flags : Kafka_Message_Flag_Type; 399 | Payload : System.Address; 400 | Payload_Length : size_t; 401 | Key : System.Address; 402 | Key_Length : size_t; 403 | Message_Opaque : System.Address) return int 404 | with Import => True, 405 | Convention => C, 406 | External_Name => "rd_kafka_produce"; 407 | 408 | function rd_kafka_subscribe(Handle : Handle_Type; 409 | Partition_List : Partition_List_Type) return Kafka_Response_Error_Type 410 | with Import => True, 411 | Convention => C, 412 | External_Name => "rd_kafka_subscribe"; 413 | 414 | function rd_kafka_unsubscribe(Handle : Handle_Type) return Kafka_Response_Error_Type 415 | with Import => True, 416 | Convention => C, 417 | External_Name => "rd_kafka_unsubscribe"; 418 | 419 | function Alloc(Size : size_t) return Chars_Ptr 420 | with Import => True, 421 | Convention => C, 422 | External_Name => System.Parameters.C_Malloc_Linkname; 423 | 424 | end Kafka; 425 | --------------------------------------------------------------------------------