├── .github └── FUNDING.yml ├── .gitignore ├── src ├── ext-php-rdkafka │ ├── php-kafka-lib │ │ ├── avroSchema │ │ │ ├── nickzh.php.kafka.examples.entity.product-key.avsc │ │ │ └── nickzh.php.kafka.examples.entity.product-value.avsc │ │ ├── README.md │ │ ├── console │ │ ├── v0.1.x │ │ │ ├── producer.php │ │ │ ├── highLevelConsumer.php │ │ │ ├── lowLevelConsumer.php │ │ │ ├── avroProducer.php │ │ │ └── avroHighLevelConsumer.php │ │ ├── producer.php │ │ ├── consumer.php │ │ ├── avroProducer.php │ │ └── avroConsumer.php │ ├── README.md │ ├── composer.json │ ├── pure-php │ │ ├── README.md │ │ ├── consumer.php │ │ ├── producer.php │ │ └── producer_transactional.php │ └── docker-compose.yml ├── redpanda │ ├── php-simple-kafka-lib │ │ ├── avroSchema │ │ │ ├── nickzh.php.kafka.examples.entity.product-key.avsc │ │ │ └── nickzh.php.kafka.examples.entity.product-value.avsc │ │ ├── README.md │ │ ├── console │ │ ├── producer.php │ │ ├── consumer.php │ │ ├── avroProducer.php │ │ └── avroConsumer.php │ ├── composer.json │ ├── README.md │ ├── pure-php │ │ ├── README.md │ │ ├── consumer.php │ │ ├── metadata.php │ │ ├── producer.php │ │ └── producer_transactional.php │ └── docker-compose.yml └── ext-php-simple-kafka-client │ ├── php-simple-kafka-lib │ ├── avroSchema │ │ ├── nickzh.php.kafka.examples.entity.product-key.avsc │ │ └── nickzh.php.kafka.examples.entity.product-value.avsc │ ├── console │ ├── README.md │ ├── producer.php │ ├── consumer.php │ ├── avroProducer.php │ └── avroConsumer.php │ ├── README.md │ ├── composer.json │ ├── pure-php │ ├── README.md │ ├── queryWatermarkOffsets.php │ ├── offsetsForTimes.php │ ├── consumer.php │ ├── metadata.php │ ├── producer.php │ └── producer_transactional.php │ └── docker-compose.yml ├── docker ├── php │ ├── docker-php-entrypoint │ ├── docker-php-source │ ├── Dockerfile.centos │ ├── Dockerfile.alpine │ ├── docker-php-ext-configure │ ├── Dockerfile.master │ ├── docker-php-ext-install │ ├── docker-php-ext-enable │ └── Dockerfile.debug └── 
docker-compose-templates │ ├── docker-compose.alpine.redpanda.yml │ ├── docker-compose.master.yml │ ├── docker-compose.debug.yml │ ├── docker-compose.centos.yml │ └── docker-compose.alpine.yml └── README.md /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: [nick-zh] 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | **/vendor/ 2 | /.idea 3 | /keys 4 | **/composer.lock 5 | **/*.iml 6 | src/jc 7 | docker-compose.jc.yml 8 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/php-kafka-lib/avroSchema/nickzh.php.kafka.examples.entity.product-key.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "string" 3 | } 4 | -------------------------------------------------------------------------------- /src/redpanda/php-simple-kafka-lib/avroSchema/nickzh.php.kafka.examples.entity.product-key.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "string" 3 | } 4 | -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/php-simple-kafka-lib/avroSchema/nickzh.php.kafka.examples.entity.product-key.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "string" 3 | } 4 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/README.md: -------------------------------------------------------------------------------- 1 | # Running the examples 2 | 1. `docker-compose up -d` 3 | 2. `docker-compose exec php composer update` 4 | 3. 
Check the subfolders on how to run the examples -------------------------------------------------------------------------------- /docker/php/docker-php-entrypoint: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | # first arg is `-f` or `--some-option` 5 | if [ "${1#-}" != "$1" ]; then 6 | set -- php "$@" 7 | fi 8 | 9 | exec "$@" 10 | -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/README.md: -------------------------------------------------------------------------------- 1 | # Running the examples 2 | 1. `docker-compose up -d` 3 | 2. `docker-compose exec php composer update` 4 | 3. Check the subfolders on how to run the examples -------------------------------------------------------------------------------- /src/redpanda/composer.json: -------------------------------------------------------------------------------- 1 | { 2 | "require": { 3 | "ext-json": "*", 4 | "flix-tech/avro-serde-php": "^1.4", 5 | "jobcloud/php-console-kafka-schema-registry": "^1.1", 6 | "php-kafka/php-simple-kafka-lib": "dev-main", 7 | "ramsey/uuid": "^4.0" 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/composer.json: -------------------------------------------------------------------------------- 1 | { 2 | "require": { 3 | "ext-json": "*", 4 | "flix-tech/avro-serde-php": "^1.4", 5 | "jobcloud/php-console-kafka-schema-registry": "^1.1", 6 | "php-kafka/php-simple-kafka-lib": "dev-main", 7 | "ramsey/uuid": "^4.0" 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/composer.json: -------------------------------------------------------------------------------- 1 | { 2 | "require": { 3 | "ext-json": "*", 4 | "flix-tech/avro-serde-php": "^1.4", 5 | "jobcloud/php-console-kafka-schema-registry": "^1.1", 6 | 
"jobcloud/php-kafka-lib": "^1.0", 7 | "ramsey/uuid": "^4.0" 8 | }, 9 | "require-dev": { 10 | "kwn/php-rdkafka-stubs": "^1.2.0" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /src/redpanda/README.md: -------------------------------------------------------------------------------- 1 | # Redpanda playground 2 | [Redpanda](https://vectorized.io/redpanda) support all parts of the Kafka API. 3 | Transaction API support since `v21.8.1` (recommended, at least `v21.8.2`) 4 | Schema registry support since `v21.8.1` 5 | 6 | ## Running examples 7 | 1. `docker-compose up -d` 8 | 2. `docker-compose exec php composer update` 9 | 3. Check the subfolders on how to run the examples -------------------------------------------------------------------------------- /src/ext-php-rdkafka/pure-php/README.md: -------------------------------------------------------------------------------- 1 | # Running consumer / producer 2 | 3 | ## Prerequisites 4 | Be sure to do this first: [Start containers](./../README.md) 5 | Connect to the php container: 6 | ```bash 7 | docker-compose exec php bash 8 | ``` 9 | 10 | ## Producer 11 | Will per default produce 10 messages: 12 | ```bash 13 | cd pure-php 14 | php producer.php 15 | ``` 16 | 17 | ## Consumer 18 | Will consume all messages available: 19 | ```bash 20 | cd pure-php 21 | php consumer.php 22 | ``` 23 | -------------------------------------------------------------------------------- /src/redpanda/php-simple-kafka-lib/README.md: -------------------------------------------------------------------------------- 1 | # Register test schema 2 | Run the following to register the test schema: 3 | ```bash 4 | cd php-simple-kafka-lib 5 | ./console kafka-schema-registry:register:changed avroSchema 6 | ``` 7 | 8 | # Running consumer / producer 9 | ## Prerequisites 10 | Be sure to do this first: [Start containers](./../README.md) 11 | Connect to the php container: 12 | ```bash 13 | docker-compose exec php bash 
14 | ``` 15 | 16 | ## Simple producer 17 | Will per default produce 10 messages: 18 | ```bash 19 | php producer.php 20 | ``` 21 | 22 | ## Simple consumer 23 | Will consume all messages available: 24 | ```bash 25 | php consumer.php 26 | ``` 27 | -------------------------------------------------------------------------------- /docker/php/docker-php-source: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | dir=/usr/src/php 5 | 6 | usage() { 7 | echo "usage: $0 COMMAND" 8 | echo 9 | echo "Manage php source tarball lifecycle." 10 | echo 11 | echo "Commands:" 12 | echo " extract extract php source tarball into directory $dir if not already done." 13 | echo " delete delete extracted php source located into $dir if not already done." 14 | echo 15 | } 16 | 17 | case "$1" in 18 | extract) 19 | mkdir -p "$dir" 20 | if [ ! -f "$dir/.docker-extracted" ]; then 21 | tar -Jxf /usr/src/php.tar.xz -C "$dir" --strip-components=1 22 | touch "$dir/.docker-extracted" 23 | fi 24 | ;; 25 | 26 | delete) 27 | rm -rf "$dir" 28 | ;; 29 | 30 | *) 31 | usage 32 | exit 1 33 | ;; 34 | esac 35 | -------------------------------------------------------------------------------- /src/redpanda/pure-php/README.md: -------------------------------------------------------------------------------- 1 | # Running consumer / producer 2 | 3 | ## Prerequisites 4 | Be sure to do this first: [Start containers](./../README.md) 5 | Connect to the php container: 6 | ```bash 7 | docker-compose exec php bash 8 | ``` 9 | 10 | ## Simple producer 11 | Will per default produce 10 messages: 12 | ```bash 13 | cd pure-php 14 | php producer.php 15 | ``` 16 | 17 | ## Transactional producer 18 | Will per default produce 10 messages: 19 | ```bash 20 | cd pure-php 21 | php producer_transactional.php 22 | ``` 23 | 24 | ## Consumer 25 | Will consume all messages available: 26 | ```bash 27 | cd pure-php 28 | php consumer.php 29 | ``` 30 | 31 | ## Query metadata 32 | Will 
query metadata for all available topics, etc.: 33 | ```bash 34 | cd pure-php 35 | php metadata.php 36 | ``` 37 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/php-kafka-lib/avroSchema/nickzh.php.kafka.examples.entity.product-value.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "product", 4 | "namespace": "nickzh.php.kafka.examples.entity", 5 | "fields": [ 6 | { 7 | "name": "id", 8 | "type": "string", 9 | "doc": "UUID of our product" 10 | }, 11 | { 12 | "name": "name", 13 | "type": "string", 14 | "doc": "Name of our product" 15 | }, 16 | { 17 | "name": "description", 18 | "type": [ 19 | "null", 20 | "string" 21 | ], 22 | "default": null, 23 | "doc": "Description of our product" 24 | }, 25 | { 26 | "name": "price", 27 | "type": "float", 28 | "default": null, 29 | "doc": "Price of our product" 30 | } 31 | ] 32 | } -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/pure-php/README.md: -------------------------------------------------------------------------------- 1 | # Running consumer / producer 2 | 3 | ## Prerequisites 4 | Be sure to do this first: [Start containers](./../README.md) 5 | Connect to the php container: 6 | ```bash 7 | docker-compose exec php bash 8 | ``` 9 | 10 | ## Simple producer 11 | Will per default produce 10 messages: 12 | ```bash 13 | cd pure-php 14 | php producer.php 15 | ``` 16 | 17 | ## Transactional producer 18 | Will per default produce 10 messages: 19 | ```bash 20 | cd pure-php 21 | php producer_transactional.php 22 | ``` 23 | 24 | ## Consumer 25 | Will consume all messages available: 26 | ```bash 27 | cd pure-php 28 | php consumer.php 29 | ``` 30 | 31 | ## Query metadata 32 | Will query metadata for all available topics, etc.: 33 | ```bash 34 | cd pure-php 35 | php metadata.php 36 | ``` 37 | 
-------------------------------------------------------------------------------- /src/redpanda/php-simple-kafka-lib/avroSchema/nickzh.php.kafka.examples.entity.product-value.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "product", 4 | "namespace": "nickzh.php.kafka.examples.entity", 5 | "fields": [ 6 | { 7 | "name": "id", 8 | "type": "string", 9 | "doc": "UUID of our product" 10 | }, 11 | { 12 | "name": "name", 13 | "type": "string", 14 | "doc": "Name of our product" 15 | }, 16 | { 17 | "name": "description", 18 | "type": [ 19 | "null", 20 | "string" 21 | ], 22 | "default": null, 23 | "doc": "Description of our product" 24 | }, 25 | { 26 | "name": "price", 27 | "type": "float", 28 | "default": 0.0, 29 | "doc": "Price of our product" 30 | } 31 | ] 32 | } -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/php-simple-kafka-lib/avroSchema/nickzh.php.kafka.examples.entity.product-value.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "product", 4 | "namespace": "nickzh.php.kafka.examples.entity", 5 | "fields": [ 6 | { 7 | "name": "id", 8 | "type": "string", 9 | "doc": "UUID of our product" 10 | }, 11 | { 12 | "name": "name", 13 | "type": "string", 14 | "doc": "Name of our product" 15 | }, 16 | { 17 | "name": "description", 18 | "type": [ 19 | "null", 20 | "string" 21 | ], 22 | "default": null, 23 | "doc": "Description of our product" 24 | }, 25 | { 26 | "name": "price", 27 | "type": "float", 28 | "default": 0.0, 29 | "doc": "Price of our product" 30 | } 31 | ] 32 | } -------------------------------------------------------------------------------- /docker/docker-compose-templates/docker-compose.alpine.redpanda.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | services: 3 | php: 4 | build: 5 | 
context: ../php 6 | dockerfile: Dockerfile.alpine 7 | args: 8 | PHP_IMAGE_TAG: 8.0-cli-alpine3.13 9 | LIBRDKAFKA_VERSION: v1.6.1 10 | PHP_EXTENSION: php-kafka/php-simple-kafka-client 11 | PHP_EXTENSION_VERSION: v0.1.1 12 | tty: true 13 | working_dir: /app 14 | volumes: 15 | - ./:/app 16 | redpanda: 17 | entrypoint: 18 | - /usr/bin/rpk 19 | - redpanda 20 | - start 21 | - --smp 22 | - '1' 23 | - --reserve-memory 24 | - 0M 25 | - --overprovisioned 26 | - --node-id 27 | - '0' 28 | - --check=false 29 | image: vectorized/redpanda:v21.4.12 30 | ports: 31 | - 9092:9092 32 | - 29092:29092 33 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/php-kafka-lib/README.md: -------------------------------------------------------------------------------- 1 | # Register test schema 2 | Run the following to register the test schema: 3 | ```bash 4 | cd php-kafka-lib 5 | ./console kafka-schema-registry:register:changed avroSchema 6 | ``` 7 | 8 | # Running consumer / producer 9 | ## Prerequisites 10 | Be sure to do this first: [Start containers](./../README.md) 11 | Connect to the php container: 12 | ```bash 13 | docker-compose exec php bash 14 | ``` 15 | 16 | ## Avro producer 17 | Will per default produce 10 avro messages: 18 | ```bash 19 | php avroProducer.php 20 | ``` 21 | 22 | ## Avro consumer 23 | Will consume all messages available: 24 | ```bash 25 | php avroConsumer.php 26 | ``` 27 | 28 | ## Producer 29 | Will per default produce 10 messages: 30 | ```bash 31 | php producer.php 32 | ``` 33 | 34 | ## Consumer 35 | Will consume all messages available: 36 | ```bash 37 | php consumer.php 38 | ``` 39 | -------------------------------------------------------------------------------- /src/redpanda/php-simple-kafka-lib/console: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env php 2 | offsetSet(KafkaSchemaRegistryApiClientProvider::CONTAINER_KEY, $settings); 18 | 19 | 
$container->register(new CommandServiceProvider()); 20 | 21 | $application = new Application(); 22 | $application->addCommands($container[CommandServiceProvider::COMMANDS]); 23 | $application->run(); 24 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/php-kafka-lib/console: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env php 2 | offsetSet(KafkaSchemaRegistryApiClientProvider::CONTAINER_KEY, $settings); 18 | 19 | $container->register(new CommandServiceProvider()); 20 | 21 | $application = new Application(); 22 | $application->addCommands($container[CommandServiceProvider::COMMANDS]); 23 | $application->run(); 24 | -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/php-simple-kafka-lib/console: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env php 2 | offsetSet(KafkaSchemaRegistryApiClientProvider::CONTAINER_KEY, $settings); 18 | 19 | $container->register(new CommandServiceProvider()); 20 | 21 | $application = new Application(); 22 | $application->addCommands($container[CommandServiceProvider::COMMANDS]); 23 | $application->run(); 24 | -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/php-simple-kafka-lib/README.md: -------------------------------------------------------------------------------- 1 | # Register test schema 2 | Run the following to register the test schema: 3 | ```bash 4 | cd php-simple-kafka-lib 5 | ./console kafka-schema-registry:register:changed avroSchema 6 | ``` 7 | 8 | # Running consumer / producer 9 | ## Prerequisites 10 | Be sure to do this first: [Start containers](./../README.md) 11 | Connect to the php container: 12 | ```bash 13 | docker-compose exec php bash 14 | ``` 15 | 16 | ## Avro producer 17 | Will per default produce 10 avro messages: 18 | ```bash 19 | php 
avroProducer.php 20 | ``` 21 | 22 | ## Avro high level consumer 23 | Will consume all messages available: 24 | ```bash 25 | php avroConsumer.php 26 | ``` 27 | 28 | ## Simple producer 29 | Will per default produce 10 messages: 30 | ```bash 31 | php producer.php 32 | ``` 33 | 34 | ## Simple consumer 35 | Will consume all messages available: 36 | ```bash 37 | php consumer.php 38 | ``` 39 | -------------------------------------------------------------------------------- /src/redpanda/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | services: 3 | php: 4 | build: 5 | context: ../../docker/php 6 | dockerfile: Dockerfile.alpine 7 | args: 8 | PHP_IMAGE_TAG: 8.1-cli-alpine3.15 9 | LIBRDKAFKA_VERSION: v1.8.2 10 | PHP_EXTENSION: php-kafka/php-simple-kafka-client 11 | PHP_EXTENSION_VERSION: v0.1.4 12 | tty: true 13 | working_dir: /app 14 | volumes: 15 | - ./:/app 16 | redpanda: 17 | image: vectorized/redpanda:v21.11.2 18 | command: 19 | - "redpanda start" 20 | - "--smp 1" 21 | - "--reserve-memory 0M" 22 | - "--overprovisioned" 23 | - "--node-id 0" 24 | - "--set redpanda.enable_idempotence=true" 25 | - "--set redpanda.enable_transactions=true" 26 | - "--kafka-addr PLAINTEXT://0.0.0.0:29097,OUTSIDE://0.0.0.0:9097" 27 | - "--advertise-kafka-addr PLAINTEXT://redpanda:29097,OUTSIDE://redpanda:9097" 28 | - "--check=false" 29 | ports: 30 | - 9097:9097 31 | - 8081:8081 32 | - 29097:29097 33 | -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/pure-php/queryWatermarkOffsets.php: -------------------------------------------------------------------------------- 1 | set('client.id', 'pure-php-producer'); 13 | $conf->set('metadata.broker.list', 'kafka:9096'); 14 | $conf->set('compression.codec', 'snappy'); 15 | $conf->set('message.timeout.ms', '5000'); 16 | 17 | $producer = new Producer($conf); 18 | $topic = 
$producer->getTopicHandle('pure-php-test-topic-watermark'); 19 | $time = time(); 20 | $topic->producev( 21 | RD_KAFKA_PARTITION_UA, 22 | RD_KAFKA_MSG_F_BLOCK, // will block produce if queue is full 23 | 'special-message', 24 | 'special-key', 25 | [ 26 | 'special-header' => 'awesome' 27 | ] 28 | ); 29 | $result = $producer->flush(20000); 30 | $high = 0; 31 | $low = 0; 32 | $result = $producer->queryWatermarkOffsets('pure-php-test-topic-watermark', 0,$low, $high, 10000); 33 | 34 | echo sprintf('Lowest offset is: %d, highest offset is: %d', $low, $high) . PHP_EOL; 35 | 36 | -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/pure-php/offsetsForTimes.php: -------------------------------------------------------------------------------- 1 | set('client.id', 'pure-php-producer'); 13 | $conf->set('metadata.broker.list', 'kafka:9096'); 14 | $conf->set('compression.codec', 'snappy'); 15 | $conf->set('message.timeout.ms', '5000'); 16 | 17 | $producer = new Producer($conf); 18 | $topic = $producer->getTopicHandle('pure-php-test-topic-offsets'); 19 | $time = time(); 20 | $topic->producev( 21 | RD_KAFKA_PARTITION_UA, 22 | RD_KAFKA_MSG_F_BLOCK, // will block produce if queue is full 23 | 'special-message', 24 | 'special-key', 25 | [ 26 | 'special-header' => 'awesome' 27 | ] 28 | ); 29 | $result = $producer->flush(20000); 30 | 31 | $topicPartition = new TopicPartition('pure-php-test-topic-offsets', 0, $time); 32 | $result = $producer->offsetsForTimes([$topicPartition], 10000); 33 | var_dump($result[0]->getTopicName()); 34 | var_dump($result[0]->getPartition()); 35 | var_dump($result[0]->getOffset()); 36 | 37 | -------------------------------------------------------------------------------- /docker/php/Dockerfile.centos: -------------------------------------------------------------------------------- 1 | ARG CENTOS_VERSION 2 | 3 | FROM centos:${CENTOS_VERSION} 4 | 5 | ARG LIBRDKAFKA_VERSION 6 | ARG PHP_RDKAFKA_VERSION 7 
| ARG PHP_VERSION 8 | 9 | RUN yum install -y http://rpms.remirepo.net/enterprise/remi-release-7.rpm \ 10 | && yum install -y yum-utils \ 11 | && yum-config-manager --enable remi-php${PHP_VERSION} \ 12 | && yum install -y php php-cli php-devel php-pear zlib-devel 13 | 14 | RUN yum install -y git gcc gcc-c++ automake autoconf libtool make 15 | 16 | # Install librdkafka and ext-rdkafka 17 | RUN git clone --depth 1 --branch ${LIBRDKAFKA_VERSION} https://github.com/edenhill/librdkafka.git \ 18 | && cd librdkafka \ 19 | && ./configure \ 20 | && make \ 21 | && make install \ 22 | && git clone --depth 1 --branch ${PHP_RDKAFKA_VERSION} https://github.com/arnaud-lb/php-rdkafka.git \ 23 | && cd php-rdkafka \ 24 | && phpize \ 25 | && ./configure \ 26 | && make all -j 5 \ 27 | && make install \ 28 | && cd ../..;rm -rf librdkafka \ 29 | && echo "extension=rdkafka" >> /etc/php.d/rdkafka.ini \ 30 | && cp /usr/lib64/php/modules/rdkafka.so /usr/lib64/php/modules/rdkafka 31 | 32 | RUN echo $'#!/bin/sh \n\ 33 | set -e \n\ 34 | exec "$@" \n\ 35 | ' >> /usr/local/bin/php-entrypoint \ 36 | && chmod +x /usr/local/bin/php-entrypoint 37 | 38 | ENTRYPOINT ["php-entrypoint"] 39 | CMD ["php", "-a"] 40 | -------------------------------------------------------------------------------- /docker/php/Dockerfile.alpine: -------------------------------------------------------------------------------- 1 | ARG PHP_IMAGE_TAG 2 | 3 | FROM php:${PHP_IMAGE_TAG} 4 | 5 | ARG LIBRDKAFKA_VERSION 6 | ARG PHP_EXTENSION 7 | ARG PHP_EXTENSION_VERSION 8 | 9 | 10 | 11 | # Install packages 12 | RUN apk --no-cache upgrade && \ 13 | apk --no-cache add bash sudo git gcc g++ make autoconf \ 14 | icu openssl-dev cyrus-sasl-dev pcre-dev zstd-dev snappy-dev lz4-dev zlib-dev icu-dev wget gettext valgrind 15 | 16 | # Install librdkafka and ext-rdkafka 17 | RUN git clone --depth 1 --branch ${LIBRDKAFKA_VERSION} https://github.com/edenhill/librdkafka.git \ 18 | && cd librdkafka \ 19 | && ./configure \ 20 | && make \ 21 | && make 
install \ 22 | && git clone --depth 1 --branch ${PHP_EXTENSION_VERSION} https://github.com/${PHP_EXTENSION}.git \ 23 | && cd $(basename ${PHP_EXTENSION}) \ 24 | && phpize \ 25 | && ./configure \ 26 | && make all -j 5 \ 27 | && make install \ 28 | && cd ../..;rm -rf librdkafka 29 | 30 | # Install php extensions 31 | RUN docker-php-ext-install pcntl && \ 32 | if [ ${PHP_EXTENSION} = "arnaud-lb/php-rdkafka" ]; then \ 33 | docker-php-ext-enable rdkafka pcntl > /dev/null 2>&1; \ 34 | else \ 35 | docker-php-ext-enable simple_kafka_client pcntl > /dev/null 2>&1; \ 36 | fi 37 | 38 | # Install composer 39 | RUN curl -sS https://getcomposer.org/installer | php -- --install-dir=/usr/bin --filename=composer 40 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/php-kafka-lib/v0.1.x/producer.php: -------------------------------------------------------------------------------- 1 | withAdditionalConfig( 13 | [ 14 | // will be visible in broker logs 15 | 'client.id' => 'php-kafka-lib-producer', 16 | // set compression (supported are: none,gzip,lz4,snappy,zstd) 17 | 'compression.codec' => 'snappy', 18 | ] 19 | ) 20 | ->withAdditionalBroker('kafka:9096') 21 | ->build(); 22 | 23 | for ($i = 0; $i < 10; ++$i) { 24 | $message = KafkaProducerMessage::create('php-kafka-lib-test-topic', 0) 25 | ->withKey(sprintf('test-key-%d', $i)) 26 | ->withBody(sprintf('test message-%d',$i)) 27 | ->withHeaders( 28 | [ 29 | 'some' => 'test header' 30 | ] 31 | ); 32 | 33 | $producer->produce($message); 34 | echo sprintf('Queued message number: %d', $i) . PHP_EOL; 35 | } 36 | 37 | // Shutdown producer, flush messages that are in queue. Give up after 20s 38 | $result = $producer->flush(20000); 39 | 40 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $result) { 41 | echo 'Was not able to shutdown within 20s. Messages might be lost!' . 
PHP_EOL; 42 | } 43 | -------------------------------------------------------------------------------- /docker/docker-compose-templates/docker-compose.master.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | services: 3 | php: 4 | build: 5 | context: ../php 6 | dockerfile: ../php/Dockerfile.master 7 | args: 8 | LIBRDKAFKA_VERSION: v1.5.0 9 | tty: true 10 | working_dir: /app 11 | volumes: 12 | - ./:/app 13 | - ../php-rdkafka:/rdkafka 14 | 15 | zookeeper: 16 | image: confluentinc/cp-zookeeper:5.5.0 17 | environment: 18 | ZOOKEEPER_CLIENT_PORT: 2182 19 | ZOOKEEPER_TICK_TIME: 2000 20 | 21 | kafka: 22 | image: confluentinc/cp-kafka:5.5.0 23 | depends_on: 24 | - zookeeper 25 | ports: 26 | - 9096:9096 27 | environment: 28 | KAFKA_BROKER_ID: 1 29 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2182' 30 | KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafka:9096' 31 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 32 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 33 | KAFKA_NUM_PARTITIONS: 1 34 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 35 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 36 | 37 | kafka-schema-registry: 38 | image: confluentinc/cp-schema-registry:5.5.0 39 | depends_on: 40 | - zookeeper 41 | - kafka 42 | ports: 43 | - "9083:9083" 44 | environment: 45 | SCHEMA_REGISTRY_HOST_NAME: kafka-schema-registry 46 | SCHEMA_REGISTRY_LISTENERS: "http://0.0.0.0:9083" 47 | SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2182' 48 | SCHEMA_REGISTRY_AVRO_COMPATIBILITY_LEVEL: 'full_transitive' 49 | -------------------------------------------------------------------------------- /src/redpanda/php-simple-kafka-lib/producer.php: -------------------------------------------------------------------------------- 1 | withAdditionalConfig( 13 | [ 14 | // will be visible in broker logs 15 | 'client.id' => 'php-kafka-lib-producer', 16 | // set compression (supported are: none,gzip,lz4,snappy,zstd) 17 | 'compression.codec' => 'zstd', 18 | 19 | // 
Add additional output if you need to debug a problem 20 | // 'log_level' => (string) LOG_DEBUG, 21 | // 'debug' => 'all' 22 | ] 23 | ) 24 | ->withAdditionalBroker('redpanda:9097') 25 | ->build(); 26 | 27 | for ($i = 0; $i < 10; ++$i) { 28 | $message = KafkaProducerMessage::create('php-kafka-lib-test-topic', 0) 29 | ->withKey(sprintf('test-key-%d', $i)) 30 | ->withBody(sprintf('test message-%d',$i)) 31 | ->withHeaders( 32 | [ 33 | 'some' => 'test header' 34 | ] 35 | ); 36 | 37 | $producer->produce($message); 38 | echo sprintf('Queued message number: %d', $i) . PHP_EOL; 39 | } 40 | 41 | // Shutdown producer, flush messages that are in queue. Give up after 20s 42 | $result = $producer->flush(20000); 43 | 44 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $result) { 45 | echo 'Was not able to shutdown within 20s. Messages might be lost!' . PHP_EOL; 46 | } 47 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/php-kafka-lib/producer.php: -------------------------------------------------------------------------------- 1 | withAdditionalConfig( 13 | [ 14 | // will be visible in broker logs 15 | 'client.id' => 'php-kafka-lib-producer', 16 | // set compression (supported are: none,gzip,lz4,snappy,zstd) 17 | 'compression.codec' => 'snappy', 18 | 19 | // Add additional output if you need to debug a problem 20 | // 'log_level' => (string) LOG_DEBUG, 21 | // 'debug' => 'all' 22 | ] 23 | ) 24 | ->withAdditionalBroker('kafka:9096') 25 | ->build(); 26 | 27 | for ($i = 0; $i < 10; ++$i) { 28 | $message = KafkaProducerMessage::create('php-kafka-lib-test-topic', 0) 29 | ->withKey(sprintf('test-key-%d', $i)) 30 | ->withBody(sprintf('test message-%d',$i)) 31 | ->withHeaders( 32 | [ 33 | 'some' => 'test header' 34 | ] 35 | ); 36 | 37 | $producer->produce($message); 38 | echo sprintf('Queued message number: %d', $i) . PHP_EOL; 39 | } 40 | 41 | // Shutdown producer, flush messages that are in queue. 
Give up after 20s 42 | $result = $producer->flush(20000); 43 | 44 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $result) { 45 | echo 'Was not able to shutdown within 20s. Messages might be lost!' . PHP_EOL; 46 | } 47 | -------------------------------------------------------------------------------- /docker/docker-compose-templates/docker-compose.debug.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | services: 3 | php: 4 | build: 5 | context: ../php 6 | dockerfile: ../php/Dockerfile.debug 7 | args: 8 | PHP_VERSION: 7.4.14 9 | LIBRDKAFKA_VERSION: v1.5.3 10 | PHP_RDKAFKA_VERSION: 5.0.0 11 | tty: true 12 | working_dir: /app 13 | volumes: 14 | - ./:/app 15 | 16 | zookeeper: 17 | image: confluentinc/cp-zookeeper:5.5.0 18 | environment: 19 | ZOOKEEPER_CLIENT_PORT: 2182 20 | ZOOKEEPER_TICK_TIME: 2000 21 | 22 | kafka: 23 | image: confluentinc/cp-kafka:5.5.0 24 | depends_on: 25 | - zookeeper 26 | ports: 27 | - 9096:9096 28 | environment: 29 | KAFKA_BROKER_ID: 1 30 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2182' 31 | KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafka:9096' 32 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 33 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 34 | KAFKA_NUM_PARTITIONS: 1 35 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 36 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 37 | 38 | kafka-schema-registry: 39 | image: confluentinc/cp-schema-registry:5.5.0 40 | depends_on: 41 | - zookeeper 42 | - kafka 43 | ports: 44 | - "9083:9083" 45 | environment: 46 | SCHEMA_REGISTRY_HOST_NAME: kafka-schema-registry 47 | SCHEMA_REGISTRY_LISTENERS: "http://0.0.0.0:9083" 48 | SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2182' 49 | SCHEMA_REGISTRY_AVRO_COMPATIBILITY_LEVEL: 'full_transitive' 50 | -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/php-simple-kafka-lib/producer.php: -------------------------------------------------------------------------------- 1 | 
withAdditionalConfig( 13 | [ 14 | // will be visible in broker logs 15 | 'client.id' => 'php-kafka-lib-producer', 16 | // set compression (supported are: none,gzip,lz4,snappy,zstd) 17 | 'compression.codec' => 'snappy', 18 | 19 | // Add additional output if you need to debug a problem 20 | // 'log_level' => (string) LOG_DEBUG, 21 | // 'debug' => 'all' 22 | ] 23 | ) 24 | ->withAdditionalBroker('kafka:9096') 25 | ->build(); 26 | 27 | for ($i = 0; $i < 10; ++$i) { 28 | $message = KafkaProducerMessage::create('php-kafka-lib-test-topic', 0) 29 | ->withKey(sprintf('test-key-%d', $i)) 30 | ->withBody(sprintf('test message-%d',$i)) 31 | ->withHeaders( 32 | [ 33 | 'some' => 'test header' 34 | ] 35 | ); 36 | 37 | $producer->produce($message); 38 | echo sprintf('Queued message number: %d', $i) . PHP_EOL; 39 | } 40 | 41 | // Shutdown producer, flush messages that are in queue. Give up after 20s 42 | $result = $producer->flush(20000); 43 | 44 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $result) { 45 | echo 'Was not able to shutdown within 20s. Messages might be lost!' . 
PHP_EOL; 46 | } 47 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # PHP Kafka Examples 2 | This repository has PHP examples for Kafka consumers / producers for: 3 | - [php-rdkafka](https://github.com/arnaud-lb/php-rdkafka): Examples just using the PHP extension 4 | - [php-kafka-lib](https://github.com/jobcloud/php-kafka-lib): PHP library that relies on [php-rdkafka](https://github.com/arnaud-lb/php-rdkafka) and supports [avro](https://github.com/flix-tech/avro-serde-php) 5 | - [php-simple-kafka-client](https://github.com/php-kafka/php-simple-kafka-client): Examples just using the PHP extension 6 | - [php-simple-kafka-lib](https://github.com/php-kafka/php-simple-kafka-lib): PHP library that relies on [php-simple-kafka-client](https://github.com/php-kafka/php-simple-kafka-client) and supports [avro](https://github.com/flix-tech/avro-serde-php) 7 | 8 | ## Examples 9 | Check out these folders to see how to run the examples: 10 | - [php-rdkafka](src/ext-php-rdkafka) 11 | - [php-simple-kafka-client](src/ext-php-simple-kafka-client) 12 | 13 | ## Examples with other compatible systems 14 | - [Redpanda](src/redpanda) is a Kafka API compatible [streaming platform](https://vectorized.io/redpanda) 15 | 16 | ## Customize to fit your setup 17 | If you want to test / debug something that is closer to your setup, 18 | you can modify the following arguments in `docker-compose.yml`: 19 | ``` 20 | PHP_IMAGE_TAG: 8.1-cli-alpine3.15 21 | LIBRDKAFKA_VERSION: v1.8.2 22 | PHP_EXTENSION: php-kafka/php-simple-kafka-client 23 | PHP_EXTENSION_VERSION: v0.1.4 24 | ``` 25 | Adjust those to reflect your setup and afterwards run: 26 | ``` 27 | docker-compose up --build -d 28 | ``` 29 | -------------------------------------------------------------------------------- /docker/docker-compose-templates/docker-compose.centos.yml:
-------------------------------------------------------------------------------- 1 | version: '3.7' 2 | services: 3 | php: 4 | build: 5 | context: ../php 6 | dockerfile: ../php/Dockerfile.centos 7 | args: 8 | CENTOS_VERSION: 7 9 | PHP_VERSION: 74 10 | LIBRDKAFKA_VERSION: v1.5.0 11 | PHP_RDKAFKA_VERSION: 4.0.3 12 | tty: true 13 | working_dir: /app 14 | volumes: 15 | - ./:/app 16 | 17 | zookeeper: 18 | image: confluentinc/cp-zookeeper:5.5.0 19 | environment: 20 | ZOOKEEPER_CLIENT_PORT: 2182 21 | ZOOKEEPER_TICK_TIME: 2000 22 | 23 | kafka: 24 | image: confluentinc/cp-kafka:5.5.0 25 | depends_on: 26 | - zookeeper 27 | ports: 28 | - 9096:9096 29 | environment: 30 | KAFKA_BROKER_ID: 1 31 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2182' 32 | KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafka:9096' 33 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 34 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 35 | KAFKA_NUM_PARTITIONS: 1 36 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 37 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 38 | 39 | kafka-schema-registry: 40 | image: confluentinc/cp-schema-registry:5.5.0 41 | depends_on: 42 | - zookeeper 43 | - kafka 44 | ports: 45 | - "9083:9083" 46 | environment: 47 | SCHEMA_REGISTRY_HOST_NAME: kafka-schema-registry 48 | SCHEMA_REGISTRY_LISTENERS: "http://0.0.0.0:9083" 49 | SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2182' 50 | SCHEMA_REGISTRY_AVRO_COMPATIBILITY_LEVEL: 'full_transitive' 51 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | services: 3 | php: 4 | build: 5 | context: ../../docker/php 6 | dockerfile: Dockerfile.alpine 7 | args: 8 | PHP_IMAGE_TAG: 8.0-cli-alpine3.14 9 | LIBRDKAFKA_VERSION: v1.8.2 10 | PHP_EXTENSION: arnaud-lb/php-rdkafka 11 | PHP_EXTENSION_VERSION: 5.0.0 12 | tty: true 13 | working_dir: /app 14 | volumes: 15 | - ./:/app 16 | 17 | zookeeper: 
18 | image: confluentinc/cp-zookeeper:6.1.1 19 | environment: 20 | ZOOKEEPER_CLIENT_PORT: 2182 21 | ZOOKEEPER_TICK_TIME: 2000 22 | 23 | kafka: 24 | image: confluentinc/cp-kafka:6.1.1 25 | depends_on: 26 | - zookeeper 27 | ports: 28 | - 9096:9096 29 | environment: 30 | KAFKA_BROKER_ID: 1 31 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2182' 32 | KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafka:9096' 33 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 34 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 35 | KAFKA_NUM_PARTITIONS: 1 36 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 37 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 38 | 39 | kafka-schema-registry: 40 | image: confluentinc/cp-schema-registry:6.1.1 41 | depends_on: 42 | - zookeeper 43 | - kafka 44 | ports: 45 | - "9083:9083" 46 | environment: 47 | SCHEMA_REGISTRY_HOST_NAME: kafka-schema-registry 48 | SCHEMA_REGISTRY_LISTENERS: "http://0.0.0.0:9083" 49 | SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2182' 50 | SCHEMA_REGISTRY_AVRO_COMPATIBILITY_LEVEL: 'full_transitive' 51 | -------------------------------------------------------------------------------- /docker/docker-compose-templates/docker-compose.alpine.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | services: 3 | php: 4 | build: 5 | context: ../php 6 | dockerfile: Dockerfile.alpine 7 | args: 8 | PHP_IMAGE_TAG: 8.0-cli-alpine3.13 9 | LIBRDKAFKA_VERSION: v1.6.1 10 | PHP_EXTENSION: php-kafka/php-simple-kafka-client 11 | PHP_EXTENSION_VERSION: v0.1.1 12 | tty: true 13 | working_dir: /app 14 | volumes: 15 | - ./:/app 16 | 17 | zookeeper: 18 | image: confluentinc/cp-zookeeper:6.1.1 19 | environment: 20 | ZOOKEEPER_CLIENT_PORT: 2182 21 | ZOOKEEPER_TICK_TIME: 2000 22 | 23 | kafka: 24 | image: confluentinc/cp-kafka:6.1.1 25 | depends_on: 26 | - zookeeper 27 | ports: 28 | - 9096:9096 29 | environment: 30 | KAFKA_BROKER_ID: 1 31 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2182' 32 | KAFKA_ADVERTISED_LISTENERS: 
'PLAINTEXT://kafka:9096' 33 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 34 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 35 | KAFKA_NUM_PARTITIONS: 1 36 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 37 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 38 | 39 | kafka-schema-registry: 40 | image: confluentinc/cp-schema-registry:6.1.1 41 | depends_on: 42 | - zookeeper 43 | - kafka 44 | ports: 45 | - "9083:9083" 46 | environment: 47 | SCHEMA_REGISTRY_HOST_NAME: kafka-schema-registry 48 | SCHEMA_REGISTRY_LISTENERS: "http://0.0.0.0:9083" 49 | SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2182' 50 | SCHEMA_REGISTRY_AVRO_COMPATIBILITY_LEVEL: 'full_transitive' 51 | -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | services: 3 | php: 4 | build: 5 | context: ../../docker/php 6 | dockerfile: Dockerfile.alpine 7 | args: 8 | PHP_IMAGE_TAG: 8.1-cli-alpine3.15 9 | LIBRDKAFKA_VERSION: v1.8.2 10 | PHP_EXTENSION: php-kafka/php-simple-kafka-client 11 | PHP_EXTENSION_VERSION: v0.1.4 12 | tty: true 13 | working_dir: /app 14 | volumes: 15 | - ./:/app 16 | 17 | zookeeper: 18 | image: confluentinc/cp-zookeeper:7.3.2 19 | environment: 20 | ZOOKEEPER_CLIENT_PORT: 2182 21 | ZOOKEEPER_TICK_TIME: 2000 22 | 23 | kafka: 24 | image: confluentinc/cp-kafka:7.3.2 25 | depends_on: 26 | - zookeeper 27 | ports: 28 | - 9096:9096 29 | environment: 30 | KAFKA_BROKER_ID: 1 31 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2182' 32 | KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafka:9096' 33 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 34 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 35 | KAFKA_NUM_PARTITIONS: 1 36 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 37 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 38 | 39 | kafka-schema-registry: 40 | image: confluentinc/cp-schema-registry:7.3.2 41 | depends_on: 42 | - zookeeper 43 | - 
kafka 44 | ports: 45 | - "9083:9083" 46 | environment: 47 | SCHEMA_REGISTRY_HOST_NAME: kafka-schema-registry 48 | SCHEMA_REGISTRY_LISTENERS: "http://0.0.0.0:9083" 49 | SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'kafka:9096' 50 | SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2182' 51 | SCHEMA_REGISTRY_AVRO_COMPATIBILITY_LEVEL: 'full_transitive' 52 | -------------------------------------------------------------------------------- /docker/php/docker-php-ext-configure: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | # prefer user supplied CFLAGS, but default to our PHP_CFLAGS 5 | : ${CFLAGS:=$PHP_CFLAGS} 6 | : ${CPPFLAGS:=$PHP_CPPFLAGS} 7 | : ${LDFLAGS:=$PHP_LDFLAGS} 8 | export CFLAGS CPPFLAGS LDFLAGS 9 | 10 | srcExists= 11 | if [ -d /usr/src/php ]; then 12 | srcExists=1 13 | fi 14 | docker-php-source extract 15 | if [ -z "$srcExists" ]; then 16 | touch /usr/src/php/.docker-delete-me 17 | fi 18 | 19 | cd /usr/src/php/ext 20 | 21 | usage() { 22 | echo "usage: $0 ext-name [configure flags]" 23 | echo " ie: $0 gd --with-jpeg-dir=/usr/local/something" 24 | echo 25 | echo 'Possible values for ext-name:' 26 | find . \ 27 | -mindepth 2 \ 28 | -maxdepth 2 \ 29 | -type f \ 30 | -name 'config.m4' \ 31 | | xargs -n1 dirname \ 32 | | xargs -n1 basename \ 33 | | sort \ 34 | | xargs 35 | echo 36 | echo 'Some of the above modules are already compiled into PHP; please check' 37 | echo 'the output of "php -i" to see which modules are already loaded.' 38 | } 39 | 40 | ext="$1" 41 | if [ -z "$ext" ] || [ ! -d "$ext" ]; then 42 | usage >&2 43 | exit 1 44 | fi 45 | shift 46 | 47 | pm='unknown' 48 | if [ -e /lib/apk/db/installed ]; then 49 | pm='apk' 50 | fi 51 | 52 | if [ "$pm" = 'apk' ]; then 53 | if \ 54 | [ -n "$PHPIZE_DEPS" ] \ 55 | && ! apk info --installed .phpize-deps > /dev/null \ 56 | && ! 
apk info --installed .phpize-deps-configure > /dev/null \ 57 | ; then 58 | apk add --no-cache --virtual .phpize-deps-configure $PHPIZE_DEPS 59 | fi 60 | fi 61 | 62 | if command -v dpkg-architecture > /dev/null; then 63 | gnuArch="$(dpkg-architecture --query DEB_BUILD_GNU_TYPE)" 64 | set -- --build="$gnuArch" "$@" 65 | fi 66 | 67 | cd "$ext" 68 | phpize 69 | ./configure --enable-option-checking=fatal "$@" 70 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/php-kafka-lib/v0.1.x/highLevelConsumer.php: -------------------------------------------------------------------------------- 1 | withAdditionalConfig( 15 | [ 16 | // start at the very beginning of the topic when reading for the first time 17 | 'auto.offset.reset' => 'earliest', 18 | 19 | // will be visible in broker logs 20 | 'client.id' => 'php-kafka-lib-high-level-consumer', 21 | 22 | // SSL settings 23 | //'security.protocol' => 'ssl', 24 | //'ssl.ca.location' => __DIR__.'/../../../keys/ca.pem', 25 | //'ssl.certificate.location' => __DIR__.'/../../../keys/apl_stage.cert', 26 | //'ssl.key.location' => __DIR__.'/../../../keys/apl_stage.key', 27 | 28 | // SASL settings 29 | //'sasl.mechanisms' => '', 30 | //'ssl.endpoint.identification.algorithm' => 'https', 31 | //'sasl.username' => '', 32 | //'sasl.password' => '', 33 | ] 34 | ) 35 | ->withAdditionalBroker('kafka:9096') 36 | ->withTimeout(10000) 37 | ->withConsumerGroup('php-kafka-lib-high-level-consumer') 38 | ->withSubscription('php-kafka-lib-test-topic') 39 | ->build(); 40 | 41 | $consumer->subscribe(); 42 | 43 | while (true) { 44 | try { 45 | $message = $consumer->consume(); 46 | } catch (KafkaConsumerTimeoutException|KafkaConsumerEndOfPartitionException $e) { 47 | echo 'Didn\'t receive any messages, waiting for more...' . PHP_EOL; 48 | continue; 49 | } catch (KafkaConsumerConsumeException $e) { 50 | echo $e->getMessage() . 
PHP_EOL; 51 | continue; 52 | } 53 | 54 | echo sprintf( 55 | 'Read message with key:%s payload:%s topic:%s partition:%d offset:%d headers:%s', 56 | $message->getKey(), 57 | $message->getBody(), 58 | $message->getTopicName(), 59 | $message->getPartition(), 60 | $message->getOffset(), 61 | implode(',', $message->getHeaders()) 62 | ) . PHP_EOL; 63 | 64 | $consumer->commit($message); 65 | } 66 | -------------------------------------------------------------------------------- /docker/php/Dockerfile.master: -------------------------------------------------------------------------------- 1 | FROM alpine:3.12 2 | 3 | ENV PHP_VERSION nightly 4 | ENV PHP_INI_DIR /usr/local/etc/php 5 | ARG LIBRDKAFKA_VERSION 6 | 7 | # Install packages 8 | RUN apk --no-cache upgrade && \ 9 | apk --no-cache add bash openssh sudo git gcc g++ make autoconf \ 10 | icu libssl1.1 openssl-dev pcre-dev zlib-dev icu-dev wget gettext valgrind 11 | 12 | RUN set -xe \ 13 | && apk add --no-cache --virtual .persistent-deps \ 14 | ca-certificates \ 15 | curl \ 16 | tar \ 17 | xz \ 18 | git 19 | 20 | RUN set -xe \ 21 | && apk add --no-cache --virtual .build-deps \ 22 | autoconf \ 23 | file \ 24 | g++ \ 25 | gcc \ 26 | libc-dev \ 27 | make \ 28 | pkgconf \ 29 | re2c \ 30 | curl-dev \ 31 | libedit-dev \ 32 | libxml2-dev \ 33 | openssl-dev \ 34 | oniguruma-dev \ 35 | sqlite-dev \ 36 | bison \ 37 | libbz2 \ 38 | bzip2-dev \ 39 | && mkdir -p $PHP_INI_DIR/conf.d \ 40 | && git clone https://github.com/php/php-src.git /usr/src/php \ 41 | && cd /usr/src/php \ 42 | && git checkout ae944503c34d7c89433eaefe607fc0d631d6f9ac \ 43 | && ./buildconf --force \ 44 | && ./configure \ 45 | --with-config-file-path="$PHP_INI_DIR" \ 46 | --with-config-file-scan-dir="$PHP_INI_DIR/conf.d" \ 47 | --disable-cgi \ 48 | --enable-ftp \ 49 | --enable-debug \ 50 | --enable-mbstring \ 51 | --enable-mysqlnd \ 52 | --with-curl \ 53 | --with-libedit \ 54 | --with-openssl \ 55 | --with-zlib \ 56 | --with-bz2 \ 57 | --without-pear \ 58 | && make 
-j"$(getconf _NPROCESSORS_ONLN)" \ 59 | && make install \ 60 | && rm -rf /usr/src/php \ 61 | && runDeps="$( \ 62 | scanelf --needed --nobanner --recursive /usr/local \ 63 | | awk '{ gsub(/,/, "\nso:", $2); print "so:" $2 }' \ 64 | | sort -u \ 65 | | xargs -r apk info --installed \ 66 | | sort -u \ 67 | )" \ 68 | && apk add --no-cache --virtual .php-rundeps $runDeps \ 69 | && apk del .build-deps 70 | 71 | # Install librdkafka 72 | RUN git clone --depth 1 --branch ${LIBRDKAFKA_VERSION} https://github.com/edenhill/librdkafka.git \ 73 | && cd librdkafka \ 74 | && ./configure \ 75 | && make \ 76 | && make install 77 | 78 | CMD ["php", "-a"] 79 | -------------------------------------------------------------------------------- /src/redpanda/php-simple-kafka-lib/consumer.php: -------------------------------------------------------------------------------- 1 | withAdditionalConfig( 15 | [ 16 | // start at the very beginning of the topic when reading for the first time 17 | 'auto.offset.reset' => 'earliest', 18 | 19 | // will be visible in broker logs 20 | 'client.id' => 'php-kafka-lib-high-level-consumer', 21 | 22 | // SSL settings 23 | //'security.protocol' => 'ssl', 24 | //'ssl.ca.location' => __DIR__.'/../../../keys/ca.pem', 25 | //'ssl.certificate.location' => __DIR__.'/../../../keys/apl_stage.cert', 26 | //'ssl.key.location' => __DIR__.'/../../../keys/apl_stage.key', 27 | 28 | // SASL settings 29 | //'sasl.mechanisms' => '', 30 | //'ssl.endpoint.identification.algorithm' => 'https', 31 | //'sasl.username' => '', 32 | //'sasl.password' => '', 33 | 34 | // Add additional output if you need to debug a problem 35 | // 'log_level' => (string) LOG_DEBUG, 36 | // 'debug' => 'all' 37 | ] 38 | ) 39 | ->withAdditionalBroker('redpanda:9097') 40 | ->withConsumerGroup('php-kafka-lib-high-level-consumer') 41 | ->withSubscription('php-kafka-lib-test-topic') 42 | ->build(); 43 | 44 | $consumer->subscribe(); 45 | 46 | while (true) { 47 | try { 48 | $message = $consumer->consume(10000); 
49 | } catch (KafkaConsumerTimeoutException|KafkaConsumerEndOfPartitionException $e) { 50 | echo 'Didn\'t receive any messages, waiting for more...' . PHP_EOL; 51 | continue; 52 | } catch (KafkaConsumerConsumeException $e) { 53 | echo $e->getMessage() . PHP_EOL; 54 | continue; 55 | } 56 | 57 | echo sprintf( 58 | 'Read message with key:%s payload:%s topic:%s partition:%d offset:%d headers:%s', 59 | $message->getKey(), 60 | $message->getBody(), 61 | $message->getTopicName(), 62 | $message->getPartition(), 63 | $message->getOffset(), 64 | implode(',', $message->getHeaders()) 65 | ) . PHP_EOL; 66 | 67 | $consumer->commit($message); 68 | } 69 | -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/php-simple-kafka-lib/consumer.php: -------------------------------------------------------------------------------- 1 | withAdditionalConfig( 15 | [ 16 | // start at the very beginning of the topic when reading for the first time 17 | 'auto.offset.reset' => 'earliest', 18 | 19 | // will be visible in broker logs 20 | 'client.id' => 'php-kafka-lib-high-level-consumer', 21 | 22 | // SSL settings 23 | //'security.protocol' => 'ssl', 24 | //'ssl.ca.location' => __DIR__.'/../../../keys/ca.pem', 25 | //'ssl.certificate.location' => __DIR__.'/../../../keys/apl_stage.cert', 26 | //'ssl.key.location' => __DIR__.'/../../../keys/apl_stage.key', 27 | 28 | // SASL settings 29 | //'sasl.mechanisms' => '', 30 | //'ssl.endpoint.identification.algorithm' => 'https', 31 | //'sasl.username' => '', 32 | //'sasl.password' => '', 33 | 34 | // Add additional output if you need to debug a problem 35 | // 'log_level' => (string) LOG_DEBUG, 36 | // 'debug' => 'all' 37 | ] 38 | ) 39 | ->withAdditionalBroker('kafka:9096') 40 | ->withConsumerGroup('php-kafka-lib-high-level-consumer') 41 | ->withSubscription('php-kafka-lib-test-topic') 42 | ->build(); 43 | 44 | $consumer->subscribe(); 45 | 46 | while (true) { 47 | try { 48 | $message = 
$consumer->consume(10000); 49 | } catch (KafkaConsumerTimeoutException|KafkaConsumerEndOfPartitionException $e) { 50 | echo 'Didn\'t receive any messages, waiting for more...' . PHP_EOL; 51 | continue; 52 | } catch (KafkaConsumerConsumeException $e) { 53 | echo $e->getMessage() . PHP_EOL; 54 | continue; 55 | } 56 | 57 | echo sprintf( 58 | 'Read message with key:%s payload:%s topic:%s partition:%d offset:%d headers:%s', 59 | $message->getKey(), 60 | $message->getBody(), 61 | $message->getTopicName(), 62 | $message->getPartition(), 63 | $message->getOffset(), 64 | implode(',', $message->getHeaders()) 65 | ) . PHP_EOL; 66 | 67 | $consumer->commit($message); 68 | } 69 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/php-kafka-lib/consumer.php: -------------------------------------------------------------------------------- 1 | withAdditionalConfig( 15 | [ 16 | // start at the very beginning of the topic when reading for the first time 17 | 'auto.offset.reset' => 'earliest', 18 | 19 | // will be visible in broker logs 20 | 'client.id' => 'php-kafka-lib-high-level-consumer', 21 | 22 | // SSL settings 23 | //'security.protocol' => 'ssl', 24 | //'ssl.ca.location' => __DIR__.'/../../../keys/ca.pem', 25 | //'ssl.certificate.location' => __DIR__.'/../../../keys/apl_stage.cert', 26 | //'ssl.key.location' => __DIR__.'/../../../keys/apl_stage.key', 27 | 28 | // SASL settings 29 | //'sasl.mechanisms' => '', 30 | //'ssl.endpoint.identification.algorithm' => 'https', 31 | //'sasl.username' => '', 32 | //'sasl.password' => '', 33 | 34 | // Add additional output if you need to debug a problem 35 | // 'log_level' => (string) LOG_DEBUG, 36 | // 'debug' => 'all' 37 | ] 38 | ) 39 | ->withAdditionalBroker('kafka:9096') 40 | ->withConsumerGroup('php-kafka-lib-high-level-consumer') 41 | ->withSubscription('php-kafka-lib-test-topic') 42 | ->build(); 43 | 44 | $consumer->subscribe(); 45 | 46 | while (true) { 47 | try { 48 | $message = 
$consumer->consume(10000); 49 | } catch (KafkaConsumerTimeoutException|KafkaConsumerEndOfPartitionException $e) { 50 | echo 'Didn\'t receive any messages, waiting for more...' . PHP_EOL; 51 | continue; 52 | } catch (KafkaConsumerConsumeException $e) { 53 | echo $e->getMessage() . PHP_EOL; 54 | continue; 55 | } 56 | 57 | echo sprintf( 58 | 'Read message with key:%s payload:%s topic:%s partition:%d offset:%d headers:%s', 59 | $message->getKey(), 60 | $message->getBody(), 61 | $message->getTopicName(), 62 | $message->getPartition(), 63 | $message->getOffset(), 64 | implode(',', $message->getHeaders()) 65 | ) . PHP_EOL; 66 | 67 | $consumer->commit($message); 68 | } 69 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/php-kafka-lib/v0.1.x/lowLevelConsumer.php: -------------------------------------------------------------------------------- 1 | withAdditionalConfig( 17 | [ 18 | // start at the very beginning of the topic when reading for the first time 19 | 'auto.offset.reset' => 'earliest', 20 | 21 | // control how fast a commited offset will be synced to the broker 22 | //'auto.commit.interval.ms' => 100, 23 | // will be visible in broker logs 24 | 'client.id' => 'php-kafka-lib-low-level-consumer', 25 | 26 | // SSL settings 27 | //'security.protocol' => 'ssl', 28 | //'ssl.ca.location' => __DIR__.'/../../../keys/ca.pem', 29 | //'ssl.certificate.location' => __DIR__.'/../../../keys/apl_stage.cert', 30 | //'ssl.key.location' => __DIR__.'/../../../keys/apl_stage.key', 31 | 32 | // SASL settings 33 | //'sasl.mechanisms' => '', 34 | //'ssl.endpoint.identification.algorithm' => 'https', 35 | //'sasl.username' => '', 36 | //'sasl.password' => '', 37 | ] 38 | ) 39 | ->withAdditionalBroker('kafka:9096') 40 | ->withTimeout(10000) 41 | ->withConsumerGroup('php-kafka-lib-low-level-consumer') 42 | ->withConsumerType(KafkaConsumerBuilder::CONSUMER_TYPE_LOW_LEVEL) 43 | ->withSubscription( 44 | 'php-kafka-lib-test-topic' 45 | // 
optional param - partitions: if none are given, we will query the topic and subscribe to all partitions, like the high level consumer 46 | ) 47 | ->build(); 48 | 49 | $consumer->subscribe(); 50 | 51 | while (true) { 52 | try { 53 | $message = $consumer->consume(); 54 | } catch (KafkaConsumerTimeoutException|KafkaConsumerEndOfPartitionException $e) { 55 | echo 'Didn\'t receive any messages, waiting for more...' . PHP_EOL; 56 | continue; 57 | } catch (KafkaConsumerConsumeException $e) { 58 | echo $e->getMessage() . PHP_EOL; 59 | continue; 60 | } 61 | 62 | echo sprintf( 63 | 'Read message with key:%s payload:%s topic:%s partition:%d offset:%d headers:%s', 64 | $message->getKey(), 65 | $message->getBody(), 66 | $message->getTopicName(), 67 | $message->getPartition(), 68 | $message->getOffset(), 69 | implode(',', $message->getHeaders()) 70 | ) . PHP_EOL; 71 | 72 | $consumer->commit($message); 73 | } 74 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/pure-php/consumer.php: -------------------------------------------------------------------------------- 1 | set('client.id', 'pure-php-high-level-consumer'); 9 | // set consumer group, e.g. 
-consumer 10 | $conf->set('group.id', 'pure-php-high-level-consumer'); 11 | // set broker 12 | $conf->set('metadata.broker.list', 'kafka:9096'); 13 | // don't auto commit, give the application the control to do that (default is: true) 14 | $conf->set('enable.auto.commit', 'false'); 15 | // start at the very beginning of the topic when reading for the first time 16 | $conf->set('auto.offset.reset', 'earliest'); 17 | // Get eof code instead of null 18 | $conf->set('enable.partition.eof', 'true'); 19 | 20 | // SASL Authentication 21 | //$conf->set('sasl.mechanisms', ''); 22 | //$conf->set('ssl.endpoint.identification.algorithm', 'https'); 23 | //$conf->set('sasl.username', ''); 24 | //$conf->set('sasl.password', ''); 25 | 26 | // SSL Authentication 27 | //$conf->set('security.protocol', 'ssl'); 28 | //$conf->set('ssl.ca.location', __DIR__.'/../../../keys/ca.pem'); 29 | //$conf->set('ssl.certificate.location', __DIR__.'/../../../keys/kafka.cert'); 30 | //$conf->set('ssl.key.location', __DIR__.'/../../../keys/kafka.key'); 31 | 32 | // Add additional output if you need to debug a problem 33 | // $conf->set('log_level', (string) LOG_DEBUG); 34 | // $conf->set('debug', 'all'); 35 | 36 | // Create high level consumer 37 | $consumer = new KafkaConsumer($conf); 38 | 39 | // Subscribe to one or multiple topics 40 | $consumer->subscribe(['pure-php-test-topic']); 41 | 42 | while (true) { 43 | // Try to consume messages for the given timout (20s) 44 | $message = $consumer->consume(20000); 45 | 46 | if (RD_KAFKA_RESP_ERR__PARTITION_EOF === $message->err) { 47 | echo 'Reached end of partition, waiting for more messages...' . PHP_EOL; 48 | continue; 49 | } else if (RD_KAFKA_RESP_ERR__TIMED_OUT === $message->err) { 50 | echo 'Timed out without receiving a new message, waiting for more messages...' . PHP_EOL; 51 | continue; 52 | } else if (RD_KAFKA_RESP_ERR_NO_ERROR !== $message->err) { 53 | echo rd_kafka_err2str($message->err) . 
PHP_EOL; 54 | continue; 55 | } 56 | 57 | echo sprintf( 58 | 'Read message with key:%s payload:%s topic:%s partition:%d offset:%d', 59 | $message->key, 60 | $message->payload, 61 | $message->topic_name, 62 | $message->partition, 63 | $message->offset 64 | ) . PHP_EOL; 65 | // Here is where you do your business logic to process your message 66 | // after you have done so, commit the message offset to the broker 67 | 68 | // commit the message(s) offset synchronous back to the broker 69 | $consumer->commit($message); 70 | 71 | // you can also commit the message(s) offset in an async manner, which is slightly faster 72 | // but poses of course the challenge of handling errors in an async manner as well 73 | //$consumer->commitAsync($message); 74 | } 75 | -------------------------------------------------------------------------------- /src/redpanda/pure-php/consumer.php: -------------------------------------------------------------------------------- 1 | set('client.id', 'pure-php-high-level-consumer'); 9 | // set consumer group, e.g. 
-consumer 10 | $conf->set('group.id', 'pure-php-high-level-consumer'); 11 | // set broker 12 | $conf->set('metadata.broker.list', 'redpanda:9097'); 13 | // don't auto commit, give the application the control to do that (default is: true) 14 | $conf->set('enable.auto.commit', 'false'); 15 | // start at the very beginning of the topic when reading for the first time 16 | $conf->set('auto.offset.reset', 'earliest'); 17 | // Get eof code instead of null 18 | $conf->set('enable.partition.eof', 'true'); 19 | 20 | // SASL Authentication 21 | //$conf->set('sasl.mechanisms', ''); 22 | //$conf->set('ssl.endpoint.identification.algorithm', 'https'); 23 | //$conf->set('sasl.username', ''); 24 | //$conf->set('sasl.password', ''); 25 | 26 | // SSL Authentication 27 | //$conf->set('security.protocol', 'ssl'); 28 | //$conf->set('ssl.ca.location', __DIR__.'/../../../keys/ca.pem'); 29 | //$conf->set('ssl.certificate.location', __DIR__.'/../../../keys/kafka.cert'); 30 | //$conf->set('ssl.key.location', __DIR__.'/../../../keys/kafka.key'); 31 | 32 | // Add additional output if you need to debug a problem 33 | // $conf->set('log_level', (string) LOG_DEBUG); 34 | // $conf->set('debug', 'all'); 35 | 36 | // Create high level consumer 37 | $consumer = new Consumer($conf); 38 | 39 | // Subscribe to one or multiple topics 40 | $consumer->subscribe(['pure-php-test-topic', 'pure-php-transactional-test-topic']); 41 | 42 | while (true) { 43 | // Try to consume messages for the given timout (20s) 44 | $message = $consumer->consume(20000); 45 | 46 | if (RD_KAFKA_RESP_ERR__PARTITION_EOF === $message->err) { 47 | echo 'Reached end of partition, waiting for more messages...' . PHP_EOL; 48 | continue; 49 | } else if (RD_KAFKA_RESP_ERR__TIMED_OUT === $message->err) { 50 | echo 'Timed out without receiving a new message, waiting for more messages...' . PHP_EOL; 51 | continue; 52 | } else if (RD_KAFKA_RESP_ERR_NO_ERROR !== $message->err) { 53 | echo kafka_err2str($message->err) . 
PHP_EOL; 54 | continue; 55 | } 56 | 57 | echo sprintf( 58 | 'Read message with key:%s payload:%s topic:%s partition:%d offset:%d', 59 | $message->key, 60 | $message->payload, 61 | $message->topic_name, 62 | $message->partition, 63 | $message->offset 64 | ) . PHP_EOL; 65 | // Here is where you do your business logic to process your message 66 | // after you have done so, commit the message offset to the broker 67 | 68 | // commit the message(s) offset synchronous back to the broker 69 | $consumer->commit($message); 70 | 71 | // you can also commit the message(s) offset in an async manner, which is slightly faster 72 | // but poses of course the challenge of handling errors in an async manner as well 73 | //$consumer->commitAsync($message); 74 | } 75 | -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/pure-php/consumer.php: -------------------------------------------------------------------------------- 1 | set('client.id', 'pure-php-high-level-consumer'); 9 | // set consumer group, e.g. 
-consumer 10 | $conf->set('group.id', 'pure-php-high-level-consumer'); 11 | // set broker 12 | $conf->set('metadata.broker.list', 'kafka:9096'); 13 | // don't auto commit, give the application the control to do that (default is: true) 14 | $conf->set('enable.auto.commit', 'false'); 15 | // start at the very beginning of the topic when reading for the first time 16 | $conf->set('auto.offset.reset', 'earliest'); 17 | // Get eof code instead of null 18 | $conf->set('enable.partition.eof', 'true'); 19 | 20 | // SASL Authentication 21 | //$conf->set('sasl.mechanisms', ''); 22 | //$conf->set('ssl.endpoint.identification.algorithm', 'https'); 23 | //$conf->set('sasl.username', ''); 24 | //$conf->set('sasl.password', ''); 25 | 26 | // SSL Authentication 27 | //$conf->set('security.protocol', 'ssl'); 28 | //$conf->set('ssl.ca.location', __DIR__.'/../../../keys/ca.pem'); 29 | //$conf->set('ssl.certificate.location', __DIR__.'/../../../keys/kafka.cert'); 30 | //$conf->set('ssl.key.location', __DIR__.'/../../../keys/kafka.key'); 31 | 32 | // Add additional output if you need to debug a problem 33 | // $conf->set('log_level', (string) LOG_DEBUG); 34 | // $conf->set('debug', 'all'); 35 | 36 | // Create high level consumer 37 | $consumer = new Consumer($conf); 38 | 39 | // Subscribe to one or multiple topics 40 | $consumer->subscribe(['pure-php-test-topic', 'pure-php-transactional-test-topic']); 41 | 42 | while (true) { 43 | // Try to consume messages for the given timout (20s) 44 | $message = $consumer->consume(20000); 45 | 46 | if (RD_KAFKA_RESP_ERR__PARTITION_EOF === $message->err) { 47 | echo 'Reached end of partition, waiting for more messages...' . PHP_EOL; 48 | continue; 49 | } else if (RD_KAFKA_RESP_ERR__TIMED_OUT === $message->err) { 50 | echo 'Timed out without receiving a new message, waiting for more messages...' . PHP_EOL; 51 | continue; 52 | } else if (RD_KAFKA_RESP_ERR_NO_ERROR !== $message->err) { 53 | echo kafka_err2str($message->err) . 
PHP_EOL; 54 | continue; 55 | } 56 | 57 | echo sprintf( 58 | 'Read message with key:%s payload:%s topic:%s partition:%d offset:%d', 59 | $message->key, 60 | $message->payload, 61 | $message->topic_name, 62 | $message->partition, 63 | $message->offset 64 | ) . PHP_EOL; 65 | // Here is where you do your business logic to process your message 66 | // after you have done so, commit the message offset to the broker 67 | 68 | // commit the message(s) offset synchronous back to the broker 69 | $consumer->commit($message); 70 | 71 | // you can also commit the message(s) offset in an async manner, which is slightly faster 72 | // but poses of course the challenge of handling errors in an async manner as well 73 | //$consumer->commitAsync($message); 74 | } 75 | -------------------------------------------------------------------------------- /docker/php/docker-php-ext-install: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | # prefer user supplied CFLAGS, but default to our PHP_CFLAGS 5 | : ${CFLAGS:=$PHP_CFLAGS} 6 | : ${CPPFLAGS:=$PHP_CPPFLAGS} 7 | : ${LDFLAGS:=$PHP_LDFLAGS} 8 | export CFLAGS CPPFLAGS LDFLAGS 9 | 10 | srcExists= 11 | if [ -d /usr/src/php ]; then 12 | srcExists=1 13 | fi 14 | docker-php-source extract 15 | if [ -z "$srcExists" ]; then 16 | touch /usr/src/php/.docker-delete-me 17 | fi 18 | 19 | cd /usr/src/php/ext 20 | 21 | usage() { 22 | echo "usage: $0 [-jN] [--ini-name file.ini] ext-name [ext-name ...]" 23 | echo " ie: $0 gd mysqli" 24 | echo " $0 pdo pdo_mysql" 25 | echo " $0 -j5 gd mbstring mysqli pdo pdo_mysql shmop" 26 | echo 27 | echo 'if custom ./configure arguments are necessary, see docker-php-ext-configure' 28 | echo 29 | echo 'Possible values for ext-name:' 30 | find . 
\
		-mindepth 2 \
		-maxdepth 2 \
		-type f \
		-name 'config.m4' \
	| xargs -n1 dirname \
	| xargs -n1 basename \
	| sort \
	| xargs
	echo
	echo 'Some of the above modules are already compiled into PHP; please check'
	echo 'the output of "php -i" to see which modules are already loaded.'
}

# Parse flags; on bad usage, print help to stderr and fail the command substitution.
opts="$(getopt -o 'h?j:' --long 'help,ini-name:,jobs:' -- "$@" || { usage >&2 && false; })"
eval set -- "$opts"

j=1
iniName=
while true; do
	flag="$1"
	shift
	case "$flag" in
		--help|-h|'-?') usage && exit 0 ;;
		--ini-name) iniName="$1" && shift ;;
		--jobs|-j) j="$1" && shift ;;
		--) break ;;
		*)
			{
				echo "error: unknown flag: $flag"
				usage
			} >&2
			exit 1
			;;
	esac
done

# Collect the requested extensions; each must exist as a directory under /usr/src/php/ext.
exts=
for ext; do
	if [ -z "$ext" ]; then
		continue
	fi
	if [ ! -d "$ext" ]; then
		echo >&2 "error: $PWD/$ext does not exist"
		echo >&2
		usage >&2
		exit 1
	fi
	exts="$exts $ext"
done

if [ -z "$exts" ]; then
	usage >&2
	exit 1
fi

# Detect the package manager; the apk db file marks an Alpine system.
pm='unknown'
if [ -e /lib/apk/db/installed ]; then
	pm='apk'
fi

# On Alpine, ensure phpize build deps are installed; remember which virtual
# package we added so it can be removed again after the build.
apkDel=
if [ "$pm" = 'apk' ]; then
	if [ -n "$PHPIZE_DEPS" ]; then
		if apk info --installed .phpize-deps-configure > /dev/null; then
			apkDel='.phpize-deps-configure'
		elif !
apk info --installed .phpize-deps > /dev/null; then 97 | apk add --no-cache --virtual .phpize-deps $PHPIZE_DEPS 98 | apkDel='.phpize-deps' 99 | fi 100 | fi 101 | fi 102 | 103 | popDir="$PWD" 104 | for ext in $exts; do 105 | cd "$ext" 106 | [ -e Makefile ] || docker-php-ext-configure "$ext" 107 | make -j"$j" 108 | make -j"$j" install 109 | find modules \ 110 | -maxdepth 1 \ 111 | -name '*.so' \ 112 | -exec basename '{}' ';' \ 113 | | xargs -r docker-php-ext-enable ${iniName:+--ini-name "$iniName"} 114 | make -j"$j" clean 115 | cd "$popDir" 116 | done 117 | 118 | if [ "$pm" = 'apk' ] && [ -n "$apkDel" ]; then 119 | apk del --no-network $apkDel 120 | fi 121 | 122 | if [ -e /usr/src/php/.docker-delete-me ]; then 123 | docker-php-source delete 124 | fi 125 | -------------------------------------------------------------------------------- /src/redpanda/pure-php/metadata.php: -------------------------------------------------------------------------------- 1 | set('client.id', 'pure-php-producer'); 11 | // set broker 12 | $conf->set('metadata.broker.list', 'redpanda:9097'); 13 | // set compression (supported are: none,gzip,lz4,snappy,zstd) 14 | 15 | // SASL Authentication 16 | // can be SASL_PLAINTEXT, SASL_SSL 17 | // conf->set('security.protocol', ''); 18 | // can be GSSAPI, PLAIN, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER 19 | // $conf->set('sasl.mechanisms', ''); 20 | // $conf->set('sasl.username', ''); 21 | // $conf->set('sasl.password', ''); 22 | // default is none 23 | // $conf->set('ssl.endpoint.identification.algorithm', 'https'); 24 | 25 | 26 | // SSL Authentication 27 | //$conf->set('security.protocol', 'ssl'); 28 | //$conf->set('ssl.ca.location', __DIR__.'/../keys/ca.pem'); 29 | //$conf->set('ssl.certificate.location', __DIR__.'/../keys/kafka.cert'); 30 | //$conf->set('ssl.key.location', __DIR__.'/../keys/kafka.key'); 31 | 32 | // Add additional output if you need to debug a problem 33 | // $conf->set('log_level', (string) LOG_DEBUG); 34 | // 
$conf->set('debug', 'all');

$producer = new Producer($conf);

// get metadata
// NOTE(review): first arg false presumably restricts the request to locally
// known topics rather than all cluster topics — confirm against extension docs.
// Second arg: wait at most 10s for the broker to answer.
$metadata = $producer->getMetadata(false, 10000);
echo sprintf('Broker id: %d', $metadata->getOrigBrokerId()) . PHP_EOL;
echo sprintf('Broker name: %s', $metadata->getOrigBrokerName()) . PHP_EOL;

echo 'Info about full broker list' . PHP_EOL;
// Broker collection exposes an iterator-style API (valid/current/next).
$brokers = $metadata->getBrokers();
while ($brokers->valid()) {
    echo sprintf('Broker id: %d', $brokers->current()->getId()) . PHP_EOL;
    echo sprintf('Broker host: %s', $brokers->current()->getHost()) . PHP_EOL;
    echo sprintf('Broker port: %d', $brokers->current()->getPort()) . PHP_EOL;
    $brokers->next();
}

echo 'Info about topics' . PHP_EOL;
$topics = $metadata->getTopics();
while ($topics->valid()) {
    echo sprintf('Topic name: %s', $topics->current()->getName()) . PHP_EOL;
    echo sprintf('Topic error: %d', $topics->current()->getErrorCode()) . PHP_EOL;
    // Per-partition details: id, error code, leader broker id.
    $partitions = $topics->current()->getPartitions();
    while ($partitions->valid()) {
        echo sprintf(' Topic partition id: %d', $partitions->current()->getId()) . PHP_EOL;
        echo sprintf(' Topic partition err: %d', $partitions->current()->getErrorCode()) . PHP_EOL;
        echo sprintf(' Topic partition leader id: %d', $partitions->current()->getLeader()) . PHP_EOL;
        // Broker ids holding a replica of this partition.
        $replicas = $partitions->current()->getReplicas();
        while ($replicas->valid()) {
            echo sprintf(' Replicas id: %d', $replicas->current()) . PHP_EOL;
            $replicas->next();
        }
        // Broker ids currently in the in-sync replica (ISR) set.
        $inSyncReplicas = $partitions->current()->getIsrs();
        while ($inSyncReplicas->valid()) {
            echo sprintf(' Insync Replicas id: %d', $inSyncReplicas->current()) .
PHP_EOL; 70 | $inSyncReplicas->next(); 71 | } 72 | $partitions->next(); 73 | } 74 | 75 | $topics->next(); 76 | } -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/pure-php/metadata.php: -------------------------------------------------------------------------------- 1 | set('client.id', 'pure-php-producer'); 11 | // set broker 12 | $conf->set('metadata.broker.list', 'kafka:9096'); 13 | // set compression (supported are: none,gzip,lz4,snappy,zstd) 14 | 15 | // SASL Authentication 16 | // can be SASL_PLAINTEXT, SASL_SSL 17 | // conf->set('security.protocol', ''); 18 | // can be GSSAPI, PLAIN, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER 19 | // $conf->set('sasl.mechanisms', ''); 20 | // $conf->set('sasl.username', ''); 21 | // $conf->set('sasl.password', ''); 22 | // default is none 23 | // $conf->set('ssl.endpoint.identification.algorithm', 'https'); 24 | 25 | 26 | // SSL Authentication 27 | //$conf->set('security.protocol', 'ssl'); 28 | //$conf->set('ssl.ca.location', __DIR__.'/../keys/ca.pem'); 29 | //$conf->set('ssl.certificate.location', __DIR__.'/../keys/kafka.cert'); 30 | //$conf->set('ssl.key.location', __DIR__.'/../keys/kafka.key'); 31 | 32 | // Add additional output if you need to debug a problem 33 | // $conf->set('log_level', (string) LOG_DEBUG); 34 | // $conf->set('debug', 'all'); 35 | 36 | $producer = new Producer($conf); 37 | 38 | // get metadata 39 | $metadata = $producer->getMetadata(false, 10000); 40 | echo sprintf('Broker id: %d', $metadata->getOrigBrokerId()) . PHP_EOL; 41 | echo sprintf('Broker name: %s', $metadata->getOrigBrokerName()) . PHP_EOL; 42 | 43 | echo 'Info about full broker list' . PHP_EOL; 44 | $brokers = $metadata->getBrokers(); 45 | while ($brokers->valid()) { 46 | echo sprintf('Broker id: %d', $brokers->current()->getId()) . PHP_EOL; 47 | echo sprintf('Broker host: %s', $brokers->current()->getHost()) . 
PHP_EOL;
    echo sprintf('Broker port: %d', $brokers->current()->getPort()) . PHP_EOL;
    $brokers->next();
}

echo 'Info about topics' . PHP_EOL;
$topics = $metadata->getTopics();
while ($topics->valid()) {
    echo sprintf('Topic name: %s', $topics->current()->getName()) . PHP_EOL;
    echo sprintf('Topic error: %d', $topics->current()->getErrorCode()) . PHP_EOL;
    // Per-partition details: id, error code, leader broker id.
    $partitions = $topics->current()->getPartitions();
    while ($partitions->valid()) {
        echo sprintf(' Topic partition id: %d', $partitions->current()->getId()) . PHP_EOL;
        echo sprintf(' Topic partition err: %d', $partitions->current()->getErrorCode()) . PHP_EOL;
        echo sprintf(' Topic partition leader id: %d', $partitions->current()->getLeader()) . PHP_EOL;
        // Broker ids holding a replica of this partition.
        $replicas = $partitions->current()->getReplicas();
        while ($replicas->valid()) {
            echo sprintf(' Replicas id: %d', $replicas->current()) . PHP_EOL;
            $replicas->next();
        }
        // Broker ids currently in the in-sync replica (ISR) set.
        $inSyncReplicas = $partitions->current()->getIsrs();
        while ($inSyncReplicas->valid()) {
            echo sprintf(' Insync Replicas id: %d', $inSyncReplicas->current()) .
PHP_EOL; 70 | $inSyncReplicas->next(); 71 | } 72 | $partitions->next(); 73 | } 74 | 75 | $topics->next(); 76 | } -------------------------------------------------------------------------------- /docker/php/docker-php-ext-enable: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | extDir="$(php -d 'display_errors=stderr' -r 'echo ini_get("extension_dir");')" 5 | cd "$extDir" 6 | 7 | usage() { 8 | echo "usage: $0 [options] module-name [module-name ...]" 9 | echo " ie: $0 gd mysqli" 10 | echo " $0 pdo pdo_mysql" 11 | echo " $0 --ini-name 0-apc.ini apcu apc" 12 | echo 13 | echo 'Possible values for module-name:' 14 | find -maxdepth 1 \ 15 | -type f \ 16 | -name '*.so' \ 17 | -exec basename '{}' ';' \ 18 | | sort \ 19 | | xargs 20 | echo 21 | echo 'Some of the above modules are already compiled into PHP; please check' 22 | echo 'the output of "php -i" to see which modules are already loaded.' 23 | } 24 | 25 | opts="$(getopt -o 'h?' --long 'help,ini-name:' -- "$@" || { usage >&2 && false; })" 26 | eval set -- "$opts" 27 | 28 | iniName= 29 | while true; do 30 | flag="$1" 31 | shift 32 | case "$flag" in 33 | --help|-h|'-?') usage && exit 0 ;; 34 | --ini-name) iniName="$1" && shift ;; 35 | --) break ;; 36 | *) 37 | { 38 | echo "error: unknown flag: $flag" 39 | usage 40 | } >&2 41 | exit 1 42 | ;; 43 | esac 44 | done 45 | 46 | modules= 47 | for module; do 48 | if [ -z "$module" ]; then 49 | continue 50 | fi 51 | if ! [ -f "$module" ] && ! [ -f "$module.so" ]; then 52 | echo >&2 "error: '$module' does not exist" 53 | echo >&2 54 | usage >&2 55 | exit 1 56 | fi 57 | modules="$modules $module" 58 | done 59 | 60 | if [ -z "$modules" ]; then 61 | usage >&2 62 | exit 1 63 | fi 64 | 65 | pm='unknown' 66 | if [ -e /lib/apk/db/installed ]; then 67 | pm='apk' 68 | fi 69 | 70 | apkDel= 71 | if [ "$pm" = 'apk' ]; then 72 | if \ 73 | [ -n "$PHPIZE_DEPS" ] \ 74 | && ! apk info --installed .phpize-deps > /dev/null \ 75 | && ! 
apk info --installed .phpize-deps-configure > /dev/null \
	; then
		# binutils provides readelf, used below to classify each module.
		apk add --no-cache --virtual '.docker-php-ext-enable-deps' binutils
		apkDel='.docker-php-ext-enable-deps'
	fi
fi

for module in $modules; do
	moduleFile="$module"
	if [ -f "$module.so" ] && ! [ -f "$module" ]; then
		moduleFile="$module.so"
	fi
	# Zend extensions export zend_extension_entry and need a different ini directive.
	if readelf --wide --syms "$moduleFile" | grep -q ' zend_extension_entry$'; then
		# https://wiki.php.net/internals/extensions#loading_zend_extensions
		line="zend_extension=$module"
	else
		line="extension=$module"
	fi

	ext="$(basename "$module")"
	ext="${ext%.*}"
	# Skip modules that PHP already reports as loaded (best-effort name match).
	if php -d 'display_errors=stderr' -r 'exit(extension_loaded("'"$ext"'") ? 0 : 1);'; then
		# this isn't perfect, but it's better than nothing
		# (for example, 'opcache.so' presents inside PHP as 'Zend OPcache', not 'opcache')
		echo >&2
		echo >&2 "warning: $ext ($module) is already loaded!"
		echo >&2
		continue
	fi

	case "$iniName" in
		/*)
			# allow an absolute path
			ini="$iniName"
			;;
		*)
			ini="$PHP_INI_DIR/conf.d/${iniName:-"docker-php-ext-$ext.ini"}"
			;;
	esac
	if !
grep -q "$line" "$ini" 2>/dev/null; then 115 | echo "$line" >> "$ini" 116 | fi 117 | done 118 | 119 | if [ "$pm" = 'apk' ] && [ -n "$apkDel" ]; then 120 | apk del --no-network $apkDel 121 | fi 122 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/php-kafka-lib/v0.1.x/avroProducer.php: -------------------------------------------------------------------------------- 1 | 'http://kafka-schema-registry:9083', 25 | //'auth' => ['user', 'pw'] 26 | ] 27 | ) 28 | ) 29 | ), 30 | new AvroObjectCacheAdapter() 31 | ); 32 | 33 | // Instantiate schema registry of lib (Note: In the future we will use our won cached registry) 34 | $schemaRegistry = new AvroSchemaRegistry($registry); 35 | // add schema for topic 36 | $schemaRegistry->addSchemaMappingForTopic( 37 | 'php-kafka-lib-test-topic-avro', 38 | new KafkaAvroSchema( 39 | 'nickzh.php.kafka.examples.entity.product-value' 40 | // optional param - version: if not passed we will take latest 41 | ) 42 | ); 43 | 44 | // instantiate avro record serializer (vendor: flix) 45 | $recordSerializer = new RecordSerializer($registry); 46 | 47 | // initialize Avro encode (Note: In the future, we will use our own record serializer) 48 | $encoder = new AvroEncoder($schemaRegistry, $recordSerializer); 49 | 50 | // Get producer Builder instance 51 | $builder = KafkaProducerBuilder::create(); 52 | 53 | $producer = $builder->withAdditionalConfig( 54 | [ 55 | // will be visible in broker logs 56 | 'client.id' => 'php-kafka-lib-producer-avro', 57 | // set compression (supported are: none,gzip,lz4,snappy,zstd) 58 | 'compression.codec' => 'snappy', 59 | ] 60 | ) 61 | ->withAdditionalBroker('kafka:9096') 62 | ->withEncoder($encoder) 63 | ->build(); 64 | 65 | for ($i = 0; $i < 10; ++$i) { 66 | $message = KafkaProducerMessage::create('php-kafka-lib-test-topic-avro', 0) 67 | ->withKey(sprintf('test-key-%d', $i)) 68 | ->withBody( 69 | [ 70 | 'id' => Uuid::uuid6()->toString(), 71 | 'name' => 
sprintf('Product %d', $i), 72 | 'description' => 'A random test product', 73 | 'price' => 21.25 74 | ] 75 | ) 76 | ->withHeaders( 77 | [ 78 | 'some' => 'test header' 79 | ] 80 | ); 81 | 82 | $producer->produce($message); 83 | echo sprintf('Queued message number: %d', $i) . PHP_EOL; 84 | } 85 | 86 | // Shutdown producer, flush messages that are in queue. Give up after 20s 87 | $result = $producer->flush(20000); 88 | 89 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $result) { 90 | echo 'Was not able to shutdown within 20s. Messages might be lost!' . PHP_EOL; 91 | } 92 | -------------------------------------------------------------------------------- /src/redpanda/pure-php/producer.php: -------------------------------------------------------------------------------- 1 | set('client.id', 'pure-php-producer'); 12 | // set broker 13 | $conf->set('metadata.broker.list', 'redpanda:9097'); 14 | // set compression (supported are: none,gzip,lz4,snappy,zstd) 15 | $conf->set('compression.codec', 'snappy'); 16 | // set timeout, producer will retry for 5s 17 | $conf->set('message.timeout.ms', '5000'); 18 | //If you need to produce exactly once and want to keep the original produce order, uncomment the line below 19 | //$conf->set('enable.idempotence', 'true'); 20 | 21 | // This callback processes the delivery reports from the broker 22 | // you can see if your message was truly sent, this can be especially of importance if you poll async 23 | $conf->setDrMsgCb(function (Producer $kafka, Message $message) { 24 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $message->err) { 25 | $errorStr = rd_kafka_err2str($message->err); 26 | 27 | echo sprintf('Message FAILED (%s, %s) to send with payload => %s', $message->err, $errorStr, $message->payload) . PHP_EOL; 28 | } else { 29 | echo sprintf('Message sent SUCCESSFULLY with payload => %s', $message->payload) . 
PHP_EOL;
    }
});

// SASL Authentication
// can be SASL_PLAINTEXT, SASL_SSL
// $conf->set('security.protocol', '');
// can be GSSAPI, PLAIN, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER
// $conf->set('sasl.mechanisms', '');
// $conf->set('sasl.username', '');
// $conf->set('sasl.password', '');
// default is none
// $conf->set('ssl.endpoint.identification.algorithm', 'https');


// SSL Authentication
//$conf->set('security.protocol', 'ssl');
//$conf->set('ssl.ca.location', __DIR__.'/../keys/ca.pem');
//$conf->set('ssl.certificate.location', __DIR__.'/../keys/kafka.cert');
//$conf->set('ssl.key.location', __DIR__.'/../keys/kafka.key');

// Add additional output if you need to debug a problem
// $conf->set('log_level', (string) LOG_DEBUG);
// $conf->set('debug', 'all');

$producer = new Producer($conf);
// initialize producer topic
$topic = $producer->getTopicHandle('pure-php-test-topic');
// Produce 10 test messages
$amountTestMessages = 10;

// Loop to produce some test messages
for ($i = 0; $i < $amountTestMessages; ++$i) {
    // Let the partitioner decide the target partition, default partitioner is: RD_KAFKA_MSG_PARTITIONER_CONSISTENT_RANDOM
    // You can use a predefined partitioner or write own logic to decide the target partition
    $partition = RD_KAFKA_PARTITION_UA;

    //produce message with payload, key and headers
    $topic->producev(
        $partition,
        RD_KAFKA_MSG_F_BLOCK, // will block produce if queue is full
        sprintf('test message-%d',$i),
        sprintf('test-key-%d', $i),
        [
            'some' => sprintf('header value %d', $i)
        ]
    );
    echo sprintf('Queued message number: %d', $i) . PHP_EOL;

    // Poll for events e.g. producer callbacks, to handle errors, etc.
79 | // 0 = non-blocking 80 | // -1 = blocking 81 | // any other int value = timeout in ms 82 | $producer->poll(0); 83 | } 84 | 85 | // Shutdown producer, flush messages that are in queue. Give up after 20s 86 | $result = $producer->flush(20000); 87 | 88 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $result) { 89 | echo 'Was not able to shutdown within 20s. Messages might be lost!' . PHP_EOL; 90 | } 91 | -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/pure-php/producer.php: -------------------------------------------------------------------------------- 1 | set('client.id', 'pure-php-producer'); 12 | // set broker 13 | $conf->set('metadata.broker.list', 'kafka:9096'); 14 | // set compression (supported are: none,gzip,lz4,snappy,zstd) 15 | $conf->set('compression.codec', 'snappy'); 16 | // set timeout, producer will retry for 5s 17 | $conf->set('message.timeout.ms', '5000'); 18 | //If you need to produce exactly once and want to keep the original produce order, uncomment the line below 19 | //$conf->set('enable.idempotence', 'true'); 20 | 21 | // This callback processes the delivery reports from the broker 22 | // you can see if your message was truly sent, this can be especially of importance if you poll async 23 | $conf->setDrMsgCb(function (Producer $kafka, Message $message) { 24 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $message->err) { 25 | $errorStr = kafka_err2str($message->err); 26 | 27 | echo sprintf('Message FAILED (%s, %s) to send with payload => %s', $message->err, $errorStr, $message->payload) . PHP_EOL; 28 | } else { 29 | echo sprintf('Message sent SUCCESSFULLY with payload => %s', $message->payload) . 
PHP_EOL;
    }
});

// SASL Authentication
// can be SASL_PLAINTEXT, SASL_SSL
// $conf->set('security.protocol', '');
// can be GSSAPI, PLAIN, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER
// $conf->set('sasl.mechanisms', '');
// $conf->set('sasl.username', '');
// $conf->set('sasl.password', '');
// default is none
// $conf->set('ssl.endpoint.identification.algorithm', 'https');


// SSL Authentication
//$conf->set('security.protocol', 'ssl');
//$conf->set('ssl.ca.location', __DIR__.'/../keys/ca.pem');
//$conf->set('ssl.certificate.location', __DIR__.'/../keys/kafka.cert');
//$conf->set('ssl.key.location', __DIR__.'/../keys/kafka.key');

// Add additional output if you need to debug a problem
// $conf->set('log_level', (string) LOG_DEBUG);
// $conf->set('debug', 'all');

$producer = new Producer($conf);
// initialize producer topic
$topic = $producer->getTopicHandle('pure-php-test-topic');
// Produce 10 test messages
$amountTestMessages = 10;

// Loop to produce some test messages
for ($i = 0; $i < $amountTestMessages; ++$i) {
    // Let the partitioner decide the target partition, default partitioner is: RD_KAFKA_MSG_PARTITIONER_CONSISTENT_RANDOM
    // You can use a predefined partitioner or write own logic to decide the target partition
    $partition = RD_KAFKA_PARTITION_UA;

    //produce message with payload, key and headers
    $topic->producev(
        $partition,
        RD_KAFKA_MSG_F_BLOCK, // will block produce if queue is full
        sprintf('test message-%d',$i),
        sprintf('test-key-%d', $i),
        [
            'some' => sprintf('header value %d', $i)
        ]
    );
    echo sprintf('Queued message number: %d', $i) . PHP_EOL;

    // Poll for events e.g. producer callbacks, to handle errors, etc.
79 | // 0 = non-blocking 80 | // -1 = blocking 81 | // any other int value = timeout in ms 82 | $producer->poll(0); 83 | } 84 | 85 | // Shutdown producer, flush messages that are in queue. Give up after 20s 86 | $result = $producer->flush(20000); 87 | 88 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $result) { 89 | echo 'Was not able to shutdown within 20s. Messages might be lost!' . PHP_EOL; 90 | } 91 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/pure-php/producer.php: -------------------------------------------------------------------------------- 1 | set('client.id', 'pure-php-producer'); 13 | // set broker 14 | $conf->set('metadata.broker.list', 'kafka:9096'); 15 | // set compression (supported are: none,gzip,lz4,snappy,zstd) 16 | $conf->set('compression.codec', 'snappy'); 17 | // set timeout, producer will retry for 5s 18 | $conf->set('message.timeout.ms', '5000'); 19 | //If you need to produce exactly once and want to keep the original produce order, uncomment the line below 20 | //$conf->set('enable.idempotence', 'true'); 21 | 22 | // This callback processes the delivery reports from the broker 23 | // you can see if your message was truly sent, this can be especially of importance if you poll async 24 | $conf->setDrMsgCb(function (Producer $kafka, Message $message) { 25 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $message->err) { 26 | $errorStr = rd_kafka_err2str($message->err); 27 | 28 | echo sprintf('Message FAILED (%s, %s) to send with payload => %s', $message->err, $errorStr, $message->payload) . PHP_EOL; 29 | } else { 30 | // message successfully delivered 31 | echo sprintf('Message sent SUCCESSFULLY with payload => %s', $message->payload) . 
PHP_EOL;
    }
});

// SASL Authentication
// can be SASL_PLAINTEXT, SASL_SSL
// $conf->set('security.protocol', '');
// can be GSSAPI, PLAIN, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER
// $conf->set('sasl.mechanisms', '');
// $conf->set('sasl.username', '');
// $conf->set('sasl.password', '');
// default is none
// $conf->set('ssl.endpoint.identification.algorithm', 'https');


// SSL Authentication
//$conf->set('security.protocol', 'ssl');
//$conf->set('ssl.ca.location', __DIR__.'/../keys/ca.pem');
//$conf->set('ssl.certificate.location', __DIR__.'/../keys/kafka.cert');
//$conf->set('ssl.key.location', __DIR__.'/../keys/kafka.key');

// Add additional output if you need to debug a problem
// $conf->set('log_level', (string) LOG_DEBUG);
// $conf->set('debug', 'all');

$producer = new Producer($conf);
// initialize producer topic
$topic = $producer->newTopic('pure-php-test-topic');
// Produce 10 test messages
$amountTestMessages = 10;

// Loop to produce some test messages
for ($i = 0; $i < $amountTestMessages; ++$i) {
    // Let the partitioner decide the target partition, default partitioner is: RD_KAFKA_MSG_PARTITIONER_CONSISTENT_RANDOM
    // You can use a predefined partitioner or write own logic to decide the target partition
    $partition = RD_KAFKA_PARTITION_UA;

    //produce message with payload, key and headers
    $topic->producev(
        $partition,
        RD_KAFKA_MSG_F_BLOCK, // will block produce if queue is full
        sprintf('test message-%d',$i),
        sprintf('test-key-%d', $i),
        [
            'some' => sprintf('header value %d', $i)
        ]
    );
    echo sprintf('Queued message number: %d', $i) . PHP_EOL;

    // Poll for events e.g. producer callbacks, to handle errors, etc.
81 | // 0 = non-blocking 82 | // -1 = blocking 83 | // any other int value = timeout in ms 84 | $producer->poll(0); 85 | } 86 | 87 | // Shutdown producer, flush messages that are in queue. Give up after 20s 88 | $result = $producer->flush(20000); 89 | 90 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $result) { 91 | echo 'Was not able to shutdown within 20s. Messages might be lost!' . PHP_EOL; 92 | } 93 | -------------------------------------------------------------------------------- /src/redpanda/php-simple-kafka-lib/avroProducer.php: -------------------------------------------------------------------------------- 1 | 'http://redpanda:8081', 25 | //'auth' => ['user', 'pw'] 26 | ] 27 | ) 28 | ) 29 | ), 30 | new AvroObjectCacheAdapter() 31 | ); 32 | 33 | // Instantiate schema registry of lib (Note: In the future we will use our won cached registry) 34 | $schemaRegistry = new AvroSchemaRegistry($registry); 35 | // add schema for topic 36 | $schemaRegistry->addBodySchemaMappingForTopic( 37 | 'php-kafka-lib-test-topic-avro', 38 | new KafkaAvroSchema( 39 | 'nickzh.php.kafka.examples.entity.product-value' 40 | // optional param - version: if not passed we will take latest 41 | ) 42 | ); 43 | $schemaRegistry->addKeySchemaMappingForTopic( 44 | 'php-kafka-lib-test-topic-avro', 45 | new KafkaAvroSchema( 46 | 'nickzh.php.kafka.examples.entity.product-key' 47 | // optional param - version: if not passed we will take latest 48 | ) 49 | ); 50 | 51 | // instantiate avro record serializer (vendor: flix) 52 | $recordSerializer = new RecordSerializer($registry); 53 | 54 | // initialize Avro encode (Note: In the future, we will use our own record serializer) 55 | $encoder = new AvroEncoder($schemaRegistry, $recordSerializer); 56 | 57 | // Get producer Builder instance 58 | $builder = KafkaProducerBuilder::create(); 59 | 60 | $producer = $builder->withAdditionalConfig( 61 | [ 62 | // will be visible in broker logs 63 | 'client.id' => 'php-kafka-lib-producer-avro', 64 | // set compression 
(supported are: none,gzip,lz4,snappy,zstd)
        'compression.codec' => 'snappy',

        // Add additional output if you need to debug a problem
        // 'log_level' => (string) LOG_DEBUG,
        // 'debug' => 'all'
    ]
)
    ->withAdditionalBroker('redpanda:9097')
    ->withEncoder($encoder)
    ->build();

// Produce 10 avro-encoded test messages, all to partition 0 of the mapped topic.
for ($i = 0; $i < 10; ++$i) {
    $message = KafkaProducerMessage::create('php-kafka-lib-test-topic-avro', 0)
        ->withKey(sprintf('test-key-%d', $i))
        // Body must match the registered avro value schema (product record).
        ->withBody(
            [
                'id' => Uuid::uuid6()->toString(),
                'name' => sprintf('Product %d', $i),
                'description' => 'A random test product',
                'price' => 21.25
            ]
        )
        ->withHeaders(
            [
                'some' => 'test header'
            ]
        );

    $producer->produce($message);
    echo sprintf('Queued message number: %d', $i) . PHP_EOL;
}

// Shutdown producer, flush messages that are in queue. Give up after 20s
$result = $producer->flush(20000);

if (RD_KAFKA_RESP_ERR_NO_ERROR !== $result) {
    echo 'Was not able to shutdown within 20s. Messages might be lost!' .
PHP_EOL; 102 | } 103 | -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/php-simple-kafka-lib/avroProducer.php: -------------------------------------------------------------------------------- 1 | 'http://kafka-schema-registry:9083', 25 | //'auth' => ['user', 'pw'] 26 | ] 27 | ) 28 | ) 29 | ), 30 | new AvroObjectCacheAdapter() 31 | ); 32 | 33 | // Instantiate schema registry of lib (Note: In the future we will use our won cached registry) 34 | $schemaRegistry = new AvroSchemaRegistry($registry); 35 | // add schema for topic 36 | $schemaRegistry->addBodySchemaMappingForTopic( 37 | 'php-kafka-lib-test-topic-avro', 38 | new KafkaAvroSchema( 39 | 'nickzh.php.kafka.examples.entity.product-value' 40 | // optional param - version: if not passed we will take latest 41 | ) 42 | ); 43 | $schemaRegistry->addKeySchemaMappingForTopic( 44 | 'php-kafka-lib-test-topic-avro', 45 | new KafkaAvroSchema( 46 | 'nickzh.php.kafka.examples.entity.product-key' 47 | // optional param - version: if not passed we will take latest 48 | ) 49 | ); 50 | 51 | // instantiate avro record serializer (vendor: flix) 52 | $recordSerializer = new RecordSerializer($registry); 53 | 54 | // initialize Avro encode (Note: In the future, we will use our own record serializer) 55 | $encoder = new AvroEncoder($schemaRegistry, $recordSerializer); 56 | 57 | // Get producer Builder instance 58 | $builder = KafkaProducerBuilder::create(); 59 | 60 | $producer = $builder->withAdditionalConfig( 61 | [ 62 | // will be visible in broker logs 63 | 'client.id' => 'php-kafka-lib-producer-avro', 64 | // set compression (supported are: none,gzip,lz4,snappy,zstd) 65 | 'compression.codec' => 'snappy', 66 | 67 | // Add additional output if you need to debug a problem 68 | // 'log_level' => (string) LOG_DEBUG, 69 | // 'debug' => 'all' 70 | ] 71 | ) 72 | ->withAdditionalBroker('kafka:9096') 73 | ->withEncoder($encoder) 74 | ->build(); 75 | 76 | for ($i = 0; $i < 10; 
++$i) { 77 | $message = KafkaProducerMessage::create('php-kafka-lib-test-topic-avro', 0) 78 | ->withKey(sprintf('test-key-%d', $i)) 79 | ->withBody( 80 | [ 81 | 'id' => Uuid::uuid6()->toString(), 82 | 'name' => sprintf('Product %d', $i), 83 | 'description' => 'A random test product', 84 | 'price' => 21.25 85 | ] 86 | ) 87 | ->withHeaders( 88 | [ 89 | 'some' => 'test header' 90 | ] 91 | ); 92 | 93 | $producer->produce($message); 94 | echo sprintf('Queued message number: %d', $i) . PHP_EOL; 95 | } 96 | 97 | // Shutdown producer, flush messages that are in queue. Give up after 20s 98 | $result = $producer->flush(20000); 99 | 100 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $result) { 101 | echo 'Was not able to shutdown within 20s. Messages might be lost!' . PHP_EOL; 102 | } 103 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/php-kafka-lib/avroProducer.php: -------------------------------------------------------------------------------- 1 | 'http://kafka-schema-registry:9083', 25 | //'auth' => ['user', 'pw'] 26 | ] 27 | ) 28 | ) 29 | ), 30 | new AvroObjectCacheAdapter() 31 | ); 32 | 33 | // Instantiate schema registry of lib (Note: In the future we will use our won cached registry) 34 | $schemaRegistry = new AvroSchemaRegistry($registry); 35 | // add schema for topic 36 | $schemaRegistry->addBodySchemaMappingForTopic( 37 | 'php-kafka-lib-test-topic-avro', 38 | new KafkaAvroSchema( 39 | 'nickzh.php.kafka.examples.entity.product-value' 40 | // optional param - version: if not passed we will take latest 41 | ) 42 | ); 43 | $schemaRegistry->addKeySchemaMappingForTopic( 44 | 'php-kafka-lib-test-topic-avro', 45 | new KafkaAvroSchema( 46 | 'nickzh.php.kafka.examples.entity.product-key' 47 | // optional param - version: if not passed we will take latest 48 | ) 49 | ); 50 | 51 | // instantiate avro record serializer (vendor: flix) 52 | $recordSerializer = new RecordSerializer($registry); 53 | 54 | // initialize Avro encode (Note: 
In the future, we will use our own record serializer)
$encoder = new AvroEncoder($schemaRegistry, $recordSerializer);

// Get producer Builder instance
$builder = KafkaProducerBuilder::create();

$producer = $builder->withAdditionalConfig(
    [
        // will be visible in broker logs
        'client.id' => 'php-kafka-lib-producer-avro',
        // set compression (supported are: none,gzip,lz4,snappy,zstd)
        'compression.codec' => 'snappy',

        // Add additional output if you need to debug a problem
        // 'log_level' => (string) LOG_DEBUG,
        // 'debug' => 'all'
    ]
)
    ->withAdditionalBroker('kafka:9096')
    ->withEncoder($encoder)
    ->build();

// Produce 10 avro-encoded test messages, all to partition 0 of the mapped topic.
for ($i = 0; $i < 10; ++$i) {
    $message = KafkaProducerMessage::create('php-kafka-lib-test-topic-avro', 0)
        ->withKey(sprintf('test-key-%d', $i))
        // Body must match the registered avro value schema (product record).
        ->withBody(
            [
                'id' => Uuid::uuid6()->toString(),
                'name' => sprintf('Product %d', $i),
                'description' => 'A random test product',
                'price' => 21.25
            ]
        )
        ->withHeaders(
            [
                'some' => 'test header'
            ]
        );

    $producer->produce($message);
    echo sprintf('Queued message number: %d', $i) . PHP_EOL;
}

// Shutdown producer, flush messages that are in queue. Give up after 20s
$result = $producer->flush(20000);

if (RD_KAFKA_RESP_ERR_NO_ERROR !== $result) {
    echo 'Was not able to shutdown within 20s. Messages might be lost!' .
PHP_EOL; 102 | } 103 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/php-kafka-lib/v0.1.x/avroHighLevelConsumer.php: -------------------------------------------------------------------------------- 1 | 'http://kafka-schema-registry:9083', 27 | //'auth' => ['user', 'pw'] 28 | ] 29 | ) 30 | ) 31 | ), 32 | new AvroObjectCacheAdapter() 33 | ); 34 | 35 | // Instantiate schema registry of lib (Note: In the future we will use our own cached registry) 36 | $schemaRegistry = new AvroSchemaRegistry($registry); 37 | // add schema for topic 38 | $schemaRegistry->addSchemaMappingForTopic( 39 | 'php-kafka-lib-test-topic-avro', 40 | new KafkaAvroSchema( 41 | 'nickzh.php.kafka.examples.entity.product-value' 42 | // optional param - version: if not passed we will take latest 43 | ) 44 | ); 45 | 46 | // instantiate avro record serializer (vendor: flix) 47 | $recordSerializer = new RecordSerializer($registry); 48 | 49 | // initialize Avro decoder (Note: In the future, we will use our own record serializer) 50 | $decoder = new AvroDecoder($schemaRegistry, $recordSerializer); 51 | 52 | // Get consumer Builder instance 53 | $builder = KafkaConsumerBuilder::create(); 54 | 55 | // Configure consumer 56 | $consumer = $builder->withAdditionalConfig( 57 | [ 58 | // start at the very beginning of the topic when reading for the first time 59 | 'auto.offset.reset' => 'earliest', 60 | 61 | // will be visible in broker logs 62 | 'client.id' => 'php-kafka-lib-high-level-consumer-avro', 63 | 64 | // SSL settings 65 | //'security.protocol' => 'ssl', 66 | //'ssl.ca.location' => __DIR__.'/../../../keys/ca.pem', 67 | //'ssl.certificate.location' => __DIR__.'/../../../keys/apl_stage.cert', 68 | //'ssl.key.location' => __DIR__.'/../../../keys/apl_stage.key', 69 | 70 | // SASL settings 71 | //'sasl.mechanisms' => '', 72 | //'ssl.endpoint.identification.algorithm' => 'https', 73 | //'sasl.username' => '', 74 | //'sasl.password' => '', 75 | ] 76 | ) 77 
| ->withAdditionalBroker('kafka:9096') 78 | ->withTimeout(10000) 79 | ->withConsumerGroup('php-kafka-lib-high-level-consumer-avro') 80 | ->withDecoder($decoder) 81 | ->withSubscription('php-kafka-lib-test-topic-avro') 82 | ->build(); 83 | 84 | $consumer->subscribe(); 85 | 86 | while (true) { 87 | try { 88 | $message = $consumer->consume(); 89 | } catch (KafkaConsumerTimeoutException|KafkaConsumerEndOfPartitionException $e) { 90 | continue; 91 | } catch (KafkaConsumerConsumeException $e) { 92 | echo $e->getMessage() . PHP_EOL; 93 | continue; 94 | } catch (SchemaNotFoundException $e) { 95 | echo 'Consumed message with no or unknown schema' . PHP_EOL; 96 | continue; 97 | } 98 | 99 | echo sprintf( 100 | 'Read message with key:%s payload:%s topic:%s partition:%d offset:%d headers:%s', 101 | $message->getKey(), 102 | implode(',', $message->getBody()), 103 | $message->getTopicName(), 104 | $message->getPartition(), 105 | $message->getOffset(), 106 | implode(',', $message->getHeaders()) 107 | ) . 
PHP_EOL; 108 | 109 | $consumer->commit($message); 110 | } 111 | -------------------------------------------------------------------------------- /src/redpanda/php-simple-kafka-lib/avroConsumer.php: -------------------------------------------------------------------------------- 1 | 'http://redpanda:8081', 27 | //'auth' => ['user', 'pw'] 28 | ] 29 | ) 30 | ) 31 | ), 32 | new AvroObjectCacheAdapter() 33 | ); 34 | 35 | // Instantiate schema registry of lib (Note: In the future we will use our own cached registry) 36 | $schemaRegistry = new AvroSchemaRegistry($registry); 37 | // add schema for topic 38 | $schemaRegistry->addBodySchemaMappingForTopic( 39 | 'php-kafka-lib-test-topic-avro', 40 | new KafkaAvroSchema( 41 | 'nickzh.php.kafka.examples.entity.product-value' 42 | // optional param - version: if not passed we will take latest 43 | ) 44 | ); 45 | $schemaRegistry->addKeySchemaMappingForTopic( 46 | 'php-kafka-lib-test-topic-avro', 47 | new KafkaAvroSchema( 48 | 'nickzh.php.kafka.examples.entity.product-key' 49 | // optional param - version: if not passed we will take latest 50 | ) 51 | ); 52 | 53 | // instantiate avro record serializer (vendor: flix) 54 | $recordSerializer = new RecordSerializer($registry); 55 | 56 | // initialize Avro decoder (Note: In the future, we will use our own record serializer) 57 | $decoder = new AvroDecoder($schemaRegistry, $recordSerializer); 58 | 59 | // Get consumer Builder instance 60 | $builder = KafkaConsumerBuilder::create(); 61 | 62 | // Configure consumer 63 | $consumer = $builder->withAdditionalConfig( 64 | [ 65 | // start at the very beginning of the topic when reading for the first time 66 | 'auto.offset.reset' => 'earliest', 67 | 68 | // will be visible in broker logs 69 | 'client.id' => 'php-kafka-lib-high-level-consumer-avro', 70 | 71 | // SSL settings 72 | //'security.protocol' => 'ssl', 73 | //'ssl.ca.location' => __DIR__.'/../../../keys/ca.pem', 74 | //'ssl.certificate.location' => 
__DIR__.'/../../../keys/apl_stage.cert', 75 | //'ssl.key.location' => __DIR__.'/../../../keys/apl_stage.key', 76 | 77 | // SASL settings 78 | //'sasl.mechanisms' => '', 79 | //'ssl.endpoint.identification.algorithm' => 'https', 80 | //'sasl.username' => '', 81 | //'sasl.password' => '', 82 | 83 | // Add additional output if you need to debug a problem 84 | // 'log_level' => (string) LOG_DEBUG, 85 | // 'debug' => 'all' 86 | ] 87 | ) 88 | ->withAdditionalBroker('redpanda:9097') 89 | ->withConsumerGroup('php-kafka-lib-high-level-consumer-avro') 90 | ->withDecoder($decoder) 91 | ->withSubscription('php-kafka-lib-test-topic-avro') 92 | ->build(); 93 | 94 | $consumer->subscribe(); 95 | 96 | while (true) { 97 | try { 98 | $message = $consumer->consume(10000); 99 | } catch (KafkaConsumerTimeoutException|KafkaConsumerEndOfPartitionException $e) { 100 | continue; 101 | } catch (KafkaConsumerConsumeException $e) { 102 | echo $e->getMessage() . PHP_EOL; 103 | continue; 104 | } catch (SchemaNotFoundException $e) { 105 | echo 'Consumed message with no or unknown schema' . PHP_EOL; 106 | continue; 107 | } 108 | 109 | echo sprintf( 110 | 'Read message with key:%s payload:%s topic:%s partition:%d offset:%d headers:%s', 111 | $message->getKey(), 112 | implode(',', $message->getBody()), 113 | $message->getTopicName(), 114 | $message->getPartition(), 115 | $message->getOffset(), 116 | implode(',', $message->getHeaders()) 117 | ) . 
PHP_EOL; 118 | 119 | $consumer->commit($message); 120 | } 121 | -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/php-simple-kafka-lib/avroConsumer.php: -------------------------------------------------------------------------------- 1 | 'http://kafka-schema-registry:9083', 27 | //'auth' => ['user', 'pw'] 28 | ] 29 | ) 30 | ) 31 | ), 32 | new AvroObjectCacheAdapter() 33 | ); 34 | 35 | // Instantiate schema registry of lib (Note: In the future we will use our own cached registry) 36 | $schemaRegistry = new AvroSchemaRegistry($registry); 37 | // add schema for topic 38 | $schemaRegistry->addBodySchemaMappingForTopic( 39 | 'php-kafka-lib-test-topic-avro', 40 | new KafkaAvroSchema( 41 | 'nickzh.php.kafka.examples.entity.product-value' 42 | // optional param - version: if not passed we will take latest 43 | ) 44 | ); 45 | $schemaRegistry->addKeySchemaMappingForTopic( 46 | 'php-kafka-lib-test-topic-avro', 47 | new KafkaAvroSchema( 48 | 'nickzh.php.kafka.examples.entity.product-key' 49 | // optional param - version: if not passed we will take latest 50 | ) 51 | ); 52 | 53 | // instantiate avro record serializer (vendor: flix) 54 | $recordSerializer = new RecordSerializer($registry); 55 | 56 | // initialize Avro decoder (Note: In the future, we will use our own record serializer) 57 | $decoder = new AvroDecoder($schemaRegistry, $recordSerializer); 58 | 59 | // Get consumer Builder instance 60 | $builder = KafkaConsumerBuilder::create(); 61 | 62 | // Configure consumer 63 | $consumer = $builder->withAdditionalConfig( 64 | [ 65 | // start at the very beginning of the topic when reading for the first time 66 | 'auto.offset.reset' => 'earliest', 67 | 68 | // will be visible in broker logs 69 | 'client.id' => 'php-kafka-lib-high-level-consumer-avro', 70 | 71 | // SSL settings 72 | //'security.protocol' => 'ssl', 73 | //'ssl.ca.location' => __DIR__.'/../../../keys/ca.pem', 74 | //'ssl.certificate.location' => 
__DIR__.'/../../../keys/apl_stage.cert', 75 | //'ssl.key.location' => __DIR__.'/../../../keys/apl_stage.key', 76 | 77 | // SASL settings 78 | //'sasl.mechanisms' => '', 79 | //'ssl.endpoint.identification.algorithm' => 'https', 80 | //'sasl.username' => '', 81 | //'sasl.password' => '', 82 | 83 | // Add additional output if you need to debug a problem 84 | // 'log_level' => (string) LOG_DEBUG, 85 | // 'debug' => 'all' 86 | ] 87 | ) 88 | ->withAdditionalBroker('kafka:9096') 89 | ->withConsumerGroup('php-kafka-lib-high-level-consumer-avro') 90 | ->withDecoder($decoder) 91 | ->withSubscription('php-kafka-lib-test-topic-avro') 92 | ->build(); 93 | 94 | $consumer->subscribe(); 95 | 96 | while (true) { 97 | try { 98 | $message = $consumer->consume(10000); 99 | } catch (KafkaConsumerTimeoutException|KafkaConsumerEndOfPartitionException $e) { 100 | continue; 101 | } catch (KafkaConsumerConsumeException $e) { 102 | echo $e->getMessage() . PHP_EOL; 103 | continue; 104 | } catch (SchemaNotFoundException $e) { 105 | echo 'Consumed message with no or unknown schema' . PHP_EOL; 106 | continue; 107 | } 108 | 109 | echo sprintf( 110 | 'Read message with key:%s payload:%s topic:%s partition:%d offset:%d headers:%s', 111 | $message->getKey(), 112 | implode(',', $message->getBody()), 113 | $message->getTopicName(), 114 | $message->getPartition(), 115 | $message->getOffset(), 116 | implode(',', $message->getHeaders()) 117 | ) . 
PHP_EOL; 118 | 119 | $consumer->commit($message); 120 | } 121 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/php-kafka-lib/avroConsumer.php: -------------------------------------------------------------------------------- 1 | 'http://kafka-schema-registry:9083', 27 | //'auth' => ['user', 'pw'] 28 | ] 29 | ) 30 | ) 31 | ), 32 | new AvroObjectCacheAdapter() 33 | ); 34 | 35 | // Instantiate schema registry of lib (Note: In the future we will use our own cached registry) 36 | $schemaRegistry = new AvroSchemaRegistry($registry); 37 | // add schema for topic 38 | $schemaRegistry->addBodySchemaMappingForTopic( 39 | 'php-kafka-lib-test-topic-avro', 40 | new KafkaAvroSchema( 41 | 'nickzh.php.kafka.examples.entity.product-value' 42 | // optional param - version: if not passed we will take latest 43 | ) 44 | ); 45 | $schemaRegistry->addKeySchemaMappingForTopic( 46 | 'php-kafka-lib-test-topic-avro', 47 | new KafkaAvroSchema( 48 | 'nickzh.php.kafka.examples.entity.product-key' 49 | // optional param - version: if not passed we will take latest 50 | ) 51 | ); 52 | 53 | // instantiate avro record serializer (vendor: flix) 54 | $recordSerializer = new RecordSerializer($registry); 55 | 56 | // initialize Avro decoder (Note: In the future, we will use our own record serializer) 57 | $decoder = new AvroDecoder($schemaRegistry, $recordSerializer); 58 | 59 | // Get consumer Builder instance 60 | $builder = KafkaConsumerBuilder::create(); 61 | 62 | // Configure consumer 63 | $consumer = $builder->withAdditionalConfig( 64 | [ 65 | // start at the very beginning of the topic when reading for the first time 66 | 'auto.offset.reset' => 'earliest', 67 | 68 | // will be visible in broker logs 69 | 'client.id' => 'php-kafka-lib-high-level-consumer-avro', 70 | 71 | // SSL settings 72 | //'security.protocol' => 'ssl', 73 | //'ssl.ca.location' => __DIR__.'/../../../keys/ca.pem', 74 | //'ssl.certificate.location' => 
__DIR__.'/../../../keys/apl_stage.cert', 75 | //'ssl.key.location' => __DIR__.'/../../../keys/apl_stage.key', 76 | 77 | // SASL settings 78 | //'sasl.mechanisms' => '', 79 | //'ssl.endpoint.identification.algorithm' => 'https', 80 | //'sasl.username' => '', 81 | //'sasl.password' => '', 82 | 83 | // Add additional output if you need to debug a problem 84 | // 'log_level' => (string) LOG_DEBUG, 85 | // 'debug' => 'all' 86 | ] 87 | ) 88 | ->withAdditionalBroker('kafka:9096') 89 | ->withConsumerGroup('php-kafka-lib-high-level-consumer-avro') 90 | ->withDecoder($decoder) 91 | ->withSubscription('php-kafka-lib-test-topic-avro') 92 | ->build(); 93 | 94 | $consumer->subscribe(); 95 | 96 | while (true) { 97 | try { 98 | $message = $consumer->consume(10000); 99 | } catch (KafkaConsumerTimeoutException|KafkaConsumerEndOfPartitionException $e) { 100 | continue; 101 | } catch (KafkaConsumerConsumeException $e) { 102 | echo $e->getMessage() . PHP_EOL; 103 | continue; 104 | } catch (SchemaNotFoundException $e) { 105 | echo 'Consumed message with no or unknown schema' . PHP_EOL; 106 | continue; 107 | } 108 | 109 | echo sprintf( 110 | 'Read message with key:%s payload:%s topic:%s partition:%d offset:%d headers:%s', 111 | $message->getKey(), 112 | implode(',', $message->getBody()), 113 | $message->getTopicName(), 114 | $message->getPartition(), 115 | $message->getOffset(), 116 | implode(',', $message->getHeaders()) 117 | ) . 
PHP_EOL; 118 | 119 | $consumer->commit($message); 120 | } 121 | -------------------------------------------------------------------------------- /src/redpanda/pure-php/producer_transactional.php: -------------------------------------------------------------------------------- 1 | isFatal()) { 22 | $fatalString = 'is fatal'; 23 | } 24 | 25 | if ($e->isRetriable()) { 26 | $retryString = 'is retriable'; 27 | } 28 | 29 | if ($e->transactionRequiresAbort()) { 30 | $abortString = 'needs transaction abort'; 31 | } 32 | 33 | echo 'Was unable to initialize the transactional producer' . PHP_EOL; 34 | 35 | echo sprintf('The reason was: %s, this error %d, %s, %s, %s', $e->getMessage(), $e->getCode(), $fatalString, $retryString, $abortString) . PHP_EOL; 36 | echo sprintf('In detail this means %s', $e->getErrorString()) . PHP_EOL; 37 | echo sprintf('Trace is %s', $e->getTraceAsString()) . PHP_EOL; 38 | } 39 | 40 | $conf = new Configuration(); 41 | // will be visible in broker logs 42 | $conf->set('client.id', 'pure-php-producer'); 43 | // set broker 44 | $conf->set('metadata.broker.list', 'redpanda:9097'); 45 | // set compression (supported are: none,gzip,lz4,snappy,zstd) 46 | $conf->set('compression.codec', 'snappy'); 47 | // set timeout, producer will retry for 5s 48 | $conf->set('message.timeout.ms', '5000'); 49 | 50 | // For the transactional producer you need a unique id to identify it 51 | $conf->set('transactional.id', 'some-unique-id-of-your-producer-to-recognize-it'); 52 | 53 | // This callback processes the delivery reports from the broker 54 | // you can see if your message was truly sent 55 | $conf->setDrMsgCb(function (Producer $kafka, Message $message) { 56 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $message->err) { 57 | $errorStr = rd_kafka_err2str($message->err); 58 | 59 | echo sprintf('Message FAILED (%s, %s) to send with payload => %s', $message->err, $errorStr, $message->payload) . 
PHP_EOL; 60 | } else { 61 | // message successfully delivered 62 | echo sprintf('Message sent SUCCESSFULLY with payload => %s', $message->payload) . PHP_EOL; 63 | } 64 | }); 65 | 66 | // SASL Authentication 67 | // can be SASL_PLAINTEXT, SASL_SSL 68 | //$conf->set('security.protocol', ''); 69 | // can be GSSAPI, PLAIN, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER 70 | // $conf->set('sasl.mechanisms', ''); 71 | // $conf->set('sasl.username', ''); 72 | // $conf->set('sasl.password', ''); 73 | // default is none 74 | // $conf->set('ssl.endpoint.identification.algorithm', 'https'); 75 | 76 | 77 | // SSL Authentication 78 | //$conf->set('security.protocol', 'ssl'); 79 | //$conf->set('ssl.ca.location', __DIR__.'/../keys/ca.pem'); 80 | //$conf->set('ssl.certificate.location', __DIR__.'/../keys/kafka.cert'); 81 | //$conf->set('ssl.key.location', __DIR__.'/../keys/kafka.key'); 82 | 83 | // Add additional output if you need to debug a problem 84 | // $conf->set('log_level', (string) LOG_DEBUG); 85 | // $conf->set('debug', 'all'); 86 | 87 | $producer = new Producer($conf); 88 | // initialize producer topic 89 | $topic = $producer->getTopicHandle('pure-php-transactional-test-topic'); 90 | // Produce 10 test messages 91 | $amountTestMessages = 10; 92 | 93 | // Initialize transactions 94 | try { 95 | $producer->initTransactions(10000); 96 | } catch (KafkaErrorException $e) { 97 | echoTransactionError($e); 98 | die; 99 | } 100 | 101 | // Begin transaction for our 10 messages 102 | try { 103 | $producer->beginTransaction(); 104 | } catch (KafkaErrorException $e) { 105 | echoTransactionError($e); 106 | die; 107 | } 108 | 109 | // Loop to produce some test messages 110 | for ($i = 0; $i < $amountTestMessages; ++$i) { 111 | // Let the partitioner decide the target partition, default partitioner is: RD_KAFKA_MSG_PARTITIONER_CONSISTENT_RANDOM 112 | // You can use a predefined partitioner or write own logic to decide the target partition 113 | $partition = RD_KAFKA_PARTITION_UA; 114 | 115 
| //produce message with payload, key and headers 116 | $topic->producev( 117 | $partition, 118 | RD_KAFKA_MSG_F_BLOCK, // will block produce if queue is full 119 | sprintf('test message-%d',$i), 120 | sprintf('test-key-%d', $i), 121 | [ 122 | 'some' => sprintf('header value %d', $i) 123 | ] 124 | ); 125 | echo sprintf('Queued message number: %d', $i) . PHP_EOL; 126 | 127 | // Poll for events e.g. producer callbacks, to handle errors, etc. 128 | // 0 = non-blocking 129 | // -1 = blocking 130 | // any other int value = timeout in ms 131 | $producer->poll(0); 132 | } 133 | 134 | // Commit transaction for our 10 messages 135 | try { 136 | $producer->commitTransaction(10000); 137 | } catch (KafkaErrorException $e) { 138 | echoTransactionError($e); 139 | die; 140 | } 141 | 142 | // Shutdown producer, flush messages that are in queue. Give up after 20s 143 | $result = $producer->flush(20000); 144 | 145 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $result) { 146 | echo 'Was not able to shutdown within 20s. Messages might be lost!' . PHP_EOL; 147 | } 148 | -------------------------------------------------------------------------------- /src/ext-php-simple-kafka-client/pure-php/producer_transactional.php: -------------------------------------------------------------------------------- 1 | isFatal()) { 22 | $fatalString = 'is fatal'; 23 | } 24 | 25 | if ($e->isRetriable()) { 26 | $retryString = 'is retriable'; 27 | } 28 | 29 | if ($e->transactionRequiresAbort()) { 30 | $abortString = 'needs transaction abort'; 31 | } 32 | 33 | echo 'Was unable to initialize the transactional producer' . PHP_EOL; 34 | 35 | echo sprintf('The reason was: %s, this error %d, %s, %s, %s', $e->getMessage(), $e->getCode(), $fatalString, $retryString, $abortString) . PHP_EOL; 36 | echo sprintf('In detail this means %s', $e->getErrorString()) . PHP_EOL; 37 | echo sprintf('Trace is %s', $e->getTraceAsString()) . 
PHP_EOL; 38 | } 39 | 40 | $conf = new Configuration(); 41 | // will be visible in broker logs 42 | $conf->set('client.id', 'pure-php-producer'); 43 | // set broker 44 | $conf->set('metadata.broker.list', 'kafka:9096'); 45 | // set compression (supported are: none,gzip,lz4,snappy,zstd) 46 | $conf->set('compression.codec', 'snappy'); 47 | // set timeout, producer will retry for 5s 48 | $conf->set('message.timeout.ms', '5000'); 49 | 50 | // For the transactional producer you need a unique id to identify it 51 | $conf->set('transactional.id', 'some-unique-id-of-your-producer-to-recognize-it'); 52 | 53 | // This callback processes the delivery reports from the broker 54 | // you can see if your message was truly sent 55 | $conf->setDrMsgCb(function (Producer $kafka, Message $message) { 56 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $message->err) { 57 | $errorStr = rd_kafka_err2str($message->err); 58 | 59 | echo sprintf('Message FAILED (%s, %s) to send with payload => %s', $message->err, $errorStr, $message->payload) . PHP_EOL; 60 | } else { 61 | // message successfully delivered 62 | echo sprintf('Message sent SUCCESSFULLY with payload => %s', $message->payload) . 
PHP_EOL; 63 | } 64 | }); 65 | 66 | // SASL Authentication 67 | // can be SASL_PLAINTEXT, SASL_SSL 68 | //$conf->set('security.protocol', ''); 69 | // can be GSSAPI, PLAIN, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER 70 | // $conf->set('sasl.mechanisms', ''); 71 | // $conf->set('sasl.username', ''); 72 | // $conf->set('sasl.password', ''); 73 | // default is none 74 | // $conf->set('ssl.endpoint.identification.algorithm', 'https'); 75 | 76 | 77 | // SSL Authentication 78 | //$conf->set('security.protocol', 'ssl'); 79 | //$conf->set('ssl.ca.location', __DIR__.'/../keys/ca.pem'); 80 | //$conf->set('ssl.certificate.location', __DIR__.'/../keys/kafka.cert'); 81 | //$conf->set('ssl.key.location', __DIR__.'/../keys/kafka.key'); 82 | 83 | // Add additional output if you need to debug a problem 84 | // $conf->set('log_level', (string) LOG_DEBUG); 85 | // $conf->set('debug', 'all'); 86 | 87 | $producer = new Producer($conf); 88 | // initialize producer topic 89 | $topic = $producer->getTopicHandle('pure-php-transactional-test-topic'); 90 | // Produce 10 test messages 91 | $amountTestMessages = 10; 92 | 93 | // Initialize transactions 94 | try { 95 | $producer->initTransactions(10000); 96 | } catch (KafkaErrorException $e) { 97 | echoTransactionError($e); 98 | die; 99 | } 100 | 101 | // Begin transaction for our 10 messages 102 | try { 103 | $producer->beginTransaction(); 104 | } catch (KafkaErrorException $e) { 105 | echoTransactionError($e); 106 | die; 107 | } 108 | 109 | // Loop to produce some test messages 110 | for ($i = 0; $i < $amountTestMessages; ++$i) { 111 | // Let the partitioner decide the target partition, default partitioner is: RD_KAFKA_MSG_PARTITIONER_CONSISTENT_RANDOM 112 | // You can use a predefined partitioner or write own logic to decide the target partition 113 | $partition = RD_KAFKA_PARTITION_UA; 114 | 115 | //produce message with payload, key and headers 116 | $topic->producev( 117 | $partition, 118 | RD_KAFKA_MSG_F_BLOCK, // will block produce if 
queue is full 119 | sprintf('test message-%d',$i), 120 | sprintf('test-key-%d', $i), 121 | [ 122 | 'some' => sprintf('header value %d', $i) 123 | ] 124 | ); 125 | echo sprintf('Queued message number: %d', $i) . PHP_EOL; 126 | 127 | // Poll for events e.g. producer callbacks, to handle errors, etc. 128 | // 0 = non-blocking 129 | // -1 = blocking 130 | // any other int value = timeout in ms 131 | $producer->poll(0); 132 | } 133 | 134 | // Commit transaction for our 10 messages 135 | try { 136 | $producer->commitTransaction(10000); 137 | } catch (KafkaErrorException $e) { 138 | echoTransactionError($e); 139 | die; 140 | } 141 | 142 | // Shutdown producer, flush messages that are in queue. Give up after 20s 143 | $result = $producer->flush(20000); 144 | 145 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $result) { 146 | echo 'Was not able to shutdown within 20s. Messages might be lost!' . PHP_EOL; 147 | } 148 | -------------------------------------------------------------------------------- /src/ext-php-rdkafka/pure-php/producer_transactional.php: -------------------------------------------------------------------------------- 1 | isFatal()) { 25 | $fatalString = 'is fatal'; 26 | } 27 | 28 | if ($e->isRetriable()) { 29 | $retryString = 'is retriable'; 30 | } 31 | 32 | if ($e->transactionRequiresAbort()) { 33 | $abortString = 'needs transaction abort'; 34 | } 35 | 36 | echo 'Was unable to initialize the transactional producer' . PHP_EOL; 37 | 38 | echo sprintf('The reason was: %s, this error %d, %s, %s, %s', $e->getMessage(), $e->getCode(), $fatalString, $retryString, $abortString) . PHP_EOL; 39 | echo sprintf('In detail this means %s', $e->getErrorString()) . PHP_EOL; 40 | echo sprintf('Trace is %s', $e->getTraceAsString()) . 
PHP_EOL; 41 | } 42 | 43 | $conf = new Conf(); 44 | // will be visible in broker logs 45 | $conf->set('client.id', 'pure-php-producer'); 46 | // set broker 47 | $conf->set('metadata.broker.list', 'kafka:9096'); 48 | // set compression (supported are: none,gzip,lz4,snappy,zstd) 49 | $conf->set('compression.codec', 'snappy'); 50 | // set timeout, producer will retry for 5s 51 | $conf->set('message.timeout.ms', '5000'); 52 | 53 | // For the transactional producer you need a unique id to identify it 54 | $conf->set('transactional.id', 'some-unique-id-of-your-producer-to-recognize-it'); 55 | 56 | // This callback processes the delivery reports from the broker 57 | // you can see if your message was truly sent 58 | $conf->setDrMsgCb(function (Producer $kafka, Message $message) { 59 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $message->err) { 60 | $errorStr = rd_kafka_err2str($message->err); 61 | 62 | echo sprintf('Message FAILED (%s, %s) to send with payload => %s', $message->err, $errorStr, $message->payload) . PHP_EOL; 63 | } else { 64 | // message successfully delivered 65 | echo sprintf('Message sent SUCCESSFULLY with payload => %s', $message->payload) . 
PHP_EOL; 66 | } 67 | }); 68 | 69 | // SASL Authentication 70 | // can be SASL_PLAINTEXT, SASL_SSL 71 | //$conf->set('security.protocol', ''); 72 | // can be GSSAPI, PLAIN, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER 73 | // $conf->set('sasl.mechanisms', ''); 74 | // $conf->set('sasl.username', ''); 75 | // $conf->set('sasl.password', ''); 76 | // default is none 77 | // $conf->set('ssl.endpoint.identification.algorithm', 'https'); 78 | 79 | 80 | // SSL Authentication 81 | //$conf->set('security.protocol', 'ssl'); 82 | //$conf->set('ssl.ca.location', __DIR__.'/../keys/ca.pem'); 83 | //$conf->set('ssl.certificate.location', __DIR__.'/../keys/kafka.cert'); 84 | //$conf->set('ssl.key.location', __DIR__.'/../keys/kafka.key'); 85 | 86 | // Add additional output if you need to debug a problem 87 | // $conf->set('log_level', (string) LOG_DEBUG); 88 | // $conf->set('debug', 'all'); 89 | 90 | $producer = new Producer($conf); 91 | // initialize producer topic 92 | $topic = $producer->newTopic('pure-php-transactional-test-topic'); 93 | // Produce 10 test messages 94 | $amountTestMessages = 10; 95 | 96 | // Initialize transactions 97 | try { 98 | $producer->initTransactions(10000); 99 | } catch (KafkaErrorException $e) { 100 | echoTransactionError($e); 101 | die; 102 | } 103 | 104 | // Begin transaction for our 10 messages 105 | try { 106 | $producer->beginTransaction(); 107 | } catch (KafkaErrorException $e) { 108 | echoTransactionError($e); 109 | die; 110 | } 111 | 112 | // Loop to produce some test messages 113 | for ($i = 0; $i < $amountTestMessages; ++$i) { 114 | // Let the partitioner decide the target partition, default partitioner is: RD_KAFKA_MSG_PARTITIONER_CONSISTENT_RANDOM 115 | // You can use a predefined partitioner or write own logic to decide the target partition 116 | $partition = RD_KAFKA_PARTITION_UA; 117 | 118 | //produce message with payload, key and headers 119 | $topic->producev( 120 | $partition, 121 | RD_KAFKA_MSG_F_BLOCK, // will block produce if queue 
is full 122 | sprintf('test message-%d',$i), 123 | sprintf('test-key-%d', $i), 124 | [ 125 | 'some' => sprintf('header value %d', $i) 126 | ] 127 | ); 128 | echo sprintf('Queued message number: %d', $i) . PHP_EOL; 129 | 130 | // Poll for events e.g. producer callbacks, to handle errors, etc. 131 | // 0 = non-blocking 132 | // -1 = blocking 133 | // any other int value = timeout in ms 134 | $producer->poll(0); 135 | } 136 | 137 | // Commit transaction for our 10 messages 138 | try { 139 | $producer->commitTransaction(10000); 140 | } catch (KafkaErrorException $e) { 141 | echoTransactionError($e); 142 | die; 143 | } 144 | 145 | // Shutdown producer, flush messages that are in queue. Give up after 20s 146 | $result = $producer->flush(20000); 147 | 148 | if (RD_KAFKA_RESP_ERR_NO_ERROR !== $result) { 149 | echo 'Was not able to shutdown within 20s. Messages might be lost!' . PHP_EOL; 150 | } 151 | -------------------------------------------------------------------------------- /docker/php/Dockerfile.debug: -------------------------------------------------------------------------------- 1 | FROM debian:buster-slim 2 | 3 | ARG PHP_VERSION 4 | ARG LIBRDKAFKA_VERSION 5 | ARG PHP_RDKAFKA_VERSION 6 | 7 | # prevent Debian's PHP packages from being installed 8 | # https://github.com/docker-library/php/pull/542 9 | RUN set -eux; \ 10 | { \ 11 | echo 'Package: php*'; \ 12 | echo 'Pin: release *'; \ 13 | echo 'Pin-Priority: -1'; \ 14 | } > /etc/apt/preferences.d/no-debian-php 15 | 16 | # dependencies required for running "phpize" 17 | # (see persistent deps below) 18 | ENV PHPIZE_DEPS \ 19 | autoconf \ 20 | dpkg-dev \ 21 | file \ 22 | g++ \ 23 | gcc \ 24 | libc-dev \ 25 | make \ 26 | pkg-config \ 27 | re2c 28 | 29 | # persistent / runtime deps 30 | RUN set -eux; \ 31 | apt-get update; \ 32 | apt-get install -y --no-install-recommends \ 33 | $PHPIZE_DEPS \ 34 | ca-certificates \ 35 | curl \ 36 | xz-utils \ 37 | ; \ 38 | rm -rf /var/lib/apt/lists/* 39 | 40 | ENV PHP_INI_DIR 
/usr/local/etc/php

RUN set -eux; \
    mkdir -p "$PHP_INI_DIR/conf.d"; \
# allow running as an arbitrary user (https://github.com/docker-library/php/issues/743)
    [ ! -d /var/www/html ]; \
    mkdir -p /var/www/html; \
    chown www-data:www-data /var/www/html; \
    chmod 777 /var/www/html

# https://github.com/docker-library/php/pull/939#issuecomment-730501748
ENV PHP_EXTRA_CONFIGURE_ARGS --enable-embed

# Apply stack smash protection to functions using local buffers and alloca()
# Make PHP's main executable position-independent (improves ASLR security mechanism, and has no performance impact on x86_64)
# Enable optimization (-O2)
# Enable linker optimization (this sorts the hash buckets to improve cache locality, and is non-default)
# https://github.com/docker-library/php/issues/272
# -D_LARGEFILE_SOURCE and -D_FILE_OFFSET_BITS=64 (https://www.php.net/manual/en/intro.filesystem.php)
ENV PHP_CFLAGS="-fstack-protector-strong -fpic -fpie -O2 -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64"
ENV PHP_CPPFLAGS="$PHP_CFLAGS"
ENV PHP_LDFLAGS="-Wl,-O1 -pie"

ENV GPG_KEYS 42670A7FE4D0441C8E4632349E4FDC074A4EF02D 5A52880781F755608BF815FC910DEB46F53EA312

ENV PHP_VERSION ${PHP_VERSION}
ENV PHP_URL="https://www.php.net/distributions/php-7.4.14.tar.xz" PHP_ASC_URL="https://www.php.net/distributions/php-7.4.14.tar.xz.asc"
ENV PHP_SHA256="f9f3c37969fcd9006c1dbb1dd76ab53f28c698a1646fa2dde8547c3f45e02886"

# Fetch the PHP source tarball and verify both its sha256 checksum and its GPG signature
RUN set -eux; \
    \
    savedAptMark="$(apt-mark showmanual)"; \
    apt-get update; \
    apt-get install -y --no-install-recommends gnupg dirmngr; \
    rm -rf /var/lib/apt/lists/*; \
    \
    mkdir -p /usr/src; \
    cd /usr/src; \
    \
    curl -fsSL -o php.tar.xz "$PHP_URL"; \
    \
    if [ -n "$PHP_SHA256" ]; then \
        echo "$PHP_SHA256 *php.tar.xz" | sha256sum -c -; \
    fi; \
    \
    if [ -n "$PHP_ASC_URL" ]; then \
        curl -fsSL -o php.tar.xz.asc "$PHP_ASC_URL"; \
        export GNUPGHOME="$(mktemp -d)"; \
        for key in $GPG_KEYS; do \
# NOTE: the SKS keyserver pool (ha.pool.sks-keyservers.net) was decommissioned in June 2021
# and no longer resolves; keyserver.ubuntu.com is the replacement used by the official php images
            gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key"; \
        done; \
        gpg --batch --verify php.tar.xz.asc php.tar.xz; \
        gpgconf --kill all; \
        rm -rf "$GNUPGHOME"; \
    fi; \
    \
    apt-mark auto '.*' > /dev/null; \
    apt-mark manual $savedAptMark > /dev/null; \
    apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false

COPY docker-php-source /usr/local/bin/

# Build PHP from source; build dependencies are installed, used, and purged again in one layer
RUN set -eux; \
    \
    savedAptMark="$(apt-mark showmanual)"; \
    apt-get update; \
    apt-get install -y --no-install-recommends \
        libargon2-dev \
        libcurl4-openssl-dev \
        libedit-dev \
        libonig-dev \
        libsodium-dev \
        libsqlite3-dev \
        libssl-dev \
        libxml2-dev \
        zlib1g-dev \
        ${PHP_EXTRA_BUILD_DEPS:-} \
    ; \
    rm -rf /var/lib/apt/lists/*; \
    \
    export \
        CFLAGS="$PHP_CFLAGS" \
        CPPFLAGS="$PHP_CPPFLAGS" \
        LDFLAGS="$PHP_LDFLAGS" \
    ; \
    docker-php-source extract; \
    cd /usr/src/php; \
    gnuArch="$(dpkg-architecture --query DEB_BUILD_GNU_TYPE)"; \
    debMultiarch="$(dpkg-architecture --query DEB_BUILD_MULTIARCH)"; \
# https://bugs.php.net/bug.php?id=74125
    if [ ! -d /usr/include/curl ]; then \
        ln -sT "/usr/include/$debMultiarch/curl" /usr/local/include/curl; \
    fi; \
    ./configure \
        --build="$gnuArch" \
        --with-config-file-path="$PHP_INI_DIR" \
        --with-config-file-scan-dir="$PHP_INI_DIR/conf.d" \
        \
# make sure invalid --configure-flags are fatal errors instead of just warnings
        --enable-option-checking=fatal \
        \
# https://github.com/docker-library/php/issues/439
        --with-mhash \
        \
# https://github.com/docker-library/php/issues/822
        --with-pic \
        \
# --enable-ftp is included here because ftp_ssl_connect() needs ftp to be compiled statically (see https://github.com/docker-library/php/issues/236)
        --enable-ftp \
# --enable-mbstring is included here because otherwise there's no way to get pecl to use it properly (see https://github.com/docker-library/php/issues/195)
        --enable-mbstring \
# --enable-mysqlnd is included here because it's harder to compile after the fact than extensions are (since it's a plugin for several extensions, not an extension in itself)
        --enable-mysqlnd \
# debug build: compiles PHP with debugging symbols/assertions (this is the debug image variant)
        --enable-debug \
# https://wiki.php.net/rfc/argon2_password_hash (7.2+)
        --with-password-argon2 \
# https://wiki.php.net/rfc/libsodium
        --with-sodium=shared \
# always build against system sqlite3 (https://github.com/php/php-src/commit/6083a387a81dbbd66d6316a3a12a63f06d5f7109)
        --with-pdo-sqlite=/usr \
        --with-sqlite3=/usr \
        \
        --with-curl \
        --with-libedit \
        --with-openssl \
        --with-zlib \
        \
# in PHP 7.4+, the pecl/pear installers are officially deprecated (requiring an explicit "--with-pear")
        --with-pear \
        \
# bundled pcre does not support JIT on s390x
# https://manpages.debian.org/stretch/libpcre3-dev/pcrejit.3.en.html#AVAILABILITY_OF_JIT_SUPPORT
        $(test "$gnuArch" = 's390x-linux-gnu' && echo '--without-pcre-jit') \
        --with-libdir="lib/$debMultiarch" \
        \
        ${PHP_EXTRA_CONFIGURE_ARGS:-} \
    ; \
    make -j "$(nproc)"; \
    find -type f -name '*.a' -delete; \
    make install; \
    \
# reset apt-mark's "manual" list so that "purge --auto-remove" will remove all build dependencies
    apt-mark auto '.*' > /dev/null; \
    [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; \
    find /usr/local -type f -executable -exec ldd '{}' ';' \
        | awk '/=>/ { print $(NF-1) }' \
        | sort -u \
        | xargs -r dpkg-query --search \
        | cut -d: -f1 \
        | sort -u \
        | xargs -r apt-mark manual \
    ; \
    apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false; \
    \
# update pecl channel definitions https://github.com/docker-library/php/issues/443
    pecl update-channels; \
    rm -rf /tmp/pear ~/.pearrc; \
    \
# smoke test
    php --version

COPY docker-php-ext-* docker-php-entrypoint /usr/local/bin/

# sodium was built as a shared module (so that it can be replaced later if so desired), so let's enable it too (https://github.com/docker-library/php/issues/598)
RUN docker-php-ext-enable sodium

ENTRYPOINT ["docker-php-entrypoint"]
CMD ["php", "-a"]


# Customization
# Install packages
RUN apt-get update \
    && apt-get install -y bash sudo git gcc g++ make autoconf \
        icu-devtools libssl-dev libsasl2-dev libpcre3-dev libzstd-dev liblz4-dev zlib1g-dev libicu63 wget gettext valgrind vim \
    && rm -rf /var/lib/apt/lists/*

# Install librdkafka and ext-rdkafka
RUN git clone --depth 1 --branch ${LIBRDKAFKA_VERSION} https://github.com/edenhill/librdkafka.git \
    && cd librdkafka \
    && ./configure \
    && make \
    && make install \
    && git clone --depth 1 --branch ${PHP_RDKAFKA_VERSION} https://github.com/arnaud-lb/php-rdkafka.git \
    && cd php-rdkafka \
    && phpize \
    && ./configure \
    && make all -j 5 \
    && make install \
# chain the cleanup with && (not ";") so the rm only runs after a successful cd back to the build root
    && cd ../.. \
    && rm -rf librdkafka

# Install php extensions
RUN docker-php-ext-install pcntl && \
    docker-php-ext-enable rdkafka pcntl > /dev/null 2>&1

# Install composer
RUN curl -sS https://getcomposer.org/installer | php -- --install-dir=/usr/bin --filename=composer

# final ENTRYPOINT/CMD (restates the earlier declaration; the last occurrence in a Dockerfile wins)
ENTRYPOINT ["docker-php-entrypoint"]
CMD ["php", "-a"]
--------------------------------------------------------------------------------