├── clients ├── chat-room │ ├── docker │ │ ├── rust │ │ │ ├── .gitignore │ │ │ ├── Cargo.toml │ │ │ └── src │ │ │ │ ├── producer.rs │ │ │ │ ├── admin.rs │ │ │ │ ├── main.rs │ │ │ │ └── consumer.rs │ │ ├── python │ │ │ ├── requirements.txt │ │ │ ├── producer.py │ │ │ ├── consumer.py │ │ │ ├── admin.py │ │ │ └── app.py │ │ ├── java │ │ │ ├── src │ │ │ │ ├── test │ │ │ │ │ └── java │ │ │ │ │ │ └── com │ │ │ │ │ │ └── example │ │ │ │ │ │ └── AppTest.java │ │ │ │ └── main │ │ │ │ │ └── java │ │ │ │ │ └── com │ │ │ │ │ └── example │ │ │ │ │ ├── ChatProducer.java │ │ │ │ │ ├── Main.java │ │ │ │ │ ├── ChatConsumer.java │ │ │ │ │ └── Admin.java │ │ │ └── pom.xml │ │ ├── go │ │ │ ├── go.mod │ │ │ ├── producer.go │ │ │ ├── main.go │ │ │ ├── consumer.go │ │ │ └── admin.go │ │ └── nodejs │ │ │ ├── package.json │ │ │ ├── src │ │ │ ├── admin.ts │ │ │ ├── producer.ts │ │ │ ├── consumer.ts │ │ │ └── index.ts │ │ │ └── package-lock.json │ └── cloud │ │ ├── rust │ │ ├── .gitignore │ │ ├── .env.example │ │ ├── Cargo.toml │ │ └── src │ │ │ ├── producer.rs │ │ │ └── admin.rs │ │ ├── python │ │ ├── requirements.txt │ │ ├── producer.py │ │ ├── consumer.py │ │ ├── admin.py │ │ └── app.py │ │ ├── java │ │ ├── src │ │ │ ├── test │ │ │ │ └── java │ │ │ │ │ └── com │ │ │ │ │ └── example │ │ │ │ │ └── AppTest.java │ │ │ └── main │ │ │ │ └── java │ │ │ │ └── com │ │ │ │ └── example │ │ │ │ ├── ChatProducer.java │ │ │ │ ├── Main.java │ │ │ │ ├── ChatConsumer.java │ │ │ │ └── Admin.java │ │ └── pom.xml │ │ ├── go │ │ ├── go.mod │ │ ├── main.go │ │ ├── producer.go │ │ ├── consumer.go │ │ ├── admin.go │ │ └── go.sum │ │ └── nodejs │ │ ├── package.json │ │ ├── src │ │ ├── admin.ts │ │ ├── producer.ts │ │ ├── consumer.ts │ │ └── index.ts │ │ └── package-lock.json ├── schema-registry-demo │ └── python │ │ ├── .gitignore │ │ ├── console-topic.png │ │ ├── requirements.txt │ │ ├── clickstream_key_pb2.pyi │ │ ├── clickstream_key_pb2.py │ │ └── click.py ├── stock-market-activity │ ├── python │ │ ├── 
requirements.txt │ │ └── schema_registry │ │ │ ├── stock_pb2.pyi │ │ │ └── stock_pb2.py │ ├── nodejs │ │ └── package.json │ └── data │ │ ├── stock.proto │ │ ├── stock.avsc │ │ └── README.md └── go │ ├── franz-go │ ├── go.mod │ └── README.md │ └── sarama │ ├── README.md │ └── go.mod ├── data-transforms ├── rust │ ├── jq │ │ ├── .gitignore │ │ ├── conf │ │ │ └── .bootstrap.yaml │ │ ├── transform.yaml │ │ ├── deploy-transform.sh │ │ ├── profile.yml │ │ ├── .clangd │ │ ├── .clang-tidy │ │ ├── .clang-format │ │ ├── Cargo.toml │ │ └── docker-compose.yml │ └── ts-converter │ │ ├── .gitignore │ │ ├── conf │ │ └── .bootstrap.yaml │ │ ├── produce.sh │ │ ├── transform.yaml │ │ ├── deploy-transform.sh │ │ ├── profile.yml │ │ ├── Cargo.toml │ │ ├── create-schema.sh │ │ ├── docker-compose.yml │ │ └── src │ │ └── schema.rs ├── go │ ├── flatten │ │ ├── conf │ │ │ └── .bootstrap.yaml │ │ ├── transform.yaml │ │ ├── go.mod │ │ ├── profile.yml │ │ ├── go.sum │ │ └── docker-compose.yml │ ├── iss_demo │ │ ├── conf │ │ │ ├── .bootstrap.yaml │ │ │ └── install-go.sh │ │ ├── transform.yaml │ │ ├── profile.yml │ │ ├── go.mod │ │ ├── post-schema.sh │ │ ├── iss.avsc │ │ ├── docker-compose.yml │ │ └── go.sum │ ├── regex │ │ ├── conf │ │ │ └── .bootstrap.yaml │ │ ├── go.mod │ │ ├── deploy-transform.sh │ │ ├── profile.yml │ │ ├── transform.yaml │ │ ├── go.sum │ │ ├── docker-compose.yml │ │ ├── test.js │ │ └── transform.go │ ├── to_avro │ │ ├── conf │ │ │ └── .bootstrap.yaml │ │ ├── transform.yaml │ │ ├── profile.yml │ │ ├── go.mod │ │ ├── test │ │ │ └── produce.sh │ │ ├── schema.avsc │ │ ├── docker-compose.yml │ │ ├── transform.go │ │ └── go.sum │ └── redaction │ │ ├── demo │ │ ├── conf │ │ │ ├── .bootstrap.yaml │ │ │ └── install-go.sh │ │ ├── .env │ │ ├── Dockerfile │ │ └── config.yaml │ │ ├── transform.yaml │ │ ├── go.mod │ │ ├── redact │ │ ├── deploy-redaction │ │ ├── example │ │ └── config.yaml │ │ ├── redaction │ │ └── redaction.go │ │ ├── redactors │ │ └── redactor_builtins.go │ │ ├── 
go.sum │ │ ├── transform.go │ │ └── cmd │ │ └── redact.go ├── js │ └── csv-json │ │ ├── conf │ │ └── .bootstrap.yaml │ │ ├── transform.yaml │ │ ├── profile.yml │ │ ├── package.json │ │ ├── dist │ │ ├── data-transforms-tutorial.js │ │ └── source.wat │ │ ├── esbuild.js │ │ ├── src │ │ └── index.js │ │ └── docker-compose.yml └── README.adoc ├── test-schema.avsc ├── docs ├── modules │ ├── ROOT │ │ ├── pages │ │ │ ├── contribute.adoc │ │ │ └── index.adoc │ │ └── images │ │ │ ├── docs-lab.png │ │ │ ├── docs-labs.png │ │ │ └── docs-link-to-labs.png │ ├── docker-compose │ │ ├── pages │ │ │ ├── oidc.adoc │ │ │ ├── iceberg.adoc │ │ │ ├── owl-shop.adoc │ │ │ ├── cdc-mysql-json.adoc │ │ │ ├── single-broker.adoc │ │ │ ├── three-brokers.adoc │ │ │ ├── cdc-postgres-json.adoc │ │ │ └── jira-metrics-pipeline.adoc │ │ ├── attachments │ │ │ ├── data-transforms │ │ │ │ ├── conf │ │ │ │ │ └── .bootstrap.yaml │ │ │ │ └── docker-compose.yml │ │ │ ├── single-broker │ │ │ │ └── docker-compose.yml │ │ │ └── cdc │ │ │ │ └── mysql-json │ │ │ │ └── docker-compose.yml │ │ ├── examples │ │ │ └── jira-metrics-pipeline │ │ └── images │ │ │ ├── mysql-architecture.png │ │ │ └── postgres-architecture.png │ ├── kubernetes │ │ └── pages │ │ │ ├── iceberg.adoc │ │ │ └── gitops-helm.adoc │ ├── clients │ │ ├── pages │ │ │ ├── cloud-go.adoc │ │ │ ├── docker-go.adoc │ │ │ ├── cloud-java.adoc │ │ │ ├── cloud-nodejs.adoc │ │ │ ├── cloud-python.adoc │ │ │ ├── cloud-rust.adoc │ │ │ ├── docker-java.adoc │ │ │ ├── docker-rust.adoc │ │ │ ├── docker-nodejs.adoc │ │ │ ├── docker-python.adoc │ │ │ ├── stock-market-activity-nodejs.adoc │ │ │ └── stock-market-activity-python.adoc │ │ └── images │ │ │ └── chat-room.gif │ ├── data-transforms │ │ ├── pages │ │ │ ├── regex-go.adoc │ │ │ ├── flatten-go.adoc │ │ │ ├── issdemo-go.adoc │ │ │ ├── redaction-go.adoc │ │ │ └── ts-converter-rust.adoc │ │ └── images │ │ │ ├── iss_console.png │ │ │ └── iss_overview.png │ └── connect-plugins │ │ └── pages │ │ └── openai.adoc └── 
antora.yml ├── docker-compose ├── data-transforms │ ├── conf │ │ └── .bootstrap.yaml │ └── docker-compose.yml ├── iceberg │ ├── .dockerignore │ ├── spark │ │ ├── requirements.txt │ │ ├── ipython │ │ │ └── startup │ │ │ │ └── README │ │ ├── .pyiceberg.yaml │ │ ├── entrypoint.sh │ │ └── spark-defaults.conf │ └── schema.avsc ├── cdc │ ├── mysql-json │ │ ├── data │ │ │ ├── mysql.cnf │ │ │ └── mysql_bootstrap.sql │ │ ├── create_debezium_mysql_connector.sh │ │ └── docker-compose.yml │ ├── README.adoc │ └── postgres-json │ │ ├── data │ │ └── postgres_bootstrap.sql │ │ ├── create_debezium_postgres_connector.sh │ │ └── docker-compose.yml ├── jira-metrics-pipeline │ ├── console-config.yml │ └── .env.example ├── oidc │ ├── package.json │ ├── console-config.yaml │ └── profile.yaml ├── README.adoc └── single-broker │ └── docker-compose.yml ├── images ├── redpanda_lab1.png └── redpanda_lab2.png ├── kubernetes ├── gitops-helm │ ├── redpanda-ns.yaml │ ├── cert-manager-ns.yaml │ ├── cert-manager-repo.yaml │ ├── redpanda-repo.yaml │ ├── cert-manager-release.yaml │ └── redpanda-helm-release.yaml ├── iceberg │ ├── spark │ │ ├── requirements.txt │ │ ├── ipython │ │ │ └── startup │ │ │ │ └── README │ │ ├── spark-defaults.conf │ │ ├── .pyiceberg.yaml │ │ ├── entrypoint.sh │ │ └── notebooks │ │ │ └── Iceberg - Query Redpanda Table.ipynb │ ├── secret.yaml │ ├── kind.yaml │ ├── minio-nodeport.yaml │ ├── minio-tenant-values.yaml │ └── dns-solution.md └── README.adoc ├── .gitmodules ├── setup-tests ├── cleanup.json ├── cloud │ └── cloud-log-in.json ├── fetch-versions-and-rpk.json └── .doc-detective.json ├── .github └── workflows │ ├── build-docs.yml │ ├── test-docs.yml │ └── update-deps.yml ├── .gitignore ├── README-TEMPLATE.adoc ├── connect-plugins └── processor │ └── embeddings │ └── openai │ ├── atlas_demo.yaml │ └── requirements.txt ├── package.json └── README.md /clients/chat-room/docker/rust/.gitignore: -------------------------------------------------------------------------------- 1 | 
/target -------------------------------------------------------------------------------- /data-transforms/rust/jq/.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | -------------------------------------------------------------------------------- /data-transforms/rust/ts-converter/.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | -------------------------------------------------------------------------------- /test-schema.avsc: -------------------------------------------------------------------------------- 1 | {"type": "long", "name": "epoch"} 2 | -------------------------------------------------------------------------------- /clients/chat-room/cloud/rust/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | .env 3 | -------------------------------------------------------------------------------- /docs/modules/ROOT/pages/contribute.adoc: -------------------------------------------------------------------------------- 1 | ../../../CONTRIBUTING.adoc -------------------------------------------------------------------------------- /clients/schema-registry-demo/python/.gitignore: -------------------------------------------------------------------------------- 1 | venv 2 | __pycache__/ 3 | -------------------------------------------------------------------------------- /data-transforms/rust/jq/conf/.bootstrap.yaml: -------------------------------------------------------------------------------- 1 | data_transforms_enabled: true -------------------------------------------------------------------------------- /clients/chat-room/cloud/python/requirements.txt: -------------------------------------------------------------------------------- 1 | kafka-python-ng==2.2.2 2 | -------------------------------------------------------------------------------- /clients/chat-room/docker/python/requirements.txt: 
-------------------------------------------------------------------------------- 1 | kafka-python-ng==2.2.2 2 | -------------------------------------------------------------------------------- /data-transforms/go/flatten/conf/.bootstrap.yaml: -------------------------------------------------------------------------------- 1 | data_transforms_enabled: true -------------------------------------------------------------------------------- /data-transforms/go/iss_demo/conf/.bootstrap.yaml: -------------------------------------------------------------------------------- 1 | data_transforms_enabled: true -------------------------------------------------------------------------------- /data-transforms/go/regex/conf/.bootstrap.yaml: -------------------------------------------------------------------------------- 1 | data_transforms_enabled: true -------------------------------------------------------------------------------- /data-transforms/go/to_avro/conf/.bootstrap.yaml: -------------------------------------------------------------------------------- 1 | data_transforms_enabled: true -------------------------------------------------------------------------------- /data-transforms/js/csv-json/conf/.bootstrap.yaml: -------------------------------------------------------------------------------- 1 | data_transforms_enabled: true -------------------------------------------------------------------------------- /data-transforms/go/redaction/demo/conf/.bootstrap.yaml: -------------------------------------------------------------------------------- 1 | data_transforms_enabled: true -------------------------------------------------------------------------------- /docker-compose/data-transforms/conf/.bootstrap.yaml: -------------------------------------------------------------------------------- 1 | data_transforms_enabled: true -------------------------------------------------------------------------------- /docs/modules/docker-compose/pages/oidc.adoc: 
-------------------------------------------------------------------------------- 1 | ../../../../docker-compose/oidc/README.adoc -------------------------------------------------------------------------------- /docs/modules/kubernetes/pages/iceberg.adoc: -------------------------------------------------------------------------------- 1 | ../../../../kubernetes/iceberg/README.adoc -------------------------------------------------------------------------------- /data-transforms/go/redaction/demo/.env: -------------------------------------------------------------------------------- 1 | IMAGE=redpandadata/redpanda:${REDPANDA_VERSION:-latest} -------------------------------------------------------------------------------- /data-transforms/rust/ts-converter/conf/.bootstrap.yaml: -------------------------------------------------------------------------------- 1 | data_transforms_enabled: true 2 | 3 | -------------------------------------------------------------------------------- /docs/modules/clients/pages/cloud-go.adoc: -------------------------------------------------------------------------------- 1 | ../../../../clients/chat-room/cloud/go/README.adoc -------------------------------------------------------------------------------- /docs/modules/clients/pages/docker-go.adoc: -------------------------------------------------------------------------------- 1 | ../../../../clients/chat-room/docker/go/README.adoc -------------------------------------------------------------------------------- /docs/modules/docker-compose/pages/iceberg.adoc: -------------------------------------------------------------------------------- 1 | ../../../../docker-compose/iceberg/README.adoc -------------------------------------------------------------------------------- /docs/modules/kubernetes/pages/gitops-helm.adoc: -------------------------------------------------------------------------------- 1 | ../../../../kubernetes/gitops-helm/README.adoc 
-------------------------------------------------------------------------------- /docs/modules/clients/pages/cloud-java.adoc: -------------------------------------------------------------------------------- 1 | ../../../../clients/chat-room/cloud/java/README.adoc -------------------------------------------------------------------------------- /docs/modules/clients/pages/cloud-nodejs.adoc: -------------------------------------------------------------------------------- 1 | ../../../../clients/chat-room/cloud/nodejs/README.adoc -------------------------------------------------------------------------------- /docs/modules/clients/pages/cloud-python.adoc: -------------------------------------------------------------------------------- 1 | ../../../../clients/chat-room/cloud/python/README.adoc -------------------------------------------------------------------------------- /docs/modules/clients/pages/cloud-rust.adoc: -------------------------------------------------------------------------------- 1 | ../../../../clients/chat-room/cloud/rust/README.adoc -------------------------------------------------------------------------------- /docs/modules/clients/pages/docker-java.adoc: -------------------------------------------------------------------------------- 1 | ../../../../clients/chat-room/docker/java/README.adoc -------------------------------------------------------------------------------- /docs/modules/clients/pages/docker-rust.adoc: -------------------------------------------------------------------------------- 1 | ../../../../clients/chat-room/docker/rust/README.adoc -------------------------------------------------------------------------------- /docs/modules/data-transforms/pages/regex-go.adoc: -------------------------------------------------------------------------------- 1 | ../../../../data-transforms/go/regex/README.adoc -------------------------------------------------------------------------------- /docs/modules/docker-compose/pages/owl-shop.adoc: 
-------------------------------------------------------------------------------- 1 | ../../../../docker-compose/owl-shop/README.adoc -------------------------------------------------------------------------------- /docs/modules/clients/pages/docker-nodejs.adoc: -------------------------------------------------------------------------------- 1 | ../../../../clients/chat-room/docker/nodejs/README.adoc -------------------------------------------------------------------------------- /docs/modules/clients/pages/docker-python.adoc: -------------------------------------------------------------------------------- 1 | ../../../../clients/chat-room/docker/python/README.adoc -------------------------------------------------------------------------------- /docs/modules/data-transforms/pages/flatten-go.adoc: -------------------------------------------------------------------------------- 1 | ../../../../data-transforms/go/flatten/README.adoc -------------------------------------------------------------------------------- /docs/modules/data-transforms/pages/issdemo-go.adoc: -------------------------------------------------------------------------------- 1 | ../../../../data-transforms/go/iss_demo/README.adoc -------------------------------------------------------------------------------- /docs/modules/docker-compose/attachments/data-transforms/conf/.bootstrap.yaml: -------------------------------------------------------------------------------- 1 | data_transforms_enabled: true -------------------------------------------------------------------------------- /docs/modules/docker-compose/pages/cdc-mysql-json.adoc: -------------------------------------------------------------------------------- 1 | ../../../../docker-compose/cdc/mysql-json/README.adoc -------------------------------------------------------------------------------- /docs/modules/docker-compose/pages/single-broker.adoc: -------------------------------------------------------------------------------- 1 | 
../../../../docker-compose/single-broker/README.adoc -------------------------------------------------------------------------------- /docs/modules/docker-compose/pages/three-brokers.adoc: -------------------------------------------------------------------------------- 1 | ../../../../docker-compose/three-brokers/README.adoc -------------------------------------------------------------------------------- /data-transforms/rust/jq/transform.yaml: -------------------------------------------------------------------------------- 1 | name: jq 2 | input-topic: "" 3 | output-topic: "" 4 | language: rust 5 | -------------------------------------------------------------------------------- /docs/modules/connect-plugins/pages/openai.adoc: -------------------------------------------------------------------------------- 1 | ../../../../connect-plugins/processor/embeddings/openai/README.adoc -------------------------------------------------------------------------------- /docs/modules/data-transforms/pages/redaction-go.adoc: -------------------------------------------------------------------------------- 1 | ../../../../data-transforms/go/redaction/demo/README.adoc -------------------------------------------------------------------------------- /docs/modules/docker-compose/pages/cdc-postgres-json.adoc: -------------------------------------------------------------------------------- 1 | ../../../../docker-compose/cdc/postgres-json/README.adoc -------------------------------------------------------------------------------- /images/redpanda_lab1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redpanda-data/redpanda-labs/HEAD/images/redpanda_lab1.png -------------------------------------------------------------------------------- /images/redpanda_lab2.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/redpanda-data/redpanda-labs/HEAD/images/redpanda_lab2.png -------------------------------------------------------------------------------- /kubernetes/gitops-helm/redpanda-ns.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: redpanda 5 | -------------------------------------------------------------------------------- /data-transforms/rust/ts-converter/produce.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | date '+%s000' | rpk topic produce src --schema-id topic 3 | -------------------------------------------------------------------------------- /docs/modules/data-transforms/pages/ts-converter-rust.adoc: -------------------------------------------------------------------------------- 1 | ../../../../data-transforms/rust/ts-converter/README.adoc -------------------------------------------------------------------------------- /docker-compose/iceberg/.dockerignore: -------------------------------------------------------------------------------- 1 | .git 2 | .gitignore 3 | spark/Dockerfile 4 | README.adoc 5 | docker-compose.yml 6 | -------------------------------------------------------------------------------- /docs/modules/clients/pages/stock-market-activity-nodejs.adoc: -------------------------------------------------------------------------------- 1 | ../../../../clients/stock-market-activity/nodejs/README.adoc -------------------------------------------------------------------------------- /docs/modules/clients/pages/stock-market-activity-python.adoc: -------------------------------------------------------------------------------- 1 | ../../../../clients/stock-market-activity/python/README.adoc -------------------------------------------------------------------------------- /docs/modules/docker-compose/examples/jira-metrics-pipeline: 
-------------------------------------------------------------------------------- 1 | ../../../../docker-compose/jira-metrics-pipeline/connect-configs -------------------------------------------------------------------------------- /docs/modules/docker-compose/pages/jira-metrics-pipeline.adoc: -------------------------------------------------------------------------------- 1 | ../../../../docker-compose/jira-metrics-pipeline/README.adoc -------------------------------------------------------------------------------- /kubernetes/gitops-helm/cert-manager-ns.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Namespace 3 | metadata: 4 | name: cert-manager 5 | -------------------------------------------------------------------------------- /data-transforms/rust/jq/deploy-transform.sh: -------------------------------------------------------------------------------- 1 | rpk transform deploy --var=FILTER="del(.email)" --input-topic=src --output-topic=sink -------------------------------------------------------------------------------- /data-transforms/go/iss_demo/transform.yaml: -------------------------------------------------------------------------------- 1 | name: iss_demo 2 | input-topic: "" 3 | output-topic: "" 4 | language: tinygo-no-goroutines -------------------------------------------------------------------------------- /data-transforms/rust/ts-converter/transform.yaml: -------------------------------------------------------------------------------- 1 | name: ts-converter 2 | input-topic: "" 3 | output-topic: "" 4 | language: rust 5 | -------------------------------------------------------------------------------- /data-transforms/go/flatten/transform.yaml: -------------------------------------------------------------------------------- 1 | name: flatten 2 | input-topic: "" 3 | output-topic: "" 4 | language: tinygo-with-goroutines 5 | 
-------------------------------------------------------------------------------- /docs/modules/ROOT/images/docs-lab.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redpanda-data/redpanda-labs/HEAD/docs/modules/ROOT/images/docs-lab.png -------------------------------------------------------------------------------- /docs/modules/ROOT/images/docs-labs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redpanda-data/redpanda-labs/HEAD/docs/modules/ROOT/images/docs-labs.png -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "redpanda-edge-agent"] 2 | path = redpanda-edge-agent 3 | url = https://github.com/redpanda-data/redpanda-edge-agent 4 | -------------------------------------------------------------------------------- /data-transforms/js/csv-json/transform.yaml: -------------------------------------------------------------------------------- 1 | name: data-transforms-tutorial 2 | input-topic: "" 3 | output-topics: [] 4 | language: javascript 5 | -------------------------------------------------------------------------------- /docs/modules/clients/images/chat-room.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redpanda-data/redpanda-labs/HEAD/docs/modules/clients/images/chat-room.gif -------------------------------------------------------------------------------- /data-transforms/go/regex/go.mod: -------------------------------------------------------------------------------- 1 | module regex 2 | 3 | go 1.20 4 | 5 | require github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.1.0 6 | -------------------------------------------------------------------------------- 
/docs/modules/ROOT/images/docs-link-to-labs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redpanda-data/redpanda-labs/HEAD/docs/modules/ROOT/images/docs-link-to-labs.png -------------------------------------------------------------------------------- /docs/modules/data-transforms/images/iss_console.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redpanda-data/redpanda-labs/HEAD/docs/modules/data-transforms/images/iss_console.png -------------------------------------------------------------------------------- /docs/modules/data-transforms/images/iss_overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redpanda-data/redpanda-labs/HEAD/docs/modules/data-transforms/images/iss_overview.png -------------------------------------------------------------------------------- /clients/schema-registry-demo/python/console-topic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redpanda-data/redpanda-labs/HEAD/clients/schema-registry-demo/python/console-topic.png -------------------------------------------------------------------------------- /data-transforms/rust/ts-converter/deploy-transform.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | rpk transform deploy --file ts-converter.wasm -i src -o sink --var "TIMESTAMP_TARGET_TYPE=string[%+]" 3 | -------------------------------------------------------------------------------- /docker-compose/cdc/mysql-json/data/mysql.cnf: -------------------------------------------------------------------------------- 1 | [mysqld] 2 | server-id = 223344 3 | log_bin = mysql-bin 4 | expire_logs_days = 1 5 | binlog_format = row -------------------------------------------------------------------------------- 
/docs/modules/docker-compose/images/mysql-architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redpanda-data/redpanda-labs/HEAD/docs/modules/docker-compose/images/mysql-architecture.png -------------------------------------------------------------------------------- /docs/modules/docker-compose/images/postgres-architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redpanda-data/redpanda-labs/HEAD/docs/modules/docker-compose/images/postgres-architecture.png -------------------------------------------------------------------------------- /clients/chat-room/cloud/rust/.env.example: -------------------------------------------------------------------------------- 1 | REDPANDA_BROKER="" 2 | REDPANDA_USERNAME="" 3 | REDPANDA_PASSWORD="" 4 | -------------------------------------------------------------------------------- /data-transforms/go/redaction/transform.yaml: -------------------------------------------------------------------------------- 1 | name: owlshop-orders-redacted-transform 2 | input-topic: "owlshop-orders" 3 | output-topic: "owlshop-orders-redacted" 4 | language: tinygo-no-goroutines 5 | -------------------------------------------------------------------------------- /data-transforms/go/flatten/go.mod: -------------------------------------------------------------------------------- 1 | module flatten 2 | 3 | go 1.20 4 | 5 | require ( 6 | github.com/bcicen/jstream v1.0.1 7 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.1.0 8 | ) 9 | -------------------------------------------------------------------------------- /data-transforms/go/redaction/demo/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG IMAGE 2 | #FROM 3 | FROM $IMAGE 4 | 5 | USER root 6 | ENTRYPOINT [ "/bin/bash" ] 7 | 8 | COPY conf/install-go.sh /tmp 9 | 10 | RUN 
/tmp/install-go.sh -------------------------------------------------------------------------------- /data-transforms/go/to_avro/transform.yaml: -------------------------------------------------------------------------------- 1 | name: to_avro 2 | input-topic: "nasdaq_history_csv" 3 | output-topic: "nasdaq_history_avro" 4 | language: tinygo-no-goroutines 5 | env: { 6 | SCHEMA_ID: 1 7 | } 8 | -------------------------------------------------------------------------------- /data-transforms/rust/ts-converter/profile.yml: -------------------------------------------------------------------------------- 1 | description: For use with the docker compose file 2 | kafka_api: 3 | brokers: 4 | - 127.0.0.1:9093 5 | admin_api: 6 | addresses: 7 | - 127.0.0.1:9644 8 | -------------------------------------------------------------------------------- /data-transforms/go/regex/deploy-transform.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | rpk transform deploy --var=PATTERN="\\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\\b" --var=MATCH_VALUE=true --input-topic=src --output-topic=sink -------------------------------------------------------------------------------- /data-transforms/rust/jq/profile.yml: -------------------------------------------------------------------------------- 1 | description: For use with the docker compose file 2 | kafka_api: 3 | brokers: 4 | - 127.0.0.1:9093 5 | admin_api: 6 | addresses: 7 | - 127.0.0.1:9644 8 | 9 | -------------------------------------------------------------------------------- /data-transforms/go/flatten/profile.yml: -------------------------------------------------------------------------------- 1 | description: For use with the docker compose file 2 | kafka_api: 3 | brokers: 4 | - 127.0.0.1:9093 5 | admin_api: 6 | addresses: 7 | - 127.0.0.1:9644 8 | 9 | -------------------------------------------------------------------------------- /data-transforms/go/iss_demo/profile.yml: 
-------------------------------------------------------------------------------- 1 | description: For use with the docker compose file 2 | kafka_api: 3 | brokers: 4 | - 127.0.0.1:9093 5 | admin_api: 6 | addresses: 7 | - 127.0.0.1:9644 8 | 9 | -------------------------------------------------------------------------------- /data-transforms/go/regex/profile.yml: -------------------------------------------------------------------------------- 1 | description: For use with the docker compose file 2 | kafka_api: 3 | brokers: 4 | - 127.0.0.1:9093 5 | admin_api: 6 | addresses: 7 | - 127.0.0.1:9644 8 | 9 | -------------------------------------------------------------------------------- /data-transforms/go/to_avro/profile.yml: -------------------------------------------------------------------------------- 1 | description: For use with the docker compose file 2 | kafka_api: 3 | brokers: 4 | - 127.0.0.1:9093 5 | admin_api: 6 | addresses: 7 | - 127.0.0.1:9644 8 | 9 | -------------------------------------------------------------------------------- /data-transforms/js/csv-json/profile.yml: -------------------------------------------------------------------------------- 1 | description: For use with the docker compose file 2 | kafka_api: 3 | brokers: 4 | - 127.0.0.1:9093 5 | admin_api: 6 | addresses: 7 | - 127.0.0.1:9644 8 | 9 | -------------------------------------------------------------------------------- /docker-compose/iceberg/spark/requirements.txt: -------------------------------------------------------------------------------- 1 | jupyter==1.0.0 2 | spylon-kernel==0.4.1 3 | pyiceberg[pyarrow,duckdb,pandas]==0.7.1 4 | jupysql==0.10.5 5 | matplotlib==3.9.2 6 | scipy==1.14.1 7 | duckdb-engine==0.13.1 8 | -------------------------------------------------------------------------------- /clients/schema-registry-demo/python/requirements.txt: -------------------------------------------------------------------------------- 1 | certifi==2024.2.2 2 | charset-normalizer==3.3.2 3 | 
confluent-kafka==2.3.0 4 | fastavro==1.9.4 5 | idna==3.6 6 | protobuf==5.26.0 7 | requests==2.31.0 8 | urllib3==2.2.1 9 | -------------------------------------------------------------------------------- /docs/antora.yml: -------------------------------------------------------------------------------- 1 | name: redpanda-labs 2 | title: Labs 3 | version: ~ 4 | asciidoc: 5 | attributes: 6 | page-header-data: 7 | order: 4 8 | color: '#227093' 9 | full-version: 24.2.2 10 | 11 | 12 | -------------------------------------------------------------------------------- /clients/stock-market-activity/python/requirements.txt: -------------------------------------------------------------------------------- 1 | certifi==2023.7.22 2 | charset-normalizer==3.3.0 3 | kafka-python-ng==2.2.2 4 | fastavro==1.8.4 5 | idna==3.4 6 | python-dotenv==0.20.0 7 | requests==2.31.0 8 | urllib3==2.0.7 9 | -------------------------------------------------------------------------------- /kubernetes/iceberg/spark/requirements.txt: -------------------------------------------------------------------------------- 1 | jupyter==1.0.0 2 | spylon-kernel==0.4.1 3 | pyiceberg[pyarrow,duckdb,pandas]==0.7.1 4 | jupysql==0.10.5 5 | matplotlib==3.9.2 6 | scipy==1.14.1 7 | duckdb-engine==0.13.1 8 | prettytable==3.10.0 9 | -------------------------------------------------------------------------------- /data-transforms/rust/jq/.clangd: -------------------------------------------------------------------------------- 1 | CompileFlags: 2 | Add: 3 | - -xc++ 4 | - -Wall 5 | - -std=c++23 6 | - -stdlib=libc++ 7 | - -Ithird_party 8 | - -fno-exceptions 9 | Compiler: clang++-17 10 | -------------------------------------------------------------------------------- /kubernetes/gitops-helm/cert-manager-repo.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: source.toolkit.fluxcd.io/v1beta1 2 | kind: HelmRepository 3 | metadata: 4 | name: jetstack 5 | namespace: flux-system 6 | 
spec: 7 | interval: 1h 8 | url: https://charts.jetstack.io -------------------------------------------------------------------------------- /kubernetes/gitops-helm/redpanda-repo.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: source.toolkit.fluxcd.io/v1beta2 2 | kind: HelmRepository 3 | metadata: 4 | name: redpanda 5 | namespace: redpanda 6 | spec: 7 | interval: 5m0s 8 | url: https://charts.redpanda.com/ 9 | -------------------------------------------------------------------------------- /kubernetes/README.adoc: -------------------------------------------------------------------------------- 1 | = Kubernetes Labs 2 | :description: A collection of Redpanda Labs that showcase how to work with Redpanda in Kubernetes. 3 | 4 | {description} 5 | 6 | - link:./gitops-helm/README.adoc[Set Up GitOps for the Redpanda Helm Chart] -------------------------------------------------------------------------------- /data-transforms/rust/jq/.clang-tidy: -------------------------------------------------------------------------------- 1 | --- 2 | Checks: 'clang-diagnostic-*,clang-analyzer-*,cert-*,cppcoreguidelines-*,hicpp-*,modernize-*,performance-*,misc-*,bugprone-*,-modernize-use-trailing-return-type,-modernize-use-nodiscard,-hicpp-named-parameter' 3 | 4 | -------------------------------------------------------------------------------- /clients/stock-market-activity/nodejs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "module", 3 | "dependencies": { 4 | "chalk": "^5.0.1", 5 | "csv-parser": "^3.0.0", 6 | "date-fns": "^2.28.0", 7 | "kafkajs": "^1.15.0", 8 | "node-fetch": "^2.6.7" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /data-transforms/go/iss_demo/go.mod: -------------------------------------------------------------------------------- 1 | module iss_demo 2 | 3 | go 1.20 4 | 5 | require ( 6 | 
github.com/linkedin/goavro/v2 v2.12.0 7 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.1.0 8 | ) 9 | 10 | require github.com/golang/snappy v0.0.1 // indirect 11 | -------------------------------------------------------------------------------- /data-transforms/go/iss_demo/post-schema.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Convert Avro schema to JSON and post it using curl 4 | jq '. | {schema: tojson}' iss.avsc | curl -X POST "http://localhost:8081/subjects/iss_position/versions" -H "Content-Type: application/vnd.schemaregistry.v1+json" -d @- -------------------------------------------------------------------------------- /data-transforms/go/to_avro/go.mod: -------------------------------------------------------------------------------- 1 | module to_avro 2 | 3 | go 1.20 4 | 5 | require ( 6 | github.com/linkedin/goavro/v2 v2.12.0 7 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.1.0 8 | ) 9 | 10 | require github.com/golang/snappy v0.0.1 // indirect 11 | -------------------------------------------------------------------------------- /clients/stock-market-activity/data/stock.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package stock; 4 | 5 | message Stock { 6 | string date = 1; 7 | string last = 2; 8 | string volume = 3; 9 | string open = 4; 10 | string high = 5; 11 | string low = 6; 12 | } 13 | -------------------------------------------------------------------------------- /data-transforms/rust/jq/.clang-format: -------------------------------------------------------------------------------- 1 | BasedOnStyle: Chromium 2 | 3 | # Some folks prefer to write "int& foo" while others prefer "int &foo". 
The 4 | # Google Style Guide only asks for consistency within a project, we chose 5 | # "int& foo" for this project: 6 | DerivePointerAlignment: false 7 | PointerAlignment: Left 8 | -------------------------------------------------------------------------------- /data-transforms/go/iss_demo/conf/install-go.sh: -------------------------------------------------------------------------------- 1 | apt -y update 2 | apt -y install wget 3 | 4 | cd /tmp || exit 5 | ARCH=$(arch | sed 's/aarch64/arm64/') 6 | 7 | wget https://go.dev/dl/go1.21.5.linux-${ARCH}.tar.gz || exit 8 | tar xf go1.21.5.linux-${ARCH}.tar.gz 9 | mv go /usr/local 10 | ln -s /usr/local/go/bin/go /usr/local/bin/ -------------------------------------------------------------------------------- /data-transforms/go/redaction/go.mod: -------------------------------------------------------------------------------- 1 | module redactor 2 | 3 | go 1.20 4 | 5 | require github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.1.0 6 | 7 | require gopkg.in/yaml.v3 v3.0.1 8 | 9 | require ( 10 | github.com/pmw-rp/jsonparser v0.1.1 // indirect 11 | github.com/pmw-rp/splice v0.1.0 // indirect 12 | ) 13 | -------------------------------------------------------------------------------- /docker-compose/iceberg/schema.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type" : "record", 3 | "namespace" : "com.redpanda.examples.avro", 4 | "name" : "ClickEvent", 5 | "fields" : [ 6 | { "name": "user_id", "type" : "int" }, 7 | { "name": "event_type", "type" : "string" }, 8 | { "name": "ts", "type": "string" } 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /data-transforms/go/iss_demo/iss.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "iss_location", 4 | "namespace": "com.redpanda", 5 | "fields" : [ 6 | {"name": "latitude", "type": "double"}, 7 | 
{"name": "longitude", "type": "double"}, 8 | {"name": "timestamp", "type": "int", "logicalType": "date"} 9 | ] 10 | } -------------------------------------------------------------------------------- /data-transforms/rust/ts-converter/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "ts-converter" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | anyhow = "1.0.82" 8 | apache-avro = { version = "0.16.0", features = ["snappy"] } 9 | chrono = "0.4.38" 10 | redpanda-transform-sdk = "1.1.0" 11 | redpanda-transform-sdk-sr = "1.1.0" 12 | 13 | -------------------------------------------------------------------------------- /data-transforms/go/redaction/demo/conf/install-go.sh: -------------------------------------------------------------------------------- 1 | apt -y update 2 | apt -y install wget 3 | 4 | cd /tmp || exit 5 | ARCH=$(arch | sed 's/aarch64/arm64/' | sed 's/x86_64/amd64/') 6 | 7 | wget https://go.dev/dl/go1.21.5.linux-${ARCH}.tar.gz || exit 8 | tar xf go1.21.5.linux-${ARCH}.tar.gz 9 | mv go /usr/local 10 | ln -s /usr/local/go/bin/go /usr/local/bin/ -------------------------------------------------------------------------------- /setup-tests/cleanup.json: -------------------------------------------------------------------------------- 1 | { 2 | "tests": [ 3 | { 4 | "id": "cleanup-docker", 5 | "description": "Clean up Docker containers", 6 | "steps": [ 7 | { 8 | "action": "runShell", 9 | "command": "docker compose down -v", 10 | "workingDirectory": "." 
11 | } 12 | ] 13 | } 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /clients/chat-room/docker/rust/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "rust" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | rdkafka = { version = "0.36.2", features = ["tokio"] } 8 | serde = { version = "1.0.202", features = ["derive"] } 9 | serde_json = "1.0.117" 10 | tokio = { version = "1.37.0", features = ["full"] } 11 | tokio-stream = "0.1.15" 12 | -------------------------------------------------------------------------------- /data-transforms/go/to_avro/test/produce.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | filename="$1" 4 | topic="$2" 5 | 6 | if [[ -z "$filename" || -z "$topic" ]]; then 7 | echo "Usage: $0 " 8 | exit 1 9 | fi 10 | 11 | { 12 | read # Skip the header row 13 | while IFS= read -r line; do 14 | echo "$line" | rpk topic produce "$topic" 15 | done 16 | } < "$filename" 17 | -------------------------------------------------------------------------------- /docs/modules/ROOT/pages/index.adoc: -------------------------------------------------------------------------------- 1 | = Redpanda Labs 2 | :page-layout: labs-search 3 | :page-aliases: 23.3@ROOT:get-started:code-examples.adoc, 23.3@ROOT:development:code-examples.adoc, 23.3@ROOT:introduction:code-examples.adoc, 23.3@ROOT:develop:code-examples.adoc, 23.3@ROOT:console:reference/docker-compose.adoc, 23.3@ROOT:reference:console/docker-compose.adoc, 23.3@ROOT:reference:docker-compose.adoc -------------------------------------------------------------------------------- /data-transforms/js/csv-json/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "data-transforms-tutorial", 3 | "type": "module", 4 | "private": true, 5 | "scripts": { 6 | "build": "node 
esbuild.js" 7 | }, 8 | "dependencies": { 9 | "@redpanda-data/transform-sdk": "1.x" 10 | }, 11 | "devDependencies": { 12 | "esbuild": "0.20.x", 13 | "esbuild-plugin-polyfill-node": "0.3.x" 14 | } 15 | } -------------------------------------------------------------------------------- /.github/workflows/build-docs.yml: -------------------------------------------------------------------------------- 1 | # This workflow triggers a build of the production documentation site. 2 | 3 | name: Build production site 4 | 5 | on: 6 | push: 7 | branches: [ main ] 8 | 9 | jobs: 10 | dispatch: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Trigger build 14 | run: curl -X POST -d {} https://api.netlify.com/build_hooks/65c0cd2e63853f0b159cdbc6 15 | -------------------------------------------------------------------------------- /clients/chat-room/cloud/rust/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "rust" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | dotenv = "0.15.0" 8 | rdkafka = { version = "0.36.2", features = ["tokio", "ssl"] } 9 | serde = { version = "1.0.202", features = ["derive"] } 10 | serde_json = "1.0.117" 11 | tokio = { version = "1.37.0", features = ["full"] } 12 | tokio-stream = "0.1.15" 13 | -------------------------------------------------------------------------------- /data-transforms/rust/jq/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "jq" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | anyhow = "1.0.81" 8 | jaq-core = "1.2.1" 9 | jaq-interpret = "1.2.1" 10 | jaq-parse = "1.0.2" 11 | jaq-std = "1.2.1" 12 | redpanda-transform-sdk = "1.1.0" 13 | serde_json = "1.0.114" 14 | talc = { version = "4.4.1", default-features = false, features = ["lock_api"] } 15 | -------------------------------------------------------------------------------- 
/data-transforms/rust/ts-converter/create-schema.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # Create a temporary schema file since RPK needs one. 4 | SCHEMA_FILE=$(mktemp "epoch.XXXXXXXXXX.avsc") 5 | function cleanup { 6 | rm "${SCHEMA_FILE}" 7 | } 8 | trap cleanup EXIT 9 | 10 | echo '{"type": "long", "name": "epoch"}' > "${SCHEMA_FILE}" 11 | rpk registry schema create \ 12 | src-value --schema "${SCHEMA_FILE}" -------------------------------------------------------------------------------- /docker-compose/cdc/README.adoc: -------------------------------------------------------------------------------- 1 | = Change Data Capture (CDC) with Debezium and Redpanda 2 | 3 | A collection of Docker Compose files that demonstrate CDC from different databases with link:https://debezium.io/[Debezium]. Debezium has been configured to stream change events into Redpanda. 4 | 5 | Databases include: 6 | 7 | - link:./mysql-json/README.adoc[MySQL] 8 | - link:./postgres-json/README.adoc[Postgres] 9 | -------------------------------------------------------------------------------- /data-transforms/go/redaction/redact: -------------------------------------------------------------------------------- 1 | SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) 2 | 3 | TEST=`go run cmd/redact.go "$@" >/dev/null 2>/dev/null` 4 | 5 | if [ $? 
-eq 0 ] 6 | then 7 | # Optional pretty printing via jq 8 | if command -v jq &> /dev/null 9 | then 10 | go run cmd/redact.go "$@" | jq 11 | exit 12 | fi 13 | else 14 | go run cmd/redact.go "$@" 15 | fi -------------------------------------------------------------------------------- /docker-compose/jira-metrics-pipeline/console-config.yml: -------------------------------------------------------------------------------- 1 | kafka: 2 | brokers: 3 | - redpanda:9092 4 | schemaRegistry: 5 | enabled: true 6 | urls: 7 | - http://redpanda:8081 8 | 9 | redpanda: 10 | adminApi: 11 | enabled: true 12 | urls: 13 | - http://redpanda:9644 14 | 15 | connect: 16 | enabled: false 17 | 18 | console: 19 | topicDocumentation: 20 | enabled: false 21 | -------------------------------------------------------------------------------- /clients/stock-market-activity/data/stock.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "stock", 4 | "namespace": "stock", 5 | "fields": [ 6 | { "name": "date", "type": "string" }, 7 | { "name": "last", "type": "string" }, 8 | { "name": "volume", "type": "string" }, 9 | { "name": "open", "type": "string" }, 10 | { "name": "high", "type": "string" }, 11 | { "name": "low", "type": "string" } 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /clients/chat-room/cloud/java/src/test/java/com/example/AppTest.java: -------------------------------------------------------------------------------- 1 | package com.example; 2 | 3 | import static org.junit.Assert.assertTrue; 4 | 5 | import org.junit.Test; 6 | 7 | /** 8 | * Unit test for simple App. 
9 | */ 10 | public class AppTest 11 | { 12 | /** 13 | * Rigorous Test :-) 14 | */ 15 | @Test 16 | public void shouldAnswerWithTrue() 17 | { 18 | assertTrue( true ); 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /clients/chat-room/docker/java/src/test/java/com/example/AppTest.java: -------------------------------------------------------------------------------- 1 | package com.example; 2 | 3 | import static org.junit.Assert.assertTrue; 4 | 5 | import org.junit.Test; 6 | 7 | /** 8 | * Unit test for simple App. 9 | */ 10 | public class AppTest 11 | { 12 | /** 13 | * Rigorous Test :-) 14 | */ 15 | @Test 16 | public void shouldAnswerWithTrue() 17 | { 18 | assertTrue( true ); 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /data-transforms/README.adoc: -------------------------------------------------------------------------------- 1 | = Redpanda Data Transforms 2 | 3 | Redpanda Data transforms allow you to transform a topic's records into another topic without ping-ponging 4 | your data into another distributed system. 5 | 6 | This directory contains a set of prebuilt transformations that you can build and deploy to your cluster. 7 | 8 | Transforms can be configured using environment variables, see the individual READMEs for more information. 9 | -------------------------------------------------------------------------------- /kubernetes/iceberg/spark/ipython/startup/README: -------------------------------------------------------------------------------- 1 | This is the IPython startup directory 2 | 3 | .py and .ipy files in this directory run *prior* to any code or files specified 4 | via the exec_lines or exec_files configurables whenever you load this profile. 
5 | 6 | Files run in lexicographical order, so you can control the execution order of files 7 | with a prefix, e.g.:: 8 | 9 | 00-first.py 10 | 50-middle.py 11 | 99-last.ipy 12 | -------------------------------------------------------------------------------- /data-transforms/go/to_avro/schema.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "nasdaq", 4 | "namespace": "com.nasdaq", 5 | "fields" : [ 6 | {"name": "Date", "type": "string"}, 7 | {"name": "Last", "type": "string"}, 8 | {"name": "Volume", "type": "long"}, 9 | {"name": "Open", "type": "string"}, 10 | {"name": "High", "type": "string"}, 11 | {"name": "Low", "type": "string"} 12 | ] 13 | } -------------------------------------------------------------------------------- /clients/chat-room/cloud/go/go.mod: -------------------------------------------------------------------------------- 1 | module com/redpanda/chat-room 2 | 3 | go 1.20 4 | 5 | require ( 6 | github.com/google/uuid v1.3.0 7 | github.com/twmb/franz-go v1.9.0 8 | github.com/twmb/franz-go/pkg/kadm v1.3.1 9 | ) 10 | 11 | require ( 12 | github.com/klauspost/compress v1.16.7 // indirect 13 | github.com/pierrec/lz4/v4 v4.1.18 // indirect 14 | github.com/twmb/franz-go/pkg/kmsg v1.6.1 // indirect 15 | golang.org/x/crypto v0.11.0 // indirect 16 | ) 17 | -------------------------------------------------------------------------------- /docker-compose/iceberg/spark/ipython/startup/README: -------------------------------------------------------------------------------- 1 | This is the IPython startup directory 2 | 3 | .py and .ipy files in this directory will be run *prior* to any code or files specified 4 | via the exec_lines or exec_files configurables whenever you load this profile. 
5 | 6 | Files will be run in lexicographical order, so you can control the execution order of files 7 | with a prefix, e.g.:: 8 | 9 | 00-first.py 10 | 50-middle.py 11 | 99-last.ipy 12 | -------------------------------------------------------------------------------- /kubernetes/gitops-helm/cert-manager-release.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: helm.toolkit.fluxcd.io/v2beta1 2 | kind: HelmRelease 3 | metadata: 4 | name: cert-manager 5 | namespace: cert-manager 6 | spec: 7 | interval: 1h 8 | chart: 9 | spec: 10 | chart: cert-manager 11 | version: "1.13.3" 12 | sourceRef: 13 | kind: HelmRepository 14 | name: jetstack 15 | namespace: flux-system 16 | values: 17 | installCRDs: true 18 | -------------------------------------------------------------------------------- /clients/chat-room/docker/go/go.mod: -------------------------------------------------------------------------------- 1 | module com/redpanda/chat-room 2 | 3 | go 1.20 4 | 5 | require ( 6 | github.com/google/uuid v1.3.0 7 | github.com/twmb/franz-go v1.15.0 8 | github.com/twmb/franz-go/pkg/kadm v1.10.0 9 | ) 10 | 11 | require ( 12 | github.com/klauspost/compress v1.17.0 // indirect 13 | github.com/pierrec/lz4/v4 v4.1.18 // indirect 14 | github.com/twmb/franz-go/pkg/kmsg v1.6.1 // indirect 15 | golang.org/x/crypto v0.13.0 // indirect 16 | ) 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # IDE 2 | settings.json 3 | .idea/ 4 | .DS_store 5 | 6 | # Compiled wasm artifacts 7 | *.wasm 8 | 9 | # Node.js 10 | node_modules/ 11 | 12 | # Docs 13 | local-docs/ 14 | 15 | # Tests 16 | testresults-*.json 17 | 18 | # Go 19 | go.work* 20 | 21 | # Kubernetes 22 | kubernetes/gitops/flux-system/ 23 | 24 | # Python 25 | __pycache__/ 26 | env/ 27 | venv/ 28 | .env 29 | .venv 30 | 31 | # Redaction docker .env exception 32 | 
!data-transforms/go/redaction/demo/.env -------------------------------------------------------------------------------- /docker-compose/jira-metrics-pipeline/.env.example: -------------------------------------------------------------------------------- 1 | # Redpanda, Console, and Connect versions 2 | REDPANDA_VERSION=v25.3.1 3 | REDPANDA_CONSOLE_VERSION=v3.3.1 4 | REDPANDA_CONNECT_VERSION=4.70.0 5 | 6 | # JIRA configuration 7 | # Get your JIRA API token from: https://id.atlassian.com/manage-profile/security/api-tokens 8 | JIRA_BASE_URL=https://your-domain.atlassian.net 9 | JIRA_USERNAME=your-email@example.com 10 | JIRA_API_TOKEN=your-api-token-here 11 | JIRA_PROJECT=YOUR_PROJECT_KEY 12 | -------------------------------------------------------------------------------- /docker-compose/oidc/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "oidc", 3 | "version": "1.0.0", 4 | "main": "index.js", 5 | "scripts": { 6 | "test": "echo \"Error: no test specified\" && exit 1" 7 | }, 8 | "type": "module", 9 | "keywords": [], 10 | "author": "", 11 | "license": "ISC", 12 | "description": "", 13 | "dependencies": { 14 | "dotenv": "^16.5.0", 15 | "jwt-decode": "^4.0.0", 16 | "kafkajs": "^2.2.4", 17 | "node-fetch": "^3.3.2", 18 | "simple-oauth2": "^5.1.0" 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /clients/chat-room/cloud/nodejs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "chat-room", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "keywords": [], 10 | "author": "", 11 | "license": "ISC", 12 | "devDependencies": { 13 | "@types/node": "^20.4.4", 14 | "@types/uuid": "^9.0.2", 15 | "typescript": "^5.1.6" 16 | }, 17 | "dependencies": { 18 | "kafkajs": "^2.2.4", 19 | "uuid": "^9.0.0" 20 | } 
21 | } 22 | -------------------------------------------------------------------------------- /clients/chat-room/docker/nodejs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "chat-room", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "keywords": [], 10 | "author": "", 11 | "license": "ISC", 12 | "devDependencies": { 13 | "@types/node": "^20.4.4", 14 | "@types/uuid": "^9.0.2", 15 | "typescript": "^5.1.6" 16 | }, 17 | "dependencies": { 18 | "kafkajs": "^2.2.4", 19 | "uuid": "^9.0.0" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /clients/chat-room/docker/python/producer.py: -------------------------------------------------------------------------------- 1 | from kafka import KafkaProducer 2 | import json 3 | class ChatProducer: 4 | def __init__(self, brokers, topic): 5 | self.topic = topic 6 | self.producer = KafkaProducer( 7 | bootstrap_servers=brokers, 8 | value_serializer=lambda v: json.dumps(v).encode("utf-8"), 9 | ) 10 | def send_message(self, user, message): 11 | self.producer.send(self.topic, {"user": user, "message": message}) 12 | self.producer.flush() 13 | def close(self): 14 | self.producer.close() -------------------------------------------------------------------------------- /data-transforms/go/redaction/deploy-redaction: -------------------------------------------------------------------------------- 1 | SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) 2 | 3 | if ! [ -f $1 ]; then 4 | echo "Config file doesn't exist." 
5 | exit 1 6 | fi 7 | 8 | export CONFIG=$(gzip < "${1}" | base64 -w 0) 9 | 10 | cat << EOF > "${SCRIPT_DIR}"/transform.yaml 11 | name: $3-transform 12 | input-topic: "$2" 13 | output-topic: "$3" 14 | language: tinygo-no-goroutines 15 | EOF 16 | 17 | rpk transform build 18 | rpk transform deploy --var="CONFIG=${CONFIG}" 19 | 20 | rm "${SCRIPT_DIR}"/transform.yaml -------------------------------------------------------------------------------- /docker-compose/README.adoc: -------------------------------------------------------------------------------- 1 | = Docker Compose Deployments 2 | :description: A collection of Docker Compose files that deploy Redpanda along with a variety of other applications for use in Redpanda Labs. 3 | 4 | {description} 5 | 6 | - link:./single-broker/README.adoc[Single broker] 7 | - link:./three-brokers/README.adoc[Three brokers] 8 | - link:./owl-shop/README.adoc[Owl shop streaming application] 9 | - link:./console-plain-login/README.adoc[Redpanda Console with Plain Login Authentication] 10 | - link:./cdc/README.adoc[Change data capture (CDC) with Debezium] -------------------------------------------------------------------------------- /docker-compose/cdc/postgres-json/data/postgres_bootstrap.sql: -------------------------------------------------------------------------------- 1 | -- Create the orders table 2 | create table orders ( 3 | order_id serial primary key, 4 | customer_id int, 5 | total float, 6 | created_at timestamp default now() 7 | ); 8 | 9 | -- Populate it with a few values 10 | insert into orders(customer_id, total) values (1,50); 11 | insert into orders(customer_id, total) values (2,100); 12 | insert into orders(customer_id, total) values (2,50); 13 | insert into orders(customer_id, total) values (3,10); 14 | insert into orders(customer_id, total) values (4,90); -------------------------------------------------------------------------------- /clients/schema-registry-demo/python/clickstream_key_pb2.pyi: 
-------------------------------------------------------------------------------- 1 | from google.protobuf import descriptor as _descriptor 2 | from google.protobuf import message as _message 3 | from typing import ClassVar as _ClassVar, Optional as _Optional 4 | 5 | DESCRIPTOR: _descriptor.FileDescriptor 6 | 7 | class Key(_message.Message): 8 | __slots__ = ["seq", "uuid"] 9 | SEQ_FIELD_NUMBER: _ClassVar[int] 10 | UUID_FIELD_NUMBER: _ClassVar[int] 11 | seq: int 12 | uuid: str 13 | def __init__(self, uuid: _Optional[str] = ..., seq: _Optional[int] = ...) -> None: ... 14 | -------------------------------------------------------------------------------- /data-transforms/go/redaction/demo/config.yaml: -------------------------------------------------------------------------------- 1 | redactions: 2 | - "path": "customer" 3 | "type": "drop" 4 | - "path": "deliveryAddress.firstName" 5 | "type": "drop" 6 | - "path": "deliveryAddress.lastName" 7 | "type": "drop" 8 | - "path": "deliveryAddress.houseNumber" 9 | "type": "redact" 10 | - "path": "deliveryAddress.street" 11 | "type": "md5" 12 | - "path": "deliveryAddress.phone" 13 | "type": "x-digits" 14 | - "path": "deliveryAddress.latitude" 15 | "type": "truncate" 16 | - "path": "deliveryAddress.longitude" 17 | "type": "truncate" -------------------------------------------------------------------------------- /clients/go/franz-go/go.mod: -------------------------------------------------------------------------------- 1 | module redpanda.com/clients/go/franz-go 2 | 3 | go 1.18 4 | 5 | require github.com/twmb/franz-go/pkg/kadm v1.1.1 6 | 7 | require ( 8 | golang.org/x/crypto v0.0.0-20220427172511-eb4f295cb31f // indirect 9 | golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8 // indirect 10 | ) 11 | 12 | require ( 13 | github.com/klauspost/compress v1.15.4 // indirect 14 | github.com/pierrec/lz4/v4 v4.1.14 // indirect 15 | github.com/sirupsen/logrus v1.9.0 16 | github.com/twmb/franz-go v1.5.3 17 | 
github.com/twmb/franz-go/pkg/kmsg v1.1.0 // indirect 18 | github.com/twmb/tlscfg v1.2.0 19 | ) 20 | -------------------------------------------------------------------------------- /kubernetes/gitops-helm/redpanda-helm-release.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: helm.toolkit.fluxcd.io/v2beta1 2 | kind: HelmRelease 3 | metadata: 4 | name: redpanda 5 | namespace: redpanda 6 | spec: 7 | dependsOn: 8 | - name: cert-manager 9 | namespace: cert-manager 10 | interval: 5m 11 | chart: 12 | spec: 13 | chart: redpanda 14 | version: "5.7.*" 15 | sourceRef: 16 | kind: HelmRepository 17 | name: redpanda 18 | namespace: redpanda 19 | interval: 1m 20 | values: 21 | statefulset: 22 | initContainers: 23 | setDataDirOwnership: 24 | enabled: true 25 | -------------------------------------------------------------------------------- /clients/chat-room/docker/python/consumer.py: -------------------------------------------------------------------------------- 1 | from kafka import KafkaConsumer 2 | import json 3 | import uuid 4 | class ChatConsumer: 5 | def __init__(self, brokers, topic, group_id=None): 6 | if group_id is None: 7 | group_id = str(uuid.uuid4()) 8 | self.consumer = KafkaConsumer( 9 | topic, 10 | bootstrap_servers=brokers, 11 | group_id=group_id, 12 | value_deserializer=lambda m: json.loads(m.decode("utf-8")), 13 | ) 14 | def print_messages(self): 15 | for msg in self.consumer: 16 | print(f"{msg.value['user']}: {msg.value['message']}") 17 | def close(self): 18 | self.consumer.close() -------------------------------------------------------------------------------- /data-transforms/go/regex/transform.yaml: -------------------------------------------------------------------------------- 1 | name: regex 2 | description: | 3 | Filters the input topic to records that only match a regular expression. 4 | 5 | Regular expression are implemented using Go's regexp library, which uses the same syntax as RE2. 
6 | See the RE2 wiki for allowed syntax: https://github.com/google/re2/wiki/Syntax 7 | 8 | Environment variables: 9 | - PATTERN: The regular expression that will match against records (required) 10 | - MATCH_VALUE: By default, the regex matches keys, but if set to true, the regex will match values 11 | input-topic: "" 12 | output-topic: "" 13 | language: tinygo-no-goroutines 14 | env: 15 | PATTERN: '' 16 | -------------------------------------------------------------------------------- /clients/chat-room/cloud/python/producer.py: -------------------------------------------------------------------------------- 1 | from kafka import KafkaProducer 2 | import json 3 | class ChatProducer: 4 | def __init__(self, brokers, topic): 5 | self.topic = topic 6 | self.producer = KafkaProducer( 7 | bootstrap_servers=brokers, 8 | sasl_mechanism="SCRAM-SHA-256", 9 | security_protocol="SASL_SSL", 10 | sasl_plain_username="redpanda-chat-account", 11 | sasl_plain_password="", 12 | value_serializer=lambda v: json.dumps(v).encode("utf-8"), 13 | ) 14 | def send_message(self, user, message): 15 | self.producer.send(self.topic, {"user": user, "message": message}) 16 | self.producer.flush() 17 | def close(self): 18 | self.producer.close() -------------------------------------------------------------------------------- /docker-compose/cdc/postgres-json/create_debezium_postgres_connector.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash 2 | 3 | docker compose exec debezium `curl -H 'Content-Type: application/json' debezium:8083/connectors --data ' 4 | { 5 | "name": "postgres-connector", 6 | "config": { 7 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector", 8 | "plugin.name": "pgoutput", 9 | "database.hostname": "postgres", 10 | "database.port": "5432", 11 | "database.user": "postgresuser", 12 | "database.password": "postgrespw", 13 | "database.dbname" : "pandashop", 14 | "database.server.name": "postgres", 15 | "table.include.list": "public.orders", 16 | "topic.prefix" : "dbz" 17 | } 18 | }' -------------------------------------------------------------------------------- /README-TEMPLATE.adoc: -------------------------------------------------------------------------------- 1 | = Lab Title 2 | // Required: Sets the UI template when published on the Redpanda docs site. 3 | :page-layout: lab 4 | // Required: Add a deployment type by uncommenting one of these lines 5 | //:env-docker: true // Docker 6 | //:env-kubernetes: true // Kubernetes 7 | //:env-linux: true // Linux 8 | //:env-cloud: true // Redpanda Cloud 9 | // Required: For more details about categories, see https://github.com/redpanda-data/redpanda-labs/blob/main/docs/CONTRIBUTING.md 10 | :page-categories: 11 | // Required: Add a short description of what the lab does. 
12 | :description: 13 | 14 | Overview 15 | 16 | == Prerequisites 17 | 18 | == Limitations 19 | 20 | == Run the lab 21 | 22 | == Clean up 23 | 24 | 25 | -------------------------------------------------------------------------------- /clients/chat-room/docker/nodejs/src/admin.ts: -------------------------------------------------------------------------------- 1 | import { Kafka } from "kafkajs"; 2 | const redpanda = new Kafka({ 3 | brokers: ["localhost:19092"], 4 | }); 5 | const admin = redpanda.admin(); 6 | export async function createTopic( 7 | topic: string, 8 | partitions?: number, 9 | replicas?: number 10 | ) { 11 | await admin.connect(); 12 | const existingTopics = await admin.listTopics(); 13 | if (!existingTopics.includes(topic)) { 14 | await admin.createTopics({ 15 | topics: [ 16 | { 17 | topic: topic, 18 | numPartitions: partitions ? partitions : 1, 19 | replicationFactor: replicas ? replicas : 1, 20 | }, 21 | ], 22 | }); 23 | } 24 | await admin.disconnect(); 25 | } -------------------------------------------------------------------------------- /clients/chat-room/docker/nodejs/src/producer.ts: -------------------------------------------------------------------------------- 1 | import { Kafka } from "kafkajs"; 2 | const redpanda = new Kafka({ 3 | brokers: ["localhost:19092"], 4 | }); 5 | const producer = redpanda.producer(); 6 | export async function getConnection(user: string) { 7 | try { 8 | await producer.connect(); 9 | return async (message: string) => { 10 | await producer.send({ 11 | topic: "chat-room", 12 | messages: [{ value: JSON.stringify({ message, user }) }], 13 | }); 14 | }; 15 | } catch (error) { 16 | console.error("Error:", error); 17 | } 18 | } 19 | export async function disconnect() { 20 | try { 21 | await producer.disconnect(); 22 | } catch (error) { 23 | console.error("Error:", error); 24 | } 25 | } -------------------------------------------------------------------------------- /data-transforms/go/redaction/example/config.yaml: 
-------------------------------------------------------------------------------- 1 | redactors: 2 | - name: "redactWithHyphens" 3 | type: "value" 4 | quote: true 5 | value: 6 | function: "replace" 7 | replacement: "---" 8 | redactions: 9 | - "path": "customer" 10 | "type": "drop" 11 | - "path": "deliveryAddress.firstName" 12 | "type": "drop" 13 | - "path": "deliveryAddress.lastName" 14 | "type": "drop" 15 | - "path": "deliveryAddress.houseNumber" 16 | "type": "redactWithHyphens" 17 | - "path": "deliveryAddress.street" 18 | "type": "md5" 19 | - "path": "deliveryAddress.phone" 20 | "type": "x-digits" 21 | - "path": "deliveryAddress.latitude" 22 | "type": "truncate" 23 | - "path": "deliveryAddress.longitude" 24 | "type": "truncate" -------------------------------------------------------------------------------- /clients/stock-market-activity/data/README.md: -------------------------------------------------------------------------------- 1 | The data used in the client examples was downloaded from the Nasdaq historical data site: https://www.nasdaq.com/market-activity/quotes/historical. 
The historical stock prices and volumes are provided in CSV format, for example: 2 | 3 | ```csv 4 | Date,Close/Last,Volume,Open,High,Low 5 | 10/18/2023,$93.75,4110651,$93.94,$94.53,$93.50 6 | 10/17/2023,$94.18,6086690,$92.75,$94.19,$92.6315 7 | 10/16/2023,$93.65,4595886,$92.17,$93.855,$91.85 8 | 10/13/2023,$91.48,4780570,$91.28,$92.06,$91.05 9 | ``` 10 | 11 | ## Protobuf 12 | 13 | To recompile the `.proto` file: 14 | 15 | ```shell 16 | cd clients/stock-market-activity/python 17 | protoc -I=../data --python_out=./schema_registry --pyi_out=./schema_registry ../data/*.proto 18 | ``` -------------------------------------------------------------------------------- /data-transforms/go/regex/go.sum: -------------------------------------------------------------------------------- 1 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v0.1.0 h1:ZEGwghKqmE9we+xbW2SoiXQVFv3z/RKp3p3/W6IXXXk= 2 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v0.1.0/go.mod h1:QGgiwwf/BIsD1b7EiyQ/Apzw+RLSpasRDdpOCiefQFQ= 3 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.0.2 h1:34F42buBTGuK1uaXKky1PdxAZzqMh6kQE1ojCLf/hWw= 4 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.0.2/go.mod h1:QGgiwwf/BIsD1b7EiyQ/Apzw+RLSpasRDdpOCiefQFQ= 5 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.1.0 h1:KxgHJZsHsrT3YX7DMpu/vJN4TZN3KFm1jzrCFLyOepA= 6 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.1.0/go.mod h1:QGgiwwf/BIsD1b7EiyQ/Apzw+RLSpasRDdpOCiefQFQ= 7 | -------------------------------------------------------------------------------- /docker-compose/cdc/mysql-json/create_debezium_mysql_connector.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | docker compose exec debezium curl -i -X POST -H "Accept:application/json" -H "Content-Type:application/json" localhost:8083/connectors/ -d ' 4 | { 5 | "name": "mysql-connector", 6 | "config": { 
7 | "connector.class": "io.debezium.connector.mysql.MySqlConnector", 8 | "tasks.max": "1", 9 | "database.hostname": "mysql", 10 | "database.port": "3306", 11 | "database.user": "debezium", 12 | "database.password": "dbz", 13 | "database.server.id": "184054", 14 | "topic.prefix": "dbz", 15 | "database.include.list": "pandashop", 16 | "schema.history.internal.kafka.bootstrap.servers": "redpanda:9092", 17 | "schema.history.internal.kafka.topic": "schemahistory.pandashop" 18 | } 19 | }' 20 | 21 | -------------------------------------------------------------------------------- /docker-compose/cdc/mysql-json/data/mysql_bootstrap.sql: -------------------------------------------------------------------------------- 1 | CREATE DATABASE IF NOT EXISTS pandashop; 2 | USE pandashop; 3 | 4 | GRANT ALL PRIVILEGES ON pandashop.* TO 'mysqluser'; 5 | GRANT FILE on *.* to 'mysqluser'; 6 | 7 | CREATE USER 'debezium' IDENTIFIED WITH mysql_native_password BY 'dbz'; 8 | 9 | GRANT SELECT, RELOAD, SHOW DATABASES, REPLICATION SLAVE, REPLICATION CLIENT ON *.* TO 'debezium'; 10 | 11 | FLUSH PRIVILEGES; 12 | 13 | -- Create the orders table 14 | CREATE TABLE orders ( 15 | order_id INT AUTO_INCREMENT PRIMARY KEY, 16 | customer_id INT, 17 | total FLOAT, 18 | created_at DATETIME DEFAULT NOW() 19 | ); 20 | 21 | -- Populate the 'orders' table 22 | INSERT INTO orders (customer_id, total) VALUES 23 | (1, 100.50), 24 | (2, 75.25), 25 | (1, 50.75), 26 | (3, 120.00); 27 | -------------------------------------------------------------------------------- /clients/chat-room/cloud/python/consumer.py: -------------------------------------------------------------------------------- 1 | from kafka import KafkaConsumer 2 | import json 3 | import uuid 4 | class ChatConsumer: 5 | def __init__(self, brokers, topic, group_id=None): 6 | if group_id is None: 7 | group_id = str(uuid.uuid4()) 8 | self.consumer = KafkaConsumer( 9 | topic, 10 | bootstrap_servers=brokers, 11 | sasl_mechanism="SCRAM-SHA-256", 12 | 
security_protocol="SASL_SSL", 13 | sasl_plain_username="redpanda-chat-account", 14 | sasl_plain_password="", 15 | group_id=group_id, 16 | value_deserializer=lambda m: json.loads(m.decode("utf-8")), 17 | ) 18 | def print_messages(self): 19 | for msg in self.consumer: 20 | print(f"{msg.value['user']}: {msg.value['message']}") 21 | def close(self): 22 | self.consumer.close() -------------------------------------------------------------------------------- /data-transforms/go/redaction/redaction/redaction.go: -------------------------------------------------------------------------------- 1 | package redaction 2 | 3 | import ( 4 | "errors" 5 | "redactor/redactors" 6 | ) 7 | 8 | func Wrap(message string, err error) error { 9 | return errors.Join(errors.New(message), err) 10 | } 11 | 12 | func Redact(input []byte) ([]byte, error) { 13 | data, err := redactors.Process(input) 14 | if err != nil { 15 | return []byte{}, Wrap("unable to redact record", err) 16 | } 17 | return data, nil 18 | } 19 | 20 | func Initialise(bytes []byte) error { 21 | err := redactors.Configure([]byte(redactors.Builtins)) 22 | if err != nil { 23 | return Wrap("unable to configure built-in redactors", err) 24 | } 25 | err = redactors.Configure(bytes) 26 | if err != nil { 27 | return Wrap("unable to configure custom redactors", err) 28 | } 29 | return nil 30 | } 31 | -------------------------------------------------------------------------------- /clients/chat-room/docker/go/producer.go: -------------------------------------------------------------------------------- 1 | package main 2 | import ( 3 | "context" 4 | "encoding/json" 5 | "github.com/twmb/franz-go/pkg/kgo" 6 | ) 7 | type Producer struct { 8 | client *kgo.Client 9 | topic string 10 | } 11 | func NewProducer(brokers []string, topic string) *Producer { 12 | client, err := kgo.NewClient( 13 | kgo.SeedBrokers(brokers...), 14 | ) 15 | if err != nil { 16 | panic(err) 17 | } 18 | return &Producer{client: client, topic: topic} 19 | } 20 | func (p 
*Producer) SendMessage(user, message string) { 21 | ctx := context.Background() 22 | msg := Message{User: user, Message: message} 23 | b, _ := json.Marshal(msg) 24 | p.client.Produce(ctx, &kgo.Record{Topic: p.topic, Value: b}, nil) 25 | } 26 | func (p *Producer) Close() { 27 | p.client.Close() 28 | } -------------------------------------------------------------------------------- /clients/chat-room/docker/python/admin.py: -------------------------------------------------------------------------------- 1 | from kafka import KafkaAdminClient 2 | from kafka.admin import NewTopic 3 | from kafka.admin import KafkaAdminClient, NewTopic 4 | class ChatAdmin: 5 | def __init__(self, brokers): 6 | self.admin = KafkaAdminClient(bootstrap_servers=brokers) 7 | def topic_exists(self, topic_name): 8 | topics_metadata = self.admin.list_topics() 9 | return topic_name in topics_metadata 10 | def create_topic(self, topic_name, num_partitions=1, replication_factor=1): 11 | if not self.topic_exists(topic_name): 12 | new_topic = NewTopic(name=topic_name, num_partitions=num_partitions, replication_factor=replication_factor) 13 | self.admin.create_topics([new_topic]) 14 | print(f"Topic {topic_name} created.") 15 | else: 16 | print(f"Topic {topic_name} already exists.") 17 | def close(self): 18 | self.admin.close() -------------------------------------------------------------------------------- /data-transforms/js/csv-json/dist/data-transforms-tutorial.js: -------------------------------------------------------------------------------- 1 | // src/index.js 2 | import { onRecordWritten } from "@redpanda-data/transform-sdk"; 3 | onRecordWritten(csvToJsonTransform); 4 | function csvToJsonTransform(event, writer) { 5 | const input = event.record.value.text(); 6 | const rows = input.split("\n"); 7 | console.log(rows); 8 | for (const row of rows) { 9 | const columns = row.split(","); 10 | console.log(columns); 11 | if (columns.length !== 2) { 12 | throw new Error("unexpected number of columns"); 
13 | } 14 | const quantity = parseInt(columns[1], 10); 15 | if (isNaN(quantity)) { 16 | throw new Error("invalid quantity"); 17 | } 18 | const itemQuantity = { 19 | item: columns[0], 20 | quantity 21 | }; 22 | event.record.value = JSON.stringify(itemQuantity); 23 | writer.write(event.record); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /docker-compose/oidc/console-config.yaml: -------------------------------------------------------------------------------- 1 | # Configure a connection to the Redpanda cluster 2 | # See https://docs.redpanda.com/current/console/config/connect-to-redpanda/ 3 | kafka: 4 | brokers: ["redpanda-0:9092", "redpanda-1:9092", "redpanda-2:9092"] 5 | sasl: 6 | enabled: true 7 | impersonateUser: true 8 | schemaRegistry: 9 | enabled: true 10 | urls: ["http://redpanda-0:8081","http://redpanda-1:8081","http://redpanda-2:8081"] 11 | authentication: 12 | impersonateUser: true 13 | redpanda: 14 | adminApi: 15 | enabled: true 16 | urls: ["http://redpanda-0:9644","http://redpanda-1:9644","http://redpanda-2:9644"] 17 | authentication: 18 | impersonateUser: true 19 | authentication: 20 | basic: 21 | enabled: true 22 | oidc: 23 | enabled: true 24 | redirectUrl: "http://localhost:8080/auth/callbacks/oidc" 25 | accessType: "offline" 26 | prompt: "consent" -------------------------------------------------------------------------------- /clients/chat-room/cloud/nodejs/src/admin.ts: -------------------------------------------------------------------------------- 1 | import { Kafka } from "kafkajs"; 2 | const redpanda = new Kafka({ 3 | brokers: [""], 4 | ssl: { 5 | }, 6 | sasl: { 7 | mechanism: "scram-sha-256", 8 | username: "redpanda-chat-account", 9 | password: "" 10 | } 11 | }); 12 | const admin = redpanda.admin(); 13 | export async function createTopic( 14 | topic: string, 15 | partitions?: number, 16 | replicas?: number 17 | ) { 18 | await admin.connect(); 19 | const existingTopics = await 
admin.listTopics(); 20 | if (!existingTopics.includes(topic)) { 21 | await admin.createTopics({ 22 | topics: [ 23 | { 24 | topic: topic, 25 | numPartitions: partitions ? partitions : 1, 26 | replicationFactor: replicas ? replicas : 1, 27 | }, 28 | ], 29 | }); 30 | } 31 | await admin.disconnect(); 32 | } -------------------------------------------------------------------------------- /clients/chat-room/cloud/nodejs/src/producer.ts: -------------------------------------------------------------------------------- 1 | import { Kafka } from "kafkajs"; 2 | const redpanda = new Kafka({ 3 | brokers: [""], 4 | ssl: { 5 | }, 6 | sasl: { 7 | mechanism: "scram-sha-256", 8 | username: "redpanda-chat-account", 9 | password: "" 10 | } 11 | }); 12 | const producer = redpanda.producer(); 13 | export async function getConnection(user: string) { 14 | try { 15 | await producer.connect(); 16 | return async (message: string) => { 17 | await producer.send({ 18 | topic: "chat-room", 19 | messages: [{ value: JSON.stringify({ message, user }) }], 20 | }); 21 | }; 22 | } catch (error) { 23 | console.error("Error:", error); 24 | } 25 | } 26 | export async function disconnect() { 27 | try { 28 | await producer.disconnect(); 29 | } catch (error) { 30 | console.error("Error:", error); 31 | } 32 | } -------------------------------------------------------------------------------- /data-transforms/js/csv-json/esbuild.js: -------------------------------------------------------------------------------- 1 | import * as esbuild from 'esbuild' 2 | import { polyfillNode } from "esbuild-plugin-polyfill-node"; 3 | 4 | await esbuild.build({ 5 | entryPoints: ["src/index.js"], 6 | outfile: "dist/data-transforms-tutorial.js", 7 | bundle: true, 8 | external: [ 9 | // This package is provided by the Redpanda JavaScript runtime. 
10 | "@redpanda-data/transform-sdk", 11 | ], 12 | target: "es2022", 13 | platform: "neutral", // We're running in Wasm 14 | plugins: [ 15 | polyfillNode({ 16 | globals: { 17 | // Allow a global Buffer variable if referenced. 18 | buffer: true, 19 | // Don't inject the process global, the Redpanda JavaScript runtime 20 | // does that. 21 | process: false, 22 | }, 23 | polyfills: { 24 | // Any NodeJS APIs that need to polyfilled can be added here. 25 | }, 26 | }), 27 | ], 28 | }); 29 | -------------------------------------------------------------------------------- /data-transforms/go/flatten/go.sum: -------------------------------------------------------------------------------- 1 | github.com/bcicen/jstream v1.0.1 h1:BXY7Cu4rdmc0rhyTVyT3UkxAiX3bnLpKLas9btbH5ck= 2 | github.com/bcicen/jstream v1.0.1/go.mod h1:9ielPxqFry7Y4Tg3j4BfjPocfJ3TbsRtXOAYXYmRuAQ= 3 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v0.1.0 h1:ZEGwghKqmE9we+xbW2SoiXQVFv3z/RKp3p3/W6IXXXk= 4 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v0.1.0/go.mod h1:QGgiwwf/BIsD1b7EiyQ/Apzw+RLSpasRDdpOCiefQFQ= 5 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.0.2 h1:34F42buBTGuK1uaXKky1PdxAZzqMh6kQE1ojCLf/hWw= 6 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.0.2/go.mod h1:QGgiwwf/BIsD1b7EiyQ/Apzw+RLSpasRDdpOCiefQFQ= 7 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.1.0 h1:KxgHJZsHsrT3YX7DMpu/vJN4TZN3KFm1jzrCFLyOepA= 8 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.1.0/go.mod h1:QGgiwwf/BIsD1b7EiyQ/Apzw+RLSpasRDdpOCiefQFQ= 9 | -------------------------------------------------------------------------------- /data-transforms/js/csv-json/src/index.js: -------------------------------------------------------------------------------- 1 | import { onRecordWritten } from "@redpanda-data/transform-sdk"; 2 | 3 | onRecordWritten(csvToJsonTransform); 4 | 5 | function csvToJsonTransform(event, 
writer) { 6 | // The input data is a CSV (without a header row) that is structured as: 7 | // key, item, quantity 8 | const input = event.record.value.text(); 9 | const rows = input.split('\n'); 10 | 11 | for (const row of rows) { 12 | const columns = row.split(','); 13 | 14 | if (columns.length !== 2) { 15 | throw new Error('unexpected number of columns'); 16 | } 17 | 18 | const quantity = parseInt(columns[1], 10); 19 | if (isNaN(quantity)) { 20 | throw new Error('invalid quantity'); 21 | } 22 | 23 | const itemQuantity = { 24 | item: columns[0], 25 | quantity: quantity, 26 | }; 27 | event.record.value = JSON.stringify(itemQuantity); 28 | writer.write(event.record); 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /clients/chat-room/docker/nodejs/src/consumer.ts: -------------------------------------------------------------------------------- 1 | import { Kafka } from "kafkajs"; 2 | import { v4 as uuidv4 } from "uuid"; 3 | const redpanda = new Kafka({ 4 | brokers: ["localhost:19092"], 5 | }); 6 | const consumer = redpanda.consumer({ groupId: uuidv4() }); 7 | export async function connect() { 8 | try { 9 | await consumer.connect(); 10 | await consumer.subscribe({ topic: "chat-room" }); 11 | await consumer.run({ 12 | eachMessage: async ({ topic, partition, message }) => { 13 | const formattedValue = JSON.parse( 14 | (message.value as Buffer).toString() 15 | ); 16 | console.log(`${formattedValue.user}: ${formattedValue.message}`); 17 | }, 18 | }); 19 | } catch (error) { 20 | console.error("Error:", error); 21 | } 22 | } 23 | export async function disconnect() { 24 | try { 25 | await consumer.disconnect(); 26 | } catch (error) { 27 | console.error("Error:", error); 28 | } 29 | } -------------------------------------------------------------------------------- /clients/stock-market-activity/python/schema_registry/stock_pb2.pyi: -------------------------------------------------------------------------------- 1 | from 
google.protobuf import descriptor as _descriptor 2 | from google.protobuf import message as _message 3 | from typing import ClassVar as _ClassVar, Optional as _Optional 4 | 5 | DESCRIPTOR: _descriptor.FileDescriptor 6 | 7 | class Stock(_message.Message): 8 | __slots__ = ["date", "last", "volume", "open", "high", "low"] 9 | DATE_FIELD_NUMBER: _ClassVar[int] 10 | LAST_FIELD_NUMBER: _ClassVar[int] 11 | VOLUME_FIELD_NUMBER: _ClassVar[int] 12 | OPEN_FIELD_NUMBER: _ClassVar[int] 13 | HIGH_FIELD_NUMBER: _ClassVar[int] 14 | LOW_FIELD_NUMBER: _ClassVar[int] 15 | date: str 16 | last: str 17 | volume: str 18 | open: str 19 | high: str 20 | low: str 21 | def __init__(self, date: _Optional[str] = ..., last: _Optional[str] = ..., volume: _Optional[str] = ..., open: _Optional[str] = ..., high: _Optional[str] = ..., low: _Optional[str] = ...) -> None: ... 22 | -------------------------------------------------------------------------------- /clients/go/sarama/README.md: -------------------------------------------------------------------------------- 1 | # Redpanda Sarama Example 2 | 3 | ## Running the example 4 | 5 | ```shell 6 | go run main.go -h 7 | -brokers string 8 | Comma-separated list of brokers. (default "localhost:9092") 9 | -group string 10 | Consumer group name. (default "test-group") 11 | -password string 12 | SASL password. 13 | -tls 14 | Enable TLS. (default false) 15 | -topic string 16 | Topic to produce to and consume from. (default "test") 17 | -username string 18 | SASL username. 19 | ``` 20 | 21 | ## Redpanda Cloud 22 | 23 | ``` 24 | -brokers 25 | Copy the "Cluster hosts" string from the Overview page. 26 | -topic 27 | Create a new topic on the Topics page. 28 | -username 29 | Create a new service account on the Security page and paste the name here (don't forget to copy the password). 30 | Create the associated ACLs for the topic, service account, and consumer group. 31 | -password 32 | Paste the password here. 
33 | ``` -------------------------------------------------------------------------------- /connect-plugins/processor/embeddings/openai/atlas_demo.yaml: -------------------------------------------------------------------------------- 1 | input: 2 | kafka: 3 | addresses: [ ${REDPANDA_SERVERS} ] 4 | tls: 5 | enabled: true 6 | sasl: 7 | mechanism: "SCRAM-SHA-256" 8 | user: ${REDPANDA_USER} 9 | password: ${REDPANDA_PASS} 10 | consumer_group: connect 11 | start_from_oldest: false # consume from latest 12 | topics: [ ${REDPANDA_TOPICS} ] 13 | 14 | pipeline: 15 | processors: 16 | - openai_embeddings: 17 | api_key: ${OPENAI_API_KEY} 18 | model: ${OPENAI_EMBEDDING_MODEL} 19 | 20 | output: 21 | mongodb: 22 | url: ${ATLAS_CONNECTION_STRING} 23 | database: ${ATLAS_DB} 24 | username: "" 25 | password: "" 26 | collection: ${ATLAS_COLLECTION} 27 | operation: "insert-one" 28 | write_concern: 29 | w: 1 30 | j: false 31 | w_timeout: "10s" 32 | document_map: |- 33 | root.text = this.text 34 | root.metadata = this.metadata 35 | root.embedding = this.embedding 36 | -------------------------------------------------------------------------------- /kubernetes/iceberg/secret.yaml: -------------------------------------------------------------------------------- 1 | # This secret contains all authentication credentials required for the lab: 2 | # 3 | # 1. cloud_storage_* keys: MinIO S3-compatible storage credentials for Redpanda 4 | # - Used by Redpanda brokers to store topic segments in MinIO buckets 5 | # - Matches the MinIO tenant configuration in minio-tenant-values.yaml 6 | # 7 | # 2. 
iceberg_rest_catalog_* keys: Authentication for Iceberg REST catalog 8 | # - Used by both Redpanda (for Iceberg sink connector) and Spark (for queries) 9 | # - Enables secure communication between components in the streaming pipeline 10 | # 11 | # Security Note: In production, use stronger passwords and consider external 12 | # secret management (such as AWS Secrets Manager, HashiCorp Vault) 13 | apiVersion: v1 14 | kind: Secret 15 | metadata: 16 | name: redpanda-secret 17 | type: Opaque 18 | stringData: 19 | cloud_storage_access_key: minio 20 | cloud_storage_secret_key: minio123 21 | iceberg_rest_catalog_client_id: catalog 22 | iceberg_rest_catalog_client_secret: catalog123 23 | -------------------------------------------------------------------------------- /clients/chat-room/docker/go/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | import ( 3 | "bufio" 4 | "fmt" 5 | "os" 6 | "strings" 7 | ) 8 | type Message struct { 9 | User string `json:"user"` 10 | Message string `json:"message"` 11 | } 12 | func main() { 13 | topic := "chat-room" 14 | brokers := []string{"localhost:19092"} 15 | admin := NewAdmin(brokers) 16 | defer admin.Close() 17 | if !admin.TopicExists(topic) { 18 | admin.CreateTopic(topic) 19 | } 20 | username := "" 21 | fmt.Print("Enter your username: ") 22 | fmt.Scanln(&username) 23 | producer := NewProducer(brokers, topic) 24 | defer producer.Close() 25 | consumer := NewConsumer(brokers, topic) 26 | defer consumer.Close() 27 | go consumer.PrintMessages() 28 | fmt.Println("Connected. 
Press Ctrl+C to exit") 29 | reader := bufio.NewReader(os.Stdin) 30 | for { 31 | message, _ := reader.ReadString('\n') 32 | message = strings.TrimSpace(message) 33 | producer.SendMessage(username, message) 34 | } 35 | } -------------------------------------------------------------------------------- /clients/chat-room/cloud/go/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | import ( 3 | "bufio" 4 | "fmt" 5 | "os" 6 | "strings" 7 | ) 8 | type Message struct { 9 | User string `json:"user"` 10 | Message string `json:"message"` 11 | } 12 | func main() { 13 | topic := "chat-room" 14 | brokers := []string{""} 15 | admin := NewAdmin(brokers) 16 | defer admin.Close() 17 | if !admin.TopicExists(topic) { 18 | admin.CreateTopic(topic) 19 | } 20 | username := "" 21 | fmt.Print("Enter your username: ") 22 | fmt.Scanln(&username) 23 | producer := NewProducer(brokers, topic) 24 | defer producer.Close() 25 | consumer := NewConsumer(brokers, topic) 26 | defer consumer.Close() 27 | go consumer.PrintMessages() 28 | fmt.Println("Connected. 
Press Ctrl+C to exit") 29 | reader := bufio.NewReader(os.Stdin) 30 | for { 31 | message, _ := reader.ReadString('\n') 32 | message = strings.TrimSpace(message) 33 | producer.SendMessage(username, message) 34 | } 35 | } -------------------------------------------------------------------------------- /clients/chat-room/cloud/go/producer.go: -------------------------------------------------------------------------------- 1 | package main 2 | import ( 3 | "context" 4 | "encoding/json" 5 | "crypto/tls" 6 | "github.com/twmb/franz-go/pkg/kgo" 7 | "github.com/twmb/franz-go/pkg/sasl/scram" 8 | ) 9 | type Producer struct { 10 | client *kgo.Client 11 | topic string 12 | } 13 | func NewProducer(brokers []string, topic string) *Producer { 14 | client, err := kgo.NewClient( 15 | kgo.SeedBrokers(brokers...), 16 | kgo.DialTLSConfig(new(tls.Config)), 17 | kgo.SASL(scram.Auth{User: "redpanda-chat-account",Pass: "", 18 | }.AsSha256Mechanism()), 19 | ) 20 | if err != nil { 21 | panic(err) 22 | } 23 | return &Producer{client: client, topic: topic} 24 | } 25 | func (p *Producer) SendMessage(user, message string) { 26 | ctx := context.Background() 27 | msg := Message{User: user, Message: message} 28 | b, _ := json.Marshal(msg) 29 | p.client.Produce(ctx, &kgo.Record{Topic: p.topic, Value: b}, nil) 30 | } 31 | func (p *Producer) Close() { 32 | p.client.Close() 33 | } -------------------------------------------------------------------------------- /clients/chat-room/cloud/python/admin.py: -------------------------------------------------------------------------------- 1 | from kafka import KafkaAdminClient 2 | from kafka.admin import NewTopic 3 | from kafka.admin import KafkaAdminClient, NewTopic 4 | class ChatAdmin: 5 | def __init__(self, brokers): 6 | self.admin = KafkaAdminClient( 7 | bootstrap_servers=brokers, 8 | sasl_mechanism="SCRAM-SHA-256", 9 | security_protocol="SASL_SSL", 10 | sasl_plain_username="redpanda-chat-account", 11 | sasl_plain_password="", 12 | ) 13 | def 
topic_exists(self, topic_name): 14 | topics_metadata = self.admin.list_topics() 15 | return topic_name in topics_metadata 16 | def create_topic(self, topic_name, num_partitions=1, replication_factor=1): 17 | if not self.topic_exists(topic_name): 18 | new_topic = NewTopic(name=topic_name, num_partitions=num_partitions, replication_factor=replication_factor) 19 | self.admin.create_topics([new_topic]) 20 | print(f"Topic {topic_name} created.") 21 | else: 22 | print(f"Topic {topic_name} already exists.") 23 | def close(self): 24 | self.admin.close() -------------------------------------------------------------------------------- /docker-compose/iceberg/spark/.pyiceberg.yaml: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed to the Apache Software Foundation (ASF) under one 3 | # or more contributor license agreements. See the NOTICE file 4 | # distributed with this work for additional information 5 | # regarding copyright ownership. The ASF licenses this file 6 | # to you under the Apache License, Version 2.0 (the 7 | # "License"); you may not use this file except in compliance 8 | # with the License. You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, 13 | # software distributed under the License is distributed on an 14 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | # KIND, either express or implied. See the License for the 16 | # specific language governing permissions and limitations 17 | # under the License. 
18 | # 19 | catalog: 20 | default: 21 | uri: http://rest:8181 22 | s3.endpoint: http://minio:9000 23 | s3.access-key-id: admin 24 | s3.secret-access-key: password 25 | -------------------------------------------------------------------------------- /clients/chat-room/cloud/java/src/main/java/com/example/ChatProducer.java: -------------------------------------------------------------------------------- 1 | package com.example; 2 | 3 | import org.apache.kafka.clients.producer.KafkaProducer; 4 | import org.apache.kafka.clients.producer.ProducerRecord; 5 | import com.google.gson.Gson; 6 | import java.util.HashMap; 7 | import java.util.Map; 8 | public class ChatProducer implements AutoCloseable { 9 | private KafkaProducer producer; 10 | private String topic; 11 | private Gson gson; 12 | public ChatProducer(String topic) { 13 | this.producer = new KafkaProducer<>(Admin.getProducerProps()); 14 | this.topic = topic; 15 | this.gson = new Gson(); 16 | } 17 | public void sendMessage(String user, String message) { 18 | Map messageMap = new HashMap<>(); 19 | messageMap.put("user", user); 20 | messageMap.put("message", message); 21 | String jsonMessage = gson.toJson(messageMap); 22 | producer.send(new ProducerRecord<>(topic, null, jsonMessage)); 23 | producer.flush(); 24 | } 25 | @Override 26 | public void close() { 27 | producer.close(); 28 | } 29 | } -------------------------------------------------------------------------------- /clients/chat-room/docker/java/src/main/java/com/example/ChatProducer.java: -------------------------------------------------------------------------------- 1 | package com.example; 2 | 3 | import org.apache.kafka.clients.producer.KafkaProducer; 4 | import org.apache.kafka.clients.producer.ProducerRecord; 5 | import com.google.gson.Gson; 6 | import java.util.HashMap; 7 | import java.util.Map; 8 | public class ChatProducer implements AutoCloseable { 9 | private KafkaProducer producer; 10 | private String topic; 11 | private Gson gson; 12 | public 
ChatProducer(String topic) { 13 | this.producer = new KafkaProducer<>(Admin.getProducerProps()); 14 | this.topic = topic; 15 | this.gson = new Gson(); 16 | } 17 | public void sendMessage(String user, String message) { 18 | Map messageMap = new HashMap<>(); 19 | messageMap.put("user", user); 20 | messageMap.put("message", message); 21 | String jsonMessage = gson.toJson(messageMap); 22 | producer.send(new ProducerRecord<>(topic, null, jsonMessage)); 23 | producer.flush(); 24 | } 25 | @Override 26 | public void close() { 27 | producer.close(); 28 | } 29 | } -------------------------------------------------------------------------------- /clients/chat-room/docker/python/app.py: -------------------------------------------------------------------------------- 1 | import threading 2 | from producer import ChatProducer 3 | from consumer import ChatConsumer 4 | from admin import ChatAdmin 5 | brokers = ["localhost:19092"] 6 | topic = "chat-room" 7 | def consumer_thread(consumer): 8 | consumer.print_messages() 9 | if __name__ == "__main__": 10 | admin = ChatAdmin(brokers) 11 | if not admin.topic_exists(topic): 12 | print(f"Creating topic: {topic}") 13 | admin.create_topic(topic) 14 | username = input("Enter your username: ") 15 | producer = ChatProducer(brokers, topic) 16 | consumer = ChatConsumer(brokers, topic) 17 | consumer_t = threading.Thread(target=consumer_thread, args=(consumer,)) 18 | consumer_t.daemon = True 19 | consumer_t.start() 20 | print("Connected. 
import threading

from producer import ChatProducer
from consumer import ChatConsumer
from admin import ChatAdmin

# Redpanda Cloud bootstrap URL goes here (left blank on purpose).
brokers = [""]
topic = "chat-room"


def consumer_thread(consumer):
    # Runs on the background thread: blocks forever printing incoming chat.
    consumer.print_messages()


if __name__ == "__main__":
    admin = ChatAdmin(brokers)

    # Make sure the chat topic exists before anyone produces or consumes.
    if not admin.topic_exists(topic):
        print(f"Creating topic: {topic}")
        admin.create_topic(topic)

    username = input("Enter your username: ")
    producer = ChatProducer(brokers, topic)
    consumer = ChatConsumer(brokers, topic)

    # Daemon thread, so the process can still exit while the consumer blocks.
    consumer_t = threading.Thread(target=consumer_thread, args=(consumer,), daemon=True)
    consumer_t.start()

    print("Connected. Press Ctrl+C to exit")
    try:
        # Forward every line typed on stdin as a chat message.
        while True:
            producer.send_message(username, input())
    except KeyboardInterrupt:
        pass
    finally:
        print("\nClosing chat...")
        producer.close()
        consumer.close()
        admin.close()
        consumer_t.join(1)
Control Plane Node: 4 | # - Runs Kubernetes API server, etcd, scheduler, and controller manager 5 | # - Port mapping (32090) exposes MinIO console for external access 6 | # - Isolated from workload execution for stability 7 | # 8 | # 2. Worker Nodes (4x): 9 | # - Distribute Redpanda brokers, MinIO storage, Spark executors, and Jupyter 10 | # - Multiple nodes allow testing of distributed scenarios and node failures 11 | # - Sufficient capacity for concurrent Spark jobs and streaming workloads 12 | # 13 | # 3. Port Mapping Strategy: 14 | # - hostPort 32090 maps to MinIO NodePort service for browser access 15 | # - Enables you to access MinIO console at http://localhost:32090 16 | # 17 | --- 18 | apiVersion: kind.x-k8s.io/v1alpha4 19 | kind: Cluster 20 | nodes: 21 | - role: control-plane 22 | extraPortMappings: 23 | - containerPort: 32090 24 | hostPort: 32090 25 | protocol: TCP 26 | - role: worker 27 | - role: worker 28 | - role: worker 29 | - role: worker -------------------------------------------------------------------------------- /clients/chat-room/cloud/nodejs/src/consumer.ts: -------------------------------------------------------------------------------- 1 | import { Kafka } from "kafkajs"; 2 | import { v4 as uuidv4 } from "uuid"; 3 | const redpanda = new Kafka({ 4 | brokers: [""], 5 | ssl: { 6 | }, 7 | sasl: { 8 | mechanism: "scram-sha-256", 9 | username: "redpanda-chat-account", 10 | password: "" 11 | } 12 | }); 13 | const consumer = redpanda.consumer({ groupId: uuidv4() }); 14 | export async function connect() { 15 | try { 16 | await consumer.connect(); 17 | await consumer.subscribe({ topic: "chat-room" }); 18 | await consumer.run({ 19 | eachMessage: async ({ topic, partition, message }) => { 20 | const formattedValue = JSON.parse( 21 | (message.value as Buffer).toString() 22 | ); 23 | console.log(`${formattedValue.user}: ${formattedValue.message}`); 24 | }, 25 | }); 26 | } catch (error) { 27 | console.error("Error:", error); 28 | } 29 | } 30 | export 
async function disconnect() { 31 | try { 32 | await consumer.disconnect(); 33 | } catch (error) { 34 | console.error("Error:", error); 35 | } 36 | } -------------------------------------------------------------------------------- /kubernetes/iceberg/minio-nodeport.yaml: -------------------------------------------------------------------------------- 1 | # This service exposes the MinIO console outside the Kubernetes cluster: 2 | # 3 | # 1. NodePort Service Type: 4 | # - Maps internal MinIO console port (9090) to external port (32090) 5 | # - Enables browser access at http://localhost:32090 in kind clusters 6 | # - Required because MinIO operator doesn't expose console by default 7 | # 8 | # 2. Service Selection: 9 | # - Targets MinIO tenant pods using the min.io/tenant label 10 | # - Automatically discovers MinIO instances in the namespace 11 | # - Provides load balancing if multiple MinIO pods exist 12 | # 13 | # 3. Developer Workflow: 14 | # - Access MinIO browser console for bucket management and monitoring 15 | # - Upload/download files for testing data pipelines 16 | # - Monitor S3 storage usage and performance metrics 17 | # 18 | apiVersion: v1 19 | kind: Service 20 | metadata: 21 | name: minio-nodeport 22 | namespace: iceberg-lab 23 | spec: 24 | ports: 25 | - name: http 26 | port: 9090 27 | targetPort: 9090 28 | nodePort: 32090 29 | selector: 30 | v1.min.io/tenant: iceberg-minio 31 | type: NodePort -------------------------------------------------------------------------------- /clients/schema-registry-demo/python/clickstream_key_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
3 | # source: clickstream-key.proto 4 | """Generated protocol buffer code.""" 5 | from google.protobuf.internal import builder as _builder 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import descriptor_pool as _descriptor_pool 8 | from google.protobuf import symbol_database as _symbol_database 9 | # @@protoc_insertion_point(imports) 10 | 11 | _sym_db = _symbol_database.Default() 12 | 13 | 14 | 15 | 16 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x15\x63lickstream-key.proto\x12\x15\x63om.redpanda.examples\" \n\x03Key\x12\x0c\n\x04uuid\x18\x01 \x01(\t\x12\x0b\n\x03seq\x18\x02 \x01(\x05\x62\x06proto3') 17 | 18 | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) 19 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'clickstream_key_pb2', globals()) 20 | if _descriptor._USE_C_DESCRIPTORS == False: 21 | 22 | DESCRIPTOR._options = None 23 | _KEY._serialized_start=48 24 | _KEY._serialized_end=80 25 | # @@protoc_insertion_point(module_scope) 26 | -------------------------------------------------------------------------------- /clients/schema-registry-demo/python/click.py: -------------------------------------------------------------------------------- 1 | """ 2 | Our value data model. Yes, this is overly trivial. 
3 | """ 4 | import time 5 | 6 | UNKNOWN = "unknown" 7 | 8 | class Click: 9 | """Our data class for demonstration purposes.""" 10 | def __init__(self, user_id: int, event_type: str = UNKNOWN, ts_millis: int = 0): 11 | self.user_id = user_id 12 | self.event_type = event_type 13 | self.ts_millis = ts_millis or int(time.time_ns() / 1000) 14 | self._idx = 0 15 | self._fields = ["user_id", "event_type", "ts_millis"] 16 | 17 | @classmethod 18 | def to_dict(cls, value, _ctx): 19 | return dict(user_id=value.user_id, 20 | event_type=value.event_type, 21 | ts_millis=value.ts_millis) 22 | 23 | @classmethod 24 | def from_dict(cls, d, _ctx): 25 | """Quick and dirty implementation.""" 26 | return Click( 27 | d.get("user_id", -1), 28 | d.get("event_type", UNKNOWN), 29 | d.get("ts_millis", -1) 30 | ) 31 | 32 | def __str__(self): 33 | return f"Click({self.user_id}, {self.event_type}, {self.ts_millis})" 34 | -------------------------------------------------------------------------------- /data-transforms/go/redaction/redactors/redactor_builtins.go: -------------------------------------------------------------------------------- 1 | package redactors 2 | 3 | var Builtins = ` 4 | redactors: 5 | - name: "drop" 6 | type: "drop" 7 | - name: "clear" 8 | type: "value" 9 | quote: true 10 | value: 11 | function: "replace" 12 | replacement: "" 13 | - name: "redact" 14 | type: "value" 15 | quote: true 16 | value: 17 | function: "replace" 18 | replacement: "REDACTED" 19 | - name: "redactEmailUsername" 20 | type: "value" 21 | quote: true 22 | value: 23 | function: "replaceBeforeSeparator" 24 | replacement: "redacted" 25 | separator: "@" 26 | - name: "truncate" 27 | type: "key-value" 28 | key: 29 | function: "camelPrepend" 30 | prefix: "truncated" 31 | value: 32 | function: "truncateFloat64" 33 | decimals: 1 34 | - name: "md5" 35 | type: "key-value" 36 | quote: true 37 | key: 38 | function: "camelPrepend" 39 | prefix: "hashed" 40 | value: 41 | function: "md5" 42 | - name: "x-digits" 43 | type: 
"value" 44 | quote: true 45 | value: 46 | function: "x-digits" 47 | i: 4 48 | ` 49 | -------------------------------------------------------------------------------- /docker-compose/iceberg/spark/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Licensed to the Apache Software Foundation (ASF) under one 4 | # or more contributor license agreements. See the NOTICE file 5 | # distributed with this work for additional information 6 | # regarding copyright ownership. The ASF licenses this file 7 | # to you under the Apache License, Version 2.0 (the 8 | # "License"); you may not use this file except in compliance 9 | # with the License. You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, 14 | # software distributed under the License is distributed on an 15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 16 | # KIND, either express or implied. See the License for the 17 | # specific language governing permissions and limitations 18 | # under the License. 
19 | 20 | start-master.sh -p 7077 21 | start-worker.sh spark://spark-iceberg:7077 22 | start-history-server.sh 23 | start-thriftserver.sh --driver-java-options "-Dderby.system.home=/tmp/derby" 24 | 25 | # Entrypoint, for example notebook, pyspark or spark-sql 26 | if [[ $# -gt 0 ]] ; then 27 | eval "$1" 28 | fi 29 | -------------------------------------------------------------------------------- /setup-tests/cloud/cloud-log-in.json: -------------------------------------------------------------------------------- 1 | { 2 | "tests": [ 3 | { 4 | "steps": [ 5 | { 6 | "action": "setVariables", 7 | "path": "setup-tests/cloud/.env" 8 | }, 9 | { 10 | "action": "goTo", 11 | "url": "https://cloud.redpanda.com" 12 | }, 13 | { 14 | "action": "find", 15 | "selector": "[name=username]", 16 | "click": true 17 | }, 18 | { 19 | "action": "typeKeys", 20 | "keys": ["jake@redpanda.com", "$ENTER$"] 21 | }, 22 | { 23 | "action": "wait" 24 | }, 25 | { 26 | "action": "find", 27 | "selector": "[data-provider=Username-Password-Authentication]", 28 | "click": true 29 | }, 30 | { 31 | "action": "wait" 32 | }, 33 | { 34 | "action": "find", 35 | "selector": "[type=password]", 36 | "click": true 37 | }, 38 | { 39 | "action": "typeKeys", 40 | "keys": ["$PASSWORD", "$ENTER$"] 41 | }, 42 | { 43 | "action": "wait", 44 | "duration": 10000 45 | } 46 | ] 47 | } 48 | ] 49 | } -------------------------------------------------------------------------------- /clients/go/franz-go/README.md: -------------------------------------------------------------------------------- 1 | # Redpanda franz-go example 2 | 3 | How to create a topic, write to, and read from a Redpanda cluster using the [franz-go](https://github.com/twmb/franz-go) Kafka API client. 
This example has been tested on local Redpanda clusters that were spun up using [docker-compose](../../../docker-compose/), as well as Redpanda FMC/BYOC cluster with TLS and SASL SCRAM enabled (you just need to download the TLS certs and create the necessary users and ACLs). 4 | 5 | ```shell 6 | go build -o redpanda-franz-example 7 | 8 | ./redpanda-franz-example -h 9 | Usage of ./redpanda-franz-example: 10 | -brokers string 11 | Comma-separated list of brokers. (default "localhost:9092") 12 | -ca string 13 | Path to PEM-formatted CA file. 14 | -clientCert string 15 | Path to PEM-formatted cert file. 16 | -clientKey string 17 | Path to PEM-formatted key file. 18 | -group string 19 | Consumer group name. (default "test-group") 20 | -password string 21 | SASL password. 22 | -saslMechanism string 23 | 'plain', 'SCRAM-SHA-256', or 'SCRAM-SHA-512' 24 | -tls 25 | Enable TLS. 26 | -topic string 27 | Topic to produce to and consume from. (default "test") 28 | -username string 29 | SASL username. 
30 | ``` -------------------------------------------------------------------------------- /connect-plugins/processor/embeddings/openai/requirements.txt: -------------------------------------------------------------------------------- 1 | aiohttp==3.9.5 2 | aiosignal==1.3.1 3 | annotated-types==0.7.0 4 | anyio==4.3.0 5 | attrs==23.2.0 6 | beautifulsoup4==4.12.3 7 | bs4==0.0.2 8 | certifi==2024.2.2 9 | charset-normalizer==3.3.2 10 | confluent-kafka==2.4.0 11 | dataclasses-json==0.6.6 12 | distro==1.9.0 13 | dnspython==2.6.1 14 | frozenlist==1.4.1 15 | greenlet==3.0.3 16 | h11==0.14.0 17 | httpcore==1.0.5 18 | httpx==0.27.0 19 | idna==3.7 20 | jsonpatch==1.33 21 | jsonpointer==2.4 22 | langchain==0.2.0 23 | langchain-community==0.2.0 24 | langchain-core==0.2.0 25 | langchain-mongodb==0.1.5 26 | langchain-openai==0.1.7 27 | langchain-text-splitters==0.2.0 28 | langsmith==0.1.60 29 | lxml==5.2.2 30 | marshmallow==3.21.2 31 | multidict==6.0.5 32 | mypy-extensions==1.0.0 33 | nest-asyncio==1.6.0 34 | numpy==1.26.4 35 | openai==1.30.1 36 | orjson==3.10.3 37 | packaging==23.2 38 | pydantic==2.7.1 39 | pydantic_core==2.18.2 40 | pymongo==4.7.2 41 | python-dotenv==1.0.1 42 | PyYAML==6.0.1 43 | regex==2024.5.15 44 | requests==2.32.1 45 | sniffio==1.3.1 46 | soupsieve==2.5 47 | SQLAlchemy==2.0.30 48 | tenacity==8.3.0 49 | tiktoken==0.7.0 50 | tqdm==4.66.4 51 | typing-inspect==0.9.0 52 | typing_extensions==4.11.0 53 | urllib3==2.2.1 54 | yarl==1.9.4 55 | -------------------------------------------------------------------------------- /clients/stock-market-activity/python/schema_registry/stock_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
package com.example;

import java.util.Scanner;
import java.util.UUID;
import java.util.concurrent.Executors;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;

/**
 * Console chat client entry point: ensures the topic exists, then pumps stdin
 * lines to the producer while a background thread runs the consumer.
 */
public class Main {
    private static final String TOPIC = "chat-room";

    public static void main(String[] args) {
        // Ensure the chat topic exists before producing or consuming.
        if (!Admin.topicExists(TOPIC)) {
            Admin.createTopic(TOPIC);
        }
        Scanner scanner = new Scanner(System.in);
        System.out.print("Enter your username: ");
        String username = scanner.nextLine();
        ExecutorService executorService = Executors.newSingleThreadExecutor();
        // try-with-resources closes consumer and producer on every exit path.
        try (ChatConsumer consumer = new ChatConsumer(TOPIC, UUID.randomUUID().toString());
             ChatProducer producer = new ChatProducer(TOPIC)) {
            // Future<?> instead of raw Future: the result is never read, but raw
            // types trigger unchecked warnings and defeat type checking.
            Future<?> future = executorService.submit(consumer);
            System.out.print("Connected, press Ctrl+C to exit\n");
            // Forward each stdin line as a chat message until the consumer task ends.
            while (!future.isDone()) {
                String message = scanner.nextLine();
                producer.sendMessage(username, message);
            }
        } catch (Exception e) {
            // Reached when stdin closes or a client call fails; announce shutdown.
            System.out.println("Closing chat...");
        } finally {
            executorService.shutdownNow();
        }
    }
}
-------------------------------------------------------------------------------- /data-transforms/rust/jq/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | redpanda: 3 | image: docker.redpanda.com/redpandadata/redpanda:${REDPANDA_VERSION:-latest} 4 | command: 5 | - redpanda 6 | - start 7 | - --mode dev-container 8 | - --smp 1 9 | - --kafka-addr 10 | - PLAINTEXT://0.0.0.0:9092,OUTSIDE://0.0.0.0:9093 11 | - --advertise-kafka-addr 12 | - PLAINTEXT://redpanda:9092,OUTSIDE://localhost:9093 13 | ports: 14 | - 9093:9093 15 | - 8081:8081 16 | - 8082:8082 17 | - 9644:9644 18 | volumes: 19 | - ./conf/.bootstrap.yaml:/etc/redpanda/.bootstrap.yaml 20 | console: 21 | container_name: redpanda-console 22 | image: docker.redpanda.com/redpandadata/console:${REDPANDA_CONSOLE_VERSION:-latest} 23 | entrypoint: /bin/sh 24 | command: -c 'echo "$$CONSOLE_CONFIG_FILE" > /tmp/config.yml; /app/console' 25 | environment: 26 | CONFIG_FILEPATH: /tmp/config.yml 27 | CONSOLE_CONFIG_FILE: | 28 | kafka: 29 | brokers: ["redpanda:9092"] 30 | schemaRegistry: 31 | enabled: true 32 | urls: ["http://redpanda:8081"] 33 | redpanda: 34 | adminApi: 35 | enabled: true 36 | urls: ["http://redpanda:9644"] 37 | ports: 38 | - 8080:8080 39 | depends_on: 40 | - redpanda -------------------------------------------------------------------------------- /data-transforms/go/flatten/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | redpanda: 3 | image: docker.redpanda.com/redpandadata/redpanda:${REDPANDA_VERSION:-latest} 4 | command: 5 | - redpanda 6 | - start 7 | - --mode dev-container 8 | - --smp 1 9 | - --kafka-addr 10 | - PLAINTEXT://0.0.0.0:9092,OUTSIDE://0.0.0.0:9093 11 | - --advertise-kafka-addr 12 | - PLAINTEXT://redpanda:9092,OUTSIDE://localhost:9093 13 | ports: 14 | - 9093:9093 15 | - 8081:8081 16 | - 8082:8082 17 | - 9644:9644 18 | volumes: 19 | - 
./conf/.bootstrap.yaml:/etc/redpanda/.bootstrap.yaml 20 | console: 21 | container_name: redpanda-console 22 | image: docker.redpanda.com/redpandadata/console:${REDPANDA_CONSOLE_VERSION:-latest} 23 | entrypoint: /bin/sh 24 | command: -c 'echo "$$CONSOLE_CONFIG_FILE" > /tmp/config.yml; /app/console' 25 | environment: 26 | CONFIG_FILEPATH: /tmp/config.yml 27 | CONSOLE_CONFIG_FILE: | 28 | kafka: 29 | brokers: ["redpanda:9092"] 30 | schemaRegistry: 31 | enabled: true 32 | urls: ["http://redpanda:8081"] 33 | redpanda: 34 | adminApi: 35 | enabled: true 36 | urls: ["http://redpanda:9644"] 37 | ports: 38 | - 8080:8080 39 | depends_on: 40 | - redpanda -------------------------------------------------------------------------------- /data-transforms/go/iss_demo/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | redpanda: 3 | image: docker.redpanda.com/redpandadata/redpanda:${REDPANDA_VERSION:-latest} 4 | command: 5 | - redpanda 6 | - start 7 | - --mode dev-container 8 | - --smp 1 9 | - --kafka-addr 10 | - PLAINTEXT://0.0.0.0:9092,OUTSIDE://0.0.0.0:9093 11 | - --advertise-kafka-addr 12 | - PLAINTEXT://redpanda:9092,OUTSIDE://localhost:9093 13 | ports: 14 | - 9093:9093 15 | - 8081:8081 16 | - 8082:8082 17 | - 9644:9644 18 | volumes: 19 | - ./conf/.bootstrap.yaml:/etc/redpanda/.bootstrap.yaml 20 | console: 21 | container_name: redpanda-console 22 | image: docker.redpanda.com/redpandadata/console:${REDPANDA_CONSOLE_VERSION:-latest} 23 | entrypoint: /bin/sh 24 | command: -c 'echo "$$CONSOLE_CONFIG_FILE" > /tmp/config.yml; /app/console' 25 | environment: 26 | CONFIG_FILEPATH: /tmp/config.yml 27 | CONSOLE_CONFIG_FILE: | 28 | kafka: 29 | brokers: ["redpanda:9092"] 30 | schemaRegistry: 31 | enabled: true 32 | urls: ["http://redpanda:8081"] 33 | redpanda: 34 | adminApi: 35 | enabled: true 36 | urls: ["http://redpanda:9644"] 37 | ports: 38 | - 8080:8080 39 | depends_on: 40 | - redpanda 
-------------------------------------------------------------------------------- /data-transforms/go/regex/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | redpanda: 3 | image: docker.redpanda.com/redpandadata/redpanda:${REDPANDA_VERSION:-latest} 4 | command: 5 | - redpanda 6 | - start 7 | - --mode dev-container 8 | - --smp 1 9 | - --kafka-addr 10 | - PLAINTEXT://0.0.0.0:9092,OUTSIDE://0.0.0.0:9093 11 | - --advertise-kafka-addr 12 | - PLAINTEXT://redpanda:9092,OUTSIDE://localhost:9093 13 | ports: 14 | - 9093:9093 15 | - 8081:8081 16 | - 8082:8082 17 | - 9644:9644 18 | volumes: 19 | - ./conf/.bootstrap.yaml:/etc/redpanda/.bootstrap.yaml 20 | console: 21 | container_name: redpanda-console 22 | image: docker.redpanda.com/redpandadata/console:${REDPANDA_CONSOLE_VERSION:-latest} 23 | entrypoint: /bin/sh 24 | command: -c 'echo "$$CONSOLE_CONFIG_FILE" > /tmp/config.yml; /app/console' 25 | environment: 26 | CONFIG_FILEPATH: /tmp/config.yml 27 | CONSOLE_CONFIG_FILE: | 28 | kafka: 29 | brokers: ["redpanda:9092"] 30 | schemaRegistry: 31 | enabled: true 32 | urls: ["http://redpanda:8081"] 33 | redpanda: 34 | adminApi: 35 | enabled: true 36 | urls: ["http://redpanda:9644"] 37 | ports: 38 | - 8080:8080 39 | depends_on: 40 | - redpanda -------------------------------------------------------------------------------- /data-transforms/js/csv-json/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | redpanda: 3 | image: docker.redpanda.com/redpandadata/redpanda:${REDPANDA_VERSION:-latest} 4 | command: 5 | - redpanda 6 | - start 7 | - --mode dev-container 8 | - --smp 1 9 | - --kafka-addr 10 | - PLAINTEXT://0.0.0.0:9092,OUTSIDE://0.0.0.0:9093 11 | - --advertise-kafka-addr 12 | - PLAINTEXT://redpanda:9092,OUTSIDE://localhost:9093 13 | ports: 14 | - 9093:9093 15 | - 8081:8081 16 | - 8082:8082 17 | - 9644:9644 18 | volumes: 19 | - 
./conf/.bootstrap.yaml:/etc/redpanda/.bootstrap.yaml 20 | console: 21 | container_name: redpanda-console 22 | image: docker.redpanda.com/redpandadata/console:${REDPANDA_CONSOLE_VERSION:-latest} 23 | entrypoint: /bin/sh 24 | command: -c 'echo "$$CONSOLE_CONFIG_FILE" > /tmp/config.yml; /app/console' 25 | environment: 26 | CONFIG_FILEPATH: /tmp/config.yml 27 | CONSOLE_CONFIG_FILE: | 28 | kafka: 29 | brokers: ["redpanda:9092"] 30 | schemaRegistry: 31 | enabled: true 32 | urls: ["http://redpanda:8081"] 33 | redpanda: 34 | adminApi: 35 | enabled: true 36 | urls: ["http://redpanda:9644"] 37 | ports: 38 | - 8080:8080 39 | depends_on: 40 | - redpanda -------------------------------------------------------------------------------- /data-transforms/rust/ts-converter/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | redpanda: 3 | image: docker.redpanda.com/redpandadata/redpanda:${REDPANDA_VERSION:-latest} 4 | command: 5 | - redpanda 6 | - start 7 | - --mode dev-container 8 | - --smp 1 9 | - --kafka-addr 10 | - PLAINTEXT://0.0.0.0:9092,OUTSIDE://0.0.0.0:9093 11 | - --advertise-kafka-addr 12 | - PLAINTEXT://redpanda:9092,OUTSIDE://localhost:9093 13 | ports: 14 | - 9093:9093 15 | - 8081:8081 16 | - 8082:8082 17 | - 9644:9644 18 | volumes: 19 | - ./conf/.bootstrap.yaml:/etc/redpanda/.bootstrap.yaml 20 | console: 21 | container_name: redpanda-console 22 | image: docker.redpanda.com/redpandadata/console:${REDPANDA_CONSOLE_VERSION:-latest} 23 | entrypoint: /bin/sh 24 | command: -c 'echo "$$CONSOLE_CONFIG_FILE" > /tmp/config.yml; /app/console' 25 | environment: 26 | CONFIG_FILEPATH: /tmp/config.yml 27 | CONSOLE_CONFIG_FILE: | 28 | kafka: 29 | brokers: ["redpanda:9092"] 30 | schemaRegistry: 31 | enabled: true 32 | urls: ["http://redpanda:8081"] 33 | redpanda: 34 | adminApi: 35 | enabled: true 36 | urls: ["http://redpanda:9644"] 37 | ports: 38 | - 8080:8080 39 | depends_on: 40 | - redpanda 
-------------------------------------------------------------------------------- /docker-compose/data-transforms/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | redpanda: 3 | image: docker.redpanda.com/redpandadata/redpanda:${REDPANDA_VERSION:-latest} 4 | command: 5 | - redpanda 6 | - start 7 | - --mode dev-container 8 | - --smp 1 9 | - --kafka-addr 10 | - PLAINTEXT://0.0.0.0:9092,OUTSIDE://0.0.0.0:9093 11 | - --advertise-kafka-addr 12 | - PLAINTEXT://redpanda:9092,OUTSIDE://localhost:9093 13 | ports: 14 | - 9093:9093 15 | - 8081:8081 16 | - 8082:8082 17 | - 9644:9644 18 | volumes: 19 | - ./conf/.bootstrap.yaml:/etc/redpanda/.bootstrap.yaml 20 | console: 21 | container_name: redpanda-console 22 | image: docker.redpanda.com/redpandadata/console:${REDPANDA_CONSOLE_VERSION:-latest} 23 | entrypoint: /bin/sh 24 | command: -c 'echo "$$CONSOLE_CONFIG_FILE" > /tmp/config.yml; /app/console' 25 | environment: 26 | CONFIG_FILEPATH: /tmp/config.yml 27 | CONSOLE_CONFIG_FILE: | 28 | kafka: 29 | brokers: ["redpanda:9092"] 30 | schemaRegistry: 31 | enabled: true 32 | urls: ["http://redpanda:8081"] 33 | redpanda: 34 | adminApi: 35 | enabled: true 36 | urls: ["http://redpanda:9644"] 37 | ports: 38 | - 8080:8080 39 | depends_on: 40 | - redpanda -------------------------------------------------------------------------------- /docs/modules/docker-compose/attachments/data-transforms/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | redpanda: 3 | image: docker.redpanda.com/redpandadata/redpanda:${REDPANDA_VERSION:-latest} 4 | command: 5 | - redpanda 6 | - start 7 | - --mode dev-container 8 | - --smp 1 9 | - --kafka-addr 10 | - PLAINTEXT://0.0.0.0:9092,OUTSIDE://0.0.0.0:9093 11 | - --advertise-kafka-addr 12 | - PLAINTEXT://redpanda:9092,OUTSIDE://localhost:9093 13 | ports: 14 | - 9093:9093 15 | - 8081:8081 16 | - 8082:8082 17 | - 9644:9644 18 | 
volumes: 19 | - ./conf/.bootstrap.yaml:/etc/redpanda/.bootstrap.yaml 20 | console: 21 | container_name: redpanda-console 22 | image: docker.redpanda.com/redpandadata/console:${REDPANDA_CONSOLE_VERSION:-latest} 23 | entrypoint: /bin/sh 24 | command: -c 'echo "$$CONSOLE_CONFIG_FILE" > /tmp/config.yml; /app/console' 25 | environment: 26 | CONFIG_FILEPATH: /tmp/config.yml 27 | CONSOLE_CONFIG_FILE: | 28 | kafka: 29 | brokers: ["redpanda:9092"] 30 | schemaRegistry: 31 | enabled: true 32 | urls: ["http://redpanda:8081"] 33 | redpanda: 34 | adminApi: 35 | enabled: true 36 | urls: ["http://redpanda:9644"] 37 | ports: 38 | - 8080:8080 39 | depends_on: 40 | - redpanda -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "redpanda-labs", 3 | "version": "1.0.0", 4 | "description": "Redpanda labs", 5 | "license": "ISC", 6 | "scripts": { 7 | "build": "antora --to-dir local-docs --fetch docs/local-antora-playbook.yml", 8 | "serve": "wds --node-resolve --open / --watch --root-dir local-docs --port 5002", 9 | "start": "cross-env-shell LIVERELOAD=true npx gulp", 10 | "test-transforms": "cd setup-tests && npx doc-detective runTests --input ../data-transforms/go -l debug", 11 | "test-oidc": "cd setup-tests && npx doc-detective runTests --input ../docker-compose/oidc -l debug" 12 | }, 13 | "dependencies": { 14 | "@antora/cli": "3.1.2", 15 | "@antora/site-generator": "3.1.2", 16 | "@asciidoctor/tabs": "^1.0.0-beta.5", 17 | "@redpanda-data/docs-extensions-and-macros": "^4.0.0", 18 | "@sntke/antora-mermaid-extension": "^0.0.6" 19 | }, 20 | "devDependencies": { 21 | "@octokit/core": "^6.1.2", 22 | "@octokit/plugin-retry": "^7.1.1", 23 | "@octokit/rest": "^21.0.1", 24 | "@web/dev-server": "^0.2.1", 25 | "cross-env": "^7.0.3", 26 | "doc-detective": "^2.17.0", 27 | "gulp": "^4.0.2", 28 | "gulp-connect": "^5.7.0", 29 | "open": "^9.1.0", 30 | "semver": 
"^7.6.3" 31 | }, 32 | "overrides": { 33 | "vinyl-fs": { 34 | "glob-stream": "~7.0" 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /data-transforms/go/to_avro/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | redpanda: 3 | container_name: redpanda 4 | image: docker.redpanda.com/redpandadata/redpanda:${REDPANDA_VERSION:-latest} 5 | command: 6 | - redpanda 7 | - start 8 | - --mode dev-container 9 | - --smp 1 10 | - --kafka-addr 11 | - PLAINTEXT://0.0.0.0:9092,OUTSIDE://0.0.0.0:9093 12 | - --advertise-kafka-addr 13 | - PLAINTEXT://redpanda:9092,OUTSIDE://localhost:9093 14 | ports: 15 | - 9093:9093 16 | - 8081:8081 17 | - 8082:8082 18 | - 9644:9644 19 | volumes: 20 | - ./conf/.bootstrap.yaml:/etc/redpanda/.bootstrap.yaml 21 | console: 22 | container_name: redpanda-console 23 | image: docker.redpanda.com/redpandadata/console:${REDPANDA_CONSOLE_VERSION:-latest} 24 | entrypoint: /bin/sh 25 | command: -c 'echo "$$CONSOLE_CONFIG_FILE" > /tmp/config.yml; /app/console' 26 | environment: 27 | CONFIG_FILEPATH: /tmp/config.yml 28 | CONSOLE_CONFIG_FILE: | 29 | kafka: 30 | brokers: ["redpanda:9092"] 31 | schemaRegistry: 32 | enabled: true 33 | urls: ["http://redpanda:8081"] 34 | redpanda: 35 | adminApi: 36 | enabled: true 37 | urls: ["http://redpanda:9644"] 38 | ports: 39 | - 8080:8080 40 | depends_on: 41 | - redpanda -------------------------------------------------------------------------------- /clients/chat-room/docker/go/consumer.go: -------------------------------------------------------------------------------- 1 | package main 2 | import ( 3 | "context" 4 | "encoding/json" 5 | "fmt" 6 | "github.com/twmb/franz-go/pkg/kgo" 7 | "github.com/google/uuid" 8 | ) 9 | type Consumer struct { 10 | client *kgo.Client 11 | topic string 12 | } 13 | func NewConsumer(brokers []string, topic string) *Consumer { 14 | groupID := uuid.New().String() 15 | client, err 
:= kgo.NewClient( 16 | kgo.SeedBrokers(brokers...), 17 | kgo.ConsumerGroup(groupID), 18 | kgo.ConsumeTopics(topic), 19 | kgo.ConsumeResetOffset(kgo.NewOffset().AtStart()), 20 | ) 21 | if err != nil { 22 | panic(err) 23 | } 24 | return &Consumer{client: client, topic: topic} 25 | } 26 | func (c *Consumer) PrintMessages() { 27 | ctx := context.Background() 28 | for { 29 | fetches := c.client.PollFetches(ctx) 30 | iter := fetches.RecordIter() 31 | for !iter.Done() { 32 | record := iter.Next() 33 | var msg Message 34 | if err := json.Unmarshal(record.Value, &msg); err != nil { 35 | fmt.Printf("Error decoding message: %v\n", err) 36 | continue 37 | } 38 | fmt.Printf("%s: %s\n", msg.User, msg.Message) 39 | } 40 | } 41 | } 42 | func (c *Consumer) Close() { 43 | c.client.Close() 44 | } -------------------------------------------------------------------------------- /clients/go/sarama/go.mod: -------------------------------------------------------------------------------- 1 | module sarama-example 2 | 3 | go 1.18 4 | 5 | require ( 6 | github.com/Shopify/sarama v1.35.0 7 | github.com/xdg-go/scram v1.1.1 8 | ) 9 | 10 | require ( 11 | github.com/davecgh/go-spew v1.1.1 // indirect 12 | github.com/eapache/go-resiliency v1.3.0 // indirect 13 | github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 // indirect 14 | github.com/eapache/queue v1.1.0 // indirect 15 | github.com/golang/snappy v0.0.4 // indirect 16 | github.com/hashicorp/errwrap v1.0.0 // indirect 17 | github.com/hashicorp/go-multierror v1.1.1 // indirect 18 | github.com/hashicorp/go-uuid v1.0.2 // indirect 19 | github.com/jcmturner/aescts/v2 v2.0.0 // indirect 20 | github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect 21 | github.com/jcmturner/gofork v1.0.0 // indirect 22 | github.com/jcmturner/gokrb5/v8 v8.4.2 // indirect 23 | github.com/jcmturner/rpc/v2 v2.0.3 // indirect 24 | github.com/klauspost/compress v1.15.8 // indirect 25 | github.com/pierrec/lz4/v4 v4.1.15 // indirect 26 | 
github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect 27 | github.com/xdg-go/pbkdf2 v1.0.0 // indirect 28 | github.com/xdg-go/stringprep v1.0.3 // indirect 29 | golang.org/x/crypto v0.0.0-20220214200702-86341886e292 // indirect 30 | golang.org/x/net v0.0.0-20220708220712-1185a9018129 // indirect 31 | golang.org/x/text v0.3.7 // indirect 32 | ) 33 | -------------------------------------------------------------------------------- /clients/chat-room/docker/rust/src/producer.rs: -------------------------------------------------------------------------------- 1 | use rdkafka::config::ClientConfig; 2 | use rdkafka::producer::{FutureProducer, FutureRecord}; 3 | use rdkafka::util::Timeout; 4 | use serde::{Deserialize, Serialize}; 5 | use std::time::Duration; 6 | 7 | #[derive(Serialize, Deserialize, Debug, Clone)] 8 | pub struct ChatMessage { 9 | pub username: String, 10 | pub message: String, 11 | } 12 | 13 | pub struct ChatProducer { 14 | producer: FutureProducer, 15 | topic: String, 16 | } 17 | 18 | impl ChatProducer { 19 | pub fn new(brokers: &str, topic: &str) -> Self { 20 | let producer: FutureProducer = ClientConfig::new() 21 | .set("bootstrap.servers", brokers) 22 | .create() 23 | .expect("Producer creation failed"); 24 | 25 | ChatProducer { 26 | producer, 27 | topic: topic.to_string(), 28 | } 29 | } 30 | 31 | pub async fn send_message(&self, message: ChatMessage) { 32 | let payload = serde_json::to_string(&message).expect("Failed to serialize message"); 33 | 34 | self.producer 35 | .send( 36 | FutureRecord::to(&self.topic) 37 | .payload(&payload) 38 | .key(&message.username), 39 | Timeout::After(Duration::from_secs(0)), 40 | ) 41 | .await 42 | .unwrap(); 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /clients/chat-room/cloud/nodejs/src/index.ts: -------------------------------------------------------------------------------- 1 | import * as readline from "node:readline"; 2 | import * as 
Admin from "./admin"; 3 | import * as Producer from "./producer"; 4 | import * as Consumer from "./consumer"; 5 | const rl = readline.createInterface({ 6 | input: process.stdin, 7 | output: process.stdout, 8 | }); 9 | async function start() { 10 | const topic = "chat-room"; 11 | console.log(`Creating topic: ${topic}`); 12 | await Admin.createTopic(topic); 13 | console.log("Connecting..."); 14 | await Consumer.connect(); 15 | rl.question("Enter user name: \n", async function (username) { 16 | const sendMessage = await Producer.getConnection(username); 17 | if (sendMessage) { 18 | console.log("Connected, press Ctrl+C to exit"); 19 | rl.on("line", (input) => { 20 | readline.moveCursor(process.stdout, 0, -1); 21 | sendMessage(input); 22 | }); 23 | } else { 24 | console.error("Failed to initialize sendMessage function"); 25 | } 26 | }); 27 | } 28 | start(); 29 | process.on("SIGINT", async () => { 30 | console.log('Closing app...'); 31 | try { 32 | await Producer.disconnect(); 33 | await Consumer.disconnect(); 34 | rl.close(); 35 | } catch (err) { 36 | console.error('Error during cleanup:', err); 37 | process.exit(1); 38 | } finally { 39 | console.log('Cleanup finished. 
Exiting'); 40 | process.exit(0); 41 | } 42 | }); -------------------------------------------------------------------------------- /clients/chat-room/docker/nodejs/src/index.ts: -------------------------------------------------------------------------------- 1 | import * as readline from "node:readline"; 2 | import * as Admin from "./admin"; 3 | import * as Producer from "./producer"; 4 | import * as Consumer from "./consumer"; 5 | const rl = readline.createInterface({ 6 | input: process.stdin, 7 | output: process.stdout, 8 | }); 9 | async function start() { 10 | const topic = "chat-room"; 11 | console.log(`Creating topic: ${topic}`); 12 | await Admin.createTopic(topic); 13 | console.log("Connecting..."); 14 | await Consumer.connect(); 15 | rl.question("Enter user name: \n", async function (username) { 16 | const sendMessage = await Producer.getConnection(username); 17 | if (sendMessage) { 18 | console.log("Connected, press Ctrl+C to exit"); 19 | rl.on("line", (input) => { 20 | readline.moveCursor(process.stdout, 0, -1); 21 | sendMessage(input); 22 | }); 23 | } else { 24 | console.error("Failed to initialize sendMessage function"); 25 | } 26 | }); 27 | } 28 | start(); 29 | process.on("SIGINT", async () => { 30 | console.log('Closing app...'); 31 | try { 32 | await Producer.disconnect(); 33 | await Consumer.disconnect(); 34 | rl.close(); 35 | } catch (err) { 36 | console.error('Error during cleanup:', err); 37 | process.exit(1); 38 | } finally { 39 | console.log('Cleanup finished. 
Exiting'); 40 | process.exit(0); 41 | } 42 | }); -------------------------------------------------------------------------------- /clients/chat-room/docker/go/admin.go: -------------------------------------------------------------------------------- 1 | package main 2 | import ( 3 | "context" 4 | "fmt" 5 | "github.com/twmb/franz-go/pkg/kadm" 6 | "github.com/twmb/franz-go/pkg/kgo" 7 | ) 8 | type Admin struct { 9 | client *kadm.Client 10 | } 11 | func NewAdmin(brokers []string) *Admin { 12 | client, err := kgo.NewClient( 13 | kgo.SeedBrokers(brokers...), 14 | ) 15 | if err != nil { 16 | panic(err) 17 | } 18 | admin := kadm.NewClient(client) 19 | return &Admin{client: admin} 20 | } 21 | func (a *Admin) TopicExists(topic string) bool { 22 | ctx := context.Background() 23 | topicsMetadata, err := a.client.ListTopics(ctx) 24 | if err != nil { 25 | panic(err) 26 | } 27 | for _, metadata := range topicsMetadata { 28 | if metadata.Topic == topic { 29 | return true 30 | } 31 | } 32 | return false 33 | } 34 | func (a *Admin) CreateTopic(topic string) { 35 | ctx := context.Background() 36 | resp, err := a.client.CreateTopics(ctx, 1, 1, nil, topic) 37 | if err != nil { 38 | panic(err) 39 | } 40 | for _, ctr := range resp { 41 | if ctr.Err != nil { 42 | fmt.Printf("Unable to create topic '%s': %s", ctr.Topic, ctr.Err) 43 | } else { 44 | fmt.Printf("Created topic '%s'\n", ctr.Topic) 45 | } 46 | } 47 | } 48 | func (a *Admin) Close() { 49 | a.client.Close() 50 | } -------------------------------------------------------------------------------- /clients/chat-room/cloud/java/src/main/java/com/example/ChatConsumer.java: -------------------------------------------------------------------------------- 1 | package com.example; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerRecords; 4 | import org.apache.kafka.clients.consumer.KafkaConsumer; 5 | import org.apache.kafka.clients.consumer.ConsumerRecord; 6 | import com.google.gson.Gson; 7 | import com.google.gson.reflect.TypeToken; 
package com.example;

import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.util.Map;
import java.time.Duration;
import java.util.Collections;

/**
 * Polls the chat topic and prints each record as "user: message".
 *
 * <p>Runs on its own thread ({@link Runnable}); {@link #close()} may be
 * called from any other thread to stop it.
 */
public class ChatConsumer implements Runnable, AutoCloseable {
    private volatile boolean running = true;
    private final KafkaConsumer<String, String> consumer;
    private final Gson gson;
    private final Type type;

    /**
     * Subscribes a new consumer to {@code topic} using connection properties
     * supplied by {@link Admin#getConsumerProps(String)}.
     */
    public ChatConsumer(String topic, String groupId) {
        this.consumer = new KafkaConsumer<>(Admin.getConsumerProps(groupId));
        this.consumer.subscribe(Collections.singletonList(topic));
        this.gson = new Gson();
        // Chat payloads are flat JSON objects; deserialize to String->String maps.
        this.type = new TypeToken<Map<String, String>>() {}.getType();
    }

    /** Poll loop: prints every message until {@link #close()} is called. */
    @Override
    public void run() {
        try {
            while (running) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                for (ConsumerRecord<String, String> record : records) {
                    Map<String, String> messageMap = gson.fromJson(record.value(), type);
                    System.out.println(messageMap.get("user") + ": " + messageMap.get("message"));
                }
            }
        } catch (org.apache.kafka.common.errors.WakeupException e) {
            // Expected when close() interrupts a blocking poll(); only
            // rethrow if we were not actually shutting down.
            if (running) {
                throw e;
            }
        } finally {
            // The polling thread owns the consumer, so it performs the close.
            consumer.close();
        }
    }

    /**
     * Stops the poll loop. Fix: KafkaConsumer is not thread-safe, and the
     * original close() called consumer.close() from the caller's thread while
     * run() was still polling, risking a ConcurrentModificationException.
     * wakeup() is the one documented thread-safe call: it aborts the blocking
     * poll(), and run()'s finally block closes the consumer on its own thread.
     */
    @Override
    public void close() {
        running = false;
        consumer.wakeup();
    }
}
#   - nodeSelector ensures MinIO runs on a specific worker node so we only need to load the built Docker image onto that node
13 | brokers: 14 | - 127.0.0.1:19092 # Broker 1: Accessible on localhost, port 19092 15 | - 127.0.0.1:29092 # Broker 2: Accessible on localhost, port 29092 16 | - 127.0.0.1:39092 # Broker 3: Accessible on localhost, port 39092 17 | 18 | # Configuration for connecting to the Redpanda Admin API. 19 | # The Admin API allows you to perform administrative tasks such as managing configurations, monitoring, and scaling. 20 | admin_api: 21 | # List of Admin API endpoints for managing the cluster. 22 | addresses: 23 | - 127.0.0.1:19644 # Admin API for Broker 1: Accessible on localhost, port 19644 24 | - 127.0.0.1:29644 # Admin API for Broker 2: Accessible on localhost, port 29644 25 | - 127.0.0.1:39644 # Admin API for Broker 3: Accessible on localhost, port 39644 26 | -------------------------------------------------------------------------------- /clients/chat-room/cloud/go/consumer.go: -------------------------------------------------------------------------------- 1 | package main 2 | import ( 3 | "context" 4 | "encoding/json" 5 | "fmt" 6 | "crypto/tls" 7 | "github.com/twmb/franz-go/pkg/kgo" 8 | "github.com/twmb/franz-go/pkg/sasl/scram" 9 | "github.com/google/uuid" 10 | ) 11 | type Consumer struct { 12 | client *kgo.Client 13 | topic string 14 | } 15 | func NewConsumer(brokers []string, topic string) *Consumer { 16 | groupID := uuid.New().String() 17 | client, err := kgo.NewClient( 18 | kgo.SeedBrokers(brokers...), 19 | kgo.DialTLSConfig(new(tls.Config)), 20 | kgo.SASL(scram.Auth{User: "redpanda-chat-account",Pass: "", 21 | }.AsSha256Mechanism()), 22 | kgo.ConsumerGroup(groupID), 23 | kgo.ConsumeTopics(topic), 24 | kgo.ConsumeResetOffset(kgo.NewOffset().AtStart()), 25 | ) 26 | if err != nil { 27 | panic(err) 28 | } 29 | return &Consumer{client: client, topic: topic} 30 | } 31 | func (c *Consumer) PrintMessages() { 32 | ctx := context.Background() 33 | for { 34 | fetches := c.client.PollFetches(ctx) 35 | iter := fetches.RecordIter() 36 | for !iter.Done() { 37 | record := 
iter.Next() 38 | var msg Message 39 | if err := json.Unmarshal(record.Value, &msg); err != nil { 40 | fmt.Printf("Error decoding message: %v\n", err) 41 | continue 42 | } 43 | fmt.Printf("%s: %s\n", msg.User, msg.Message) 44 | } 45 | } 46 | } 47 | func (c *Consumer) Close() { 48 | c.client.Close() 49 | } -------------------------------------------------------------------------------- /clients/chat-room/cloud/rust/src/producer.rs: -------------------------------------------------------------------------------- 1 | use rdkafka::config::ClientConfig; 2 | use rdkafka::producer::{FutureProducer, FutureRecord}; 3 | use rdkafka::util::Timeout; 4 | use serde::{Deserialize, Serialize}; 5 | use std::time::Duration; 6 | 7 | #[derive(Serialize, Deserialize, Debug, Clone)] 8 | pub struct ChatMessage { 9 | pub username: String, 10 | pub message: String, 11 | } 12 | 13 | pub struct ChatProducer { 14 | producer: FutureProducer, 15 | topic: String, 16 | } 17 | 18 | impl ChatProducer { 19 | pub fn new(brokers: &str, topic: &str, username: &str, password: &str) -> Self { 20 | let producer: FutureProducer = ClientConfig::new() 21 | .set("bootstrap.servers", brokers) 22 | .set("security.protocol", "SASL_SSL") 23 | .set("sasl.mechanisms", "SCRAM-SHA-256") 24 | .set("sasl.username", username) 25 | .set("sasl.password", password) 26 | .create() 27 | .expect("Producer creation failed"); 28 | 29 | ChatProducer { 30 | producer, 31 | topic: topic.to_string(), 32 | } 33 | } 34 | 35 | pub async fn send_message(&self, message: ChatMessage) { 36 | let payload = serde_json::to_string(&message).expect("Failed to serialize message"); 37 | 38 | self.producer 39 | .send( 40 | FutureRecord::to(&self.topic) 41 | .payload(&payload) 42 | .key(&message.username), 43 | Timeout::After(Duration::from_secs(0)), 44 | ) 45 | .await 46 | .unwrap(); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /clients/chat-room/cloud/go/admin.go: 
-------------------------------------------------------------------------------- 1 | package main 2 | import ( 3 | "context" 4 | "fmt" 5 | "crypto/tls" 6 | "github.com/twmb/franz-go/pkg/kadm" 7 | "github.com/twmb/franz-go/pkg/kgo" 8 | "github.com/twmb/franz-go/pkg/sasl/scram" 9 | ) 10 | type Admin struct { 11 | client *kadm.Client 12 | } 13 | func NewAdmin(brokers []string) *Admin { 14 | client, err := kgo.NewClient( 15 | kgo.SeedBrokers(brokers...), 16 | kgo.DialTLSConfig(new(tls.Config)), 17 | kgo.SASL(scram.Auth{User: "redpanda-chat-account",Pass: "", 18 | }.AsSha256Mechanism()), 19 | ) 20 | if err != nil { 21 | panic(err) 22 | } 23 | admin := kadm.NewClient(client) 24 | return &Admin{client: admin} 25 | } 26 | func (a *Admin) TopicExists(topic string) bool { 27 | ctx := context.Background() 28 | topicsMetadata, err := a.client.ListTopics(ctx) 29 | if err != nil { 30 | panic(err) 31 | } 32 | for _, metadata := range topicsMetadata { 33 | if metadata.Topic == topic { 34 | return true 35 | } 36 | } 37 | return false 38 | } 39 | func (a *Admin) CreateTopic(topic string) { 40 | ctx := context.Background() 41 | resp, err := a.client.CreateTopics(ctx, 1, 1, nil, topic) 42 | if err != nil { 43 | panic(err) 44 | } 45 | for _, ctr := range resp { 46 | if ctr.Err != nil { 47 | fmt.Printf("Unable to create topic '%s': %s", ctr.Topic, ctr.Err) 48 | } else { 49 | fmt.Printf("Created topic '%s'\n", ctr.Topic) 50 | } 51 | } 52 | } 53 | func (a *Admin) Close() { 54 | a.client.Close() 55 | } -------------------------------------------------------------------------------- /.github/workflows/test-docs.yml: -------------------------------------------------------------------------------- 1 | name: Run doc tests 2 | 3 | on: 4 | pull_request: 5 | paths-ignore: 6 | - '**/docs/**' 7 | 8 | jobs: 9 | setup: 10 | runs-on: ubuntu-latest 11 | outputs: 12 | data-transforms: ${{ steps.filter.outputs.data-transforms }} 13 | steps: 14 | - name: Checkout code 15 | uses: actions/checkout@v4 16 | 
17 | - name: Determine changed paths 18 | id: filter 19 | uses: dorny/paths-filter@v3 20 | with: 21 | filters: | 22 | data-transforms: 23 | - 'data-transforms/**' 24 | run-tests: 25 | needs: setup 26 | strategy: 27 | fail-fast: false # Ensure all matrix jobs run to completion even if one fails 28 | matrix: 29 | os: [ubuntu-latest] # Only using Linux for now since macOS takes a long time 30 | runs-on: ${{ matrix.os }} 31 | steps: 32 | - uses: actions/checkout@v4 33 | with: 34 | token: ${{ secrets.GITHUB_TOKEN }} 35 | 36 | - name: Set up Node.js 37 | uses: actions/setup-node@v4 38 | with: 39 | node-version: '18' 40 | 41 | - name: Install dependencies 42 | run: npm install 43 | 44 | - name: Test data transforms 45 | if: needs.setup.outputs.data-transforms == 'true' 46 | run: | 47 | # Run the tests for the data transforms 48 | npm run test-transforms 49 | 50 | - name: Upload debug artifacts 51 | if: failure() 52 | uses: actions/upload-artifact@v4 53 | with: 54 | name: doc-detective-output 55 | path: /home/runner/work/_temp/doc-detective-output.json 56 | -------------------------------------------------------------------------------- /setup-tests/fetch-versions-and-rpk.json: -------------------------------------------------------------------------------- 1 | { 2 | "tests": [ 3 | { 4 | "id": "fetch-versions-and-install-rpk", 5 | "description": "Fetch the latest versions of Redpanda and Redpanda Console for tests", 6 | "steps": [ 7 | { 8 | "action": "runShell", 9 | "command": "npm install" 10 | }, 11 | { 12 | "action": "runShell", 13 | "command": "npx doc-tools get-console-version --from-antora", 14 | "setVariables": [ 15 | { 16 | "name": "REDPANDA_CONSOLE_VERSION", 17 | "regex": "(?<=CONSOLE_VERSION=)(.*)" 18 | }, 19 | { 20 | "name": "CONSOLE_DOCKER_REPO", 21 | "regex": "(?<=CONSOLE_DOCKER_REPO=)(.*)" 22 | } 23 | ] 24 | }, 25 | { 26 | "action": "runShell", 27 | "command": "npx doc-tools get-redpanda-version --from-antora", 28 | "setVariables": [ 29 | { 30 | "name": 
"REDPANDA_VERSION", 31 | "regex": "(?<=REDPANDA_VERSION=)(.*)" 32 | }, 33 | { 34 | "name": "REDPANDA_DOCKER_REPO", 35 | "regex": "(?<=REDPANDA_DOCKER_REPO=)(.*)" 36 | } 37 | ] 38 | }, 39 | { 40 | "action": "runShell", 41 | "command": "npx doc-tools install-test-dependencies" 42 | }, 43 | { 44 | "action": "runShell", 45 | "command": "echo $REDPANDA_VERSION" 46 | }, 47 | { 48 | "action": "runShell", 49 | "command": "echo $REDPANDA_CONSOLE_VERSION" 50 | } 51 | ] 52 | } 53 | ] 54 | } 55 | -------------------------------------------------------------------------------- /clients/chat-room/docker/rust/src/admin.rs: -------------------------------------------------------------------------------- 1 | use rdkafka::admin::{AdminClient, AdminOptions, NewTopic, TopicReplication}; 2 | use rdkafka::client::DefaultClientContext; 3 | use rdkafka::config::ClientConfig; 4 | use rdkafka::error::KafkaResult; 5 | use rdkafka::util::Timeout; 6 | use std::time::Duration; 7 | 8 | pub struct Admin { 9 | client: AdminClient, 10 | } 11 | 12 | impl Admin { 13 | pub fn new(brokers: &str) -> Self { 14 | let client: AdminClient = ClientConfig::new() 15 | .set("bootstrap.servers", brokers) 16 | .create() 17 | .expect("Admin client creation error"); 18 | 19 | Admin { client } 20 | } 21 | 22 | pub async fn topic_exists(&self, topic: &str) -> KafkaResult { 23 | let metadata = self.client.inner().fetch_metadata(None, Timeout::Never)?; 24 | Ok(metadata.topics().iter().any(|t| t.name() == topic)) 25 | } 26 | 27 | pub async fn create_topic(&self, topic: &str) -> KafkaResult<()> { 28 | let new_topic = NewTopic::new(topic, 1, TopicReplication::Fixed(1)); 29 | let res = self 30 | .client 31 | .create_topics( 32 | &[new_topic], 33 | &AdminOptions::new() 34 | .operation_timeout(Some(Timeout::After(Duration::from_secs(10)))), 35 | ) 36 | .await?; 37 | 38 | for result in res { 39 | match result { 40 | Ok(_) => println!("Topic {} created successfully", topic), 41 | Err((err, _)) => eprintln!("Failed to create 
topic {}: {:?}", topic, err), 42 | } 43 | } 44 | Ok(()) 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /docker-compose/iceberg/spark/spark-defaults.conf: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | # 17 | 18 | # Default system properties included when running spark-submit. 19 | # This is useful for setting default environmental settings. 
20 | 21 | # Example: 22 | spark.sql.extensions org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions 23 | spark.sql.catalog.lab org.apache.iceberg.spark.SparkCatalog 24 | spark.sql.catalog.lab.type rest 25 | spark.sql.catalog.lab.uri http://catalog:8181 26 | spark.sql.catalog.lab.io-impl org.apache.iceberg.aws.s3.S3FileIO 27 | spark.sql.catalog.lab.warehouse s3://redpanda/ 28 | spark.sql.catalog.lab.s3.endpoint http://minio:9000 29 | spark.sql.defaultCatalog lab 30 | spark.eventLog.enabled true 31 | spark.eventLog.dir /home/iceberg/spark-events 32 | spark.history.fs.logDirectory /home/iceberg/spark-events 33 | spark.sql.catalogImplementation in-memory 34 | spark.sql.catalog.lab.cache-enabled false 35 | 36 | -------------------------------------------------------------------------------- /data-transforms/rust/ts-converter/src/schema.rs: -------------------------------------------------------------------------------- 1 | use anyhow::bail; 2 | 3 | pub const MAGIC_BYTES: [u8; 1] = [0x00]; 4 | 5 | /// Given a slice of bytes, peel off the magic byte and schema id. Returns a tuple 6 | /// of the schema id and a new reference to the remaining data in the slice. 7 | pub fn decompose(buf: &[u8]) -> anyhow::Result<(i32, &[u8])> { 8 | if !buf.starts_with(&MAGIC_BYTES) { 9 | bail!("missing magic byte") 10 | } 11 | if buf.len() < 5 { 12 | bail!("frame too short") 13 | } 14 | 15 | // XXX: Confluent's Python driver decodes as an unsigned int...but I think 16 | // the actual API uses a signed int. 
¯\_(ツ)_/¯ 17 | let id = i32::from_be_bytes([buf[1], buf[2], buf[3], buf[4]]); 18 | Ok((id, &buf[5..])) 19 | } 20 | 21 | #[cfg(test)] 22 | mod tests { 23 | use crate::schema::{decompose, MAGIC_BYTES}; 24 | 25 | #[test] 26 | fn test_decompose_avro() { 27 | let good: [u8; 7] = [ 28 | MAGIC_BYTES[0], 29 | 0x00, // 1234, big endian 30 | 0x00, 31 | 0x04, 32 | 0xd2, 33 | 0xbe, // garbage data for now 34 | 0xef, 35 | ]; 36 | let (id, buf) = decompose(&good).unwrap(); 37 | assert_eq!(1234, id, "should find the schema id"); 38 | assert_eq!([0xbe, 0xef], buf, "should point to actual data"); 39 | 40 | let bad_id: [u8; 3] = [MAGIC_BYTES[0], 0x00, 0x00]; 41 | assert!( 42 | decompose(&bad_id).is_err(), 43 | "should fail to decompose short id" 44 | ); 45 | 46 | let mut bad_magic = good.clone(); 47 | bad_magic[0] = 0xff; 48 | assert!( 49 | decompose(&bad_magic).is_err(), 50 | "should fail on bad magic byte" 51 | ); 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Slack](https://img.shields.io/badge/Slack-Redpanda%20Community-blue)](https://redpanda.com/slack) 2 | 3 | ## Redpanda Labs 4 | 5 | Redpanda Labs is the home for examples, experiments and research projects created by the Customer Success and Marketing teams at Redpanda. 6 | 7 | Labs projects intend to showcase what is possible to achieve with Redpanda as the centerpiece of your streaming data architecture. Some of these projects may make it into the product, and many will not, but what they will do is provide examples, guidance, best practices, and most importantly give you ideas for how you can use Redpanda in your own projects. 8 | 9 | Contributions are welcome. Just fork the repo (or submodule) and send a pull request against the upstream `main` branch. 
10 | 11 | ## Lab Projects 12 | 13 | | Project | Description | 14 | | ------------- | ------------- | 15 | | [`clients`](https://github.com/redpanda-data/redpanda-labs/tree/main/clients) | A collection of Redpanda clients available in different programming languages. | 16 | | [`data-transforms`](https://github.com/redpanda-data/redpanda-labs/tree/main/data-transforms) | Example topic data transforms powered by WebAssembly (Wasm). | 17 | | [`docker-compose`](https://github.com/redpanda-data/redpanda-labs/tree/main/docker-compose) | Example deployments of Redpanda, Redpanda Console, and Redpanda Connectors using Docker. | 18 | | [`kubernetes`](https://github.com/redpanda-data/redpanda-labs/tree/main/kubernetes) | Examples of deploying and managing Redpanda in Kubernetes. | 19 | | [`redpanda-edge-agent`](https://github.com/redpanda-data/redpanda-edge-agent) | Lightweight Internet of Things (IoT) agent that forwards events from the edge. | 20 | 21 | ## Update submodules 22 | 23 | ``` 24 | git submodule update --remote --recursive 25 | ``` 26 | -------------------------------------------------------------------------------- /data-transforms/go/regex/test.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const path = require('path'); 3 | const { runTests } = require("doc-detective-core"); 4 | 5 | const configPath = path.join(process.cwd(), 'doc-tests/test-config.json'); 6 | const outputPath = path.join(process.cwd(), 'doc-tests/test_output.json'); 7 | 8 | try { 9 | // Read the configuration file 10 | const rawData = fs.readFileSync(configPath); 11 | const config = JSON.parse(rawData); 12 | 13 | runTests(config) 14 | .then((report) => { 15 | const failedSteps = report.specs.flatMap((spec) => 16 | spec.tests.flatMap((test) => 17 | test.contexts 18 | .filter(context => context.result === "FAIL") 19 | .map(context => ({ 20 | ...context, 21 | file: spec.file, // Include file info for each context 22 | steps: 
context.steps.map(step => ({ 23 | ...step, 24 | ...(step.action === 'typeKeys' && { keys: ['***'] }) // Mask keys if action is typeKeys 25 | })) 26 | })) 27 | ) 28 | ); 29 | 30 | if (failedSteps.length > 0) { 31 | fs.writeFileSync(outputPath, JSON.stringify(failedSteps, null, 2)); 32 | console.log('Failed tests have been written to test_output.json'); 33 | console.log(JSON.stringify(failedSteps, null, 2)) 34 | process.exit(1) 35 | } else { 36 | console.log('All tests passed.'); 37 | } 38 | }) 39 | .catch((error) => { 40 | console.error('Error running tests:', error); 41 | fs.writeFileSync(outputPath, `Error running tests: ${error}`); 42 | process.exit(1) 43 | }); 44 | } catch (error) { 45 | console.error('Failed to read config or run tests:', error); 46 | fs.writeFileSync(outputPath, `Failed to read config or run tests: ${error}`); 47 | process.exit(1) 48 | } -------------------------------------------------------------------------------- /data-transforms/go/redaction/go.sum: -------------------------------------------------------------------------------- 1 | github.com/pmw-rp/jsonparser v0.1.0 h1:5NX0Y01lTtIyVpx7qupXfPKLI0vfUsU5Eo5eJf+BZp8= 2 | github.com/pmw-rp/jsonparser v0.1.0/go.mod h1:lKsrENHClCgK97fCySIOD8qQsbJTScm5KkRr0JINEZg= 3 | github.com/pmw-rp/jsonparser v0.1.1 h1:GBWKKS53/HZNRGXQ4XprAHjagFs8GZSXTFbGp5/AeZI= 4 | github.com/pmw-rp/jsonparser v0.1.1/go.mod h1:mywYZlrI7dYAjoXUIcgNTUqnZ1JwHkNBlm8DpMyoHEk= 5 | github.com/pmw-rp/splice v0.0.12 h1:mTrrRsEYDXlOTfj5DLLmd4iTXfxEiIvgohj7iaTT31w= 6 | github.com/pmw-rp/splice v0.0.12/go.mod h1:almGbBrxXkl+UmLRKKoOZYvSGu8AQ6cuiKGOsdapmn0= 7 | github.com/pmw-rp/splice v0.1.0 h1:BrfOmSz2sk2wyQOWdV+aMB0xl5NKaiugsiSkD/eI9EA= 8 | github.com/pmw-rp/splice v0.1.0/go.mod h1:almGbBrxXkl+UmLRKKoOZYvSGu8AQ6cuiKGOsdapmn0= 9 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v0.0.0-20231208202813-49a40df5c5b5 h1:07rMGWKznDhdTckYXnmV14wx3ExSfYv9ib23tWa3SbM= 10 | 
github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v0.0.0-20231208202813-49a40df5c5b5/go.mod h1:QGgiwwf/BIsD1b7EiyQ/Apzw+RLSpasRDdpOCiefQFQ= 11 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.0.2 h1:34F42buBTGuK1uaXKky1PdxAZzqMh6kQE1ojCLf/hWw= 12 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.0.2/go.mod h1:QGgiwwf/BIsD1b7EiyQ/Apzw+RLSpasRDdpOCiefQFQ= 13 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.1.0 h1:KxgHJZsHsrT3YX7DMpu/vJN4TZN3KFm1jzrCFLyOepA= 14 | github.com/redpanda-data/redpanda/src/transform-sdk/go/transform v1.1.0/go.mod h1:QGgiwwf/BIsD1b7EiyQ/Apzw+RLSpasRDdpOCiefQFQ= 15 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= 16 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 17 | gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= 18 | gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= 19 | -------------------------------------------------------------------------------- /data-transforms/go/redaction/transform.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bytes" 5 | "compress/gzip" 6 | "encoding/base64" 7 | "github.com/redpanda-data/redpanda/src/transform-sdk/go/transform" 8 | "io" 9 | "log" 10 | "os" 11 | "redactor/redaction" 12 | ) 13 | 14 | func logError(err error) { 15 | log.Print(err) 16 | } 17 | 18 | func decodeConfig(s string) ([]byte, error) { 19 | data, err := base64.StdEncoding.DecodeString(s) 20 | if err != nil { 21 | return []byte{}, redaction.Wrap("unable to base64 decode config", err) 22 | } 23 | reader, err := gzip.NewReader(bytes.NewReader(data)) 24 | if err != nil { 25 | return []byte{}, redaction.Wrap("unable to decompress config", err) 26 | } 27 | uncompressed, err := io.ReadAll(reader) 28 | if err != nil { 29 | return 
[]byte{}, redaction.Wrap("unable to read config into []byte", err) 30 | } 31 | return uncompressed, nil 32 | } 33 | 34 | func main() { 35 | // Register your transforms function. 36 | // This is a good place to perform other setup too. 37 | config, err := decodeConfig(os.Getenv("CONFIG")) 38 | if err != nil { 39 | logError(redaction.Wrap("unable to decode config", err)) 40 | return 41 | } 42 | err = redaction.Initialise(config) 43 | if err != nil { 44 | logError(redaction.Wrap("unable to initialise config", err)) 45 | } 46 | transform.OnRecordWritten(doTransform) 47 | } 48 | 49 | // doTransform is where you read the record that was written, and then you can 50 | // return new records that will be written to the output topic 51 | func doTransform(e transform.WriteEvent) ([]transform.Record, error) { 52 | var result []transform.Record 53 | redacted, err := redaction.Redact(e.Record().Value) 54 | if err != nil { 55 | return nil, redaction.Wrap("unable to redact record", err) 56 | } 57 | var record = transform.Record{Headers: e.Record().Headers, Key: e.Record().Key, Value: redacted} 58 | result = append(result, record) 59 | return result, nil 60 | } 61 | -------------------------------------------------------------------------------- /clients/chat-room/cloud/rust/src/admin.rs: -------------------------------------------------------------------------------- 1 | use rdkafka::admin::{AdminClient, AdminOptions, NewTopic, TopicReplication}; 2 | use rdkafka::client::DefaultClientContext; 3 | use rdkafka::config::ClientConfig; 4 | use rdkafka::error::KafkaResult; 5 | use rdkafka::util::Timeout; 6 | use std::time::Duration; 7 | 8 | pub struct Admin { 9 | client: AdminClient, 10 | } 11 | 12 | impl Admin { 13 | pub fn new(brokers: &str, username: &str, password: &str) -> Self { 14 | let client: AdminClient = ClientConfig::new() 15 | .set("bootstrap.servers", brokers) 16 | .set("security.protocol", "SASL_SSL") 17 | .set("sasl.mechanisms", "SCRAM-SHA-256") 18 | 
.set("sasl.username", username) 19 | .set("sasl.password", password) 20 | .create() 21 | .expect("Admin client creation error"); 22 | 23 | Admin { client } 24 | } 25 | 26 | pub async fn topic_exists(&self, topic: &str) -> KafkaResult { 27 | let metadata = self.client.inner().fetch_metadata(None, Timeout::Never)?; 28 | Ok(metadata.topics().iter().any(|t| t.name() == topic)) 29 | } 30 | 31 | pub async fn create_topic(&self, topic: &str) -> KafkaResult<()> { 32 | let new_topic = NewTopic::new(topic, 1, TopicReplication::Fixed(1)); 33 | let res = self 34 | .client 35 | .create_topics( 36 | &[new_topic], 37 | &AdminOptions::new() 38 | .operation_timeout(Some(Timeout::After(Duration::from_secs(10)))), 39 | ) 40 | .await?; 41 | 42 | for result in res { 43 | match result { 44 | Ok(_) => println!("Topic {} created successfully", topic), 45 | Err((err, _)) => eprintln!("Failed to create topic {}: {:?}", topic, err), 46 | } 47 | } 48 | Ok(()) 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /data-transforms/go/redaction/cmd/redact.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "errors" 6 | "flag" 7 | "fmt" 8 | "io" 9 | "os" 10 | "redactor/redaction" 11 | ) 12 | 13 | func readToBytes(filename string) ([]byte, error) { 14 | file, err := os.Open(filename) 15 | if err != nil { 16 | fmt.Println(err) 17 | os.Exit(1) 18 | } 19 | 20 | // Get the file size 21 | stat, err := file.Stat() 22 | if err != nil { 23 | fmt.Println(err) 24 | return nil, err 25 | } 26 | 27 | // Read the file into a byte slice 28 | bs := make([]byte, stat.Size()) 29 | _, err = bufio.NewReader(file).Read(bs) 30 | if err != nil && err != io.EOF { 31 | fmt.Println(err) 32 | return nil, err 33 | } 34 | 35 | return bs, nil 36 | } 37 | 38 | func processArguments() ([]byte, []byte) { 39 | configFilename := flag.String("config", "example/config.yaml", "filename of the redaction 
config") 40 | inputFilename := flag.String("input", "example/input.json", "filename of the JSON message to parse") 41 | 42 | flag.Parse() 43 | 44 | if _, err := os.Stat(*configFilename); errors.Is(err, os.ErrNotExist) { 45 | fmt.Printf("config file does not exist: %s", *configFilename) 46 | os.Exit(1) 47 | } 48 | 49 | if _, err := os.Stat(*inputFilename); errors.Is(err, os.ErrNotExist) { 50 | fmt.Printf("input file does not exist: %s", *inputFilename) 51 | os.Exit(1) 52 | } 53 | config, err := readToBytes(*configFilename) 54 | if err != nil { 55 | fmt.Println(err) 56 | os.Exit(1) 57 | } 58 | input, err := readToBytes(*inputFilename) 59 | if err != nil { 60 | fmt.Println(err) 61 | os.Exit(1) 62 | } 63 | 64 | return config, input 65 | } 66 | 67 | func main() { 68 | config, input := processArguments() 69 | err := redaction.Initialise(config) 70 | if err != nil { 71 | fmt.Println(err) 72 | os.Exit(1) 73 | } 74 | redacted, err := redaction.Redact(input) 75 | if err != nil { 76 | fmt.Println(err) 77 | os.Exit(1) 78 | } 79 | output := string(redacted) 80 | fmt.Println(output) 81 | } 82 | -------------------------------------------------------------------------------- /kubernetes/iceberg/spark/spark-defaults.conf: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. 
You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | # 17 | 18 | # Default system properties included when running spark-submit. 19 | # This is useful for setting default environmental settings. 20 | # Kubernetes-specific configuration with proper service DNS names 21 | 22 | # Iceberg Spark extensions and catalog configuration 23 | spark.sql.extensions org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions 24 | spark.sql.catalog.lab org.apache.iceberg.spark.SparkCatalog 25 | spark.sql.catalog.lab.type rest 26 | spark.sql.catalog.lab.uri http://iceberg-rest.iceberg-lab.svc.cluster.local:8181 27 | spark.sql.catalog.lab.io-impl org.apache.iceberg.aws.s3.S3FileIO 28 | spark.sql.catalog.lab.warehouse s3://redpanda/ 29 | spark.sql.catalog.lab.s3.endpoint http://iceberg-minio-hl.iceberg-lab.svc.cluster.local:9000 30 | spark.sql.catalog.lab.s3.path-style-access true 31 | spark.sql.catalog.lab.s3.access-key-id minio 32 | spark.sql.catalog.lab.s3.secret-access-key minio123 33 | spark.sql.defaultCatalog lab 34 | spark.eventLog.enabled true 35 | spark.eventLog.dir /home/iceberg/spark-events 36 | spark.history.fs.logDirectory /home/iceberg/spark-events 37 | spark.sql.catalogImplementation in-memory 38 | spark.sql.catalog.lab.cache-enabled false 39 | 40 | -------------------------------------------------------------------------------- /data-transforms/go/regex/transform.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "log" 5 | "os" 6 | "regexp" 7 | "strings" 8 | 9 | 
"github.com/redpanda-data/redpanda/src/transform-sdk/go/transform" 10 | ) 11 | 12 | var ( 13 | re *regexp.Regexp 14 | checkValue bool 15 | ) 16 | 17 | func isTrueVar(v string) bool { 18 | switch strings.ToLower(v) { 19 | case "yes", "ok", "1", "true": 20 | return true 21 | default: 22 | return false 23 | } 24 | } 25 | 26 | func main() { 27 | // Optionally configure log format, prefix, or flags 28 | log.SetPrefix("[regex-transform] ") 29 | log.SetFlags(log.Ldate | log.Ltime | log.LUTC | log.Lmicroseconds) 30 | 31 | log.Println("Starting transform...") 32 | 33 | pattern, ok := os.LookupEnv("PATTERN") 34 | if !ok { 35 | log.Fatal("Missing PATTERN environment variable") 36 | } 37 | log.Printf("Using PATTERN: %q\n", pattern) 38 | re = regexp.MustCompile(pattern) 39 | 40 | mk, ok := os.LookupEnv("MATCH_VALUE") 41 | checkValue = ok && isTrueVar(mk) 42 | log.Printf("MATCH_VALUE set to: %t\n", checkValue) 43 | 44 | log.Println("Initialization complete, waiting for records...") 45 | 46 | transform.OnRecordWritten(doRegexFilter) 47 | } 48 | 49 | func doRegexFilter(e transform.WriteEvent, w transform.RecordWriter) error { 50 | var dataToCheck []byte 51 | if checkValue { 52 | dataToCheck = e.Record().Value 53 | log.Printf("Checking record value: %s\n", string(dataToCheck)) 54 | } else { 55 | dataToCheck = e.Record().Key 56 | log.Printf("Checking record key: %s\n", string(dataToCheck)) 57 | } 58 | 59 | if dataToCheck == nil { 60 | log.Println("Record has no key/value to check, skipping.") 61 | return nil 62 | } 63 | 64 | pass := re.Match(dataToCheck) 65 | if pass { 66 | log.Printf("Record matched pattern, passing through. Key: %s, Value: %s\n", string(e.Record().Key), string(e.Record().Value)) 67 | return w.Write(e.Record()) 68 | } else { 69 | log.Printf("Record did not match pattern, dropping. 
Key: %s, Value: %s\n", string(e.Record().Key), string(e.Record().Value)) 70 | return nil 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /kubernetes/iceberg/spark/.pyiceberg.yaml: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed to the Apache Software Foundation (ASF) under one 3 | # or more contributor license agreements. See the NOTICE file 4 | # distributed with this work for additional information 5 | # regarding copyright ownership. The ASF licenses this file 6 | # to you under the Apache License, Version 2.0 (the 7 | # "License"); you may not use this file except in compliance 8 | # with the License. You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, 13 | # software distributed under the License is distributed on an 14 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | # KIND, either express or implied. See the License for the 16 | # specific language governing permissions and limitations 17 | # under the License. 18 | # 19 | 20 | # This configuration enables Python applications to connect to the Iceberg catalog: 21 | # 22 | # 1. REST Catalog Connection: 23 | # - Uses Kubernetes service DNS for reliable cluster-internal communication 24 | # - iceberg-rest.iceberg-lab.svc.cluster.local resolves to REST catalog pods 25 | # - Port 8181 is the standard Iceberg REST API endpoint 26 | # 27 | # 2. S3 Storage Configuration: 28 | # - MinIO headless service provides direct access to storage nodes 29 | # - Credentials match the MinIO tenant configuration 30 | # - Required for PyIceberg to read/write table data files 31 | # 32 | # 3. 
package com.example;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.admin.AdminClientConfig;
import java.util.Collections;
import java.util.Properties;

/**
 * Helpers for connecting to a local Redpanda broker: producer/consumer
 * client properties plus topic administration (existence check and
 * creation). All methods target the fixed localhost bootstrap address.
 */
public class Admin {
    private final static String BOOTSTRAP_SERVERS = "localhost:19092";

    /** Returns producer properties using String key/value serializers. */
    public static Properties getProducerProps() {
        Properties props = new Properties();
        props.put("bootstrap.servers", BOOTSTRAP_SERVERS);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        return props;
    }

    /**
     * Returns consumer properties using String key/value deserializers.
     *
     * @param groupId the consumer group to join
     */
    public static Properties getConsumerProps(String groupId) {
        Properties props = new Properties();
        props.put("bootstrap.servers", BOOTSTRAP_SERVERS);
        props.put("group.id", groupId);
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        return props;
    }

    /** Shared AdminClient properties (was duplicated in both admin methods). */
    private static Properties getAdminProps() {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
        return props;
    }

    /**
     * Returns whether a topic with the given name already exists.
     *
     * @throws RuntimeException wrapping any client or broker failure
     */
    public static boolean topicExists(String topicName) {
        try (AdminClient client = AdminClient.create(getAdminProps())) {
            return client.listTopics().names().get().contains(topicName);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Creates a topic with 1 partition and replication factor 1.
     *
     * @throws RuntimeException wrapping any client or broker failure
     */
    public static void createTopic(String topicName) {
        try (AdminClient client = AdminClient.create(getAdminProps())) {
            NewTopic newTopic = new NewTopic(topicName, 1, (short) 1);
            // Bug fix: wait for the creation to complete. Without
            // .all().get() the CreateTopicsResult was discarded, so
            // broker-side failures were silently dropped and the method
            // could return before the topic actually existed.
            client.createTopics(Collections.singletonList(newTopic)).all().get();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
The init container runs before the main application starts, guaranteeing that the necessary hostname mapping is present for seamless connectivity. 15 | 16 | ```yaml 17 | initContainers: 18 | - name: dns-resolver 19 | image: busybox:1.35 20 | command: ['sh', '-c'] 21 | args: 22 | - | 23 | # Resolve MinIO IP dynamically 24 | MINIO_IP=$(nslookup iceberg-minio-hl.iceberg-lab.svc.cluster.local | grep 'Address:' | tail -1 | awk '{print $2}') 25 | 26 | # Write DNS mappings to shared /etc/hosts 27 | cp /etc/hosts /shared/hosts 28 | echo "$MINIO_IP redpanda.iceberg-minio-hl.iceberg-lab.svc.cluster.local" >> /shared/hosts 29 | volumeMounts: 30 | - name: hosts-volume 31 | mountPath: /shared 32 | ``` 33 | 34 | ## Testing 35 | 36 | Verify the solution works: 37 | 38 | ```bash 39 | # Check deployment status 40 | kubectl get pods -n iceberg-lab -l app=iceberg-rest 41 | 42 | # Verify no DNS errors 43 | kubectl logs -n iceberg-lab deployment/iceberg-rest --tail=10 44 | 45 | # Test API endpoint 46 | kubectl port-forward -n iceberg-lab svc/iceberg-rest 8181:8181 & 47 | curl http://localhost:8181/v1/config 48 | 49 | # Should show successful table operations 50 | kubectl logs -n iceberg-lab deployment/iceberg-rest | grep -E "(Successfully|Table)" 51 | ``` 52 | -------------------------------------------------------------------------------- /data-transforms/go/to_avro/transform.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "os" 6 | "strconv" 7 | "strings" 8 | 9 | avro "github.com/linkedin/goavro/v2" 10 | "github.com/redpanda-data/redpanda/src/transform-sdk/go/transform" 11 | "github.com/redpanda-data/redpanda/src/transform-sdk/go/transform/sr" 12 | ) 13 | 14 | var codec avro.Codec 15 | 16 | func main() { 17 | // Register schema: 18 | // jq '. 
| {schema: tojson}' schema.avsc | \ 19 | // curl -X POST "http://localhost:58646/subjects/nasdaq_history_avro-value/versions" \ 20 | // -H "Content-Type: application/vnd.schemaregistry.v1+json" \ 21 | // -d @- 22 | idStr, set := os.LookupEnv("SCHEMA_ID") 23 | if !set { 24 | panic("SCHEMA_ID environment variable not set") 25 | } 26 | id, err := strconv.Atoi(idStr) 27 | if err != nil { 28 | panic(fmt.Sprintf("SCHEMA_ID not an integer: %s", idStr)) 29 | } 30 | registry := sr.NewClient() 31 | schema, err := registry.LookupSchemaById(id) 32 | if err != nil { 33 | panic(fmt.Sprintf("Unable to retrieve schema for id: %d", id)) 34 | } 35 | fmt.Printf("Schema: %s", schema.Schema) 36 | 37 | // Create Avro codec to use in transforms function 38 | c, err := avro.NewCodec(schema.Schema) 39 | if err != nil { 40 | panic(fmt.Sprintf("Error creating Avro codec: %v", err)) 41 | } 42 | codec = *c 43 | transform.OnRecordWritten(toAvro) 44 | } 45 | 46 | func parse(r string) (map[string]any, error) { 47 | p := strings.Split(r, ",") 48 | volume, err := strconv.Atoi(p[2]) 49 | if err != nil { 50 | return nil, err 51 | } 52 | m := map[string]any{ 53 | "Date": p[0], 54 | "Last": p[1], 55 | "Volume": volume, 56 | "Open": p[3], 57 | "High": p[4], 58 | "Low": p[5], 59 | } 60 | return m, nil 61 | } 62 | 63 | func toAvro(e transform.WriteEvent, w transform.RecordWriter) error { 64 | m, err := parse(string(e.Record().Value)) 65 | if err != nil { 66 | fmt.Printf("Unable to parse record value: %v", err) 67 | } 68 | binary, err := codec.BinaryFromNative(nil, m) 69 | if err != nil { 70 | fmt.Printf("Unable to encode map: %v", err) 71 | } 72 | record := transform.Record{ 73 | Key: e.Record().Key, 74 | Value: binary, 75 | } 76 | return w.Write(record) 77 | } 78 | -------------------------------------------------------------------------------- /clients/chat-room/cloud/nodejs/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "chat-room", 
3 | "version": "1.0.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "chat-room", 9 | "version": "1.0.0", 10 | "license": "ISC", 11 | "dependencies": { 12 | "kafkajs": "^2.2.4", 13 | "uuid": "^9.0.0" 14 | }, 15 | "devDependencies": { 16 | "@types/node": "^20.4.4", 17 | "@types/uuid": "^9.0.2", 18 | "typescript": "^5.1.6" 19 | } 20 | }, 21 | "node_modules/@types/node": { 22 | "version": "20.4.4", 23 | "resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.4.tgz", 24 | "integrity": "sha512-CukZhumInROvLq3+b5gLev+vgpsIqC2D0deQr/yS1WnxvmYLlJXZpaQrQiseMY+6xusl79E04UjWoqyr+t1/Ew==", 25 | "dev": true 26 | }, 27 | "node_modules/@types/uuid": { 28 | "version": "9.0.2", 29 | "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.2.tgz", 30 | "integrity": "sha512-kNnC1GFBLuhImSnV7w4njQkUiJi0ZXUycu1rUaouPqiKlXkh77JKgdRnTAp1x5eBwcIwbtI+3otwzuIDEuDoxQ==", 31 | "dev": true 32 | }, 33 | "node_modules/kafkajs": { 34 | "version": "2.2.4", 35 | "resolved": "https://registry.npmjs.org/kafkajs/-/kafkajs-2.2.4.tgz", 36 | "integrity": "sha512-j/YeapB1vfPT2iOIUn/vxdyKEuhuY2PxMBvf5JWux6iSaukAccrMtXEY/Lb7OvavDhOWME589bpLrEdnVHjfjA==", 37 | "engines": { 38 | "node": ">=14.0.0" 39 | } 40 | }, 41 | "node_modules/typescript": { 42 | "version": "5.1.6", 43 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.6.tgz", 44 | "integrity": "sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==", 45 | "dev": true, 46 | "bin": { 47 | "tsc": "bin/tsc", 48 | "tsserver": "bin/tsserver" 49 | }, 50 | "engines": { 51 | "node": ">=14.17" 52 | } 53 | }, 54 | "node_modules/uuid": { 55 | "version": "9.0.0", 56 | "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.0.tgz", 57 | "integrity": "sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==", 58 | "bin": { 59 | "uuid": "dist/bin/uuid" 60 | } 61 | } 62 | } 63 | } 64 | 
-------------------------------------------------------------------------------- /clients/chat-room/docker/nodejs/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "chat-room", 3 | "version": "1.0.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "chat-room", 9 | "version": "1.0.0", 10 | "license": "ISC", 11 | "dependencies": { 12 | "kafkajs": "^2.2.4", 13 | "uuid": "^9.0.0" 14 | }, 15 | "devDependencies": { 16 | "@types/node": "^20.4.4", 17 | "@types/uuid": "^9.0.2", 18 | "typescript": "^5.1.6" 19 | } 20 | }, 21 | "node_modules/@types/node": { 22 | "version": "20.4.4", 23 | "resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.4.tgz", 24 | "integrity": "sha512-CukZhumInROvLq3+b5gLev+vgpsIqC2D0deQr/yS1WnxvmYLlJXZpaQrQiseMY+6xusl79E04UjWoqyr+t1/Ew==", 25 | "dev": true 26 | }, 27 | "node_modules/@types/uuid": { 28 | "version": "9.0.2", 29 | "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.2.tgz", 30 | "integrity": "sha512-kNnC1GFBLuhImSnV7w4njQkUiJi0ZXUycu1rUaouPqiKlXkh77JKgdRnTAp1x5eBwcIwbtI+3otwzuIDEuDoxQ==", 31 | "dev": true 32 | }, 33 | "node_modules/kafkajs": { 34 | "version": "2.2.4", 35 | "resolved": "https://registry.npmjs.org/kafkajs/-/kafkajs-2.2.4.tgz", 36 | "integrity": "sha512-j/YeapB1vfPT2iOIUn/vxdyKEuhuY2PxMBvf5JWux6iSaukAccrMtXEY/Lb7OvavDhOWME589bpLrEdnVHjfjA==", 37 | "engines": { 38 | "node": ">=14.0.0" 39 | } 40 | }, 41 | "node_modules/typescript": { 42 | "version": "5.1.6", 43 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.6.tgz", 44 | "integrity": "sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==", 45 | "dev": true, 46 | "bin": { 47 | "tsc": "bin/tsc", 48 | "tsserver": "bin/tsserver" 49 | }, 50 | "engines": { 51 | "node": ">=14.17" 52 | } 53 | }, 54 | "node_modules/uuid": { 55 | "version": "9.0.0", 56 | "resolved": 
"https://registry.npmjs.org/uuid/-/uuid-9.0.0.tgz", 57 | "integrity": "sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==", 58 | "bin": { 59 | "uuid": "dist/bin/uuid" 60 | } 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /setup-tests/.doc-detective.json: -------------------------------------------------------------------------------- 1 | { 2 | "logLevel": "warning", 3 | "runTests": { 4 | "setup": "./fetch-versions-and-rpk.json", 5 | "contexts": [ 6 | { 7 | "app": { 8 | "name": "firefox", 9 | "options": { 10 | "viewport_width": 1366, 11 | "viewport_height": 700, 12 | "headless": true 13 | } 14 | }, 15 | "platforms": [ 16 | "linux", 17 | "mac" 18 | ] 19 | } 20 | ] 21 | }, 22 | "fileTypes": [ 23 | { 24 | "name": "Markdown", 25 | "extensions": [".md"], 26 | "testStartStatementOpen": "[comment]: # (test start", 27 | "testStartStatementClose": ")", 28 | "testIgnoreStatement": "[comment]: # (test ignore)", 29 | "testEndStatement": "[comment]: # (test end)", 30 | "stepStatementOpen": "[comment]: # (step", 31 | "stepStatementClose": ")", 32 | "markup": [ 33 | { 34 | "name": "Hyperlink", 35 | "regex": ["(?<=(? 
String { 69 | println!("Please enter your username:"); 70 | let stdin = std::io::stdin(); 71 | let mut handle = stdin.lock(); 72 | let mut username = String::new(); 73 | handle 74 | .read_line(&mut username) 75 | .expect("Failed to read username"); 76 | username.trim().to_string() 77 | } 78 | -------------------------------------------------------------------------------- /clients/chat-room/docker/rust/src/consumer.rs: -------------------------------------------------------------------------------- 1 | use rdkafka::config::ClientConfig; 2 | use rdkafka::consumer::{Consumer, StreamConsumer}; 3 | use rdkafka::message::Message; 4 | use serde::{Deserialize, Serialize}; 5 | use tokio_stream::StreamExt; 6 | 7 | #[derive(Serialize, Deserialize, Debug)] 8 | pub struct ChatMessage { 9 | pub username: String, 10 | pub message: String, 11 | } 12 | 13 | pub struct ChatConsumer { 14 | consumer: StreamConsumer, 15 | } 16 | 17 | impl ChatConsumer { 18 | pub fn new(broker: &str, topic: &str, group_id: &str) -> Self { 19 | let consumer: StreamConsumer = ClientConfig::new() 20 | .set("bootstrap.servers", broker) 21 | .set("group.id", group_id) 22 | .set("auto.offset.reset", "earliest") 23 | .create() 24 | .expect("Consumer creation failed"); 25 | 26 | consumer 27 | .subscribe(&[topic]) 28 | .expect("Subscribing to topic failed"); 29 | 30 | ChatConsumer { consumer } 31 | } 32 | 33 | pub async fn consume_messages(&self) { 34 | let mut stream = self.consumer.stream(); 35 | 36 | while let Some(result) = stream.next().await { 37 | match result { 38 | Ok(message) => { 39 | let payload = match message.payload_view::() { 40 | Some(Ok(payload)) => payload, 41 | Some(Err(e)) => { 42 | eprintln!("Error while deserializing message payload: {:?}", e); 43 | continue; 44 | } 45 | None => { 46 | eprintln!("Failed to get message payload"); 47 | continue; 48 | } 49 | }; 50 | 51 | match serde_json::from_str::(payload) { 52 | Ok(chat_message) => { 53 | let message = 54 | format!("{}: {}", 
chat_message.username, chat_message.message); 55 | println!("{}", message); 56 | } 57 | Err(e) => { 58 | eprintln!("Error while deserializing message payload: {:?}", e); 59 | continue; 60 | } 61 | } 62 | } 63 | Err(error) => { 64 | eprintln!("Panda error: {}", error); 65 | } 66 | } 67 | } 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /docker-compose/single-broker/docker-compose.yml: -------------------------------------------------------------------------------- 1 | name: redpanda-quickstart-one-broker 2 | networks: 3 | redpanda_network: 4 | driver: bridge 5 | volumes: 6 | redpanda-0: null 7 | services: 8 | redpanda-0: 9 | command: 10 | - redpanda 11 | - start 12 | - --kafka-addr internal://0.0.0.0:9092,external://0.0.0.0:19092 13 | # Address the broker advertises to clients that connect to the Kafka API. 14 | # Use the internal addresses to connect to the Redpanda brokers' 15 | # from inside the same Docker network. 16 | # Use the external addresses to connect to the Redpanda brokers' 17 | # from outside the Docker network. 18 | - --advertise-kafka-addr internal://redpanda-0:9092,external://localhost:19092 19 | - --pandaproxy-addr internal://0.0.0.0:8082,external://0.0.0.0:18082 20 | # Address the broker advertises to clients that connect to the HTTP Proxy. 21 | - --advertise-pandaproxy-addr internal://redpanda-0:8082,external://localhost:18082 22 | - --schema-registry-addr internal://0.0.0.0:8081,external://0.0.0.0:18081 23 | # Redpanda brokers use the RPC API to communicate with each other internally. 24 | - --rpc-addr redpanda-0:33145 25 | - --advertise-rpc-addr redpanda-0:33145 26 | # Mode dev-container uses well-known configuration properties for development in containers. 27 | - --mode dev-container 28 | # Tells Seastar (the framework Redpanda uses under the hood) to use 1 core on the system. 
29 | - --smp 1 30 | - --default-log-level=info 31 | image: docker.redpanda.com/redpandadata/redpanda:${REDPANDA_VERSION:-latest} 32 | container_name: redpanda-0 33 | volumes: 34 | - redpanda-0:/var/lib/redpanda/data 35 | networks: 36 | - redpanda_network 37 | ports: 38 | - 18081:18081 39 | - 18082:18082 40 | - 19092:19092 41 | - 19644:9644 42 | console: 43 | container_name: redpanda-console 44 | image: docker.redpanda.com/redpandadata/console:${REDPANDA_CONSOLE_VERSION:-latest} 45 | networks: 46 | - redpanda_network 47 | entrypoint: /bin/sh 48 | command: -c 'echo "$$CONSOLE_CONFIG_FILE" > /tmp/config.yml; /app/console' 49 | environment: 50 | CONFIG_FILEPATH: /tmp/config.yml 51 | CONSOLE_CONFIG_FILE: | 52 | kafka: 53 | brokers: ["redpanda-0:9092"] 54 | schemaRegistry: 55 | enabled: true 56 | urls: ["http://redpanda-0:8081"] 57 | redpanda: 58 | adminApi: 59 | enabled: true 60 | urls: ["http://redpanda-0:9644"] 61 | ports: 62 | - 8080:8080 63 | depends_on: 64 | - redpanda-0 65 | -------------------------------------------------------------------------------- /kubernetes/iceberg/spark/notebooks/Iceberg - Query Redpanda Table.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "1041ae6f", 6 | "metadata": {}, 7 | "source": [ 8 | "![iceberg-logo](https://www.apache.org/logos/res/iceberg/iceberg.png)" 9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "id": "247fb2ab", 14 | "metadata": {}, 15 | "source": [ 16 | "## [Docker, Redpanda, Spark, and Iceberg: The fastest way to try Iceberg!](https://tabular.io/blog/docker-spark-and-iceberg/)" 17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "id": "bb47ca52", 22 | "metadata": {}, 23 | "source": [ 24 | "Follow the steps below to query Iceberg tables created from Redpanda topics.\n", 25 | "\n", 26 | "Note: Before running this notebook, ensure you have completed the setup steps in the docs, including running the Docker 
Compose environment and producing data to the topics." 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "id": "bf49ffd7", 32 | "metadata": {}, 33 | "source": [ 34 | "To interact with the Iceberg tables, start by initializing a Spark session. Run the following code to get started:" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": null, 40 | "id": "6a5c8206", 41 | "metadata": {}, 42 | "outputs": [], 43 | "source": [ 44 | "from pyspark.sql import SparkSession\n", 45 | "spark = SparkSession.builder.appName(\"Jupyter\").getOrCreate()\n", 46 | "\n", 47 | "spark" 48 | ] 49 | }, 50 | { 51 | "cell_type": "markdown", 52 | "id": "6f9a9f41", 53 | "metadata": {}, 54 | "source": [ 55 | "## Query Iceberg data from Redpanda\n", 56 | "\n", 57 | "Iceberg tables are automatically created in the `redpanda` database based on the Redpanda topics. Query the `key_value` topic:" 58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": null, 63 | "id": "930682ce", 64 | "metadata": {}, 65 | "outputs": [], 66 | "source": [ 67 | "%%sql\n", 68 | "\n", 69 | "SELECT * FROM lab.redpanda.key_value" 70 | ] 71 | } 72 | ], 73 | "metadata": { 74 | "kernelspec": { 75 | "display_name": "Python 3 (ipykernel)", 76 | "language": "python", 77 | "name": "python3" 78 | }, 79 | "language_info": { 80 | "codemirror_mode": { 81 | "name": "ipython", 82 | "version": 3 83 | }, 84 | "file_extension": ".py", 85 | "mimetype": "text/x-python", 86 | "name": "python", 87 | "nbconvert_exporter": "python", 88 | "pygments_lexer": "ipython3", 89 | "version": "3.9.12" 90 | } 91 | }, 92 | "nbformat": 4, 93 | "nbformat_minor": 5 94 | } 95 | -------------------------------------------------------------------------------- /data-transforms/js/csv-json/dist/source.wat: -------------------------------------------------------------------------------- 1 | 2 | (module 3 | (memory (import "js_vm" "memory") 0) 4 | 5 | (func (export "file_length") (result i32) 6 | (i32.const 732) 7 | ) 8 | 9 | 
(func (export "get_file") (param $buffer_ptr i32) 10 | ;; Copy the source from data segment 0. 11 | (memory.init $source 12 | (local.get $buffer_ptr) ;; Memory destination 13 | (i32.const 0) ;; Start index 14 | (i32.const 732) ;; Length 15 | ) 16 | (data.drop $source) 17 | ) 18 | (data $source "\2f\2f\20\73\72\63\2f\69\6e\64\65\78\2e\6a\73\0a\69\6d\70\6f\72\74\20\7b\20\6f\6e\52\65\63\6f\72\64\57\72\69\74\74\65\6e\20\7d\20\66\72\6f\6d\20\22\40\72\65\64\70\61\6e\64\61\2d\64\61\74\61\2f\74\72\61\6e\73\66\6f\72\6d\2d\73\64\6b\22\3b\0a\6f\6e\52\65\63\6f\72\64\57\72\69\74\74\65\6e\28\63\73\76\54\6f\4a\73\6f\6e\54\72\61\6e\73\66\6f\72\6d\29\3b\0a\66\75\6e\63\74\69\6f\6e\20\63\73\76\54\6f\4a\73\6f\6e\54\72\61\6e\73\66\6f\72\6d\28\65\76\65\6e\74\2c\20\77\72\69\74\65\72\29\20\7b\0a\20\20\63\6f\6e\73\74\20\69\6e\70\75\74\20\3d\20\65\76\65\6e\74\2e\72\65\63\6f\72\64\2e\76\61\6c\75\65\2e\74\65\78\74\28\29\3b\0a\20\20\63\6f\6e\73\74\20\72\6f\77\73\20\3d\20\69\6e\70\75\74\2e\73\70\6c\69\74\28\22\5c\6e\22\29\3b\0a\20\20\63\6f\6e\73\6f\6c\65\2e\6c\6f\67\28\72\6f\77\73\29\3b\0a\20\20\66\6f\72\20\28\63\6f\6e\73\74\20\72\6f\77\20\6f\66\20\72\6f\77\73\29\20\7b\0a\20\20\20\20\63\6f\6e\73\74\20\63\6f\6c\75\6d\6e\73\20\3d\20\72\6f\77\2e\73\70\6c\69\74\28\22\2c\22\29\3b\0a\20\20\20\20\63\6f\6e\73\6f\6c\65\2e\6c\6f\67\28\63\6f\6c\75\6d\6e\73\29\3b\0a\20\20\20\20\69\66\20\28\63\6f\6c\75\6d\6e\73\2e\6c\65\6e\67\74\68\20\21\3d\3d\20\32\29\20\7b\0a\20\20\20\20\20\20\74\68\72\6f\77\20\6e\65\77\20\45\72\72\6f\72\28\22\75\6e\65\78\70\65\63\74\65\64\20\6e\75\6d\62\65\72\20\6f\66\20\63\6f\6c\75\6d\6e\73\22\29\3b\0a\20\20\20\20\7d\0a\20\20\20\20\63\6f\6e\73\74\20\71\75\61\6e\74\69\74\79\20\3d\20\70\61\72\73\65\49\6e\74\28\63\6f\6c\75\6d\6e\73\5b\31\5d\2c\20\31\30\29\3b\0a\20\20\20\20\69\66\20\28\69\73\4e\61\4e\28\71\75\61\6e\74\69\74\79\29\29\20\7b\0a\20\20\20\20\20\20\74\68\72\6f\77\20\6e\65\77\20\45\72\72\6f\72\28\22\69\6e\76\61\6c\69\64\20\71\75\61\6e\74\69\74\79\22\29\3b\0a\20\20\20\20\7d\0a\20\20
\20\20\63\6f\6e\73\74\20\69\74\65\6d\51\75\61\6e\74\69\74\79\20\3d\20\7b\0a\20\20\20\20\20\20\69\74\65\6d\3a\20\63\6f\6c\75\6d\6e\73\5b\30\5d\2c\0a\20\20\20\20\20\20\71\75\61\6e\74\69\74\79\0a\20\20\20\20\7d\3b\0a\20\20\20\20\65\76\65\6e\74\2e\72\65\63\6f\72\64\2e\76\61\6c\75\65\20\3d\20\4a\53\4f\4e\2e\73\74\72\69\6e\67\69\66\79\28\69\74\65\6d\51\75\61\6e\74\69\74\79\29\3b\0a\20\20\20\20\77\72\69\74\65\72\2e\77\72\69\74\65\28\65\76\65\6e\74\2e\72\65\63\6f\72\64\29\3b\0a\20\20\7d\0a\7d\0a")) 19 | -------------------------------------------------------------------------------- /docs/modules/docker-compose/attachments/single-broker/docker-compose.yml: -------------------------------------------------------------------------------- 1 | name: redpanda-quickstart-one-broker 2 | networks: 3 | redpanda_network: 4 | driver: bridge 5 | volumes: 6 | redpanda-0: null 7 | services: 8 | redpanda-0: 9 | command: 10 | - redpanda 11 | - start 12 | - --kafka-addr internal://0.0.0.0:9092,external://0.0.0.0:19092 13 | # Address the broker advertises to clients that connect to the Kafka API. 14 | # Use the internal addresses to connect to the Redpanda brokers' 15 | # from inside the same Docker network. 16 | # Use the external addresses to connect to the Redpanda brokers' 17 | # from outside the Docker network. 18 | - --advertise-kafka-addr internal://redpanda-0:9092,external://localhost:19092 19 | - --pandaproxy-addr internal://0.0.0.0:8082,external://0.0.0.0:18082 20 | # Address the broker advertises to clients that connect to the HTTP Proxy. 21 | - --advertise-pandaproxy-addr internal://redpanda-0:8082,external://localhost:18082 22 | - --schema-registry-addr internal://0.0.0.0:8081,external://0.0.0.0:18081 23 | # Redpanda brokers use the RPC API to communicate with each other internally. 24 | - --rpc-addr redpanda-0:33145 25 | - --advertise-rpc-addr redpanda-0:33145 26 | # Mode dev-container uses well-known configuration properties for development in containers. 
27 | - --mode dev-container 28 | # Tells Seastar (the framework Redpanda uses under the hood) to use 1 core on the system. 29 | - --smp 1 30 | - --default-log-level=info 31 | image: docker.redpanda.com/redpandadata/redpanda:${REDPANDA_VERSION:-latest} 32 | container_name: redpanda-0 33 | volumes: 34 | - redpanda-0:/var/lib/redpanda/data 35 | networks: 36 | - redpanda_network 37 | ports: 38 | - 18081:18081 39 | - 18082:18082 40 | - 19092:19092 41 | - 19644:9644 42 | console: 43 | container_name: redpanda-console 44 | image: docker.redpanda.com/redpandadata/console:${REDPANDA_CONSOLE_VERSION:-latest} 45 | networks: 46 | - redpanda_network 47 | entrypoint: /bin/sh 48 | command: -c 'echo "$$CONSOLE_CONFIG_FILE" > /tmp/config.yml; /app/console' 49 | environment: 50 | CONFIG_FILEPATH: /tmp/config.yml 51 | CONSOLE_CONFIG_FILE: | 52 | kafka: 53 | brokers: ["redpanda-0:9092"] 54 | schemaRegistry: 55 | enabled: true 56 | urls: ["http://redpanda-0:8081"] 57 | redpanda: 58 | adminApi: 59 | enabled: true 60 | urls: ["http://redpanda-0:9644"] 61 | ports: 62 | - 8080:8080 63 | depends_on: 64 | - redpanda-0 65 | -------------------------------------------------------------------------------- /docker-compose/cdc/mysql-json/docker-compose.yml: -------------------------------------------------------------------------------- 1 | name: redpanda-cdc-mysql 2 | volumes: 3 | redpanda: null 4 | services: 5 | mysql: 6 | image: mysql/mysql-server:8.0.27 7 | hostname: mysql 8 | container_name: mysql 9 | ports: 10 | - 3306:3306 11 | environment: 12 | - MYSQL_ROOT_PASSWORD=debezium 13 | - MYSQL_USER=mysqluser 14 | - MYSQL_PASSWORD=mysqlpw 15 | volumes: 16 | - ./data/mysql.cnf:/etc/mysql/conf.d 17 | - ./data/mysql_bootstrap.sql:/docker-entrypoint-initdb.d/mysql_bootstrap.sql 18 | redpanda: 19 | image: docker.redpanda.com/redpandadata/redpanda:${REDPANDA_VERSION:-latest} 20 | container_name: redpanda 21 | command: 22 | - redpanda start 23 | # Mode dev-container uses well-known configuration 
properties for development in containers. 24 | - --mode dev-container 25 | # Tells Seastar (the framework Redpanda uses under the hood) to use 1 core on the system. 26 | - --smp 1 27 | - --kafka-addr internal://0.0.0.0:9092,external://0.0.0.0:19092 28 | # Address the broker advertises to clients that connect to the Kafka API. 29 | # Use the internal addresses to connect to the Redpanda brokers 30 | # from inside the same Docker network. 31 | # Use the external addresses to connect to the Redpanda brokers 32 | # from outside the Docker network. 33 | - --advertise-kafka-addr internal://redpanda:9092,external://localhost:19092 34 | - --pandaproxy-addr internal://0.0.0.0:8082,external://0.0.0.0:18082 35 | # Address the broker advertises to clients that connect to the HTTP Proxy. 36 | - --advertise-pandaproxy-addr internal://redpanda:8082,external://localhost:18082 37 | - --schema-registry-addr internal://0.0.0.0:8081,external://0.0.0.0:18081 38 | # Redpanda brokers use the RPC API to communicate with each other internally. 
39 | - --rpc-addr redpanda:33145 40 | - --advertise-rpc-addr redpanda:33145 41 | ports: 42 | - 18081:18081 43 | - 18082:18082 44 | - 19092:19092 45 | - 19644:9644 46 | volumes: 47 | - redpanda:/var/lib/redpanda/data 48 | healthcheck: 49 | test: ["CMD-SHELL", "rpk cluster health | grep -E 'Healthy:.+true' || exit 1"] 50 | interval: 15s 51 | timeout: 3s 52 | retries: 5 53 | start_period: 5s 54 | debezium: 55 | image: debezium/connect:2.4 56 | container_name: debezium 57 | environment: 58 | BOOTSTRAP_SERVERS: redpanda:9092 59 | GROUP_ID: 1 60 | CONFIG_STORAGE_TOPIC: connect_configs 61 | OFFSET_STORAGE_TOPIC: connect_offsets 62 | depends_on: [mysql, redpanda] 63 | ports: 64 | - 8083:8083 -------------------------------------------------------------------------------- /.github/workflows/update-deps.yml: -------------------------------------------------------------------------------- 1 | name: depup 2 | 3 | on: 4 | schedule: 5 | - cron: '14 14 * * *' 6 | workflow_dispatch: 7 | 8 | jobs: 9 | generate-matrix: 10 | runs-on: ubuntu-latest 11 | outputs: 12 | files: ${{ steps.set-matrix.outputs.files }} 13 | steps: 14 | - uses: actions/checkout@v4 15 | with: 16 | fetch-depth: 1 17 | 18 | - name: Gather all README.adoc paths 19 | id: set-matrix 20 | run: | 21 | echo "::group::Scanning for README.adoc…" 22 | files=$(find data-transforms -type f -name 'README.adoc' \ 23 | | jq -R -s -c 'split("\n")[:-1]') 24 | echo "files=$files" >> "$GITHUB_OUTPUT" 25 | echo "::endgroup::" 26 | 27 | reviewdog: 28 | needs: generate-matrix 29 | runs-on: ubuntu-latest 30 | concurrency: 31 | group: depup-${{ matrix.repo }}-${{ matrix.file }} 32 | cancel-in-progress: true 33 | strategy: 34 | matrix: 35 | repo: 36 | - redpanda-data/redpanda 37 | - redpanda-data/console 38 | file: ${{ fromJson(needs.generate-matrix.outputs.files) }} 39 | steps: 40 | - uses: actions/checkout@v4 41 | with: 42 | fetch-depth: 1 43 | 44 | - name: Run reviewdog depup 45 | id: depup 46 | uses: reviewdog/action-depup@v1 47 | 
with: 48 | repo: ${{ matrix.repo }} 49 | file: ${{ matrix.file }} 50 | # pick the right version_name per repo 51 | version_name: ${{ matrix.repo == 'redpanda-data/redpanda' && 'latest-redpanda-version' || 'latest-console-version' }} 52 | 53 | - name: Sanitize & trim branch name 54 | id: sanitize 55 | run: | 56 | sanitized=$(echo "${{ matrix.file }}" \ 57 | | tr '/.' '-' \ 58 | | sed 's/[^A-Za-z0-9_-]//g' \ 59 | | cut -c 1-50) 60 | echo "branch_name=depup-${sanitized}" >> "$GITHUB_ENV" 61 | 62 | - name: Create pull request 63 | if: steps.depup.outputs.updated == 'true' 64 | uses: peter-evans/create-pull-request@v6 65 | with: 66 | token: ${{ secrets.GITHUB_TOKEN }} 67 | branch: ${{ env.branch_name }} 68 | title: | 69 | chore(deps): update ${{ matrix.repo }} → ${{ steps.depup.outputs.latest }} 70 | commit-message: | 71 | chore(deps): update ${{ matrix.repo }} → ${{ steps.depup.outputs.latest }} 72 | body: | 73 | This bumps **${{ matrix.repo }}** 74 | from ${{ steps.depup.outputs.current }} → ${{ steps.depup.outputs.latest }}. 75 | 76 | Auto-generated by the depup workflow. 
77 | reviewers: JakeSCahill 78 | -------------------------------------------------------------------------------- /docs/modules/docker-compose/attachments/cdc/mysql-json/docker-compose.yml: -------------------------------------------------------------------------------- 1 | name: redpanda-cdc-mysql 2 | volumes: 3 | redpanda: null 4 | services: 5 | mysql: 6 | image: mysql/mysql-server:8.0.27 7 | hostname: mysql 8 | container_name: mysql 9 | ports: 10 | - 3306:3306 11 | environment: 12 | - MYSQL_ROOT_PASSWORD=debezium 13 | - MYSQL_USER=mysqluser 14 | - MYSQL_PASSWORD=mysqlpw 15 | volumes: 16 | - ./data/mysql.cnf:/etc/mysql/conf.d 17 | - ./data/mysql_bootstrap.sql:/docker-entrypoint-initdb.d/mysql_bootstrap.sql 18 | redpanda: 19 | image: docker.redpanda.com/redpandadata/redpanda:${REDPANDA_VERSION:-latest} 20 | container_name: redpanda 21 | command: 22 | - redpanda start 23 | # Mode dev-container uses well-known configuration properties for development in containers. 24 | - --mode dev-container 25 | # Tells Seastar (the framework Redpanda uses under the hood) to use 1 core on the system. 26 | - --smp 1 27 | - --kafka-addr internal://0.0.0.0:9092,external://0.0.0.0:19092 28 | # Address the broker advertises to clients that connect to the Kafka API. 29 | # Use the internal addresses to connect to the Redpanda brokers 30 | # from inside the same Docker network. 31 | # Use the external addresses to connect to the Redpanda brokers 32 | # from outside the Docker network. 33 | - --advertise-kafka-addr internal://redpanda:9092,external://localhost:19092 34 | - --pandaproxy-addr internal://0.0.0.0:8082,external://0.0.0.0:18082 35 | # Address the broker advertises to clients that connect to the HTTP Proxy. 36 | - --advertise-pandaproxy-addr internal://redpanda:8082,external://localhost:18082 37 | - --schema-registry-addr internal://0.0.0.0:8081,external://0.0.0.0:18081 38 | # Redpanda brokers use the RPC API to communicate with each other internally. 
39 | - --rpc-addr redpanda:33145 40 | - --advertise-rpc-addr redpanda:33145 41 | ports: 42 | - 18081:18081 43 | - 18082:18082 44 | - 19092:19092 45 | - 19644:9644 46 | volumes: 47 | - redpanda:/var/lib/redpanda/data 48 | healthcheck: 49 | test: ["CMD-SHELL", "rpk cluster health | grep -E 'Healthy:.+true' || exit 1"] 50 | interval: 15s 51 | timeout: 3s 52 | retries: 5 53 | start_period: 5s 54 | debezium: 55 | image: debezium/connect:2.4 56 | container_name: debezium 57 | environment: 58 | BOOTSTRAP_SERVERS: redpanda:9092 59 | GROUP_ID: 1 60 | CONFIG_STORAGE_TOPIC: connect_configs 61 | OFFSET_STORAGE_TOPIC: connect_offsets 62 | depends_on: [mysql, redpanda] 63 | ports: 64 | - 8083:8083 -------------------------------------------------------------------------------- /clients/chat-room/cloud/java/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 4.0.0 6 | 7 | com.example 8 | chat-room 9 | 1.0-SNAPSHOT 10 | 11 | chat-room 12 | 13 | 14 | UTF-8 15 | 11 16 | 11 17 | 18 | 19 | 20 | 21 | junit 22 | junit 23 | 4.11 24 | test 25 | 26 | 27 | org.apache.kafka 28 | kafka-clients 29 | 3.4.0 30 | 31 | 32 | com.google.code.gson 33 | gson 34 | 2.8.9 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | maven-clean-plugin 43 | 3.1.0 44 | 45 | 46 | maven-resources-plugin 47 | 3.0.2 48 | 49 | 50 | maven-compiler-plugin 51 | 3.8.0 52 | 53 | 54 | maven-surefire-plugin 55 | 2.22.1 56 | 57 | 58 | maven-jar-plugin 59 | 3.0.2 60 | 61 | 62 | maven-install-plugin 63 | 2.5.2 64 | 65 | 66 | maven-deploy-plugin 67 | 2.8.2 68 | 69 | 70 | maven-site-plugin 71 | 3.7.1 72 | 73 | 74 | maven-project-info-reports-plugin 75 | 3.0.0 76 | 77 | 78 | 79 | 80 | 81 | -------------------------------------------------------------------------------- /clients/chat-room/docker/java/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 4.0.0 6 | 7 | com.example 8 | chat-room 9 | 1.0-SNAPSHOT 10 | 11 | chat-room 12 | 13 | 14 
| UTF-8 15 | 11 16 | 11 17 | 18 | 19 | 20 | 21 | junit 22 | junit 23 | 4.11 24 | test 25 | 26 | 27 | org.apache.kafka 28 | kafka-clients 29 | 3.4.0 30 | 31 | 32 | com.google.code.gson 33 | gson 34 | 2.8.9 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | maven-clean-plugin 43 | 3.1.0 44 | 45 | 46 | maven-resources-plugin 47 | 3.0.2 48 | 49 | 50 | maven-compiler-plugin 51 | 3.8.0 52 | 53 | 54 | maven-surefire-plugin 55 | 2.22.1 56 | 57 | 58 | maven-jar-plugin 59 | 3.0.2 60 | 61 | 62 | maven-install-plugin 63 | 2.5.2 64 | 65 | 66 | maven-deploy-plugin 67 | 2.8.2 68 | 69 | 70 | maven-site-plugin 71 | 3.7.1 72 | 73 | 74 | maven-project-info-reports-plugin 75 | 3.0.0 76 | 77 | 78 | 79 | 80 | 81 | -------------------------------------------------------------------------------- /docker-compose/cdc/postgres-json/docker-compose.yml: -------------------------------------------------------------------------------- 1 | name: redpanda-cdc-postgres 2 | volumes: 3 | redpanda: null 4 | services: 5 | postgres: 6 | image: debezium/postgres:16 7 | container_name: postgres 8 | ports: 9 | - 5432:5432 10 | healthcheck: 11 | test: "pg_isready -U postgresuser -d pandashop" 12 | interval: 2s 13 | timeout: 20s 14 | retries: 10 15 | environment: 16 | - POSTGRES_USER=postgresuser 17 | - POSTGRES_PASSWORD=postgrespw 18 | - POSTGRES_DB=pandashop 19 | - PGPASSWORD=postgrespw 20 | volumes: 21 | - ./data:/docker-entrypoint-initdb.d 22 | redpanda: 23 | image: docker.redpanda.com/redpandadata/redpanda:${REDPANDA_VERSION:-latest} 24 | container_name: redpanda 25 | command: 26 | - redpanda start 27 | # Mode dev-container uses well-known configuration properties for development in containers. 28 | - --mode dev-container 29 | # Tells Seastar (the framework Redpanda uses under the hood) to use 1 core on the system. 30 | - --smp 1 31 | - --kafka-addr internal://0.0.0.0:9092,external://0.0.0.0:19092 32 | # Address the broker advertises to clients that connect to the Kafka API. 
33 | # Use the internal addresses to connect to the Redpanda brokers 34 | # from inside the same Docker network. 35 | # Use the external addresses to connect to the Redpanda brokers 36 | # from outside the Docker network. 37 | - --advertise-kafka-addr internal://redpanda:9092,external://localhost:19092 38 | - --pandaproxy-addr internal://0.0.0.0:8082,external://0.0.0.0:18082 39 | # Address the broker advertises to clients that connect to the HTTP Proxy. 40 | - --advertise-pandaproxy-addr internal://redpanda:8082,external://localhost:18082 41 | - --schema-registry-addr internal://0.0.0.0:8081,external://0.0.0.0:18081 42 | # Redpanda brokers use the RPC API to communicate with each other internally. 43 | - --rpc-addr redpanda:33145 44 | - --advertise-rpc-addr redpanda:33145 45 | ports: 46 | - 18081:18081 47 | - 18082:18082 48 | - 19092:19092 49 | - 19644:9644 50 | volumes: 51 | - redpanda:/var/lib/redpanda/data 52 | healthcheck: 53 | test: ["CMD-SHELL", "rpk cluster health | grep -E 'Healthy:.+true' || exit 1"] 54 | interval: 15s 55 | timeout: 3s 56 | retries: 5 57 | start_period: 5s 58 | debezium: 59 | image: debezium/connect:2.4 60 | container_name: debezium 61 | environment: 62 | BOOTSTRAP_SERVERS: redpanda:9092 63 | GROUP_ID: 1 64 | CONFIG_STORAGE_TOPIC: connect_configs 65 | OFFSET_STORAGE_TOPIC: connect_offsets 66 | depends_on: [postgres, redpanda] 67 | ports: 68 | - 8083:8083 69 | -------------------------------------------------------------------------------- /clients/chat-room/cloud/java/src/main/java/com/example/Admin.java: -------------------------------------------------------------------------------- 1 | package com.example; 2 | 3 | import org.apache.kafka.clients.admin.AdminClient; 4 | import org.apache.kafka.clients.admin.NewTopic; 5 | import org.apache.kafka.clients.admin.AdminClientConfig; 6 | import org.apache.kafka.clients.CommonClientConfigs; 7 | import org.apache.kafka.clients.producer.ProducerConfig; 8 | import 
org.apache.kafka.common.config.SaslConfigs; 9 | import org.apache.kafka.common.serialization.StringDeserializer; 10 | import org.apache.kafka.common.serialization.StringSerializer; 11 | import java.util.Collections; 12 | import java.util.Properties; 13 | 14 | public class Admin { 15 | private static final String BOOTSTRAP_SERVERS = ""; 16 | private static final String SASL_USERNAME = "redpanda-chat-account"; 17 | private static final String SASL_PASSWORD = ""; 18 | public static Properties getProducerProps() { 19 | Properties props = getAdminProps(); 20 | props.put("key.serializer", StringSerializer.class.getName()); 21 | props.put("value.serializer", StringSerializer.class.getName()); 22 | return props; 23 | } 24 | public static Properties getConsumerProps(String groupId) { 25 | Properties props = getAdminProps(); 26 | props.put("group.id", groupId); 27 | props.put("key.deserializer", StringDeserializer.class.getName()); 28 | props.put("value.deserializer", StringDeserializer.class.getName()); 29 | return props; 30 | } 31 | public static boolean topicExists(String topicName) { 32 | Properties props = getAdminProps(); 33 | try (AdminClient client = AdminClient.create(props)) { 34 | return client.listTopics().names().get().contains(topicName); 35 | } catch (Exception e) { 36 | throw new RuntimeException(e); 37 | } 38 | } 39 | public static void createTopic(String topicName) { 40 | Properties props = getAdminProps(); 41 | try (AdminClient client = AdminClient.create(props)) { 42 | NewTopic newTopic = new NewTopic(topicName, 1, (short) 1); 43 | client.createTopics(Collections.singletonList(newTopic)); 44 | } catch (Exception e) { 45 | throw new RuntimeException(e); 46 | } 47 | } 48 | private static Properties getAdminProps() { 49 | Properties props = new Properties(); 50 | props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS); 51 | props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL"); 52 | props.put(SaslConfigs.SASL_MECHANISM, 
"SCRAM-SHA-256"); 53 | props.put(SaslConfigs.SASL_JAAS_CONFIG, 54 | "org.apache.kafka.common.security.scram.ScramLoginModule required username=\"" 55 | + SASL_USERNAME + "\" password=\"" + SASL_PASSWORD + "\";"); 56 | return props; 57 | } 58 | } --------------------------------------------------------------------------------