├── load-test-framework ├── src │ └── main │ │ ├── proto │ │ └── loadtest.proto │ │ ├── resources │ │ ├── log4j.properties │ │ └── gce │ │ │ ├── cps-gcloud-java-publisher_startup_script.sh │ │ │ ├── cps-gcloud-java-subscriber_startup_script.sh │ │ │ ├── cps-gcloud-node-subscriber_startup_script.sh │ │ │ ├── cps-gcloud-go-publisher_startup_script.sh │ │ │ ├── cps-gcloud-go-subscriber_startup_script.sh │ │ │ ├── cps-gcloud-node-publisher_startup_script.sh │ │ │ ├── cps-gcloud-python-publisher_startup_script.sh │ │ │ └── cps-gcloud-python-subscriber_startup_script.sh │ │ └── java │ │ └── com │ │ └── google │ │ └── pubsub │ │ ├── clients │ │ ├── flow_control │ │ │ ├── FlowController.java │ │ │ └── RateLimiterFlowController.java │ │ └── common │ │ │ ├── LoadtestTask.java │ │ │ ├── LogEveryN.java │ │ │ └── ShardedBlockingQueue.java │ │ └── flic │ │ ├── controllers │ │ ├── resource_controllers │ │ │ ├── ComputeResourceController.java │ │ │ └── ResourceController.java │ │ ├── test_parameters │ │ │ ├── TestParameterProvider.java │ │ │ ├── CoreScalingTestParameterProvider.java │ │ │ ├── MessageSizeScalingTestParameterProvider.java │ │ │ ├── StandardParameters.java │ │ │ ├── ScalingFactorTestParameterProvider.java │ │ │ └── TestParameterProviderConverter.java │ │ ├── ClientParams.java │ │ ├── ClientType.java │ │ └── Controller.java │ │ ├── common │ │ └── StatsUtils.java │ │ └── output │ │ └── ResultsOutput.java ├── python_src │ ├── requirements.txt │ ├── clients │ │ ├── flow_control │ │ │ ├── __init__.py │ │ │ ├── flow_controller.py │ │ │ ├── rate_limiter_flow_controller.py │ │ │ └── outstanding_count_flow_controller.py │ │ ├── __init__.py │ │ ├── to_float_seconds.py │ │ └── loadtest_worker_servicer.py │ ├── __init__.py │ └── proto_dir │ │ └── __init__.py ├── wiki │ └── diagram.png ├── node_src │ ├── package.json │ └── src │ │ ├── publisher_task_main.js │ │ ├── subscriber_task_main.js │ │ ├── settable_promise.js │ │ ├── flow_control │ │ ├── flow_controller.js │ │ └── rate_limiter_flow_controller.js │ │ ├── loadtest_service.js │ │ ├── main.js │ │ └── subscriber_task.js ├── go_src │ ├── go.mod │ ├── internal │ │ ├── util │ │ │ └── util.go │ │ ├── flow_control │ │ │ ├── flow_controller.go │ │ │ └── rate_limiter_flow_controller.go │ │ └── loadtest_service.go │ └── cmd │ │ └── main.go └── run.py ├── .kokoro ├── nightly │ ├── java8-osx.cfg │ ├── java8-win.cfg │ ├── java7.cfg │ ├── java11.cfg │ ├── java8.cfg │ ├── common.cfg │ ├── java11-integration.cfg │ ├── samples.cfg │ └── integration.cfg ├── presubmit │ ├── java8-osx.cfg │ ├── java8-win.cfg │ ├── java11.cfg │ ├── java7.cfg │ ├── clirr.cfg │ ├── java8.cfg │ ├── lint.cfg │ ├── dependencies.cfg │ ├── linkage-monitor.cfg │ ├── integration.cfg │ ├── graalvm-native.cfg │ ├── graalvm-native-17.cfg │ ├── samples.cfg │ └── common.cfg ├── requirements.in ├── common.cfg ├── build.bat ├── trampoline.sh ├── coerce_logs.sh ├── linkage-monitor.sh ├── populate-secrets.sh ├── common.sh └── dependencies.sh ├── sql-streaming-copier ├── src │ └── main │ │ └── java │ │ └── com │ │ └── google │ │ └── cloud │ │ └── pubsub │ │ └── sql │ │ ├── package-info.java │ │ ├── providers │ │ ├── package-info.java │ │ ├── StandardSinkProvider.java │ │ ├── StandardSourceProvider.java │ │ ├── BigQueryProvider.java │ │ ├── StandardSink.java │ │ ├── StandardSource.java │ │ ├── StandardSqlSource.java │ │ ├── StandardSqlSink.java │ │ └── PubsubLiteProvider.java │ │ ├── SqlStreamingOptions.java │ │ ├── UITemplateOptions.java │ │ ├── TemplateMain.java │ │ ├── UITemplateMain.java │ │ ├── 
MakePtransform.java │ │ ├── TableLoader.java │ │ ├── Rows.java │ │ ├── TemplateOptions.java │ │ ├── TableSpec.java │ │ └── RunPipeline.java ├── ui_metadata.json └── metadata.json ├── .gitignore ├── kafka-connector ├── config │ ├── cps-sink-connector.properties │ ├── cps-source-connector.properties │ ├── pubsub-lite-sink-connector.properties │ └── pubsub-lite-source-connector.properties └── src │ ├── main │ ├── resources │ │ └── log4j.properties │ └── java │ │ └── com │ │ └── google │ │ ├── pubsublite │ │ └── kafka │ │ │ ├── source │ │ │ ├── PollerFactory.java │ │ │ ├── Poller.java │ │ │ ├── PubSubLiteSourceConnector.java │ │ │ ├── PubSubLiteSourceTask.java │ │ │ ├── PollerFactoryImpl.java │ │ │ └── ConfigDefs.java │ │ │ └── sink │ │ │ ├── PublisherFactory.java │ │ │ ├── Constants.java │ │ │ ├── ConfigDefs.java │ │ │ ├── PubSubLiteSinkConnector.java │ │ │ └── PublisherFactoryImpl.java │ │ └── pubsub │ │ └── kafka │ │ ├── source │ │ ├── StreamingPullSubscriberFactory.java │ │ └── CloudPubSubSubscriber.java │ │ └── common │ │ └── ConnectorCredentialsProvider.java │ └── test │ └── java │ └── com │ └── google │ ├── pubsublite │ └── kafka │ │ └── source │ │ └── PubSubLiteSourceTaskTest.java │ └── pubsub │ └── kafka │ └── sink │ └── CloudPubSubSinkConnectorTest.java ├── .travis.yml ├── license-checks.xml ├── udfs ├── validate.js ├── redact.js ├── url_encode.js ├── url_decode.js ├── insert_field.js ├── insert_attributes.js ├── mask.js ├── timestamp_utc_converter.js ├── README.md ├── cast.js ├── filter_custom.js ├── to_hex.js ├── hoist.js ├── remove_whitespaces.js ├── upgrade_attribute.js ├── derive_field.js ├── filter_field_regex.js ├── rename_field.js ├── flatten_json.js ├── helper_number.js ├── timestamp_converter.js ├── helper_string.js └── unit_convert_distance.js ├── client └── README.md ├── java.header ├── .github └── workflows │ ├── new-release.yml │ └── ci.yaml ├── flink-connector ├── flink-connector-gcp-pubsub │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── google │ │ │ │ └── pubsub │ │ │ │ └── flink │ │ │ │ ├── internal │ │ │ │ ├── source │ │ │ │ │ ├── split │ │ │ │ │ │ ├── SubscriptionSplitState.java │ │ │ │ │ │ ├── SubscriptionSplitSerializer.java │ │ │ │ │ │ └── SubscriptionSplit.java │ │ │ │ │ ├── reader │ │ │ │ │ │ ├── NotifyingPullSubscriber.java │ │ │ │ │ │ ├── PubSubRecordEmitter.java │ │ │ │ │ │ └── AckTracker.java │ │ │ │ │ └── enumerator │ │ │ │ │ │ └── PubSubCheckpointSerializer.java │ │ │ │ └── sink │ │ │ │ │ ├── FlushablePublisher.java │ │ │ │ │ ├── PubSubSinkWriter.java │ │ │ │ │ ├── PubSubFlushablePublisher.java │ │ │ │ │ └── PubSubPublisherCache.java │ │ │ │ ├── PubSubSerializationSchema.java │ │ │ │ ├── util │ │ │ │ └── EmulatorEndpoint.java │ │ │ │ └── PubSubDeserializationSchema.java │ │ └── proto │ │ │ └── split.proto │ │ └── test │ │ └── java │ │ └── com │ │ └── google │ │ └── pubsub │ │ └── flink │ │ └── internal │ │ ├── source │ │ ├── split │ │ │ ├── SubscriptionSplitSerializerTest.java │ │ │ └── SubscriptionSplitTest.java │ │ ├── enumerator │ │ │ └── PubSubCheckpointSerializerTest.java │ │ └── reader │ │ │ └── PubSubRecordEmitterTest.java │ │ └── sink │ │ └── PubSubPublisherCacheTest.java ├── flink-connector-gcp-pubsub-e2e-tests │ └── src │ │ └── test │ │ └── java │ │ └── com │ │ └── google │ │ └── pubsub │ │ └── flink │ │ └── DockerImageVersions.java └── flink-examples-gcp-pubsub │ └── pom.xml ├── CONTRIBUTING.md └── README.md /load-test-framework/src/main/proto/loadtest.proto: 
-------------------------------------------------------------------------------- 1 | ../../../proto/loadtest.proto -------------------------------------------------------------------------------- /load-test-framework/python_src/requirements.txt: -------------------------------------------------------------------------------- 1 | grpcio 2 | google-cloud-pubsub 3 | cachetools 4 | -------------------------------------------------------------------------------- /.kokoro/nightly/java8-osx.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | build_file: "pubsub/.kokoro/build.sh" -------------------------------------------------------------------------------- /.kokoro/nightly/java8-win.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | build_file: "pubsub/.kokoro/build.bat" -------------------------------------------------------------------------------- /.kokoro/presubmit/java8-osx.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | build_file: "pubsub/.kokoro/build.sh" -------------------------------------------------------------------------------- /.kokoro/presubmit/java8-win.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | build_file: "pubsub/.kokoro/build.bat" -------------------------------------------------------------------------------- /load-test-framework/wiki/diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GoogleCloudPlatform/pubsub/HEAD/load-test-framework/wiki/diagram.png -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/package-info.java: -------------------------------------------------------------------------------- 1 | @javax.annotation.ParametersAreNonnullByDefault 2 | package com.google.cloud.pubsub.sql; 3 | -------------------------------------------------------------------------------- /.kokoro/requirements.in: -------------------------------------------------------------------------------- 1 | gcp-docuploader 2 | gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x 3 | wheel 4 | setuptools 5 | typing-extensions 6 | click<8.1.0 -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/providers/package-info.java: -------------------------------------------------------------------------------- 1 | @javax.annotation.ParametersAreNonnullByDefault 2 | package com.google.cloud.pubsub.sql.providers; 3 | -------------------------------------------------------------------------------- /.kokoro/nightly/java7.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 
4 | env_vars: { 5 | key: "TRAMPOLINE_IMAGE" 6 | value: "gcr.io/cloud-devrel-kokoro-resources/java7" 7 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | bin 3 | data 4 | .classpath 5 | .idea 6 | .project 7 | .settings 8 | *.iml 9 | *.out 10 | .metadata 11 | *.pyc 12 | node_modules/ 13 | venv/ 14 | .DS_Store 15 | .idea/ 16 | 17 | **/dependency-reduced-pom.xml 18 | 19 | -------------------------------------------------------------------------------- /.kokoro/nightly/java11.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 4 | env_vars: { 5 | key: "TRAMPOLINE_IMAGE" 6 | value: "gcr.io/cloud-devrel-kokoro-resources/java11" 7 | } -------------------------------------------------------------------------------- /.kokoro/presubmit/java11.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 4 | env_vars: { 5 | key: "TRAMPOLINE_IMAGE" 6 | value: "gcr.io/cloud-devrel-kokoro-resources/java11" 7 | } -------------------------------------------------------------------------------- /.kokoro/presubmit/java7.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 4 | env_vars: { 5 | key: "TRAMPOLINE_IMAGE" 6 | value: "gcr.io/cloud-devrel-kokoro-resources/java7" 7 | } -------------------------------------------------------------------------------- /kafka-connector/config/cps-sink-connector.properties: -------------------------------------------------------------------------------- 1 | name=CPSSinkConnector 2 | connector.class=com.google.pubsub.kafka.sink.CloudPubSubSinkConnector 3 | tasks.max=10 4 | topics=my-kafka-topic 5 | cps.topic=my-cps-topic 6 | cps.project=my-cps-project 7 | -------------------------------------------------------------------------------- /kafka-connector/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.rootLogger=DEBUG, CONSOLE 2 | log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender 3 | log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout 4 | 5 | log4j.logger.org.apache.kafka = OFF 6 | -------------------------------------------------------------------------------- /kafka-connector/src/main/java/com/google/pubsublite/kafka/source/PollerFactory.java: -------------------------------------------------------------------------------- 1 | package com.google.pubsublite.kafka.source; 2 | 3 | import java.util.Map; 4 | 5 | interface PollerFactory { 6 | 7 | Poller newPoller(Map<String, String> params); 8 | } 9 | -------------------------------------------------------------------------------- /load-test-framework/python_src/clients/flow_control/__init__.py: -------------------------------------------------------------------------------- 1 | from .flow_controller import FlowController 2 | from .outstanding_count_flow_controller import OutstandingCountFlowController 3 | from .rate_limiter_flow_controller import RateLimiterFlowController 4 | --------------------------------------------------------------------------------
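A note on the flow-control package exported above: its FlowController interface (defined in flow_controller.py, shown later in this dump) exposes request_start(), which blocks until at least one operation is allowed and returns how many operations may start now, and inform_finished(was_successful), which reports each completion. A minimal driver sketch under stated assumptions — the controller instance and the publish() callable are hypothetical stand-ins supplied by the caller, not part of this package:

def drive_publisher(controller, publish, total_messages):
    # 'controller' is any FlowController implementation exported above,
    # e.g. a RateLimiterFlowController constructed elsewhere.
    sent = 0
    while sent < total_messages:
        # Blocks until at least one operation is allowed, then returns
        # the number of operations that may start now.
        allowed = controller.request_start()
        for _ in range(min(allowed, total_messages - sent)):
            ok = True
            try:
                publish()  # hypothetical user-supplied publish callable
            except Exception:
                ok = False
            # Report each completion so the controller can adapt its pacing.
            controller.inform_finished(was_successful=ok)
            sent += 1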
/kafka-connector/config/cps-source-connector.properties: -------------------------------------------------------------------------------- 1 | name=CPSSourceConnector 2 | connector.class=com.google.pubsub.kafka.source.CloudPubSubSourceConnector 3 | tasks.max=10 4 | cps.project=my-cps-project 5 | cps.subscription=my-cps-subscription 6 | kafka.topic=my-kafka-topic 7 | 8 | -------------------------------------------------------------------------------- /kafka-connector/config/pubsub-lite-sink-connector.properties: -------------------------------------------------------------------------------- 1 | name=PubSubLiteSinkConnector 2 | connector.class=com.google.pubsublite.kafka.sink.PubSubLiteSinkConnector 3 | tasks.max=10 4 | topics=my-kafka-topic 5 | pubsublite.project=my-project 6 | pubsublite.location=europe-south7-q 7 | pubsublite.topic=my-topic -------------------------------------------------------------------------------- /.kokoro/nightly/java8.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 4 | env_vars: { 5 | key: "TRAMPOLINE_IMAGE" 6 | value: "gcr.io/cloud-devrel-kokoro-resources/java8" 7 | } 8 | 9 | env_vars: { 10 | key: "REPORT_COVERAGE" 11 | value: "true" 12 | } -------------------------------------------------------------------------------- /.kokoro/presubmit/clirr.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 4 | 5 | env_vars: { 6 | key: "TRAMPOLINE_IMAGE" 7 | value: "gcr.io/cloud-devrel-kokoro-resources/java8" 8 | } 9 | 10 | env_vars: { 11 | key: "JOB_TYPE" 12 | value: "clirr" 13 | } -------------------------------------------------------------------------------- /.kokoro/presubmit/java8.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 4 | env_vars: { 5 | key: "TRAMPOLINE_IMAGE" 6 | value: "gcr.io/cloud-devrel-kokoro-resources/java8" 7 | } 8 | 9 | env_vars: { 10 | key: "REPORT_COVERAGE" 11 | value: "true" 12 | } -------------------------------------------------------------------------------- /.kokoro/presubmit/lint.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 
4 | 5 | env_vars: { 6 | key: "TRAMPOLINE_IMAGE" 7 | value: "gcr.io/cloud-devrel-kokoro-resources/java8" 8 | } 9 | 10 | env_vars: { 11 | key: "JOB_TYPE" 12 | value: "lint" 13 | } -------------------------------------------------------------------------------- /kafka-connector/config/pubsub-lite-source-connector.properties: -------------------------------------------------------------------------------- 1 | name=PubSubLiteSourceConnector 2 | connector.class=com.google.pubsublite.kafka.source.PubSubLiteSourceConnector 3 | tasks.max=10 4 | pubsublite.project=my-project 5 | pubsublite.location=europe-south7-q 6 | pubsublite.subscription=my-subscription 7 | kafka.topic=my-kafka-topic 8 | -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/SqlStreamingOptions.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql; 2 | 3 | import org.apache.beam.sdk.extensions.sql.impl.BeamSqlPipelineOptions; 4 | import org.apache.beam.sdk.options.StreamingOptions; 5 | 6 | public interface SqlStreamingOptions extends StreamingOptions, BeamSqlPipelineOptions {} 7 | -------------------------------------------------------------------------------- /.kokoro/presubmit/dependencies.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 4 | env_vars: { 5 | key: "TRAMPOLINE_IMAGE" 6 | value: "gcr.io/cloud-devrel-kokoro-resources/java8" 7 | } 8 | 9 | env_vars: { 10 | key: "TRAMPOLINE_BUILD_FILE" 11 | value: "github/pubsub/.kokoro/dependencies.sh" 12 | } -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: java 2 | 3 | matrix: 4 | include: 5 | - os: linux 6 | jdk: oraclejdk9 7 | dist: trusty 8 | - os: linux 9 | jdk: openjdk9 10 | 11 | script: 12 | - mvn -q -B -f kafka-connector/pom.xml clean verify 13 | - mvn -q -B -f load-test-framework/pom.xml clean verify 14 | - mvn -q -B -f flink-connector/pom.xml clean verify 15 | -------------------------------------------------------------------------------- /.kokoro/presubmit/linkage-monitor.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 
4 | env_vars: { 5 | key: "TRAMPOLINE_IMAGE" 6 | value: "gcr.io/cloud-devrel-kokoro-resources/java8" 7 | } 8 | 9 | env_vars: { 10 | key: "TRAMPOLINE_BUILD_FILE" 11 | value: "github/pubsub/.kokoro/linkage-monitor.sh" 12 | } -------------------------------------------------------------------------------- /kafka-connector/src/main/java/com/google/pubsub/kafka/source/StreamingPullSubscriberFactory.java: -------------------------------------------------------------------------------- 1 | package com.google.pubsub.kafka.source; 2 | 3 | import com.google.cloud.pubsub.v1.MessageReceiver; 4 | import com.google.cloud.pubsub.v1.SubscriberInterface; 5 | 6 | public interface StreamingPullSubscriberFactory { 7 | SubscriberInterface newSubscriber(MessageReceiver receiver); 8 | } 9 | -------------------------------------------------------------------------------- /kafka-connector/src/main/java/com/google/pubsublite/kafka/source/Poller.java: -------------------------------------------------------------------------------- 1 | package com.google.pubsublite.kafka.source; 2 | 3 | import java.util.List; 4 | import javax.annotation.Nullable; 5 | import org.apache.kafka.connect.source.SourceRecord; 6 | 7 | interface Poller extends AutoCloseable { 8 | 9 | @Nullable 10 | List<SourceRecord> poll(); 11 | 12 | void close(); 13 | } 14 | -------------------------------------------------------------------------------- /kafka-connector/src/main/java/com/google/pubsublite/kafka/sink/PublisherFactory.java: -------------------------------------------------------------------------------- 1 | package com.google.pubsublite.kafka.sink; 2 | 3 | import com.google.cloud.pubsublite.PublishMetadata; 4 | import com.google.cloud.pubsublite.internal.Publisher; 5 | import java.util.Map; 6 | 7 | interface PublisherFactory { 8 | 9 | Publisher<PublishMetadata> newPublisher(Map<String, String> params); 10 | } 11 | -------------------------------------------------------------------------------- /load-test-framework/node_src/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "worker", 3 | "version": "1.0.0", 4 | "description": "", 5 | "dependencies": { 6 | "@google-cloud/pubsub": "0.24.1", 7 | "minimist": "latest" 8 | }, 9 | "devDependencies": {}, 10 | "scripts": { 11 | "test-sub": "echo \"Error: no test specified\" && exit 1" 12 | }, 13 | "author": "Google", 14 | "license": "Apache-2.0" 15 | } 16 | -------------------------------------------------------------------------------- /license-checks.xml: -------------------------------------------------------------------------------- (XML content; markup stripped in this dump) -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/providers/StandardSinkProvider.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql.providers; 2 | 3 | import org.apache.beam.sdk.schemas.io.Providers.Identifyable; 4 | 5 | /** 6 | * Additional SinkProviders can be added by implementing this interface and annotating it with 7 | * AutoService.
8 | */ 9 | public interface StandardSinkProvider extends Identifyable { 10 | 11 | StandardSink getSink(); 12 | } 13 | -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/providers/StandardSourceProvider.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql.providers; 2 | 3 | import org.apache.beam.sdk.schemas.io.Providers.Identifyable; 4 | 5 | /** 6 | * Additional SourceProviders can be added by implementing this interface and annotating it with 7 | * AutoService. 8 | */ 9 | public interface StandardSourceProvider extends Identifyable { 10 | 11 | StandardSource getSource(); 12 | } 13 | -------------------------------------------------------------------------------- /udfs/validate.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to validate a message. 3 | * 4 | * @param {Object} message - The message to validate. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The validated message. 7 | */ 8 | function validate(message, metadata) { 9 | 10 | const data = JSON.parse(message.data); 11 | 12 | if (data["field1"] < 10) { 13 | throw new Error("field1 is invalid"); 14 | } 15 | 16 | return message; 17 | } -------------------------------------------------------------------------------- /.kokoro/common.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Download trampoline resources. These will be in ${KOKORO_GFILE_DIR} 4 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 5 | 6 | # All builds use the trampoline script to run in docker. 7 | build_file: "pubsub/.kokoro/trampoline.sh" 8 | 9 | # Tell the trampoline which build file to use. 10 | env_vars: { 11 | key: "TRAMPOLINE_BUILD_FILE" 12 | value: "github/pubsub/.kokoro/build.sh" 13 | } -------------------------------------------------------------------------------- /load-test-framework/python_src/clients/flow_control/flow_controller.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | 4 | class FlowController(ABC): 5 | @abstractmethod 6 | def request_start(self): 7 | """Request to start an operation. 8 | 9 | :return: the number of currently allowed operations. Blocks until an 10 | operation is allowed.
11 | """ 12 | pass 13 | 14 | @abstractmethod 15 | def inform_finished(self, was_successful: bool): 16 | pass 17 | -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/providers/BigQueryProvider.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql.providers; 2 | 3 | import com.google.auto.service.AutoService; 4 | import com.google.cloud.pubsub.sql.Rows; 5 | 6 | @AutoService(StandardSinkProvider.class) 7 | public class BigQueryProvider implements StandardSinkProvider { 8 | 9 | @Override 10 | public StandardSink getSink() { 11 | return (StandardSqlSink) () -> Rows.STANDARD_SCHEMA; 12 | } 13 | 14 | @Override 15 | public String identifier() { 16 | return "bigquery"; 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /udfs/redact.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to redact PII fields from a message. 3 | * 4 | * @param {Object} message - The message to redact. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The redacted message. 7 | */ 8 | function redact(message, metadata) { 9 | // parse the messagae 10 | const data = JSON.parse(message.data); 11 | 12 | // redact PII fields 13 | delete data['field1']; 14 | delete data['field2']; 15 | delete data['field3']; 16 | 17 | message.data = JSON.stringify(data); 18 | 19 | return message; 20 | } -------------------------------------------------------------------------------- /kafka-connector/src/main/java/com/google/pubsublite/kafka/sink/Constants.java: -------------------------------------------------------------------------------- 1 | package com.google.pubsublite.kafka.sink; 2 | 3 | public final class Constants { 4 | 5 | private Constants() { 6 | } 7 | 8 | public static final String KAFKA_TOPIC_HEADER = "x-goog-pubsublite-source-kafka-topic"; 9 | public static final String KAFKA_PARTITION_HEADER = "x-goog-pubsublite-source-kafka-partition"; 10 | public static final String KAFKA_OFFSET_HEADER = "x-goog-pubsublite-source-kafka-offset"; 11 | public static final String KAFKA_EVENT_TIME_TYPE_HEADER = "x-goog-pubsublite-source-kafka-event-time-type"; 12 | 13 | } 14 | -------------------------------------------------------------------------------- /.kokoro/nightly/common.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Build logs will be here 4 | action { 5 | define_artifacts { 6 | regex: "**/*sponge_log.xml" 7 | regex: "**/*sponge_log.txt" 8 | } 9 | } 10 | 11 | # Download trampoline resources. 12 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 13 | 14 | # Use the trampoline script to run in docker. 15 | build_file: "pubsub/.kokoro/trampoline.sh" 16 | 17 | env_vars: { 18 | key: "TRAMPOLINE_BUILD_FILE" 19 | value: "github/pubsub/.kokoro/build.sh" 20 | } 21 | 22 | env_vars: { 23 | key: "JOB_TYPE" 24 | value: "test" 25 | } -------------------------------------------------------------------------------- /load-test-framework/python_src/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | #     http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/providers/StandardSink.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql.providers; 2 | 3 | import org.apache.beam.sdk.schemas.Schema; 4 | import org.apache.beam.sdk.transforms.PTransform; 5 | import org.apache.beam.sdk.values.PCollection; 6 | import org.apache.beam.sdk.values.Row; 7 | 8 | /** 9 | * Consumes rows conforming to the standard row definition. 10 | */ 11 | public interface StandardSink { 12 | 13 | Schema nativeSchema(); 14 | 15 | /** 16 | * Transform from the standard schema to the native schema. 17 | */ 18 | PTransform<PCollection<Row>, PCollection<Row>> transform(); 19 | } 20 | -------------------------------------------------------------------------------- /load-test-framework/python_src/clients/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | #     http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | -------------------------------------------------------------------------------- /load-test-framework/python_src/proto_dir/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | #     http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | -------------------------------------------------------------------------------- /client/README.md: -------------------------------------------------------------------------------- 1 | The alpha client libraries in this directory have been replaced by the official 2 | [Google Cloud Pub/Sub Java client library](https://github.com/GoogleCloudPlatform/google-cloud-java/tree/master/google-cloud-clients/google-cloud-pubsub) and then by [Google Cloud Pub/Sub Client for Java](https://github.com/googleapis/java-pubsub).
3 | The semantics and performance improvements introduced in these alpha libraries 4 | have been preserved and improved in the official libraries, so we strongly 5 | encourage you to migrate. However, we have no plan to remove this code from our [Maven 6 | repository](http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.google.pubsub%22). 7 | -------------------------------------------------------------------------------- /.kokoro/presubmit/integration.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 4 | env_vars: { 5 | key: "TRAMPOLINE_IMAGE" 6 | value: "gcr.io/cloud-devrel-kokoro-resources/java8" 7 | } 8 | 9 | env_vars: { 10 | key: "JOB_TYPE" 11 | value: "integration" 12 | } 13 | 14 | env_vars: { 15 | key: "GOOGLE_CLOUD_PROJECT" 16 | value: "java-docs-samples-testing" 17 | } 18 | 19 | env_vars: { 20 | key: "GOOGLE_APPLICATION_CREDENTIALS" 21 | value: "secret_manager/java-docs-samples-service-account" 22 | } 23 | 24 | env_vars: { 25 | key: "SECRET_MANAGER_KEYS" 26 | value: "java-docs-samples-service-account" 27 | } -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/providers/StandardSource.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql.providers; 2 | 3 | import com.google.cloud.pubsub.sql.Rows; 4 | import org.apache.beam.sdk.schemas.Schema; 5 | import org.apache.beam.sdk.transforms.PTransform; 6 | import org.apache.beam.sdk.values.PCollection; 7 | import org.apache.beam.sdk.values.Row; 8 | 9 | /** 10 | * Produces a row conforming to the standard row definition. 11 | */ 12 | public interface StandardSource { 13 | 14 | Schema nativeSchema(); 15 | 16 | /** 17 | * Transform from the native schema to the standard schema. 18 | */ 19 | PTransform<PCollection<Row>, PCollection<Row>> transform(); 20 | } 21 | -------------------------------------------------------------------------------- /java.header: -------------------------------------------------------------------------------- 1 | ^/\*$ 2 | ^ \* Copyright \d\d\d\d,? Google (Inc\.|LLC)$ 3 | ^ \*$ 4 | ^ \* Licensed under the Apache License, Version 2\.0 \(the "License"\);$ 5 | ^ \* you may not use this file except in compliance with the License\.$ 6 | ^ \* You may obtain a copy of the License at$ 7 | ^ \*$ 8 | ^ \*[ ]+https?://www.apache.org/licenses/LICENSE-2\.0$ 9 | ^ \*$ 10 | ^ \* Unless required by applicable law or agreed to in writing, software$ 11 | ^ \* distributed under the License is distributed on an "AS IS" BASIS,$ 12 | ^ \* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied\.$ 13 | ^ \* See the License for the specific language governing permissions and$ 14 | ^ \* limitations under the License\.$ 15 | ^ \*/$ -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/UITemplateOptions.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql; 2 | 3 | import org.apache.beam.sdk.options.Description; 4 | import org.apache.beam.sdk.options.Validation.Required; 5 | 6 | public interface UITemplateOptions extends SqlStreamingOptions { 7 | @Description("Source spec JSON.
Structured as {\"id\":\"\",\"location\":\"\",\"properties\":{...}}") 8 | @Required() 9 | String getSourceSpec(); 10 | void setSourceSpec(String type); 11 | 12 | @Description("Sink spec JSON. Structured as {\"id\":\"\",\"location\":\"\",\"properties\":{...}}") 13 | @Required() 14 | String getSinkSpec(); 15 | void setSinkSpec(String type); 16 | } 17 | -------------------------------------------------------------------------------- /.kokoro/presubmit/graalvm-native.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 4 | env_vars: { 5 | key: "TRAMPOLINE_IMAGE" 6 | value: "gcr.io/cloud-devrel-kokoro-resources/graalvm:22.3.0" 7 | } 8 | 9 | env_vars: { 10 | key: "JOB_TYPE" 11 | value: "graalvm" 12 | } 13 | 14 | env_vars: { 15 | key: "GCLOUD_PROJECT" 16 | value: "gcloud-devel" 17 | } 18 | 19 | env_vars: { 20 | key: "GOOGLE_CLOUD_PROJECT" 21 | value: "gcloud-devel" 22 | } 23 | 24 | env_vars: { 25 | key: "GOOGLE_APPLICATION_CREDENTIALS" 26 | value: "secret_manager/java-it-service-account" 27 | } 28 | 29 | env_vars: { 30 | key: "SECRET_MANAGER_KEYS" 31 | value: "java-it-service-account" 32 | } -------------------------------------------------------------------------------- /.kokoro/presubmit/graalvm-native-17.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 4 | env_vars: { 5 | key: "TRAMPOLINE_IMAGE" 6 | value: "gcr.io/cloud-devrel-kokoro-resources/graalvm17:22.3.0" 7 | } 8 | 9 | env_vars: { 10 | key: "JOB_TYPE" 11 | value: "graalvm17" 12 | } 13 | 14 | env_vars: { 15 | key: "GCLOUD_PROJECT" 16 | value: "gcloud-devel" 17 | } 18 | 19 | env_vars: { 20 | key: "GOOGLE_CLOUD_PROJECT" 21 | value: "gcloud-devel" 22 | } 23 | 24 | env_vars: { 25 | key: "GOOGLE_APPLICATION_CREDENTIALS" 26 | value: "secret_manager/java-it-service-account" 27 | } 28 | 29 | env_vars: { 30 | key: "SECRET_MANAGER_KEYS" 31 | value: "java-it-service-account" 32 | } -------------------------------------------------------------------------------- /udfs/url_encode.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to URL encode specific fields within a message. 3 | * 4 | * @param {Object} message - The message containing fields to URL encode. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The message with URL encoded fields. 7 | */ 8 | function url_encode(message, metadata) { 9 | // Parse the message 10 | const data = JSON.parse(message.data); 11 | 12 | // URL encode the specified field 13 | if (data['field'] !== undefined) { 14 | data['encodedString'] = encodeURIComponent(data['field']); 15 | } 16 | 17 | // Update the message with encoded data 18 | message.data = JSON.stringify(data); 19 | 20 | return message; 21 | } 22 | -------------------------------------------------------------------------------- /.kokoro/presubmit/samples.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 
4 | env_vars: { 5 | key: "TRAMPOLINE_IMAGE" 6 | value: "gcr.io/cloud-devrel-kokoro-resources/java8" 7 | } 8 | 9 | env_vars: { 10 | key: "JOB_TYPE" 11 | value: "samples" 12 | } 13 | 14 | env_vars: { 15 | key: "GCLOUD_PROJECT" 16 | value: "java-docs-samples-testing" 17 | } 18 | 19 | env_vars: { 20 | key: "GOOGLE_CLOUD_PROJECT" 21 | value: "java-docs-samples-testing" 22 | } 23 | 24 | env_vars: { 25 | key: "GOOGLE_APPLICATION_CREDENTIALS" 26 | value: "secret_manager/java-docs-samples-service-account" 27 | } 28 | 29 | env_vars: { 30 | key: "SECRET_MANAGER_KEYS" 31 | value: "java-docs-samples-service-account" 32 | } -------------------------------------------------------------------------------- /udfs/url_decode.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to URL decode specific fields within a message. 3 | * 4 | * @param {Object} message - The message containing fields to URL decode. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The message with URL decoded fields. 7 | */ 8 | function url_decode(message, metadata) { 9 | // Parse the message 10 | const data = JSON.parse(message.data); 11 | 12 | // URL decode the specified field (e.g. encodedString) 13 | if (data['encodedString'] !== undefined) { 14 | data['decodedString'] = decodeURIComponent(data['encodedString']); 15 | } 16 | 17 | // Update the message with decoded data 18 | message.data = JSON.stringify(data); 19 | 20 | return message; 21 | } 22 | -------------------------------------------------------------------------------- /udfs/insert_field.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to insert a new field into the message data. 3 | * 4 | * @param {Object} message - The message to insert the field into. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The message with the inserted field. 7 | */ 8 | function insert_field(message, metadata) { 9 | // Parse the message 10 | const data = JSON.parse(message.data); 11 | 12 | // Define the field to insert and its value 13 | const newField = 'inserted_field'; 14 | const newValue = 'default_value'; 15 | 16 | // Insert the new field 17 | data[newField] = newValue; 18 | 19 | // Update the message with inserted field 20 | message.data = JSON.stringify(data); 21 | 22 | return message; 23 | } 24 | -------------------------------------------------------------------------------- /load-test-framework/python_src/clients/to_float_seconds.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | def to_float_seconds(time_or_duration): 16 | return time_or_duration.seconds + float(time_or_duration.nanos) / 1000000000.0 17 | -------------------------------------------------------------------------------- /udfs/insert_attributes.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to insert three new attributes into the message metadata. 3 | * 4 | * @param {Object} message - The message to insert attributes into. 5 | * @param {Object} metadata - The metadata to modify. 6 | * @returns {Object} The message with new attributes in metadata. 7 | */ 8 | function insert_attributes(message, metadata) { 9 | 10 | // Parse the message 11 | const data = JSON.parse(message.data); 12 | const attributes = message.attributes; 13 | 14 | // Add new attributes to insert 15 | attributes["key1"] = 'value1'; 16 | attributes["key2"] = 'value2'; 17 | attributes["key3"] = 'value3'; 18 | 19 | // Update the message with modified data 20 | message.data = JSON.stringify(data); 21 | 22 | return message; 23 | } 24 | -------------------------------------------------------------------------------- /.kokoro/presubmit/common.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Build logs will be here 4 | action { 5 | define_artifacts { 6 | regex: "**/*sponge_log.xml" 7 | regex: "**/*sponge_log.txt" 8 | } 9 | } 10 | 11 | # Download trampoline resources. 12 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 13 | 14 | # Use the trampoline script to run in docker. 15 | build_file: "pubsub/.kokoro/trampoline.sh" 16 | 17 | env_vars: { 18 | key: "TRAMPOLINE_BUILD_FILE" 19 | value: "github/pubsub/.kokoro/build.sh" 20 | } 21 | 22 | env_vars: { 23 | key: "JOB_TYPE" 24 | value: "test" 25 | } 26 | 27 | before_action { 28 | fetch_keystore { 29 | keystore_resource { 30 | keystore_config_id: 73713 31 | keyname: "dpebot_codecov_token" 32 | } 33 | } 34 | } -------------------------------------------------------------------------------- /.kokoro/nightly/java11-integration.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 
4 | env_vars: { 5 | key: "TRAMPOLINE_IMAGE" 6 | value: "gcr.io/cloud-devrel-public-resources/java11014" 7 | } 8 | 9 | env_vars: { 10 | key: "JOB_TYPE" 11 | value: "integration" 12 | } 13 | 14 | env_vars: { 15 | key: "GCLOUD_PROJECT" 16 | value: "gcloud-devel" 17 | } 18 | 19 | env_vars: { 20 | key: "GOOGLE_CLOUD_PROJECT" 21 | value: "gcloud-devel" 22 | } 23 | 24 | env_vars: { 25 | key: "ENABLE_FLAKYBOT" 26 | value: "true" 27 | } 28 | 29 | env_vars: { 30 | key: "GOOGLE_APPLICATION_CREDENTIALS" 31 | value: "secret_manager/java-it-service-account" 32 | } 33 | 34 | env_vars: { 35 | key: "SECRET_MANAGER_KEYS" 36 | value: "java-it-service-account" 37 | } -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/TemplateMain.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql; 2 | 3 | import org.apache.beam.runners.dataflow.DataflowRunner; 4 | import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions; 5 | import org.apache.beam.sdk.options.PipelineOptionsFactory; 6 | 7 | /** 8 | * The main class used in the dataflow template. Cannot be run locally. 9 | */ 10 | public class TemplateMain { 11 | public interface Options extends DataflowPipelineOptions, TemplateOptions {} 12 | 13 | public static void main(final String[] args) { 14 | Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class); 15 | options.setEnableStreamingEngine(true); 16 | options.setRunner(DataflowRunner.class); 17 | RunPipeline.run(options); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /udfs/mask.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to mask sensitive fields in a message. 3 | * 4 | * @param {Object} message - The message to mask. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The masked message. 
7 | */ 8 | function mask(message, metadata) { 9 | // Parse the message 10 | const data = JSON.parse(message.data); 11 | 12 | // Define fields to mask 13 | const fieldsToMask = ['field1', 'field2', 'field3']; 14 | 15 | // Mask each field by replacing with asterisks 16 | fieldsToMask.forEach(field => { 17 | if (data[field]) { 18 | data[field] = '*'.repeat(data[field].length); 19 | } 20 | }); 21 | 22 | // Update the message with masked data 23 | message.data = JSON.stringify(data); 24 | 25 | return message; 26 | } 27 | -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/UITemplateMain.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql; 2 | 3 | import org.apache.beam.runners.dataflow.DataflowRunner; 4 | import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions; 5 | import org.apache.beam.sdk.options.PipelineOptionsFactory; 6 | 7 | public class UITemplateMain { 8 | 9 | public interface Options extends DataflowPipelineOptions, UITemplateOptions {} 10 | 11 | public static void main(final String[] args) { 12 | Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class); 13 | options.setEnableStreamingEngine(true); 14 | options.setRunner(DataflowRunner.class); 15 | RunPipeline.run(options, TableSpec.parse(options.getSourceSpec()), 16 | TableSpec.parse(options.getSinkSpec())); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /.kokoro/nightly/samples.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 4 | env_vars: { 5 | key: "TRAMPOLINE_IMAGE" 6 | value: "gcr.io/cloud-devrel-kokoro-resources/java8" 7 | } 8 | 9 | env_vars: { 10 | key: "JOB_TYPE" 11 | value: "samples" 12 | } 13 | 14 | env_vars: { 15 | key: "GCLOUD_PROJECT" 16 | value: "java-docs-samples-testing" 17 | } 18 | 19 | env_vars: { 20 | key: "GOOGLE_CLOUD_PROJECT" 21 | value: "java-docs-samples-testing" 22 | } 23 | 24 | env_vars: { 25 | key: "GOOGLE_APPLICATION_CREDENTIALS" 26 | value: "secret_manager/java-docs-samples-service-account" 27 | } 28 | 29 | env_vars: { 30 | key: "SECRET_MANAGER_KEYS" 31 | value: "java-docs-samples-service-account" 32 | } 33 | 34 | env_vars: { 35 | key: "ENABLE_BUILD_COP" 36 | value: "true" 37 | } -------------------------------------------------------------------------------- /udfs/timestamp_utc_converter.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to convert a timestamp field to GMT. 3 | * 4 | * @param {Object} message - The message containing the timestamp to convert. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The message with the converted timestamp. 
7 | */ 8 | function timestamp_utc_converter(message, metadata) { 9 | // Parse the message 10 | const data = JSON.parse(message.data); 11 | 12 | // Check if timestamp field exists 13 | if (data['timestamp']) { 14 | // Convert timestamp to GMT 15 | const date = new Date(data['timestamp']); 16 | data['gmt_timestamp'] = date.toUTCString(); 17 | } 18 | 19 | // Update the message with converted timestamp 20 | message.data = JSON.stringify(data); 21 | 22 | return message; 23 | } 24 | -------------------------------------------------------------------------------- /.kokoro/nightly/integration.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Configure the docker image for kokoro-trampoline. 4 | env_vars: { 5 | key: "TRAMPOLINE_IMAGE" 6 | value: "gcr.io/cloud-devrel-kokoro-resources/java8" 7 | } 8 | 9 | env_vars: { 10 | key: "JOB_TYPE" 11 | value: "integration" 12 | } 13 | 14 | env_vars: { 15 | key: "GCLOUD_PROJECT" 16 | value: "java-docs-samples-testing" 17 | } 18 | 19 | env_vars: { 20 | key: "GOOGLE_CLOUD_PROJECT" 21 | value: "java-docs-samples-testing" 22 | } 23 | 24 | env_vars: { 25 | key: "ENABLE_FLAKYBOT" 26 | value: "true" 27 | } 28 | 29 | env_vars: { 30 | key: "GOOGLE_APPLICATION_CREDENTIALS" 31 | value: "secret_manager/java-docs-samples-service-account" 32 | } 33 | 34 | env_vars: { 35 | key: "SECRET_MANAGER_KEYS" 36 | value: "java-docs-samples-service-account" 37 | } -------------------------------------------------------------------------------- /.kokoro/build.bat: -------------------------------------------------------------------------------- 1 | :: Copyright 2025 Google LLC 2 | :: 3 | :: Licensed under the Apache License, Version 2.0 (the "License"); 4 | :: you may not use this file except in compliance with the License. 5 | :: You may obtain a copy of the License at 6 | :: 7 | :: http://www.apache.org/licenses/LICENSE-2.0 8 | :: 9 | :: Unless required by applicable law or agreed to in writing, software 10 | :: distributed under the License is distributed on an "AS IS" BASIS, 11 | :: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | :: See the License for the specific language governing permissions and 13 | :: limitations under the License. 14 | :: Github action job to test core java library features on 15 | :: downstream client libraries before they are released. 16 | :: See documentation in type-shell-output.bat 17 | 18 | "C:\Program Files\Git\bin\bash.exe" %~dp0build.sh -------------------------------------------------------------------------------- /sql-streaming-copier/ui_metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Streaming SQL Copier (for UI use)", 3 | "description": "An Apache Beam streaming pipeline copies data from a streaming source in a standard format without parsing the payload and writes it to a streaming sink.", 4 | "parameters": [ 5 | { 6 | "name": "sourceSpec", 7 | "label": "sourceSpec", 8 | "helpText": "Source spec JSON. Structured as {\"id\":\"\",\"location\":\"\",\"properties\":{...}}", 9 | "custom_metadata": {"usePubsubCopyPicker": "true", "pickerMode": "source"} 10 | }, 11 | { 12 | "name": "sinkSpec", 13 | "label": "sinkSpec", 14 | "helpText": "Sink spec JSON. 
Structured as {\"id\":\"\",\"location\":\"\",\"properties\":{...}}", 15 | "custom_metadata": {"usePubsubCopyPicker": "true", "pickerMode": "sink"} 16 | } 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/MakePtransform.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql; 2 | 3 | import org.apache.beam.sdk.transforms.PTransform; 4 | import org.apache.beam.sdk.transforms.SerializableFunction; 5 | import org.apache.beam.sdk.values.PInput; 6 | import org.apache.beam.sdk.values.POutput; 7 | 8 | public class MakePtransform { 9 | 10 | private MakePtransform() { 11 | } 12 | 13 | /** 14 | * Creates a PTransform from a SerializableFunction from PInput to POutput. 15 | */ 16 | public static PTransform from( 17 | SerializableFunction transform, String name) { 18 | return new PTransform<>(name) { 19 | @Override 20 | public OutputT expand(InputT input) { 21 | return transform.apply(input); 22 | } 23 | }; 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /udfs/README.md: -------------------------------------------------------------------------------- 1 | # Pub/Sub UDFs 2 | 3 | Pub/Sub offers Single Message Transforms ([SMTs](https://cloud.google.com/pubsub/docs/smts/smts-overview)) to simplify data transformations for streaming pipelines. SMTs enable lightweight modifications to message data and attributes directly within Pub/Sub. SMTs eliminate the need for additional data processing steps or separate data transformation products. 4 | 5 | A JavaScript User-Defined Function ([UDF](https://cloud.google.com/pubsub/docs/smts/udfs-overview)) is a type of Single Message Transform (SMT). UDFs provide a flexible way to implement custom transformation logic within Pub/Sub 6 | 7 | 8 | ## Using the UDFs 9 | 10 | Please clone the repo and use it as a starting point for your Single Message Transforms. All UDFs within this repository are maintained in js format. This format is used to enable testing and deployment of the UDFs with Pub/Sub SMTs. -------------------------------------------------------------------------------- /load-test-framework/go_src/go.mod: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google Inc. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // You may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions 13 | // and limitations under the License. 14 | 15 | module google.com/cloud_pubsub_loadtest 16 | 17 | require ( 18 | cloud.google.com/go v0.35.1 19 | github.com/golang/protobuf v1.2.0 20 | golang.org/x/net v0.0.0-20181106065722-10aee1819953 21 | google.golang.org/grpc v1.18.0 22 | ) 23 | -------------------------------------------------------------------------------- /load-test-framework/node_src/src/publisher_task_main.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 
3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | let TaskWorker = require("./task.js").TaskWorker; 18 | let PublisherSubtaskWorker = require("./publisher_task.js").PublisherSubtaskWorker; 19 | 20 | TaskWorker.runWorker(new PublisherSubtaskWorker()); 21 | -------------------------------------------------------------------------------- /load-test-framework/node_src/src/subscriber_task_main.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | let TaskWorker = require("./task.js").TaskWorker; 18 | let SubscriberSubtaskWorker = require("./subscriber_task.js").SubscriberSubtaskWorker; 19 | 20 | TaskWorker.runWorker(new SubscriberSubtaskWorker()); -------------------------------------------------------------------------------- /.github/workflows/new-release.yml: -------------------------------------------------------------------------------- 1 | name: Create new release 2 | 3 | on: 4 | release: 5 | types: [created] 6 | 7 | jobs: 8 | build: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v2 12 | - name: Set up JDK 1.8 13 | uses: actions/setup-java@v1 14 | with: 15 | java-version: 1.8 16 | - name: Build with Maven 17 | id: build 18 | working-directory: kafka-connector 19 | run: mvn -B package --file pom.xml 20 | - name: Upload Release Asset 21 | id: upload-release-asset 22 | uses: actions/upload-release-asset@v1 23 | env: 24 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 25 | with: 26 | upload_url: ${{ github.event.release.upload_url }} 27 | asset_path: ./kafka-connector/target/pubsub-kafka-connector.jar 28 | asset_name: pubsub-kafka-connector.jar 29 | asset_content_type: application/java-archive 30 | -------------------------------------------------------------------------------- /load-test-framework/node_src/src/settable_promise.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | module.exports = class SettablePromise { 18 | constructor() { 19 | this.promise = new Promise(resolve => { 20 | this.resolve = resolve; 21 | }); 22 | } 23 | 24 | set() { 25 | this.resolve(null); 26 | } 27 | }; -------------------------------------------------------------------------------- /udfs/cast.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to cast fields within a message to specified types. 3 | * 4 | * @param {Object} message - The message containing fields to cast. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The message with cast fields. 7 | */ 8 | function cast(message, metadata) { 9 | // Parse the message 10 | const data = JSON.parse(message.data); 11 | 12 | // Define type casting map 13 | const typeMap = { 14 | 'field1': String, 15 | 'field2': String, 16 | 'field3': String, 17 | }; 18 | 19 | // Cast each field according to the type map 20 | Object.entries(typeMap).forEach(([field, type]) => { 21 | if (data[field] !== undefined) { 22 | data[field] = type(data[field]); 23 | } 24 | }); 25 | 26 | // Update the message with new data 27 | message.data = JSON.stringify(data); 28 | 29 | return message; 30 | } 31 | -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/TableLoader.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql; 2 | 3 | import java.util.ServiceLoader; 4 | import org.apache.beam.sdk.extensions.sql.meta.BeamSqlTable; 5 | import org.apache.beam.sdk.extensions.sql.meta.Table; 6 | import org.apache.beam.sdk.extensions.sql.meta.provider.TableProvider; 7 | import org.apache.beam.sdk.extensions.sql.meta.store.InMemoryMetaStore; 8 | 9 | /** 10 | * Loads all TableProviders on the classpath, then looks up the correct one based on the table id. 11 | */ 12 | public class TableLoader { 13 | 14 | private TableLoader() { 15 | } 16 | 17 | private static final InMemoryMetaStore META_STORE = new InMemoryMetaStore(); 18 | 19 | static { 20 | ServiceLoader.load(TableProvider.class).forEach(META_STORE::registerProvider); 21 | } 22 | 23 | public static BeamSqlTable buildBeamSqlTable(Table table) { 24 | return META_STORE.buildBeamSqlTable(table); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /udfs/filter_custom.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to filter messages based on a custom condition defined in 3 | * the filter function. 4 | * 5 | * @param {Object} message - The message to filter. 6 | * @param {Object} metadata - The metadata of the message. 7 | * @returns {Object} The message if it meets the custom condition, otherwise null. 8 | */ 9 | function filter_custom(message, metadata) { 10 | // Parse the message 11 | const data = JSON.parse(message.data); 12 | 13 | // Filter out messages that match the filter function.
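// For example, with the filter below a message whose data is {"region": "EU"} is dropped (null is returned), while {"region": "US"} passes through unchanged.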
14 | if (filter(data)) { 15 | return null; // Return null to filter out the message 16 | } 17 | 18 | // Return the original message if not filtering 19 | return message; 20 | } 21 | 22 | /* 23 | * Returns true if the message should be filtered out 24 | * 25 | * @param {Object} data - The message to filter. 26 | */ 27 | function filter(data) { 28 | // Filter out messages that are not from US region. 29 | return data['region'] !== "US"; 30 | } -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/providers/StandardSqlSource.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql.providers; 2 | 3 | import com.google.cloud.pubsub.sql.MakePtransform; 4 | import org.apache.beam.sdk.extensions.sql.SqlTransform; 5 | import org.apache.beam.sdk.transforms.PTransform; 6 | import org.apache.beam.sdk.values.PCollection; 7 | import org.apache.beam.sdk.values.Row; 8 | 9 | public interface StandardSqlSource extends StandardSource { 10 | 11 | /** 12 | * A statement transforming from the native schema to the standard schema if needed. The provided 13 | * table is PCOLLECTION. 14 | */ 15 | default String query() { 16 | return ""; 17 | } 18 | 19 | @Override 20 | default PTransform<PCollection<Row>, PCollection<Row>> transform() { 21 | return MakePtransform.from(rows -> { 22 | if (!query().isEmpty()) { 23 | rows = rows.apply(SqlTransform.query(query())); 24 | } 25 | return rows; 26 | }, "StandardSqlSource Transform"); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /udfs/to_hex.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to convert a string field within a message to hex. 3 | * 4 | * @param {Object} message - The message to convert to hex. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The message with the string field converted to an array of hex byte strings. 7 | */ 8 | function to_hex(message, metadata) { 9 | // parse the message 10 | const data = JSON.parse(message.data); 11 | 12 | // get the string to convert 13 | const str = data['field1']; 14 | 15 | // convert to hex 16 | const bytes = stringToHexByteArray(str); 17 | 18 | // update the message with the hex value 19 | data['field1'] = bytes; 20 | 21 | message.data = JSON.stringify(data); 22 | 23 | return message; 24 | } 25 | 26 | function stringToHexByteArray(str) { 27 | // Converts a string to an array of two-character hex strings 28 | const bytes = []; 29 | for (let i = 0; i < str.length; i++) { 30 | bytes.push(str.charCodeAt(i).toString(16).padStart(2, '0')); 31 | } 32 | return bytes; 33 | } -------------------------------------------------------------------------------- /load-test-framework/go_src/internal/util/util.go: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package util 18 | 19 | import ( 20 | "runtime" 21 | "time" 22 | ) 23 | 24 | func CurrentTimeMs() int64 { 25 | return time.Now().UnixNano() / int64(time.Millisecond) 26 | } 27 | 28 | func ScaledNumWorkers(scaleFactor int) int { 29 | workers := scaleFactor * runtime.NumCPU() 30 | if workers < 1 { 31 | workers = 1 32 | } 33 | return workers 34 | } 35 | -------------------------------------------------------------------------------- /load-test-framework/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2019 Google Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # You may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions 14 | # and limitations under the License. 15 | # 16 | 17 | log4j.rootLogger=INFO, stdout 18 | 19 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 20 | log4j.appender.stdout.Target=System.out 21 | log4j.appender.stdout.layout=com.google.pubsub.flic.common.LoggingPattern 22 | 23 | log4j.logger.org.apache.kafka = OFF 24 | log4j.logger.kafka = OFF 25 | log4j.logger.io.grpc = OFF 26 | log4j.logger.io.netty = OFF 27 | 28 | -------------------------------------------------------------------------------- /kafka-connector/src/main/java/com/google/pubsublite/kafka/sink/ConfigDefs.java: -------------------------------------------------------------------------------- 1 | package com.google.pubsublite.kafka.sink; 2 | 3 | import org.apache.kafka.common.config.ConfigDef; 4 | import org.apache.kafka.common.config.ConfigDef.Importance; 5 | 6 | final class ConfigDefs { 7 | 8 | private ConfigDefs() { 9 | } 10 | 11 | static final String PROJECT_FLAG = "pubsublite.project"; 12 | static final String LOCATION_FLAG = "pubsublite.location"; 13 | static final String TOPIC_NAME_FLAG = "pubsublite.topic"; 14 | 15 | static ConfigDef config() { 16 | return new ConfigDef() 17 | .define(PROJECT_FLAG, ConfigDef.Type.STRING, Importance.HIGH, 18 | "The project containing the topic to which to publish.") 19 | .define(LOCATION_FLAG, ConfigDef.Type.STRING, Importance.HIGH, 20 | "The cloud zone (like europe-south7-q) containing the topic to which to publish.") 21 | .define(TOPIC_NAME_FLAG, ConfigDef.Type.STRING, Importance.HIGH, 22 | "The name of the topic to which to publish."); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/providers/StandardSqlSink.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql.providers; 2 | 3 | import com.google.cloud.pubsub.sql.MakePtransform; 4 | import org.apache.beam.sdk.extensions.sql.SqlTransform; 5 | import org.apache.beam.sdk.transforms.PTransform; 6 | import org.apache.beam.sdk.values.PCollection; 7 | import org.apache.beam.sdk.values.Row; 8 | 9 | public interface StandardSqlSink extends StandardSink { 10 | 11 | /** 12 | * A 
statement transforming from the standard schema to the native schema if needed. The empty 13 | * string if no statement is required. The provided table is PCOLLECTION. 14 | */ 15 | default String query() { 16 | return ""; 17 | } 18 | 19 | @Override 20 | default PTransform<PCollection<Row>, PCollection<Row>> transform() { 21 | return MakePtransform.from(rows -> { 22 | if (!query().isEmpty()) { 23 | rows = rows.apply(SqlTransform.query(query())); 24 | } 25 | return rows; 26 | }, "StandardSqlSink Transform"); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /load-test-framework/go_src/internal/flow_control/flow_controller.go: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package flow_control 18 | 19 | type FlowController interface { 20 | // Request to start another operation, returns the channel to wait on. 21 | // A value received on the channel is the number of allowed operations. 22 | Start() <-chan int 23 | // Inform the flow controller that an operation has finished 24 | InformFinished(wasSuccessful bool) 25 | } 26 | -------------------------------------------------------------------------------- /.kokoro/trampoline.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2025 Google LLC 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | set -eo pipefail 16 | # Always run the cleanup script, regardless of the success of bouncing into 17 | # the container. 18 | function cleanup() { 19 | chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh 20 | ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh 21 | echo "cleanup"; 22 | } 23 | trap cleanup EXIT 24 | 25 | $(dirname $0)/populate-secrets.sh # Secret Manager secrets. 26 | python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" -------------------------------------------------------------------------------- /udfs/hoist.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Hoists the 'event_time' field from message.data to the top level of the message object, if present. 3 | * @param {Object} message - The message object with a JSON string in message.data. 4 | * @returns {Object} The message with event_time at the top level if it existed in data.
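 * @param {Object} metadata - The metadata of the message (not used by this UDF).
 * For example, data of {"details": {"event_time": "2024-01-01T00:00:00Z"}} gains a
 * top-level event_time with that value.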
5 | */ 6 | function hoist(message, metadata) { 7 | // Parse the message 8 | const data = JSON.parse(message.data); 9 | 10 | // Helper function to recursively search for 'event_time' 11 | function findEventTime(obj) { 12 | if (obj && typeof obj === 'object') { 13 | for (const key in obj) { 14 | if (key === 'event_time') { 15 | data['event_time'] = obj[key]; 16 | } else if (typeof obj[key] === 'object') { 17 | findEventTime(obj[key]); 18 | } 19 | } 20 | } 21 | } 22 | 23 | findEventTime(data); 24 | 25 | // Update the message with new data 26 | message.data = JSON.stringify(data); 27 | 28 | return message; 29 | } 30 | -------------------------------------------------------------------------------- /udfs/remove_whitespaces.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to remove extra whitespaces from fields in a message. 3 | * 4 | * @param {Object} message - The message containing fields to clean. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The message with cleaned fields. 7 | */ 8 | function remove_whitespaces(message, metadata) { 9 | // Parse the message 10 | const data = JSON.parse(message.data); 11 | 12 | // Function to clean whitespaces from a value 13 | function cleanWhitespace(value) { 14 | if (typeof value === 'string') { 15 | // Replace multiple spaces with single space and trim 16 | return value.replace(/\s+/g, ' ').trim(); 17 | } 18 | return value; 19 | } 20 | 21 | // Clean whitespaces from all string fields 22 | Object.keys(data).forEach(key => { 23 | data[key] = cleanWhitespace(data[key]); 24 | }); 25 | 26 | // Update the message with cleaned data 27 | message.data = JSON.stringify(data); 28 | 29 | return message; 30 | } 31 | -------------------------------------------------------------------------------- /udfs/upgrade_attribute.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to extract a field from the message attributes and add it to the message data. 3 | * 4 | * @param {Object} message - The message to modify. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The message with the extracted field added to data. 7 | */ 8 | function upgrade_attribute(message, metadata) { 9 | // Parse the message 10 | const data = JSON.parse(message.data); 11 | const attributes = message.attributes; 12 | 13 | // Define the field to extract from the attributes 14 | const fieldToExtract = 'city'; 15 | 16 | // Check if attributes exist and field exists in attributes 17 | if (attributes && attributes[fieldToExtract] != null) { 18 | // Add field to data 19 | data[fieldToExtract] = attributes[fieldToExtract]; 20 | 21 | // Remove field from attributes 22 | delete attributes[fieldToExtract]; 23 | } 24 | 25 | // Update the message with modified data 26 | message.data = JSON.stringify(data); 27 | 28 | return message; 29 | } 30 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/main/java/com/google/pubsub/flink/internal/source/split/SubscriptionSplitState.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.pubsub.flink.internal.source.split; 17 | 18 | public final class SubscriptionSplitState { 19 | private final SubscriptionSplit split; 20 | 21 | public SubscriptionSplitState(SubscriptionSplit split) { 22 | this.split = split; 23 | } 24 | 25 | public SubscriptionSplit getSplit() { 26 | return split; 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/clients/flow_control/FlowController.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.clients.flow_control; 18 | 19 | public interface FlowController { 20 | /** 21 | * Request starting a flow controlled action, block until allowed. Return the number of requests 22 | * allowed. 23 | */ 24 | int requestStart(); 25 | 26 | /** Inform the FlowController that an action has finished. */ 27 | void informFinished(boolean wasSuccessful); 28 | } 29 | -------------------------------------------------------------------------------- /udfs/derive_field.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to derive a composite field from existing fields in the message data. 3 | * 4 | * @param {Object} message - The message containing the fields to combine. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The message with the new composite field. 
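 * For example, with field1 = "a", field2 = "" and field3 = "c", composite_field becomes "a_c".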
7 | */ 8 | function derive_field(message, metadata) { 9 | // Parse the message 10 | const data = JSON.parse(message.data); 11 | 12 | // Define the fields to combine and the new field name 13 | const fieldsToCombine = ['field1', 'field2', 'field3']; 14 | const newFieldName = 'composite_field'; 15 | 16 | // Create composite field by combining values - custom logic goes here 17 | const compositeValue = fieldsToCombine 18 | .map(field => data[field] || '') 19 | .filter(value => value !== '') 20 | .join('_'); 21 | 22 | // Add the composite field to the data 23 | data[newFieldName] = compositeValue; 24 | 25 | // Update the message with new data 26 | message.data = JSON.stringify(data); 27 | 28 | return message; 29 | } 30 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/main/java/com/google/pubsub/flink/internal/sink/FlushablePublisher.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.pubsub.flink.internal.sink; 17 | 18 | import com.google.pubsub.v1.PubsubMessage; 19 | import java.io.Flushable; 20 | 21 | /** A publisher that waits for all outstanding publishes to complete when flushed. */ 22 | public interface FlushablePublisher extends Flushable { 23 | 24 | void publish(PubsubMessage message) throws InterruptedException; 25 | } 26 | -------------------------------------------------------------------------------- /udfs/filter_field_regex.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to filter a field in the message data using a regular expression. 3 | * 4 | * @param {Object} message - The message containing the field to filter. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The message with filtered field. 
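 * For example, a field1 of "abc123" is kept, while "abc-123" fails the pattern and is deleted.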
7 | */ 8 | function filter_field_regex(message, metadata) { 9 | // Parse the message 10 | const data = JSON.parse(message.data); 11 | 12 | // Define the field to filter and the regex pattern 13 | const fieldToFilter = 'field1'; 14 | const regexPattern = /^[A-Za-z0-9]+$/; // Only allows alphanumeric characters 15 | 16 | // Check if the field exists and matches the regex pattern 17 | if (data[fieldToFilter] && typeof data[fieldToFilter] === 'string') { 18 | if (!regexPattern.test(data[fieldToFilter])) { 19 | // If the field doesn't match the pattern, remove it 20 | delete data[fieldToFilter]; 21 | } 22 | } 23 | 24 | // Update the message with filtered data 25 | message.data = JSON.stringify(data); 26 | 27 | return message; 28 | } 29 | 30 | -------------------------------------------------------------------------------- /load-test-framework/run.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import subprocess 16 | import sys 17 | 18 | if __name__ == '__main__': 19 | java_args = ['java', '-jar', 'target/driver.jar'] + sys.argv[1:] 20 | 21 | subprocess.call(['mvn', 'package']) 22 | subprocess.call(['cp', 'target/driver.jar', 'target/classes/gce/']) 23 | 24 | subprocess.call([ 25 | 'zip', '-FSr', './target/classes/gce/cps.zip', './proto', 26 | './python_src', './node_src/src', './node_src/package.json', './go_src' 27 | ]) 28 | 29 | subprocess.call(java_args) 30 | -------------------------------------------------------------------------------- /kafka-connector/src/main/java/com/google/pubsublite/kafka/sink/PubSubLiteSinkConnector.java: -------------------------------------------------------------------------------- 1 | package com.google.pubsublite.kafka.sink; 2 | 3 | import java.util.Collections; 4 | import java.util.List; 5 | import java.util.Map; 6 | import org.apache.kafka.common.config.ConfigDef; 7 | import org.apache.kafka.common.utils.AppInfoParser; 8 | import org.apache.kafka.connect.connector.Task; 9 | import org.apache.kafka.connect.sink.SinkConnector; 10 | 11 | public class PubSubLiteSinkConnector extends SinkConnector { 12 | private Map<String, String> props; 13 | 14 | @Override 15 | public String version() { 16 | return AppInfoParser.getVersion(); 17 | } 18 | 19 | @Override 20 | public void start(Map<String, String> map) { 21 | props = map; 22 | } 23 | 24 | @Override 25 | public Class<? extends Task> taskClass() { 26 | return PubSubLiteSinkTask.class; 27 | } 28 | 29 | @Override 30 | public List<Map<String, String>> taskConfigs(int i) { 31 | return Collections.nCopies(i, props); 32 | } 33 | 34 | @Override 35 | public void stop() {} 36 | 37 | @Override 38 | public ConfigDef config() { 39 | return ConfigDefs.config(); 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /udfs/rename_field.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to rename fields within a message.
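 * For example, with the mapping below, {"old_field1": 42} becomes {"new_field1": 42}.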
3 | * 4 | * @param {Object} message - The message containing fields to rename. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The message with renamed fields. 7 | */ 8 | function rename_field(message, metadata) { 9 | // Parse the message 10 | const data = JSON.parse(message.data); 11 | 12 | // Define field mapping (old field name -> new field name) 13 | const fieldMapping = { 14 | 'old_field1': 'new_field1', 15 | 'old_field2': 'new_field2', 16 | 'old_field3': 'new_field3' 17 | }; 18 | 19 | // Rename fields according to the mapping 20 | Object.entries(fieldMapping).forEach(([oldField, newField]) => { 21 | if (data[oldField] !== undefined) { 22 | // Copy value to new field name 23 | data[newField] = data[oldField]; 24 | // Remove old field 25 | delete data[oldField]; 26 | } 27 | }); 28 | 29 | // Update the message with renamed fields 30 | message.data = JSON.stringify(data); 31 | 32 | return message; 33 | } 34 | -------------------------------------------------------------------------------- /kafka-connector/src/main/java/com/google/pubsublite/kafka/source/PubSubLiteSourceConnector.java: -------------------------------------------------------------------------------- 1 | package com.google.pubsublite.kafka.source; 2 | 3 | import java.util.Collections; 4 | import java.util.List; 5 | import java.util.Map; 6 | import org.apache.kafka.common.config.ConfigDef; 7 | import org.apache.kafka.common.utils.AppInfoParser; 8 | import org.apache.kafka.connect.connector.Task; 9 | import org.apache.kafka.connect.source.SourceConnector; 10 | 11 | public class PubSubLiteSourceConnector extends SourceConnector { 12 | 13 | private Map<String, String> props; 14 | 15 | @Override 16 | public String version() { 17 | return AppInfoParser.getVersion(); 18 | } 19 | 20 | @Override 21 | public void start(Map<String, String> map) { 22 | props = map; 23 | } 24 | 25 | @Override 26 | public Class<? extends Task> taskClass() { 27 | return PubSubLiteSourceTask.class; 28 | } 29 | 30 | @Override 31 | public List<Map<String, String>> taskConfigs(int i) { 32 | return Collections.nCopies(i, props); 33 | } 34 | 35 | @Override 36 | public void stop() { 37 | } 38 | 39 | @Override 40 | public ConfigDef config() { 41 | return ConfigDefs.config(); 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /load-test-framework/node_src/src/flow_control/flow_controller.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | class FlowController { 18 | constructor() { 19 | } 20 | 21 | // Get a promise for when to start the next message. 22 | // The value of the promise is the number of allowed messages. 23 | async requestStart() { 24 | throw new Error('Unimplemented.'); 25 | } 26 | 27 | // Inform the flow controller of a completion.
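// wasSuccessful reports whether the completed operation succeeded; implementations may use it to back off.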
28 | informFinished(wasSuccessful) { 29 | throw new Error('Unimplemented.'); 30 | } 31 | } 32 | 33 | module.exports = FlowController; 34 | -------------------------------------------------------------------------------- /load-test-framework/python_src/clients/flow_control/rate_limiter_flow_controller.py: -------------------------------------------------------------------------------- 1 | from clients.flow_control import FlowController 2 | from concurrent.futures import Executor, ThreadPoolExecutor 3 | import time 4 | from threading import Condition 5 | 6 | 7 | class RateLimiterFlowController(FlowController): 8 | """ 9 | A FlowController that allows actions at a given per second rate. 10 | """ 11 | 12 | def __init__(self, per_second_rate: float): 13 | self.seconds_per_token = 1 / per_second_rate 14 | self.tokens = 0 15 | self.condition = Condition() 16 | self.executor: Executor = ThreadPoolExecutor(max_workers=1) 17 | self.executor.submit(self._token_generator) 18 | 19 | def _token_generator(self): 20 | while True: 21 | time.sleep(self.seconds_per_token) 22 | with self.condition: 23 | self.tokens += 1 24 | self.condition.notify() 25 | 26 | def request_start(self): 27 | with self.condition: 28 | while self.tokens < 1: 29 | self.condition.wait() 30 | self.tokens -= 1 31 | 32 | def inform_finished(self, was_successful: bool): 33 | pass 34 | -------------------------------------------------------------------------------- /load-test-framework/node_src/src/loadtest_service.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | let PROTO_PATH = __dirname + '/../../proto/loadtest.proto'; 18 | let grpc = require('grpc'); 19 | let protoLoader = require('@grpc/proto-loader'); 20 | var packageDefinition = protoLoader.loadSync( 21 | PROTO_PATH, 22 | { 23 | keepCase: true, 24 | longs: String, 25 | enums: String, 26 | defaults: true, 27 | oneofs: true 28 | }); 29 | 30 | let serviceDescriptor = grpc.loadPackageDefinition(packageDefinition); 31 | 32 | module.exports = serviceDescriptor.google.pubsub.loadtest; 33 | -------------------------------------------------------------------------------- /udfs/flatten_json.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to flatten nested JSON objects in a message. 3 | * 4 | * @param {Object} message - The message containing nested JSON to flatten. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The message with flattened JSON. 7 | */ 8 | function flatten_json(message, metadata) { 9 | 10 | // Parse the message 11 | const data = JSON.parse(message.data); 12 | 13 | // Function to flatten nested objects 14 | function flatten(obj, prefix = '') { 15 | return Object.keys(obj).reduce((acc, key) => { 16 | const pre = prefix.length ? 
`${prefix}_` : ''; 17 | 18 | if (typeof obj[key] === 'object' && obj[key] !== null && !Array.isArray(obj[key])) { 19 | Object.assign(acc, flatten(obj[key], `${pre}${key}`)); 20 | } else { 21 | acc[`${pre}${key}`] = obj[key]; 22 | } 23 | 24 | return acc; 25 | }, {}); 26 | } 27 | 28 | // Flatten the data 29 | const flattenedData = flatten(data); 30 | 31 | // Update the message with flattened data 32 | message.data = JSON.stringify(flattenedData); 33 | 34 | return message; 35 | } 36 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub-e2e-tests/src/test/java/com/google/pubsub/flink/DockerImageVersions.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.flink; 18 | 19 | /** 20 | * Utility class for defining the image names and versions of Docker containers used during the Java 21 | * tests. The names/versions are centralised here in order to make testing version updates easier, 22 | * as well as to provide a central file to use as a key when caching testing Docker files. 23 | */ 24 | public class DockerImageVersions { 25 | public static final String GOOGLE_CLOUD_PUBSUB_EMULATOR = 26 | "gcr.io/google.com/cloudsdktool/cloud-sdk:379.0.0"; 27 | } 28 | -------------------------------------------------------------------------------- /load-test-framework/src/main/resources/gce/cps-gcloud-java-publisher_startup_script.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ####################################### 4 | # Query GCE for a provided metadata field. 5 | # See https://developers.google.com/compute/docs/metadata 6 | # Globals: 7 | # None 8 | # Arguments: 9 | # $1: The path to the metadata field to retrieve 10 | # Returns: 11 | # The value stored at the metadata field 12 | ####################################### 13 | function metadata() { 14 | curl --silent --show-error --header 'Metadata-Flavor: Google' \ 15 | "http://metadata/computeMetadata/v1/${1}"; 16 | } 17 | 18 | readonly TMP="$(mktemp -d)" 19 | readonly BUCKET=$(metadata instance/attributes/bucket) 20 | 21 | [[ "${TMP}" != "" ]] || error mktemp failed 22 | 23 | # Download the loadtest binary to this machine and install Java 8. 24 | /usr/bin/apt-get update 25 | /usr/bin/apt-get install -y openjdk-8-jre-headless & PIDAPT=$! 26 | /snap/bin/gsutil cp "gs://${BUCKET}/driver.jar" "${TMP}" 27 | 28 | wait $PIDAPT 29 | 30 | ulimit -n 32768 31 | 32 | # Limit the jvm to 2/3 available memory. 
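# For example, a MemTotal of 12000000 kB yields MEMJAVA=8000, i.e. the JVM runs with -Xmx8000m.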
33 | MEM="$(cat /proc/meminfo | head -n 1 | awk '{print $2}')" 34 | let "MEMJAVA = (MEM * 2 / 3) / 1000" 35 | 36 | java -Xmx${MEMJAVA}m -cp ${TMP}/driver.jar com.google.pubsub.clients.gcloud.CPSPublisherTask 37 | -------------------------------------------------------------------------------- /load-test-framework/src/main/resources/gce/cps-gcloud-java-subscriber_startup_script.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ####################################### 4 | # Query GCE for a provided metadata field. 5 | # See https://developers.google.com/compute/docs/metadata 6 | # Globals: 7 | # None 8 | # Arguments: 9 | # $1: The path to the metadata field to retrieve 10 | # Returns: 11 | # The value stored at the metadata field 12 | ####################################### 13 | function metadata() { 14 | curl --silent --show-error --header 'Metadata-Flavor: Google' \ 15 | "http://metadata/computeMetadata/v1/${1}"; 16 | } 17 | 18 | readonly TMP="$(mktemp -d)" 19 | readonly BUCKET=$(metadata instance/attributes/bucket) 20 | 21 | [[ "${TMP}" != "" ]] || error mktemp failed 22 | 23 | # Download the loadtest binary to this machine and install Java 8. 24 | /usr/bin/apt-get update 25 | /usr/bin/apt-get install -y openjdk-8-jre-headless & PIDAPT=$! 26 | /snap/bin/gsutil cp "gs://${BUCKET}/driver.jar" "${TMP}" 27 | 28 | wait $PIDAPT 29 | 30 | ulimit -n 32768 31 | 32 | # Limit the jvm to 2/3 available memory. 33 | MEM="$(cat /proc/meminfo | head -n 1 | awk '{print $2}')" 34 | let "MEMJAVA = (MEM * 2 / 3) / 1000" 35 | 36 | java -Xmx${MEMJAVA}m -cp ${TMP}/driver.jar com.google.pubsub.clients.gcloud.CPSSubscriberTask 37 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/main/proto/split.proto: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | syntax = "proto3"; 17 | 18 | package google.pubsub.flink; 19 | 20 | option java_multiple_files = true; 21 | option java_package = "com.google.pubsub.flink.proto"; 22 | 23 | message SubscriptionSplitProto { 24 | // The full Pub/Sub subscription path for this split. 25 | string subscription = 1; 26 | // Unique identifier for this subscription's split. 27 | string uid = 2; 28 | } 29 | 30 | message PubSubEnumeratorCheckpoint { 31 | message Assignment { 32 | int32 subtask = 1; 33 | SubscriptionSplitProto split = 2; 34 | } 35 | repeated Assignment assignments = 1; 36 | } 37 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/flic/controllers/resource_controllers/ComputeResourceController.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 
3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.flic.controllers.resource_controllers; 18 | 19 | import com.google.common.util.concurrent.ListenableFuture; 20 | import com.google.pubsub.flic.controllers.Client; 21 | import java.util.List; 22 | import java.util.concurrent.ScheduledExecutorService; 23 | 24 | public abstract class ComputeResourceController extends ResourceController { 25 | ComputeResourceController(ScheduledExecutorService executor) { 26 | super(executor); 27 | } 28 | 29 | public abstract ListenableFuture> startClients(); 30 | } 31 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/clients/common/LoadtestTask.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.clients.common; 18 | 19 | import com.google.pubsub.flic.common.LoadtestProto; 20 | 21 | /** 22 | * Each task is responsible for triggering its workers when it is run. It controls its own 23 | * parallelism. 24 | */ 25 | public interface LoadtestTask { 26 | // Start the task 27 | void start(); 28 | 29 | // Stop the task 30 | void stop(); 31 | 32 | // A factory for constructing a task. 33 | public interface Factory { 34 | LoadtestTask newTask( 35 | LoadtestProto.StartRequest request, MetricsHandler handler, int workerCount); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/clients/common/LogEveryN.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 
15 | */ 16 | 17 | package com.google.pubsub.clients.common; 18 | 19 | import java.util.concurrent.atomic.AtomicInteger; 20 | import org.slf4j.Logger; 21 | 22 | public class LogEveryN { 23 | private final Logger logger; 24 | private final AtomicInteger counter = new AtomicInteger(0); 25 | private final int n; 26 | 27 | public LogEveryN(Logger logger, int n) { 28 | this.logger = logger; 29 | this.n = n; 30 | } 31 | 32 | public void error(String toLog) { 33 | int previous = counter.getAndUpdate(existing -> (existing + 1) % n); 34 | if (previous == 0) { 35 | logger.error(toLog); 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /load-test-framework/src/main/resources/gce/cps-gcloud-node-subscriber_startup_script.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ####################################### 4 | # Query GCE for a provided metadata field. 5 | # See https://developers.google.com/compute/docs/metadata 6 | # Globals: 7 | # None 8 | # Arguments: 9 | # $1: The path to the metadata field to retrieve 10 | # Returns: 11 | # The value stored at the metadata field 12 | ####################################### 13 | function metadata() { 14 | curl --silent --show-error --header 'Metadata-Flavor: Google' \ 15 | "http://metadata/computeMetadata/v1/${1}"; 16 | } 17 | 18 | readonly TMP="$(mktemp -d)" 19 | readonly BUCKET=$(metadata instance/attributes/bucket) 20 | 21 | [[ "${TMP}" != "" ]] || error mktemp failed 22 | 23 | # Add the correct repo for nodejs 24 | curl -sL https://deb.nodesource.com/setup_11.x | sudo -E bash - 25 | 26 | # Download the loadtest archive to this machine and install unzip and Node.js. 27 | /usr/bin/apt-get update 28 | /usr/bin/apt-get install -y unzip nodejs 29 | /snap/bin/gsutil cp "gs://${BUCKET}/cps.zip" "${TMP}" 30 | 31 | cd ${TMP} 32 | unzip cps.zip 33 | cd node_src 34 | npm install 35 | # increase heap max to 16GB to prevent oom on large tests. 36 | # subscriber already limits messages outstanding to 500MB per core. 37 | node --max-old-space-size=16000 src/main.js --publisher=false 38 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/flic/common/StatsUtils.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.flic.common; 18 | 19 | import com.google.protobuf.Duration; 20 | import com.google.protobuf.util.Durations; 21 | 22 | public class StatsUtils { 23 | /** Returns the average QPS. */ 24 | public static double getQPS(long messageCount, Duration loadtestDuration) { 25 | return messageCount / (double) Durations.toSeconds(loadtestDuration); 26 | } 27 | 28 | /** Returns the average throughput in MB/s.
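For example, 1,000,000 messages of 1,000 bytes over a 100-second load test is 10,000 QPS and therefore 10.0 MB/s.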
*/ 29 | public static double getThroughput( 30 | long messageCount, Duration loadtestDuration, long messageSize) { 31 | return getQPS(messageCount, loadtestDuration) * messageSize / 1000000.0; 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /load-test-framework/src/main/resources/gce/cps-gcloud-go-publisher_startup_script.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ####################################### 4 | # Query GCE for a provided metadata field. 5 | # See https://developers.google.com/compute/docs/metadata 6 | # Globals: 7 | # None 8 | # Arguments: 9 | # $1: The path to the metadata field to retrieve 10 | # Returns: 11 | # The value stored at the metadata field 12 | ####################################### 13 | function metadata() { 14 | curl --silent --show-error --header 'Metadata-Flavor: Google' \ 15 | "http://metadata/computeMetadata/v1/${1}"; 16 | } 17 | 18 | readonly TMP="$(mktemp -d)" 19 | readonly BUCKET=$(metadata instance/attributes/bucket) 20 | 21 | [[ "${TMP}" != "" ]] || error mktemp failed 22 | 23 | # Download the loadtest archive to this machine and install unzip and gcc. 24 | /usr/bin/apt-get update 25 | /usr/bin/apt-get install -y unzip gcc 26 | /snap/bin/gsutil cp "gs://${BUCKET}/cps.zip" "${TMP}" 27 | 28 | cd ${TMP} 29 | 30 | # install go 31 | curl https://dl.google.com/go/go1.11.5.linux-amd64.tar.gz -o go.tar.gz 32 | tar -C /usr/local -xzf go.tar.gz 33 | export PATH=$PATH:/usr/local/go/bin 34 | mkdir gopath 35 | export GOPATH="${TMP}/gopath" 36 | mkdir gocache 37 | export GOCACHE="${TMP}/gocache" 38 | 39 | # unpack loadtest 40 | unzip cps.zip 41 | cd go_src/cmd 42 | 43 | go run main.go --publisher=true 44 | -------------------------------------------------------------------------------- /load-test-framework/src/main/resources/gce/cps-gcloud-go-subscriber_startup_script.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ####################################### 4 | # Query GCE for a provided metadata field. 5 | # See https://developers.google.com/compute/docs/metadata 6 | # Globals: 7 | # None 8 | # Arguments: 9 | # $1: The path to the metadata field to retrieve 10 | # Returns: 11 | # The value stored at the metadata field 12 | ####################################### 13 | function metadata() { 14 | curl --silent --show-error --header 'Metadata-Flavor: Google' \ 15 | "http://metadata/computeMetadata/v1/${1}"; 16 | } 17 | 18 | readonly TMP="$(mktemp -d)" 19 | readonly BUCKET=$(metadata instance/attributes/bucket) 20 | 21 | [[ "${TMP}" != "" ]] || error mktemp failed 22 | 23 | # Download the loadtest archive to this machine and install unzip and gcc. 
24 | /usr/bin/apt-get update 25 | /usr/bin/apt-get install -y unzip gcc 26 | /snap/bin/gsutil cp "gs://${BUCKET}/cps.zip" "${TMP}" 27 | 28 | cd ${TMP} 29 | 30 | # install go 31 | curl https://dl.google.com/go/go1.11.5.linux-amd64.tar.gz -o go.tar.gz 32 | tar -C /usr/local -xzf go.tar.gz 33 | export PATH=$PATH:/usr/local/go/bin 34 | mkdir gopath 35 | export GOPATH="${TMP}/gopath" 36 | mkdir gocache 37 | export GOCACHE="${TMP}/gocache" 38 | 39 | # unpack loadtest 40 | unzip cps.zip 41 | cd go_src/cmd 42 | 43 | go run main.go --publisher=false 44 | -------------------------------------------------------------------------------- /sql-streaming-copier/metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Streaming SQL Copier", 3 | "description": "An Apache Beam streaming pipeline that copies data from a streaming source to a streaming sink in a standard format, without parsing the payload.", 4 | "parameters": [ 5 | { 6 | "name": "sourceType", 7 | "label": "sourceType", 8 | "helpText": "Type of the source. Valid types are: [pubsub, pubsublite, kafka]" 9 | }, 10 | { 11 | "name": "sourceLocation", 12 | "label": "sourceLocation", 13 | "helpText": "Location within the source to read from. See README." 14 | }, 15 | { 16 | "name": "sourceOptions", 17 | "label": "sourceOptions", 18 | "helpText": "Additional options for configuring the source. See README.", 19 | "isOptional": true 20 | }, 21 | { 22 | "name": "sinkType", 23 | "label": "sinkType", 24 | "helpText": "Type of the sink. Valid types are: [pubsub, pubsublite, kafka, bigquery]" 25 | }, 26 | { 27 | "name": "sinkLocation", 28 | "label": "sinkLocation", 29 | "helpText": "Location within the sink to write to. See README." 30 | }, 31 | { 32 | "name": "sinkOptions", 33 | "label": "sinkOptions", 34 | "helpText": "Additional options for configuring the sink. See README.", 35 | "isOptional": true 36 | } 37 | ] 38 | } 39 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/clients/flow_control/RateLimiterFlowController.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.clients.flow_control; 18 | 19 | import com.google.common.util.concurrent.RateLimiter; 20 | 21 | /** A FlowController that delegates to a guava RateLimiter for a static allowed rate. 
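For example, new RateLimiterFlowController(500.0) issues roughly 500 permits per second, and each requestStart() call blocks until a permit is available.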
*/ 22 | public class RateLimiterFlowController implements FlowController { 23 | private final RateLimiter limiter; 24 | 25 | public RateLimiterFlowController(double rate) { 26 | this.limiter = RateLimiter.create(rate); 27 | } 28 | 29 | @Override 30 | public int requestStart() { 31 | limiter.acquire(); 32 | return 1; 33 | } 34 | 35 | @Override 36 | public void informFinished(boolean wasSuccessful) {} 37 | } 38 | -------------------------------------------------------------------------------- /load-test-framework/src/main/resources/gce/cps-gcloud-node-publisher_startup_script.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ####################################### 4 | # Query GCE for a provided metadata field. 5 | # See https://developers.google.com/compute/docs/metadata 6 | # Globals: 7 | # None 8 | # Arguments: 9 | # $1: The path to the metadata field to retrieve 10 | # Returns: 11 | # The value stored at the metadata field 12 | ####################################### 13 | function metadata() { 14 | curl --silent --show-error --header 'Metadata-Flavor: Google' \ 15 | "http://metadata/computeMetadata/v1/${1}"; 16 | } 17 | 18 | readonly TMP="$(mktemp -d)" 19 | readonly BUCKET=$(metadata instance/attributes/bucket) 20 | 21 | [[ "${TMP}" != "" ]] || error mktemp failed 22 | 23 | # Add the correct repo for nodejs 24 | curl -sL https://deb.nodesource.com/setup_11.x | sudo -E bash - 25 | 26 | # Download the loadtest archive to this machine and install unzip and Node.js. 27 | /usr/bin/apt-get update 28 | /usr/bin/apt-get install -y unzip nodejs 29 | /snap/bin/gsutil cp "gs://${BUCKET}/cps.zip" "${TMP}" 30 | 31 | cd ${TMP} 32 | unzip cps.zip 33 | cd node_src 34 | npm install 35 | # increase heap max to 16GB to prevent oom on large tests. 36 | # publisher already dynamically limits messages outstanding to 37 | # the amount needed for the next 2 seconds. 38 | node --max-old-space-size=16000 src/main.js --publisher=true 39 | -------------------------------------------------------------------------------- /.kokoro/coerce_logs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2025 Google LLC 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | # This script finds and moves sponge logs so that they can be found by placer 17 | # and are not flagged as flaky by sponge. 18 | 19 | set -eo pipefail 20 | 21 | ## Get the directory of the build script 22 | scriptDir=$(realpath $(dirname "${BASH_SOURCE[0]}")) 23 | ## cd to the parent directory, i.e. the root of the git repo 24 | cd ${scriptDir}/.. 25 | 26 | job=$(basename ${KOKORO_JOB_NAME}) 27 | 28 | echo "coercing sponge logs..." 29 | for xml in `find . 
-name "*-sponge_log.xml"` 30 | do 31 | class=$(basename ${xml} | cut -d- -f2) 32 | dir=$(dirname ${xml})/${job}/${class} 33 | text=$(dirname ${xml})/${class}-sponge_log.txt 34 | mkdir -p ${dir} 35 | mv ${xml} ${dir}/sponge_log.xml 36 | mv ${text} ${dir}/sponge_log.txt 37 | done -------------------------------------------------------------------------------- /kafka-connector/src/main/java/com/google/pubsub/kafka/source/CloudPubSubSubscriber.java: -------------------------------------------------------------------------------- 1 | // Copyright 2016 Google Inc. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | // 15 | //////////////////////////////////////////////////////////////////////////////// 16 | package com.google.pubsub.kafka.source; 17 | 18 | import com.google.api.core.ApiFuture; 19 | import com.google.protobuf.Empty; 20 | import com.google.pubsub.v1.ReceivedMessage; 21 | import java.util.Collection; 22 | import java.util.List; 23 | 24 | /** 25 | * An interface for clients that want to subscribe to messages from Google Cloud Pub/Sub. 27 | */ 28 | public interface CloudPubSubSubscriber extends AutoCloseable { 29 | ApiFuture<List<ReceivedMessage>> pull(); 30 | 31 | ApiFuture<Empty> ackMessages(Collection<String> ackIds); 32 | 33 | void close(); 34 | } 35 | -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/Rows.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql; 2 | 3 | import org.apache.beam.sdk.schemas.Schema; 4 | import org.apache.beam.sdk.schemas.Schema.FieldType; 5 | 6 | public class Rows { 7 | 8 | private Rows() { 9 | } 10 | 11 | public static final String MESSAGE_KEY_FIELD = "message_key"; 12 | public static final String EVENT_TIMESTAMP_FIELD = "event_timestamp"; 13 | public static final String ATTRIBUTES_FIELD = "attributes"; 14 | public static final String PAYLOAD_FIELD = "payload"; 15 | 16 | public static final String ATTRIBUTES_KEY_FIELD = "key"; 17 | public static final String ATTRIBUTES_VALUES_FIELD = "values"; 18 | 19 | public static final Schema ATTRIBUTES_ENTRY_SCHEMA = 20 | Schema.builder() 21 | .addStringField(ATTRIBUTES_KEY_FIELD) 22 | .addArrayField(ATTRIBUTES_VALUES_FIELD, Schema.FieldType.BYTES) 23 | .build(); 24 | public static final Schema.FieldType ATTRIBUTES_FIELD_TYPE = 25 | Schema.FieldType.array(Schema.FieldType.row(ATTRIBUTES_ENTRY_SCHEMA)); 26 | public static final Schema STANDARD_SCHEMA = 27 | Schema.builder() 28 | .addByteArrayField(PAYLOAD_FIELD) 29 | .addByteArrayField(MESSAGE_KEY_FIELD) 30 | .addDateTimeField(EVENT_TIMESTAMP_FIELD) 31 | .addArrayField(ATTRIBUTES_FIELD, Schema.FieldType.row(ATTRIBUTES_ENTRY_SCHEMA)) 32 | .build(); 33 | } 34 | -------------------------------------------------------------------------------- /load-test-framework/src/main/resources/gce/cps-gcloud-python-publisher_startup_script.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ####################################### 4 | # Query GCE for a provided metadata field. 5 | # See https://developers.google.com/compute/docs/metadata 6 | # Globals: 7 | # None 8 | # Arguments: 9 | # $1: The path to the metadata field to retrieve 10 | # Returns: 11 | # The value stored at the metadata field 12 | ####################################### 13 | function metadata() { 14 | curl --silent --show-error --header 'Metadata-Flavor: Google' \ 15 | "http://metadata/computeMetadata/v1/${1}"; 16 | } 17 | 18 | readonly TMP="$(mktemp -d)" 19 | readonly BUCKET=$(metadata instance/attributes/bucket) 20 | 21 | [[ "${TMP}" != "" ]] || { echo "mktemp failed" >&2; exit 1; } 22 | 23 | # Download the loadtest binary to this machine and install Python. 24 | apt-get update 25 | apt-get install -y unzip 26 | gsutil cp "gs://${BUCKET}/cps.zip" "${TMP}" 27 | 28 | # set up miniconda environment 29 | cd ${TMP} 30 | curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -o miniconda.sh 31 | chmod 755 miniconda.sh 32 | ./miniconda.sh -b -p /miniconda 33 | source /miniconda/bin/activate 34 | conda install -y python=3.7 pip 35 | 36 | # unpack loadtest and install requirements 37 | unzip cps.zip 38 | cd python_src 39 | python3.7 -m pip install -r requirements.txt 40 | 41 | # run loadtest server 42 | echo "running server..." 43 | python3.7 -m clients.cps_publisher_task & 44 | -------------------------------------------------------------------------------- /load-test-framework/go_src/internal/flow_control/rate_limiter_flow_controller.go: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package flow_control 18 | 19 | import ( 20 | "time" 21 | ) 22 | 23 | type rateLimiterFlowController struct { 24 | startChan <-chan int 25 | } 26 | 27 | func (fc *rateLimiterFlowController) Start() <-chan int { 28 | return fc.startChan 29 | } 30 | 31 | func (fc *rateLimiterFlowController) InformFinished(wasSuccessful bool) {} 32 | 33 | func NewRateLimiterFlowController(requestsPerSecond float32) FlowController { 34 | secondsPerRequest := 1 / requestsPerSecond 35 | ticker := time.NewTicker(time.Duration(float64(secondsPerRequest) * float64(time.Second))) 36 | startChan := make(chan int) 37 | go func() { 38 | for range ticker.C { 39 | startChan <- 1 40 | } 41 | }() 42 | return &rateLimiterFlowController{ 43 | startChan: startChan, 44 | } 45 | }
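// Why the conversion above scales before truncating (added note): a
// time.Duration is an integer nanosecond count, so the previous expression
// time.Duration(secondsPerRequest) * time.Second dropped the fraction first.
// For any rate above 1/s, e.g. requestsPerSecond = 4, secondsPerRequest is
// 0.25 and time.Duration(0.25) is 0, and time.NewTicker panics on a
// non-positive interval. Converting float64(secondsPerRequest) *
// float64(time.Second) keeps sub-second tick periods intact.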
46 | -------------------------------------------------------------------------------- /load-test-framework/src/main/resources/gce/cps-gcloud-python-subscriber_startup_script.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ####################################### 4 | # Query GCE for a provided metadata field. 5 | # See https://developers.google.com/compute/docs/metadata 6 | # Globals: 7 | # None 8 | # Arguments: 9 | # $1: The path to the metadata field to retrieve 10 | # Returns: 11 | # The value stored at the metadata field 12 | ####################################### 13 | function metadata() { 14 | curl --silent --show-error --header 'Metadata-Flavor: Google' \ 15 | "http://metadata/computeMetadata/v1/${1}"; 16 | } 17 | 18 | readonly TMP="$(mktemp -d)" 19 | readonly BUCKET=$(metadata instance/attributes/bucket) 20 | 21 | [[ "${TMP}" != "" ]] || { echo "mktemp failed" >&2; exit 1; } 22 | 23 | # Download the loadtest binary to this machine and install Python. 24 | apt-get update 25 | apt-get install -y unzip 26 | gsutil cp "gs://${BUCKET}/cps.zip" "${TMP}" 27 | 28 | # set up miniconda environment 29 | cd ${TMP} 30 | curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -o miniconda.sh 31 | chmod 755 miniconda.sh 32 | ./miniconda.sh -b -p /miniconda 33 | source /miniconda/bin/activate 34 | conda install -y python=3.7 pip 35 | 36 | # unpack loadtest and install requirements 37 | unzip cps.zip 38 | cd python_src 39 | python3.7 -m pip install -r requirements.txt 40 | 41 | # run loadtest server 42 | echo "running server..." 43 | python3.7 -m clients.cps_subscriber_task & 44 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/test/java/com/google/pubsub/flink/internal/source/split/SubscriptionSplitSerializerTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.pubsub.flink.internal.source.split; 17 | 18 | import static com.google.common.truth.Truth.assertThat; 19 | 20 | import com.google.pubsub.v1.ProjectSubscriptionName; 21 | import org.junit.Test; 22 | 23 | public class SubscriptionSplitSerializerTest { 24 | 25 | @Test 26 | public void testSerialization() throws Exception { 27 | SubscriptionSplit split = 28 | SubscriptionSplit.create(ProjectSubscriptionName.of("project", "subscription")); 29 | SubscriptionSplitSerializer serializer = new SubscriptionSplitSerializer(); 30 | assertThat(serializer.deserialize(serializer.getVersion(), serializer.serialize(split))) 31 | .isEqualTo(split); 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/flic/output/ResultsOutput.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.flic.output; 18 | 19 | import com.google.pubsub.flic.common.LatencyTracker; 20 | import com.google.pubsub.flic.controllers.ClientType; 21 | import com.google.pubsub.flic.controllers.test_parameters.TestParameters; 22 | import java.util.List; 23 | 24 | public interface ResultsOutput { 25 | class TrackedResult { 26 | public TestParameters testParameters; 27 | public ClientType type; 28 | public LatencyTracker tracker; 29 | 30 | public TrackedResult(TestParameters testParameters, ClientType type, LatencyTracker tracker) { 31 | this.testParameters = testParameters; 32 | this.type = type; 33 | this.tracker = tracker; 34 | } 35 | } 36 | 37 | void outputStats(List<TrackedResult> results); 38 | } 39 | -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/TemplateOptions.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql; 2 | 3 | import java.util.Map; 4 | import javax.annotation.Nullable; 5 | import org.apache.beam.sdk.options.Description; 6 | import org.apache.beam.sdk.options.Validation.Required; 7 | 8 | public interface TemplateOptions extends SqlStreamingOptions { 9 | 10 | @Description("Type of the source. Valid types are: [pubsub, pubsublite, kafka]") 11 | @Required() 12 | String getSourceType(); 13 | 14 | void setSourceType(String type); 15 | 16 | @Description("Location within the source to read from. For example, a Cloud Pub/Sub topic.") 17 | @Required() 18 | String getSourceLocation(); 19 | 20 | void setSourceLocation(String location); 21 | 22 | @Description("Additional options to pass to the source.") 23 | @Nullable 24 | Map<String, String> getSourceOptions(); 25 | 26 | void setSourceOptions(Map<String, String> options); 27 | 28 | @Description("Type of the sink. Valid types are: [pubsub, pubsublite, kafka, bigquery]") 29 | @Required() 30 | String getSinkType(); 31 | 32 | void setSinkType(String type); 33 | 34 | @Description("Location within the sink to write to.") 35 | @Required() 36 | String getSinkLocation(); 37 | 38 | void setSinkLocation(String location); 39 | 40 | @Description("Additional options to pass to the sink.") 41 | @Nullable 42 | Map<String, String> getSinkOptions(); 43 | 44 | void setSinkOptions(Map<String, String> options); 45 | }
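// For orientation, an illustrative launch with these options (added sketch;
// all resource names are hypothetical):
//   --sourceType=pubsub \
//   --sourceLocation=projects/my-project/topics/my-topic \
//   --sinkType=bigquery \
//   --sinkLocation=my-project:my_dataset.my_table
// sourceOptions/sinkOptions are optional maps passed through as additional
// properties to the corresponding Beam SQL tables.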
46 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/main/java/com/google/pubsub/flink/internal/source/reader/NotifyingPullSubscriber.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.pubsub.flink.internal.source.reader; 17 | 18 | import com.google.api.core.ApiFuture; 19 | import com.google.common.base.Optional; 20 | import com.google.pubsub.v1.PubsubMessage; 21 | 22 | public interface NotifyingPullSubscriber { 23 | /** Returns an {@link ApiFuture} that will be completed when messages are available to pull. */ 24 | ApiFuture<Void> notifyDataAvailable(); 25 | 26 | /** Pulls a message if one is available. */ 27 | Optional<PubsubMessage> pullMessage() throws Throwable; 28 | 29 | /** 30 | * If there is an outstanding {@link ApiFuture} to notify when data is available, this method can 31 | * be used to interrupt the notification. 32 | */ 33 | void interruptNotify(); 34 | 35 | void shutdown(); 36 | } 37 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/main/java/com/google/pubsub/flink/internal/source/enumerator/PubSubCheckpointSerializer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.pubsub.flink.internal.source.enumerator; 17 | 18 | import com.google.pubsub.flink.proto.PubSubEnumeratorCheckpoint; 19 | import java.io.IOException; 20 | import org.apache.flink.core.io.SimpleVersionedSerializer; 21 | 22 | public class PubSubCheckpointSerializer 23 | implements SimpleVersionedSerializer<PubSubEnumeratorCheckpoint> { 24 | @Override 25 | public int getVersion() { 26 | return 0; 27 | } 28 | 29 | @Override 30 | public byte[] serialize(PubSubEnumeratorCheckpoint message) { 31 | return message.toByteArray(); 32 | } 33 | 34 | @Override 35 | public PubSubEnumeratorCheckpoint deserialize(int i, byte[] bytes) throws IOException { 36 | return PubSubEnumeratorCheckpoint.parseFrom(bytes); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/main/java/com/google/pubsub/flink/internal/source/split/SubscriptionSplitSerializer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.pubsub.flink.internal.source.split; 17 | 18 | import com.google.pubsub.flink.proto.SubscriptionSplitProto; 19 | import java.io.IOException; 20 | import org.apache.flink.core.io.SimpleVersionedSerializer; 21 | 22 | public final class SubscriptionSplitSerializer 23 | implements SimpleVersionedSerializer<SubscriptionSplit> { 24 | @Override 25 | public int getVersion() { 26 | return 0; 27 | } 28 | 29 | @Override 30 | public byte[] serialize(SubscriptionSplit subscriptionSplit) { 31 | return subscriptionSplit.toProto().toByteArray(); 32 | } 33 | 34 | @Override 35 | public SubscriptionSplit deserialize(int i, byte[] bytes) throws IOException { 36 | return SubscriptionSplit.fromProto(SubscriptionSplitProto.parseFrom(bytes)); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/flic/controllers/test_parameters/TestParameterProvider.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.flic.controllers.test_parameters; 18 | 19 | import com.google.common.collect.ImmutableList; 20 | import java.util.List; 21 | 22 | public interface TestParameterProvider { 23 | List<TestParameters> parameters(); 24 | 25 | static TestParameterProvider of(TestParameters testParameters) { 26 | return new OneTestParameterProvider(testParameters); 27 | } 28 | 29 | class OneTestParameterProvider implements TestParameterProvider { 30 | private final TestParameters testParameters; 31 | 32 | protected OneTestParameterProvider(TestParameters testParameters) { 33 | this.testParameters = testParameters; 34 | } 35 | 36 | @Override 37 | public List<TestParameters> parameters() { 38 | return ImmutableList.of(testParameters); 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/flic/controllers/test_parameters/CoreScalingTestParameterProvider.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.flic.controllers.test_parameters; 18 | 19 | import com.google.common.collect.ImmutableList; 20 | import java.util.List; 21 | 22 | public class CoreScalingTestParameterProvider implements TestParameterProvider { 23 | private final TestParameters base; 24 | 25 | CoreScalingTestParameterProvider(TestParameters base) { 26 | this.base = base; 27 | } 28 | 29 | @Override 30 | public List<TestParameters> parameters() { 31 | return ImmutableList.of( 32 | base.toBuilder().setNumCoresPerWorker(1).build(), 33 | base.toBuilder().setNumCoresPerWorker(2).build(), 34 | base.toBuilder().setNumCoresPerWorker(4).build(), 35 | base.toBuilder().setNumCoresPerWorker(8).build(), 36 | base.toBuilder().setNumCoresPerWorker(16).build()); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /kafka-connector/src/main/java/com/google/pubsublite/kafka/source/PubSubLiteSourceTask.java: -------------------------------------------------------------------------------- 1 | package com.google.pubsublite.kafka.source; 2 | 3 | import com.google.common.annotations.VisibleForTesting; 4 | import java.util.List; 5 | import java.util.Map; 6 | import javax.annotation.Nullable; 7 | import org.apache.kafka.common.utils.AppInfoParser; 8 | import org.apache.kafka.connect.source.SourceRecord; 9 | import org.apache.kafka.connect.source.SourceTask; 10 | 11 | public class PubSubLiteSourceTask extends SourceTask { 12 | 13 | private final PollerFactory factory; 14 | private @Nullable 15 | Poller poller; 16 | 17 | @VisibleForTesting 18 | PubSubLiteSourceTask(PollerFactory factory) { 19 | this.factory = factory; 20 | } 21 | 22 | public PubSubLiteSourceTask() { 23 | this(new PollerFactoryImpl()); 24 | } 25 | 26 | @Override 27 | public String version() { 28 | return AppInfoParser.getVersion(); 29 | } 30 | 31 | @Override 32 | public void start(Map<String, String> props) { 33 | if (poller != null) { 34 | throw new IllegalStateException("Called start when poller already exists."); 35 | } 36 | poller = factory.newPoller(props); 37 | } 38 | 39 | @Override 40 | public @Nullable 41 | List<SourceRecord> poll() { 42 | return poller.poll(); 43 | } 44 | 45 | @Override 46 | public void stop() { 47 | if (poller == null) { 48 | throw new IllegalStateException("Called stop when poller doesn't exist."); 49 | } 50 | try { 51 | poller.close(); 52 | } finally { 53 | poller = null; 54 | } 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /load-test-framework/node_src/src/flow_control/rate_limiter_flow_controller.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | let FlowController = require('./flow_controller.js'); 18 | 19 | 20 | class RateLimiterFlowController extends FlowController { 21 | // A FlowController that allows actions at a given per second rate. 22 | constructor(perSecondRate) { 23 | super(); 24 | let seconds_per_run = 1 / perSecondRate; 25 | this.callbacks = []; 26 | setInterval(() => { 27 | let cb = this.callbacks.pop(); 28 | if (undefined === cb) return; 29 | cb(); 30 | }, seconds_per_run * 1000); 31 | } 32 | 33 | async requestStart() { 34 | return new Promise(resolve => { 35 | this.callbacks.push(() => { 36 | resolve(1); 37 | }); 38 | }); 39 | } 40 | 41 | informFinished(wasSuccessful) { 42 | } 43 | } 44 | 45 | module.exports = RateLimiterFlowController; 46 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/flic/controllers/test_parameters/MessageSizeScalingTestParameterProvider.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 
15 | */ 16 | 17 | package com.google.pubsub.flic.controllers.test_parameters; 18 | 19 | import com.google.common.collect.ImmutableList; 20 | import java.util.List; 21 | 22 | public class MessageSizeScalingTestParameterProvider implements TestParameterProvider { 23 | private final TestParameters base; 24 | 25 | MessageSizeScalingTestParameterProvider(TestParameters base) { 26 | this.base = base; 27 | } 28 | 29 | @Override 30 | public List<TestParameters> parameters() { 31 | return ImmutableList.of( 32 | base.toBuilder().setMessageSize(1000).build(), 33 | base.toBuilder().setMessageSize(10000).build(), 34 | base.toBuilder().setMessageSize(100000).build(), 35 | base.toBuilder().setMessageSize(1000000).build(), 36 | base.toBuilder().setMessageSize(9000000).build()); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/providers/PubsubLiteProvider.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql.providers; 2 | 3 | import com.google.auto.service.AutoService; 4 | import com.google.cloud.pubsub.sql.Rows; 5 | import org.apache.beam.sdk.schemas.Schema; 6 | import org.apache.beam.sdk.schemas.Schema.FieldType; 7 | 8 | @AutoService({StandardSourceProvider.class, StandardSinkProvider.class}) 9 | public class PubsubLiteProvider implements StandardSourceProvider, StandardSinkProvider { 10 | 11 | @Override 12 | public StandardSink getSink() { 13 | return (StandardSqlSink) () -> Rows.STANDARD_SCHEMA; 14 | } 15 | 16 | private static final Schema READ_SCHEMA = Schema.builder() 17 | .addByteArrayField(Rows.PAYLOAD_FIELD) 18 | .addByteArrayField(Rows.MESSAGE_KEY_FIELD) 19 | .addDateTimeField("publish_timestamp") 20 | .addField(Rows.EVENT_TIMESTAMP_FIELD, FieldType.DATETIME.withNullable(true)) 21 | .addArrayField(Rows.ATTRIBUTES_FIELD, Schema.FieldType.row(Rows.ATTRIBUTES_ENTRY_SCHEMA)) 22 | .build(); 23 | 24 | @Override 25 | public StandardSource getSource() { 26 | return new StandardSqlSource() { 27 | @Override 28 | public String query() { 29 | return "SELECT payload, message_key, attributes, IFNULL(event_timestamp, publish_timestamp) AS event_timestamp FROM PCOLLECTION"; 30 | } 31 | 32 | @Override 33 | public Schema nativeSchema() { 34 | return READ_SCHEMA; 35 | } 36 | }; 37 | } 38 | 39 | @Override 40 | public String identifier() { 41 | return "pubsublite"; 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /kafka-connector/src/test/java/com/google/pubsublite/kafka/source/PubSubLiteSourceTaskTest.java: -------------------------------------------------------------------------------- 1 | package com.google.pubsublite.kafka.source; 2 | 3 | import static com.google.common.truth.Truth.assertThat; 4 | import static org.junit.Assert.assertThrows; 5 | import static org.mockito.ArgumentMatchers.any; 6 | import static org.mockito.Mockito.verify; 7 | import static org.mockito.Mockito.when; 8 | import static org.mockito.MockitoAnnotations.initMocks; 9 | 10 | import com.google.common.collect.ImmutableList; 11 | import com.google.common.collect.ImmutableMap; 12 | import org.junit.Before; 13 | import org.junit.Test; 14 | import org.junit.runner.RunWith; 15 | import org.junit.runners.JUnit4; 16 | import org.mockito.Mock; 17 | 18 | @RunWith(JUnit4.class) 19 | public class PubSubLiteSourceTaskTest { 20 | 21 | @Mock 22 | PollerFactory factory; 23 | @Mock 24 | Poller poller; 25 | PubSubLiteSourceTask task; 26 | 
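  // Side note (not from the original source): MockitoAnnotations.initMocks,
  // used in setUp() below, is deprecated as of Mockito 3.4. A sketch of the
  // modern equivalent, assuming a Mockito 3.4+ dependency:
  //   private AutoCloseable mocks;
  //   @Before public void openMocks() { mocks = MockitoAnnotations.openMocks(this); }
  //   @After public void closeMocks() throws Exception { mocks.close(); }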
27 | @Before 28 | public void setUp() { 29 | initMocks(this); 30 | when(factory.newPoller(any())).thenReturn(poller); 31 | task = new PubSubLiteSourceTask(factory); 32 | task.start(ImmutableMap.of()); 33 | verify(factory).newPoller(ImmutableMap.of()); 34 | assertThrows(IllegalStateException.class, () -> task.start(ImmutableMap.of())); 35 | } 36 | 37 | @Test 38 | public void poll() { 39 | when(poller.poll()).thenReturn(ImmutableList.of()); 40 | assertThat(task.poll()).isEmpty(); 41 | } 42 | 43 | @Test 44 | public void stop() { 45 | task.stop(); 46 | verify(poller).close(); 47 | assertThrows(IllegalStateException.class, () -> task.stop()); 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /udfs/helper_number.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF provides various number manipulation helper functions. 3 | * 4 | * @param {Object} message - The message containing number fields to manipulate. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The message with manipulated number fields. 7 | */ 8 | function helper_number(message, metadata) { 9 | // Parse the message 10 | const data = JSON.parse(message.data); 11 | 12 | // Helper function to round to specified decimal places 13 | const roundToDecimal = (num, places) => { 14 | return Number(Math.round(num + 'e' + places) + 'e-' + places); 15 | }; 16 | 17 | // Helper function to format number with commas 18 | const formatWithCommas = (num) => { 19 | return num.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ","); 20 | }; 21 | 22 | // Helper function to convert to percentage 23 | const toPercentage = (num) => { 24 | return roundToDecimal(num * 100, 2); 25 | }; 26 | 27 | // Helper function to clamp number between min and max 28 | const clamp = (num, min, max) => { 29 | return Math.min(Math.max(num, min), max); 30 | }; 31 | 32 | // Apply transformations to specified fields 33 | data['field1'] = roundToDecimal(data['field1'], 2); 34 | data['field2'] = formatWithCommas(data['field2']); 35 | data['field3'] = toPercentage(data['field3']); 36 | data['field4'] = clamp(data['field4'], 0, 1000); 37 | 38 | // Update the message with transformed data 39 | message.data = JSON.stringify(data); 40 | 41 | return message; 42 | } 43 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/flic/controllers/test_parameters/StandardParameters.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 
15 | */ 16 | 17 | package com.google.pubsub.flic.controllers.test_parameters; 18 | 19 | import com.google.protobuf.util.Durations; 20 | import java.util.Optional; 21 | 22 | public class StandardParameters { 23 | public static TestParameters LATENCY = 24 | TestParameters.builder() 25 | .setMessageSize(1) 26 | .setPublishBatchSize(1) 27 | .setPublishRatePerSec(Optional.of(1)) 28 | .setPublishBatchDuration(Durations.fromMillis(1)) 29 | .setNumCoresPerWorker(1) 30 | .build(); 31 | public static TestParameters THROUGHPUT = 32 | TestParameters.builder().setNumCoresPerWorker(16).build(); 33 | public static TestParameters NOOP = 34 | TestParameters.builder() 35 | .setBurnInDuration(Durations.fromSeconds(0)) 36 | .setLoadtestDuration(Durations.fromSeconds(0)) 37 | .setNumCoresPerWorker(1) 38 | .build(); 39 | } 40 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/flic/controllers/test_parameters/ScalingFactorTestParameterProvider.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.flic.controllers.test_parameters; 18 | 19 | import com.google.common.collect.ImmutableList; 20 | import java.util.List; 21 | 22 | public class ScalingFactorTestParameterProvider implements TestParameterProvider { 23 | private final TestParameters base; 24 | 25 | ScalingFactorTestParameterProvider(TestParameters base) { 26 | this.base = base; 27 | } 28 | 29 | @Override 30 | public List<TestParameters> parameters() { 31 | return ImmutableList.of( 32 | base.toBuilder().setSubscriberCpuScaling(0).build(), 33 | base.toBuilder().setSubscriberCpuScaling(1).build(), 34 | base.toBuilder().setSubscriberCpuScaling(2).build(), 35 | base.toBuilder().setSubscriberCpuScaling(4).build(), 36 | base.toBuilder().setSubscriberCpuScaling(8).build(), 37 | base.toBuilder().setSubscriberCpuScaling(12).build()); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | Want to contribute? Great! First, read this page (including the small print at the end). 2 | 3 | ### Before you contribute 4 | Before we can use your code, you must sign the 5 | [Google Individual Contributor License Agreement](https://cla.developers.google.com/about/google-individual) 6 | (CLA), which you can do online. The CLA is necessary mainly because you own the 7 | copyright to your changes, even after your contribution becomes part of our 8 | codebase, so we need your permission to use and distribute your code. We also 9 | need to be sure of various other things—for instance that you'll tell us if you 10 | know that your code infringes on other people's patents. 
You don't have to sign 11 | the CLA until after you've submitted your code for review and a member has 12 | approved it, but you must do it before we can put your code into our codebase. 13 | Before you start working on a larger contribution, you should get in touch with 14 | us first through the issue tracker with your idea so that we can help out and 15 | possibly guide you. Coordinating up front makes it much easier to avoid 16 | frustration later on. 17 | 18 | ### Review Our Community Guidelines 19 | This project follows 20 | [Google's Open Source Community Guidelines](https://opensource.google/conduct/). 21 | 22 | ### Code reviews 23 | All submissions, including submissions by project members, require review. We 24 | use GitHub pull requests for this purpose. 25 | 26 | ### The small print 27 | Contributions made by corporations are covered by a different agreement than 28 | the one above, the 29 | [Software Grant and Corporate Contributor License Agreement](https://cla.developers.google.com/about/google-corporate). 30 | -------------------------------------------------------------------------------- /udfs/timestamp_converter.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to convert a timestamp field to a specified format. 3 | * 4 | * @param {Object} message - The message containing the timestamp to convert. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The message with the converted timestamp. 7 | */ 8 | function timestamp_converter(message, metadata) { 9 | // Parse the message 10 | const data = JSON.parse(message.data); 11 | 12 | // Define the timestamp field and target format 13 | const timestampField = 'timestamp'; 14 | 15 | // Target format 16 | const targetFormat = 'YYYY-MM-DD HH:mm:ss'; 17 | 18 | // Check if timestamp field exists 19 | if (data[timestampField]) { 20 | // Convert timestamp to Date object 21 | const date = new Date(data[timestampField]); 22 | 23 | const map = { 24 | YYYY: date.getFullYear(), 25 | MM: String(date.getMonth() + 1).padStart(2, '0'), 26 | DD: String(date.getDate()).padStart(2, '0'), 27 | HH: String(date.getHours()).padStart(2, '0'), 28 | mm: String(date.getMinutes()).padStart(2, '0'), 29 | ss: String(date.getSeconds()).padStart(2, '0'), 30 | }; 31 | 32 | // Create formatted timestamp 33 | const formattedTimestamp = targetFormat.replace(/YYYY|MM|DD|HH|mm|ss/g, match => map[match]); 34 | 35 | // Add formatted timestamp to data 36 | data['formatted_timestamp'] = formattedTimestamp; 37 | } 38 | 39 | // Update the message with converted timestamp 40 | message.data = JSON.stringify(data); 41 | 42 | return message; 43 | } 44 | -------------------------------------------------------------------------------- /.kokoro/linkage-monitor.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2025 Google LLC 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | 16 | set -eo pipefail 17 | # Display commands being run. 18 | set -x 19 | 20 | ## Get the directory of the build script 21 | scriptDir=$(realpath $(dirname "${BASH_SOURCE[0]}")) 22 | ## cd to the parent directory, i.e. the root of the git repo 23 | cd ${scriptDir}/.. 24 | 25 | # include common functions 26 | source ${scriptDir}/common.sh 27 | 28 | # Print out Java version 29 | java -version 30 | echo ${JOB_TYPE} 31 | 32 | # attempt to install 3 times with exponential backoff (starting with 10 seconds) 33 | retry_with_backoff 3 10 \ 34 | mvn install -B -V \ 35 | -DskipTests=true \ 36 | -Dclirr.skip=true \ 37 | -Denforcer.skip=true \ 38 | -Dmaven.javadoc.skip=true \ 39 | -Dgcloud.download.skip=true 40 | 41 | # Kokoro job cloud-opensource-java/ubuntu/linkage-monitor-gcs creates this JAR 42 | JAR=linkage-monitor-latest-all-deps.jar 43 | curl -v -O "https://storage.googleapis.com/cloud-opensource-java-linkage-monitor/${JAR}" 44 | 45 | # Fails if there are new linkage errors compared with the baseline 46 | java -jar ${JAR} com.google.cloud:libraries-bom -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/main/java/com/google/pubsub/flink/internal/sink/PubSubSinkWriter.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.pubsub.flink.internal.sink; 17 | 18 | import com.google.pubsub.flink.PubSubSerializationSchema; 19 | import java.io.IOException; 20 | import org.apache.flink.api.connector.sink2.SinkWriter; 21 | 22 | public class PubSubSinkWriter<T> implements SinkWriter<T> { 23 | private final FlushablePublisher publisher; 24 | private final PubSubSerializationSchema<T> schema; 25 | 26 | public PubSubSinkWriter(FlushablePublisher publisher, PubSubSerializationSchema<T> schema) { 27 | this.publisher = publisher; 28 | this.schema = schema; 29 | } 30 | 31 | @Override 32 | public void write(T value, Context context) throws InterruptedException { 33 | publisher.publish(schema.serialize(value)); 34 | } 35 | 36 | @Override 37 | public void flush(boolean endOfInput) throws IOException { 38 | publisher.flush(); 39 | } 40 | 41 | @Override 42 | public void close() throws IOException { 43 | publisher.flush(); 44 | } 45 | }
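// A hedged wiring sketch (added; assumes a configured
// com.google.cloud.pubsub.v1.Publisher instance named publisher): write()
// hands serialized messages to the publisher, and flush() blocks until every
// outstanding publish has completed, which is what Flink requires of the
// sink on checkpoints.
//   PubSubSinkWriter<String> writer =
//       new PubSubSinkWriter<>(
//           new PubSubFlushablePublisher(publisher),
//           PubSubSerializationSchema.dataOnly(new SimpleStringSchema()));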
46 | -------------------------------------------------------------------------------- /load-test-framework/go_src/cmd/main.go: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package main 18 | 19 | import ( 20 | "flag" 21 | "fmt" 22 | "google.com/cloud_pubsub_loadtest/internal" 23 | "google.com/cloud_pubsub_loadtest/internal/genproto" 24 | "google.golang.org/grpc" 25 | "log" 26 | "net" 27 | ) 28 | 29 | var port = flag.Int("port", 5000, "The port to use for this server") 30 | var publisher = flag.Bool("publisher", true, "Run the publisher if true or subscriber if false") 31 | 32 | func main() { 33 | flag.Parse() 34 | var workerType string 35 | if *publisher { 36 | workerType = "publisher" 37 | } else { 38 | workerType = "subscriber" 39 | } 40 | log.Printf("Starting %s at port: %d", workerType, *port) 41 | lis, err := net.Listen("tcp", fmt.Sprintf(":%d", *port)) 42 | if err != nil { 43 | log.Fatalf("failed to listen: %v", err) 44 | } 45 | grpcServer := grpc.NewServer() 46 | genproto.RegisterLoadtestWorkerServer(grpcServer, internal.NewLoadtestWorkerService(*publisher)) 47 | err = grpcServer.Serve(lis) 48 | if err != nil { 49 | log.Fatalf("failed to serve: %v", err) 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /load-test-framework/go_src/internal/loadtest_service.go: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 
15 | */ 16 | 17 | package internal 18 | 19 | import ( 20 | "context" 21 | "google.com/cloud_pubsub_loadtest/internal/genproto" 22 | "google.com/cloud_pubsub_loadtest/internal/tasks" 23 | ) 24 | 25 | type loadtestWorkerService struct { 26 | workerTask tasks.Task 27 | } 28 | 29 | func (service *loadtestWorkerService) Check(context.Context, *genproto.CheckRequest) (*genproto.CheckResponse, error) { 30 | response := service.workerTask.Check() 31 | return &response, nil 32 | } 33 | 34 | func (service *loadtestWorkerService) Start( 35 | ctx context.Context, request *genproto.StartRequest) (*genproto.StartResponse, error) { 36 | service.workerTask.Start(*request) 37 | return &genproto.StartResponse{}, nil 38 | } 39 | 40 | func NewLoadtestWorkerService(isPublisher bool) genproto.LoadtestWorkerServer { 41 | if isPublisher { 42 | return &loadtestWorkerService{ 43 | workerTask: tasks.NewPublisherTask(), 44 | } 45 | } else { 46 | return &loadtestWorkerService{ 47 | workerTask: tasks.NewSubscriberTask(), 48 | } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /.kokoro/populate-secrets.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2025 Google LLC. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | set -eo pipefail 17 | 18 | function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} 19 | function msg { println "$*" >&2 ;} 20 | function println { printf '%s\n' "$(now) $*" ;} 21 | 22 | 23 | # Populates requested secrets set in SECRET_MANAGER_KEYS from service account: 24 | # kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com 25 | SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" 26 | msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" 27 | mkdir -p ${SECRET_LOCATION} 28 | for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") 29 | do 30 | msg "Retrieving secret ${key}" 31 | docker run --entrypoint=gcloud \ 32 | --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ 33 | gcr.io/google.com/cloudsdktool/cloud-sdk \ 34 | secrets versions access latest \ 35 | --project cloud-devrel-kokoro-resources \ 36 | --secret ${key} > \ 37 | "${SECRET_LOCATION}/${key}" 38 | if [[ $? == 0 ]]; then 39 | msg "Secret written to ${SECRET_LOCATION}/${key}" 40 | else 41 | msg "Error retrieving secret ${key}" 42 | fi 43 | done -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/main/java/com/google/pubsub/flink/PubSubSerializationSchema.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.pubsub.flink; 17 | 18 | import com.google.protobuf.ByteString; 19 | import com.google.pubsub.v1.PubsubMessage; 20 | import java.io.Serializable; 21 | import org.apache.flink.api.common.serialization.SerializationSchema; 22 | 23 | public interface PubSubSerializationSchema<T> extends Serializable { 24 | static <T> PubSubSerializationSchema<T> dataOnly(SerializationSchema<T> schema) { 25 | return new PubSubSerializationSchema<T>() { 26 | 27 | @Override 28 | public void open(SerializationSchema.InitializationContext context) throws Exception { 29 | schema.open(context); 30 | } 31 | 32 | @Override 33 | public PubsubMessage serialize(T value) { 34 | return PubsubMessage.newBuilder() 35 | .setData(ByteString.copyFrom(schema.serialize(value))) 36 | .build(); 37 | } 38 | }; 39 | } 40 | 41 | void open(SerializationSchema.InitializationContext context) throws Exception; 42 | 43 | PubsubMessage serialize(T value); 44 | } 45 | -------------------------------------------------------------------------------- /udfs/helper_string.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF provides various string manipulation helper functions. 3 | * 4 | * @param {Object} message - The message containing string fields to manipulate. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The message with manipulated string fields. 7 | */ 8 | function helper_string(message, metadata) { 9 | // Parse the message 10 | const data = JSON.parse(message.data); 11 | 12 | // Helper function to capitalize first letter of each word 13 | const capitalizeWords = (str) => { 14 | return str.split(' ') 15 | .map(word => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()) 16 | .join(' '); 17 | }; 18 | 19 | // Helper function to remove special characters 20 | const removeSpecialChars = (str) => { 21 | return str.replace(/[^a-zA-Z0-9\s]/g, ''); 22 | }; 23 | 24 | // Helper function to truncate string with ellipsis 25 | const truncateString = (str, maxLength) => { 26 | return str.length > maxLength ? str.substring(0, maxLength) + '...' 
: str; 27 | }; 28 | 29 | // Helper function to normalize whitespace 30 | const normalizeWhitespace = (str) => { 31 | return str.replace(/\s+/g, ' ').trim(); 32 | }; 33 | 34 | // Apply transformations to specified fields 35 | data['name'] = capitalizeWords(data['name']); 36 | data['description'] = removeSpecialChars(data['description']); 37 | data['description'] = truncateString(data['description'], 100); 38 | data['notes'] = normalizeWhitespace(data['notes']); 39 | 40 | // Update the message with transformed data 41 | message.data = JSON.stringify(data); 42 | 43 | return message; 44 | } 45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/TableSpec.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql; 2 | 3 | import com.alibaba.fastjson.JSON; 4 | import com.alibaba.fastjson.JSONObject; 5 | import com.google.auto.value.AutoValue; 6 | import java.util.Map; 7 | 8 | @AutoValue 9 | public abstract class TableSpec { 10 | // The id for the table type in beam sql. 11 | public abstract String id(); 12 | 13 | // The location string for the resource in beam sql. 14 | public abstract String location(); 15 | 16 | // Additional properties to be passed to the beam sql table. 17 | public abstract JSONObject properties(); 18 | 19 | public static Builder builder() { 20 | return new AutoValue_TableSpec.Builder(); 21 | } 22 | 23 | public static TableSpec parse(String specJson) { 24 | JSONObject parsed = JSON.parseObject(specJson); 25 | Builder toReturn = builder(); 26 | toReturn.setId(parsed.get("id").toString()); 27 | toReturn.setLocation(parsed.get("location").toString()); 28 | if (parsed.containsKey("properties")) { 29 | toReturn.setProperties(parsed.getJSONObject("properties")); 30 | } else { 31 | toReturn.setProperties(new JSONObject()); 32 | } 33 | return toReturn.build(); 34 | } 35 | 36 | @AutoValue.Builder 37 | public abstract static class Builder { 38 | public abstract Builder setId(String id); 39 | 40 | public abstract Builder setLocation(String location); 41 | 42 | public abstract Builder setProperties(JSONObject properties); 43 | public Builder setProperties(Map<String, String> properties) { 44 | JSONObject object = new JSONObject(); 45 | properties.forEach(object::put); 46 | return setProperties(object); 47 | } 48 | 49 | public abstract TableSpec build(); 50 | } 51 | }
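// A minimal illustration (added; all values hypothetical) of the JSON
// accepted by TableSpec.parse():
//   {"id": "pubsub",
//    "location": "projects/my-project/topics/my-topic",
//    "properties": {"format": "json"}}
// "id" and "location" are required; "properties" defaults to an empty object.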
52 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/flic/controllers/test_parameters/TestParameterProviderConverter.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.flic.controllers.test_parameters; 18 | 19 | import com.beust.jcommander.IStringConverter; 20 | 21 | public class TestParameterProviderConverter implements IStringConverter<TestParameterProvider> { 22 | @Override 23 | public TestParameterProvider convert(String value) { 24 | switch (value) { 25 | case "latency": 26 | return TestParameterProvider.of(StandardParameters.LATENCY); 27 | case "throughput": 28 | return TestParameterProvider.of(StandardParameters.THROUGHPUT); 29 | case "core-scaling": 30 | return new CoreScalingTestParameterProvider(StandardParameters.THROUGHPUT); 31 | case "message-size": 32 | return new MessageSizeScalingTestParameterProvider(StandardParameters.THROUGHPUT); 33 | case "thread-scaling": 34 | return new ScalingFactorTestParameterProvider(StandardParameters.THROUGHPUT); 35 | case "noop": 36 | return TestParameterProvider.of(StandardParameters.NOOP); 37 | } 38 | return null; 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /.kokoro/common.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2025 Google LLC 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | function retry_with_backoff { 17 | attempts_left=$1 18 | sleep_seconds=$2 19 | shift 2 20 | command=$@ 21 | 22 | 23 | # store current flag state 24 | flags=$- 25 | 26 | # allow failures to continue 27 | set +e 28 | ${command} 29 | exit_code=$? 30 | 31 | # restore "e" flag 32 | if [[ ${flags} =~ e ]] 33 | then set -e 34 | else set +e 35 | fi 36 | 37 | if [[ $exit_code == 0 ]] 38 | then 39 | return 0 40 | fi 41 | 42 | # failure 43 | if [[ ${attempts_left} -gt 0 ]] 44 | then 45 | echo "failure (${exit_code}), sleeping ${sleep_seconds}..." 46 | sleep ${sleep_seconds} 47 | new_attempts=$((${attempts_left} - 1)) 48 | new_sleep=$((${sleep_seconds} * 2)) 49 | retry_with_backoff ${new_attempts} ${new_sleep} ${command} 50 | fi 51 | 52 | return $exit_code 53 | }
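# Usage sketch (added; mirrors the real call in .kokoro/linkage-monitor.sh):
#   retry_with_backoff 3 10 mvn install -B -V
# runs the command once and, on failure, retries up to 3 more times,
# sleeping 10s, 20s, then 40s between attempts before returning the last
# exit code.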
54 | 55 | ## Helper functions 56 | function now() { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n'; } 57 | function msg() { println "$*" >&2; } 58 | function println() { printf '%s\n' "$(now) $*"; } 59 | 60 | ## Helper comment to trigger updated repo dependency release -------------------------------------------------------------------------------- /flink-connector/flink-examples-gcp-pubsub/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 4.0.0 6 | 7 | 8 | com.google.pubsub.flink 9 | flink-connector-parent 10 | 1.0.0-SNAPSHOT 11 | 12 | 13 | flink-examples-gcp-pubsub 14 | pom 15 | Google Cloud Pub/Sub Connector Examples 16 | 17 | 18 | true 19 | 20 | 21 | 22 | pubsub-streaming 23 | 24 | 25 | 26 | 27 | com.google.pubsub.flink 28 | flink-connector-gcp-pubsub 29 | ${project.version} 30 | 31 | 32 | 33 | 34 | 35 | 36 | org.apache.maven.plugins 37 | maven-deploy-plugin 38 | 3.1.0 39 | 40 | true 41 | 42 | 43 | 44 | org.sonatype.plugins 45 | nexus-staging-maven-plugin 46 | 1.6.13 47 | 48 | true 49 | 50 | 51 | 52 | 53 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/clients/common/ShardedBlockingQueue.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.clients.common; 18 | 19 | import java.util.ArrayList; 20 | import java.util.Collection; 21 | import java.util.concurrent.BlockingQueue; 22 | import java.util.concurrent.LinkedBlockingQueue; 23 | import java.util.concurrent.atomic.AtomicInteger; 24 | 25 | class ShardedBlockingQueue<T> { 26 | private final AtomicInteger next = new AtomicInteger(0); 27 | private final ArrayList<BlockingQueue<T>> delegates; 28 | 29 | ShardedBlockingQueue() { 30 | int cores = Runtime.getRuntime().availableProcessors(); 31 | delegates = new ArrayList<>(cores); 32 | for (int i = 0; i < cores; i++) { 33 | delegates.add(new LinkedBlockingQueue<>()); 34 | } 35 | } 36 | 37 | private BlockingQueue<T> delegate() { 38 | return delegates.get(Math.floorMod(next.getAndIncrement(), delegates.size())); // floorMod keeps the index non-negative if the counter overflows 39 | } 40 | 41 | void add(T toAdd) { 42 | delegate().add(toAdd); 43 | } 44 | 45 | void drainTo(Collection<? super T> out) { 46 | for (BlockingQueue<T> delegate : delegates) { 47 | delegate.drainTo(out); 48 | } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/flic/controllers/ClientParams.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 
3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.flic.controllers; 18 | 19 | import com.google.auto.value.AutoValue; 20 | import com.google.pubsub.flic.controllers.test_parameters.TestParameters; 21 | 22 | /** Keeps track of the parameters that define a client. */ 23 | @AutoValue 24 | public abstract class ClientParams { 25 | public abstract TestParameters getTestParameters(); 26 | 27 | public abstract String getProject(); 28 | 29 | public abstract ClientType getClientType(); 30 | 31 | public abstract String getZone(); 32 | 33 | public abstract Builder toBuilder(); 34 | 35 | public static Builder builder() { 36 | return new AutoValue_ClientParams.Builder(); 37 | } 38 | 39 | @AutoValue.Builder 40 | public abstract static class Builder { 41 | public abstract Builder setTestParameters(TestParameters testParameters); 42 | 43 | public abstract Builder setProject(String project); 44 | 45 | public abstract Builder setClientType(ClientType type); 46 | 47 | public abstract Builder setZone(String zone); 48 | 49 | public abstract ClientParams build(); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/main/java/com/google/pubsub/flink/internal/sink/PubSubFlushablePublisher.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.google.pubsub.flink.internal.sink; 17 | 18 | import com.google.api.core.ApiFuture; 19 | import com.google.api.core.ApiFutures; 20 | import com.google.cloud.pubsub.v1.Publisher; 21 | import com.google.pubsub.v1.PubsubMessage; 22 | import java.util.ArrayList; 23 | import java.util.List; 24 | 25 | public class PubSubFlushablePublisher implements FlushablePublisher { 26 | Publisher publisher; 27 | List<ApiFuture<String>> outstandingPublishes = new ArrayList<>(); 28 | 29 | public PubSubFlushablePublisher(Publisher publisher) { 30 | this.publisher = publisher; 31 | } 32 | 33 | @Override 34 | public void publish(PubsubMessage message) throws InterruptedException { 35 | outstandingPublishes.add(publisher.publish(message)); 36 | } 37 | 38 | @Override 39 | public void flush() { 40 | publisher.publishAllOutstanding(); 41 | try { 42 | ApiFutures.allAsList(outstandingPublishes).get(); 43 | outstandingPublishes.clear(); 44 | } catch (Exception e) { 45 | throw new RuntimeException(e); 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/test/java/com/google/pubsub/flink/internal/source/enumerator/PubSubCheckpointSerializerTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.pubsub.flink.internal.source.enumerator; 17 | 18 | import static com.google.common.truth.Truth.assertThat; 19 | 20 | import com.google.pubsub.flink.proto.PubSubEnumeratorCheckpoint; 21 | import java.util.ArrayList; 22 | import java.util.List; 23 | import org.junit.Test; 24 | import org.junit.runner.RunWith; 25 | import org.junit.runners.JUnit4; 26 | 27 | @RunWith(JUnit4.class) 28 | public final class PubSubCheckpointSerializerTest { 29 | 30 | @Test 31 | public void testSerialization() throws Exception { 32 | List<PubSubEnumeratorCheckpoint.Assignment> assignments = new ArrayList<>(); 33 | assignments.add(PubSubEnumeratorCheckpoint.Assignment.newBuilder().setSubtask(1).build()); 34 | PubSubEnumeratorCheckpoint proto = 35 | PubSubEnumeratorCheckpoint.newBuilder().addAllAssignments(assignments).build(); 36 | PubSubCheckpointSerializer serializer = new PubSubCheckpointSerializer(); 37 | assertThat(serializer.deserialize(serializer.getVersion(), serializer.serialize(proto))) 38 | .isEqualTo(proto); 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/main/java/com/google/pubsub/flink/util/EmulatorEndpoint.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License.
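PubSubFlushablePublisher retains every publish future and clears the list only after ApiFutures.allAsList(...).get() succeeds, so flush() acts as a durability barrier: when it returns normally, every message handed to publish() has been acknowledged by Pub/Sub. A usage sketch with the standard google-cloud-pubsub Publisher builder (project and topic names are placeholders):

import com.google.cloud.pubsub.v1.Publisher;
import com.google.protobuf.ByteString;
import com.google.pubsub.v1.PubsubMessage;
import com.google.pubsub.v1.TopicName;

static void publishBatch() throws Exception {
  Publisher publisher = Publisher.newBuilder(TopicName.of("my-project", "my-topic")).build();
  FlushablePublisher flushable = new PubSubFlushablePublisher(publisher);
  for (int i = 0; i < 1000; i++) {
    flushable.publish(
        PubsubMessage.newBuilder().setData(ByteString.copyFromUtf8("record-" + i)).build());
  }
  // Typically invoked from the sink's flush/checkpoint hook: blocks until all
  // publishes above are acknowledged, or throws if any of them failed.
  flushable.flush();
}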
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.flink.util; 18 | 19 | import com.google.common.base.Optional; 20 | import javax.annotation.Nullable; 21 | 22 | /** 23 | * Utility class used to help connect {@link PubSubSink} and {@link PubSubSource} to a Google Cloud 24 | * Pub/Sub emulator. 25 | */ 26 | public class EmulatorEndpoint { 27 | public static final String EMULATOR_ENDPOINT_PREFIX = "emulator:///"; 28 | 29 | public static String toEmulatorEndpoint(String endpoint) { 30 | return EMULATOR_ENDPOINT_PREFIX + endpoint; 31 | } 32 | 33 | @Nullable 34 | public static String getEmulatorEndpoint(Optional endpoint) { 35 | String emulatorEndpoint = null; 36 | // Prioritize using an emulator endpoint set in env var PUBSUB_EMULATOR_HOST. 37 | if ((emulatorEndpoint = System.getenv("PUBSUB_EMULATOR_HOST")) != null) { 38 | return emulatorEndpoint; 39 | } 40 | if (endpoint.isPresent() && endpoint.get().startsWith(EMULATOR_ENDPOINT_PREFIX)) { 41 | return endpoint.get().replaceFirst(EMULATOR_ENDPOINT_PREFIX, ""); 42 | } 43 | return emulatorEndpoint; 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /sql-streaming-copier/src/main/java/com/google/cloud/pubsub/sql/RunPipeline.java: -------------------------------------------------------------------------------- 1 | package com.google.cloud.pubsub.sql; 2 | 3 | import java.util.Map; 4 | import javax.annotation.Nullable; 5 | import org.apache.beam.sdk.Pipeline; 6 | import org.apache.beam.sdk.extensions.sql.zetasql.ZetaSQLQueryPlanner; 7 | import org.apache.beam.sdk.options.PipelineOptionsFactory; 8 | 9 | /** 10 | * A main class for running the pipeline locally. 11 | */ 12 | public class RunPipeline { 13 | 14 | private static Map getNonNull(@Nullable Map map) { 15 | if (map == null) { 16 | return Map.of(); 17 | } 18 | return map; 19 | } 20 | 21 | public static void run(TemplateOptions options) { 22 | TableSpec sourceSpec = 23 | TableSpec.builder() 24 | .setId(options.getSourceType()) 25 | .setLocation(options.getSourceLocation()) 26 | .setProperties(getNonNull(options.getSourceOptions())) 27 | .build(); 28 | TableSpec sinkSpec = 29 | TableSpec.builder() 30 | .setId(options.getSinkType()) 31 | .setLocation(options.getSinkLocation()) 32 | .setProperties(getNonNull(options.getSinkOptions())) 33 | .build(); 34 | run(options, sourceSpec, sinkSpec); 35 | } 36 | 37 | public static void run(SqlStreamingOptions options, TableSpec sourceSpec, TableSpec sinkSpec) { 38 | options.setStreaming(true); 39 | options.setPlannerName(ZetaSQLQueryPlanner.class.getName()); 40 | Pipeline pipeline = Pipeline.create(options); 41 | Compiler.compile(pipeline, sourceSpec, sinkSpec); 42 | // For a Dataflow Flex Template, do NOT waitUntilFinish(). 
43 | pipeline.run(); 44 | } 45 | 46 | public static void main(final String[] args) { 47 | run(PipelineOptionsFactory.fromArgs(args).withValidation().as(TemplateOptions.class)); 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /udfs/unit_convert_distance.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This UDF is used to convert distances between metric and imperial systems. 3 | * 4 | * @param {Object} message - The message containing distance values to convert. 5 | * @param {Object} metadata - The metadata of the message. 6 | * @returns {Object} The message with converted distance values. 7 | */ 8 | function unit_convert_distance(message, metadata) { 9 | // Parse the message 10 | const data = JSON.parse(message.data); 11 | 12 | // Conversion factors 13 | const METERS_TO_FEET = 3.28084; 14 | const FEET_TO_METERS = 0.3048; 15 | const KILOMETERS_TO_MILES = 0.621371; 16 | const MILES_TO_KILOMETERS = 1.60934; 17 | 18 | // Convert metric to imperial if the metric field exists; 19 | if (data['metric_distance']) { 20 | const meters = parseFloat(data['metric_distance']); 21 | data['imperial_distance'] = (meters * METERS_TO_FEET).toFixed(2); 22 | data['imperial_unit'] = 'feet'; 23 | // otherwise convert imperial to metric. Using else-if ensures the value 24 | // computed above is not immediately converted back. 25 | } else if (data['imperial_distance']) { 26 | const feet = parseFloat(data['imperial_distance']); 27 | data['metric_distance'] = (feet * FEET_TO_METERS).toFixed(2); 28 | data['metric_unit'] = 'meters'; 29 | } 30 | 31 | // Convert kilometers to miles if the kilometers field exists; 32 | if (data['kilometers']) { 33 | const km = parseFloat(data['kilometers']); 34 | data['miles'] = (km * KILOMETERS_TO_MILES).toFixed(2); 35 | // otherwise convert miles to kilometers 36 | } else if (data['miles']) { 37 | const miles = parseFloat(data['miles']); 38 | data['kilometers'] = (miles * MILES_TO_KILOMETERS).toFixed(2); 39 | } 40 | 41 | // Update the message with converted data 42 | message.data = JSON.stringify(data); 43 | 44 | return message; 45 | } 46 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build Status](https://travis-ci.org/GoogleCloudPlatform/pubsub.svg?branch=master)](https://travis-ci.org/GoogleCloudPlatform/pubsub) 2 | 3 | This repository contains open-source projects managed by the owners of 4 | [Google Cloud Pub/Sub](https://cloud.google.com/pubsub/). The projects 5 | available are: 6 | 7 | * [Load Testing Framework](https://github.com/GoogleCloudPlatform/pubsub/tree/master/load-test-framework): 8 | Set up comparative load tests between [Apache Kafka](http://kafka.apache.org) 9 | and [Google Cloud Pub/Sub](https://cloud.google.com/pubsub/), as well as 10 | between different clients on the same stack (e.g. Http/Json and gRPC clients 11 | for CPS). Currently, we only support Maven 3 and Java 8. 12 | If you run into problems building with those versions, please reach out to us with your issue or solution. 13 | * [Ordering Keys Prober](https://github.com/GoogleCloudPlatform/pubsub/tree/master/ordering-keys-prober): 14 | A reference implementation for how to use ordering keys effectively. 15 | * [Flink Connector](https://github.com/GoogleCloudPlatform/pubsub/tree/master/flink-connector): 16 | Send and receive messages to/from [Apache Flink](https://flink.apache.org/).
17 | * DEPRECATED - [Kafka Connector](https://github.com/GoogleCloudPlatform/pubsub/tree/master/kafka-connector): 18 | Send and receive messages from [Apache Kafka](http://kafka.apache.org). *The 19 | connector will have future release from its own [repo](https://github.com/googleapis/java-pubsub-group-kafka-connector/).* 20 | * DEPRECATED - [Experimental high-performance client library](https://github.com/GoogleCloudPlatform/pubsub/tree/master/client): 21 | For Java along with [samples](https://github.com/GoogleCloudPlatform/google-cloud-java/tree/master/google-cloud-examples/src/main/java/com/google/cloud/examples/pubsub/snippets). 22 | 23 | Note: To build each of these projects, we recommend using maven. 24 | -------------------------------------------------------------------------------- /kafka-connector/src/main/java/com/google/pubsublite/kafka/sink/PublisherFactoryImpl.java: -------------------------------------------------------------------------------- 1 | package com.google.pubsublite.kafka.sink; 2 | 3 | import com.google.cloud.pubsublite.CloudZone; 4 | import com.google.cloud.pubsublite.ProjectPath; 5 | import com.google.cloud.pubsublite.PublishMetadata; 6 | import com.google.cloud.pubsublite.TopicName; 7 | import com.google.cloud.pubsublite.TopicPath; 8 | import com.google.cloud.pubsublite.internal.Publisher; 9 | import com.google.cloud.pubsublite.internal.wire.PubsubContext; 10 | import com.google.cloud.pubsublite.internal.wire.PubsubContext.Framework; 11 | import com.google.cloud.pubsublite.internal.wire.RoutingPublisherBuilder; 12 | import com.google.cloud.pubsublite.internal.wire.SinglePartitionPublisherBuilder; 13 | import java.util.Map; 14 | import org.apache.kafka.common.config.ConfigValue; 15 | 16 | class PublisherFactoryImpl implements PublisherFactory { 17 | 18 | private static final Framework FRAMEWORK = Framework.of("KAFKA_CONNECT"); 19 | 20 | @Override 21 | public Publisher newPublisher(Map params) { 22 | Map config = ConfigDefs.config().validateAll(params); 23 | RoutingPublisherBuilder.Builder builder = RoutingPublisherBuilder.newBuilder(); 24 | TopicPath topic = TopicPath.newBuilder() 25 | .setProject(ProjectPath.parse("projects/" + config.get(ConfigDefs.PROJECT_FLAG).value()).project()) 26 | .setLocation(CloudZone.parse(config.get(ConfigDefs.LOCATION_FLAG).value().toString())) 27 | .setName(TopicName.of(config.get(ConfigDefs.TOPIC_NAME_FLAG).value().toString())).build(); 28 | builder.setTopic(topic); 29 | builder.setPublisherFactory( 30 | partition -> SinglePartitionPublisherBuilder.newBuilder().setTopic(topic) 31 | .setPartition(partition).setContext( 32 | PubsubContext.of(FRAMEWORK)).build()); 33 | return builder.build(); 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/flic/controllers/ClientType.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
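PublisherFactoryImpl validates the raw connector properties through ConfigDefs and routes publishes per partition. A hypothetical direct invocation, assuming same-package access (the class is package-private); the flag values are placeholders, and the sink's ConfigDefs is assumed to define PROJECT_FLAG, LOCATION_FLAG, and TOPIC_NAME_FLAG as referenced in the code above:

import com.google.cloud.pubsublite.PublishMetadata;
import com.google.cloud.pubsublite.internal.Publisher;
import java.util.HashMap;
import java.util.Map;

static Publisher<PublishMetadata> examplePublisher() {
  Map<String, String> props = new HashMap<>();
  props.put(ConfigDefs.PROJECT_FLAG, "my-project");
  props.put(ConfigDefs.LOCATION_FLAG, "us-central1-a");
  props.put(ConfigDefs.TOPIC_NAME_FLAG, "my-lite-topic");
  return new PublisherFactoryImpl().newPublisher(props);
}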
13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.flic.controllers; 18 | 19 | import org.slf4j.Logger; 20 | import org.slf4j.LoggerFactory; 21 | 22 | public class ClientType { 23 | private static final Logger log = LoggerFactory.getLogger(ClientType.class); 24 | 25 | public enum MessagingType { 26 | CPS_GCLOUD 27 | } 28 | 29 | public enum Language { 30 | JAVA, 31 | PYTHON, 32 | RUBY, 33 | GO, 34 | NODE, 35 | DOTNET 36 | } 37 | 38 | public enum MessagingSide { 39 | PUBLISHER, 40 | SUBSCRIBER 41 | } 42 | 43 | public final MessagingType messaging; 44 | public final Language language; 45 | public final MessagingSide side; 46 | 47 | public ClientType(MessagingType messaging, Language language, MessagingSide side) { 48 | this.messaging = messaging; 49 | this.language = language; 50 | this.side = side; 51 | } 52 | 53 | public boolean isCps() { 54 | return messaging == MessagingType.CPS_GCLOUD; 55 | } 56 | 57 | public boolean isPublisher() { 58 | return side == MessagingSide.PUBLISHER; 59 | } 60 | 61 | @Override 62 | public String toString() { 63 | return (messaging.toString().replace("_", "-") + "-" + language + "-" + side).toLowerCase(); 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/main/java/com/google/pubsub/flink/internal/sink/PubSubPublisherCache.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
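ClientType.toString() joins the three enum fields with hyphens and lower-cases the result, which is how a client maps to its per-language resource name. For example:

ClientType javaPublisher =
    new ClientType(
        ClientType.MessagingType.CPS_GCLOUD,
        ClientType.Language.JAVA,
        ClientType.MessagingSide.PUBLISHER);

// Prints "cps-gcloud-java-publisher".
System.out.println(javaPublisher);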
15 | */ 16 | package com.google.pubsub.flink.internal.sink; 17 | 18 | import com.google.cloud.pubsub.v1.Publisher; 19 | import com.google.common.annotations.VisibleForTesting; 20 | import com.google.pubsub.v1.TopicName; 21 | import java.io.IOException; 22 | import java.util.HashMap; 23 | 24 | /** */ 25 | public class PubSubPublisherCache { 26 | private static final HashMap publishers = new HashMap<>(); 27 | 28 | public interface PublisherFactory { 29 | Publisher create(TopicName topicName) throws IOException; 30 | } 31 | 32 | static { 33 | Runtime.getRuntime().addShutdownHook(new Thread(PubSubPublisherCache::close)); 34 | } 35 | 36 | public static synchronized Publisher getOrCreate( 37 | TopicName topic, PublisherFactory publisherFactory) throws IOException { 38 | Publisher publisher = publishers.get(topic); 39 | if (publisher == null) { 40 | publisher = publisherFactory.create(topic); 41 | publishers.put(topic, publisher); 42 | } 43 | return publisher; 44 | } 45 | 46 | @VisibleForTesting 47 | static void close() { 48 | publishers.forEach((topic, publisher) -> publisher.shutdown()); 49 | publishers.clear(); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /.kokoro/dependencies.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2025 Google LLC 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | set -eo pipefail 17 | shopt -s nullglob 18 | 19 | ## Get the directory of the build script 20 | scriptDir=$(realpath $(dirname "${BASH_SOURCE[0]}")) 21 | ## cd to the parent directory, i.e. the root of the git repo 22 | cd ${scriptDir}/.. 23 | 24 | # include common functions 25 | source ${scriptDir}/common.sh 26 | 27 | # Print out Java 28 | java -version 29 | echo $JOB_TYPE 30 | 31 | function determineMavenOpts() { 32 | local javaVersion=$( 33 | # filter down to the version line, then pull out the version between quotes, 34 | # then trim the version number down to its minimal number (removing any 35 | # update or suffix number). 36 | java -version 2>&1 | grep "version" \ 37 | | sed -E 's/^.*"(.*?)".*$/\1/g' \ 38 | | sed -E 's/^(1\.[0-9]\.0).*$/\1/g' 39 | ) 40 | 41 | if [[ $javaVersion == 17* ]] 42 | then 43 | # MaxPermSize is no longer supported as of jdk 17 44 | echo -n "-Xmx1024m" 45 | else 46 | echo -n "-Xmx1024m -XX:MaxPermSize=128m" 47 | fi 48 | } 49 | 50 | export MAVEN_OPTS=$(determineMavenOpts) 51 | 52 | # this should run maven enforcer 53 | retry_with_backoff 3 10 \ 54 | mvn install -B -V -ntp \ 55 | -DskipTests=true \ 56 | -Dmaven.javadoc.skip=true \ 57 | -Dclirr.skip=true 58 | 59 | mvn -B dependency:analyze -DfailOnWarning=true -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/flic/controllers/Controller.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 
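PubSubPublisherCache keys publishers by topic only, so every sink writer in the JVM shares one Publisher per topic, and the registered shutdown hook shuts them all down at exit. A usage sketch with the standard google-cloud-pubsub builder (project and topic names are placeholders):

import com.google.cloud.pubsub.v1.Publisher;
import com.google.pubsub.v1.TopicName;
import java.io.IOException;

static Publisher sharedPublisher() throws IOException {
  TopicName topic = TopicName.of("my-project", "my-topic");
  // The factory lambda runs only on the first call for a given topic;
  // later calls return the cached instance.
  return PubSubPublisherCache.getOrCreate(topic, t -> Publisher.newBuilder(t).build());
}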
3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.flic.controllers; 18 | 19 | import com.google.protobuf.Timestamp; 20 | import com.google.pubsub.flic.common.LatencyTracker; 21 | import com.google.pubsub.flic.common.MessageTracker; 22 | import java.util.Map; 23 | 24 | public interface Controller { 25 | /** 26 | * Sends a LoadtestFramework.Start RPC to all clients to commence the load test. When this 27 | * function returns it is guaranteed that all clients have started. 28 | */ 29 | void start(MessageTracker messageTracker); 30 | 31 | /** 32 | * Shuts down the given environment. When this function returns, each client is guaranteed to be 33 | * in process of being deleted, or else output directions on how to manually delete any potential 34 | * remaining instances if unable. 35 | */ 36 | void stop(); 37 | 38 | /** Waits for clients to complete the load test. */ 39 | void waitForClients() throws Throwable; 40 | 41 | /** 42 | * Gets the current start time. 43 | * 44 | * @return the start time 45 | */ 46 | Timestamp getStartTime(); 47 | 48 | /** 49 | * Gets the results for all available types. 50 | * 51 | * @return the map from type to result, every type running is a valid key 52 | */ 53 | Map getClientLatencyTrackers(); 54 | } 55 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/main/java/com/google/pubsub/flink/PubSubDeserializationSchema.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
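The Controller contract implies a fixed driver sequence: start every client, block until the run completes, read back the latency trackers, and always tear the environment down. A shape-only sketch of that loop (constructing the Controller and MessageTracker is framework-specific and elided here):

import com.google.pubsub.flic.common.MessageTracker;

static void runLoadTest(Controller controller, MessageTracker messageTracker) throws Throwable {
  controller.start(messageTracker); // returns once all clients have started
  try {
    controller.waitForClients();    // blocks for the duration of the test
    System.out.println(
        "collected " + controller.getClientLatencyTrackers().size() + " latency trackers");
  } finally {
    controller.stop();              // always release VMs and other resources
  }
}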
15 | */ 16 | package com.google.pubsub.flink; 17 | 18 | import com.google.pubsub.v1.PubsubMessage; 19 | import java.io.Serializable; 20 | import javax.annotation.Nullable; 21 | import org.apache.flink.api.common.serialization.DeserializationSchema; 22 | import org.apache.flink.api.common.typeinfo.TypeInformation; 23 | 24 | public interface PubSubDeserializationSchema<T> extends Serializable { 25 | 26 | static <T> PubSubDeserializationSchema<T> dataOnly(DeserializationSchema<T> schema) { 27 | return new PubSubDeserializationSchema<T>() { 28 | @Override 29 | public void open(DeserializationSchema.InitializationContext context) throws Exception { 30 | schema.open(context); 31 | } 32 | 33 | @Override 34 | public T deserialize(PubsubMessage message) throws Exception { 35 | return schema.deserialize(message.getData().toByteArray()); 36 | } 37 | 38 | @Override 39 | public TypeInformation<T> getProducedType() { 40 | return schema.getProducedType(); 41 | } 42 | }; 43 | } 44 | 45 | void open(DeserializationSchema.InitializationContext context) throws Exception; 46 | 47 | @Nullable 48 | T deserialize(PubsubMessage message) throws Exception; 49 | 50 | TypeInformation<T> getProducedType(); 51 | } 52 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/test/java/com/google/pubsub/flink/internal/source/split/SubscriptionSplitTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License.
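The dataOnly adapter applies an ordinary Flink DeserializationSchema to just the message payload, ignoring attributes, message id, and publish time. The common case is UTF-8 string payloads via Flink's built-in SimpleStringSchema:

import org.apache.flink.api.common.serialization.SimpleStringSchema;

PubSubDeserializationSchema<String> schema =
    PubSubDeserializationSchema.dataOnly(new SimpleStringSchema());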
15 | */ 16 | package com.google.pubsub.flink.internal.source.split; 17 | 18 | import static com.google.common.truth.Truth.assertThat; 19 | 20 | import com.google.pubsub.flink.proto.SubscriptionSplitProto; 21 | import com.google.pubsub.v1.ProjectSubscriptionName; 22 | import org.junit.Test; 23 | 24 | public class SubscriptionSplitTest { 25 | @Test 26 | public void toProto_andBack() { 27 | SubscriptionSplit split = 28 | SubscriptionSplit.create(ProjectSubscriptionName.of("project", "subscription")); 29 | assertThat(SubscriptionSplit.fromProto(split.toProto())).isEqualTo(split); 30 | } 31 | 32 | @Test 33 | public void fromProto_andBack() { 34 | SubscriptionSplitProto proto = 35 | SubscriptionSplitProto.newBuilder() 36 | .setSubscription(ProjectSubscriptionName.of("project", "subscription").toString()) 37 | .setUid("unique-id") 38 | .build(); 39 | assertThat(SubscriptionSplit.fromProto(proto).toProto()).isEqualTo(proto); 40 | } 41 | 42 | @Test 43 | public void splitId_returnsString() { 44 | ProjectSubscriptionName subscription = ProjectSubscriptionName.of("project", "subscription"); 45 | SubscriptionSplit split = SubscriptionSplit.create(subscription, "id"); 46 | assertThat(split.splitId()).isEqualTo(String.format("%s-id", subscription.toString())); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /kafka-connector/src/main/java/com/google/pubsublite/kafka/source/PollerFactoryImpl.java: -------------------------------------------------------------------------------- 1 | package com.google.pubsublite.kafka.source; 2 | 3 | import com.google.cloud.pubsublite.CloudZone; 4 | import com.google.cloud.pubsublite.ProjectPath; 5 | import com.google.cloud.pubsublite.SubscriptionName; 6 | import com.google.cloud.pubsublite.SubscriptionPath; 7 | import com.google.cloud.pubsublite.cloudpubsub.FlowControlSettings; 8 | import com.google.cloud.pubsublite.kafka.ConsumerSettings; 9 | import java.util.Map; 10 | import org.apache.kafka.clients.consumer.Consumer; 11 | import org.apache.kafka.common.config.ConfigValue; 12 | 13 | class PollerFactoryImpl implements PollerFactory { 14 | 15 | @Override 16 | public Poller newPoller(Map params) { 17 | Map config = ConfigDefs.config().validateAll(params); 18 | SubscriptionPath path = SubscriptionPath.newBuilder() 19 | .setProject(ProjectPath 20 | .parse("projects/" + config.get(ConfigDefs.PROJECT_FLAG).value()).project()) 21 | .setLocation(CloudZone 22 | .parse(config.get(ConfigDefs.LOCATION_FLAG).value().toString())) 23 | .setName( 24 | SubscriptionName.of(config.get(ConfigDefs.SUBSCRIPTION_NAME_FLAG).value().toString())) 25 | .build(); 26 | FlowControlSettings flowControlSettings = FlowControlSettings.builder() 27 | .setMessagesOutstanding( 28 | (Long) config.get(ConfigDefs.FLOW_CONTROL_PARTITION_MESSAGES_FLAG).value()) 29 | .setBytesOutstanding( 30 | (Long) config.get(ConfigDefs.FLOW_CONTROL_PARTITION_BYTES_FLAG).value()).build(); 31 | Consumer consumer = ConsumerSettings.newBuilder().setAutocommit(true) 32 | .setSubscriptionPath( 33 | path).setPerPartitionFlowControlSettings(flowControlSettings).build().instantiate(); 34 | // There is only one topic for Pub/Sub Lite subscriptions, and the consumer only exposes this 35 | // topic. 
36 | consumer.subscribe(consumer.listTopics().keySet()); 37 | return new PollerImpl(config.get(ConfigDefs.KAFKA_TOPIC_FLAG).value().toString(), consumer); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /kafka-connector/src/main/java/com/google/pubsublite/kafka/source/ConfigDefs.java: -------------------------------------------------------------------------------- 1 | package com.google.pubsublite.kafka.source; 2 | 3 | import org.apache.kafka.common.config.ConfigDef; 4 | import org.apache.kafka.common.config.ConfigDef.Importance; 5 | 6 | final class ConfigDefs { 7 | 8 | private ConfigDefs() { 9 | } 10 | 11 | static final String PROJECT_FLAG = "pubsublite.project"; 12 | static final String LOCATION_FLAG = "pubsublite.location"; 13 | static final String SUBSCRIPTION_NAME_FLAG = "pubsublite.subscription"; 14 | static final String KAFKA_TOPIC_FLAG = "kafka.topic"; 15 | static final String FLOW_CONTROL_PARTITION_MESSAGES_FLAG = "pubsublite.partition_flow_control.messages"; 16 | static final String FLOW_CONTROL_PARTITION_BYTES_FLAG = "pubsublite.partition_flow_control.bytes"; 17 | 18 | static ConfigDef config() { 19 | return new ConfigDef() 20 | .define(PROJECT_FLAG, ConfigDef.Type.STRING, Importance.HIGH, 21 | "The project containing the Pub/Sub Lite subscription from which to pull.") 22 | .define(LOCATION_FLAG, ConfigDef.Type.STRING, Importance.HIGH, 23 | "The cloud zone (like europe-south7-q) containing the Pub/Sub Lite subscription from which to pull.") 24 | .define(SUBSCRIPTION_NAME_FLAG, ConfigDef.Type.STRING, Importance.HIGH, 25 | "The name of the Pub/Sub Lite subscription from which to pull.") 26 | .define( 27 | KAFKA_TOPIC_FLAG, 28 | ConfigDef.Type.STRING, 29 | Importance.HIGH, 30 | "The topic in Kafka which will receive messages that were pulled from Pub/Sub Lite.") 31 | .define( 32 | FLOW_CONTROL_PARTITION_MESSAGES_FLAG, 33 | ConfigDef.Type.LONG, 34 | Long.MAX_VALUE, 35 | Importance.MEDIUM, 36 | "The number of outstanding messages per-partition allowed. Set to Long.MAX_VALUE by default." 37 | ) 38 | .define( 39 | FLOW_CONTROL_PARTITION_BYTES_FLAG, 40 | ConfigDef.Type.LONG, 41 | 20_000_000, 42 | Importance.MEDIUM, 43 | "The number of outstanding bytes per-partition allowed. Set to 20MB by default." 44 | ); 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /load-test-framework/node_src/src/main.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License.
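Taken together, the six flags in ConfigDefs fully describe a Pub/Sub Lite source connector instance; Kafka Connect normally supplies them from a .properties file, but the factory can be exercised directly. A sketch assuming same-package access (both classes are package-private); values are placeholders, and the flow-control flags are left at their defaults (Long.MAX_VALUE messages, 20MB per partition):

import java.util.HashMap;
import java.util.Map;

static Poller examplePoller() {
  Map<String, String> props = new HashMap<>();
  props.put(ConfigDefs.PROJECT_FLAG, "my-project");                     // pubsublite.project
  props.put(ConfigDefs.LOCATION_FLAG, "europe-west1-b");                // pubsublite.location
  props.put(ConfigDefs.SUBSCRIPTION_NAME_FLAG, "my-lite-subscription"); // pubsublite.subscription
  props.put(ConfigDefs.KAFKA_TOPIC_FLAG, "destination-kafka-topic");    // kafka.topic
  return new PollerFactoryImpl().newPoller(props);
}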
15 | */ 16 | 17 | let parseArgs = require('minimist'); 18 | let PublisherTask = require('./publisher_task.js').PublisherTask; 19 | let SubscriberTask = require('./subscriber_task.js').SubscriberTask; 20 | let loadtestService = require('./loadtest_service.js'); 21 | let grpc = require('grpc'); 22 | 23 | if (require.main === module) { 24 | var argv = parseArgs(process.argv.slice(2), { 25 | boolean: 'publisher', 26 | integer: 'port', 27 | default: { 28 | port: 5000 29 | } 30 | }); 31 | if (undefined === argv.publisher) { 32 | process.exit(1); 33 | } 34 | 35 | process.on('unhandledRejection', (reason, p) => { 36 | console.log('Unhandled Rejection at: Promise', p, 'reason:', reason, 'stack trace:', reason.stack); 37 | process.exit(1); 38 | }); 39 | 40 | let task; 41 | if (argv.publisher) { 42 | task = new PublisherTask(); 43 | } else { 44 | task = new SubscriberTask(); 45 | } 46 | task.init().then(() => { 47 | let server = new grpc.Server(); 48 | server.addService(loadtestService.LoadtestWorker.service, { 49 | Start: task.startHandler.bind(task), 50 | Check: task.checkHandler.bind(task) 51 | }); 52 | server.bind('0.0.0.0:' + argv.port, grpc.ServerCredentials.createInsecure()); 53 | server.start(); 54 | console.log("starting " + (argv.publisher ? "publisher" : "subscriber") + " at port " + argv.port); 55 | }); 56 | } -------------------------------------------------------------------------------- /kafka-connector/src/main/java/com/google/pubsub/kafka/common/ConnectorCredentialsProvider.java: -------------------------------------------------------------------------------- 1 | // Copyright 2018 Google Inc. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | // 15 | //////////////////////////////////////////////////////////////////////////////// 16 | package com.google.pubsub.kafka.common; 17 | 18 | import com.google.api.gax.core.CredentialsProvider; 19 | import com.google.auth.Credentials; 20 | import com.google.auth.oauth2.GoogleCredentials; 21 | import java.io.ByteArrayInputStream; 22 | import java.io.FileInputStream; 23 | import java.io.IOException; 24 | import java.util.Arrays; 25 | import java.util.List; 26 | 27 | public class ConnectorCredentialsProvider implements CredentialsProvider { 28 | 29 | private static final List<String> CPS_SCOPE = 30 | Arrays.asList("https://www.googleapis.com/auth/pubsub"); 31 | 32 | GoogleCredentials credentials; 33 | 34 | public void loadFromFile(String credentialPath) throws IOException { 35 | this.credentials = GoogleCredentials.fromStream(new FileInputStream(credentialPath)); 36 | } 37 | 38 | public void loadJson(String credentialsJson) throws IOException { 39 | ByteArrayInputStream bs = new ByteArrayInputStream(credentialsJson.getBytes()); 40 | this.credentials = GoogleCredentials.fromStream(bs); 41 | } 42 | 43 | @Override 44 | public Credentials getCredentials() throws IOException { 45 | if (this.credentials == null) { 46 | return GoogleCredentials.getApplicationDefault().createScoped(CPS_SCOPE); 47 | } else { 48 | return this.credentials.createScoped(CPS_SCOPE); 49 | } 50 | } 51 | 52 | } 53 | -------------------------------------------------------------------------------- /load-test-framework/python_src/clients/loadtest_worker_servicer.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License.
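ConnectorCredentialsProvider resolves credentials in priority order: an explicitly loaded key file or JSON blob if one of the loaders was called, otherwise Application Default Credentials; in every case the result is scoped to the Pub/Sub API. A sketch (the key file path is a placeholder):

import com.google.auth.Credentials;
import java.io.IOException;

static Credentials resolve() throws IOException {
  ConnectorCredentialsProvider provider = new ConnectorCredentialsProvider();
  // Option 1: explicit service-account key file from connector config.
  provider.loadFromFile("/path/to/service-account.json");
  // Option 2: inline JSON credentials, e.g. provider.loadJson(jsonString);
  // If neither loader was called, getCredentials() falls back to
  // Application Default Credentials.
  return provider.getCredentials();
}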
14 | 15 | from proto_dir.loadtest_pb2 import StartResponse 16 | from proto_dir import loadtest_pb2_grpc 17 | from clients.task import Task 18 | from concurrent.futures import Executor, ThreadPoolExecutor 19 | import time 20 | from clients.to_float_seconds import to_float_seconds 21 | 22 | 23 | class LoadtestWorkerServicer(loadtest_pb2_grpc.LoadtestWorkerServicer): 24 | """Provides methods that implement functionality of load test server.""" 25 | 26 | def __init__(self, task): 27 | self.task = task # type: Task 28 | self.executor: Executor = ThreadPoolExecutor(max_workers=1) 29 | self.stopped = False 30 | self.start_time = None 31 | self.test_duration = None 32 | 33 | def Start(self, request, context): 34 | self.task.start(request) 35 | self.start_time = to_float_seconds(request.start_time) 36 | self.test_duration = to_float_seconds(request.test_duration) 37 | self.executor.submit(self.wait_then_stop) 38 | return StartResponse() 39 | 40 | def _time_since_start(self): 41 | return time.time() - self.start_time 42 | 43 | def wait_then_stop(self): 44 | time.sleep((self.start_time + self.test_duration) - time.time()) 45 | self.task.stop() 46 | self.stopped = True 47 | 48 | def Check(self, request, context): 49 | response = self.task.check() 50 | response.running_duration.seconds = int(self._time_since_start()) 51 | response.is_finished = self.stopped 52 | return response 53 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/main/java/com/google/pubsub/flink/internal/source/split/SubscriptionSplit.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.google.pubsub.flink.internal.source.split; 17 | 18 | import com.google.auto.value.AutoValue; 19 | import com.google.pubsub.flink.proto.SubscriptionSplitProto; 20 | import com.google.pubsub.v1.ProjectSubscriptionName; 21 | import java.util.UUID; 22 | import org.apache.flink.api.connector.source.SourceSplit; 23 | 24 | @AutoValue 25 | public abstract class SubscriptionSplit implements SourceSplit { 26 | public abstract ProjectSubscriptionName subscriptionName(); 27 | 28 | public abstract String uid(); 29 | 30 | public static SubscriptionSplit create(ProjectSubscriptionName subscriptionName, String uuid) { 31 | return new AutoValue_SubscriptionSplit(subscriptionName, uuid); 32 | } 33 | 34 | public static SubscriptionSplit create(ProjectSubscriptionName subscriptionName) { 35 | return create(subscriptionName, UUID.randomUUID().toString()); 36 | } 37 | 38 | public static SubscriptionSplit fromProto(SubscriptionSplitProto proto) { 39 | return SubscriptionSplit.create( 40 | ProjectSubscriptionName.parse(proto.getSubscription()), proto.getUid()); 41 | } 42 | 43 | public SubscriptionSplitProto toProto() { 44 | return SubscriptionSplitProto.newBuilder() 45 | .setSubscription(subscriptionName().toString()) 46 | .setUid(uid()) 47 | .build(); 48 | } 49 | 50 | @Override 51 | public String splitId() { 52 | return String.format("%s-%s", subscriptionName().toString(), uid()); 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /kafka-connector/src/test/java/com/google/pubsub/kafka/sink/CloudPubSubSinkConnectorTest.java: -------------------------------------------------------------------------------- 1 | // Copyright 2016 Google Inc. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | // 15 | //////////////////////////////////////////////////////////////////////////////// 16 | package com.google.pubsub.kafka.sink; 17 | 18 | import static org.junit.Assert.assertEquals; 19 | 20 | import com.google.pubsub.kafka.common.ConnectorUtils; 21 | import java.util.HashMap; 22 | import java.util.List; 23 | import java.util.Map; 24 | import org.junit.Before; 25 | import org.junit.Test; 26 | 27 | /** Tests for {@link CloudPubSubSinkConnector}. 
*/ 28 | public class CloudPubSubSinkConnectorTest { 29 | 30 | private static final int NUM_TASKS = 10; 31 | private static final String CPS_PROJECT = "hello"; 32 | private static final String CPS_TOPIC = "world"; 33 | 34 | private CloudPubSubSinkConnector connector; 35 | private Map<String, String> props; 36 | 37 | @Before 38 | public void setup() { 39 | connector = new CloudPubSubSinkConnector(); 40 | props = new HashMap<>(); 41 | props.put(ConnectorUtils.CPS_PROJECT_CONFIG, CPS_PROJECT); 42 | props.put(ConnectorUtils.CPS_TOPIC_CONFIG, CPS_TOPIC); 43 | } 44 | 45 | @Test 46 | public void testTaskConfigs() { 47 | connector.start(props); 48 | List<Map<String, String>> taskConfigs = connector.taskConfigs(NUM_TASKS); 49 | assertEquals(NUM_TASKS, taskConfigs.size()); 50 | for (int i = 0; i < taskConfigs.size(); ++i) { 51 | assertEquals(props, taskConfigs.get(i)); 52 | } 53 | } 54 | 55 | @Test 56 | public void testTaskClass() { 57 | assertEquals(CloudPubSubSinkTask.class, connector.taskClass()); 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/main/java/com/google/pubsub/flink/internal/source/reader/PubSubRecordEmitter.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License.
15 | */ 16 | package com.google.pubsub.flink.internal.source.reader; 17 | 18 | import com.google.protobuf.util.Timestamps; 19 | import com.google.pubsub.flink.PubSubDeserializationSchema; 20 | import com.google.pubsub.flink.internal.source.split.SubscriptionSplitState; 21 | import com.google.pubsub.v1.PubsubMessage; 22 | import java.io.IOException; 23 | import org.apache.flink.api.connector.source.SourceOutput; 24 | import org.apache.flink.connector.base.source.reader.RecordEmitter; 25 | 26 | public class PubSubRecordEmitter<T> 27 | implements RecordEmitter<PubsubMessage, T, SubscriptionSplitState> { 28 | private final PubSubDeserializationSchema<T> deserializationSchema; 29 | private final AckTracker ackTracker; 30 | 31 | public PubSubRecordEmitter( 32 | PubSubDeserializationSchema<T> deserializationSchema, AckTracker ackTracker) { 33 | this.deserializationSchema = deserializationSchema; 34 | this.ackTracker = ackTracker; 35 | } 36 | 37 | @Override 38 | public void emitRecord( 39 | PubsubMessage message, SourceOutput<T> sourceOutput, SubscriptionSplitState state) 40 | throws Exception { 41 | try { 42 | sourceOutput.collect( 43 | deserializationSchema.deserialize(message), 44 | Timestamps.toMillis(message.getPublishTime())); 45 | ackTracker.stagePendingAck(message.getMessageId()); 46 | } catch (Exception e) { 47 | throw new IOException("Failed to deserialize PubsubMessage", e); 48 | } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/main/java/com/google/pubsub/flink/internal/source/reader/AckTracker.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.google.pubsub.flink.internal.source.reader; 17 | 18 | import com.google.cloud.pubsub.v1.AckReplyConsumer; 19 | 20 | /** This class tracks the lifecycle of messages in {@link PubSubSource}. */ 21 | public interface AckTracker { 22 | /** 23 | * Track a new pending ack. Acks are pending when a message has been received but not yet 24 | * processed by the Flink pipeline. 25 | * 26 | *
<p>
If there is already a pending ack for {@code messageId}, the existing ack is replaced. 27 | */ 28 | void addPendingAck(String messageId, AckReplyConsumer ackReplyConsumer); 29 | 30 | /** 31 | * Stage a pending ack for the next checkpoint snapshot. Staged acks indicate that a message has 32 | * been emitted to the Flink pipeline and should be included in the next checkpoint. 33 | */ 34 | void stagePendingAck(String messageId); 35 | 36 | /** 37 | * Prepare all staged acks to be acknowledged to Google Cloud Pub/Sub when checkpoint {@code 38 | * checkpointId} completes. 39 | */ 40 | void addCheckpoint(long checkpointId); 41 | 42 | /** 43 | * Acknowledge all staged acks in checkpoint {@code checkpointId} and stop tracking them in this 44 | * {@link AckTracker}. 45 | */ 46 | void notifyCheckpointComplete(long checkpointId); 47 | 48 | /** 49 | * Negatively acknowledge (nack) and stop tracking all acks currently tracked by this {@link 50 | * AckTracker}. Nacked messages are eligible for redelivery by Google Cloud Pub/Sub before the 51 | * message's ack deadline expires. 52 | */ 53 | void nackAll(); 54 | } 55 | -------------------------------------------------------------------------------- /load-test-framework/python_src/clients/flow_control/outstanding_count_flow_controller.py: -------------------------------------------------------------------------------- 1 | from clients.flow_control import FlowController 2 | from threading import Condition 3 | from concurrent.futures import ThreadPoolExecutor, Executor 4 | from cachetools import TTLCache 5 | import sys 6 | import time 7 | 8 | expiry_latency_seconds = 15 9 | rate_update_delay_seconds = .1 10 | 11 | 12 | class OutstandingCountFlowController(FlowController): 13 | """ 14 | A FlowController that tries to ensure that the outstanding count is roughly equivalent to the 15 | completion rate in the next two seconds. 
16 | """ 17 | 18 | def __init__(self, initial_per_second: float): 19 | self.rate_per_second = max(initial_per_second, 1.0) 20 | self.index = 0 21 | self.outstanding = 0 22 | self.condition = Condition() 23 | self.expiry_cache = TTLCache(sys.maxsize, expiry_latency_seconds) 24 | self.executor: Executor = ThreadPoolExecutor(max_workers=1) 25 | self.executor.submit(self.reset_rate_in, expiry_latency_seconds) 26 | 27 | def reset_rate_in(self, delay_seconds: float): 28 | time.sleep(delay_seconds) 29 | with self.condition: 30 | self.expiry_cache.expire() 31 | self.rate_per_second = float(self.expiry_cache.currsize) / expiry_latency_seconds 32 | self.condition.notify() 33 | self.reset_rate_in(rate_update_delay_seconds) 34 | 35 | def _num_available(self): 36 | return int((self.rate_per_second * 2) - self.outstanding) 37 | 38 | def request_start(self): 39 | with self.condition: 40 | num_available = self._num_available() 41 | while num_available < 1: 42 | self.condition.wait() 43 | num_available = self._num_available() 44 | self.outstanding += num_available 45 | return num_available 46 | 47 | @staticmethod 48 | def _next_index(index: int): 49 | return (index + 1) % sys.maxsize 50 | 51 | def inform_finished(self, was_successful: bool): 52 | with self.condition: 53 | if was_successful: 54 | index = self.index 55 | self.index = self._next_index(index) 56 | self.expiry_cache[index] = None 57 | self.outstanding -= 1 58 | self.condition.notify() 59 | -------------------------------------------------------------------------------- /load-test-framework/src/main/java/com/google/pubsub/flic/controllers/resource_controllers/ResourceController.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 15 | */ 16 | 17 | package com.google.pubsub.flic.controllers.resource_controllers; 18 | 19 | import com.google.common.util.concurrent.ListenableFuture; 20 | import com.google.common.util.concurrent.SettableFuture; 21 | import java.util.concurrent.ScheduledExecutorService; 22 | 23 | /** 24 | * A ResourceController creates a resource when start is called and cleans it up when stop is 25 | * called. 26 | * 27 | *
<p>
These operations are complete when their future returns. 28 | */ 29 | public abstract class ResourceController { 30 | private final ScheduledExecutorService executor; 31 | 32 | protected ResourceController(ScheduledExecutorService executor) { 33 | this.executor = executor; 34 | } 35 | 36 | public ListenableFuture start() { 37 | SettableFuture future = SettableFuture.create(); 38 | executor.execute( 39 | () -> { 40 | try { 41 | startAction(); 42 | } catch (Exception e) { 43 | future.setException(e); 44 | return; 45 | } 46 | future.set(null); 47 | }); 48 | return future; 49 | } 50 | 51 | public ListenableFuture stop() { 52 | SettableFuture future = SettableFuture.create(); 53 | executor.execute( 54 | () -> { 55 | try { 56 | stopAction(); 57 | } catch (Exception e) { 58 | future.setException(e); 59 | return; 60 | } 61 | future.set(null); 62 | }); 63 | return future; 64 | } 65 | 66 | protected abstract void startAction() throws Exception; 67 | 68 | protected abstract void stopAction() throws Exception; 69 | } 70 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/test/java/com/google/pubsub/flink/internal/source/reader/PubSubRecordEmitterTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
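Concrete subclasses of ResourceController implement only the blocking startAction/stopAction pair; the base class schedules them on the shared executor and surfaces failures through the returned futures. A minimal hypothetical subclass:

import java.util.concurrent.ScheduledExecutorService;

final class ExampleResourceController extends ResourceController {
  ExampleResourceController(ScheduledExecutorService executor) {
    super(executor);
  }

  @Override
  protected void startAction() throws Exception {
    // Create the resource, blocking until it exists.
    // Any exception thrown here fails the future returned by start().
  }

  @Override
  protected void stopAction() throws Exception {
    // Delete the resource, blocking until cleanup is done.
    // Any exception thrown here fails the future returned by stop().
  }
}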
15 | */ 16 | package com.google.pubsub.flink.internal.source.reader; 17 | 18 | import static org.mockito.Mockito.verify; 19 | import static org.mockito.Mockito.when; 20 | 21 | import com.google.protobuf.ByteString; 22 | import com.google.protobuf.util.Timestamps; 23 | import com.google.pubsub.flink.PubSubDeserializationSchema; 24 | import com.google.pubsub.flink.internal.source.split.SubscriptionSplitState; 25 | import com.google.pubsub.v1.PubsubMessage; 26 | import org.apache.flink.api.connector.source.SourceOutput; 27 | import org.junit.Test; 28 | import org.junit.runner.RunWith; 29 | import org.mockito.Mock; 30 | import org.mockito.junit.MockitoJUnitRunner; 31 | 32 | @RunWith(MockitoJUnitRunner.class) 33 | public class PubSubRecordEmitterTest { 34 | @Mock PubSubDeserializationSchema mockDeserializer; 35 | @Mock SourceOutput mockSource; 36 | @Mock AckTracker mockAckTracker; 37 | 38 | @Test 39 | public void emit_deserializesMessage() throws Exception { 40 | PubSubRecordEmitter recordEmitter = 41 | new PubSubRecordEmitter(mockDeserializer, mockAckTracker); 42 | 43 | PubsubMessage message = 44 | PubsubMessage.newBuilder() 45 | .setData(ByteString.copyFromUtf8("message")) 46 | .setMessageId("message-id") 47 | .setPublishTime(Timestamps.fromMillis(12345L)) 48 | .build(); 49 | when(mockDeserializer.deserialize(message)).thenReturn("message"); 50 | 51 | recordEmitter.emitRecord(message, mockSource, new SubscriptionSplitState(null)); 52 | verify(mockDeserializer).deserialize(message); 53 | verify(mockSource).collect("message", 12345L); 54 | verify(mockAckTracker).stagePendingAck("message-id"); 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /flink-connector/flink-connector-gcp-pubsub/src/test/java/com/google/pubsub/flink/internal/sink/PubSubPublisherCacheTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.google.pubsub.flink.internal.sink; 17 | 18 | import static com.google.common.truth.Truth.assertThat; 19 | import static org.mockito.Mockito.verify; 20 | 21 | import com.google.cloud.pubsub.v1.Publisher; 22 | import com.google.pubsub.v1.TopicName; 23 | import org.junit.Test; 24 | import org.junit.runner.RunWith; 25 | import org.mockito.Mock; 26 | import org.mockito.junit.MockitoJUnitRunner; 27 | 28 | @RunWith(MockitoJUnitRunner.class) 29 | public class PubSubPublisherCacheTest { 30 | @Mock Publisher publisher1; 31 | @Mock Publisher publisher2; 32 | 33 | @Test 34 | public void getOrCreate_cachesPublishers() throws Exception { 35 | TopicName topic1 = TopicName.of("project1", "topic1"); 36 | TopicName topic2 = TopicName.of("project2", "topic2"); 37 | 38 | assertThat(PubSubPublisherCache.getOrCreate(topic1, (topic) -> publisher1)) 39 | .isEqualTo(publisher1); 40 | assertThat(PubSubPublisherCache.getOrCreate(topic1, (topic) -> publisher2)) 41 | .isEqualTo(publisher1); 42 | assertThat(PubSubPublisherCache.getOrCreate(topic2, (topic) -> publisher2)) 43 | .isEqualTo(publisher2); 44 | } 45 | 46 | @Test 47 | public void close_shutsdownPublishers() throws Exception { 48 | assertThat( 49 | PubSubPublisherCache.getOrCreate( 50 | TopicName.of("project1", "topic1"), (topic) -> publisher1)) 51 | .isEqualTo(publisher1); 52 | assertThat( 53 | PubSubPublisherCache.getOrCreate( 54 | TopicName.of("project2", "topic2"), (topic) -> publisher2)) 55 | .isEqualTo(publisher2); 56 | 57 | PubSubPublisherCache.close(); 58 | verify(publisher1).shutdown(); 59 | verify(publisher2).shutdown(); 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # Github action job to test core java library features on 15 | # downstream client libraries before they are released. 
16 | on: 17 | push: 18 | branches: 19 | - main 20 | pull_request: 21 | name: ci 22 | jobs: 23 | units: 24 | runs-on: ubuntu-latest 25 | strategy: 26 | fail-fast: false 27 | matrix: 28 | java: [8, 11] 29 | steps: 30 | - uses: actions/checkout@v3 31 | - uses: actions/setup-java@v3 32 | with: 33 | distribution: zulu 34 | java-version: ${{matrix.java}} 35 | - run: java -version 36 | - run: chmod +x .kokoro/* 37 | - run: .kokoro/build.sh 38 | env: 39 | JOB_TYPE: test 40 | windows: 41 | runs-on: windows-latest 42 | steps: 43 | - name: Support longpaths 44 | run: git config --system core.longpaths true 45 | - uses: actions/checkout@v3 46 | - uses: actions/setup-java@v3 47 | with: 48 | distribution: zulu 49 | java-version: 8 50 | - run: java -version 51 | - run: chmod +x .kokoro/* 52 | - run: .kokoro/build.bat 53 | env: 54 | JOB_TYPE: test 55 | dependencies: 56 | runs-on: ubuntu-latest 57 | strategy: 58 | matrix: 59 | java: [8, 11] 60 | steps: 61 | - uses: actions/checkout@v3 62 | - uses: actions/setup-java@v3 63 | with: 64 | distribution: zulu 65 | java-version: ${{matrix.java}} 66 | - run: java -version 67 | - run: chmod +x .kokoro/* 68 | - run: .kokoro/dependencies.sh 69 | lint: 70 | runs-on: ubuntu-latest 71 | steps: 72 | - uses: actions/checkout@v3 73 | - uses: actions/setup-java@v3 74 | with: 75 | distribution: zulu 76 | java-version: 11 77 | - run: java -version 78 | - run: chmod +x .kokoro/* 79 | - run: .kokoro/build.sh 80 | env: 81 | JOB_TYPE: lint -------------------------------------------------------------------------------- /load-test-framework/node_src/src/subscriber_task.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Google Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * You may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions 14 | * and limitations under the License. 
15 | */ 16 | 17 | let task = require('./task.js'); 18 | let metrics_tracker = require('./metrics_tracker.js'); 19 | let {PubSub} = require('@google-cloud/pubsub'); 20 | 21 | let BYTES_PER_PROCESS = 500000000; // 500MB per process (1 per core) 22 | 23 | class SubscriberSubtaskWorker extends task.SubtaskWorker { 24 | constructor() { 25 | super(); 26 | } 27 | 28 | childStart(startRequest) { 29 | let client = new PubSub({ 30 | projectId: startRequest.project 31 | }); 32 | let topic = client.topic(startRequest.topic); 33 | let options = { 34 | flowControl: { 35 | maxBytes: BYTES_PER_PROCESS, 36 | maxMessages: Number.MAX_SAFE_INTEGER, 37 | }, 38 | }; 39 | let subscription = topic.subscription(startRequest.pubsub_options.subscription, options); 40 | subscription.on('message', this.onMessage.bind(this)); 41 | subscription.on(`error`, error => { 42 | console.error(`ERROR: ${error}`); 43 | }); 44 | } 45 | 46 | onMessage(message) { 47 | let latency = (new Date).getTime() - parseInt(message.attributes['sendTime']); 48 | let pubId = parseInt(message.attributes['clientId']); 49 | let sequenceNumber = parseInt(message.attributes['sequenceNumber']); 50 | let messageAndDuration = new metrics_tracker.MessageAndDuration( 51 | pubId, sequenceNumber, latency); 52 | this.metricsTracker.put(messageAndDuration); 53 | message.ack(); 54 | } 55 | } 56 | 57 | class SubscriberWorker extends task.TaskWorker { 58 | constructor() { 59 | super(__dirname + '/subscriber_task_main.js'); 60 | } 61 | } 62 | 63 | class SubscriberTask extends task.Task { 64 | getWorker() { 65 | return new SubscriberWorker(); 66 | } 67 | } 68 | 69 | module.exports = { 70 | SubscriberTask: SubscriberTask, 71 | SubscriberSubtaskWorker: SubscriberSubtaskWorker 72 | }; 73 | --------------------------------------------------------------------------------