├── .gcloudignore
├── datastream_utils
├── source_excluded_objects.json
├── source_config.json
├── destination_config.json
├── Dockerfile
├── datastream
│ ├── __init__.py
│ ├── datastream_v1alpha1_client.py
│ └── datastream_v1alpha1_messages.py
├── README.md
├── BUILD
├── runner.py
├── cloud_datastream_resource_manager_test.py
└── cloud_datastream_resource_manager.py
├── .gitignore
├── manage_datastream
└── datastream
│ ├── __init__.pyc
│ └── datastream_v1alpha1_client.pyc
├── cicd
├── build_docker.sh
├── Dockerfile
└── cloudbuild.yaml
├── DataValidationDockerfile
├── CONTRIBUTING.md
├── ora2pg
└── config
│ └── ora2pg.conf
├── Ora2PGDockerfile
├── dataflow.sh
├── data_validation.sh
├── deploy_resources.sh
├── ora2pg.sh
├── Makefile
├── README.md
└── LICENSE
/.gcloudignore:
--------------------------------------------------------------------------------
1 | # Ignore in Builds
2 |
--------------------------------------------------------------------------------
/datastream_utils/source_excluded_objects.json:
--------------------------------------------------------------------------------
1 | {}
2 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Ignore Copied Oracle rpms
2 | oracle/*.rpm
3 |
--------------------------------------------------------------------------------
/datastream_utils/source_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "allowlist": {},
3 | "rejectlist": {}
4 | }
5 |
--------------------------------------------------------------------------------
/datastream_utils/destination_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "path": "",
3 | "file_rotation_mb":5,
4 | "file_rotation_interval":"15s",
5 | "avro_file_format": {}
6 | }
7 |
--------------------------------------------------------------------------------
/manage_datastream/datastream/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GoogleCloudPlatform/oracle-to-postgres/HEAD/manage_datastream/datastream/__init__.pyc
--------------------------------------------------------------------------------
/manage_datastream/datastream/datastream_v1alpha1_client.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GoogleCloudPlatform/oracle-to-postgres/HEAD/manage_datastream/datastream/datastream_v1alpha1_client.pyc
--------------------------------------------------------------------------------
/cicd/build_docker.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | export PROJECT_ID=dms-heterogeneous
4 |
5 | cp -r ../oracle .
6 | gcloud builds submit --project=${PROJECT_ID} --tag=gcr.io/${PROJECT_ID}/build-oracle-to-postgres
7 |
8 | # Cleanup
9 | rm -rf oracle/
10 |
--------------------------------------------------------------------------------
/datastream_utils/Dockerfile:
--------------------------------------------------------------------------------
# Slim image that runs the Datastream manager CLI (runner.py).
FROM python:3.8.0-slim

# google-apitools backs the generated Datastream client; absl-py provides
# the flags/app framework used by runner.py.
RUN pip install --upgrade pip
RUN pip install google-apitools
RUN pip install absl-py

# Copy the CLI entry point, the resource manager, and the generated client.
COPY runner.py .
COPY cloud_datastream_resource_manager.py .
COPY datastream datastream/

# All container arguments are passed straight through as runner.py flags.
ENTRYPOINT ["python", "runner.py"]
12 |
--------------------------------------------------------------------------------
/datastream_utils/datastream/__init__.py:
--------------------------------------------------------------------------------
"""Common imports for generated datastream client library."""
# pylint:disable=wildcard-import

from __future__ import absolute_import

import pkgutil

# Re-export the apitools base helpers plus the generated client and message
# classes so callers can simply do `from datastream import <name>`.
from apitools.base.py import *
from .datastream_v1alpha1_client import *
from .datastream_v1alpha1_messages import *

# Standard pkgutil idiom for generated packages: allow this package to be
# split across multiple directories on sys.path.
__path__ = pkgutil.extend_path(__path__, __name__)
13 |
--------------------------------------------------------------------------------
/cicd/Dockerfile:
--------------------------------------------------------------------------------
# Alpine docker-in-docker image used as the Cloud Build builder for this repo.
FROM docker

# Install General Requirements
RUN apk update \
  && apk add bash \
  && apk add curl \
  && apk add make \
  && apk add unzip \
  && apk add wget \
  && apk add which \
  && apk add util-linux

# Install GCloud (interactive installer piped through bash; installs to /root)
RUN apk add python3 py3-pip
RUN curl -sSL https://sdk.cloud.google.com | bash
ENV PATH $PATH:/root/google-cloud-sdk/bin
RUN gcloud components install beta --quiet

# Copy Oracle Files (client rpms staged into the build context by
# build_docker.sh; consumed by the Ora2PG / data-validation images)
RUN mkdir /oracle
COPY oracle/*.rpm /oracle/

# Default to running the repo Makefile when no command is given.
CMD ["make"]
24 |
--------------------------------------------------------------------------------
/DataValidationDockerfile:
--------------------------------------------------------------------------------
# Image for the Data Validation Tool (DVT) with Oracle client support.
FROM python:3.8.0-slim

RUN apt-get update \
  && apt-get install gcc -y \
  && apt-get install wget -y \
  && apt-get clean

# Pinned DVT release plus the Oracle Python driver.
RUN pip install --upgrade pip
RUN pip install google-pso-data-validator==1.6.0
RUN pip install cx_Oracle

# Install Oracle12 ODBC required packages
ENV ORACLE_SID oracle
ENV ORACLE_ODBC_VERSION 12.2
ENV ORACLE_HOME /usr/lib/oracle/${ORACLE_ODBC_VERSION}/client64

RUN apt-get -y install --fix-missing --upgrade vim alien unixodbc-dev wget libaio1 libaio-dev

# Oracle instant-client rpms must be staged into the build context as
# oracle/*.rpm; alien converts them to debs and installs them.
COPY oracle/*.rpm ./
RUN alien -i *.rpm && rm *.rpm \
  && echo "/usr/lib/oracle/${ORACLE_ODBC_VERSION}/client64/lib/" > /etc/ld.so.conf.d/oracle.conf \
  && ln -s /usr/include/oracle/${ORACLE_ODBC_VERSION}/client64 $ORACLE_HOME/include \
  && ldconfig -v

# Container arguments are passed straight to the data_validation CLI.
ENTRYPOINT ["python", "-m", "data_validation"]
26 |
--------------------------------------------------------------------------------
/datastream_utils/README.md:
--------------------------------------------------------------------------------
1 | # Datastream Manager Client
2 |
3 | ## Set Up
4 |
5 | **Build the Datastream Proto Packages**
6 |
7 | ```
8 | blaze build google/cloud/datastream:python_client_v1alpha1
9 |
10 | cp -r blaze-genfiles/google/cloud/datastream/datastream experimental/dhercher/datastream_utils/
11 | ```
12 |
13 | **Copy Dataflow Testing Utils**
14 |
15 | This should eventually happen in reverse. This util should be imported by
16 | Dataflow.
17 |
18 | `g4 copy
19 | cloud/dataflow/testing/integration/teleport/environment/cloud_datastream_resource_manager.py
20 | experimental/dhercher/datastream_utils/`
21 |
22 | `g4 copy
23 | cloud/dataflow/testing/integration/teleport/environment/cloud_datastream_resource_manager_test.py
24 | experimental/dhercher/datastream_utils/`
25 |
26 | **[All changes required to make the util OSS](http://cl/368307780)**
27 |
28 | ## Run Tests
29 |
30 | `blaze test
31 | //experimental/dhercher/datastream_utils:cloud_datastream_resource_manager_test`
32 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # How to Contribute
2 |
3 | We'd love to accept your patches and contributions to this project. There are
4 | just a few small guidelines you need to follow.
5 |
6 | ## Contributor License Agreement
7 |
8 | Contributions to this project must be accompanied by a Contributor License
9 | Agreement. You (or your employer) retain the copyright to your contribution;
10 | this simply gives us permission to use and redistribute your contributions as
part of the project. Head over to <https://cla.developers.google.com/> to see
your current agreements on file or to sign a new one.
13 |
14 | You generally only need to submit a CLA once, so if you've already submitted one
15 | (even if it was for a different project), you probably don't need to do it
16 | again.
17 |
18 | ## Code reviews
19 |
20 | All submissions, including submissions by project members, require review. We
21 | use GitHub pull requests for this purpose. Consult
22 | [GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
23 | information on using pull requests.
24 |
25 | ## Community Guidelines
26 |
27 | This project follows [Google's Open Source Community
28 | Guidelines](https://opensource.google/conduct/).
29 |
30 | ## Development environment
31 |
32 | Clone repo locally and run make to see available commands.
33 |
--------------------------------------------------------------------------------
/ora2pg/config/ora2pg.conf:
--------------------------------------------------------------------------------
1 | #################### Ora2Pg Configuration file #####################
2 |
3 | #------------------------------------------------------------------------------
4 | # INPUT SECTION (Oracle connection or input file)
5 | #------------------------------------------------------------------------------
6 |
7 | # Set the Oracle home directory
8 | ORACLE_HOME /usr/local/oracle/10g
9 |
10 | # Set Oracle database connection (datasource, user, password)
11 | ORACLE_DSN dbi:Oracle:host=;sid=;port=
12 | ORACLE_USER
13 | ORACLE_PWD
14 |
15 | PG_VERSION 11
16 |
17 | #------------------------------------------------------------------------------
18 | # SCHEMA SECTION (Oracle schema to export and use of schema in PostgreSQL)
19 | #------------------------------------------------------------------------------
20 |
21 | # Export Oracle schema to PostgreSQL schema
22 | EXPORT_SCHEMA 1
23 | FORCE_OWNER
24 | ROLES 1
25 |
26 | USE_RESERVED_WORDS 1
27 | FKEY_DEFERRABLE 1
28 |
29 | #------------------------------------------------------------------------------
30 | # EXPORT SECTION (Export type and filters)
31 | #------------------------------------------------------------------------------
32 |
33 | TYPE TABLE
34 |
35 | OUTPUT_DIR /data
36 | OUTPUT output.sql
37 | FILE_PER_INDEX 0
38 | FILE_PER_CONSTRAINT 0
39 | FILE_PER_FKEYS 1
40 | FILE_PER_TABLE 0
41 |
42 |
--------------------------------------------------------------------------------
/Ora2PGDockerfile:
--------------------------------------------------------------------------------
# Image that runs Ora2Pg (Oracle -> PostgreSQL schema conversion).
FROM perl:slim

# Pin a specific ora2pg commit for reproducible builds.
ARG ORA2PG_VERSION=21.0
ARG ORA2PG_HASH=607179813b79c105de8934f75e426fccb69c41d4

# Install General Requirements
RUN apt-get update \
  && apt-get install wget -y \
  && apt-get install unzip -y \
  && apt-get clean

# Install ora2pg
RUN wget --output-document=/tmp/ora2pg.zip https://github.com/darold/ora2pg/archive/${ORA2PG_HASH}.zip
RUN unzip -d /tmp/ /tmp/ora2pg.zip
RUN cd /tmp/ora2pg-${ORA2PG_HASH}/ && perl Makefile.PL && make && make install

# Install Oracle12 ODBC required packages
ENV ORACLE_SID oracle
ENV ORACLE_ODBC_VERSION 12.2
ENV ORACLE_HOME /usr/lib/oracle/${ORACLE_ODBC_VERSION}/client64

RUN apt-get -y install --fix-missing --upgrade vim alien unixodbc-dev wget libaio1 libaio-dev

# Oracle instant-client rpms must be staged into the build context as
# oracle/*.rpm; alien converts them to debs and installs them.
COPY oracle/*.rpm ./
RUN alien -i *.rpm && rm *.rpm \
  && echo "/usr/lib/oracle/${ORACLE_ODBC_VERSION}/client64/lib/" > /etc/ld.so.conf.d/oracle.conf \
  && ln -s /usr/include/oracle/${ORACLE_ODBC_VERSION}/client64 $ORACLE_HOME/include \
  && ldconfig -v

# Install DBI module with Oracle, Postgres and Compress::Zlib modules
RUN perl -MCPAN -e 'install DBI' &&\
    perl -MCPAN -e 'install DBD::Pg' &&\
    perl -MCPAN -e 'install DBD::Oracle' &&\
    perl -MCPAN -e 'install Bundle::Compress::Zlib'


# Create Directories (ora2pg reads /etc/ora2pg/ora2pg.conf, which is
# symlinked to the mounted /config volume)
RUN mkdir /config /data
RUN ln -s /config/ora2pg.conf /etc/ora2pg/ora2pg.conf

VOLUME /config
VOLUME /data

WORKDIR /

CMD ["ora2pg"]
47 |
--------------------------------------------------------------------------------
/datastream_utils/BUILD:
--------------------------------------------------------------------------------
load("//devtools/python/blaze:pytype.bzl", "pytype_strict_library")
load("//devtools/python/blaze:strict.bzl", "py_strict_test")

# Everything in this package is test-only tooling.
package(
    default_testonly = 1,
    # default_visibility = [
    # "//*",
    # ],
)

################################################################################
# Definitions for the teleport pipeline resource manager utilities
################################################################################

pytype_strict_library(
    name = "cloud_datastream_resource_manager",
    srcs = ["cloud_datastream_resource_manager.py"],
    srcs_version = "PY3",
    deps = [
        "//cloud/dataflow/testing/creds:service_accounts",
        "//cloud/dataflow/testing/framework/environment:file_helper",
        "//cloud/dataflow/testing/framework/protos:resource_manager_result_py_pb2",
        "//cloud/dataflow/testing/framework/protos:shared_framework_enums_py_pb2",
        "//google/cloud/datastream:python_client_v1alpha1",
        "//pipeline/testing/uif/environment:resource_manager",
    ],
)

py_strict_test(
    name = "cloud_datastream_resource_manager_test",
    srcs = ["cloud_datastream_resource_manager_test.py"],
    python_version = "PY3",
    srcs_version = "PY3",
    deps = [
        ":cloud_datastream_resource_manager",
        "//google/cloud/datastream:python_client_v1alpha1",
        "//net/proto2/python/public:use_pure_python",  # fixdeps: keep go/proto_python_default
        "//testing/pybase",
        "//third_party/py/mock",
    ],
)

# For runner we will use both g3 and reqs (maybe)?
# "//third_party/py/absl:app",
# "//third_party/py/absl/flags",
# "//third_party/py/absl/logging",
47 |
--------------------------------------------------------------------------------
/dataflow.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Copyright 2021 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Launch / destroy / list the Datastream-to-Postgres Dataflow job.
# Usage: dataflow.sh [create|destroy]  (anything else lists active jobs)

# Dataflow Config Vars
sudo apt-get install uuid-runtime -y
# Short random lowercase suffix so repeated launches get unique job names.
NEW_UUID=$(uuidgen | head -c 6 | awk '{print tolower($0)}')
export DATAFLOW_JOB_NAME="${DATAFLOW_JOB_PREFIX}-${NEW_UUID}"
# Resolve the CloudSQL private IP from the instance listing.
export DATABASE_HOST=$(gcloud sql instances list --project=${PROJECT_ID} | grep "${CLOUD_SQL}" | awk '{print $6;}')

gcloud config set project ${PROJECT_ID}
if [ "$1" == "create" ]
then
  gcloud dataflow flex-template run "${DATAFLOW_JOB_NAME}" --quiet \
    --project="${PROJECT_ID}" --region="${REGION}" \
    --enable-streaming-engine \
    --template-file-gcs-location="${TEMPLATE_IMAGE_SPEC}" \
    --parameters schemaMap=":",gcsPubSubSubscription="projects/${PROJECT_ID}/subscriptions/${PUBSUB_SUBSCRIPTION}",inputFilePattern="${GCS_STREAM_PATH}",databaseHost=${DATABASE_HOST},databasePort="5432",databaseUser=${DATABASE_USER},databasePassword=${DATABASE_PASSWORD},maxNumWorkers=10,autoscalingAlgorithm="THROUGHPUT_BASED"
elif [ "$1" == "destroy" ]
then
  # Kill Running Jobs
  # Pin --region/--project here: without them the lookup could match a job
  # in another region/project that the region-pinned cancel below cannot stop.
  DATAFLOW_JOB_ID=$(gcloud dataflow jobs list --status=active --region="${REGION}" --project="${PROJECT_ID}" | grep ${DATAFLOW_JOB_PREFIX} | awk '{print $1;}')
  if [ "$DATAFLOW_JOB_ID" != '' ]; then
    gcloud dataflow jobs cancel ${DATAFLOW_JOB_ID} --region=${REGION} --project="${PROJECT_ID}"
    echo 'Killing Old Dataflow Jobs: Feel free to go to lunch'
  fi
else
  echo "Dataflow Jobs"
  gcloud dataflow jobs list --status=active --region="${REGION}" --project="${PROJECT_ID}"
fi
43 |
--------------------------------------------------------------------------------
/data_validation.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Driver for the Data Validation Tool (DVT) container.
# Usage: data_validation.sh [build|deploy|run|destroy]  (default: --help)

# Resolve the CloudSQL private IP when the caller did not supply one.
if [ "${DATABASE_HOST}" == "" ]
then
  export DATABASE_HOST=$(gcloud sql instances list --project=${PROJECT_ID} | grep "${CLOUD_SQL}" | awk '{print $6;}')
fi

if [ "$1" == "build" ]
then
  # Tag with ${DOCKER_DVT} so the image built here is the one that the
  # deploy/run/destroy branches below actually use (previously tagged
  # "data-validation", which nothing else referenced).
  docker build . -f DataValidationDockerfile -t ${DOCKER_DVT}
elif [ "$1" == "deploy" ]
then
  # Register the Oracle (source) and Postgres (target) connections; the
  # connection config persists on the host under data_validation/.config/.
  docker run -v ${PWD}/data_validation/.config/:/root/.config --rm ${DOCKER_DVT} \
  connections add -c oracle Raw --json "{\"host\":\"${ORACLE_HOST}\",\"user\":\"${ORACLE_USER}\",\"password\":\"${ORACLE_PASSWORD}\",\"source_type\":\"Oracle\",\"database\":\"${ORACLE_DATABASE}\"}"

  docker run -v ${PWD}/data_validation/.config/:/root/.config --rm ${DOCKER_DVT} \
  connections add -c postgres Raw --json "{\"host\":\"${DATABASE_HOST}\",\"user\":\"${DATABASE_USER}\",\"password\":\"${DATABASE_PASSWORD}\",\"source_type\":\"Postgres\",\"database\":\"postgres\"}"
elif [ "$1" == "run" ]
then
  # Validate every table DVT can pair between source and target.
  export TABLES_LIST=$(docker run -v ${PWD}/data_validation/.config/:/root/.config --rm ${DOCKER_DVT} find-tables --source-conn oracle --target-conn postgres)
  docker run -v ${PWD}/data_validation/.config/:/root/.config --rm ${DOCKER_DVT} \
  run --source-conn oracle --target-conn postgres --tables-list "${TABLES_LIST}" --type Column
elif [ "$1" == "destroy" ]
then
  docker rmi -f ${DOCKER_DVT}
  rm -rf data_validation/.config/
else
  docker run -v ${PWD}/data_validation/.config/:/root/.config --rm ${DOCKER_DVT} --help
fi
43 |
--------------------------------------------------------------------------------
/cicd/cloudbuild.yaml:
--------------------------------------------------------------------------------
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# To Build Manually
# export PROJECT_ID=dms-heterogeneous
# gcloud builds submit --config cicd/cloudbuild.yaml --project=${PROJECT_ID}

# End-to-end demo pipeline. Every step runs `make <target>` inside the
# build-oracle-to-postgres builder image; waitFor chains them sequentially:
# deploy resources -> ora2pg schema conversion -> datastream -> dataflow ->
# wait for data -> validate -> tear everything down.
timeout: 7200s
steps:
  # - id: cloud-build
  # name: "gcr.io/dms-heterogeneous/build-oracle-to-postgres"
  # args: ["make", "cloud-build"]
  - id: deploy-resources
    name: "gcr.io/dms-heterogeneous/build-oracle-to-postgres"
    args: ['make', 'deploy-resources']
    # waitFor: ["cloud-build"]
  - id: ora2pg
    name: "gcr.io/dms-heterogeneous/build-oracle-to-postgres"
    args: ['make', 'ora2pg-drops']
    waitFor: ["deploy-resources"]
  - id: deploy-ora2pg
    name: "gcr.io/dms-heterogeneous/build-oracle-to-postgres"
    args: ["make", "deploy-ora2pg"]
    waitFor: ["ora2pg"]
  - id: deploy-datastream
    name: "gcr.io/dms-heterogeneous/build-oracle-to-postgres"
    args: ["make", "deploy-datastream"]
    waitFor: ["deploy-ora2pg"]
  - id: deploy-dataflow
    name: "gcr.io/dms-heterogeneous/build-oracle-to-postgres"
    args: ["make", "deploy-dataflow"]
    waitFor: ["deploy-datastream"]
  # Give the streaming pipeline time to replicate data before validating.
  - id: sleep-for-run
    name: "gcr.io/dms-heterogeneous/build-oracle-to-postgres"
    args: ["sleep", "400"]
    waitFor: ["deploy-dataflow"]
  - id: validate
    name: "gcr.io/dms-heterogeneous/build-oracle-to-postgres"
    args: ["make", "validate"]
    waitFor: ["sleep-for-run"]
  - id: destroy
    name: "gcr.io/dms-heterogeneous/build-oracle-to-postgres"
    args: ["make", "destroy"]
    waitFor: ["validate"]
options:
  env:
    - 'PROJECT_ID=${PROJECT_ID}'
    - 'PROJECT_NUMBER=${PROJECT_NUMBER}'
    - 'DATABASE_HOST=10.128.0.26'
    - 'ORACLE_HOST=10.128.0.26'
    - 'CLOUD_SQL=ora2pg-demo'
    - 'PRIVATE_CONNECTION_NAME="projects/dms-heterogeneous/locations/us-central1/privateConnections/default-private-connect"'
    - 'DOCKER_DATASTREAM=gcr.io/${PROJECT_ID}/datastream'
    - 'DOCKER_DVT=gcr.io/${PROJECT_ID}/data-validation'
    - 'DOCKER_ORA2PG=gcr.io/${PROJECT_ID}/ora2pg'
    - 'ORACLE_SCHEMAS=HR'
  pool:
    name: 'projects/${PROJECT_ID}/locations/us-central1/workerPools/build-pool'
70 |
--------------------------------------------------------------------------------
/deploy_resources.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Provision the GCP resources the demo needs: APIs, GCS bucket, private
# CloudSQL Postgres instance, and Pub/Sub GCS-notification plumbing.
# Designed to be re-runnable: existing resources are detected and skipped.

# Ensure current user is the owner for all files
# (-p: don't fail when the directory already exists on a re-run)
mkdir -p ora2pg/data/
sudo chown -R $USER:$USER .

# Enable All Services Required
gcloud services enable \
  storage.googleapis.com \
  dataflow.googleapis.com \
  datastream.googleapis.com \
  compute.googleapis.com \
  sqladmin.googleapis.com \
  servicenetworking.googleapis.com \
  pubsub.googleapis.com \
  --project=${PROJECT_ID}

# Create GCS Bucket
gsutil mb -p ${PROJECT_NUMBER} ${GCS_BUCKET}

# Deploy CloudSQL Instance (private-IP only, so VPC peering is set up first)
SQL_INSTANCE=$(gcloud sql instances list --project=${PROJECT_ID} | grep "${CLOUD_SQL}")
if [ "${SQL_INSTANCE}" == "" ]
then
  gcloud compute addresses create google-managed-services-postgres \
    --global --purpose=VPC_PEERING \
    --prefix-length=16 --network=default \
    --project=${PROJECT_ID}
  gcloud services vpc-peerings connect \
    --service=servicenetworking.googleapis.com \
    --ranges=google-managed-services-postgres \
    --network=default \
    --project=${PROJECT_ID}
  gcloud beta sql instances create ${CLOUD_SQL} \
    --database-version=POSTGRES_11 \
    --cpu=4 --memory=3840MiB \
    --region=${REGION} \
    --no-assign-ip \
    --network=default \
    --root-password=${DATABASE_PASSWORD} \
    --project=${PROJECT_ID}
else
  echo "CloudSQL Instance Exists"
  echo ${SQL_INSTANCE}
fi

# Let the CloudSQL service account read import files from the bucket.
SERVICE_ACCOUNT=$(gcloud sql instances describe ${CLOUD_SQL} --project=${PROJECT_ID} | grep 'serviceAccountEmailAddress' | awk '{print $2;}')
gsutil iam ch serviceAccount:${SERVICE_ACCOUNT}:objectViewer ${GCS_BUCKET}

# Create Pub/Sub Resources for GCS Notifications
TOPIC_EXISTS=$(gcloud pubsub topics list --project=${PROJECT_ID} | grep ${PUBSUB_TOPIC})
if [ "${TOPIC_EXISTS}" == "" ]
then
  echo "Deploying Pub/Sub Resources"
  gcloud pubsub topics create ${PUBSUB_TOPIC} --project=${PROJECT_ID}

  gcloud pubsub subscriptions create ${PUBSUB_SUBSCRIPTION} \
    --topic=${PUBSUB_TOPIC} --project=${PROJECT_ID}

  gsutil notification create -f "json" -p "${DATASTREAM_ROOT_PATH}" -t "${PUBSUB_TOPIC}" "${GCS_BUCKET}"
fi
75 |
--------------------------------------------------------------------------------
/ora2pg.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Convert the Oracle schema to Postgres DDL with Ora2PG and load it into
# CloudSQL. Usage: ora2pg.sh [build|run|deploy]

# Post-process one Ora2PG output file:
#  - add a generated "rowid" identity column to every CREATE TABLE
#  - append the file to the combined ora2pg/data/output.sql
#  - for any table Ora2PG left without a primary key, add a PK on rowid
#    (Datastream/Dataflow replication needs a PK on every table)
process_ora2pg_output() {
  local OUTPUT_FILE="$1"
  sudo chown -R $USER:$USER .
  sed -i '/CREATE TABLE/a \\trowid bigint GENERATED BY DEFAULT AS IDENTITY,' "${OUTPUT_FILE}"
  cat "${OUTPUT_FILE}" >> ora2pg/data/output.sql

  # If a table does not have a PK, add rowid
  local CREATED_TABLES=$(grep 'CREATE TABLE' "${OUTPUT_FILE}" | awk '{print $3;}')
  local CREATED_TABLE PK
  for CREATED_TABLE in ${CREATED_TABLES}
  do
    PK=$(grep "ALTER TABLE ${CREATED_TABLE} ADD PRIMARY KEY" "${OUTPUT_FILE}")
    if [ "${PK}" == "" ]
    then
      echo "ALTER TABLE ${CREATED_TABLE} ADD PRIMARY KEY (rowid);" >> ora2pg/data/output.sql
    fi
  done
}

if [ "$1" == "build" ]
then
  docker build . -f Ora2PGDockerfile -t ${DOCKER_ORA2PG}
elif [ "$1" == "run" ]
then
  # Clear Out Old Results (-f: don't error when this is the first run)
  rm -f ora2pg/data/output.sql

  # Create New Ora2PG Data
  CURR_DIR=$(pwd)
  for ORACLE_TYPE in ${ORACLE_TYPES}
  do
    if [ "${ORACLE_SCHEMAS}" == "" ]
    then
      echo "Ora2PG: ${ORACLE_TYPE}_output.sql"
      docker run \
        -v ${CURR_DIR}/ora2pg/config:/config \
        -v ${CURR_DIR}/ora2pg/data:/data \
        ${DOCKER_ORA2PG} ora2pg --type ${ORACLE_TYPE} --out ${ORACLE_TYPE}_output.sql \
        --source ${ORACLE_DSN} --user ${ORACLE_USER} \
        --password ${ORACLE_PASSWORD} --forceowner ${DATABASE_USER}
      process_ora2pg_output ora2pg/data/${ORACLE_TYPE}_output.sql
    else
      for ORACLE_SCHEMA in ${ORACLE_SCHEMAS}
      do
        echo "Ora2PG: ${ORACLE_SCHEMA}.${ORACLE_TYPE}_output.sql"
        # NOTE(review): unlike the branch above, this invocation passes no
        # --source/--user/--password, so connection settings presumably come
        # from the mounted ora2pg.conf — confirm it is populated when
        # ORACLE_SCHEMAS is set.
        docker run \
          -v ${CURR_DIR}/ora2pg/config:/config \
          -v ${CURR_DIR}/ora2pg/data:/data \
          ${DOCKER_ORA2PG} ora2pg --type ${ORACLE_TYPE} --namespace ${ORACLE_SCHEMA} \
          --out ${ORACLE_TYPE}_${ORACLE_SCHEMA}_output.sql
        process_ora2pg_output ora2pg/data/${ORACLE_TYPE}_${ORACLE_SCHEMA}_output.sql
      done
    fi
  done
elif [ "$1" == "deploy" ]
then
  # Deploy to GCS
  gsutil rm ${GCS_BUCKET}/resources/ora2pg/*
  gsutil cp ora2pg/data/output.sql ${GCS_BUCKET}/resources/ora2pg/output.sql

  # Apply ora2pg results in CloudSQL
  gcloud sql import sql \
    ${CLOUD_SQL} ${GCS_BUCKET}/resources/ora2pg/output.sql \
    --user=${DATABASE_USER} --project=${PROJECT_ID} --database=postgres --quiet
fi
90 |
--------------------------------------------------------------------------------
/datastream_utils/runner.py:
--------------------------------------------------------------------------------
1 | """Deploy and Manage Datastream Jobs.
2 |
3 | Utilities to deploy and manage Datastream resources via CLI.
4 | """
5 |
6 | from typing import Any, Sequence
7 |
8 | from absl import app
9 | from absl import flags
10 |
11 | import cloud_datastream_resource_manager
12 |
# ---------------------------------------------------------------------------
# CLI flags (flag names use dashes; read back via _get_flag below).
# ---------------------------------------------------------------------------

# Lifecycle action to perform against Datastream resources.
flags.DEFINE_enum("action", "list", ["create", "tear-down", "list"],
                  "Datastream Action to Run.")

# Project and resource-naming flags.
flags.DEFINE_string("project-number", None,
                    "The GCP Project Number to be used",
                    required=True, short_name="p")
flags.DEFINE_string("stream-prefix", None,
                    "Alphanumeric lowercase resource prefix",
                    required=True, short_name="sp")
flags.DEFINE_string("gcs-prefix", None,
                    "Alphanumeric lowercase resource prefix",
                    required=True, short_name="gp")
flags.DEFINE_string("source-prefix", None,
                    "Alphanumeric lowercase resource prefix",
                    required=True, short_name="op")

# Destination (GCS) settings.
flags.DEFINE_string("gcs-bucket", None,
                    "GCS Bucket Name supplied with or w/o gs:// prefix",
                    required=True)
flags.DEFINE_string("gcs-root-path", "/data/",
                    "GCS root path for Datastream to insert data")

# Source (Oracle) connection settings.
flags.DEFINE_string("oracle-host", None, "Host for Oracle DB", required=True)
flags.DEFINE_string("oracle-port", "1521",
                    "Port for Oracle DB (default 1521)")
flags.DEFINE_string("oracle-user", None, "User for Oracle DB connections",
                    required=True)
flags.DEFINE_string("oracle-password", None,
                    "Password for Oracle DB connections", required=True)
flags.DEFINE_string("oracle-database", None, "Database to connect to Oracle",
                    required=True)
flags.DEFINE_string("private-connection", None,
                    "The name of the private connection to use when required.")

# Optional allow-list of schemas/tables to include in the stream.
flags.DEFINE_string("schema-names", None,
                    "Names of the schemas to include in Stream")
flags.DEFINE_string("table-names", None,
                    "Names of the tables to include in Stream")
50 |
def _get_flag(field: str) -> Any:
  """Look up the current value of a flag by name.

  Args:
    field: Flag name exactly as registered via flags.DEFINE_* (dashes kept).

  Returns:
    The flag's parsed value, or None when the flag is unset.
  """
  parsed_flags = flags.FLAGS
  return parsed_flags.get_flag_value(field, None)
55 |
def main(unused_argv: Sequence[str] = None) -> None:
  """CLI entry point: build the resource manager and run the chosen action."""
  action = _get_flag("action")

  # Oracle connection profile in the shape the Datastream API expects.
  oracle_cp = {
      "hostname": _get_flag("oracle-host"),
      "port": int(_get_flag("oracle-port")),
      "databaseService": _get_flag("oracle-database"),
      "username": _get_flag("oracle-user"),
      "password": _get_flag("oracle-password"),
  }

  # Build the (schema, table) allow-list. Table names win over bare schemas.
  schema_names = _get_flag("schema-names")
  table_names = _get_flag("table-names")
  if table_names:
    # NOTE(review): each table is paired with the *whole* schema-names string;
    # presumably a single schema is expected alongside --table-names — confirm.
    allowed_tables = [(schema_names, name) for name in table_names.split()]
  elif schema_names:
    allowed_tables = [(schema, None) for schema in schema_names.split()]
  else:
    allowed_tables = []

  manager = cloud_datastream_resource_manager.CloudDatastreamResourceManager(
      project_number=_get_flag("project-number"),
      gcs_bucket_name=_get_flag("gcs-bucket"),
      gcs_root_path=_get_flag("gcs-root-path"),
      stream_name=_get_flag("stream-prefix"),
      source_cp_name=_get_flag("source-prefix"),
      target_cp_name=_get_flag("gcs-prefix"),
      oracle_cp=oracle_cp,
      allowed_tables=allowed_tables,
      add_uid_suffix=False,
      private_connection_name=_get_flag("private-connection"),
  )
  print(manager.Describe())

  if action == "create":
    manager.SetUp()
  elif action == "tear-down":
    manager.TearDown()
  elif action == "list":
    manager.ListStreams()


if __name__ == "__main__":
  app.run(main)
108 |
--------------------------------------------------------------------------------
/datastream_utils/cloud_datastream_resource_manager_test.py:
--------------------------------------------------------------------------------
1 | """Tests for google3.cloud.dataflow.testing.integration.teleport.environment.cloud_datastream_resource_manager."""
2 |
3 | import logging
4 | import mock
5 |
6 | from google3.experimental.dhercher.datastream_utils import cloud_datastream_resource_manager
7 | from google3.google.cloud.datastream import datastream
8 | from google3.testing.pybase import googletest
9 |
# Canned Oracle connection-profile settings shared by every test case below.
_EX_ORACLE_CP = {
    "hostname": "127.0.0.1",
    "username": "oracle",
    "databaseService": "XE",
    "password": "oracle",
    "port": 1521
}
17 |
18 |
class CloudDatastreamResourceManagerTest(googletest.TestCase):
  """Tests CloudDatastreamResourceManager against mocked Datastream clients."""

  def test_default_property_names(self):
    # Verifies the derived resource names/paths when only the required
    # project number and bucket are supplied (everything else defaulted).
    client_mock = mock.MagicMock()
    rm = cloud_datastream_resource_manager.CloudDatastreamResourceManager(
        1234567890, "bucket-name",
        client=client_mock, oracle_cp=_EX_ORACLE_CP)

    self.assertEqual(rm.datastream_parent,
                     "projects/1234567890/locations/us-central1")
    self.assertIsNotNone(rm.suffix)
    self.assertIsNotNone(rm.path_suffix)
    self.assertIn("streams/datastream-test-", rm.full_stream_name)
    self.assertIn("connectionProfiles/oracle-cp-",
                  rm.full_source_connection_name)
    self.assertIn("connectionProfiles/gcs-cp-", rm.full_dest_connection_name)
    self.assertEqual(rm.gcs_bucket, "gs://bucket-name")
    self.assertStartsWith(rm.gcs_location,
                          "gs://bucket-name/rootprefix/")

  def test_create_cps(self):
    # Smoke test: the connection-profile creation helpers run without
    # raising. NOTE(review): no assertions — results intentionally unused.
    client_mock = mock.MagicMock()
    rm = cloud_datastream_resource_manager.CloudDatastreamResourceManager(
        1234567890, "bucket-name",
        client=client_mock, oracle_cp=_EX_ORACLE_CP)
    unused_oracle_result = rm._CreateDatabaseConnectionProfile()

    unused_gcs_result = rm._CreateGcsConnectionProfile(
        "test",
        bucket_name="anybucket",
        root_path="anyroot")

  def test_list_cps(self):
    # Smoke test: listing connection profiles succeeds against a mock
    # client; the result is only logged, not asserted on.
    client_mock = mock.MagicMock()
    rm = cloud_datastream_resource_manager.CloudDatastreamResourceManager(
        1234567890, "bucket-name",
        client=client_mock, oracle_cp=_EX_ORACLE_CP)
    logging.warning(rm._ListConnectionProfiles())

  def test_create_stream(self):
    # Smoke test: construction succeeds with an autospec'd API client.
    client_mock = mock.create_autospec(datastream.DatastreamV1alpha1,
                                       instance=True)
    unused_rm = (
        cloud_datastream_resource_manager.CloudDatastreamResourceManager(
            1234567890, "bucket-name",
            client=client_mock, oracle_cp=_EX_ORACLE_CP))

  def test__get_oracle_rdbms(self):
    # (schema, table) pairs map to OracleSchema/OracleTable messages; a
    # None table yields a schema with an empty table list.
    client_mock = mock.create_autospec(datastream.DatastreamV1alpha1,
                                       instance=True,
                                       spec_set=False)

    rm = cloud_datastream_resource_manager.CloudDatastreamResourceManager(
        1234567890, "bucket-name",
        client=client_mock, oracle_cp=_EX_ORACLE_CP)

    table_list = [("test", "table"), ("test2", None)]
    oracle_rdbms = rm._get_oracle_rdbms(table_list)

    expected_rdbms = datastream.OracleRdbms(oracleSchemas=[
        datastream.OracleSchema(
            schemaName="test",
            oracleTables=[datastream.OracleTable(tableName="table")]),
        datastream.OracleSchema(schemaName="test2", oracleTables=[])])
    self.assertEqual(oracle_rdbms, expected_rdbms)

  def test__get_mysql_rdbms(self):
    # MySQL analogue of the Oracle mapping test above.
    client_mock = mock.create_autospec(datastream.DatastreamV1alpha1,
                                       instance=True,
                                       spec_set=False)

    rm = cloud_datastream_resource_manager.CloudDatastreamResourceManager(
        1234567890, "bucket-name",
        client=client_mock, oracle_cp=_EX_ORACLE_CP)

    table_list = [("test", "table"), ("test2", None)]
    mysql_rdbms = rm._get_mysql_rdbms(table_list)

    expected_rdbms = datastream.MysqlRdbms(mysqlDatabases=[
        datastream.MysqlDatabase(
            databaseName="test",
            mysqlTables=[datastream.MysqlTable(tableName="table")]),
        datastream.MysqlDatabase(databaseName="test2", mysqlTables=[])])
    self.assertEqual(mysql_rdbms, expected_rdbms)

  def test_full_flow(self):
    # End-to-end SetUp()/TearDown() against a client whose API calls all
    # return an operation with done=True, so no polling path is exercised.
    client_mock = mock.create_autospec(datastream.DatastreamV1alpha1,
                                       instance=True,
                                       spec_set=False)

    rm = cloud_datastream_resource_manager.CloudDatastreamResourceManager(
        1234567890, "bucket-name",
        client=client_mock, oracle_cp=_EX_ORACLE_CP)

    class SuccessResult(object):
      """Operation stub: attributes not passed to __init__ read as None."""

      def __init__(self, **kwargs):
        self.kwargs = kwargs

      def __getattr__(self, name):
        return self.kwargs.get(name)

    always_success = SuccessResult(done=True)

    client_mock.projects_locations_connectionProfiles = mock.Mock()
    client_mock.projects_locations_connectionProfiles.Create.return_value = always_success
    client_mock.projects_locations_connectionProfiles.Delete.return_value = always_success
    client_mock.projects_locations_streams = mock.Mock()
    client_mock.projects_locations_streams.Create.return_value = always_success
    client_mock.projects_locations_streams.Start.return_value = always_success
    client_mock.projects_locations_streams.Pause.return_value = always_success
    client_mock.projects_locations_streams.Delete.return_value = always_success
    client_mock.projects_locations_operations = mock.Mock()
    client_mock.projects_locations_operations.Get.return_value = always_success

    rm.SetUp()
    rm.TearDown()
136 |
137 |
138 | if __name__ == "__main__":
139 | googletest.main()
140 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | # Copyright 2020 Google Inc.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 |
export PROJECT_ID?=
export PROJECT_NUMBER?=
export REGION?=us-central1
export CLOUDSDK_CONFIG?=${HOME}/.config/gcloud/

export STREAM_NAME?=oracle-to-postgres
export GCS_BUCKET?=gs://${PROJECT_ID}
export PUBSUB_TOPIC?=${STREAM_NAME}
export PUBSUB_SUBSCRIPTION?=${PUBSUB_TOPIC}-subscription

export CLOUD_SQL?=
export DATABASE_USER?=postgres
export DATABASE_PASSWORD?=postgres
export DATABASE_HOST?=

export ORACLE_HOST?=
export ORACLE_PORT?=1521
export ORACLE_USER?=system
export ORACLE_PASSWORD?=oracle
export ORACLE_DATABASE?=XE

export ORACLE_DSN:=dbi:Oracle:host=${ORACLE_HOST};sid=${ORACLE_DATABASE};port=${ORACLE_PORT}

# The PrivateConnection, in the format:
# projects/${PROJECT_ID}/locations/${REGION}/privateConnections/<name>
export PRIVATE_CONNECTION_NAME?=
# Connection-profile names. NOTE: make keeps quote characters literally in a
# variable's value, so these are intentionally unquoted — quoting them would
# export stray '"' characters to the helper shell scripts.
export ORACLE_CP_NAME?=oracle-${STREAM_NAME}
export GCS_CP_NAME?=gcs-${STREAM_NAME}

# Desired Oracle Schemas and object types to replicate
# For schemas, leave blank for all.
export ORACLE_SCHEMAS?=
export ORACLE_TYPES?=TABLE VIEW

# Oracle host for DataStream in case this is different from local
export ORACLE_DATASTREAM_HOST?=${ORACLE_HOST}
export ORACLE_DATASTREAM_PORT?=1521

export DATAFLOW_JOB_PREFIX?=oracle-to-postgres
export TEMPLATE_IMAGE_SPEC?=gs://dataflow-templates/2021-11-12-00_RC00/flex/Cloud_Datastream_to_SQL
export DATASTREAM_ROOT_PATH?=ora2pg/${STREAM_NAME}/
export GCS_STREAM_PATH?=${GCS_BUCKET}/${DATASTREAM_ROOT_PATH}

# Data Validation
# export PSO_DV_CONFIG_HOME?=${GCS_BUCKET}/dvt/

# Docker Image Tags
export DOCKER_GCLOUD?=gcr.io/google.com/cloudsdktool/cloud-sdk:latest
export DOCKER_DATASTREAM?=datastream
export DOCKER_DVT?=data-validation
export DOCKER_ORA2PG?=ora2pg
67 |
# Print the active configuration plus a cheat sheet of workflow targets.
variables:
	@echo "Project ID: ${PROJECT_ID}"
	@echo "CloudSQL Output: ${CLOUD_SQL}"
	@echo "GCS Bucket: ${GCS_BUCKET}"
	@echo "GCS Datastream Path: ${GCS_STREAM_PATH}"

	@echo ""
	@echo "Build Docker Images Used in Ora2PG: make build"
	@echo "Deploy Required Resources: make deploy-resources"
	@echo "Run Ora2PG SQL Conversion Files: make ora2pg"
	@echo "Apply Ora2PG SQL to PSQL: make deploy-ora2pg"
	@echo "Deploy DataStream: make deploy-datastream"
	@echo "Deploy Dataflow: make deploy-dataflow"
	@echo "Validate Oracle vs Postgres: make validate"

# List the migration's streams, Cloud SQL instances, and Dataflow jobs.
list: variables
	@echo "List All Oracle to Postgres Objects: ${PROJECT_ID}"
	gcloud beta datastream streams list --location=${REGION} --project=${PROJECT_ID} --quiet
	gcloud sql instances list --project=${PROJECT_ID} | grep "${CLOUD_SQL}"
	./dataflow.sh

# Build the Datastream, data-validation, and Ora2PG Docker images locally.
build: variables
	echo "Build Oracle to Postgres Docker Images: ${PROJECT_ID}"
	docker build datastream_utils/ -t ${DOCKER_DATASTREAM}
	docker build . -f DataValidationDockerfile -t ${DOCKER_DVT}
	docker build . -f Ora2PGDockerfile -t ${DOCKER_ORA2PG}

# Build, then push the images (tags must point at a registry for this to work).
cloud-build: variables build
	echo "Push Ora2PG Docker Images to GCR: ${PROJECT_ID}"
	docker push ${DOCKER_DATASTREAM}
	docker push ${DOCKER_DVT}
	docker push ${DOCKER_ORA2PG}

# Create/verify required cloud resources and deploy data-validation config.
deploy-resources: variables
	echo "Deploy Oracle to Postgres Resources: ${PROJECT_ID}"
	./deploy_resources.sh
	./data_validation.sh deploy

# Run the Ora2Pg schema conversion (outputs land in ora2pg/data/).
ora2pg: variables
	./ora2pg.sh run
108 |
# Prepend a DROP SCHEMA statement to the converted SQL so re-applies succeed.
# NOTE(review): hard-codes the `hr` schema — edit for other schemas.
ora2pg-drops: variables ora2pg
	sed -i '1s/^/DROP SCHEMA IF EXISTS hr CASCADE;\n/' ora2pg/data/output.sql

# Apply the converted schema file to PostgreSQL.
deploy-ora2pg: variables
	./ora2pg.sh deploy

# Create the source/destination connection profiles and the stream, then
# flip the stream to RUNNING. `|| true` keeps re-runs idempotent when the
# resources already exist.
deploy-datastream: variables
	echo "Deploy DataStream from Oracle to GCS: ${PROJECT_ID}"
	# Create Connection Profiles
	gcloud beta datastream connection-profiles create ${ORACLE_CP_NAME} --display-name ${ORACLE_CP_NAME} --type ORACLE --database-service=${ORACLE_DATABASE} --oracle-hostname=${ORACLE_DATASTREAM_HOST} --oracle-port=${ORACLE_DATASTREAM_PORT} --oracle-username=${ORACLE_USER} --oracle-password=${ORACLE_PASSWORD} --private-connection-name=${PRIVATE_CONNECTION_NAME} --location=${REGION} --project=${PROJECT_ID} --quiet || true
	gcloud beta datastream connection-profiles create ${GCS_CP_NAME} --display-name ${GCS_CP_NAME} --type GOOGLE-CLOUD-STORAGE --bucket-name "${PROJECT_ID}" --root-path "/${DATASTREAM_ROOT_PATH}" --location=${REGION} --project=${PROJECT_ID} --quiet || true

	# Create & Start Datastream Stream
	gcloud beta datastream streams create ${STREAM_NAME} --display-name ${STREAM_NAME} --backfill-all \
		--source-name="${ORACLE_CP_NAME}" --oracle-source-config=datastream_utils/source_config.json --oracle-excluded-objects=datastream_utils/source_excluded_objects.json \
		--destination-name="${GCS_CP_NAME}" --gcs-destination-config=datastream_utils/destination_config.json \
		--location=${REGION} --project=${PROJECT_ID} --quiet || true
	sleep 20
	gcloud beta datastream streams update ${STREAM_NAME} --state=RUNNING --update-mask=state --location=${REGION} --project=${PROJECT_ID} --quiet || true

# Launch the streaming Dataflow job (GCS -> Postgres); details in dataflow.sh.
deploy-dataflow: variables
	echo "Deploy Dataflow from GCS to Postgres: ${PROJECT_ID}"
	./dataflow.sh create

# Run the data-validation comparison between Oracle and Postgres.
validate: variables
	./data_validation.sh run

# Delete the stream and both connection profiles.
destroy-datastream: variables
	@echo "Tearing Down DataStream: ${PROJECT_ID}"
	gcloud beta datastream streams delete ${STREAM_NAME} --location=${REGION} --project=${PROJECT_ID} --quiet
	gcloud beta datastream connection-profiles delete ${ORACLE_CP_NAME} --location=${REGION} --project=${PROJECT_ID} --quiet
	gcloud beta datastream connection-profiles delete ${GCS_CP_NAME} --location=${REGION} --project=${PROJECT_ID} --quiet

# Tear down the Dataflow job(s) via the helper script.
destroy-dataflow: variables
	@echo "Tearing Down Dataflow: ${PROJECT_ID}"
	./dataflow.sh destroy

# Full teardown: Dataflow, then Datastream, then the replicated GCS files.
destroy: variables destroy-dataflow destroy-datastream
	@echo "Tearing Down DataStream to Postgres: ${PROJECT_ID}"
	gsutil -m rm ${GCS_STREAM_PATH}**
149 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Oracle to PostgreSQL Toolkit
2 |
3 | ## Migration Setup
4 | To use the Oracle to PostgreSQL toolkit, clone the directory and edit the Makefile to add your configurations.
5 |
6 | `git clone https://github.com/GoogleCloudPlatform/oracle-to-postgres.git`
7 |
8 | ### Requirements
9 |
10 | - An Oracle Database which is supported by Datastream
11 | - You are able to enable LogMiner
- Your [Oracle Database is supported by Datastream](https://cloud.google.com/datastream/docs/sources#oracleknownlimitations)
13 | - A PostgreSQL database, by default a Cloud SQL for PostgreSQL instance is expected.
14 | - A bastion VM with access to connect to both Oracle and PostgreSQL
15 | - Connectivity to Datastream can be direct or utilize the VM as a bastion with forward tunnels
16 | - Clone this repository into your environment
17 | - Add your Oracle rpm files to the `oracle/` directory
18 | - Install Docker & gcloud on the VM
19 | - During setup the tool will enable required APIs and deploy required resources if they do not already exist
20 | - Dataflow, Cloud Storage, Pub/Sub, Datastream, Compute, SQLAdmin, and Networking (more details can be found in `deploy_resources.sh`)
21 | - The Cloud Storage bucket and Pub/Sub topic and subscription will be created if they do not exist. You should ensure your VM user has proper access to create the following:
22 | - Create Cloud Storage buckets
23 | - Create a Pub/Sub topic and subscription
24 | - Enable Cloud Storage Pub/Sub notifications on your bucket (must be a bucket owner)
25 | - Cloud SQL will be deployed if it does not already exist
26 |
27 | Nearly all parameters you will require are controlled via the Makefile.
28 | There are a small number of configurations you may need to edit for Ora2Pg, Dataflow, and Datastream which are networking dependent. If you are using a private networking setup, please see the added instructions at the end of each stage.
29 |
30 | ## Migration Stages
31 | The stages of a migration are intended to be iterative as you will sometimes need to restart replication to account for new configurations. Due to this iterative nature, we suggest using a QA or Dev environment first, before moving into production.
32 |
33 | ### Building Resources (make build)
34 | After you have created the required resources and added your configurations at the top of your `Makefile`, you will be ready to build the Docker images used during replication.
The Docker images that are built are as follows:
36 | - [Ora2Pg](http://ora2pg.darold.net/): A docker image which is used to run Ora2Pg.
37 | - GCloud: For more recent APIs we use a docker gcloud to ensure the latest version of gcloud
38 | - Datastream management: This image is used to list, deploy, and delete Datastream resources.
39 | - [Data validation](https://github.com/GoogleCloudPlatform/professional-services-data-validator): An image built from Google's open source Data Validation tooling.
40 |
41 | ### Deploying Resources (make deploy-resources)
42 |
43 | This stage will ensure all the expected resources exist, and if not will create them. The logic is designed to be run several times if needed, and will not recreate existing resources.
44 | The final step in the resource deployment is to test the connection details from the current VM to both Oracle and PostgreSQL. These tests are performed by the data validation docker image, which will also store connection details for future use.
45 |
46 | ### Executing Ora2Pg (make ora2pg)
47 |
48 | The next step in a migration is to run the Ora2Pg schema conversion tooling. The raw Ora2Pg files will all be stored in `ora2pg/data/` along with a single file with the accumulated conversions for the current run (`ora2pg/data/output.sql`).
49 | The table definitions created by Ora2Pg by default are often all you will require. However, if customization is required this can be done by editing `ora2pg/config/ora2pg.conf` and re-running the `make ora2pg` step. You should be sure to manually review the `output.sql` file to confirm your expected conversion has been run.
50 |
51 | Although they will not be applied directly by default, any of the Ora2Pg object types can be converted, you will find the raw SQL files in `ora2pg/data/` and you can manually address issues and upload non-data objects to PostgreSQL as needed (ie. PL/SQL).
52 |
53 | ### Applying Ora2Pg (make deploy-ora2pg)
54 |
55 | When you are confident in the Ora2Pg conversion, then you are ready to apply the schema in your PostgreSQL database. Running the apply step will load your schema file into Cloud Storage and import it into your Cloud SQL for PostgreSQL database.
56 | If you need to customize using a non-CloudSQL database then simply import the `ora2pg/data/output.sql` file directly using the PostgreSQL CLI.
57 |
58 | #### Re-running make deploy-ora2pg
59 |
60 | Once a schema is applied, future runs of the same schema will fail. If you need to re-apply the schema you should first run the following SQL DROP command against each PostgreSQL schema in question:
`DROP SCHEMA IF EXISTS <schema_name> CASCADE;`
62 |
63 | ### Deploying Datastream (make deploy-datastream)
64 |
65 | When your schema has been applied, you are ready to begin data replication. The datastream deployment stage will create a set of connection profiles and a stream using your supplied configuration.
66 | If you wish to perform this stage manually in the UI, please feel free to follow the steps outlined in the [Datastream Quickstart and Documentation](https://cloud.google.com/datastream/docs/quickstart).
67 |
68 | #### Private Connectivity
69 |
70 | To use the [PrivateConnection feature in Datastream](https://cloud.google.com/datastream/docs/create-a-private-connectivity-configuration), begin by creating a PrivateConnection in the UI. You will then be able to add this reference in your Makefile to be used in the Datastream creation.
71 |
72 | ### Deploying Dataflow (make deploy-dataflow)
73 |
74 | Deploying Dataflow will create a streaming Dataflow job from the Datastream to PostgreSQL template. This job will replicate data from Cloud Storage into PostgreSQL as soon as it is deployed. The specific setting for Dataflow can be seen in `dataflow.sh`, though generally the defaults will be able to replicate into PostgreSQL as quickly as possible (scaling up PostgreSQL resources will often scale up data replication if this is needed).
75 |
76 | #### Private Connectivity
77 |
78 | To add private connectivity to Dataflow, please add the private networking configurations to `dataflow.sh`. These configurations will include changing the dataflow run command:
79 | - Adding `--disable-public-ips`
- Specifying your desired `--network`
81 |
82 | #### Redeploying Data Replication
83 |
84 | To redeploy data replication you should first cancel the old Dataflow job. When you are ready to re-deploy, a rewrite to any Cloud Storage file will cause it to be consumed (and avoid the need to restart Datastream replication). Running the following rewrite command will read all files again once Dataflow is redeployed.
85 | `gsutil -m rewrite -r -k gs://bucket/path/to/data/`
86 |
87 | ### Data Validation (make validate)
88 |
89 | Once your data appears to be replicating (or before) you can run data validation to check on the row count comparisons between each source and target table.
Running validation will allow you to understand when your replication is up to date and, during the cutover, to verify that all data matches before finalizing the switch.
91 |
92 | For more details on the options around Data Validation, please see the open source [Data Validation Tool documentation](https://github.com/GoogleCloudPlatform/professional-services-data-validator).
93 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
--------------------------------------------------------------------------------
/datastream_utils/cloud_datastream_resource_manager.py:
--------------------------------------------------------------------------------
1 | """Utilities to start and manage a CDC stream from Cloud Datastream."""
2 |
3 | import logging
4 | import time
5 | from typing import List, Tuple
6 | import uuid
7 |
8 | try:
9 | from google3.google.cloud.datastream import datastream # pylint: disable=g-import-not-at-top
10 | except ModuleNotFoundError:
11 | import datastream # pytype: disable=import-error pylint: disable=g-import-not-at-top
12 |
13 |
# Default GCP region used when the caller does not supply one.
DEFAULT_REGION = "us-central1"

# Base URL of the Datastream API endpoint.
DATASTREAM_URL = "https://datastream.googleapis.com/"

# Supported export file formats for the GCS destination.
DATASTREAM_EXPORT_FILEFORMAT_AVRO = "avro"
DATASTREAM_EXPORT_FILEFORMAT_JSON = "json"

DEFAULT_DATASTREAM_EXPORT_FILEFORMAT = DATASTREAM_EXPORT_FILEFORMAT_AVRO

# Default resource names/paths; a UID suffix may be appended at runtime
# (see CloudDatastreamResourceManager.suffix).
DEFAULT_GCS_ROOT_PATH = "/rootprefix/"
DEFAULT_STREAM_NAME = "datastream-test"
DEFAULT_SOURCE_CP_NAME = "oracle-cp"
DEFAULT_DEST_CP_NAME = "gcs-cp"
27 |
28 |
class CloudDatastreamResourceManager(object):
  """Resource manager to start a CDC stream from Cloud Datastream.

  This resource manager creates the set of resources required for a stream
  from Cloud Datastream (a source database connection profile, a GCS
  destination connection profile and the stream itself), and starts the
  stream.

  On TearDown, all resources are deleted.
  """

  def __init__(
      self,
      project_number,
      gcs_bucket_name,
      region=None,
      client=None,
      authorized_http=None,
      stream_name=None,
      source_cp_name=None,
      target_cp_name=None,
      oracle_cp=None,
      mysql_cp=None,
      allowed_tables=None,
      gcs_root_path=None,
      add_uid_suffix=True,
      datastream_api_url=None,
      datastream_export_file_format=None,
      private_connection_name=None,
  ):
    """Initialize the CloudDatastreamResourceManager.

    Args:
      project_number: The GCP Project number identifying your project.
      gcs_bucket_name: The GCS bucket name, with or without a gs:// prefix.
      region: The GCP region where DataStream is deployed.
      client: The Datastream client to be used.
      authorized_http: An authorized http to be supplied
        to the Datastream client.
      stream_name: The name or prefix of the stream.
      source_cp_name: The name or prefix of the source CP.
      target_cp_name: The name or prefix of the destination CP.
      oracle_cp: The connection profile configuration for an Oracle source.
      mysql_cp: The connection profile configuration for a MySQL source.
      allowed_tables: A List of allowed schema and table tuples.
      gcs_root_path: The GCS root directory for DataStream (ie. /rootpath/).
      add_uid_suffix: Whether or not to add a UID to all stream objects.
      datastream_api_url: The URL to use when calling DataStream.
      datastream_export_file_format: avro/json
      private_connection_name: The name of the PrivateConnection to
        use if required
        (eg. projects//locations//
        privateConnections/).
    """
    self.project_number = project_number
    self.region = region or DEFAULT_REGION

    self._stream_name = stream_name or DEFAULT_STREAM_NAME
    self._source_cp_name = source_cp_name or DEFAULT_SOURCE_CP_NAME
    self._target_cp_name = target_cp_name or DEFAULT_DEST_CP_NAME
    # A short random suffix lets multiple managers coexist in one project.
    self._suffix = str(uuid.uuid4())[:8] if add_uid_suffix else ""

    self.oracle_cp = oracle_cp
    self.mysql_cp = mysql_cp
    self.private_connection_name = private_connection_name
    self.allowed_tables = allowed_tables or []

    # Strip only a leading "gs://" scheme rather than replacing every
    # occurrence, so the bucket name is never mangled mid-string.
    if gcs_bucket_name.startswith("gs://"):
      gcs_bucket_name = gcs_bucket_name[len("gs://"):]
    self.gcs_bucket_name = gcs_bucket_name
    self._gcs_root_path = gcs_root_path or DEFAULT_GCS_ROOT_PATH
    self.datastream_export_file_format = (
        datastream_export_file_format or DEFAULT_DATASTREAM_EXPORT_FILEFORMAT
    )
    if client:
      self.client = client
    else:
      logging.info("Creating DataStream Client with Authorized HTTP")
      api_url = datastream_api_url or DATASTREAM_URL
      self.client = datastream.DatastreamV1alpha1(
          url=api_url, http=authorized_http, get_credentials=True)

  @property
  def datastream_parent(self):
    """The projects/<project>/locations/<region> parent of all resources."""
    return "projects/%s/locations/%s" % (self.project_number, self.region)

  @property
  def suffix(self):
    """The UID suffix with a leading dash, or "" when suffixing is off."""
    return "-%s" % self._suffix if self._suffix else ""

  @property
  def path_suffix(self):
    """The UID suffix as a trailing path segment, or "" when off."""
    return "%s/" % self._suffix if self._suffix else ""

  @property
  def stream_name(self):
    """The (possibly suffixed) short stream name."""
    return self._stream_name + self.suffix

  @property
  def full_stream_name(self):
    """The fully-qualified stream resource name."""
    return self.datastream_parent + "/streams/" + self.stream_name

  @property
  def source_connection_name(self):
    """The (possibly suffixed) short source connection profile name."""
    return self._source_cp_name + self.suffix

  @property
  def full_source_connection_name(self):
    """The fully-qualified source connection profile resource name."""
    return (
        self.datastream_parent +
        "/connectionProfiles/" +
        self.source_connection_name
    )

  @property
  def dest_connection_name(self):
    """The (possibly suffixed) short destination connection profile name."""
    return self._target_cp_name + self.suffix

  @property
  def full_dest_connection_name(self):
    """The fully-qualified destination connection profile resource name."""
    return (
        self.datastream_parent +
        "/connectionProfiles/" +
        self.dest_connection_name
    )

  @property
  def gcs_root_path(self):
    """The GCS root path with the UID path segment appended."""
    return self._gcs_root_path + self.path_suffix

  @property
  def gcs_bucket(self):
    """The destination bucket as a gs:// URI."""
    return "gs://%s" % self.gcs_bucket_name

  @property
  def gcs_location(self):
    """The full gs:// URI of the stream's destination directory."""
    return self.gcs_bucket + self.gcs_root_path

  def SetUp(self):
    """Create and start all resources for a CDC Datastream.

    In this order:
    - Create a source Database Connection Profile
    - Create a destination GCS Connection Profile
    - Create a stream that reads from source into destination
    - Start the stream

    Raises:
      ValueError: If stream creation or stream start reports an error.
    """
    logging.info("Setting up Source Connection Profile")
    # Create the source (Oracle or MySQL) Connection Profile.
    self._CreateDatabaseConnectionProfile()

    logging.info("Setting up GCS Connection Profile")
    self._CreateGcsConnectionProfile(
        self.dest_connection_name,
        bucket_name=self.gcs_bucket_name,
        root_path=self.gcs_root_path)

    logging.info("Creating stream on Datastream")
    stream_op_result = self._CreateStream(self.stream_name,
                                          self.full_source_connection_name,
                                          self.full_dest_connection_name,
                                          self.datastream_export_file_format)

    # Surface the operation error consistently (was str(stream_op_result)).
    if stream_op_result.error:
      raise ValueError(str(stream_op_result.error))

    logging.info("Starting CDC stream on Datastream")
    result = self._UpdateStreamState(
        self.full_stream_name, datastream.Stream.StateValueValuesEnum.RUNNING)

    if result.error:
      raise ValueError(str(result.error))

  def TearDown(self):
    """Stop and delete all resources started in SetUp.

    In this order:
    - Stop stream, then delete it
    - Delete destination GCS Connection Profile
    - Delete source Database Connection Profile
    """
    self._StopAndDeleteStream(self.full_stream_name)

    self._DeleteConnectionProfile(self.full_source_connection_name)
    self._DeleteConnectionProfile(self.full_dest_connection_name)

  def Describe(self):
    """Return a one-line human-readable description of this manager."""
    return "Manage a stream from Cloud Datastream."

  def ListStreams(self):
    """Log every stream whose name contains this manager's stream prefix."""
    streams = self._ListStreams()
    for stream in streams.streams:
      if self._stream_name not in stream.name:
        continue
      stream_log = "Stream Name: %s" % stream.name
      stream_cp_source_log = "\tSource CP: %s" % stream.sourceConfig.sourceConnectionProfileName
      stream_cp_dest_log = "\tDest CP: %s" % stream.destinationConfig.destinationConnectionProfileName

      logging.info(stream_log)
      logging.info(stream_cp_source_log)
      logging.info(stream_cp_dest_log)

  def _UpdateStreamState(self, stream_name, state):
    """Patch the stream's state (e.g. RUNNING/PAUSED) and wait for the op."""
    request = datastream.DatastreamProjectsLocationsStreamsPatchRequest(
        name=stream_name,
        stream=datastream.Stream(state=state),
        updateMask="state")

    response = self.client.projects_locations_streams.Patch(request)
    return self._WaitForCompletion(response)

  def _WaitForCompletion(self, response, timeout=120):
    """Poll a long-running operation until done or `timeout` seconds pass.

    Returns the last seen operation; on timeout it may still be unfinished.
    """
    start = time.time()
    while not response.done:
      time.sleep(5)
      response = self.client.projects_locations_operations.Get(
          datastream.DatastreamProjectsLocationsOperationsGetRequest(
              name=response.name))

      if time.time() - start > timeout:
        logging.warning("Timed out waiting for operation completion %s",
                        response.name)
        break

    return response

  def _DeleteConnectionProfile(self, cp_name):
    """Best-effort delete of a connection profile; returns None on failure."""
    delete_req = (
        datastream.DatastreamProjectsLocationsConnectionProfilesDeleteRequest(
            name=cp_name))

    try:
      return self.client.projects_locations_connectionProfiles.Delete(
          delete_req)
    except datastream.HttpError:
      logging.exception("Unable to delete connection profile %r.",
                        cp_name)
      return None

  def _StopAndDeleteStream(self, stream_name):
    """Pause a stream, then delete it.

    NOTE(review): if pausing fails, deletion is skipped and None is
    returned — the stream is left behind. Presumably intentional
    (deleting a running stream may be unsafe); confirm with Datastream docs.
    """
    try:
      self._UpdateStreamState(stream_name,
                              datastream.Stream.StateValueValuesEnum.PAUSED)
    except datastream.HttpError:
      logging.exception("There was an issue stopping Datastream stream %r.",
                        stream_name)
      return None

    delete_request = datastream.DatastreamProjectsLocationsStreamsDeleteRequest(
        name=stream_name)

    return self.client.projects_locations_streams.Delete(delete_request)

  def _ListStreams(self):
    """List all streams under this manager's parent."""
    request = (
        datastream.DatastreamProjectsLocationsStreamsListRequest(
            parent=self.datastream_parent))
    return self.client.projects_locations_streams.List(request)

  def _ListConnectionProfiles(self):
    """List all connection profiles under this manager's parent."""
    request = (
        datastream.DatastreamProjectsLocationsConnectionProfilesListRequest(
            parent=self.datastream_parent))
    return self.client.projects_locations_connectionProfiles.List(request)

  def _ListPrivateConnections(self):
    """List all private connections under this manager's parent."""
    request = (
        datastream.DatastreamProjectsLocationsPrivateConnectionsListRequest(
            parent=self.datastream_parent))
    return self.client.projects_locations_privateConnections.List(request)

  def _CreateDatabaseConnectionProfile(self):
    """Create the source connection profile from whichever CP was supplied.

    Raises:
      ValueError: If neither an Oracle nor a MySQL CP was configured.
    """
    if self.oracle_cp:
      return self._CreateOracleConnectionProfile(self.source_connection_name,
                                                 self.oracle_cp)
    if self.mysql_cp:
      return self._CreateMysqlConnectionProfile(
          self.source_connection_name,
          self.getMysqlConnectionProfile())
    raise ValueError("No Source Connection Profile Supplied")

  def getMysqlConnectionProfile(self):
    """Return the MySQL CP as a dict, ensuring an sslConfig is present."""
    if not isinstance(self.mysql_cp, dict):
      self.mysql_cp = self.mysql_cp.getDatastreamCP()

    # Use .get: a CP dict without an "sslConfig" key must not KeyError.
    if not self.mysql_cp.get("sslConfig"):
      self.mysql_cp["sslConfig"] = datastream.MysqlSslConfig()

    logging.info("Logging MySQL CP:")
    logging.info(self.mysql_cp)
    return self.mysql_cp

  def _CreateMysqlConnectionProfile(self, name, mysql_cp):
    """Create a MySQL connection profile and wait for the operation."""
    logging.info(
        "Creating connection profile %r for MySQL database. Parent: %r", name,
        self.datastream_parent)
    logging.debug("Database properties: %r", mysql_cp)
    connection_profile = datastream.ConnectionProfile(
        displayName=name,
        mysqlProfile=datastream.MysqlProfile(**mysql_cp),
        noConnectivity=datastream.NoConnectivitySettings())
    request = (
        datastream.DatastreamProjectsLocationsConnectionProfilesCreateRequest(
            parent=self.datastream_parent,
            connectionProfileId=name,
            connectionProfile=connection_profile))
    response = self.client.projects_locations_connectionProfiles.Create(request)
    return self._WaitForCompletion(response)

  def _CreateOracleConnectionProfile(self, name, oracle_cp):
    """Create an Oracle connection profile and wait for the operation."""
    logging.info(
        "Creating connection profile %r for Oracle database. Parent: %r", name,
        self.datastream_parent)
    logging.debug("Database properties: %r", oracle_cp)
    # Exactly one of private/no-connectivity is set, never both.
    private_conn = self._get_private_connection()
    no_conn = datastream.NoConnectivitySettings() if not private_conn else None
    connection_profile = datastream.ConnectionProfile(
        displayName=name,
        oracleProfile=datastream.OracleProfile(**oracle_cp),
        noConnectivity=no_conn,
        privateConnectivity=private_conn)
    request = (
        datastream.DatastreamProjectsLocationsConnectionProfilesCreateRequest(
            parent=self.datastream_parent,
            connectionProfileId=name,
            connectionProfile=connection_profile))
    response = self.client.projects_locations_connectionProfiles.Create(request)
    return self._WaitForCompletion(response)

  def _CreateGcsConnectionProfile(self, name, bucket_name, root_path):
    """Create the GCS destination connection profile and wait for the op."""
    connection_profile = datastream.ConnectionProfile(
        displayName=name,
        gcsProfile=datastream.GcsProfile(bucketName=bucket_name,
                                         rootPath=root_path),
        noConnectivity=datastream.NoConnectivitySettings())
    request = (
        datastream.DatastreamProjectsLocationsConnectionProfilesCreateRequest(
            parent=self.datastream_parent,
            connectionProfileId=name,
            connectionProfile=connection_profile,
        ))
    response = self.client.projects_locations_connectionProfiles.Create(request)
    return self._WaitForCompletion(response)

  def _get_source_config(self):
    """Build the SourceConfig matching the configured source database.

    Raises:
      ValueError: If neither an Oracle nor a MySQL CP was configured
        (previously this silently returned None).
    """
    if self.oracle_cp:
      return datastream.SourceConfig(
          sourceConnectionProfileName=self.full_source_connection_name,
          oracleSourceConfig=datastream.OracleSourceConfig(
              allowlist=self._get_oracle_rdbms(self.allowed_tables),
              rejectlist=datastream.OracleRdbms(),
          ),
      )
    if self.mysql_cp:
      return datastream.SourceConfig(
          sourceConnectionProfileName=self.full_source_connection_name,
          mysqlSourceConfig=datastream.MysqlSourceConfig(
              allowlist=self._get_mysql_rdbms(self.allowed_tables),
              rejectlist=datastream.MysqlRdbms(),
          ),
      )
    raise ValueError("No Source Connection Profile Supplied")

  def _get_private_connection(self):
    """Return PrivateConnection object if it is required in the CP."""
    if self.private_connection_name:
      return datastream.PrivateConnectivity(
          privateConnectionName=self.private_connection_name)

    return None

  def _get_oracle_rdbms(self, table_list: List[Tuple[str, str]]):
    """Return an Oracle Rdbms with the desired tables set.

    Args:
      table_list: A List of allowed schema and table tuples. A falsy table
        name selects the whole schema.
    Returns:
      An initialized OracleRdbms filtered against the supplied tables.
    """
    schema_tables = {}
    for schema_name, table_name in table_list:
      tables = schema_tables.setdefault(schema_name, [])
      if table_name:
        tables.append(datastream.OracleTable(tableName=table_name))

    oracle_schemas = [
        datastream.OracleSchema(schemaName=schema_name, oracleTables=tables)
        for schema_name, tables in schema_tables.items()]

    return datastream.OracleRdbms(oracleSchemas=oracle_schemas)

  def _get_mysql_rdbms(self, table_list: List[Tuple[str, str]]):
    """Return a Mysql Rdbms with the desired tables set.

    Args:
      table_list: A List of allowed database and table tuples. A falsy
        table name selects the whole database.
    Returns:
      An initialized MysqlRdbms filtered against the supplied tables.
    """
    db_tables = {}
    for db_name, table_name in table_list:
      tables = db_tables.setdefault(db_name, [])
      if table_name:
        tables.append(datastream.MysqlTable(tableName=table_name))

    mysql_dbs = [
        datastream.MysqlDatabase(databaseName=db_name, mysqlTables=tables)
        for db_name, tables in db_tables.items()]

    return datastream.MysqlRdbms(mysqlDatabases=mysql_dbs)

  def _getGcsDestinationConfig(self, export_file_format):
    """Build the GCS destination config for the requested file format."""
    if export_file_format == DATASTREAM_EXPORT_FILEFORMAT_JSON:
      return datastream.GcsDestinationConfig(
          jsonFileFormat=datastream.JsonFileFormat(
              compression=datastream.JsonFileFormat
              .CompressionValueValuesEnum.GZIP,
              schemaFileFormat=datastream.JsonFileFormat
              .SchemaFileFormatValueValuesEnum.NO_SCHEMA_FILE
          ),
          fileRotationInterval="10s",  # Rotate files every 10 seconds.
          fileRotationMb=4,  # Files of at-most 4mb.
      )
    else:
      return datastream.GcsDestinationConfig(
          gcsFileFormat=(datastream.GcsDestinationConfig
                         .GcsFileFormatValueValuesEnum.AVRO),
          fileRotationInterval="10s",  # Rotate files every 10 seconds.
          fileRotationMb=4,  # Files of at-most 4mb.
      )

  def _CreateStream(self,
                    name,
                    oracle_cp_name,
                    gcs_cp_name,
                    export_file_format):
    """Create (but do not start) a backfill-all stream; wait for the op."""
    stream = datastream.Stream(
        displayName=name,
        destinationConfig=datastream.DestinationConfig(
            destinationConnectionProfileName=gcs_cp_name,
            gcsDestinationConfig=
            self._getGcsDestinationConfig(export_file_format),
        ),
        sourceConfig=self._get_source_config(),
        backfillAll=datastream.BackfillAllStrategy(),
    )

    request = (
        datastream.DatastreamProjectsLocationsStreamsCreateRequest(
            parent=self.datastream_parent, streamId=name, stream=stream))

    response = self.client.projects_locations_streams.Create(request)

    response = self._WaitForCompletion(response)

    logging.debug("Stream creation response: %r", response)
    if not response.error:
      logging.info("SUCCESS: Created stream %r", name)
    return response

  def _StartStream(self, stream_name):
    """Start an existing stream via the Start RPC; wait for the operation."""
    request = datastream.DatastreamProjectsLocationsStreamsStartRequest(
        name=stream_name)

    response = self.client.projects_locations_streams.Start(request)
    return self._WaitForCompletion(response)
506 |
--------------------------------------------------------------------------------
/datastream_utils/datastream/datastream_v1alpha1_client.py:
--------------------------------------------------------------------------------
1 | """Generated client library for datastream version v1alpha1."""
2 | # NOTE: This file is autogenerated and should not be edited by hand.
3 |
4 | from __future__ import absolute_import
5 |
6 | from apitools.base.py import base_api
7 | from . import datastream_v1alpha1_messages as messages
8 |
9 |
10 | class DatastreamV1alpha1(base_api.BaseApiClient):
11 | """Generated client library for service datastream version v1alpha1."""
12 |
13 | MESSAGES_MODULE = messages
14 | BASE_URL = 'https://datastream.googleapis.com/'
15 | MTLS_BASE_URL = 'https://datastream.mtls.googleapis.com/'
16 |
17 | _PACKAGE = 'datastream'
18 | _SCOPES = ['https://www.googleapis.com/auth/cloud-platform']
19 | _VERSION = 'v1alpha1'
20 | _CLIENT_ID = '1042881264118.apps.googleusercontent.com'
21 | _CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
22 | _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
23 | _CLIENT_CLASS_NAME = 'DatastreamV1alpha1'
24 | _URL_VERSION = 'v1alpha1'
25 | _API_KEY = None
26 |
27 | def __init__(self, url='', credentials=None,
28 | get_credentials=True, http=None, model=None,
29 | log_request=False, log_response=False,
30 | credentials_args=None, default_global_params=None,
31 | additional_http_headers=None, response_encoding=None):
32 | """Create a new datastream handle."""
33 | url = url or self.BASE_URL
34 | super(DatastreamV1alpha1, self).__init__(
35 | url, credentials=credentials,
36 | get_credentials=get_credentials, http=http, model=model,
37 | log_request=log_request, log_response=log_response,
38 | credentials_args=credentials_args,
39 | default_global_params=default_global_params,
40 | additional_http_headers=additional_http_headers,
41 | response_encoding=response_encoding)
42 | self.projects_locations_connectionProfiles = self.ProjectsLocationsConnectionProfilesService(self)
43 | self.projects_locations_operations = self.ProjectsLocationsOperationsService(self)
44 | self.projects_locations_privateConnections_routes = self.ProjectsLocationsPrivateConnectionsRoutesService(self)
45 | self.projects_locations_privateConnections = self.ProjectsLocationsPrivateConnectionsService(self)
46 | self.projects_locations_streams_objects = self.ProjectsLocationsStreamsObjectsService(self)
47 | self.projects_locations_streams = self.ProjectsLocationsStreamsService(self)
48 | self.projects_locations = self.ProjectsLocationsService(self)
49 | self.projects = self.ProjectsService(self)
50 |
51 | class ProjectsLocationsConnectionProfilesService(base_api.BaseApiService):
52 | """Service class for the projects_locations_connectionProfiles resource."""
53 |
54 | _NAME = 'projects_locations_connectionProfiles'
55 |
56 | def __init__(self, client):
57 | super(DatastreamV1alpha1.ProjectsLocationsConnectionProfilesService, self).__init__(client)
58 | self._upload_configs = {
59 | }
60 |
61 | def Create(self, request, global_params=None):
62 | r"""Use this method to create a connection profile in a project and location.
63 |
64 | Args:
65 | request: (DatastreamProjectsLocationsConnectionProfilesCreateRequest) input message
66 | global_params: (StandardQueryParameters, default: None) global arguments
67 | Returns:
68 | (Operation) The response message.
69 | """
70 | config = self.GetMethodConfig('Create')
71 | return self._RunMethod(
72 | config, request, global_params=global_params)
73 |
74 | Create.method_config = lambda: base_api.ApiMethodInfo(
75 | flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/connectionProfiles',
76 | http_method='POST',
77 | method_id='datastream.projects.locations.connectionProfiles.create',
78 | ordered_params=['parent'],
79 | path_params=['parent'],
80 | query_params=['connectionProfileId', 'requestId'],
81 | relative_path='v1alpha1/{+parent}/connectionProfiles',
82 | request_field='connectionProfile',
83 | request_type_name='DatastreamProjectsLocationsConnectionProfilesCreateRequest',
84 | response_type_name='Operation',
85 | supports_download=False,
86 | )
87 |
88 | def Delete(self, request, global_params=None):
89 | r"""Use this method to delete a connection profile..
90 |
91 | Args:
92 | request: (DatastreamProjectsLocationsConnectionProfilesDeleteRequest) input message
93 | global_params: (StandardQueryParameters, default: None) global arguments
94 | Returns:
95 | (Operation) The response message.
96 | """
97 | config = self.GetMethodConfig('Delete')
98 | return self._RunMethod(
99 | config, request, global_params=global_params)
100 |
101 | Delete.method_config = lambda: base_api.ApiMethodInfo(
102 | flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/connectionProfiles/{connectionProfilesId}',
103 | http_method='DELETE',
104 | method_id='datastream.projects.locations.connectionProfiles.delete',
105 | ordered_params=['name'],
106 | path_params=['name'],
107 | query_params=['requestId'],
108 | relative_path='v1alpha1/{+name}',
109 | request_field='',
110 | request_type_name='DatastreamProjectsLocationsConnectionProfilesDeleteRequest',
111 | response_type_name='Operation',
112 | supports_download=False,
113 | )
114 |
115 | def Discover(self, request, global_params=None):
116 | r"""Use this method to discover a connection profile. The discover API call exposes the data objects and metadata belonging to the profile. Typically, a request returns children data objects under a parent data object that's optionally supplied in the request.
117 |
118 | Args:
119 | request: (DatastreamProjectsLocationsConnectionProfilesDiscoverRequest) input message
120 | global_params: (StandardQueryParameters, default: None) global arguments
121 | Returns:
122 | (DiscoverConnectionProfileResponse) The response message.
123 | """
124 | config = self.GetMethodConfig('Discover')
125 | return self._RunMethod(
126 | config, request, global_params=global_params)
127 |
128 | Discover.method_config = lambda: base_api.ApiMethodInfo(
129 | flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/connectionProfiles:discover',
130 | http_method='POST',
131 | method_id='datastream.projects.locations.connectionProfiles.discover',
132 | ordered_params=['parent'],
133 | path_params=['parent'],
134 | query_params=[],
135 | relative_path='v1alpha1/{+parent}/connectionProfiles:discover',
136 | request_field='discoverConnectionProfileRequest',
137 | request_type_name='DatastreamProjectsLocationsConnectionProfilesDiscoverRequest',
138 | response_type_name='DiscoverConnectionProfileResponse',
139 | supports_download=False,
140 | )
141 |
142 | def Get(self, request, global_params=None):
143 | r"""Use this method to get details about a connection profile.
144 |
145 | Args:
146 | request: (DatastreamProjectsLocationsConnectionProfilesGetRequest) input message
147 | global_params: (StandardQueryParameters, default: None) global arguments
148 | Returns:
149 | (ConnectionProfile) The response message.
150 | """
151 | config = self.GetMethodConfig('Get')
152 | return self._RunMethod(
153 | config, request, global_params=global_params)
154 |
155 | Get.method_config = lambda: base_api.ApiMethodInfo(
156 | flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/connectionProfiles/{connectionProfilesId}',
157 | http_method='GET',
158 | method_id='datastream.projects.locations.connectionProfiles.get',
159 | ordered_params=['name'],
160 | path_params=['name'],
161 | query_params=[],
162 | relative_path='v1alpha1/{+name}',
163 | request_field='',
164 | request_type_name='DatastreamProjectsLocationsConnectionProfilesGetRequest',
165 | response_type_name='ConnectionProfile',
166 | supports_download=False,
167 | )
168 |
169 | def List(self, request, global_params=None):
170 | r"""Use this method to list connection profiles created in a project and location.
171 |
172 | Args:
173 | request: (DatastreamProjectsLocationsConnectionProfilesListRequest) input message
174 | global_params: (StandardQueryParameters, default: None) global arguments
175 | Returns:
176 | (ListConnectionProfilesResponse) The response message.
177 | """
178 | config = self.GetMethodConfig('List')
179 | return self._RunMethod(
180 | config, request, global_params=global_params)
181 |
182 | List.method_config = lambda: base_api.ApiMethodInfo(
183 | flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/connectionProfiles',
184 | http_method='GET',
185 | method_id='datastream.projects.locations.connectionProfiles.list',
186 | ordered_params=['parent'],
187 | path_params=['parent'],
188 | query_params=['filter', 'orderBy', 'pageSize', 'pageToken'],
189 | relative_path='v1alpha1/{+parent}/connectionProfiles',
190 | request_field='',
191 | request_type_name='DatastreamProjectsLocationsConnectionProfilesListRequest',
192 | response_type_name='ListConnectionProfilesResponse',
193 | supports_download=False,
194 | )
195 |
196 | def Patch(self, request, global_params=None):
197 | r"""Use this method to update the parameters of a connection profile.
198 |
199 | Args:
200 | request: (DatastreamProjectsLocationsConnectionProfilesPatchRequest) input message
201 | global_params: (StandardQueryParameters, default: None) global arguments
202 | Returns:
203 | (Operation) The response message.
204 | """
205 | config = self.GetMethodConfig('Patch')
206 | return self._RunMethod(
207 | config, request, global_params=global_params)
208 |
209 | Patch.method_config = lambda: base_api.ApiMethodInfo(
210 | flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/connectionProfiles/{connectionProfilesId}',
211 | http_method='PATCH',
212 | method_id='datastream.projects.locations.connectionProfiles.patch',
213 | ordered_params=['name'],
214 | path_params=['name'],
215 | query_params=['requestId', 'updateMask'],
216 | relative_path='v1alpha1/{+name}',
217 | request_field='connectionProfile',
218 | request_type_name='DatastreamProjectsLocationsConnectionProfilesPatchRequest',
219 | response_type_name='Operation',
220 | supports_download=False,
221 | )
222 |
223 | class ProjectsLocationsOperationsService(base_api.BaseApiService):
224 | """Service class for the projects_locations_operations resource."""
225 |
226 | _NAME = 'projects_locations_operations'
227 |
228 | def __init__(self, client):
229 | super(DatastreamV1alpha1.ProjectsLocationsOperationsService, self).__init__(client)
230 | self._upload_configs = {
231 | }
232 |
233 | def Cancel(self, request, global_params=None):
234 | r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
235 |
236 | Args:
237 | request: (DatastreamProjectsLocationsOperationsCancelRequest) input message
238 | global_params: (StandardQueryParameters, default: None) global arguments
239 | Returns:
240 | (Empty) The response message.
241 | """
242 | config = self.GetMethodConfig('Cancel')
243 | return self._RunMethod(
244 | config, request, global_params=global_params)
245 |
246 | Cancel.method_config = lambda: base_api.ApiMethodInfo(
247 | flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel',
248 | http_method='POST',
249 | method_id='datastream.projects.locations.operations.cancel',
250 | ordered_params=['name'],
251 | path_params=['name'],
252 | query_params=[],
253 | relative_path='v1alpha1/{+name}:cancel',
254 | request_field='cancelOperationRequest',
255 | request_type_name='DatastreamProjectsLocationsOperationsCancelRequest',
256 | response_type_name='Empty',
257 | supports_download=False,
258 | )
259 |
260 | def Delete(self, request, global_params=None):
261 | r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
262 |
263 | Args:
264 | request: (DatastreamProjectsLocationsOperationsDeleteRequest) input message
265 | global_params: (StandardQueryParameters, default: None) global arguments
266 | Returns:
267 | (Empty) The response message.
268 | """
269 | config = self.GetMethodConfig('Delete')
270 | return self._RunMethod(
271 | config, request, global_params=global_params)
272 |
273 | Delete.method_config = lambda: base_api.ApiMethodInfo(
274 | flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}',
275 | http_method='DELETE',
276 | method_id='datastream.projects.locations.operations.delete',
277 | ordered_params=['name'],
278 | path_params=['name'],
279 | query_params=[],
280 | relative_path='v1alpha1/{+name}',
281 | request_field='',
282 | request_type_name='DatastreamProjectsLocationsOperationsDeleteRequest',
283 | response_type_name='Empty',
284 | supports_download=False,
285 | )
286 |
    def Get(self, request, global_params=None):
      r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.

      Args:
        request: (DatastreamProjectsLocationsOperationsGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Get (the lambda defers construction until the
    # config is requested).
    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}',
        http_method='GET',
        method_id='datastream.projects.locations.operations.get',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha1/{+name}',
        request_field='',
        request_type_name='DatastreamProjectsLocationsOperationsGetRequest',
        response_type_name='Operation',
        supports_download=False,
    )
313 |
    def List(self, request, global_params=None):
      r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.

      Args:
        request: (DatastreamProjectsLocationsOperationsListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (ListOperationsResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for List (the lambda defers construction until the
    # config is requested). Pagination is driven by pageSize/pageToken.
    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/operations',
        http_method='GET',
        method_id='datastream.projects.locations.operations.list',
        ordered_params=['name'],
        path_params=['name'],
        query_params=['filter', 'pageSize', 'pageToken'],
        relative_path='v1alpha1/{+name}/operations',
        request_field='',
        request_type_name='DatastreamProjectsLocationsOperationsListRequest',
        response_type_name='ListOperationsResponse',
        supports_download=False,
    )
340 |
  class ProjectsLocationsPrivateConnectionsRoutesService(base_api.BaseApiService):
    """Service class for the projects_locations_privateConnections_routes resource."""

    _NAME = 'projects_locations_privateConnections_routes'

    def __init__(self, client):
      super(DatastreamV1alpha1.ProjectsLocationsPrivateConnectionsRoutesService, self).__init__(client)
      # No media (upload) method configs are registered for this service.
      self._upload_configs = {
          }

    def Create(self, request, global_params=None):
      r"""Use this method to create a route for a private connectivity in a project and location.

      Args:
        request: (DatastreamProjectsLocationsPrivateConnectionsRoutesCreateRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Create')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Create (built lazily); the route payload is sent
    # as the request body via request_field='route'.
    Create.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/privateConnections/{privateConnectionsId}/routes',
        http_method='POST',
        method_id='datastream.projects.locations.privateConnections.routes.create',
        ordered_params=['parent'],
        path_params=['parent'],
        query_params=['requestId', 'routeId'],
        relative_path='v1alpha1/{+parent}/routes',
        request_field='route',
        request_type_name='DatastreamProjectsLocationsPrivateConnectionsRoutesCreateRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
      r"""Use this method to delete a route.

      Args:
        request: (DatastreamProjectsLocationsPrivateConnectionsRoutesDeleteRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Delete')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Delete (built lazily).
    Delete.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/privateConnections/{privateConnectionsId}/routes/{routesId}',
        http_method='DELETE',
        method_id='datastream.projects.locations.privateConnections.routes.delete',
        ordered_params=['name'],
        path_params=['name'],
        query_params=['requestId'],
        relative_path='v1alpha1/{+name}',
        request_field='',
        request_type_name='DatastreamProjectsLocationsPrivateConnectionsRoutesDeleteRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
      r"""Use this method to get details about a route.

      Args:
        request: (DatastreamProjectsLocationsPrivateConnectionsRoutesGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Route) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Get (built lazily).
    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/privateConnections/{privateConnectionsId}/routes/{routesId}',
        http_method='GET',
        method_id='datastream.projects.locations.privateConnections.routes.get',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha1/{+name}',
        request_field='',
        request_type_name='DatastreamProjectsLocationsPrivateConnectionsRoutesGetRequest',
        response_type_name='Route',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      r"""Use this method to list routes created for a private connectivity in a project and location.

      Args:
        request: (DatastreamProjectsLocationsPrivateConnectionsRoutesListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (ListRoutesResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for List (built lazily); supports filtering, ordering
    # and pageSize/pageToken pagination.
    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/privateConnections/{privateConnectionsId}/routes',
        http_method='GET',
        method_id='datastream.projects.locations.privateConnections.routes.list',
        ordered_params=['parent'],
        path_params=['parent'],
        query_params=['filter', 'orderBy', 'pageSize', 'pageToken'],
        relative_path='v1alpha1/{+parent}/routes',
        request_field='',
        request_type_name='DatastreamProjectsLocationsPrivateConnectionsRoutesListRequest',
        response_type_name='ListRoutesResponse',
        supports_download=False,
    )
458 |
  class ProjectsLocationsPrivateConnectionsService(base_api.BaseApiService):
    """Service class for the projects_locations_privateConnections resource."""

    _NAME = 'projects_locations_privateConnections'

    def __init__(self, client):
      super(DatastreamV1alpha1.ProjectsLocationsPrivateConnectionsService, self).__init__(client)
      # No media (upload) method configs are registered for this service.
      self._upload_configs = {
          }

    def Create(self, request, global_params=None):
      r"""Use this method to create a private connectivity configuration.

      Args:
        request: (DatastreamProjectsLocationsPrivateConnectionsCreateRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Create')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Create (built lazily); the privateConnection
    # payload is the request body.
    Create.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/privateConnections',
        http_method='POST',
        method_id='datastream.projects.locations.privateConnections.create',
        ordered_params=['parent'],
        path_params=['parent'],
        query_params=['privateConnectionId', 'requestId'],
        relative_path='v1alpha1/{+parent}/privateConnections',
        request_field='privateConnection',
        request_type_name='DatastreamProjectsLocationsPrivateConnectionsCreateRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
      r"""Use this method to delete a private connectivity configuration.

      Args:
        request: (DatastreamProjectsLocationsPrivateConnectionsDeleteRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Delete')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Delete (built lazily); note the 'force' query
    # parameter in addition to 'requestId'.
    Delete.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/privateConnections/{privateConnectionsId}',
        http_method='DELETE',
        method_id='datastream.projects.locations.privateConnections.delete',
        ordered_params=['name'],
        path_params=['name'],
        query_params=['force', 'requestId'],
        relative_path='v1alpha1/{+name}',
        request_field='',
        request_type_name='DatastreamProjectsLocationsPrivateConnectionsDeleteRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
      r"""Use this method to get details about a private connectivity configuration.

      Args:
        request: (DatastreamProjectsLocationsPrivateConnectionsGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (PrivateConnection) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Get (built lazily).
    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/privateConnections/{privateConnectionsId}',
        http_method='GET',
        method_id='datastream.projects.locations.privateConnections.get',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha1/{+name}',
        request_field='',
        request_type_name='DatastreamProjectsLocationsPrivateConnectionsGetRequest',
        response_type_name='PrivateConnection',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      r"""Use this method to list private connectivity configurations in a project and location.

      Args:
        request: (DatastreamProjectsLocationsPrivateConnectionsListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (ListPrivateConnectionsResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for List (built lazily); supports filtering, ordering
    # and pageSize/pageToken pagination.
    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/privateConnections',
        http_method='GET',
        method_id='datastream.projects.locations.privateConnections.list',
        ordered_params=['parent'],
        path_params=['parent'],
        query_params=['filter', 'orderBy', 'pageSize', 'pageToken'],
        relative_path='v1alpha1/{+parent}/privateConnections',
        request_field='',
        request_type_name='DatastreamProjectsLocationsPrivateConnectionsListRequest',
        response_type_name='ListPrivateConnectionsResponse',
        supports_download=False,
    )
576 |
  class ProjectsLocationsStreamsObjectsService(base_api.BaseApiService):
    """Service class for the projects_locations_streams_objects resource."""

    _NAME = 'projects_locations_streams_objects'

    def __init__(self, client):
      super(DatastreamV1alpha1.ProjectsLocationsStreamsObjectsService, self).__init__(client)
      # No media (upload) method configs are registered for this service.
      self._upload_configs = {
          }

    def Get(self, request, global_params=None):
      r"""Use this method to get details about a stream object.

      Args:
        request: (DatastreamProjectsLocationsStreamsObjectsGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (StreamObject) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Get (built lazily).
    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/streams/{streamsId}/objects/{objectsId}',
        http_method='GET',
        method_id='datastream.projects.locations.streams.objects.get',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha1/{+name}',
        request_field='',
        request_type_name='DatastreamProjectsLocationsStreamsObjectsGetRequest',
        response_type_name='StreamObject',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      r"""Use this method to list the objects of a specific stream.

      Args:
        request: (DatastreamProjectsLocationsStreamsObjectsListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (ListStreamObjectsResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for List (built lazily); supports filtering, ordering
    # and pageSize/pageToken pagination.
    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/streams/{streamsId}/objects',
        http_method='GET',
        method_id='datastream.projects.locations.streams.objects.list',
        ordered_params=['parent'],
        path_params=['parent'],
        query_params=['filter', 'orderBy', 'pageSize', 'pageToken'],
        relative_path='v1alpha1/{+parent}/objects',
        request_field='',
        request_type_name='DatastreamProjectsLocationsStreamsObjectsListRequest',
        response_type_name='ListStreamObjectsResponse',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
      r"""Use this method to update a specific stream object.

      Args:
        request: (DatastreamProjectsLocationsStreamsObjectsPatchRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Patch')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Patch (built lazily); the streamObject payload is
    # the request body and updateMask selects the fields to change.
    Patch.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/streams/{streamsId}/objects/{objectsId}',
        http_method='PATCH',
        method_id='datastream.projects.locations.streams.objects.patch',
        ordered_params=['name'],
        path_params=['name'],
        query_params=['requestId', 'updateMask'],
        relative_path='v1alpha1/{+name}',
        request_field='streamObject',
        request_type_name='DatastreamProjectsLocationsStreamsObjectsPatchRequest',
        response_type_name='Operation',
        supports_download=False,
    )
667 |
  class ProjectsLocationsStreamsService(base_api.BaseApiService):
    """Service class for the projects_locations_streams resource."""

    _NAME = 'projects_locations_streams'

    def __init__(self, client):
      super(DatastreamV1alpha1.ProjectsLocationsStreamsService, self).__init__(client)
      # No media (upload) method configs are registered for this service.
      self._upload_configs = {
          }

    def Create(self, request, global_params=None):
      r"""Use this method to create a stream.

      Args:
        request: (DatastreamProjectsLocationsStreamsCreateRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Create')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Create (built lazily); the stream payload is the
    # request body; validateOnly allows a dry-run of the request.
    Create.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/streams',
        http_method='POST',
        method_id='datastream.projects.locations.streams.create',
        ordered_params=['parent'],
        path_params=['parent'],
        query_params=['force', 'requestId', 'streamId', 'validateOnly'],
        relative_path='v1alpha1/{+parent}/streams',
        request_field='stream',
        request_type_name='DatastreamProjectsLocationsStreamsCreateRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
      r"""Use this method to delete a stream.

      Args:
        request: (DatastreamProjectsLocationsStreamsDeleteRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Delete')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Delete (built lazily).
    Delete.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/streams/{streamsId}',
        http_method='DELETE',
        method_id='datastream.projects.locations.streams.delete',
        ordered_params=['name'],
        path_params=['name'],
        query_params=['requestId'],
        relative_path='v1alpha1/{+name}',
        request_field='',
        request_type_name='DatastreamProjectsLocationsStreamsDeleteRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def FetchErrors(self, request, global_params=None):
      r"""Use this method to fetch any errors associated with a stream.

      Args:
        request: (DatastreamProjectsLocationsStreamsFetchErrorsRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('FetchErrors')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for FetchErrors (built lazily); note the path
    # parameter is named 'stream' here, unlike the 'name' used elsewhere.
    FetchErrors.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/streams/{streamsId}:fetchErrors',
        http_method='POST',
        method_id='datastream.projects.locations.streams.fetchErrors',
        ordered_params=['stream'],
        path_params=['stream'],
        query_params=[],
        relative_path='v1alpha1/{+stream}:fetchErrors',
        request_field='fetchErrorsRequest',
        request_type_name='DatastreamProjectsLocationsStreamsFetchErrorsRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
      r"""Use this method to get details about a stream.

      Args:
        request: (DatastreamProjectsLocationsStreamsGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Stream) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Get (built lazily).
    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/streams/{streamsId}',
        http_method='GET',
        method_id='datastream.projects.locations.streams.get',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha1/{+name}',
        request_field='',
        request_type_name='DatastreamProjectsLocationsStreamsGetRequest',
        response_type_name='Stream',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      r"""Use this method to list streams in a project and location.

      Args:
        request: (DatastreamProjectsLocationsStreamsListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (ListStreamsResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for List (built lazily); supports filtering, ordering
    # and pageSize/pageToken pagination.
    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/streams',
        http_method='GET',
        method_id='datastream.projects.locations.streams.list',
        ordered_params=['parent'],
        path_params=['parent'],
        query_params=['filter', 'orderBy', 'pageSize', 'pageToken'],
        relative_path='v1alpha1/{+parent}/streams',
        request_field='',
        request_type_name='DatastreamProjectsLocationsStreamsListRequest',
        response_type_name='ListStreamsResponse',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
      r"""Use this method to update the configuration of a stream.

      Args:
        request: (DatastreamProjectsLocationsStreamsPatchRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Patch')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Patch (built lazily); the stream payload is the
    # request body, updateMask selects fields, validateOnly allows a dry-run.
    Patch.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/streams/{streamsId}',
        http_method='PATCH',
        method_id='datastream.projects.locations.streams.patch',
        ordered_params=['name'],
        path_params=['name'],
        query_params=['force', 'requestId', 'updateMask', 'validateOnly'],
        relative_path='v1alpha1/{+name}',
        request_field='stream',
        request_type_name='DatastreamProjectsLocationsStreamsPatchRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def Pause(self, request, global_params=None):
      r"""Use this method to pause a running stream.

      Args:
        request: (DatastreamProjectsLocationsStreamsPauseRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Pause')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Pause (built lazily); custom-verb POST to :pause.
    Pause.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/streams/{streamsId}:pause',
        http_method='POST',
        method_id='datastream.projects.locations.streams.pause',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha1/{+name}:pause',
        request_field='pauseStreamRequest',
        request_type_name='DatastreamProjectsLocationsStreamsPauseRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def Resume(self, request, global_params=None):
      r"""Use this method to resume a paused stream.

      Args:
        request: (DatastreamProjectsLocationsStreamsResumeRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Resume')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Resume (built lazily); custom-verb POST to :resume.
    Resume.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/streams/{streamsId}:resume',
        http_method='POST',
        method_id='datastream.projects.locations.streams.resume',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha1/{+name}:resume',
        request_field='resumeStreamRequest',
        request_type_name='DatastreamProjectsLocationsStreamsResumeRequest',
        response_type_name='Operation',
        supports_download=False,
    )

    def Start(self, request, global_params=None):
      r"""Use this method to start a stream.

      Args:
        request: (DatastreamProjectsLocationsStreamsStartRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Start')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Start (built lazily); custom-verb POST to :start.
    Start.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/streams/{streamsId}:start',
        http_method='POST',
        method_id='datastream.projects.locations.streams.start',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha1/{+name}:start',
        request_field='startStreamRequest',
        request_type_name='DatastreamProjectsLocationsStreamsStartRequest',
        response_type_name='Operation',
        supports_download=False,
    )
920 |
  class ProjectsLocationsService(base_api.BaseApiService):
    """Service class for the projects_locations resource."""

    _NAME = 'projects_locations'

    def __init__(self, client):
      super(DatastreamV1alpha1.ProjectsLocationsService, self).__init__(client)
      # No media (upload) method configs are registered for this service.
      self._upload_configs = {
          }

    def FetchStaticIps(self, request, global_params=None):
      r"""The FetchStaticIps API call exposes the static ips used by Datastream. Typically, a request returns children data objects under a parent data object that's optionally supplied in the request.

      Args:
        request: (DatastreamProjectsLocationsFetchStaticIpsRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (FetchStaticIpsResponse) The response message.
      """
      config = self.GetMethodConfig('FetchStaticIps')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for FetchStaticIps (built lazily); GET on the
    # :fetchStaticIps custom verb with pageSize/pageToken pagination.
    FetchStaticIps.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}:fetchStaticIps',
        http_method='GET',
        method_id='datastream.projects.locations.fetchStaticIps',
        ordered_params=['name'],
        path_params=['name'],
        query_params=['pageSize', 'pageToken'],
        relative_path='v1alpha1/{+name}:fetchStaticIps',
        request_field='',
        request_type_name='DatastreamProjectsLocationsFetchStaticIpsRequest',
        response_type_name='FetchStaticIpsResponse',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
      r"""Gets information about a location.

      Args:
        request: (DatastreamProjectsLocationsGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Location) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for Get (built lazily).
    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}',
        http_method='GET',
        method_id='datastream.projects.locations.get',
        ordered_params=['name'],
        path_params=['name'],
        query_params=[],
        relative_path='v1alpha1/{+name}',
        request_field='',
        request_type_name='DatastreamProjectsLocationsGetRequest',
        response_type_name='Location',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      r"""Lists information about the supported locations for this service.

      Args:
        request: (DatastreamProjectsLocationsListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (ListLocationsResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    # REST wire mapping for List (built lazily); includeUnrevealedLocations is
    # specific to the Locations API.
    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path='v1alpha1/projects/{projectsId}/locations',
        http_method='GET',
        method_id='datastream.projects.locations.list',
        ordered_params=['name'],
        path_params=['name'],
        query_params=['filter', 'includeUnrevealedLocations', 'pageSize', 'pageToken'],
        relative_path='v1alpha1/{+name}/locations',
        request_field='',
        request_type_name='DatastreamProjectsLocationsListRequest',
        response_type_name='ListLocationsResponse',
        supports_download=False,
    )
1011 |
  class ProjectsService(base_api.BaseApiService):
    """Service class for the projects resource."""

    _NAME = 'projects'

    def __init__(self, client):
      super(DatastreamV1alpha1.ProjectsService, self).__init__(client)
      # No methods or media upload configs on the bare projects collection;
      # this service exists only as the parent of the nested resources.
      self._upload_configs = {
          }
1021 |
--------------------------------------------------------------------------------
/datastream_utils/datastream/datastream_v1alpha1_messages.py:
--------------------------------------------------------------------------------
1 | """Generated message classes for datastream version v1alpha1.
2 |
3 | """
4 | # NOTE: This file is autogenerated and should not be edited by hand.
5 |
6 | from __future__ import absolute_import
7 |
8 | from apitools.base.protorpclite import messages as _messages
9 | from apitools.base.py import encoding
10 | from apitools.base.py import extra_types
11 |
12 |
# ProtoRPC package name for the generated message classes in this module.
package = 'datastream'
14 |
15 |
class AvroFileFormat(_messages.Message):
  r"""AVRO file format configuration."""
  # NOTE(review): intentionally field-less — setting this message on a
  # destination appears to be what selects Avro output (cf. the empty
  # "avro_file_format": {} in destination_config.json); confirm against the
  # Datastream API reference.
18 |
19 |
class BackfillAllStrategy(_messages.Message):
  r"""Backfill strategy to automatically backfill the Stream's objects.
  Specific objects can be excluded.

  Fields:
    mysqlExcludedObjects: MySQL data source objects to avoid backfilling.
    oracleExcludedObjects: Oracle data source objects to avoid backfilling.
  """

  # The integers are protorpclite field numbers (wire tags) — they must stay
  # stable; MysqlRdbms/OracleRdbms are referenced by name and defined
  # elsewhere in this module.
  mysqlExcludedObjects = _messages.MessageField('MysqlRdbms', 1)
  oracleExcludedObjects = _messages.MessageField('OracleRdbms', 2)
31 |
32 |
class BackfillNoneStrategy(_messages.Message):
  r"""Backfill strategy to disable automatic backfill for the Stream's
  objects.
  """
  # NOTE(review): intentionally field-less — selecting this strategy is the
  # entire configuration.
37 |
38 |
39 |
class CancelOperationRequest(_messages.Message):
  r"""The request message for Operations.CancelOperation."""
  # Empty by design: the operation to cancel is identified by the request
  # URL's name path parameter, so the body carries no fields.
42 |
43 |
class ConnectionProfile(_messages.Message):
  r"""A ConnectionProfile object.

  Messages:
    LabelsValue: Labels.

  Fields:
    createTime: Output only. The create time of the resource.
    displayName: Required. Display name.
    forwardSshConnectivity: Forward SSH tunnel connectivity.
    gcsProfile: Cloud Storage ConnectionProfile configuration.
    labels: Labels.
    mysqlProfile: MySQL ConnectionProfile configuration.
    name: Output only. The resource's name.
    noConnectivity: No connectivity option chosen.
    oracleProfile: Oracle ConnectionProfile configuration.
    privateConnectivity: Private connectivity.
    staticServiceIpConnectivity: Static Service IP connectivity.
    updateTime: Output only. The update time of the resource.
  """

  # Standard apitools map pattern: unrecognized JSON keys are captured as
  # repeated AdditionalProperty (key, value) entries, giving dict-like labels.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class LabelsValue(_messages.Message):
    r"""Labels.

    Messages:
      AdditionalProperty: An additional property for a LabelsValue object.

    Fields:
      additionalProperties: Additional properties of type LabelsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a LabelsValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  # Field numbers are protorpclite wire tags; timestamps are plain strings
  # (RFC 3339 format presumably — confirm against the Datastream API docs).
  createTime = _messages.StringField(1)
  displayName = _messages.StringField(2)
  forwardSshConnectivity = _messages.MessageField('ForwardSshTunnelConnectivity', 3)
  gcsProfile = _messages.MessageField('GcsProfile', 4)
  labels = _messages.MessageField('LabelsValue', 5)
  mysqlProfile = _messages.MessageField('MysqlProfile', 6)
  name = _messages.StringField(7)
  # NOTE(review): the *Profile and *Connectivity fields look like mutually
  # exclusive oneof groups, but nothing in this class enforces that — verify
  # against the service before relying on it.
  noConnectivity = _messages.MessageField('NoConnectivitySettings', 8)
  oracleProfile = _messages.MessageField('OracleProfile', 9)
  privateConnectivity = _messages.MessageField('PrivateConnectivity', 10)
  staticServiceIpConnectivity = _messages.MessageField('StaticServiceIpConnectivity', 11)
  updateTime = _messages.StringField(12)
101 |
102 |
103 | class DatastreamProjectsLocationsConnectionProfilesCreateRequest(_messages.Message):
104 |   r"""A DatastreamProjectsLocationsConnectionProfilesCreateRequest object.
105 |
106 |   Fields:
107 |     connectionProfile: A ConnectionProfile resource to be passed as the
108 |       request body.
109 |     connectionProfileId: Required. The connection profile identifier.
110 |     parent: Required. The parent that owns the collection of
111 |       ConnectionProfiles.
112 |     requestId: Optional. A request ID to identify requests. Specify a unique
113 |       request ID so that if you must retry your request, the server will know
114 |       to ignore the request if it has already been completed. The server will
115 |       guarantee that for at least 60 minutes since the first request. For
116 |       example, consider a situation where you make an initial request and the
117 |       request times out. If you make the request again with the same request
118 |       ID, the server can check if original operation with the same request ID
119 |       was received, and if so, will ignore the second request. This prevents
120 |       clients from accidentally creating duplicate commitments. The request ID
121 |       must be a valid UUID with the exception that zero UUID is not supported
122 |       (00000000-0000-0000-0000-000000000000).
123 |   """
124 |
125 |   connectionProfile = _messages.MessageField('ConnectionProfile', 1)
126 |   connectionProfileId = _messages.StringField(2)
127 |   parent = _messages.StringField(3, required=True)
128 |   requestId = _messages.StringField(4)
129 |
130 |
131 | class DatastreamProjectsLocationsConnectionProfilesDeleteRequest(_messages.Message):
132 |   r"""A DatastreamProjectsLocationsConnectionProfilesDeleteRequest object.
133 |
134 |   Fields:
135 |     name: Required. The name of the connection profile resource to delete.
136 |     requestId: Optional. A request ID to identify requests. Specify a unique
137 |       request ID so that if you must retry your request, the server will know
138 |       to ignore the request if it has already been completed. The server will
139 |       guarantee that for at least 60 minutes after the first request. For
140 |       example, consider a situation where you make an initial request and the
141 |       request times out. If you make the request again with the same request
142 |       ID, the server can check if original operation with the same request ID
143 |       was received, and if so, will ignore the second request. This prevents
144 |       clients from accidentally creating duplicate commitments. The request ID
145 |       must be a valid UUID with the exception that zero UUID is not supported
146 |       (00000000-0000-0000-0000-000000000000).
147 |   """
148 |
149 |   name = _messages.StringField(1, required=True)
150 |   requestId = _messages.StringField(2)
151 |
152 |
153 | class DatastreamProjectsLocationsConnectionProfilesDiscoverRequest(_messages.Message):
154 | r"""A DatastreamProjectsLocationsConnectionProfilesDiscoverRequest object.
155 |
156 | Fields:
157 | discoverConnectionProfileRequest: A DiscoverConnectionProfileRequest
158 | resource to be passed as the request body.
159 | parent: Required. The parent resource of the ConnectionProfile type. Must
160 | be in the format `projects/*/locations/*`.
161 | """
162 |
163 | discoverConnectionProfileRequest = _messages.MessageField('DiscoverConnectionProfileRequest', 1)
164 | parent = _messages.StringField(2, required=True)
165 |
166 |
167 | class DatastreamProjectsLocationsConnectionProfilesGetRequest(_messages.Message):
168 | r"""A DatastreamProjectsLocationsConnectionProfilesGetRequest object.
169 |
170 | Fields:
171 | name: Required. The name of the connection profile resource to get.
172 | """
173 |
174 | name = _messages.StringField(1, required=True)
175 |
176 |
177 | class DatastreamProjectsLocationsConnectionProfilesListRequest(_messages.Message):
178 | r"""A DatastreamProjectsLocationsConnectionProfilesListRequest object.
179 |
180 | Fields:
181 | filter: Filter request.
182 | orderBy: Order by fields for the result.
183 | pageSize: Maximum number of connection profiles to return. If unspecified,
184 | at most 50 connection profiles will be returned. The maximum value is
185 | 1000; values above 1000 will be coerced to 1000.
186 | pageToken: Page token received from a previous `ListConnectionProfiles`
187 | call. Provide this to retrieve the subsequent page. When paginating, all
188 | other parameters provided to `ListConnectionProfiles` must match the
189 | call that provided the page token.
190 | parent: Required. The parent that owns the collection of connection
191 | profiles.
192 | """
193 |
194 | filter = _messages.StringField(1)
195 | orderBy = _messages.StringField(2)
196 | pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
197 | pageToken = _messages.StringField(4)
198 | parent = _messages.StringField(5, required=True)
199 |
200 |
201 | class DatastreamProjectsLocationsConnectionProfilesPatchRequest(_messages.Message):
202 |   r"""A DatastreamProjectsLocationsConnectionProfilesPatchRequest object.
203 |
204 |   Fields:
205 |     connectionProfile: A ConnectionProfile resource to be passed as the
206 |       request body.
207 |     name: Output only. The resource's name.
208 |     requestId: Optional. A request ID to identify requests. Specify a unique
209 |       request ID so that if you must retry your request, the server will know
210 |       to ignore the request if it has already been completed. The server will
211 |       guarantee that for at least 60 minutes since the first request. For
212 |       example, consider a situation where you make an initial request and the
213 |       request times out. If you make the request again with the same request
214 |       ID, the server can check if original operation with the same request ID
215 |       was received, and if so, will ignore the second request. This prevents
216 |       clients from accidentally creating duplicate commitments. The request ID
217 |       must be a valid UUID with the exception that zero UUID is not supported
218 |       (00000000-0000-0000-0000-000000000000).
219 |     updateMask: Optional. Field mask is used to specify the fields to be
220 |       overwritten in the ConnectionProfile resource by the update. The fields
221 |       specified in the update_mask are relative to the resource, not the full
222 |       request. A field will be overwritten if it is in the mask. If the user
223 |       does not provide a mask then all fields will be overwritten.
224 |   """
225 |
226 |   connectionProfile = _messages.MessageField('ConnectionProfile', 1)
227 |   name = _messages.StringField(2, required=True)
228 |   requestId = _messages.StringField(3)
229 |   updateMask = _messages.StringField(4)
230 |
231 |
232 | class DatastreamProjectsLocationsFetchStaticIpsRequest(_messages.Message):
233 | r"""A DatastreamProjectsLocationsFetchStaticIpsRequest object.
234 |
235 | Fields:
236 | name: Required. The name resource of the Response type. Must be in the
237 | format `projects/*/locations/*`.
238 | pageSize: Maximum number of Ips to return, will likely not be specified.
239 | pageToken: A page token, received from a previous `ListStaticIps` call.
240 | will likely not be specified.
241 | """
242 |
243 | name = _messages.StringField(1, required=True)
244 | pageSize = _messages.IntegerField(2, variant=_messages.Variant.INT32)
245 | pageToken = _messages.StringField(3)
246 |
247 |
248 | class DatastreamProjectsLocationsGetRequest(_messages.Message):
249 | r"""A DatastreamProjectsLocationsGetRequest object.
250 |
251 | Fields:
252 | name: Resource name for the location.
253 | """
254 |
255 | name = _messages.StringField(1, required=True)
256 |
257 |
258 | class DatastreamProjectsLocationsListRequest(_messages.Message):
259 | r"""A DatastreamProjectsLocationsListRequest object.
260 |
261 | Fields:
262 | filter: A filter to narrow down results to a preferred subset. The
263 | filtering language accepts strings like "displayName=tokyo", and is
264 | documented in more detail in [AIP-160](https://google.aip.dev/160).
265 | includeUnrevealedLocations: If true, the returned list will include
266 | locations which are not yet revealed.
267 | name: The resource that owns the locations collection, if applicable.
268 | pageSize: The maximum number of results to return. If not set, the service
269 | selects a default.
270 | pageToken: A page token received from the `next_page_token` field in the
271 | response. Send that page token to receive the subsequent page.
272 | """
273 |
274 | filter = _messages.StringField(1)
275 | includeUnrevealedLocations = _messages.BooleanField(2)
276 | name = _messages.StringField(3, required=True)
277 | pageSize = _messages.IntegerField(4, variant=_messages.Variant.INT32)
278 | pageToken = _messages.StringField(5)
279 |
280 |
281 | class DatastreamProjectsLocationsOperationsCancelRequest(_messages.Message):
282 | r"""A DatastreamProjectsLocationsOperationsCancelRequest object.
283 |
284 | Fields:
285 | cancelOperationRequest: A CancelOperationRequest resource to be passed as
286 | the request body.
287 | name: The name of the operation resource to be cancelled.
288 | """
289 |
290 | cancelOperationRequest = _messages.MessageField('CancelOperationRequest', 1)
291 | name = _messages.StringField(2, required=True)
292 |
293 |
294 | class DatastreamProjectsLocationsOperationsDeleteRequest(_messages.Message):
295 | r"""A DatastreamProjectsLocationsOperationsDeleteRequest object.
296 |
297 | Fields:
298 | name: The name of the operation resource to be deleted.
299 | """
300 |
301 | name = _messages.StringField(1, required=True)
302 |
303 |
304 | class DatastreamProjectsLocationsOperationsGetRequest(_messages.Message):
305 | r"""A DatastreamProjectsLocationsOperationsGetRequest object.
306 |
307 | Fields:
308 | name: The name of the operation resource.
309 | """
310 |
311 | name = _messages.StringField(1, required=True)
312 |
313 |
314 | class DatastreamProjectsLocationsOperationsListRequest(_messages.Message):
315 | r"""A DatastreamProjectsLocationsOperationsListRequest object.
316 |
317 | Fields:
318 | filter: The standard list filter.
319 | name: The name of the operation's parent resource.
320 | pageSize: The standard list page size.
321 | pageToken: The standard list page token.
322 | """
323 |
324 | filter = _messages.StringField(1)
325 | name = _messages.StringField(2, required=True)
326 | pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
327 | pageToken = _messages.StringField(4)
328 |
329 |
330 | class DatastreamProjectsLocationsPrivateConnectionsCreateRequest(_messages.Message):
331 |   r"""A DatastreamProjectsLocationsPrivateConnectionsCreateRequest object.
332 |
333 |   Fields:
334 |     parent: Required. The parent that owns the collection of
335 |       PrivateConnections.
336 |     privateConnection: A PrivateConnection resource to be passed as the
337 |       request body.
338 |     privateConnectionId: Required. The private connectivity identifier.
339 |     requestId: Optional. A request ID to identify requests. Specify a unique
340 |       request ID so that if you must retry your request, the server will know
341 |       to ignore the request if it has already been completed. The server will
342 |       guarantee that for at least 60 minutes since the first request. For
343 |       example, consider a situation where you make an initial request and the
344 |       request times out. If you make the request again with the same request
345 |       ID, the server can check if original operation with the same request ID
346 |       was received, and if so, will ignore the second request. This prevents
347 |       clients from accidentally creating duplicate commitments. The request ID
348 |       must be a valid UUID with the exception that zero UUID is not supported
349 |       (00000000-0000-0000-0000-000000000000).
350 |   """
351 |
352 |   parent = _messages.StringField(1, required=True)
353 |   privateConnection = _messages.MessageField('PrivateConnection', 2)
354 |   privateConnectionId = _messages.StringField(3)
355 |   requestId = _messages.StringField(4)
356 |
357 |
358 | class DatastreamProjectsLocationsPrivateConnectionsDeleteRequest(_messages.Message):
359 |   r"""A DatastreamProjectsLocationsPrivateConnectionsDeleteRequest object.
360 |
361 |   Fields:
362 |     force: Optional. If set to true, any child routes that belong to this
363 |       PrivateConnection will also be deleted.
364 |     name: Required. The name of the private connectivity configuration to
365 |       delete.
366 |     requestId: Optional. A request ID to identify requests. Specify a unique
367 |       request ID so that if you must retry your request, the server will know
368 |       to ignore the request if it has already been completed. The server will
369 |       guarantee that for at least 60 minutes after the first request. For
370 |       example, consider a situation where you make an initial request and the
371 |       request times out. If you make the request again with the same request
372 |       ID, the server can check if original operation with the same request ID
373 |       was received, and if so, will ignore the second request. This prevents
374 |       clients from accidentally creating duplicate commitments. The request ID
375 |       must be a valid UUID with the exception that zero UUID is not supported
376 |       (00000000-0000-0000-0000-000000000000).
377 |   """
378 |
379 |   force = _messages.BooleanField(1)
380 |   name = _messages.StringField(2, required=True)
381 |   requestId = _messages.StringField(3)
382 |
383 |
384 | class DatastreamProjectsLocationsPrivateConnectionsGetRequest(_messages.Message):
385 | r"""A DatastreamProjectsLocationsPrivateConnectionsGetRequest object.
386 |
387 | Fields:
388 | name: Required. The name of the private connectivity configuration to get.
389 | """
390 |
391 | name = _messages.StringField(1, required=True)
392 |
393 |
394 | class DatastreamProjectsLocationsPrivateConnectionsListRequest(_messages.Message):
395 |   r"""A DatastreamProjectsLocationsPrivateConnectionsListRequest object.
396 |
397 |   Fields:
398 |     filter: Filter request.
399 |     orderBy: Order by fields for the result.
400 |     pageSize: Maximum number of private connectivity configurations to return.
401 |       If unspecified, at most 50 private connectivity configurations will
402 |       be returned. The maximum value is 1000; values above 1000 will be
403 |       coerced to 1000.
404 |     pageToken: Page token received from a previous `ListPrivateConnections`
405 |       call. Provide this to retrieve the subsequent page. When paginating, all
406 |       other parameters provided to `ListPrivateConnections` must match the
407 |       call that provided the page token.
408 |     parent: Required. The parent that owns the collection of private
409 |       connectivity configurations.
410 |   """
411 |
412 |   filter = _messages.StringField(1)
413 |   orderBy = _messages.StringField(2)
414 |   pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
415 |   pageToken = _messages.StringField(4)
416 |   parent = _messages.StringField(5, required=True)
417 |
418 |
419 | class DatastreamProjectsLocationsPrivateConnectionsRoutesCreateRequest(_messages.Message):
420 |   r"""A DatastreamProjectsLocationsPrivateConnectionsRoutesCreateRequest
421 |   object.
422 |
423 |   Fields:
424 |     parent: Required. The parent that owns the collection of Routes.
425 |     requestId: Optional. A request ID to identify requests. Specify a unique
426 |       request ID so that if you must retry your request, the server will know
427 |       to ignore the request if it has already been completed. The server will
428 |       guarantee that for at least 60 minutes since the first request. For
429 |       example, consider a situation where you make an initial request and the
430 |       request times out. If you make the request again with the same request
431 |       ID, the server can check if original operation with the same request ID
432 |       was received, and if so, will ignore the second request. This prevents
433 |       clients from accidentally creating duplicate commitments. The request ID
434 |       must be a valid UUID with the exception that zero UUID is not supported
435 |       (00000000-0000-0000-0000-000000000000).
436 |     route: A Route resource to be passed as the request body.
437 |     routeId: Required. The Route identifier.
438 |   """
439 |
440 |   parent = _messages.StringField(1, required=True)
441 |   requestId = _messages.StringField(2)
442 |   route = _messages.MessageField('Route', 3)
443 |   routeId = _messages.StringField(4)
444 |
445 |
446 | class DatastreamProjectsLocationsPrivateConnectionsRoutesDeleteRequest(_messages.Message):
447 |   r"""A DatastreamProjectsLocationsPrivateConnectionsRoutesDeleteRequest
448 |   object.
449 |
450 |   Fields:
451 |     name: Required. The name of the Route resource to delete.
452 |     requestId: Optional. A request ID to identify requests. Specify a unique
453 |       request ID so that if you must retry your request, the server will know
454 |       to ignore the request if it has already been completed. The server will
455 |       guarantee that for at least 60 minutes after the first request. For
456 |       example, consider a situation where you make an initial request and the
457 |       request times out. If you make the request again with the same request
458 |       ID, the server can check if original operation with the same request ID
459 |       was received, and if so, will ignore the second request. This prevents
460 |       clients from accidentally creating duplicate commitments. The request ID
461 |       must be a valid UUID with the exception that zero UUID is not supported
462 |       (00000000-0000-0000-0000-000000000000).
463 |   """
464 |
465 |   name = _messages.StringField(1, required=True)
466 |   requestId = _messages.StringField(2)
467 |
468 |
469 | class DatastreamProjectsLocationsPrivateConnectionsRoutesGetRequest(_messages.Message):
470 | r"""A DatastreamProjectsLocationsPrivateConnectionsRoutesGetRequest object.
471 |
472 | Fields:
473 | name: Required. The name of the Route resource to get.
474 | """
475 |
476 | name = _messages.StringField(1, required=True)
477 |
478 |
479 | class DatastreamProjectsLocationsPrivateConnectionsRoutesListRequest(_messages.Message):
480 |   r"""A DatastreamProjectsLocationsPrivateConnectionsRoutesListRequest object.
481 |
482 |   Fields:
483 |     filter: Filter request.
484 |     orderBy: Order by fields for the result.
485 |     pageSize: Maximum number of Routes to return. The service may return fewer
486 |       than this value. If unspecified, at most 50 Routes will be returned. The
487 |       maximum value is 1000; values above 1000 will be coerced to 1000.
488 |     pageToken: Page token received from a previous `ListRoutes` call. Provide
489 |       this to retrieve the subsequent page. When paginating, all other
490 |       parameters provided to `ListRoutes` must match the call that provided
491 |       the page token.
492 |     parent: Required. The parent that owns the collection of Routes.
493 |   """
494 |
495 |   filter = _messages.StringField(1)
496 |   orderBy = _messages.StringField(2)
497 |   pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
498 |   pageToken = _messages.StringField(4)
499 |   parent = _messages.StringField(5, required=True)
500 |
501 |
502 | class DatastreamProjectsLocationsStreamsCreateRequest(_messages.Message):
503 |   r"""A DatastreamProjectsLocationsStreamsCreateRequest object.
504 |
505 |   Fields:
506 |     force: Optional. Create the stream without validating it.
507 |     parent: Required. The parent that owns the collection of streams.
508 |     requestId: Optional. A request ID to identify requests. Specify a unique
509 |       request ID so that if you must retry your request, the server will know
510 |       to ignore the request if it has already been completed. The server will
511 |       guarantee that for at least 60 minutes since the first request. For
512 |       example, consider a situation where you make an initial request and the
513 |       request times out. If you make the request again with the same request
514 |       ID, the server can check if original operation with the same request ID
515 |       was received, and if so, will ignore the second request. This prevents
516 |       clients from accidentally creating duplicate commitments. The request ID
517 |       must be a valid UUID with the exception that zero UUID is not supported
518 |       (00000000-0000-0000-0000-000000000000).
519 |     stream: A Stream resource to be passed as the request body.
520 |     streamId: Required. The stream identifier.
521 |     validateOnly: Optional. Only validate the stream, but do not create any
522 |       resources. The default is false.
523 |   """
524 |
525 |   force = _messages.BooleanField(1)
526 |   parent = _messages.StringField(2, required=True)
527 |   requestId = _messages.StringField(3)
528 |   stream = _messages.MessageField('Stream', 4)
529 |   streamId = _messages.StringField(5)
530 |   validateOnly = _messages.BooleanField(6)
531 |
532 |
533 | class DatastreamProjectsLocationsStreamsDeleteRequest(_messages.Message):
534 |   r"""A DatastreamProjectsLocationsStreamsDeleteRequest object.
535 |
536 |   Fields:
537 |     name: Required. The name of the stream resource to delete.
538 |     requestId: Optional. A request ID to identify requests. Specify a unique
539 |       request ID so that if you must retry your request, the server will know
540 |       to ignore the request if it has already been completed. The server will
541 |       guarantee that for at least 60 minutes after the first request. For
542 |       example, consider a situation where you make an initial request and the
543 |       request times out. If you make the request again with the same request
544 |       ID, the server can check if original operation with the same request ID
545 |       was received, and if so, will ignore the second request. This prevents
546 |       clients from accidentally creating duplicate commitments. The request ID
547 |       must be a valid UUID with the exception that zero UUID is not supported
548 |       (00000000-0000-0000-0000-000000000000).
549 |   """
550 |
551 |   name = _messages.StringField(1, required=True)
552 |   requestId = _messages.StringField(2)
553 |
554 |
555 | class DatastreamProjectsLocationsStreamsFetchErrorsRequest(_messages.Message):
556 | r"""A DatastreamProjectsLocationsStreamsFetchErrorsRequest object.
557 |
558 | Fields:
559 | fetchErrorsRequest: A FetchErrorsRequest resource to be passed as the
560 | request body.
561 | stream: Name of the Stream resource for which to fetch any errors.
562 | """
563 |
564 | fetchErrorsRequest = _messages.MessageField('FetchErrorsRequest', 1)
565 | stream = _messages.StringField(2, required=True)
566 |
567 |
568 | class DatastreamProjectsLocationsStreamsGetRequest(_messages.Message):
569 | r"""A DatastreamProjectsLocationsStreamsGetRequest object.
570 |
571 | Fields:
572 | name: Required. The name of the stream resource to get.
573 | """
574 |
575 | name = _messages.StringField(1, required=True)
576 |
577 |
578 | class DatastreamProjectsLocationsStreamsListRequest(_messages.Message):
579 | r"""A DatastreamProjectsLocationsStreamsListRequest object.
580 |
581 | Fields:
582 | filter: Filter request.
583 | orderBy: Order by fields for the result.
584 | pageSize: Maximum number of streams to return. If unspecified, at most 50
585 | streams will be returned. The maximum value is 1000; values above 1000
586 | will be coerced to 1000.
587 | pageToken: Page token received from a previous `ListStreams` call. Provide
588 | this to retrieve the subsequent page. When paginating, all other
589 | parameters provided to `ListStreams` must match the call that provided
590 | the page token.
591 | parent: Required. The parent that owns the collection of streams.
592 | """
593 |
594 | filter = _messages.StringField(1)
595 | orderBy = _messages.StringField(2)
596 | pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
597 | pageToken = _messages.StringField(4)
598 | parent = _messages.StringField(5, required=True)
599 |
600 |
601 | class DatastreamProjectsLocationsStreamsObjectsGetRequest(_messages.Message):
602 |   r"""A DatastreamProjectsLocationsStreamsObjectsGetRequest object.
603 |
604 |   Fields:
605 |     name: Required. The name of the stream object resource to get.
606 |   """
607 |
608 |   name = _messages.StringField(1, required=True)
609 |
610 |
611 | class DatastreamProjectsLocationsStreamsObjectsListRequest(_messages.Message):
612 | r"""A DatastreamProjectsLocationsStreamsObjectsListRequest object.
613 |
614 | Fields:
615 | filter: Filter request.
616 | orderBy: Order by fields for the result.
617 | pageSize: Maximum number of objects to return. Default is 50. The maximum
618 | value is 1000; values above 1000 will be coerced to 1000.
619 | pageToken: Page token received from a previous `ListStreamObjectsRequest`
620 | call. Provide this to retrieve the subsequent page. When paginating, all
621 | other parameters provided to `ListStreamObjectsRequest` must match the
622 | call that provided the page token.
623 | parent: Required. The parent stream that owns the collection of objects.
624 | """
625 |
626 | filter = _messages.StringField(1)
627 | orderBy = _messages.StringField(2)
628 | pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
629 | pageToken = _messages.StringField(4)
630 | parent = _messages.StringField(5, required=True)
631 |
632 |
633 | class DatastreamProjectsLocationsStreamsObjectsPatchRequest(_messages.Message):
634 |   r"""A DatastreamProjectsLocationsStreamsObjectsPatchRequest object.
635 |
636 |   Fields:
637 |     name: Output only. The object's name.
638 |     requestId: Optional. A request ID to identify requests. Specify a unique
639 |       request ID so that if you must retry your request, the server will know
640 |       to ignore the request if it has already been completed. The server will
641 |       guarantee that for at least 60 minutes since the first request. For
642 |       example, consider a situation where you make an initial request and the
643 |       request times out. If you make the request again with the same request
644 |       ID, the server can check if original operation with the same request ID
645 |       was received, and if so, will ignore the second request. This prevents
646 |       clients from accidentally creating duplicate commitments. The request ID
647 |       must be a valid UUID with the exception that zero UUID is not supported
648 |       (00000000-0000-0000-0000-000000000000).
649 |     streamObject: A StreamObject resource to be passed as the request body.
650 |     updateMask: Optional. Field mask is used to specify the fields to be
651 |       overwritten in the stream resource by the update. The fields specified
652 |       in the update_mask are relative to the resource, not the full request. A
653 |       field will be overwritten if it is in the mask. If the user does not
654 |       provide a mask then all fields will be overwritten.
655 |   """
656 |
657 |   name = _messages.StringField(1, required=True)
658 |   requestId = _messages.StringField(2)
659 |   streamObject = _messages.MessageField('StreamObject', 3)
660 |   updateMask = _messages.StringField(4)
661 |
662 |
663 | class DatastreamProjectsLocationsStreamsPatchRequest(_messages.Message):
664 |   r"""A DatastreamProjectsLocationsStreamsPatchRequest object.
665 |
666 |   Fields:
667 |     force: Optional. Execute the update without validating it.
668 |     name: Output only. The stream's name.
669 |     requestId: Optional. A request ID to identify requests. Specify a unique
670 |       request ID so that if you must retry your request, the server will know
671 |       to ignore the request if it has already been completed. The server will
672 |       guarantee that for at least 60 minutes since the first request. For
673 |       example, consider a situation where you make an initial request and the
674 |       request times out. If you make the request again with the same request
675 |       ID, the server can check if original operation with the same request ID
676 |       was received, and if so, will ignore the second request. This prevents
677 |       clients from accidentally creating duplicate commitments. The request ID
678 |       must be a valid UUID with the exception that zero UUID is not supported
679 |       (00000000-0000-0000-0000-000000000000).
680 |     stream: A Stream resource to be passed as the request body.
681 |     updateMask: Optional. Field mask is used to specify the fields to be
682 |       overwritten in the stream resource by the update. The fields specified
683 |       in the update_mask are relative to the resource, not the full request. A
684 |       field will be overwritten if it is in the mask. If the user does not
685 |       provide a mask then all fields will be overwritten.
686 |     validateOnly: Optional. Only validate the stream with the changes, without
687 |       actually updating it. The default is false.
688 |   """
689 |
690 |   force = _messages.BooleanField(1)
691 |   name = _messages.StringField(2, required=True)
692 |   requestId = _messages.StringField(3)
693 |   stream = _messages.MessageField('Stream', 4)
694 |   updateMask = _messages.StringField(5)
695 |   validateOnly = _messages.BooleanField(6)
696 |
697 |
698 | class DatastreamProjectsLocationsStreamsPauseRequest(_messages.Message):
699 | r"""A DatastreamProjectsLocationsStreamsPauseRequest object.
700 |
701 | Fields:
702 | name: Name of the Stream resource to pause.
703 | pauseStreamRequest: A PauseStreamRequest resource to be passed as the
704 | request body.
705 | """
706 |
707 | name = _messages.StringField(1, required=True)
708 | pauseStreamRequest = _messages.MessageField('PauseStreamRequest', 2)
709 |
710 |
711 | class DatastreamProjectsLocationsStreamsResumeRequest(_messages.Message):
712 | r"""A DatastreamProjectsLocationsStreamsResumeRequest object.
713 |
714 | Fields:
715 | name: Name of the Stream resource to resume.
716 | resumeStreamRequest: A ResumeStreamRequest resource to be passed as the
717 | request body.
718 | """
719 |
720 | name = _messages.StringField(1, required=True)
721 | resumeStreamRequest = _messages.MessageField('ResumeStreamRequest', 2)
722 |
723 |
class DatastreamProjectsLocationsStreamsStartRequest(_messages.Message):
  r"""A DatastreamProjectsLocationsStreamsStartRequest object.

  Wrapper pairing the target stream's resource name with the (empty)
  StartStreamRequest request body.

  Fields:
    name: Name of the Stream resource to start.
    startStreamRequest: A StartStreamRequest resource to be passed as the
      request body.
  """

  # `name` is mandatory: the request targets one specific stream.
  name = _messages.StringField(1, required=True)
  startStreamRequest = _messages.MessageField('StartStreamRequest', 2)
735 |
736 |
class DestinationConfig(_messages.Message):
  r"""The configuration of the stream destination.

  Fields:
    destinationConnectionProfileName: Required. Destination connection profile
      identifier.
    gcsDestinationConfig: A GcsDestinationConfig attribute.
  """

  # Note: "Required" in the docstring is API-level; the serializer itself
  # does not enforce it here (no required=True on the field).
  destinationConnectionProfileName = _messages.StringField(1)
  gcsDestinationConfig = _messages.MessageField('GcsDestinationConfig', 2)
748 |
749 |
class DiscoverConnectionProfileRequest(_messages.Message):
  r"""Request message for 'discover' ConnectionProfile request.

  Fields:
    connectionProfile: An ad-hoc ConnectionProfile configuration.
    connectionProfileName: A reference to an existing ConnectionProfile.
    mysqlRdbms: MySQL RDBMS to enrich with child data objects and metadata.
    oracleRdbms: Oracle RDBMS to enrich with child data objects and metadata.
    recursionDepth: The number of hierarchy levels below the current level to
      be retrieved.
    recursive: Whether to retrieve the full hierarchy of data objects (TRUE)
      or only the current level (FALSE).
  """

  # connectionProfile and connectionProfileName are alternative ways to
  # identify the profile; likewise mysqlRdbms/oracleRdbms select the source
  # type to enrich. Neither alternative is enforced here.
  connectionProfile = _messages.MessageField('ConnectionProfile', 1)
  connectionProfileName = _messages.StringField(2)
  mysqlRdbms = _messages.MessageField('MysqlRdbms', 3)
  oracleRdbms = _messages.MessageField('OracleRdbms', 4)
  # INT32 variant: serialized as a 32-bit integer rather than the default
  # 64-bit encoding.
  recursionDepth = _messages.IntegerField(5, variant=_messages.Variant.INT32)
  recursive = _messages.BooleanField(6)
770 |
771 |
class DiscoverConnectionProfileResponse(_messages.Message):
  r"""A DiscoverConnectionProfileResponse object.

  Only the field matching the requested source type is expected to be
  populated.

  Fields:
    mysqlRdbms: Enriched MySQL RDBMS object.
    oracleRdbms: Enriched Oracle RDBMS object.
  """

  mysqlRdbms = _messages.MessageField('MysqlRdbms', 1)
  oracleRdbms = _messages.MessageField('OracleRdbms', 2)
782 |
783 |
class Empty(_messages.Message):
  r"""A generic empty message that you can re-use to avoid defining duplicated
  empty messages in your APIs. A typical example is to use it as the request
  or the response type of an API method. For instance: service Foo { rpc
  Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The JSON
  representation for `Empty` is empty JSON object `{}`.
  """
  # Intentionally has no fields.
791 |
792 |
793 |
class Error(_messages.Message):
  r"""Represent a user-facing Error.

  Messages:
    DetailsValue: Additional information about the error.

  Fields:
    details: Additional information about the error.
    errorTime: The time when the error occurred.
    errorUuid: A unique identifier for this specific error, allowing it to be
      traced throughout the system in logs and API responses.
    message: A message containing more information about the error that
      occurred.
    reason: A title that explains the reason for the error.
  """

  # Map-like message: unknown JSON keys are captured as additionalProperties
  # entries via the MapUnrecognizedFields decorator.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class DetailsValue(_messages.Message):
    r"""Additional information about the error.

    Messages:
      AdditionalProperty: An additional property for a DetailsValue object.

    Fields:
      additionalProperties: Additional properties of type DetailsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a DetailsValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  details = _messages.MessageField('DetailsValue', 1)
  # errorTime is carried as a string (timestamp in string form), not a
  # structured time type.
  errorTime = _messages.StringField(2)
  errorUuid = _messages.StringField(3)
  message = _messages.StringField(4)
  reason = _messages.StringField(5)
839 |
840 |
class FetchErrorsRequest(_messages.Message):
  r"""Request message for 'FetchErrors' request.

  Intentionally empty: the target stream is identified by the request URL,
  not by body fields.
  """
843 |
844 |
class FetchErrorsResponse(_messages.Message):
  r"""Response message for a 'FetchErrors' response.

  Fields:
    errors: The list of errors on the Stream.
  """

  # repeated=True: serialized as a JSON array of Error objects.
  errors = _messages.MessageField('Error', 1, repeated=True)
853 |
854 |
class FetchStaticIpsResponse(_messages.Message):
  r"""Response message for a 'FetchStaticIps' response.

  Fields:
    nextPageToken: A token that can be sent as `page_token` to retrieve the
      next page. If this field is omitted, there are no subsequent pages.
    staticIps: List of static IPs by account.
  """

  nextPageToken = _messages.StringField(1)
  # IPs are plain strings, not structured address messages.
  staticIps = _messages.StringField(2, repeated=True)
866 |
867 |
class ForwardSshTunnelConnectivity(_messages.Message):
  r"""Forward SSH Tunnel connectivity.

  Fields:
    hostname: Required. Hostname for the SSH tunnel.
    password: Input only. SSH password.
    port: Port for the SSH tunnel, default value is 22.
    privateKey: Input only. SSH private key.
    username: Required. Username for the SSH tunnel.
  """

  hostname = _messages.StringField(1)
  # password and privateKey are alternative credentials; both are
  # "input only" (never returned by the API per the docstring).
  password = _messages.StringField(2)
  port = _messages.IntegerField(3, variant=_messages.Variant.INT32)
  privateKey = _messages.StringField(4)
  username = _messages.StringField(5)
884 |
885 |
class GcsDestinationConfig(_messages.Message):
  r"""Google Cloud Storage destination configuration

  Enums:
    GcsFileFormatValueValuesEnum: File format that data should be written in.
      Deprecated field (b/169501737) - use file_format instead.

  Fields:
    avroFileFormat: AVRO file format configuration.
    fileRotationInterval: The maximum duration for which new events are added
      before a file is closed and a new file is created.
    fileRotationMb: The maximum file size to be saved in the bucket.
    gcsFileFormat: File format that data should be written in. Deprecated
      field (b/169501737) - use file_format instead.
    jsonFileFormat: JSON file format configuration.
    path: Path inside the Cloud Storage bucket to write data to.
  """

  class GcsFileFormatValueValuesEnum(_messages.Enum):
    r"""File format that data should be written in. Deprecated field
    (b/169501737) - use file_format instead.

    Values:
      GCS_FILE_FORMAT_UNSPECIFIED: Unspecified Cloud Storage file format.
      AVRO: Avro file format
    """
    GCS_FILE_FORMAT_UNSPECIFIED = 0
    AVRO = 1

  # Format selection: avroFileFormat / jsonFileFormat are the current
  # mechanism; gcsFileFormat (field 4) is documented as deprecated above.
  avroFileFormat = _messages.MessageField('AvroFileFormat', 1)
  # Interval is carried as a string (e.g. a duration literal), not a number.
  fileRotationInterval = _messages.StringField(2)
  fileRotationMb = _messages.IntegerField(3, variant=_messages.Variant.INT32)
  gcsFileFormat = _messages.EnumField('GcsFileFormatValueValuesEnum', 4)
  jsonFileFormat = _messages.MessageField('JsonFileFormat', 5)
  path = _messages.StringField(6)
921 |
922 |
class GcsProfile(_messages.Message):
  r"""Cloud Storage bucket profile.

  Fields:
    bucketName: Required. The full project and resource path for Cloud Storage
      bucket including the name.
    rootPath: The root path inside the Cloud Storage bucket.
  """

  bucketName = _messages.StringField(1)
  rootPath = _messages.StringField(2)
934 |
935 |
class JsonFileFormat(_messages.Message):
  r"""JSON file format configuration.

  Enums:
    CompressionValueValuesEnum: Compression of the loaded JSON file.
    SchemaFileFormatValueValuesEnum: The schema file format along JSON data
      files.

  Fields:
    compression: Compression of the loaded JSON file.
    schemaFileFormat: The schema file format along JSON data files.
  """

  class CompressionValueValuesEnum(_messages.Enum):
    r"""Compression of the loaded JSON file.

    Values:
      JSON_COMPRESSION_UNSPECIFIED: Unspecified json file compression.
      NO_COMPRESSION: Do not compress JSON file.
      GZIP: Gzip compression.
    """
    JSON_COMPRESSION_UNSPECIFIED = 0
    NO_COMPRESSION = 1
    GZIP = 2

  class SchemaFileFormatValueValuesEnum(_messages.Enum):
    r"""The schema file format along JSON data files.

    Values:
      SCHEMA_FILE_FORMAT_UNSPECIFIED: Unspecified schema file format.
      NO_SCHEMA_FILE: Do not attach schema file.
      AVRO_SCHEMA_FILE: Avro schema format.
    """
    SCHEMA_FILE_FORMAT_UNSPECIFIED = 0
    NO_SCHEMA_FILE = 1
    AVRO_SCHEMA_FILE = 2

  compression = _messages.EnumField('CompressionValueValuesEnum', 1)
  schemaFileFormat = _messages.EnumField('SchemaFileFormatValueValuesEnum', 2)
975 |
976 |
class ListConnectionProfilesResponse(_messages.Message):
  r"""A ListConnectionProfilesResponse object.

  Fields:
    connectionProfiles: List of connection profiles.
    nextPageToken: A token, which can be sent as `page_token` to retrieve the
      next page. If this field is omitted, there are no subsequent pages.
    unreachable: Locations that could not be reached.
  """

  connectionProfiles = _messages.MessageField('ConnectionProfile', 1, repeated=True)
  nextPageToken = _messages.StringField(2)
  unreachable = _messages.StringField(3, repeated=True)
990 |
991 |
class ListLocationsResponse(_messages.Message):
  r"""The response message for Locations.ListLocations.

  Fields:
    locations: A list of locations that matches the specified filter in the
      request.
    nextPageToken: The standard List next-page token.
  """

  locations = _messages.MessageField('Location', 1, repeated=True)
  nextPageToken = _messages.StringField(2)
1003 |
1004 |
class ListOperationsResponse(_messages.Message):
  r"""The response message for Operations.ListOperations.

  Fields:
    nextPageToken: The standard List next-page token.
    operations: A list of operations that matches the specified filter in the
      request.
  """

  # Note: unlike ListLocationsResponse, here the page token is field 1 and
  # the payload is field 2 — the numbers follow the API, not a local pattern.
  nextPageToken = _messages.StringField(1)
  operations = _messages.MessageField('Operation', 2, repeated=True)
1016 |
1017 |
class ListPrivateConnectionsResponse(_messages.Message):
  r"""A ListPrivateConnectionsResponse object.

  Fields:
    nextPageToken: A token, which can be sent as `page_token` to retrieve the
      next page. If this field is omitted, there are no subsequent pages.
    privateConnections: List of private connectivity configurations.
    unreachable: Locations that could not be reached.
  """

  nextPageToken = _messages.StringField(1)
  privateConnections = _messages.MessageField('PrivateConnection', 2, repeated=True)
  unreachable = _messages.StringField(3, repeated=True)
1031 |
1032 |
class ListRoutesResponse(_messages.Message):
  r"""Route list response.

  Fields:
    nextPageToken: A token, which can be sent as `page_token` to retrieve the
      next page. If this field is omitted, there are no subsequent pages.
    routes: List of Routes.
    unreachable: Locations that could not be reached.
  """

  nextPageToken = _messages.StringField(1)
  routes = _messages.MessageField('Route', 2, repeated=True)
  unreachable = _messages.StringField(3, repeated=True)
1046 |
1047 |
class ListStreamObjectsResponse(_messages.Message):
  r"""Response containing the objects for a stream.

  Fields:
    nextPageToken: A token, which can be sent as `page_token` to retrieve the
      next page.
    streamObjects: List of stream objects.
    unreachable: Locations that could not be reached.
  """

  nextPageToken = _messages.StringField(1)
  streamObjects = _messages.MessageField('StreamObject', 2, repeated=True)
  unreachable = _messages.StringField(3, repeated=True)
1061 |
1062 |
class ListStreamsResponse(_messages.Message):
  r"""A ListStreamsResponse object.

  Fields:
    nextPageToken: A token, which can be sent as `page_token` to retrieve the
      next page. If this field is omitted, there are no subsequent pages.
    streams: List of streams.
    unreachable: Locations that could not be reached.
  """

  nextPageToken = _messages.StringField(1)
  streams = _messages.MessageField('Stream', 2, repeated=True)
  unreachable = _messages.StringField(3, repeated=True)
1076 |
1077 |
class Location(_messages.Message):
  r"""A resource that represents Google Cloud Platform location.

  Messages:
    LabelsValue: Cross-service attributes for the location. For example
      {"cloud.googleapis.com/region": "us-east1"}
    MetadataValue: Service-specific metadata. For example the available
      capacity at the given location.

  Fields:
    displayName: The friendly name for this location, typically a nearby city
      name. For example, "Tokyo".
    labels: Cross-service attributes for the location. For example
      {"cloud.googleapis.com/region": "us-east1"}
    locationId: The canonical id for this location. For example: `"us-east1"`.
    metadata: Service-specific metadata. For example the available capacity at
      the given location.
    name: Resource name for the location, which may vary between
      implementations. For example: `"projects/example-project/locations/us-
      east1"`
  """

  # String->string map: unknown JSON keys become additionalProperties
  # entries via the MapUnrecognizedFields decorator.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class LabelsValue(_messages.Message):
    r"""Cross-service attributes for the location. For example
    {"cloud.googleapis.com/region": "us-east1"}

    Messages:
      AdditionalProperty: An additional property for a LabelsValue object.

    Fields:
      additionalProperties: Additional properties of type LabelsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a LabelsValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  # String->arbitrary-JSON map: values are extra_types.JsonValue rather
  # than plain strings.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class MetadataValue(_messages.Message):
    r"""Service-specific metadata. For example the available capacity at the
    given location.

    Messages:
      AdditionalProperty: An additional property for a MetadataValue object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a MetadataValue object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  displayName = _messages.StringField(1)
  labels = _messages.MessageField('LabelsValue', 2)
  locationId = _messages.StringField(3)
  metadata = _messages.MessageField('MetadataValue', 4)
  name = _messages.StringField(5)
1156 |
1157 |
class MysqlColumn(_messages.Message):
  r"""MySQL Column.

  Fields:
    collation: Column collation.
    columnName: Column name.
    dataType: The MySQL data type. Full data types list can be found here:
      https://dev.mysql.com/doc/refman/8.0/en/data-types.html
    length: Column length.
    nullable: Whether or not the column can accept a null value.
    ordinalPosition: The ordinal position of the column in the table.
    primaryKey: Whether or not the column represents a primary key.
  """

  collation = _messages.StringField(1)
  columnName = _messages.StringField(2)
  # dataType is the raw MySQL type name as a string, not an enum.
  dataType = _messages.StringField(3)
  length = _messages.IntegerField(4, variant=_messages.Variant.INT32)
  nullable = _messages.BooleanField(5)
  ordinalPosition = _messages.IntegerField(6, variant=_messages.Variant.INT32)
  primaryKey = _messages.BooleanField(7)
1179 |
1180 |
class MysqlDatabase(_messages.Message):
  r"""MySQL database.

  Fields:
    databaseName: Database name.
    mysqlTables: Tables in the database.
  """

  databaseName = _messages.StringField(1)
  mysqlTables = _messages.MessageField('MysqlTable', 2, repeated=True)
1191 |
1192 |
class MysqlProfile(_messages.Message):
  r"""MySQL database profile.

  Fields:
    hostname: Required. Hostname for the MySQL connection.
    password: Required. Input only. Password for the MySQL connection.
    port: Port for the MySQL connection, default value is 3306.
    sslConfig: SSL configuration for the MySQL connection.
    username: Required. Username for the MySQL connection.
  """

  hostname = _messages.StringField(1)
  # "Input only" per the docstring: the API accepts but never returns it.
  password = _messages.StringField(2)
  port = _messages.IntegerField(3, variant=_messages.Variant.INT32)
  sslConfig = _messages.MessageField('MysqlSslConfig', 4)
  username = _messages.StringField(5)
1209 |
1210 |
class MysqlRdbms(_messages.Message):
  r"""MySQL database structure.

  Top of the MySQL object hierarchy: databases -> tables -> columns.

  Fields:
    mysqlDatabases: MySQL databases on the server.
  """

  mysqlDatabases = _messages.MessageField('MysqlDatabase', 1, repeated=True)
1219 |
1220 |
class MysqlSourceConfig(_messages.Message):
  r"""MySQL source configuration.

  Fields:
    allowlist: MySQL objects to include in the stream.
    rejectlist: MySQL objects to exclude from the stream.
  """

  # Both lists use the same MysqlRdbms hierarchy to name objects.
  allowlist = _messages.MessageField('MysqlRdbms', 1)
  rejectlist = _messages.MessageField('MysqlRdbms', 2)
1231 |
1232 |
class MysqlSslConfig(_messages.Message):
  r"""MySQL SSL configuration information.

  Fields:
    caCertificate: Input only. PEM-encoded certificate of the CA that signed
      the source database server's certificate.
    caCertificateSet: Output only. Indicates whether the ca_certificate field
      is set.
    clientCertificate: Input only. PEM-encoded certificate that will be used
      by the replica to authenticate against the source database server. If
      this field is used then the 'client_key' and the 'ca_certificate' fields
      are mandatory.
    clientCertificateSet: Output only. Indicates whether the
      client_certificate field is set.
    clientKey: Input only. PEM-encoded private key associated with the Client
      Certificate. If this field is used then the 'client_certificate' and the
      'ca_certificate' fields are mandatory.
    clientKeySet: Output only. Indicates whether the client_key field is set.
  """

  # Each secret field (input only) is paired with a boolean *Set flag
  # (output only) so responses can report presence without echoing secrets.
  caCertificate = _messages.StringField(1)
  caCertificateSet = _messages.BooleanField(2)
  clientCertificate = _messages.StringField(3)
  clientCertificateSet = _messages.BooleanField(4)
  clientKey = _messages.StringField(5)
  clientKeySet = _messages.BooleanField(6)
1259 |
1260 |
class MysqlTable(_messages.Message):
  r"""MySQL table.

  Fields:
    mysqlColumns: MySQL columns in the database. When unspecified as part of
      include/exclude lists, includes/excludes everything.
    tableName: Table name.
  """

  mysqlColumns = _messages.MessageField('MysqlColumn', 1, repeated=True)
  tableName = _messages.StringField(2)
1272 |
1273 |
class NoConnectivitySettings(_messages.Message):
  r"""No connectivity settings.

  Empty marker message selecting the "no connectivity method" option.
  """
1276 |
1277 |
class Operation(_messages.Message):
  r"""This resource represents a long-running operation that is the result of
  a network API call.

  Messages:
    MetadataValue: Service-specific metadata associated with the operation. It
      typically contains progress information and common metadata such as
      create time. Some services might not provide such metadata. Any method
      that returns a long-running operation should document the metadata type,
      if any.
    ResponseValue: The normal response of the operation in case of success. If
      the original method returns no data on success, such as `Delete`, the
      response is `google.protobuf.Empty`. If the original method is standard
      `Get`/`Create`/`Update`, the response should be the resource. For other
      methods, the response should have the type `XxxResponse`, where `Xxx` is
      the original method name. For example, if the original method name is
      `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.

  Fields:
    done: If the value is `false`, it means the operation is still in
      progress. If `true`, the operation is completed, and either `error` or
      `response` is available.
    error: The error result of the operation in case of failure or
      cancellation.
    metadata: Service-specific metadata associated with the operation. It
      typically contains progress information and common metadata such as
      create time. Some services might not provide such metadata. Any method
      that returns a long-running operation should document the metadata type,
      if any.
    name: The server-assigned name, which is only unique within the same
      service that originally returns it. If you use the default HTTP mapping,
      the `name` should be a resource name ending with
      `operations/{unique_id}`.
    response: The normal response of the operation in case of success. If the
      original method returns no data on success, such as `Delete`, the
      response is `google.protobuf.Empty`. If the original method is standard
      `Get`/`Create`/`Update`, the response should be the resource. For other
      methods, the response should have the type `XxxResponse`, where `Xxx` is
      the original method name. For example, if the original method name is
      `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
  """

  # Open-ended map carrying the service-specific metadata payload; values
  # are arbitrary JSON (extra_types.JsonValue).
  @encoding.MapUnrecognizedFields('additionalProperties')
  class MetadataValue(_messages.Message):
    r"""Service-specific metadata associated with the operation. It typically
    contains progress information and common metadata such as create time.
    Some services might not provide such metadata. Any method that returns a
    long-running operation should document the metadata type, if any.

    Messages:
      AdditionalProperty: An additional property for a MetadataValue object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a MetadataValue object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  # Same open-ended map shape for the success payload.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class ResponseValue(_messages.Message):
    r"""The normal response of the operation in case of success. If the
    original method returns no data on success, such as `Delete`, the response
    is `google.protobuf.Empty`. If the original method is standard
    `Get`/`Create`/`Update`, the response should be the resource. For other
    methods, the response should have the type `XxxResponse`, where `Xxx` is
    the original method name. For example, if the original method name is
    `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.

    Messages:
      AdditionalProperty: An additional property for a ResponseValue object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a ResponseValue object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  done = _messages.BooleanField(1)
  # Exactly one of `error` / `response` is meaningful once done is True
  # (per the docstring); the serializer does not enforce the oneof.
  error = _messages.MessageField('Status', 2)
  metadata = _messages.MessageField('MetadataValue', 3)
  name = _messages.StringField(4)
  response = _messages.MessageField('ResponseValue', 5)
1384 |
1385 |
class OperationMetadata(_messages.Message):
  r"""Represents the metadata of the long-running operation.

  Fields:
    apiVersion: Output only. API version used to start the operation.
    createTime: Output only. The time the operation was created.
    endTime: Output only. The time the operation finished running.
    requestedCancellation: Output only. Identifies whether the user has
      requested cancellation of the operation. Operations that have
      successfully been cancelled have Operation.error value with a
      google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
    statusMessage: Output only. Human-readable status of the operation, if
      any.
    target: Output only. Server-defined resource path for the target of the
      operation.
    validationResult: Output only. Results of executed validations if there
      are any.
    verb: Output only. Name of the verb executed by the operation.
  """

  apiVersion = _messages.StringField(1)
  # Timestamps are carried as strings, not structured time types.
  createTime = _messages.StringField(2)
  endTime = _messages.StringField(3)
  requestedCancellation = _messages.BooleanField(4)
  statusMessage = _messages.StringField(5)
  target = _messages.StringField(6)
  validationResult = _messages.MessageField('ValidationResult', 7)
  verb = _messages.StringField(8)
1414 |
1415 |
class OracleColumn(_messages.Message):
  r"""Oracle Column.

  Fields:
    columnName: Column name.
    dataType: The Oracle data type.
    encoding: Column encoding.
    length: Column length.
    nullable: Whether or not the column can accept a null value.
    ordinalPosition: The ordinal position of the column in the table.
    precision: Column precision.
    primaryKey: Whether or not the column represents a primary key.
    scale: Column scale.
  """

  columnName = _messages.StringField(1)
  # dataType is the raw Oracle type name as a string, not an enum.
  dataType = _messages.StringField(2)
  encoding = _messages.StringField(3)
  length = _messages.IntegerField(4, variant=_messages.Variant.INT32)
  nullable = _messages.BooleanField(5)
  ordinalPosition = _messages.IntegerField(6, variant=_messages.Variant.INT32)
  precision = _messages.IntegerField(7, variant=_messages.Variant.INT32)
  primaryKey = _messages.BooleanField(8)
  scale = _messages.IntegerField(9, variant=_messages.Variant.INT32)
1440 |
1441 |
class OracleProfile(_messages.Message):
  r"""Oracle database profile.

  Messages:
    ConnectionAttributesValue: Connection string attributes

  Fields:
    connectionAttributes: Connection string attributes
    databaseService: Required. Database for the Oracle connection.
    hostname: Required. Hostname for the Oracle connection.
    password: Required. Password for the Oracle connection.
    port: Port for the Oracle connection, default value is 1521.
    username: Required. Username for the Oracle connection.
  """

  # String->string map of free-form connection-string attributes.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class ConnectionAttributesValue(_messages.Message):
    r"""Connection string attributes

    Messages:
      AdditionalProperty: An additional property for a
        ConnectionAttributesValue object.

    Fields:
      additionalProperties: Additional properties of type
        ConnectionAttributesValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a ConnectionAttributesValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  connectionAttributes = _messages.MessageField('ConnectionAttributesValue', 1)
  databaseService = _messages.StringField(2)
  hostname = _messages.StringField(3)
  password = _messages.StringField(4)
  port = _messages.IntegerField(5, variant=_messages.Variant.INT32)
  username = _messages.StringField(6)
1489 |
1490 |
class OracleRdbms(_messages.Message):
  r"""Oracle database structure.

  Top of the Oracle object hierarchy: schemas -> tables -> columns.

  Fields:
    oracleSchemas: Oracle schemas/databases in the database server.
  """

  oracleSchemas = _messages.MessageField('OracleSchema', 1, repeated=True)
1499 |
1500 |
class OracleSchema(_messages.Message):
  r"""Oracle schema.

  Fields:
    oracleTables: Tables in the schema.
    schemaName: Schema name.
  """

  oracleTables = _messages.MessageField('OracleTable', 1, repeated=True)
  schemaName = _messages.StringField(2)
1511 |
1512 |
class OracleSourceConfig(_messages.Message):
  r"""Oracle data source configuration.

  Fields:
    allowlist: Oracle objects to include in the stream.
    rejectlist: Oracle objects to exclude from the stream.
  """

  # Both lists use the same OracleRdbms hierarchy to name objects.
  allowlist = _messages.MessageField('OracleRdbms', 1)
  rejectlist = _messages.MessageField('OracleRdbms', 2)
1523 |
1524 |
class OracleTable(_messages.Message):
  r"""Oracle table.

  Fields:
    oracleColumns: Oracle columns in the schema. When unspecified as part of
      include/exclude lists, includes/excludes everything.
    tableName: Table name.
  """

  oracleColumns = _messages.MessageField('OracleColumn', 1, repeated=True)
  tableName = _messages.StringField(2)
1536 |
1537 |
class PauseStreamRequest(_messages.Message):
  r"""Request message for 'PauseStream' request.

  Intentionally empty: the target stream is identified by the request URL,
  not by body fields.
  """
1540 |
1541 |
class PrivateConnection(_messages.Message):
  r"""The PrivateConnection resource is used to establish private connectivity
  between DataStream and a customer's network.

  Enums:
    StateValueValuesEnum: Output only. The state of the Private Connection.

  Messages:
    LabelsValue: Labels.

  Fields:
    createTime: Output only. The create time of the resource.
    displayName: Required. Display name.
    error: Output only. In case of error, the details of the error in a user-
      friendly format.
    labels: Labels.
    name: Output only. The resource's name.
    state: Output only. The state of the Private Connection.
    updateTime: Output only. The update time of the resource.
    vpcPeeringConfig: VPC Peering Config
  """

  class StateValueValuesEnum(_messages.Enum):
    r"""Output only. The state of the Private Connection.

    Values:
      STATE_UNSPECIFIED: Unspecified state.
      CREATING: The private connection is in creation state - creating
        resources.
      CREATED: The private connection has been created with all of it's
        resources.
      FAILED: The private connection creation has failed.
    """
    STATE_UNSPECIFIED = 0
    CREATING = 1
    CREATED = 2
    FAILED = 3

  # String->string label map: unknown JSON keys become additionalProperties
  # entries via the MapUnrecognizedFields decorator.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class LabelsValue(_messages.Message):
    r"""Labels.

    Messages:
      AdditionalProperty: An additional property for a LabelsValue object.

    Fields:
      additionalProperties: Additional properties of type LabelsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a LabelsValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  createTime = _messages.StringField(1)
  displayName = _messages.StringField(2)
  error = _messages.MessageField('Error', 3)
  labels = _messages.MessageField('LabelsValue', 4)
  name = _messages.StringField(5)
  state = _messages.EnumField('StateValueValuesEnum', 6)
  updateTime = _messages.StringField(7)
  vpcPeeringConfig = _messages.MessageField('VpcPeeringConfig', 8)
1612 |
1613 |
class PrivateConnectivity(_messages.Message):
  r"""Private Connectivity.

  Fields:
    privateConnectionName: Name of the PrivateConnection resource to use.
  """

  privateConnectionName = _messages.StringField(1)
1622 |
1623 |
# Intentionally empty message: the request carries no fields.
class ResumeStreamRequest(_messages.Message):
  r"""Request message for 'ResumeStream' request."""
1626 |
1627 |
class Route(_messages.Message):
  r"""The Route resource is the child of the PrivateConnection resource. It
  is used to define a route for a PrivateConnection setup.

  Messages:
    LabelsValue: Labels.

  Fields:
    createTime: Output only. The create time of the resource.
    destinationAddress: Required. Destination address for connection
    destinationPort: Destination port for connection
    displayName: Required. Display name.
    labels: Labels.
    name: Output only. The resource's name.
    updateTime: Output only. The update time of the resource.
  """

  # Maps arbitrary label keys into AdditionalProperty entries so unknown
  # JSON keys survive a decode/encode round trip.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class LabelsValue(_messages.Message):
    r"""Labels.

    Messages:
      AdditionalProperty: An additional property for a LabelsValue object.

    Fields:
      additionalProperties: Additional properties of type LabelsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a LabelsValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  createTime = _messages.StringField(1)
  destinationAddress = _messages.StringField(2)
  # INT32 variant: the port is serialized as a JSON number, not a string.
  destinationPort = _messages.IntegerField(3, variant=_messages.Variant.INT32)
  displayName = _messages.StringField(4)
  labels = _messages.MessageField('LabelsValue', 5)
  name = _messages.StringField(6)
  updateTime = _messages.StringField(7)
1676 |
1677 |
class SourceConfig(_messages.Message):
  r"""The configuration of the stream source.

  Fields:
    mysqlSourceConfig: MySQL data source configuration
    oracleSourceConfig: Oracle data source configuration
    sourceConnectionProfileName: Required. Source connection profile
      identifier.
  """

  # mysqlSourceConfig and oracleSourceConfig are alternatives: one per
  # source database type.
  mysqlSourceConfig = _messages.MessageField('MysqlSourceConfig', 1)
  oracleSourceConfig = _messages.MessageField('OracleSourceConfig', 2)
  sourceConnectionProfileName = _messages.StringField(3)
1691 |
1692 |
class StandardQueryParameters(_messages.Message):
  r"""Query parameters accepted by all methods.

  Enums:
    FXgafvValueValuesEnum: V1 error format.
    AltValueValuesEnum: Data format for response.

  Fields:
    f__xgafv: V1 error format.
    access_token: OAuth access token.
    alt: Data format for response.
    callback: JSONP
    fields: Selector specifying which fields to include in a partial response.
    key: API key. Your API key identifies your project and provides you with
      API access, quota, and reports. Required unless you provide an OAuth 2.0
      token.
    oauth_token: OAuth 2.0 token for the current user.
    prettyPrint: Returns response with indentations and line breaks.
    quotaUser: Available to use for quota purposes for server-side
      applications. Can be any arbitrary string assigned to a user, but should
      not exceed 40 characters.
    trace: A tracing token of the form "token:" to include in api
      requests.
    uploadType: Legacy upload protocol for media (e.g. "media", "multipart").
    upload_protocol: Upload protocol for media (e.g. "raw", "multipart").
  """

  class AltValueValuesEnum(_messages.Enum):
    r"""Data format for response.

    Values:
      json: Responses with Content-Type of application/json
      media: Media download with context-dependent Content-Type
      proto: Responses with Content-Type of application/x-protobuf
    """
    json = 0
    media = 1
    proto = 2

  class FXgafvValueValuesEnum(_messages.Enum):
    r"""V1 error format.

    Values:
      _1: v1 error format
      _2: v2 error format
    """
    # Leading underscores because Python identifiers cannot start with a
    # digit; the custom JSON enum mappings at module bottom map these to
    # the wire values '1' and '2'.
    _1 = 0
    _2 = 1

  # 'f__xgafv' is mapped to the '$.xgafv' query parameter at module bottom.
  f__xgafv = _messages.EnumField('FXgafvValueValuesEnum', 1)
  access_token = _messages.StringField(2)
  alt = _messages.EnumField('AltValueValuesEnum', 3, default='json')
  callback = _messages.StringField(4)
  fields = _messages.StringField(5)
  key = _messages.StringField(6)
  oauth_token = _messages.StringField(7)
  prettyPrint = _messages.BooleanField(8, default=True)
  quotaUser = _messages.StringField(9)
  trace = _messages.StringField(10)
  uploadType = _messages.StringField(11)
  upload_protocol = _messages.StringField(12)
1754 |
1755 |
# Intentionally empty message: the request carries no fields.
class StartStreamRequest(_messages.Message):
  r"""Request message for 'StartStream' request."""
1758 |
1759 |
# Intentionally empty message: presence of this field alone selects the
# static-IP connectivity method.
class StaticServiceIpConnectivity(_messages.Message):
  r"""Static IP address connectivity."""
1762 |
1763 |
class Status(_messages.Message):
  r"""The `Status` type defines a logical error model that is suitable for
  different programming environments, including REST APIs and RPC APIs. It is
  used by [gRPC](https://github.com/grpc). Each `Status` message contains
  three pieces of data: error code, error message, and error details. You can
  find out more about this error model and how to work with it in the [API
  Design Guide](https://cloud.google.com/apis/design/errors).

  Messages:
    DetailsValueListEntry: A DetailsValueListEntry object.

  Fields:
    code: The status code, which should be an enum value of google.rpc.Code.
    details: A list of messages that carry the error details. There is a
      common set of message types for APIs to use.
    message: A developer-facing error message, which should be in English. Any
      user-facing error message should be localized and sent in the
      google.rpc.Status.details field, or localized by the client.
  """

  # Detail entries are open-ended JSON objects; unrecognized keys are kept
  # in additionalProperties rather than dropped.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class DetailsValueListEntry(_messages.Message):
    r"""A DetailsValueListEntry object.

    Messages:
      AdditionalProperty: An additional property for a DetailsValueListEntry
        object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a DetailsValueListEntry object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  code = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  details = _messages.MessageField('DetailsValueListEntry', 2, repeated=True)
  message = _messages.StringField(3)
1813 |
1814 |
class Stream(_messages.Message):
  r"""A Stream object.

  Enums:
    StateValueValuesEnum: The state of the stream.

  Messages:
    LabelsValue: Labels.

  Fields:
    backfillAll: Automatically backfill objects included in the stream source
      configuration. Specific objects can be excluded.
    backfillNone: Do not automatically backfill any objects.
    createTime: Output only. The creation time of the stream.
    destinationConfig: Required. Destination connection profile configuration.
    displayName: Required. Display name.
    errors: Output only. Errors on the Stream.
    labels: Labels.
    name: Output only. The stream's name.
    sourceConfig: Required. Source connection profile configuration.
    state: The state of the stream.
    updateTime: Output only. The last update time of the stream.
  """

  class StateValueValuesEnum(_messages.Enum):
    r"""The state of the stream.

    Values:
      STATE_UNSPECIFIED: Unspecified stream state.
      CREATED: The stream has been created.
      RUNNING: The stream is running.
      PAUSED: The stream is paused.
      MAINTENANCE: The stream is in maintenance mode. Updates are rejected on
        the resource in this state.
      FAILED: The stream is experiencing an error that is preventing data from
        being streamed.
      FAILED_PERMANENTLY: The stream has experienced a terminal failure.
      STARTING: The stream is starting, but not yet running.
      DRAINING: The Stream is no longer reading new events, but still writing
        events in the buffer.
    """
    STATE_UNSPECIFIED = 0
    CREATED = 1
    RUNNING = 2
    PAUSED = 3
    MAINTENANCE = 4
    FAILED = 5
    FAILED_PERMANENTLY = 6
    STARTING = 7
    DRAINING = 8

  # Maps arbitrary label keys into AdditionalProperty entries so unknown
  # JSON keys survive a decode/encode round trip.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class LabelsValue(_messages.Message):
    r"""Labels.

    Messages:
      AdditionalProperty: An additional property for a LabelsValue object.

    Fields:
      additionalProperties: Additional properties of type LabelsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a LabelsValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  # backfillAll and backfillNone are alternative backfill strategies.
  backfillAll = _messages.MessageField('BackfillAllStrategy', 1)
  backfillNone = _messages.MessageField('BackfillNoneStrategy', 2)
  createTime = _messages.StringField(3)
  destinationConfig = _messages.MessageField('DestinationConfig', 4)
  displayName = _messages.StringField(5)
  errors = _messages.MessageField('Error', 6, repeated=True)
  labels = _messages.MessageField('LabelsValue', 7)
  name = _messages.StringField(8)
  sourceConfig = _messages.MessageField('SourceConfig', 9)
  state = _messages.EnumField('StateValueValuesEnum', 10)
  updateTime = _messages.StringField(11)
1901 |
1902 |
class StreamObject(_messages.Message):
  r"""A specific stream object (e.g a specific DB table).

  Messages:
    LabelsValue: Labels.

  Fields:
    createTime: Output only. The creation time of the object.
    displayName: Required. Display name.
    errors: Output only. Active errors on the object.
    labels: Labels.
    name: Output only. The object's name.
    updateTime: Output only. The last update time of the object.
  """

  # Maps arbitrary label keys into AdditionalProperty entries so unknown
  # JSON keys survive a decode/encode round trip.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class LabelsValue(_messages.Message):
    r"""Labels.

    Messages:
      AdditionalProperty: An additional property for a LabelsValue object.

    Fields:
      additionalProperties: Additional properties of type LabelsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a LabelsValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  createTime = _messages.StringField(1)
  displayName = _messages.StringField(2)
  errors = _messages.MessageField('Error', 3, repeated=True)
  labels = _messages.MessageField('LabelsValue', 4)
  name = _messages.StringField(5)
  updateTime = _messages.StringField(6)
1948 |
1949 |
class Validation(_messages.Message):
  r"""A Validation object.

  Enums:
    StatusValueValuesEnum: Validation execution status.

  Fields:
    code: A custom code identifying this validation.
    description: A short description of the validation.
    message: Messages reflecting the validation results.
    status: Validation execution status.
  """

  class StatusValueValuesEnum(_messages.Enum):
    r"""Validation execution status.

    Values:
      STATUS_UNSPECIFIED: Unspecified status.
      NOT_EXECUTED: Validation did not execute.
      FAILED: Validation failed.
      PASSED: Validation passed.
    """
    STATUS_UNSPECIFIED = 0
    NOT_EXECUTED = 1
    FAILED = 2
    PASSED = 3

  code = _messages.StringField(1)
  description = _messages.StringField(2)
  # Repeated: a single validation can produce multiple result messages.
  message = _messages.MessageField('ValidationMessage', 3, repeated=True)
  status = _messages.EnumField('StatusValueValuesEnum', 4)
1981 |
1982 |
class ValidationMessage(_messages.Message):
  r"""Represent user-facing validation result message.

  Enums:
    LevelValueValuesEnum: Message severity level (warning or error).

  Messages:
    MetadataValue: Additional metadata related to the result.

  Fields:
    code: A custom code identifying this specific message.
    level: Message severity level (warning or error).
    message: The result of the validation.
    metadata: Additional metadata related to the result.
  """

  class LevelValueValuesEnum(_messages.Enum):
    r"""Message severity level (warning or error).

    Values:
      LEVEL_UNSPECIFIED: Unspecified level.
      WARNING: Potentially cause issues with the Stream.
      ERROR: Definitely cause issues with the Stream.
    """
    LEVEL_UNSPECIFIED = 0
    WARNING = 1
    ERROR = 2

  # Maps arbitrary metadata keys into AdditionalProperty entries so unknown
  # JSON keys survive a decode/encode round trip.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class MetadataValue(_messages.Message):
    r"""Additional metadata related to the result.

    Messages:
      AdditionalProperty: An additional property for a MetadataValue object.

    Fields:
      additionalProperties: Additional properties of type MetadataValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a MetadataValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  code = _messages.StringField(1)
  level = _messages.EnumField('LevelValueValuesEnum', 2)
  message = _messages.StringField(3)
  metadata = _messages.MessageField('MetadataValue', 4)
2039 |
2040 |
class ValidationResult(_messages.Message):
  r"""Contains the current validation results.

  Fields:
    validations: A list of validations (includes both executed as well as not
      executed validations).
  """

  validations = _messages.MessageField('Validation', 1, repeated=True)
2050 |
2051 |
class VpcPeeringConfig(_messages.Message):
  r"""The VPC Peering configuration is used to create VPC peering between
  DataStream and the consumer's VPC.

  Fields:
    subnet: Required. A free subnet for peering. (CIDR of /29)
      TODO(b/172995841) add validators.
    vpcName: Required. fully qualified name of the VPC DataStream will peer
      to.
  """

  subnet = _messages.StringField(1)
  vpcName = _messages.StringField(2)
2065 |
2066 |
2067 | encoding.AddCustomJsonFieldMapping(
2068 | StandardQueryParameters, 'f__xgafv', '$.xgafv')
2069 | encoding.AddCustomJsonEnumMapping(
2070 | StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1')
2071 | encoding.AddCustomJsonEnumMapping(
2072 | StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2')
2073 |
--------------------------------------------------------------------------------