├── .travis.yml
├── docs
│   ├── images
│   │   ├── 1.png
│   │   ├── 2.png
│   │   └── gke.png
│   ├── Summary.md
│   ├── Translators-JP.md
│   └── Translators.md
├── src
│   ├── main
│   │   ├── resources
│   │   │   └── application.conf
│   │   └── java
│   │       └── com
│   │           └── datastax
│   │               └── powertools
│   │                   └── dcp
│   │                       ├── TranslatorType.java
│   │                       ├── api
│   │                       │   ├── DynamoStatementType.java
│   │                       │   └── DynamoDBResponse.java
│   │                       ├── managed
│   │                       │   ├── ddbstreams
│   │                       │   │   ├── StreamsRecordProcessorFactory.java
│   │                       │   │   ├── StreamsRecordProcessor.java
│   │                       │   │   └── DynamoStreamsManager.java
│   │                       │   ├── dynamodb
│   │                       │   │   └── DynamoManager.java
│   │                       │   └── dse
│   │                       │       ├── CassandraStatements.java
│   │                       │       ├── CassandraManager.java
│   │                       │       ├── DockerHelper.java
│   │                       │       └── TableDef.java
│   │                       ├── DynamoDSETranslator.java
│   │                       ├── DCProxyApplication.java
│   │                       ├── DCProxyConfiguration.java
│   │                       └── resources
│   │                           └── DCProxyResource.java
│   └── test
│       └── java
│           └── com
│               └── datastax
│                   └── powertools
│                       └── dcp
│                           ├── resources
│                           │   └── DynamoDBResourceTest.java
│                           ├── AbstractDCPTest.java
│                           ├── CRUDTest.java
│                           ├── example
│                           │   ├── CatalogItem.java
│                           │   └── CatalogIntegrationTest.java
│                           └── DynamoDSETranslatorJSONBlobTest.java
├── migrate
│   ├── runit.sh.oss
│   ├── .idea
│   │   ├── sbt.xml
│   │   ├── encodings.xml
│   │   ├── scala_compiler.xml
│   │   ├── modules.xml
│   │   ├── hydra.xml
│   │   ├── misc.xml
│   │   ├── compiler.xml
│   │   └── workspace.xml
│   ├── runit.sh
│   ├── src
│   │   └── main
│   │       ├── resources
│   │       │   └── create_schema.cql
│   │       └── scala
│   │           └── com
│   │               └── datastax
│   │                   └── powertools
│   │                       └── DynamoReader.scala
│   ├── rundump.sh
│   ├── README-JP.md
│   ├── README.md
│   ├── pom.xml.dse
│   └── pom.xml
├── conf
│   └── dynamo-cassandra-proxy.yaml.template
├── Dockerfile
├── docker-compose.yaml
├── common
│   └── cassandra
│       └── conf-dir
│           └── resources
│               └── cassandra
│                   └── conf
│                       └── cassandra-rackdc.properties
├── .gitignore
├── README-JP.md
├── k8s-local
│   └── proxy-suite.yaml
├── gke
│   └── proxy-suite.yaml
├── README.md
├── pom.xml
└── LICENSE
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: java
2 | jdk:
3 | - oraclejdk11
4 | services:
5 | - docker
6 |
--------------------------------------------------------------------------------
/docs/images/1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/datastax/dynamo-cassandra-proxy/HEAD/docs/images/1.png
--------------------------------------------------------------------------------
/docs/images/2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/datastax/dynamo-cassandra-proxy/HEAD/docs/images/2.png
--------------------------------------------------------------------------------
/src/main/resources/application.conf:
--------------------------------------------------------------------------------
1 | datastax-java-driver {
2 | advanced.reconnect-on-init = true
3 | }
--------------------------------------------------------------------------------
/docs/images/gke.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/datastax/dynamo-cassandra-proxy/HEAD/docs/images/gke.png
--------------------------------------------------------------------------------
/migrate/runit.sh.oss:
--------------------------------------------------------------------------------
1 | dse spark-submit --class com.datastax.powertools.migrate.dynamoDB ./target/dynamoDB-0.1.jar JPH_test last_name first_name
2 |
--------------------------------------------------------------------------------
/migrate/.idea/sbt.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/migrate/runit.sh:
--------------------------------------------------------------------------------
1 | ~/datastax/spark-2.4.0-bin-hadoop2.7/bin/spark-submit --class com.datastax.powertools.migrate.dynamoDB --packages datastax:spark-cassandra-connector:2.4.0-s_2.11 ./target/dynamoDB-0.1.jar JPH_test
2 |
--------------------------------------------------------------------------------
/migrate/.idea/encodings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/migrate/.idea/scala_compiler.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/migrate/src/main/resources/create_schema.cql:
--------------------------------------------------------------------------------
1 | CREATE TABLE testks.jph_test (
2 | last_name text,
3 | first_name text,
4 | json_blob text,
5 | PRIMARY KEY (last_name, first_name)
6 | ) WITH CLUSTERING ORDER BY (first_name ASC)
7 | ;
8 |
--------------------------------------------------------------------------------
/conf/dynamo-cassandra-proxy.yaml.template:
--------------------------------------------------------------------------------
1 | dynamoRegion: us-east-2
2 | dynamoAccessKey:
3 | dynamoSecretKey:
4 | awsDynamodbEndpoint: apigateway.us-east-2.amazonaws.com
5 | streamsEnabled: false
6 | contactPoints: localhost
7 | dockerCassandra: false
8 |
--------------------------------------------------------------------------------
/migrate/rundump.sh:
--------------------------------------------------------------------------------
1 | ~/datastax/spark-2.4.0-bin-hadoop2.7/bin/spark-submit --class com.github.traviscrawford.spark.dynamodb.DynamoBackupJob --packages com.github.traviscrawford:spark-dynamodb:0.0.14-SNAPSHOT ./target/dynamoDB-0.1.jar -table JPH_test -output JPH_test.out -overwrite true
2 |
--------------------------------------------------------------------------------
/migrate/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/migrate/.idea/hydra.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM openjdk:11
2 |
3 | COPY target/dynamodb-cassandra-proxy-0.1.0.jar /opt/dynamo-cassandra-proxy/dynamodb-cassandra-proxy-0.1.0.jar
4 | COPY conf/dynamo-cassandra-proxy.yaml.template /opt/dynamo-cassandra-proxy/dynamo-cassandra-proxy.yaml
5 |
6 | CMD java -jar /opt/dynamo-cassandra-proxy/dynamodb-cassandra-proxy-0.1.0.jar server /opt/dynamo-cassandra-proxy/dynamo-cassandra-proxy.yaml
7 |
--------------------------------------------------------------------------------
/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: '3'
2 | services:
3 | ddac:
4 | image: "datastax/ddac"
5 | container_name: "ddac"
6 | environment:
7 | - DS_LICENSE=accept
8 | ports:
9 | - "9042:9042"
10 | ddb-cassandra-proxy:
11 | build: ./
12 | container_name: "ddb-cas-proxy"
13 | command: |
14 | java -Ddw.contactPoints="ddac" -jar /opt/dynamo-cassandra-proxy/dynamodb-cassandra-proxy-0.1.0.jar server /opt/dynamo-cassandra-proxy/dynamo-cassandra-proxy.yaml
15 | ports:
16 | - "8080:8080"
17 | depends_on:
18 | - ddac
19 | restart: unless-stopped
20 |
--------------------------------------------------------------------------------
/migrate/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/migrate/.idea/compiler.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/src/test/java/com/datastax/powertools/dcp/resources/DynamoDBResourceTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp.resources;
17 |
18 | public class DynamoDBResourceTest {
19 |
20 | }
--------------------------------------------------------------------------------
/src/main/java/com/datastax/powertools/dcp/TranslatorType.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp;
17 |
18 | public enum TranslatorType {
19 | JSON_BLOB, COLLECTION, VERSIONS
20 | }
21 |
--------------------------------------------------------------------------------
/common/cassandra/conf-dir/resources/cassandra/conf/cassandra-rackdc.properties:
--------------------------------------------------------------------------------
1 | # Licensed to the Apache Software Foundation (ASF) under one
2 | # or more contributor license agreements. See the NOTICE file
3 | # distributed with this work for additional information
4 | # regarding copyright ownership. The ASF licenses this file
5 | # to you under the Apache License, Version 2.0 (the
6 | # "License"); you may not use this file except in compliance
7 | # with the License. You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
17 | # These properties are used with GossipingPropertyFileSnitch and will
18 | # indicate the rack and dc for this node
19 | dc=dc1
20 | rack=rack1
21 |
22 | # Add a suffix to a datacenter name. Used by the Ec2Snitch and Ec2MultiRegionSnitch
23 | # to append a string to the EC2 region name.
24 | #dc_suffix=
25 |
26 | # Uncomment the following line to make this snitch prefer the internal ip when possible, as the Ec2MultiRegionSnitch does.
27 | # prefer_local=true
28 |
--------------------------------------------------------------------------------
/src/main/java/com/datastax/powertools/dcp/api/DynamoStatementType.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp.api;
17 |
18 |
19 | public enum DynamoStatementType {
20 | PutItem,
21 | GetItem,
22 | CreateTable,
23 | DeleteTable,
24 | DescribeTable,
25 | DeleteItem,
26 | Query;
27 |
28 | public static DynamoStatementType valueOfLowerCase(String arg){
29 | DynamoStatementType[] stmtValues = DynamoStatementType.values();
30 |
31 | for (DynamoStatementType stmtValue : stmtValues) {
32 | if(stmtValue.toString().toLowerCase().equals(arg.toLowerCase())){
33 | return stmtValue;
34 | }
35 | }
36 | throw new RuntimeException("invalid argument for DynamoDB Statement Type");
37 | }
38 | }
39 |
40 |
--------------------------------------------------------------------------------
/src/test/java/com/datastax/powertools/dcp/AbstractDCPTest.java:
--------------------------------------------------------------------------------
1 | package com.datastax.powertools.dcp;
2 |
3 | import org.junit.ClassRule;
4 |
5 | import org.slf4j.Logger;
6 | import org.slf4j.LoggerFactory;
7 |
8 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
9 | import com.amazonaws.services.dynamodbv2.local.embedded.DynamoDBEmbedded;
10 | import com.datastax.powertools.dcp.managed.dynamodb.DynamoManager;
11 | import io.dropwizard.testing.ConfigOverride;
12 | import io.dropwizard.testing.junit.DropwizardAppRule;
13 |
14 | public class AbstractDCPTest
15 | {
16 | private static final Logger logger = LoggerFactory.getLogger(AbstractDCPTest.class);
17 | private static AmazonDynamoDB awsClient = DynamoDBEmbedded.create().amazonDynamoDB();
18 | private static DynamoManager proxyClient;
19 |
20 | @ClassRule
21 | public static final DropwizardAppRule<DCProxyConfiguration> RULE = new DropwizardAppRule<>(DCProxyApplication.class, "",
22 | ConfigOverride.config("dockerCassandra", "true"));
23 |
24 | protected synchronized AmazonDynamoDB getProxyClient()
25 | {
26 | if (proxyClient == null)
27 | {
28 | proxyClient = new DynamoManager();
29 | proxyClient.configure(RULE.getConfiguration());
30 | }
31 |
32 | return proxyClient.get();
33 | }
34 |
35 | protected AmazonDynamoDB getAwsClient()
36 | {
37 | return awsClient;
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/src/main/java/com/datastax/powertools/dcp/managed/ddbstreams/StreamsRecordProcessorFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp.managed.ddbstreams;
17 |
18 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
19 | import com.amazonaws.services.kinesis.clientlibrary.interfaces.v2.IRecordProcessor;
20 | import com.amazonaws.services.kinesis.clientlibrary.interfaces.v2.IRecordProcessorFactory;
21 |
22 | public class StreamsRecordProcessorFactory implements IRecordProcessorFactory {
23 | private final String tableName;
24 | private final AmazonDynamoDB dynamoDBClient;
25 |
26 | public StreamsRecordProcessorFactory(AmazonDynamoDB dynamoDBClient, String tableName) {
27 | this.tableName = tableName;
28 | this.dynamoDBClient = dynamoDBClient;
29 | }
30 |
31 | @Override
32 | public IRecordProcessor createProcessor() {
33 | return new StreamsRecordProcessor(dynamoDBClient, tableName);
34 | }
35 | }
--------------------------------------------------------------------------------
/src/main/java/com/datastax/powertools/dcp/api/DynamoDBResponse.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp.api;
17 |
18 | import com.amazonaws.AmazonWebServiceResult;
19 |
20 | public class DynamoDBResponse {
21 |
22 |
23 | private String error;
24 | private final int statusCode;
25 | private AmazonWebServiceResult result;
26 |
27 | public boolean hasError(){
28 | return error != null;
29 | }
30 | public String getError() {
31 | return error;
32 | }
33 | public int getStatusCode() {
34 | return statusCode;
35 | }
36 |
37 | public AmazonWebServiceResult getResult() {
38 | return result;
39 | }
40 |
41 | public void setResult(AmazonWebServiceResult result) {
42 | this.result = result;
43 | }
44 |
45 | public DynamoDBResponse(AmazonWebServiceResult result, int statusCode) {
46 | this.result = result;
47 | this.statusCode = statusCode;
48 | }
49 |
50 | public void setError(String error) {
51 | this.error = error;
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/src/test/java/com/datastax/powertools/dcp/CRUDTest.java:
--------------------------------------------------------------------------------
1 | package com.datastax.powertools.dcp;
2 |
3 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
4 | import com.amazonaws.services.dynamodbv2.model.AttributeDefinition;
5 | import com.amazonaws.services.dynamodbv2.model.CreateTableRequest;
6 | import com.amazonaws.services.dynamodbv2.model.DescribeTableResult;
7 | import com.amazonaws.services.dynamodbv2.model.KeySchemaElement;
8 | import com.amazonaws.services.dynamodbv2.model.KeyType;
9 | import com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput;
10 | import com.amazonaws.services.dynamodbv2.model.ScalarAttributeType;
11 | import org.junit.Test;
12 | import org.testng.Assert;
13 |
14 | import java.util.Date;
15 |
16 | public class CRUDTest extends AbstractDCPTest
17 | {
18 | @Test
19 | public void testCreate() {
20 | AmazonDynamoDB proxyClient = getProxyClient();
21 | AmazonDynamoDB awsClient = getAwsClient();
22 |
23 | CreateTableRequest req = new CreateTableRequest()
24 | .withTableName("foo")
25 | .withProvisionedThroughput(new ProvisionedThroughput().withReadCapacityUnits(100L).withWriteCapacityUnits(100L))
26 | .withKeySchema(new KeySchemaElement("Name", KeyType.HASH))
27 | .withAttributeDefinitions(new AttributeDefinition("Name", ScalarAttributeType.S));
28 |
29 | proxyClient.createTable(req);
30 | awsClient.createTable(req);
31 |
32 | DescribeTableResult r = proxyClient.describeTable("foo");
33 | DescribeTableResult r2 = awsClient.describeTable("foo"); // compare the proxy's answer against embedded DynamoDB
34 |
35 | Date now = new Date();
36 | r.getTable().withCreationDateTime(now);
37 | r2.getTable().withCreationDateTime(now);
38 |
39 | Assert.assertEquals(r, r2);
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/migrate/README-JP.md:
--------------------------------------------------------------------------------
1 | # DynamodbMigrate
2 | 
3 | The purpose of this migrate directory is to support a one-time migration from an AWS DynamoDB database to DSE/Cassandra. This code relies on a Travis Crawford GitHub project that uses the AWS SDK to scan the DynamoDB records into a dataframe. The code in this repository takes this dataframe, modifies the structure, and writes to a Cassandra table.
4 | 
5 | In order to run this demo, it is assumed that you have the following installed and available on your local system.
6 | 
7 | 1. DataStax Enterprise 6.7.x
8 | 2. Spark
9 | 3. git
10 | 4. mvn
11 | 5. Travis Crawford's repository
12 | 
13 | 
14 | # Starting local DSE/Cassandra
15 | 
16 | 
17 | ### Start on a local OSX or Linux machine. An example for a DSE tarball install is shown below (-s enables Search, -k enables Analytics).
18 | 
19 | * `dse cassandra -k`
20 | 
21 | ## Getting and running the demo
22 | 
23 | 
24 | 
25 | ### Build the necessary dependency
26 | 
27 | This code relies on a GitHub project from Travis Crawford that scans DynamoDB records into a dataframe.
28 | 
29 | ### This is the forked Travis Crawford dependency
30 | 
31 | * Navigate to the directory where you want to store the code.
32 | * Execute the following command:
33 | `git clone https://github.com/jphaugla/spark-dynamodb.git`
34 | * Build the code:
35 | ```bash
36 | mvn package
37 | ```
38 | * Install the dependency:
39 | ```bash
40 | mvn install -Dgpg.skip
41 | ```
42 | * For more details, refer to the readme:
43 | https://github.com/jphaugla/spark-dynamodb/blob/master/README.md
44 | 
45 | 
46 | ### To run this demo you will need to download the source code from GitHub.
47 | 
48 | * Navigate to the migrate directory of the downloaded project.
49 | * Execute the following command:
50 | `git clone git@github.com:datastax/dynamo-cassandra-proxy.git`
51 | * Build the code:
52 | `cd dynamodb-cassandra-proxy/migrate && mvn package`
53 | 
54 | 
55 | ### To run
56 | 
57 | * From the migrate directory of the project, start the producer app:
58 | 
59 | `./runit.sh`
60 | 
61 | 
62 | #### mvn problems
63 | We needed to remove cached jar files for the Spark and DSE dependencies:
64 | 
65 | rm -rf ~/.ivy2
66 | rm -rf ~/.m2
67 | 
68 | ## Futures
69 | 
70 | * Besides the table name, the hash_key and sort_key must be passed as part of the runit.sh script. Ideally, only the table name would be passed, with the AWS SDK used to look up the hash_key and sort_key.
71 | * Running with DSE Analytics. In this repository and in the spark-dynamodb GitHub project, pom.xml files were created to match the Spark version in DSE Analytics 6.7.3. However, those versions could not be made to work, so the effort is abandoned for now.
--------------------------------------------------------------------------------
/src/main/java/com/datastax/powertools/dcp/managed/dynamodb/DynamoManager.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp.managed.dynamodb;
17 |
18 | import com.amazonaws.client.builder.AwsClientBuilder;
19 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
20 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
21 | import com.datastax.powertools.dcp.DCProxyConfiguration;
22 | import io.dropwizard.lifecycle.Managed;
23 |
24 | import java.util.Properties;
25 |
26 | public class DynamoManager implements Managed {
27 |
28 | private AmazonDynamoDB ddb ;
29 | private String dynamodbEndpoint;
30 | private String signinRegion;
31 | private String accessKey;
32 | private String secretKey;
33 |
34 | public void configure(DCProxyConfiguration config) {
35 | this.dynamodbEndpoint = config.getDsDynamodbEndpoint();
36 | this.signinRegion = config.getDynamoRegion();
37 | this.accessKey = config.getDynamoAccessKey();
38 | this.secretKey = config.getDynamoSecretKey();
39 |
40 | Properties props = System.getProperties();
41 | props.setProperty("aws.accessKeyId", accessKey);
42 | props.setProperty("aws.secretKey", secretKey);
43 | }
44 |
45 |
46 | public void start() throws Exception {
47 | }
48 |
49 | public void stop() throws Exception {
50 |
51 | }
52 |
53 | public synchronized AmazonDynamoDB get() {
54 | if (ddb == null) {
55 | AwsClientBuilder.EndpointConfiguration endpointConfiguration = new AwsClientBuilder.EndpointConfiguration(dynamodbEndpoint, signinRegion);
56 | ddb = AmazonDynamoDBClientBuilder.standard().withEndpointConfiguration(endpointConfiguration).build();
57 | }
58 |
59 | return ddb;
60 | }
61 | }
62 |
--------------------------------------------------------------------------------
/src/test/java/com/datastax/powertools/dcp/example/CatalogItem.java:
--------------------------------------------------------------------------------
1 | package com.datastax.powertools.dcp.example;
2 |
3 | import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBAttribute;
4 | import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBHashKey;
5 | import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBIgnore;
6 | import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBRangeKey;
7 | import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBTable;
8 |
9 | import java.util.Objects;
10 | import java.util.Set;
11 |
12 | @DynamoDBTable(tableName="ProductCatalog")
13 | public class CatalogItem {
14 |
15 | private Integer id;
16 | private String title;
17 | private String ISBN;
18 | private Integer booksInStock;
19 | private Set<String> bookAuthors;
20 | private String someProp;
21 |
22 | @DynamoDBHashKey(attributeName="Id")
23 | public Integer getId() { return id; }
24 | public void setId(Integer id) {this.id = id; }
25 |
26 | @DynamoDBRangeKey(attributeName="Title")
27 | public String getTitle() {return title; }
28 | public void setTitle(String title) { this.title = title; }
29 |
30 | @DynamoDBAttribute(attributeName="ISBN")
31 | public String getISBN() { return ISBN; }
32 | public void setISBN(String ISBN) { this.ISBN = ISBN; }
33 |
34 | @DynamoDBAttribute(attributeName="Authors")
35 | public Set<String> getBookAuthors() { return bookAuthors; }
36 | public void setBookAuthors(Set<String> bookAuthors) { this.bookAuthors = bookAuthors; }
37 |
38 | @DynamoDBAttribute(attributeName="booksInStock")
39 | public Integer getBooksInStock() { return booksInStock; }
40 | public void setBooksInStock(Integer booksInStock) {this.booksInStock= booksInStock; }
41 |
42 |
43 | @DynamoDBIgnore
44 | public String getSomeProp() { return someProp; }
45 | public void setSomeProp(String someProp) { this.someProp = someProp; }
46 |
47 | @Override
48 | public boolean equals(Object o)
49 | {
50 | if (this == o) return true;
51 | if (o == null || getClass() != o.getClass()) return false;
52 | CatalogItem that = (CatalogItem) o;
53 | return id.equals(that.id) &&
54 | Objects.equals(title, that.title) &&
55 | Objects.equals(ISBN, that.ISBN) &&
56 | Objects.equals(booksInStock, that.booksInStock) &&
57 | Objects.equals(bookAuthors, that.bookAuthors);
58 | }
59 |
60 | @Override
61 | public int hashCode()
62 | {
63 | return Objects.hash(id, title, ISBN, bookAuthors, booksInStock);
64 | }
65 | }
--------------------------------------------------------------------------------
/migrate/README.md:
--------------------------------------------------------------------------------
1 | # DynamodbMigrate
2 |
3 | The purpose of this migrate directory is the one-time migration of data from an AWS DynamoDB database to DataStax Cassandra. This code relies on a Travis Crawford GitHub project that uses the AWS SDK to scan the DynamoDB records into a dataframe. The code in this repository takes this dataframe, modifies the structure, and writes to a Cassandra table.
4 |
5 | In order to run this demo, it is assumed that you have the following installed and available on your local system.
6 | 
7 | 1. DataStax Enterprise 6.7.x
8 | 2. Spark
9 | 3. git
10 | 4. mvn
11 | 5. Travis Crawford repository
12 |
13 |
14 | # Getting Started with Local DSE/Cassandra
15 |
16 |
17 | ### Starting DSE tarball install on the local OSX or Linux machine (-s starts search, -k starts Spark)
18 |
19 | * `dse cassandra -k`
20 |
21 | ## Getting and running the demo
22 |
23 |
24 |
25 | ### Build necessary dependency
26 |
27 | This code relies on a GitHub project from Travis Crawford to scan DynamoDB records into a dataframe.
28 |
29 | ### This is the Travis Crawford dependency which is forked
30 |
31 | * Navigate to the directory where you would like to save the code.
32 | * Execute the following command:
33 | `git clone https://github.com/jphaugla/spark-dynamodb.git`
34 | * Build this code
35 | ```bash
36 | mvn package
37 | ```
38 | * Install this dependency
39 | ```bash
40 | mvn install -Dgpg.skip
41 | ```
42 | * For more information, refer to the readme:
43 | https://github.com/jphaugla/spark-dynamodb/blob/master/README.md
44 |
45 |
46 | ### In order to run this demo you will need to download the source from GitHub.
47 |
48 | * Navigate to the migrate directory of the downloaded project
49 | * Execute the following command:
50 | `git clone git@github.com:datastax/dynamo-cassandra-proxy.git`
51 | * Build the code
52 | `cd dynamodb-cassandra-proxy/migrate && mvn package`
53 |
54 |
55 | ### To run
56 |
57 | * From the migrate directory of the project start the producer app
58 |
59 | `./runit.sh`
60 |
61 |
62 | #### PROBLEMS with mvn
63 | Needed to clean out cached jar files for the Spark and DSE dependencies:
64 |
65 | rm -rf ~/.ivy2
66 | rm -rf ~/.m2
67 | ## Futures
68 |
69 | * Must pass in the table name as well as the hash_key and sort_key as part of the runit.sh script. It would be preferable to pass only the table name and use the AWS SDK to look up the hash_key and sort_key (sketched below).
70 | * Run using DSE Analytics. In this repository and in the spark-dynamodb GitHub project, pom.xml files were created to match the Spark version in DSE Analytics 6.7.3. However, these versions could not be made to work, so the effort was abandoned.
71 |
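72 | A hypothetical sketch of that lookup (the class name is illustrative; `DescribeTable` is a standard AWS SDK v1 call):
73 | 
74 | ```java
75 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
76 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
77 | import com.amazonaws.services.dynamodbv2.model.KeySchemaElement;
78 | 
79 | public class KeySchemaLookup {
80 |     public static void main(String[] args) {
81 |         AmazonDynamoDB ddb = AmazonDynamoDBClientBuilder.defaultClient();
82 |         // KeyType is HASH for the partition key and RANGE for the sort key.
83 |         for (KeySchemaElement e : ddb.describeTable(args[0]).getTable().getKeySchema()) {
84 |             System.out.println(e.getKeyType() + " -> " + e.getAttributeName());
85 |         }
86 |     }
87 | }
88 | ```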
--------------------------------------------------------------------------------
/src/test/java/com/datastax/powertools/dcp/DynamoDSETranslatorJSONBlobTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp;
17 |
18 | import com.datastax.powertools.dcp.managed.dse.CassandraManager;
19 | import com.fasterxml.jackson.databind.JsonNode;
20 | import com.fasterxml.jackson.databind.ObjectMapper;
21 | import org.testng.annotations.Test;
22 |
23 | import java.io.IOException;
24 |
25 | public class DynamoDSETranslatorJSONBlobTest {
26 | private String stringJson = "{\"hash_key\":{\"S\":\"hash_value\"},\"json_blob\":{\"favorites\":{\"SS\":[\"puppies\",\"kittens\",\"other cute animals\"]},\"city\":{\"S\":\"NYC\"}}}";
27 | //private String stringJson2 = "{\"favorites\":{\"SS\":[\"puppies\",\"kittens\",\"other cute animals\"]}}";
28 | private String stringJson2 = "{\"SS\":[\"puppies\",\"kittens\",\"other cute animals\"]}";
29 | //private String stringJson2 = "[\"puppies\",\"kittens\",\"other cute animals\"]";
30 |
31 |
32 | @Test
33 | public void testStripJson() throws IOException {
34 | CassandraManager dm = new CassandraManager();
35 | DynamoDSETranslatorJSONBlob jsonBlobTranslator = new DynamoDSETranslatorJSONBlob(dm);
36 |
37 | ObjectMapper mapper = new ObjectMapper();
38 | JsonNode jsonObj= mapper.readTree(stringJson);
39 |
40 | JsonNode result = jsonBlobTranslator.stripDynamoTypes(jsonObj);
41 | System.out.println(jsonObj.toString());
42 | System.out.println(result.toString());
43 | }
44 |
45 | @Test
46 | public void testStripJsonArray() throws IOException {
47 | CassandraManager dm = new CassandraManager();
48 | DynamoDSETranslatorJSONBlob jsonBlobTranslator = new DynamoDSETranslatorJSONBlob(dm);
49 |
50 | ObjectMapper mapper = new ObjectMapper();
51 | JsonNode jsonObj= mapper.readTree(stringJson2);
52 |
53 | JsonNode result = jsonBlobTranslator.stripDynamoTypes(jsonObj);
54 | System.out.println(jsonObj.toString());
55 | System.out.println(result.toString());
56 | }
57 | }
--------------------------------------------------------------------------------
/docs/Summary.md:
--------------------------------------------------------------------------------
1 | ## Running existing DynamoDB applications on a Cassandra cluster
2 |
3 | Many developers build on premises and then deploy to the cloud for production. The proxy enables customers to run their existing DynamoDB applications using Cassandra clusters on-prem.
4 |
5 | 
6 |
7 | ## Live replication of DynamoDB applications to Cassandra
8 |
9 | DynamoDB Streams can be used to enable hybrid workload management and transfers from DynamoDB cloud deployments to on-prem Cassandra-proxied deployments. This is supported in the current implementation and, like DynamoDB Global Tables, it uses DynamoDB Streams to move the data. For hybrid transfer to DynamoDB, check out the Cassandra CDC improvements, which could be leveraged, and stay tuned to the DataStax blog for updates on our Change Data Capture (CDC) capabilities.
10 |
11 | 
12 |
13 |
14 | ## What's in the Proxy?
15 | The proxy is designed to enable users to back their DynamoDB applications with Cassandra. We determined that the best way to help users leverage this new tool and to help it flourish was to make it an open source Apache 2 licensed project.
16 | The code consists of a scalable proxy layer that sits between your app and the database. It provides compatibility with the DynamoDB SDK, which allows existing DynamoDB applications to read and write data to Cassandra without application changes (see the client sketch at the end of this page).
17 |
18 |
19 | ## How It Works
20 |
21 | A few design decisions were made when designing the proxy. As always, these are in line with the design principles that we use to guide development for both Cassandra and our DataStax Enterprise product.
22 |
23 | ## Why a Separate Process?
24 |
25 | We could have built this as a Cassandra plugin that would execute as part of the core process but we decided to build it as a separate process for the following reasons:
26 | - Ability to scale the proxy independently of Cassandra
27 | - Ability to leverage k8s / cloud-native tooling
28 | - Developer agility and to attract contributors—developers can work on the proxy with limited knowledge of Cassandra internals
29 | - Independent release cadence, not tied to the Apache Cassandra project
30 | - Better AWS integration story for stateless apps (i.e., leverage CloudWatch alarm, autoscaling, etc.)
31 |
32 | ## Why Pluggable Persistence?
33 |
34 | On quick inspection, DynamoDB's data model is quite simple. It consists of a hash key, a sort key, and a JSON structure which is referred to as an item. Depending on your goals, the DynamoDB data model can be persisted in Cassandra Query Language (CQL) in different ways. To allow for experimentation and pluggability, we have built the translation layer in a pluggable way that allows for different translators. We continue to build on this scaffolding to test out multiple data models and determine which are best suited for:
35 |
36 | - Different workloads
37 | - Different support for consistency / linearization requirements
38 | - Different performance tradeoffs based on SLAs
39 |
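40 | To make the SDK compatibility described above concrete, here is a minimal client sketch. It assumes the proxy is reachable on localhost:8080 (the port exposed in the bundled docker-compose.yaml); the table name and region are illustrative:
41 | 
42 | ```java
43 | import com.amazonaws.client.builder.AwsClientBuilder;
44 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
45 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
46 | 
47 | public class ProxyClientExample {
48 |     public static void main(String[] args) {
49 |         // Point the stock DynamoDB SDK at the proxy endpoint instead of AWS.
50 |         AmazonDynamoDB client = AmazonDynamoDBClientBuilder.standard()
51 |                 .withEndpointConfiguration(
52 |                         new AwsClientBuilder.EndpointConfiguration("http://localhost:8080", "us-east-2"))
53 |                 .build();
54 | 
55 |         // Existing DynamoDB calls are unchanged, e.g.:
56 |         System.out.println(client.describeTable("foo").getTable());
57 |     }
58 | }
59 | ```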
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | conf/dynamo-cassandra-proxy.yaml
2 | logs/
3 |
4 | .idea
5 |
6 | # Created by https://www.gitignore.io/api/java,osx,emacs,intellij,maven
7 |
8 | ### Java ###
9 | *.class
10 |
11 | # Mobile Tools for Java (J2ME)
12 | .mtj.tmp/
13 |
14 | # Package Files #
15 | *.jar
16 | *.war
17 | *.ear
18 |
19 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
20 | hs_err_pid*
21 |
22 |
23 | ### OSX ###
24 | .DS_Store
25 | .AppleDouble
26 | .LSOverride
27 |
28 | # Icon must end with two \r
29 | Icon
30 |
31 | # Thumbnails
32 | ._*
33 |
34 | # Files that might appear in the root of a volume
35 | .DocumentRevisions-V100
36 | .fseventsd
37 | .Spotlight-V100
38 | .TemporaryItems
39 | .Trashes
40 | .VolumeIcon.icns
41 |
42 | # Directories potentially created on remote AFP share
43 | .AppleDB
44 | .AppleDesktop
45 | Network Trash Folder
46 | Temporary Items
47 | .apdisk
48 |
49 |
50 | ### Emacs ###
51 | # -*- mode: gitignore; -*-
52 | *~
53 | \#*\#
54 | /.emacs.desktop
55 | /.emacs.desktop.lock
56 | *.elc
57 | auto-save-list
58 | tramp
59 | .\#*
60 |
61 | # Org-mode
62 | .org-id-locations
63 | *_archive
64 |
65 | # flymake-mode
66 | *_flymake.*
67 |
68 | # eshell files
69 | /eshell/history
70 | /eshell/lastdir
71 |
72 | # elpa packages
73 | /elpa/
74 |
75 | # reftex files
76 | *.rel
77 |
78 | # AUCTeX auto folder
79 | /auto/
80 |
81 | # cask packages
82 | .cask/
83 | dist/
84 |
85 | # Flycheck
86 | flycheck_*.el
87 |
88 | # server auth directory
89 | /server/
90 |
91 | # projectiles files
92 | .projectile
93 |
94 | ### Intellij ###
95 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
96 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
97 |
98 | # User-specific stuff:
99 | .idea/workspace.xml
100 | .idea/tasks.xml
101 | .idea/dictionaries
102 | .idea/vcs.xml
103 | .idea/jsLibraryMappings.xml
104 |
105 | # Sensitive or high-churn files:
106 | .idea/dataSources.ids
107 | .idea/dataSources.xml
108 | .idea/dataSources.local.xml
109 | .idea/sqlDataSources.xml
110 | .idea/dynamic.xml
111 | .idea/uiDesigner.xml
112 |
113 | # Gradle:
114 | .idea/gradle.xml
115 | .idea/libraries
116 |
117 | # Mongo Explorer plugin:
118 | .idea/mongoSettings.xml
119 |
120 | ## File-based project format:
121 | *.iws
122 |
123 | ## Plugin-specific files:
124 |
125 | # IntelliJ
126 | /out/
127 |
128 | # mpeltonen/sbt-idea plugin
129 | .idea_modules/
130 |
131 | # JIRA plugin
132 | atlassian-ide-plugin.xml
133 |
134 | # Crashlytics plugin (for Android Studio and IntelliJ)
135 | com_crashlytics_export_strings.xml
136 | crashlytics.properties
137 | crashlytics-build.properties
138 | fabric.properties
139 |
140 | ### Intellij Patch ###
141 | *.iml
142 |
143 |
144 | ### Maven ###
145 | target/
146 | pom.xml.tag
147 | pom.xml.releaseBackup
148 | pom.xml.versionsBackup
149 | pom.xml.next
150 | release.properties
151 | dependency-reduced-pom.xml
152 | buildNumber.properties
153 | .mvn/timing.properties
154 |
155 |
--------------------------------------------------------------------------------
/src/main/java/com/datastax/powertools/dcp/managed/dse/CassandraStatements.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp.managed.dse;
17 |
18 | import com.datastax.oss.driver.api.core.CqlSession;
19 | import com.datastax.oss.driver.api.core.DriverTimeoutException;
20 | import com.datastax.oss.driver.api.core.cql.PreparedStatement;
21 |
22 | public class CassandraStatements {
23 | private static final String KEYSPACE_PATTERN = ";;;KEYSPACE;;;";
24 | private static final String REPLICATION_STRATEGY_PATTERN = ";;;REPLICATION_STRATEGY;;;";
25 |
26 | private static String STMT_create_keyspace = String.format("CREATE KEYSPACE IF NOT EXISTS %s WITH REPLICATION = %s ;", KEYSPACE_PATTERN, REPLICATION_STRATEGY_PATTERN);
27 | private static String STMT_get_columns = String.format("SELECT * FROM %s.%s WHERE keyspace_name = '%s' ;", "system_schema", "columns", KEYSPACE_PATTERN);
28 |
29 | public static class Prepared {
30 | private final String keyspace;
31 | private final CqlSession session;
32 | private final String replicationStrategy;
33 |
34 | final PreparedStatement get_columns;
35 | final PreparedStatement create_keyspace;
36 |
37 | public Prepared(CqlSession session, String keyspace, String replicationStrategy) {
38 | this.keyspace = keyspace;
39 | this.session = session;
40 | this.replicationStrategy = replicationStrategy;
41 |
42 | create_keyspace = prepare(STMT_create_keyspace);
43 |
44 | //Ensure the dynamo keyspaceName exists
45 | try {
46 | session.execute(create_keyspace.bind());
47 | } catch(DriverTimeoutException de){
48 | try {
49 | Thread.sleep(10000);
50 | session.execute(create_keyspace.bind());
51 | Thread.sleep(10000);
52 | } catch (InterruptedException e) {
53 | e.printStackTrace();
54 | }
55 | }
56 |
57 | get_columns = prepare(STMT_get_columns);
58 | }
59 |
60 | public PreparedStatement prepare(String stmt) {
61 | String withKeyspace = stmt.replaceAll(KEYSPACE_PATTERN, this.keyspace);
62 | String withReplication = withKeyspace.replaceAll(REPLICATION_STRATEGY_PATTERN, this.replicationStrategy);
63 | PreparedStatement prepared = session.prepare(withReplication);
64 |
65 | return prepared;
66 | }
67 |
68 | }
69 | }
70 |
--------------------------------------------------------------------------------
/src/main/java/com/datastax/powertools/dcp/DynamoDSETranslator.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp;
17 |
18 | import com.amazonaws.services.dynamodbv2.model.*;
19 | import com.datastax.powertools.dcp.api.DynamoDBResponse;
20 | import com.datastax.powertools.dcp.managed.dse.CassandraManager;
21 | import com.fasterxml.jackson.annotation.JsonInclude;
22 | import com.fasterxml.jackson.databind.DeserializationFeature;
23 | import com.fasterxml.jackson.databind.ObjectMapper;
24 | import com.fasterxml.jackson.databind.PropertyNamingStrategy;
25 |
26 | import java.io.IOException;
27 |
28 | public abstract class DynamoDSETranslator {
29 | protected final CassandraManager cassandraManager;
30 | protected String keyspaceName;
31 |
32 | public static final ObjectMapper awsRequestMapper = new ObjectMapper()
33 | .disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
34 | .setSerializationInclusion(JsonInclude.Include.NON_NULL)
35 | .setPropertyNamingStrategy(
36 | //Map to AWS api style
37 | new PropertyNamingStrategy.UpperCamelCaseStrategy() {
38 | @Override
39 | public String translate(String input) {
40 | String output = super.translate(input);
41 |
42 | if (output != null && output.length() >= 2) {
43 | switch (output) {
44 | case "Ss": return "SS";
45 | case "Bool": return "BOOL";
46 | case "Ns": return "NS";
47 | default:
48 | break;
49 | }
50 | }
51 |
52 | return output;
53 | }
54 | });
55 |
56 | public DynamoDSETranslator(CassandraManager cassandraManager) {
57 | this.cassandraManager = cassandraManager;
58 | this.keyspaceName = cassandraManager.getKeyspaceName();
59 | }
60 |
61 | public abstract DynamoDBResponse createTable(CreateTableRequest payload) throws IOException;
62 | public abstract DynamoDBResponse putItem(PutItemRequest payload) throws IOException;
63 | public abstract DynamoDBResponse getItem(GetItemRequest payload);
64 | public abstract DynamoDBResponse query(QueryRequest payload);
65 | public abstract DynamoDBResponse deleteItem(DeleteItemRequest dir);
66 | public abstract DynamoDBResponse describeTable(DescribeTableRequest dtr);
67 | public abstract DynamoDBResponse deleteTable(DeleteTableRequest dbr);
68 |
69 | protected String getKeyspaceName() {
70 | return keyspaceName;
71 | }
72 |
73 | }
74 |
--------------------------------------------------------------------------------
/src/main/java/com/datastax/powertools/dcp/DCProxyApplication.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp;
17 |
18 | import com.datastax.powertools.dcp.managed.ddbstreams.DynamoStreamsManager;
19 | import com.datastax.powertools.dcp.managed.dse.CassandraManager;
20 | import com.datastax.powertools.dcp.managed.dynamodb.DynamoManager;
21 | import com.datastax.powertools.dcp.resources.DCProxyResource;
22 | import io.dropwizard.Application;
23 | import io.dropwizard.setup.Bootstrap;
24 | import io.dropwizard.setup.Environment;
25 |
26 | public class DCProxyApplication extends Application<DCProxyConfiguration> {
27 |
28 | public static void main(String[] args) throws Exception {
29 | new DCProxyApplication().run(args);
30 | }
31 |
32 | @Override
33 | public String getName() {
34 | return "DynamoDB-Cassandra-Proxy";
35 | }
36 |
37 | @Override
38 | public void initialize(Bootstrap<DCProxyConfiguration> bootstrap) {
39 | // nothing to do yet
40 | }
41 |
42 | @Override
43 | public void run(DCProxyConfiguration configuration,
44 | Environment environment) {
45 |
46 | //DataStax
47 | CassandraManager dseManager = new CassandraManager();
48 | dseManager.configure(configuration);
49 | environment.lifecycle().manage(dseManager);
50 |
51 | TranslatorType translatorType = configuration.getTranslatorImplementation();
52 | DynamoDSETranslator ddt;
53 | if (translatorType == TranslatorType.JSON_BLOB){
54 | ddt = new DynamoDSETranslatorJSONBlob(dseManager);
55 | }else{
56 | // TODO: Implement other versions
57 | ddt = new DynamoDSETranslatorJSONBlob(dseManager);
58 | }
59 | final DCProxyResource dcProxyResource = new DCProxyResource(dseManager, ddt);
60 | environment.jersey().register(dcProxyResource);
61 |
62 | //Dynamo
63 | DynamoManager dynamoManager = new DynamoManager();
64 | dynamoManager.configure(configuration);
65 | environment.lifecycle().manage(dynamoManager);
66 |
67 | //DynamoDBStreams
68 | if(configuration.isStreamsEnabled()){
69 | DynamoStreamsManager dynamoStreamsManager = new DynamoStreamsManager(dynamoManager.get());
70 | dynamoStreamsManager.configure(configuration);
71 | environment.lifecycle().manage(dynamoStreamsManager);
72 | }
73 |
74 | /*
75 | Thread thread = new Thread(){
76 | @Override
77 | public void run() {
78 | super.run();
79 | dynamoStreamsManager.processStream();
80 | }
81 | };
82 | thread.setDaemon(true);
83 | thread.start();
84 | */
85 |
86 | //final DynamoStreamsResource streamsResource = new DynamoStreamsResource(dynamoStreamsManager);
87 | //environment.jersey().register(streamsResource);
88 | }
89 |
90 | }
91 |
--------------------------------------------------------------------------------
/docs/Translators-JP.md:
--------------------------------------------------------------------------------
1 | ## Translators
2 | 
3 | The `dynamo-db-proxy` uses Translators to convert dynamodb requests into CQL data models.
4 | 
5 | dynamodb uses a single partition key and a single sort key. Everything else is JSON. The query API allows users to select individual fields within the JSON payload at query time but, with the exception of global indexes, only within a partition.
6 | 
7 | A Translator may support partial DynamoDB functionality and throw UnimplementedExceptions for functionality it does not implement.
8 | 
9 | A Translator class must extend `DynamoDSETranslator`.
10 | 
11 | ## Cassandra / DSE data models
12 | 
13 | ### json_blob
14 | 
15 | json_blob - the current implementation
16 | 
17 | ```
18 | CREATE TABLE dynamoks.test (
19 |     hash_key double,
20 |     sort_key double,
21 |     json_blob text,
22 |     PRIMARY KEY (hash_key, sort_key)
23 | ) WITH CLUSTERING ORDER BY (sort_key ASC)
24 | ```
25 | 
26 | This is the simplest implementation. The limitation is that the DynamoDB UpdateItem API is not easy to support, because it would require a read before every write. Update queries therefore return a not-supported exception.
27 | ```
28 | cqlsh> select * from dynamoks.test ;
29 | 
30 | hash_key | sort_key | json_blob
31 | ----------+----------+----------------------------------------------------------------------------------------------------------------------------------
32 | 1 | 2 | {"favorites":{"SS":["puppies","kittens","other cute animals"]},"hash_key":{"S":"1.0"},"city":{"S":"NYC"},"sort_key":{"S":"2.0"}}
33 | 
34 | ```
35 | ### multi-version
36 | 
37 | An LSM-inspired data model might support the DynamoDB UpdateItem API by versioning updates, implementing last-write-wins (LWW) semantics on the client side at read time, and running a cleanup (compaction-like) background job.
38 | 
39 | ```
40 | CREATE TABLE dynamoks.test (
41 |     hash_key double,
42 |     sort_key double,
43 |     version timeuuid,
44 |     json_blob text,
45 |     PRIMARY KEY (hash_key, sort_key, version)
46 | ) WITH CLUSTERING ORDER BY (sort_key ASC)
47 | ```
48 | 
49 | Example data:
50 | ```
51 | cqlsh> select * from dynamoks.test ;
52 | 
53 | hash_key | sort_key | version | json_blob
54 | ----------+----------+---------+---------------------------------------------------------------------------------------------------------------
55 | 1 | 2 | 1 | {"favorites":{"SS":["puppies","kittens","other cute animals"]},"hash_key":{"S":"1.0"},"sort_key":{"S":"2.0"}}
56 | 1 | 2 | 2 | {"favorites":{"SS":["kittens","other cute animals"]},"hash_key":{"S":"1.0"},"city":{"S":"NYC"},"sort_key":{"S":"2.0"}}
57 | ```
58 | 
59 | For queries, the proxy would read the whole partition, merge it, and return the merged result:
60 | 
61 | ```
62 | {"favorites":{"SS":["puppies","kittens","other cute animals"]},"hash_key":{"S":"1.0"},"city":{"S":"NYC"},"sort_key":{"S":"2.0"}}
63 | ```
64 | 
65 | 
66 | ### fully-denormalized
67 | 
68 | For better multi-model and CQL support, the following data model would denormalize the dynamodb item into multiple records.
69 | 
70 | ```
71 | CREATE TABLE dynamoks.test (
72 |     hash_key double,
73 |     sort_key double,
74 |     column_name text,
75 |     value_numeric double,
76 |     value_string text,
77 |     value_date date,
78 |     PRIMARY KEY (hash_key, sort_key, column_name)
79 | ) WITH CLUSTERING ORDER BY (sort_key ASC)
80 | ```
81 | 
82 | Example data:
83 | ```
84 | cqlsh> select * from dynamoks.test ;
85 | 
86 | hash_key | sort_key | column_name | value_numeric | value_string | value_date
87 | ----------+----------+-------------+---------------+--------------+------------
88 | 1 | 2 | 'puppies' | 3 | |
89 | 1 | 2 | 'City' | | 'NYC' |
90 | ```
91 | 
92 | For queries, the proxy would read the whole partition, merge it, and return the merged result:
93 | 
94 | ```
95 | {"favorites":{"N":["puppies",3]},"hash_key":{"S":"1.0"},"city":{"S":"NYC"},"sort_key":{"S":"2.0"}}
96 | ```
97 | 
98 | A mix of the above is also possible (fully-denormalized / multi-version).
/docs/Translators.md:
--------------------------------------------------------------------------------
1 | ## Translators
2 |
3 | The `dynamo-db-proxy` uses Translators to go from dynamodb requests to CQL data models.
4 |
5 | DDB supports a single partition key and a single sort key. Everything else is JSON. The query API allows users to select individual fields within the JSON payload at query time but only within a partition with the exception of global indexes.
6 |
7 | Some Translators can support partial DynamoDB functionality and throw UnimplementedExceptions for other functionality.
8 |
9 | A translator class must extend `DynamoDSETranslator`.
10 |
11 | ## DSE Data Models:
12 |
13 | ### json_blob
14 |
15 | json_blob - currently implemented
16 |
17 | ```
18 | CREATE TABLE dynamoks.test (
19 | hash_key double,
20 | sort_key double,
21 | json_blob text,
22 | PRIMARY KEY (hash_key, sort_key)
23 | ) WITH CLUSTERING ORDER BY (sort_key ASC)
24 | ```
25 |
26 | This is the simplest implementation. The downside is that it cannot feasibly support the DDB UpdateItem API because it would require a read before write on every query. Update queries will return a not supported exception.
27 | ```
28 | cqlsh> select * from dynamoks.test ;
29 |
30 | hash_key | sort_key | json_blob
31 | ----------+----------+----------------------------------------------------------------------------------------------------------------------------------
32 | 1 | 2 | {"favorites":{"SS":["puppies","kittens","other cute animals"]},"hash_key":{"S":"1.0"},"city":{"S":"NYC"},"sort_key":{"S":"2.0"}}
33 |
34 | ```
35 | ### multi-version
36 | An LSM inspired data model could support the DDB UpdateItem API by versioning updates and using client implemented LWW semantics on read and potentially a cleanup (compaction-like) background job.
37 | ```
38 | CREATE TABLE dynamoks.test (
39 | hash_key double,
40 | sort_key double,
41 | version timeuuid,
42 | json_blob text,
43 | PRIMARY KEY (hash_key, sort_key, version)
44 | ) WITH CLUSTERING ORDER BY (sort_key ASC)
45 | ```
46 |
47 | Example data:
48 | ```
49 | cqlsh> select * from dynamoks.test ;
50 |
51 | hash_key | sort_key | version | json_blob
52 | ----------+----------+---------+---------------------------------------------------------------------------------------------------------------
53 | 1 | 2 | 1 | {"favorites":{"SS":["puppies","kittens","other cute animals"]},"hash_key":{"S":"1.0"},"sort_key":{"S":"2.0"}}
54 | 1 | 2 | 2 | {"favorites":{"SS":["kittens","other cute animals"]},"hash_key":{"S":"1.0"},"city":{"S":"NYC"},"sort_key":{"S":"2.0"}}
55 | ```
56 |
57 | For queries, the proxy would read the whole partition, merge and return a merged response:
58 | ```
59 | {"favorites":{"SS":["puppies","kittens","other cute animals"]},"hash_key":{"S":"1.0"},"city":{"S":"NYC"},"sort_key":{"S":"2.0"}}
60 | ```
61 |
62 |
63 |
64 | ### fully-denormalized
65 | For better multi-model and CQL support, the following data model would denormalize the dynamodb item into multiple records.
66 | ```
67 | CREATE TABLE dynamoks.test (
68 | hash_key double,
69 | sort_key double,
70 | column_name text,
71 | value_numeric double,
72 | value_string text,
73 | value_date date,
74 | PRIMARY KEY (hash_key, sort_key, column_name)
75 | ) WITH CLUSTERING ORDER BY (sort_key ASC)
76 | ```
77 |
78 | Example data:
79 | ```
80 | cqlsh> select * from dynamoks.test ;
81 |
82 | hash_key | sort_key | column_name | value_numeric | value_string | value_date
83 | ----------+----------+-------------+---------------+--------------+------------
84 | 1 | 2 | 'puppies' | 3 | |
85 | 1 | 2 | 'City' | | 'NYC' |
86 | ```
87 |
88 |
89 | For queries, the proxy would read the whole partition, merge and return a merged response:
90 | ```
91 | {"favorites":{"N":["puppies",3]},"hash_key":{"S":"1.0"},"city":{"S":"NYC"},"sort_key":{"S":"2.0"}}
92 | ```
93 |
94 | A mix is also possible (Fully denormalized / multi-version)
95 |
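96 | For reference, a hypothetical skeleton of a custom translator (the class name is illustrative; the constructor and method signatures mirror the abstract `DynamoDSETranslator` in this repository):
97 | 
98 | ```java
99 | import java.io.IOException;
100 | 
101 | import com.amazonaws.services.dynamodbv2.model.*;
102 | import com.datastax.powertools.dcp.DynamoDSETranslator;
103 | import com.datastax.powertools.dcp.api.DynamoDBResponse;
104 | import com.datastax.powertools.dcp.managed.dse.CassandraManager;
105 | 
106 | public class DynamoDSETranslatorDenormalized extends DynamoDSETranslator {
107 | 
108 |     public DynamoDSETranslatorDenormalized(CassandraManager cassandraManager) {
109 |         super(cassandraManager);
110 |     }
111 | 
112 |     // Each override maps one DynamoDB operation onto the chosen CQL data model;
113 |     // unimplemented operations simply throw, as partial translators are allowed to do.
114 |     @Override
115 |     public DynamoDBResponse createTable(CreateTableRequest payload) throws IOException {
116 |         throw new UnsupportedOperationException("not yet implemented");
117 |     }
118 | 
119 |     @Override
120 |     public DynamoDBResponse putItem(PutItemRequest payload) throws IOException {
121 |         throw new UnsupportedOperationException("not yet implemented");
122 |     }
123 | 
124 |     @Override
125 |     public DynamoDBResponse getItem(GetItemRequest payload) {
126 |         throw new UnsupportedOperationException("not yet implemented");
127 |     }
128 | 
129 |     @Override
130 |     public DynamoDBResponse query(QueryRequest payload) {
131 |         throw new UnsupportedOperationException("not yet implemented");
132 |     }
133 | 
134 |     @Override
135 |     public DynamoDBResponse deleteItem(DeleteItemRequest dir) {
136 |         throw new UnsupportedOperationException("not yet implemented");
137 |     }
138 | 
139 |     @Override
140 |     public DynamoDBResponse describeTable(DescribeTableRequest dtr) {
141 |         throw new UnsupportedOperationException("not yet implemented");
142 |     }
143 | 
144 |     @Override
145 |     public DynamoDBResponse deleteTable(DeleteTableRequest dbr) {
146 |         throw new UnsupportedOperationException("not yet implemented");
147 |     }
148 | }
149 | ```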
--------------------------------------------------------------------------------
/src/main/java/com/datastax/powertools/dcp/managed/ddbstreams/StreamsRecordProcessor.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp.managed.ddbstreams;
17 |
18 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
19 | import com.amazonaws.services.dynamodbv2.model.AttributeValue;
20 | import com.amazonaws.services.dynamodbv2.model.DeleteItemRequest;
21 | import com.amazonaws.services.dynamodbv2.model.PutItemRequest;
22 | import com.amazonaws.services.dynamodbv2.streamsadapter.model.RecordAdapter;
23 | import com.amazonaws.services.kinesis.clientlibrary.interfaces.v2.IRecordProcessor;
24 | import com.amazonaws.services.kinesis.clientlibrary.lib.worker.ShutdownReason;
25 | import com.amazonaws.services.kinesis.clientlibrary.types.InitializationInput;
26 | import com.amazonaws.services.kinesis.clientlibrary.types.ProcessRecordsInput;
27 | import com.amazonaws.services.kinesis.clientlibrary.types.ShutdownInput;
28 | import com.amazonaws.services.kinesis.model.Record;
29 | import org.slf4j.Logger;
30 | import org.slf4j.LoggerFactory;
31 |
32 | import java.nio.charset.StandardCharsets;
33 | import java.util.HashMap;
34 | import java.util.Map;
35 |
36 | public class StreamsRecordProcessor implements IRecordProcessor {
37 | private Integer checkpointCounter;
38 |
39 | private final AmazonDynamoDB dynamoDBClient;
40 | private final String tableName;
41 | private Logger logger = LoggerFactory.getLogger(StreamsRecordProcessor.class);
42 |
43 | public StreamsRecordProcessor(AmazonDynamoDB dynamoDBClient, String tableName) {
44 | this.dynamoDBClient = dynamoDBClient;
45 | this.tableName = tableName;
46 | }
47 |
48 | @Override
49 | public void initialize(InitializationInput initializationInput) {
50 | checkpointCounter = 0;
51 | }
52 |
53 | @Override
54 | public void processRecords(ProcessRecordsInput processRecordsInput) {
55 | for (Record record : processRecordsInput.getRecords()) {
56 | String data = new String(record.getData().array(), StandardCharsets.UTF_8);
57 | logger.debug(data);
58 | if (record instanceof RecordAdapter) {
59 | com.amazonaws.services.dynamodbv2.model.Record streamRecord = ((RecordAdapter) record)
60 | .getInternalObject();
61 | // Replay the stream event against the target table: PutItem with the full new image for INSERT/MODIFY, DeleteItem by key for REMOVE.
62 | switch (streamRecord.getEventName()) {
63 | case "INSERT": case "MODIFY":
64 | Map<String, AttributeValue> items = streamRecord.getDynamodb().getNewImage();
65 | PutItemRequest putItemRequest = new PutItemRequest().withTableName(tableName).withItem(items);
66 | dynamoDBClient.putItem(putItemRequest);
67 | break;
68 | case "REMOVE":
69 | Map<String, AttributeValue> keys = streamRecord.getDynamodb().getKeys();
70 | DeleteItemRequest deleteItemRequest = new DeleteItemRequest().withTableName(tableName).withKey(keys);
71 | dynamoDBClient.deleteItem(deleteItemRequest);
72 | }
73 | }
74 | checkpointCounter += 1;
75 | if (checkpointCounter % 10 == 0) { // checkpoint every 10 records to bound replay after a restart
76 | try {
77 | processRecordsInput.getCheckpointer().checkpoint();
78 | }
79 | catch (Exception e) {
80 | e.printStackTrace();
81 | }
82 | }
83 | }
84 |
85 | }
86 |
87 | @Override
88 | public void shutdown(ShutdownInput shutdownInput) {
89 | if (shutdownInput.getShutdownReason() == ShutdownReason.TERMINATE) {
90 | try {
91 | shutdownInput.getCheckpointer().checkpoint();
92 | }
93 | catch (Exception e) {
94 | e.printStackTrace();
95 | }
96 | }
97 |
98 | }
99 | }
100 |
--------------------------------------------------------------------------------
/README-JP.md:
--------------------------------------------------------------------------------
1 | # dynamo-cassandra-proxy
2 |
3 | `dynamo-cassandra-proxy` consists of a scalable proxy layer that sits between your application and Apache Cassandra.
4 | 
5 | The proxy layer provides compatibility with the DynamoDB SDK, which allows existing DynamoDB applications to read and write data against Cassandra without application changes.
6 | 
7 | It also supports keeping DynamoDB tables in sync with Cassandra via DynamoDB Streams.
8 | 
9 | ## Config
10 | 
11 | Create your yaml based on the template:
12 | 
13 |     cp conf/dynamo-cassandra-proxy.yaml.template conf/dynamo-cassandra-proxy.yaml
14 | 
15 | The following are the options supported by the proxy:
16 | 
17 | | Option | Description |
18 | | -------- | ---------- |
19 | |streamsEnabled| When set to true, enables the proxy to pull live data from an existing DynamoDB table|
20 | |dynamoRegion| Only needed when streaming is enabled; the region your DynamoDB table is in|
21 | |dynamoAccessKey| Only needed when streaming is enabled; used to connect to DynamoDB Streams|
22 | |dynamoSecretKey| Only needed when streaming is enabled; used to connect to DynamoDB Streams|
23 | |awsDynamodbEndpoint| Only needed when streaming is enabled; used to connect to DynamoDB Streams|
24 | |contactPoints| Contact points used to connect to the Apache Cassandra(TM) cluster. If you are using the docker option below, just leave localhost|
25 | |dockerCassandra| When set to true, stands up Cassandra in your local docker. Ensure the docker daemon is installed and running and that your user can run `docker ps`|
26 |
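For reference, a minimal `conf/dynamo-cassandra-proxy.yaml` might look like the following (illustrative values only; start from the template above):

```
contactPoints: localhost
dockerCassandra: true
streamsEnabled: false
# Required only when streamsEnabled is true:
# dynamoRegion: us-east-2
# dynamoAccessKey: <access-key>
# dynamoSecretKey: <secret-key>
# awsDynamodbEndpoint: http://dynamodb.us-east-2.amazonaws.com
```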
27 |
28 | ## To run locally
29 | 
30 | Clone:
31 | 
32 |     git clone git@github.com:datastax/dynamo-cassandra-proxy.git
33 | 
34 | Build:
35 | 
36 |     mvn package
37 | 
38 | Run the proxy: whether you point it at your own Cassandra cluster or let it stand up a Cassandra node for you via the dockerCassandra option in the yaml file, you can run the code locally with:
39 | 
40 |     java -Ddw.contactPoints="$contactPoints" -cp target/dynamodb-cassandra-proxy-0.1.0.jar com.datastax.powertools.dcp.DCProxyApplication server conf/dynamo-cassandra-proxy.yaml
41 | 
42 | Once started, the proxy listens on port 8080. Point your DynamoDB application at `<hostname>:8080` in the SDK. A sample connection string (in Java) looks as follows:
43 | 
44 |     ClientConfiguration config = new ClientConfiguration();
45 |     config.setMaxConnections(dynamodbMaxConnections);
46 |     String dynamodbEndpoint = "localhost:8080";
47 |     String signinRegion = "dummy";
48 |     AwsClientBuilder.EndpointConfiguration endpointConfiguration = new AwsClientBuilder
49 |             .EndpointConfiguration("http://" + dynamodbEndpoint, signinRegion);
50 |     ddbBuilder = AmazonDynamoDBClientBuilder.standard()
51 |             .withClientConfiguration(config)
52 |             .withEndpointConfiguration(endpointConfiguration);
53 | 
54 | Note: `MaxConnections` is the main lever for getting the AWS SDK to perform beyond very basic levels. For reference, we have tested values up to the maximum of 50 and saw nearly linear scaling on a medium-sized box. If you are benchmarking and trying to saturate a Cassandra cluster, crank this value up.
55 |
56 |
57 | ## To run via docker-compose
58 | 
59 | Build the app:
60 | 
61 |     mvn package
62 | 
63 | Build and run the docker containers:
64 | 
65 |     docker-compose up
66 | 
67 | 
68 | ## To run in local Kubernetes
69 | 
70 | Set up the Cassandra config map:
71 | 
72 |     kubectl create configmap cassandra-config \
73 |     --from-file=common/cassandra/conf-dir/resources/cassandra/conf
74 | 
75 | Apply the k8s yaml:
76 | 
77 |     kubectl apply -f k8s-local/proxy-suite.yaml
78 | 
79 | At this point, your pods should look as follows:
80 |
81 | ```
82 | $ kubectl get pods [2:34:13]
83 | NAME READY STATUS RESTARTS AGE
84 | cassandra-0 1/1 Running 0 2m35s
85 | cassandra-1 1/1 Running 0 168s
86 | cassandra-2            1/1     Running   0          123s
87 | dynamo-cass-proxy-0 1/1 Running 4 63s
88 | ```
89 |
90 | To terminate your deployment run:
91 | 
92 |     kubectl delete -f k8s-local/proxy-suite.yaml
93 | 
94 | 
95 | ## Contributing
96 | 
97 | A good place to start might be fleshing out your own Translator.
98 | For details on translators see [Translators in the docs](docs/Translators-JP.md).
99 | 
100 | ## MVP Roadmap:
101 | 
102 | Checked items are currently implemented:
103 | 
104 | - [x] CreateTable - done in json_blob
105 | - [x] DeleteItem - done in json_blob
106 | - [ ] DeleteTable
107 | - [x] GetItem - done in json_blob
108 | - [x] PutItem - done in json_blob
109 | - [ ] Query - simple cases done in json_blob
110 | - [ ] Scan
111 | - [x] Hybrid functionality - DynamoDB to Cassandra
112 | - [ ] Hybrid functionality - Cassandra to DynamoDB
113 | 
114 | **Other features** not yet implemented:
115 |
116 | - UpdateItem
117 | - BatchGetItem
118 | - BatchWriteItem
119 | - DescribeStream
120 | - DescribeTable
121 | - DescribeLimits
122 | - DescribeTimeToLive
123 | - GetRecords
124 | - GetShardIterator
125 | - ListStreams
126 | - ListTables
127 | - ListTagsOfResource
128 | - TagResource
129 | - UntagResource
130 | - UpdateTable
131 | - UpdateTimeToLive
132 |
133 | ## License
134 | This project is licensed under the Apache Public License 2.0
135 |
--------------------------------------------------------------------------------
/k8s-local/proxy-suite.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Service
3 | metadata:
4 | name: cassandra-ext-lb
5 | labels:
6 | app: cassandra
7 | spec:
8 | type: LoadBalancer
9 | ports:
10 | - port: 9042
11 | name: cql-port
12 | selector:
13 | app: cassandra
14 | ---
15 | apiVersion: v1
16 | kind: Service
17 | metadata:
18 | name: cassandra
19 | labels:
20 | app: cassandra
21 | spec:
22 | ports:
23 | - port: 9042
24 | name: cql-port
25 | - port: 9103
26 | name: prom-port
27 | clusterIP: None
28 | selector:
29 | app: cassandra
30 | ---
31 | apiVersion: apps/v1
32 | kind: StatefulSet
33 | metadata:
34 | name: cassandra
35 | spec:
36 | selector:
37 | matchLabels:
38 | app: cassandra
39 | serviceName: "cassandra"
40 | replicas: 3
41 | podManagementPolicy: OrderedReady
42 | updateStrategy:
43 | type: RollingUpdate
44 | template:
45 | metadata:
46 | labels:
47 | app: cassandra
48 | spec:
49 | securityContext:
50 | fsGroup: 999
51 | affinity:
52 | podAntiAffinity:
53 | requiredDuringSchedulingIgnoredDuringExecution:
54 | - labelSelector:
55 | matchExpressions:
56 | - key: app
57 | operator: In
58 | values:
59 | - cassandra
60 | topologyKey: kubernetes.io/hostname
61 | terminationGracePeriodSeconds: 12000
62 | containers:
63 | - name: cassandra
64 | image: datastax/ddac:latest
65 | imagePullPolicy: IfNotPresent
66 | resources:
67 | requests:
68 | cpu: "2"
69 | memory: "4000Mi"
70 | env:
71 | - name: DS_LICENSE
72 | value: accept
73 | - name: SEEDS
74 | value: cassandra-0.cassandra.default.svc.cluster.local,cassandra-1.cassandra.default.svc.cluster.local,cassandra-2.cassandra.default.svc.cluster.local
75 | - name: CLUSTER_NAME
76 | value: "Test_Cluster"
77 | - name: NUM_TOKENS
78 | value: "64"
79 | - name: DC
80 | value: "DC-1"
81 | - name: RACK
82 | value: "rack-1"
83 | - name: SNITCH
84 | value: GossipingPropertyFileSnitch
85 | ports:
86 | - containerPort: 7000
87 | name: intra-node-port
88 | - containerPort: 7001
89 | name: tls-intra-node
90 | - containerPort: 7199
91 | name: jmx-port
92 | - containerPort: 8609
93 | name: inter-node-msg
94 | - containerPort: 9042
95 | name: cql-port
96 | - containerPort: 9160
97 | name: thrift-port
98 | - containerPort: 9103
99 | name: prom-port
100 | - containerPort: 61621
101 | name: ds-agent-port
102 | volumeMounts:
103 | - name: cassandra-data
104 | mountPath: /var/lib/cassandra
105 | - name: config-volume
106 | mountPath: /config
107 | - name: prometheus-volume
108 | mountPath: /opt/cassandra/resources/cassandra/collectd/etc/collectd
109 | livenessProbe:
110 | tcpSocket:
111 | port: 9042
112 | initialDelaySeconds: 900
113 | timeoutSeconds: 1
114 | periodSeconds: 30
115 | failureThreshold: 10
116 | lifecycle:
117 | postStart:
118 | exec:
119 | command: ['/bin/sh', '-c', 'until cqlsh -e "describe cluster"; do echo waiting for Cassandra Startup Complete; sleep 10; done;']
120 | preStop:
121 | exec:
122 | command: ["/bin/sh", "-c", "exec nodetool decommission"]
123 | volumes:
124 | - name: config-volume
125 | configMap:
126 | name: cassandra-config
127 |       - name: prometheus-volume
128 |         emptyDir: {}
129 |       - name: cassandra-data
130 |         emptyDir: {}
130 | ---
131 | apiVersion: v1
132 | kind: Service
133 | metadata:
134 | name: dynamo-cass-proxy-ext-lb
135 | labels:
136 | app: dynamo-cass-proxy
137 | spec:
138 | type: LoadBalancer
139 | ports:
140 | - port: 8080
141 | name: port-8080
142 | selector:
143 | app: dynamo-cass-proxy
144 | ---
145 | apiVersion: v1
146 | kind: Service
147 | metadata:
148 | name: dynamo-cass-proxy
149 | labels:
150 | app: dynamo-cass-proxy
151 | spec:
152 | ports:
153 | - port: 8080
154 | name: port-8080
155 | clusterIP: None
156 | selector:
157 | app: dynamo-cass-proxy
158 | ---
159 | apiVersion: apps/v1
160 | kind: StatefulSet
161 | metadata:
162 | name: dynamo-cass-proxy
163 | spec:
164 | selector:
165 | matchLabels:
166 | app: dynamo-cass-proxy
167 | serviceName: "dynamo-cass-proxy"
168 | replicas: 1
169 | podManagementPolicy: OrderedReady
170 | updateStrategy:
171 | type: RollingUpdate
172 | template:
173 | metadata:
174 | labels:
175 | app: dynamo-cass-proxy
176 | spec:
177 | containers:
178 | - name: dynamo-cass-proxy
179 | image: phact/dynamo-cassandra-proxy
180 | command: ['/bin/sh', '-c', 'java -Ddw.contactPoints="cassandra" -cp /opt/dynamo-cassandra-proxy/dynamodb-cassandra-proxy-0.1.0.jar com.datastax.powertools.dcp.DCProxyApplication server /opt/dynamo-cassandra-proxy/dynamo-cassandra-proxy.yaml']
181 | imagePullPolicy: IfNotPresent
182 | resources:
183 | # requests:
184 | # cpu: "2"
185 | # memory: "4000Mi"
186 | ports:
187 | - containerPort: 8080
188 | name: port-8080
189 | # configMap:
190 | # name: dynamo-cass-proxy-config
191 |
--------------------------------------------------------------------------------
/src/main/java/com/datastax/powertools/dcp/DCProxyConfiguration.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp;
17 |
18 | import com.fasterxml.jackson.annotation.JsonProperty;
19 | import io.dropwizard.Configuration;
20 |
21 | public class DCProxyConfiguration extends Configuration {
22 |
23 | //Cassandra Stuff
24 | @JsonProperty
25 | private int cqlPort = 9042;
26 | @JsonProperty
27 | private String contactPoints = "localhost";
28 | @JsonProperty
29 | private String localDC = "dc1";
30 | @JsonProperty
31 | private String cqlUserName = "cassandra";
32 | @JsonProperty
33 | private String cqlPassword = "cassandra";
34 | @JsonProperty
35 | private String keyspaceName = "dynamoks";
36 | @JsonProperty
37 | private String replicationStrategy = "{'class': 'SimpleStrategy', 'replication_factor': 1 }";
38 |
39 | //Dynamo Stuff
40 | @JsonProperty
41 | private String dynamoRegion = "east-nyc-madeup";
42 | private String dsDynamodbEndpoint = "http://localhost:8080";
43 | @JsonProperty
44 | private String awsDynamodbEndpoint = "http://dynamodb.us-east-2.amazonaws.com";
45 | @JsonProperty
46 | private String streamsEndpoint = "https://streams.dynamodb.us-east-2.amazonaws.com";
47 |
48 | @JsonProperty
49 | private TranslatorType translatorImplementation = TranslatorType.JSON_BLOB;
50 | @JsonProperty
51 | private String dynamoAccessKey = "fake-key";
52 | @JsonProperty
53 | private String dynamoSecretKey = "fake-secret";
54 | @JsonProperty
55 | private boolean streamsEnabled;
56 | @JsonProperty
57 | private boolean dockerCassandra;
58 |
59 | @JsonProperty
60 | public void setContactPoints(String contactPoints) {
61 | this.contactPoints = contactPoints;
62 | }
63 |
64 | @JsonProperty
65 | public String getContactPoints() {
66 | return contactPoints;
67 | }
68 |
69 | @JsonProperty
70 | public int getCqlPort() {
71 | return cqlPort;
72 | }
73 |
74 | @JsonProperty
75 | public void setCqlPort(int cqlPort) {
76 | this.cqlPort = cqlPort;
77 | }
78 |
79 | @JsonProperty
80 | public String getCqlUserName() {
81 | return cqlUserName;
82 | }
83 |
84 | @JsonProperty
85 | public void setCqlUserName(String cqlUserName) {
86 | this.cqlUserName = cqlUserName;
87 | }
88 |
89 | @JsonProperty
90 | public String getCqlPassword() {
91 | return cqlPassword;
92 | }
93 |
94 | @JsonProperty
95 | public void setCqlPassword(String cqlPassword) {
96 | this.cqlPassword = cqlPassword;
97 | }
98 |
99 | @JsonProperty
100 | public String getKeyspaceName() {
101 | return keyspaceName;
102 | }
103 |
104 | @JsonProperty
105 | public void setKeyspaceName(String keyspaceName) {
106 | this.keyspaceName = keyspaceName;
107 | }
108 |
109 | @JsonProperty
110 | public String getReplicationStrategy() {
111 | return replicationStrategy;
112 | }
113 |
114 | @JsonProperty
115 | public void setReplicationStrategy(String replicationStrategy) {
116 | this.replicationStrategy = replicationStrategy;
117 | }
118 |
119 | @JsonProperty
120 | public String getDynamoRegion() {
121 | return dynamoRegion;
122 | }
123 |
124 | @JsonProperty
125 | public void setDynamoRegion(String dynamoRegion) {
126 | this.dynamoRegion = dynamoRegion;
127 | }
128 |
129 | @JsonProperty
130 | public String getDsDynamodbEndpoint() {
131 | return dsDynamodbEndpoint;
132 | }
133 |
134 | @JsonProperty
135 | public void setDsDynamodbEndpoint(String dsDynamodbEndpoint) {
136 | this.dsDynamodbEndpoint = dsDynamodbEndpoint;
137 | }
138 |
139 | public TranslatorType getTranslatorImplementation() {
140 | return this.translatorImplementation;
141 | }
142 |
143 | @JsonProperty
144 | public String getDynamoAccessKey() {
145 | return dynamoAccessKey;
146 | }
147 |
148 | @JsonProperty
149 | public String getDynamoSecretKey() {
150 | return dynamoSecretKey;
151 | }
152 |
153 | @JsonProperty
154 | public String getAwsDynamodbEndpoint() {
155 | return awsDynamodbEndpoint;
156 | }
157 |
158 | @JsonProperty
159 | public void setAwsDynamodbEndpoint(String awsDynamodbEndpoint) {
160 | this.awsDynamodbEndpoint = awsDynamodbEndpoint;
161 | }
162 |
163 | public String getStreamsEndpoint() {
164 | return streamsEndpoint;
165 | }
166 |
167 | public void setStreamsEndpoint(String streamsEndpoint) {
168 | this.streamsEndpoint = streamsEndpoint;
169 | }
170 |
171 | public boolean isStreamsEnabled() {
172 | return streamsEnabled;
173 | }
174 |
175 | public boolean isDockerCassandra() {
176 | return dockerCassandra;
177 | }
178 |
179 | public String getLocalDC() {
180 | return localDC;
181 | }
182 | }
183 |
--------------------------------------------------------------------------------
/gke/proxy-suite.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: storage.k8s.io/v1
2 | kind: StorageClass
3 | metadata:
4 | name: fast
5 | provisioner: kubernetes.io/gce-pd
6 | parameters:
7 | type: pd-ssd
8 | ---
9 | apiVersion: v1
10 | kind: Service
11 | metadata:
12 | name: cassandra-ext-lb
13 | labels:
14 | app: cassandra
15 | spec:
16 | type: LoadBalancer
17 | ports:
18 | - port: 9042
19 | name: cql-port
20 | selector:
21 | app: cassandra
22 | ---
23 | apiVersion: v1
24 | kind: Service
25 | metadata:
26 | name: cassandra
27 | labels:
28 | app: cassandra
29 | spec:
30 | ports:
31 | - port: 9042
32 | name: cql-port
33 | clusterIP: None
34 | selector:
35 | app: cassandra
36 | ---
37 | apiVersion: apps/v1
38 | kind: StatefulSet
39 | metadata:
40 | name: cassandra
41 | spec:
42 | selector:
43 | matchLabels:
44 | app: cassandra
45 | serviceName: "cassandra"
46 | replicas: 3
47 | podManagementPolicy: OrderedReady
48 | updateStrategy:
49 | type: RollingUpdate
50 | template:
51 | metadata:
52 | labels:
53 | app: cassandra
54 | spec:
55 | securityContext:
56 | fsGroup: 999
57 | affinity:
58 | podAntiAffinity:
59 | requiredDuringSchedulingIgnoredDuringExecution:
60 | - labelSelector:
61 | matchExpressions:
62 | - key: app
63 | operator: In
64 | values:
65 | - cassandra
66 | topologyKey: kubernetes.io/hostname
67 | terminationGracePeriodSeconds: 12000
68 | containers:
69 | - name: cassandra
70 | image: datastax/ddac:latest
71 | imagePullPolicy: IfNotPresent
72 | resources:
73 | requests:
74 | cpu: "2"
75 | memory: "4000Mi"
76 | env:
77 | - name: DS_LICENSE
78 | value: accept
79 | - name: SEEDS
80 | value: cassandra-0.cassandra.default.svc.cluster.local,cassandra-1.cassandra.default.svc.cluster.local,cassandra-2.cassandra.default.svc.cluster.local
81 | - name: CLUSTER_NAME
82 | value: "Test_Cluster"
83 | - name: NUM_TOKENS
84 | value: "64"
85 | - name: DC
86 | value: "DC-1"
87 | - name: RACK
88 | value: "rack-1"
89 | - name: SNITCH
90 | value: GossipingPropertyFileSnitch
91 | ports:
92 | - containerPort: 7000
93 | name: intra-node-port
94 | - containerPort: 7001
95 | name: tls-intra-node
96 | - containerPort: 7199
97 | name: jmx-port
98 | - containerPort: 8609
99 | name: inter-node-msg
100 | - containerPort: 9042
101 | name: cql-port
102 | - containerPort: 9160
103 | name: thrift-port
104 | - containerPort: 9103
105 | name: ds-agent-port
106 | volumeMounts:
107 | - name: cassandra-data
108 | mountPath: /var/lib/cassandra
109 | - name: config-volume
110 | mountPath: /config
111 | livenessProbe:
112 | tcpSocket:
113 | port: 9042
114 | initialDelaySeconds: 900
115 | timeoutSeconds: 1
116 | periodSeconds: 30
117 | failureThreshold: 10
118 | lifecycle:
119 | postStart:
120 | exec:
121 | command: ['/bin/sh', '-c', 'until cqlsh -e "describe cluster"; do echo waiting for DSE Startup Complete; sleep 10; done;']
122 | preStop:
123 | exec:
124 | command: ["/bin/sh", "-c", "exec nodetool decommission"]
125 | volumes:
126 | - name: config-volume
127 | configMap:
128 | name: cassandra-config
129 | volumeClaimTemplates:
130 | - metadata:
131 | name: cassandra-data
132 | spec:
133 | accessModes: [ "ReadWriteOnce" ]
134 | storageClassName: fast
135 | resources:
136 | requests:
137 | storage: 40Gi
138 | ---
139 | apiVersion: v1
140 | kind: Service
141 | metadata:
142 | name: dynamo-cass-proxy-ext-lb
143 | labels:
144 | app: dynamo-cass-proxy
145 | spec:
146 | type: LoadBalancer
147 | ports:
148 | - port: 8080
149 | name: port-8080
150 | selector:
151 |     app: dynamo-cass-proxy
152 | ---
153 | apiVersion: v1
154 | kind: Service
155 | metadata:
156 | name: dynamo-cass-proxy
157 | labels:
158 |     app: dynamo-cass-proxy
159 | spec:
160 | ports:
161 | - port: 8080
162 | name: port-8080
163 | clusterIP: None
164 | selector:
165 | app: dynamo-cass-proxy
166 | ---
167 | apiVersion: apps/v1
168 | kind: StatefulSet
169 | metadata:
170 | name: dynamo-cass-proxy
171 | spec:
172 | selector:
173 | matchLabels:
174 | app: dynamo-cass-proxy
175 | serviceName: "dynamo-cass-proxy"
176 | replicas: 1
177 | podManagementPolicy: OrderedReady
178 | updateStrategy:
179 | type: RollingUpdate
180 | template:
181 | metadata:
182 | labels:
183 | app: dynamo-cass-proxy
184 | spec:
185 | securityContext:
186 | fsGroup: 472
187 | containers:
188 | - name: dynamo-cass-proxy
189 | image: phact/dynamo-cassandra-proxy
190 | command: ['/bin/sh', '-c', 'java -Ddw.contactPoints="cassandra" -cp /opt/dynamo-cassandra-proxy/dynamodb-cassandra-proxy-0.1.0.jar com.datastax.powertools.dcp.DCProxyApplication server /opt/dynamo-cassandra-proxy/dynamo-cassandra-proxy.yaml']
191 | imagePullPolicy: IfNotPresent
192 | resources:
193 | requests:
194 | cpu: "2"
195 | memory: "4000Mi"
196 | ports:
197 | - containerPort: 8080
198 | name: port-8080
199 |
--------------------------------------------------------------------------------
/migrate/pom.xml.dse:
--------------------------------------------------------------------------------
1 |
3 | 4.0.0
4 |
5 | com.datastax.powertools.migrate
6 | dynamoDB
7 | 0.1
8 | jar
9 |
10 |
11 | UTF-8
12 | 6.7.3
13 | 2.11.8
14 | 2.11
15 | 3.0.0
16 | 2.0.10
17 | 3.2
18 | 4.12
19 |
20 |
21 |
22 |
23 | com.datastax.dse
24 | dse-spark-dependencies
25 | ${dse.version}
26 | provided
27 |
28 |
29 | com.github.traviscrawford
30 | spark-dynamodb
31 | 0.0.14-SNAPSHOT
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 | com.datastax.spark
48 | spark-cassandra-connector-embedded_${scala.main.version}
49 | ${connector.version}
50 | test
51 |
52 |
53 | org.scalatest
54 | scalatest_${scala.main.version}
55 | ${scalatest.version}
56 | test
57 |
58 |
59 | junit
60 | junit
61 | ${junit.version}
62 | test
63 |
64 |
65 | org.apache.cassandra
66 | cassandra-all
67 | ${cassandra.version}
68 | test
69 |
70 |
71 |
72 |
73 |
74 | DataStax-Repo
75 | https://repo.datastax.com/public-repos/
76 |
77 |
78 |
79 |
80 |
81 |
82 | net.alchim31.maven
83 | scala-maven-plugin
84 | 3.2.2
85 |
86 |
87 | process-sources
88 |
89 | compile
90 | testCompile
91 |
92 |
93 | ${project.build.sourceDirectory}/../scala
94 |
95 |
96 |
97 |
98 |
99 | org.apache.maven.plugins
100 | maven-shade-plugin
101 | 2.4.3
102 |
103 |
104 | package
105 |
106 | shade
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 | org.apache.maven.plugins
123 | maven-surefire-plugin
124 | 2.7
125 |
126 | true
127 |
128 |
129 |
130 |
131 | org.scalatest
132 | scalatest-maven-plugin
133 | 1.0
134 |
135 | ${project.build.directory}/surefire-reports
136 | .
137 | WDF TestSuite.txt
138 |
139 |
140 |
141 | test
142 |
143 | test
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
--------------------------------------------------------------------------------
/migrate/src/main/scala/com/datastax/powertools/DynamoReader.scala:
--------------------------------------------------------------------------------
1 | package com.datastax.powertools.migrate
2 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder
3 | import com.datastax.driver.core.exceptions.AlreadyExistsException
4 | import com.github.traviscrawford.spark.dynamodb._
5 | import org.apache.spark.sql.{SaveMode, SparkSession}
6 | import org.apache.spark.sql.cassandra._
7 | import org.apache.spark.sql.functions._
8 | import com.datastax.spark.connector._
9 | import org.slf4j.LoggerFactory
10 |
11 | import scala.collection.mutable.ListBuffer
12 | import scala.util.control.NonFatal
13 |
14 |
15 | object dynamoDB {
16 |
17 | private val log = LoggerFactory.getLogger(this.getClass)
18 | def getKeys(table_name: String): List[String] = {
19 |     log.info("Getting description for {}", table_name)
20 | val ddb = AmazonDynamoDBClientBuilder.defaultClient
21 | var keyList = new ListBuffer[String]()
22 | try {
23 | val table_info = ddb.describeTable(table_name).getTable
24 | val keyschema = table_info.getKeySchema()
25 | log.info("Keys")
26 | import scala.collection.JavaConversions._
27 | for (k <- keyschema) {
28 | log.info(k.getAttributeName + "(" + k.getKeyType + ")\n")
29 | keyList += k.getAttributeName
30 | }
31 | } catch {
32 | case NonFatal(err) =>
33 | log.error(s"Failed getting table information for: ${table_name}", err)
34 | }
35 | log.info("\nDone!")
36 | (keyList.toList)
37 | }
38 |
39 | def main(args: Array[String]) {
40 |
41 | log.info("entered main")
42 | var table_name = "na"
43 |
44 | if (args.length > 0) {
45 | table_name = args(0)
46 | } else {
47 |       log.info(s"not enough parameters for job: ${args.length}")
48 | }
49 | log.info("entered main " + table_name)
50 |
51 | val sparkJob = new SparkJob()
52 | try {
53 | sparkJob.runJob(table_name)
54 | } catch {
55 |       case ex: Exception =>
56 |         log.error("error in main running spark job", ex)
57 | }
58 | }
59 |
60 |
61 | class SparkJob extends Serializable {
62 |
63 | log.info("before build spark session")
64 |
65 | def runJob(table_name: String) = {
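      // Overall flow: load the DynamoDB table into a Spark DataFrame, pack the
      // non-key columns into a single json_blob column, then create a matching
      // Cassandra table in the testks keyspace and append the rows to it.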
66 | val appName = "DynamoReader"
67 | val sparkSession =
68 | SparkSession.builder
69 | .appName(appName)
70 | .config("spark.cassandra.connection.host", "node0")
71 | .getOrCreate()
72 |
73 | log.info(s"before read dynamodb " + table_name)
74 | var dynamoDF = sparkSession.emptyDataFrame
75 | try {
76 | dynamoDF = sparkSession.read.dynamodb(table_name)
77 | } catch {
78 | case ex: Exception =>
79 | log.error("Did not find " + table_name + " in DynamoDB")
80 | System.exit (2)
81 | }
82 | dynamoDF.printSchema()
83 | // this caused breakage due to type conversions
84 | // dynamoDF.show(5)
85 | val keycols = getKeys(table_name)
86 | println(s"print key columns")
87 | keycols.foreach {println}
88 | // hash_key is always first
89 | // initialize sort key as it may be null
90 | var sort_key = "na"
91 | if (keycols.length > 1) sort_key = keycols(1).toLowerCase
92 | val hash_key = keycols(0).toLowerCase()
93 | // gets all columns labels into a list, this will be used for list of json columns
94 | val cols = dynamoDF.columns.toSeq
95 | // remove the hash_key and the sort_key as they should not be in json string
96 | val othercols = cols.filterNot(keycols.toSet)
97 | // val othercols = cols.filterNot(x => x == hash_key).filterNot(x => x == sort_key)
98 | println(s"print columns for cols")
99 | cols.foreach {println}
100 | println(s"print columns for othercols")
101 | othercols.foreach {println}
102 | // create string to be used within the expression to add the structype column
103 | val expressString = "(" + othercols.mkString(",") + ")"
104 | // add the structure column and the json_blob column
105 | val newDF = dynamoDF.withColumn("structure",expr(expressString))
106 | .withColumn("json_blob", expr("to_json(structure)"))
107 | // this show causes breakage on long to string conversion
108 | // newDF.show(2)
109 | newDF.printSchema()
110 | // Only need to write out the three columns
111 | var writeDF = sparkSession.emptyDataFrame
112 | if (keycols.length > 1) {
113 | writeDF = newDF.select(col(hash_key), col(sort_key), col("json_blob"))
114 | } else {
115 | writeDF = newDF.select(col(hash_key), col("json_blob"))
116 | }
117 | writeDF.printSchema()
118 | println(s"before create cassandra table, $table_name, $hash_key, $sort_key")
119 | try {
120 | if (keycols.length > 1) {
121 | writeDF.createCassandraTable("testks",table_name.toLowerCase(),partitionKeyColumns = Some(Seq(hash_key))
122 | ,clusteringKeyColumns = Some(Seq(sort_key)))
123 | } else {
124 | writeDF.createCassandraTable("testks",table_name.toLowerCase(),partitionKeyColumns = Some(Seq(hash_key))
125 | )
126 | }
127 | } catch {
128 | case ex: AlreadyExistsException => log.info(table_name + " already existed so did not recreate");
129 | case ex: Exception => ex.printStackTrace();
130 | }
131 | log.info(s"before write cassandra, $table_name, $hash_key, $sort_key")
132 | try {
133 | writeDF.write.cassandraFormat(table_name.toLowerCase, "testks").mode(SaveMode.Append).save()
134 | } catch {
135 | case ex: Exception =>
136 | log.error("Error in write to " + table_name)
137 | ex.printStackTrace()
138 | }
139 | log.info(s"after write cassandra, $table_name, $hash_key, $sort_key")
140 | }
141 | }
142 |
143 | }
144 |
145 |
--------------------------------------------------------------------------------
/src/main/java/com/datastax/powertools/dcp/resources/DCProxyResource.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp.resources;
17 |
18 | import com.amazonaws.AmazonWebServiceResult;
19 | import com.amazonaws.services.dynamodbv2.model.CreateTableRequest;
20 | import com.amazonaws.services.dynamodbv2.model.DeleteItemRequest;
21 | import com.amazonaws.services.dynamodbv2.model.DeleteTableRequest;
22 | import com.amazonaws.services.dynamodbv2.model.DescribeTableRequest;
23 | import com.amazonaws.services.dynamodbv2.model.GetItemRequest;
24 | import com.amazonaws.services.dynamodbv2.model.PutItemRequest;
25 | import com.amazonaws.services.dynamodbv2.model.QueryRequest;
26 | import com.datastax.powertools.dcp.DynamoDSETranslator;
27 | import com.datastax.powertools.dcp.api.DynamoDBResponse;
28 | import com.datastax.powertools.dcp.api.DynamoStatementType;
29 | import com.datastax.powertools.dcp.managed.dse.CassandraManager;
30 | import com.fasterxml.jackson.core.JsonProcessingException;
31 | import org.glassfish.jersey.server.ManagedAsync;
32 | import org.slf4j.Logger;
33 | import org.slf4j.LoggerFactory;
34 |
35 | import javax.ws.rs.Consumes;
36 | import javax.ws.rs.HeaderParam;
37 | import javax.ws.rs.POST;
38 | import javax.ws.rs.Path;
39 | import javax.ws.rs.Produces;
40 | import javax.ws.rs.WebApplicationException;
41 | import javax.ws.rs.container.AsyncResponse;
42 | import javax.ws.rs.container.Suspended;
43 | import javax.ws.rs.core.Context;
44 | import javax.ws.rs.core.HttpHeaders;
45 | import javax.ws.rs.core.MediaType;
46 | import javax.ws.rs.core.Response;
47 | import static com.datastax.powertools.dcp.DynamoDSETranslator.awsRequestMapper;
48 |
49 | @Path("/")
50 | @Produces(MediaType.APPLICATION_JSON)
51 | public class DCProxyResource {
52 |
53 | private final CassandraManager dseManager;
54 | private DynamoDSETranslator ddt;
55 | private Logger logger = LoggerFactory.getLogger(DCProxyResource.class);
56 |
57 |
58 |
59 | public DCProxyResource(CassandraManager dsManager, DynamoDSETranslator ddt) {
60 | this.dseManager = dsManager;
61 | this.ddt = ddt;
62 | }
63 |
64 | @POST
65 | @ManagedAsync
66 | @Consumes("application/x-amz-json-1.0")
67 | @Produces("application/json")
68 | public void asyncDynamoRequestHandler(@Suspended final AsyncResponse asyncResponse, @Context HttpHeaders headers, @HeaderParam("X-Amz-Target") String target, String payload) {
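        // The X-Amz-Target header arrives as "DynamoDB_20120810.<Operation>"; keep just the operation name for dispatch.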
69 | target = target.split("\\.")[1];
70 |
71 | DynamoStatementType statementType = DynamoStatementType.valueOf(target);
72 | DynamoDBResponse response = null;
73 | try {
74 | switch (statementType){
75 | case CreateTable: {
76 | CreateTableRequest createTableRequest = awsRequestMapper.readValue(payload, CreateTableRequest.class);
77 | response = ddt.createTable(createTableRequest);
78 | }
79 | break;
80 | case DeleteTable : {
81 | DeleteTableRequest deleteTableRequest = awsRequestMapper.readValue(payload, DeleteTableRequest.class);
82 | response = ddt.deleteTable(deleteTableRequest);
83 | }
84 | break;
85 | case DescribeTable: {
86 | DescribeTableRequest describeTableRequest = awsRequestMapper.readValue(payload, DescribeTableRequest.class);
87 | response = ddt.describeTable(describeTableRequest);
88 | }
89 | break;
90 | case PutItem: {
91 | PutItemRequest putItemRequest = awsRequestMapper.readValue(payload, PutItemRequest.class);
92 | response = ddt.putItem(putItemRequest);
93 | }
94 | break;
95 | case GetItem: {
96 | GetItemRequest gir = awsRequestMapper.readValue(payload, GetItemRequest.class);
97 | response = ddt.getItem(gir);
98 | }
99 | break;
100 | case DeleteItem: {
101 | DeleteItemRequest dir = awsRequestMapper.readValue(payload, DeleteItemRequest.class);
102 | response = ddt.deleteItem(dir);
103 | }
104 | break;
105 | case Query: {
106 | QueryRequest queryRequest = awsRequestMapper.readValue(payload, QueryRequest.class);
107 | response = ddt.query(queryRequest);
108 | }
109 | break;
110 | default: {
111 | logger.error("query type not supported");
112 | response = new DynamoDBResponse(new AmazonWebServiceResult(), 400);
113 | response.setError("query type not supported");
114 | }
115 | break;
116 | }
117 | }
118 | catch (Throwable e) {
119 | e.printStackTrace();
120 | }
121 | finally {
122 |
123 | if (response == null)
124 | {
125 | throw new WebApplicationException("Internal Error", 500);
126 | }
127 |
128 | byte[] bytes = null;
129 | try {
130 | bytes = awsRequestMapper.writeValueAsBytes(response.getResult());
131 | } catch (JsonProcessingException e) {
132 | e.printStackTrace();
133 | }
134 | if (response.getStatusCode() == 200) {
135 | Response httpResponse;
136 | Response.ResponseBuilder responseBuilder = Response.ok(bytes).status(response.getStatusCode());
137 | httpResponse = responseBuilder.build();
138 | asyncResponse.resume(httpResponse);
139 | }else{
140 | throw new WebApplicationException(response.getError(), response.getStatusCode());
141 | }
142 | }
143 | }
144 | }
--------------------------------------------------------------------------------
/src/test/java/com/datastax/powertools/dcp/example/CatalogIntegrationTest.java:
--------------------------------------------------------------------------------
1 | package com.datastax.powertools.dcp.example;
2 |
3 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
4 | import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper;
5 | import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapperConfig;
6 | import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBQueryExpression;
7 | import com.amazonaws.services.dynamodbv2.document.DynamoDB;
8 | import com.amazonaws.services.dynamodbv2.model.AttributeDefinition;
9 | import com.amazonaws.services.dynamodbv2.model.AttributeValue;
10 | import com.amazonaws.services.dynamodbv2.model.ComparisonOperator;
11 | import com.amazonaws.services.dynamodbv2.model.Condition;
12 | import com.amazonaws.services.dynamodbv2.model.CreateTableRequest;
13 | import com.amazonaws.services.dynamodbv2.model.CreateTableResult;
14 | import com.amazonaws.services.dynamodbv2.model.DeleteTableResult;
15 | import com.amazonaws.services.dynamodbv2.model.KeySchemaElement;
16 | import com.amazonaws.services.dynamodbv2.model.KeyType;
17 | import com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput;
18 | import com.datastax.powertools.dcp.AbstractDCPTest;
19 | import org.junit.Assert;
20 | import org.junit.Test;
21 |
22 | import java.util.ArrayList;
23 | import java.util.Arrays;
24 | import java.util.Collection;
25 | import java.util.HashMap;
26 | import java.util.HashSet;
27 | import java.util.List;
28 | import java.util.Map;
29 |
30 | public class CatalogIntegrationTest extends AbstractDCPTest
31 | {
32 | @Test
33 | public void testCatalog()
34 | {
35 | AmazonDynamoDB client = getProxyClient();
36 |
37 | DynamoDB dynamoDB = new DynamoDB(client);
38 |
39 | List<AttributeDefinition> attributeDefinitions = new ArrayList<>();
40 | attributeDefinitions.add(new AttributeDefinition().withAttributeName("Id").withAttributeType("N"));
41 | attributeDefinitions.add(new AttributeDefinition().withAttributeName("Title").withAttributeType("S"));
42 |
43 | List<KeySchemaElement> keySchema = new ArrayList<>();
44 | keySchema.add(new KeySchemaElement()
45 | .withAttributeName("Id").withKeyType(KeyType.HASH));
46 |
47 | keySchema.add(new KeySchemaElement()
48 | .withAttributeName("Title").withKeyType(KeyType.RANGE));
49 |
50 | String tableName= "ProductCatalog";
51 |
52 | CreateTableRequest request = new CreateTableRequest()
53 | .withTableName(tableName)
54 | .withKeySchema(keySchema)
55 | .withAttributeDefinitions(attributeDefinitions)
56 | .withProvisionedThroughput(new ProvisionedThroughput()
57 | .withReadCapacityUnits(5L)
58 | .withWriteCapacityUnits(6L));
59 |
60 | CreateTableResult createTable = client.createTable(request);
61 |
62 | DynamoDBMapper mapper = new DynamoDBMapper(client, DynamoDBMapperConfig.builder().withSaveBehavior(DynamoDBMapperConfig.SaveBehavior.PUT).build());
63 |
64 | CatalogItem item = new CatalogItem();
65 | item.setId(102);
66 | item.setTitle("Book 102 Title");
67 | item.setISBN("222-2222222222");
68 | item.setBookAuthors(new HashSet<>(Arrays.asList("Author 1", "Author 2")));
69 | item.setSomeProp("Test");
70 | item.setBooksInStock(10);
71 |
72 | mapper.save(item);
73 |
74 | //Read Back
75 | CatalogItem getItem = new CatalogItem();
76 |
77 | getItem.setId(102);
78 | getItem.setTitle("Book 102 Title");
79 |
80 |
81 | CatalogItem partitionKey = new CatalogItem();
82 | partitionKey.setId(102);
83 | Map<String, Condition> rangeKeyConditions = new HashMap<>();
84 | 
85 | //NOTE: The current DynamoDB service only allows up to one range key condition per query. Providing more than one range key condition will result in an SdkClientException.
86 | //TODO: test Between ComparisonOperator
87 | Collection<AttributeValue> attributeValueList = Arrays.asList(new AttributeValue("B"));
88 | Condition rangeCondition = new Condition().withAttributeValueList(attributeValueList)
89 |         .withComparisonOperator(ComparisonOperator.GE);
90 | 
91 | rangeKeyConditions.put("Title", rangeCondition);
92 | 
93 | DynamoDBQueryExpression<CatalogItem> queryExpression = new DynamoDBQueryExpression<CatalogItem>()
94 |         .withHashKeyValues(partitionKey)
95 |         .withRangeKeyConditions(rangeKeyConditions);
96 |
97 | List<CatalogItem> itemList = mapper.query(CatalogItem.class, queryExpression);
98 |
99 | Assert.assertTrue(itemList.size() == 1);
100 | CatalogItem r = itemList.get(0);
101 | Assert.assertEquals(item, r);
102 |
103 | //Complex Query
104 | String keyConditionExpression = "Id = :v_id and Title > :v_title";
105 | String filterExpression = "ISBN = :v_isbn and booksInStock >= :v_books_in_stock";
106 | //String projectionExpression = "";
107 | //Integer limit = 1;
108 | //String select = "";
109 | //boolean consistentRead = false;
110 | 
111 | Map<String, AttributeValue> expressionAttributeValues = new HashMap<>();
112 | 
113 | expressionAttributeValues.put(":v_id", new AttributeValue().withN("102"));
114 | expressionAttributeValues.put(":v_title", new AttributeValue().withS("B"));
115 | expressionAttributeValues.put(":v_isbn", new AttributeValue().withS("222-2222222222"));
116 | expressionAttributeValues.put(":v_books_in_stock", new AttributeValue().withN("2"));
117 | 
118 | queryExpression = new DynamoDBQueryExpression<CatalogItem>()
119 |         .withKeyConditionExpression(keyConditionExpression)
120 |         .withFilterExpression(filterExpression)
121 |         .withExpressionAttributeValues(expressionAttributeValues);
122 | // .withProjectionExpression(projectionExpression)
123 | // .withLimit(limit)
124 | // .withSelect(select)
125 | // .withConsistentRead(consistentRead);
126 |
127 | // consider implementing ExclusiveStartKey and ExpressionAttributeNames and Values
128 |
129 | itemList = mapper.query(CatalogItem.class, queryExpression);
130 |
131 | Assert.assertTrue(itemList.size() == 1);
132 | r = itemList.get(0);
133 | Assert.assertEquals(item, r);
134 |
135 |
136 | //Get Item
137 | r = mapper.load(getItem);
138 | Assert.assertEquals(item, r);
139 |
140 | //Delete Item
141 | mapper.delete(getItem);
142 | r = mapper.load(getItem);
143 | Assert.assertNull(r);
144 |
145 |
146 | DeleteTableResult deleteTable = client.deleteTable(tableName);
147 | }
148 | }
149 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # dynamo-cassandra-proxy
2 |
3 | Build state:
4 | 
5 |
6 | `dynamo-cassandra-proxy` consists of a scalable proxy layer that sits between your app and Apache Cassandra. [See the design summary in docs](docs/Summary.md)
7 |
8 | It provides compatibility with the DynamoDB SDK which allows existing DynamoDB applications to read and write data to Cassandra without application changes.
9 |
10 | It also supports the ability to sync DynamoDB tables with cassandra via DynamoDB Streams.
11 |
12 | ## Config
13 |
14 | Create your yaml based on the template:
15 |
16 | cp conf/dynamo-cassandra-proxy.yaml.template conf/dynamo-cassandra-proxy.yaml
17 |
18 | The following are the options supported by the proxy:
19 |
20 | | Option | Description|
21 | | ------ | ---------- |
22 | |streamsEnabled| When set to true it enables the proxy to pull live data from an existing dynamodb table|
23 | |dynamoRegion| Only needed when streaming is enabled, region your dynamodb table is in |
24 | |dynamoAccessKey| Only needed when streaming is enabled, used to connect to dynamodb streams|
25 | |dynamoSecretKey| Only needed when streaming is enabled, used to connect to dynamodb streams|
26 | |awsDynamodbEndpoint| Only needed when streaming is enabled, used to connect to dynamodb streams|
27 | |contactPoints| Contact points to connect to Apache Cassandra(TM) cluster. If you are using the docker option just leave localhost|
28 | |dockerCassandra| When set to true it will stand up Cassandra in your local docker. Ensure the docker deamon is installed and running and your user has access to run `docker ps`|
29 |
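As a reference point, a minimal `conf/dynamo-cassandra-proxy.yaml` could look like this (values are illustrative; the template above is the authoritative starting point):

```
contactPoints: localhost
dockerCassandra: true
streamsEnabled: false
# Required only when streamsEnabled is true:
# dynamoRegion: us-east-2
# dynamoAccessKey: <access-key>
# dynamoSecretKey: <secret-key>
# awsDynamodbEndpoint: http://dynamodb.us-east-2.amazonaws.com
```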
30 |
31 | ## To run locally
32 |
33 | Clone:
34 |
35 | git clone git@github.com:datastax/dynamo-cassandra-proxy.git
36 |
37 | Build:
38 |
39 | mvn package
40 |
41 | Run the app: whether you point the proxy at your own Cassandra cluster or rely on the proxy to stand up a Cassandra node via the dockerCassandra option in the yaml, you can run the code locally with:
42 |
43 | java -Ddw.contactPoints="$contactPoints" -cp target/dynamodb-cassandra-proxy-0.1.0.jar com.datastax.powertools.dcp.DCProxyApplication server conf/dynamo-cassandra-proxy.yaml
44 |
45 | The proxy will come up and listen on port 8080. In your DynamoDB application, point the SDK endpoint at `<hostname>:8080`. A sample connection string (in Java) should look as follows:
46 |
47 | ClientConfiguration config = new ClientConfiguration();
48 |     config.setMaxConnections(dynamodbMaxConnections);
49 |     String dynamodbEndpoint = "localhost:8080";
50 |     String signinRegion = "dummy";
51 |     AwsClientBuilder.EndpointConfiguration endpointConfiguration = new AwsClientBuilder
52 |             .EndpointConfiguration("http://" + dynamodbEndpoint, signinRegion);
53 | ddbBuilder = AmazonDynamoDBClientBuilder.standard()
54 | .withClientConfiguration(config)
55 | .withEndpointConfiguration(endpointConfiguration);
56 |
57 |
58 | Note: `MaxConnections` is the main lever for getting the AWS SDK to perform beyond very basic levels. We have tested this up to the maximum of 50 and it appears to scale almost linearly all the way up on a medium-sized box. If you are benchmarking and trying to saturate a Cassandra cluster, crank this value up.
59 |
60 | ## To run via docker-compose
61 |
62 | Build the app
63 |
64 | mvn package
65 |
66 | Build and run the docker containers
67 |
68 | docker-compose up
69 |
70 |
71 | ## To run in local k8s
72 |
73 | Set up cassandra config map:
74 |
75 | kubectl create configmap cassandra-config \
76 | --from-file=common/cassandra/conf-dir/resources/cassandra/conf
77 |
78 | Apply k8s yaml:
79 |
80 | kubectl apply -f k8s-local/proxy-suite.yaml
81 |
82 | At this point, your pods should look as follows:
83 |
84 | ```
85 | $ kubectl get pods [2:34:13]
86 | NAME READY STATUS RESTARTS AGE
87 | cassandra-0 1/1 Running 0 2m35s
88 | cassandra-1 1/1 Running 0 168s
89 | cassandra-2            1/1     Running   0          123s
90 | dynamo-cass-proxy-0 1/1 Running 4 63s
91 | ```
92 |
93 | To terminate your deployment run:
94 |
95 | kubectl delete -f k8s-local/proxy-suite.yaml
96 |
97 | ## To run on GKE
98 |
99 | ### Create cluster
100 |
101 | If you do not have a GKE cluster yet, [create one per the gcloud docs](https://cloud.google.com/kubernetes-engine/docs/how-to/creating-a-cluster) or use these sample commands:
102 |
103 |
104 | create:
105 |
106 | gcloud container clusters create dynamo-proxy-cluster \
107 | --cluster-version=1.12.5-gke.10 --zone us-west1-b \
108 | --machine-type n1-standard-4 --num-nodes 1
109 |
110 | configure kubectl:
111 |
112 |     gcloud container clusters get-credentials dynamo-proxy-cluster --zone us-west1-b
113 |
114 | ### Deploy the proxy
115 |
116 |
117 | Set up configMap
118 |
119 | kubectl create configmap cassandra-config \
120 | --from-file=common/cassandra/conf-dir/resources/cassandra/conf
121 |
122 | Apply k8s yaml:
123 |
124 | kubectl apply -f gke/proxy-suite.yaml
125 |
126 | The workload will appear on your google console like so (ensure you're viewing the right project):
127 |
128 | 
129 |
130 | To terminate your deployment run:
131 |
132 | kubectl delete -f gke/proxy-suite.yaml
133 |
134 |
135 | ## Contributing
136 |
137 | A good place to start might be fleshing out your own Translator.
138 | For details on translators see [Translators in the docs](docs/Translators.md)
139 |
140 | ## MVP Roadmap:
141 |
142 | Check means currently completed:
143 |
144 | - [x] CreateTable - Done in json_blob
145 | - [x] DeleteItem - Done in json_blob
146 | - [x] DeleteTable - Done in json_blob
147 | - [x] GetItem - Done in json_blob
148 | - [x] PutItem - Done in json_blob
149 | - [x] Query - key condition expression (json_blob)
150 | - [x] Query - key conditions (json_blob)
151 | - [x] Query - filter expressions (json_blob)
152 | - [ ] Query - projection expressions (json_blob)
153 | - [ ] Query - limit (json_blob)
154 | - [ ] Query - withSelect (json_blob)
155 | - [ ] Scan
156 | - [x] Hybrid functionality - DDB to Cassandra
157 | - [ ] Hybrid functionality - Cassandra to DDB
158 |
159 | **Other features** not yet implemented:
160 |
161 | - UpdateItem
162 | - BatchGetItem
163 | - BatchWriteItem
164 | - DescribeStream
165 | - DescribeTable
166 | - DescribeLimits
167 | - DescribeTimeToLive
168 | - GetRecords
169 | - GetShardIterator
170 | - ListStreams
171 | - ListTables
172 | - ListTagsOfResource
173 | - TagResource
174 | - UntagResource
175 | - UpdateTable
176 | - UpdateTimeToLive
177 | - ConsistentRead
178 |
179 | ## License
180 | This project is licensed under the Apache Public License 2.0
181 |
--------------------------------------------------------------------------------
/migrate/pom.xml:
--------------------------------------------------------------------------------
1 |
3 | 4.0.0
4 | com.datastax.powertools.migrate
5 | dynamoDB
6 | 0.1
7 | jar
8 |
9 |
10 | UTF-8
11 | 2.11.8
12 | 2.11
13 | 2.4.0
14 | 3.0.0
15 | 2.4.0
16 | 3.2
17 | 4.12
18 |
19 |
20 |
24 |
25 |
26 | org.scala-lang
27 | scala-library
28 | ${scala.version}
29 |
30 |
31 | org.apache.spark
32 | spark-core_${scala.main.version}
33 | ${spark.version}
34 | provided
35 |
36 |
37 | org.apache.spark
38 | spark-sql_${scala.main.version}
39 | ${spark.version}
40 | provided
41 |
42 |
43 | org.apache.spark
44 | spark-hive_${scala.main.version}
45 | ${spark.version}
46 | provided
47 |
48 |
49 | com.datastax.spark
50 | spark-cassandra-connector_${scala.main.version}
51 | ${connector.version}
52 | provided
53 |
54 |
55 |
56 | joda-time
57 | joda-time
58 | 2.3
59 |
60 |
61 | com.github.traviscrawford
62 | spark-dynamodb
63 | 0.0.14-SNAPSHOT
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 | com.datastax.spark
80 | spark-cassandra-connector-embedded_${scala.main.version}
81 | ${connector.version}
82 | test
83 |
84 |
85 | com.datastax.cassandra
86 | cassandra-driver-core
87 |
88 |
89 |
90 |
91 | org.scalatest
92 | scalatest_${scala.main.version}
93 | ${scalatest.version}
94 | test
95 |
96 |
97 | junit
98 | junit
99 | ${junit.version}
100 | test
101 |
102 |
103 | org.apache.cassandra
104 | cassandra-all
105 | ${cassandra.version}
106 | test
107 |
108 |
109 |
110 | org.slf4j
111 | log4j-over-slf4j
112 |
113 |
114 |
115 |
116 |
117 | com.google.guava
118 | guava
119 | 18.0
120 | test
121 |
122 |
123 |
124 |
125 |
126 |
127 | net.alchim31.maven
128 | scala-maven-plugin
129 | 3.2.2
130 |
131 |
132 | process-sources
133 |
134 | compile
135 | testCompile
136 |
137 |
138 | ${project.build.sourceDirectory}/../scala
139 |
140 |
141 |
142 |
143 |
144 | org.apache.maven.plugins
145 | maven-shade-plugin
146 | 2.4.3
147 |
148 |
149 | package
150 |
151 | shade
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 | org.apache.maven.plugins
168 | maven-surefire-plugin
169 | 2.7
170 |
171 | true
172 |
173 |
174 |
175 |
176 | org.scalatest
177 | scalatest-maven-plugin
178 | 1.0
179 |
180 | ${project.build.directory}/surefire-reports
181 | .
182 | WDF TestSuite.txt
183 |
184 |
185 |
186 | test
187 |
188 | test
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
--------------------------------------------------------------------------------
/src/main/java/com/datastax/powertools/dcp/managed/dse/CassandraManager.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp.managed.dse;
17 |
18 |
19 | import com.datastax.oss.driver.api.core.CqlIdentifier;
20 | import com.datastax.oss.driver.api.core.CqlSession;
21 | import com.datastax.oss.driver.api.core.CqlSessionBuilder;
22 | import com.datastax.oss.driver.api.core.cql.PreparedStatement;
23 | import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder;
24 | import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata;
25 | import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata;
26 | import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata;
27 | import com.datastax.powertools.dcp.DCProxyConfiguration;
28 | import com.google.common.collect.Maps;
29 | import io.dropwizard.lifecycle.Managed;
30 | import org.slf4j.Logger;
31 | import org.slf4j.LoggerFactory;
32 |
33 | import java.net.InetSocketAddress;
34 | import java.util.Arrays;
35 | import java.util.List;
36 | import java.util.Map;
37 | import java.util.MissingResourceException;
38 | import java.util.stream.Collectors;
39 |
40 | public class CassandraManager implements Managed {
41 | private final static Logger logger = LoggerFactory.getLogger(CassandraManager.class);
42 |
43 | public CqlSession getSession() {
44 | return session;
45 | }
46 |
47 | private DCProxyConfiguration config;
48 | private String keyspaceName;
49 | private CqlSession session;
50 | private CassandraStatements.Prepared stmts;
51 | private final Map<String, TableDef> tableDefs = Maps.newConcurrentMap();
52 |
53 | public void configure(DCProxyConfiguration config) {
54 | this.config = config;
55 | this.keyspaceName = config.getKeyspaceName();
56 | }
57 |
58 | public void start() {
59 | logger.info("Contact points {}", config.getContactPoints());
60 |
61 | CqlSessionBuilder builder = CqlSession.builder()
62 | .addContactPoints(Arrays.stream(config.getContactPoints().split(","))
63 | .map(s -> new InetSocketAddress(s, config.getCqlPort()))
64 | .collect(Collectors.toList()))
65 | .withLocalDatacenter(config.getLocalDC());
66 |
67 | if (config.getCqlUserName() != null)
68 | builder.withAuthCredentials(config.getCqlUserName(), config.getCqlPassword());
69 |
70 | if (config.isDockerCassandra()) {
71 | logger.info("Docker cassandra enabled in the yaml.");
72 | logger.info("Attempting to stand up container.");
73 | DockerHelper dh = new DockerHelper();
74 | dh.startDSE();
75 |
76 | //TODO
77 | try {
78 | Thread.sleep(60000);
79 | } catch (InterruptedException e) {
80 | e.printStackTrace();
81 | }
82 |
83 | }
84 |
85 | session = builder.build();
86 |
87 |
88 | logger.info("Preparing statements for " + CassandraManager.class.getSimpleName());
89 | stmts = new CassandraStatements.Prepared(session, config.getKeyspaceName(), config.getReplicationStrategy());
90 |
91 | refreshSchema();
92 | }
93 |
94 | public void refreshSchema() {
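        // Re-read the keyspace metadata and (re)prepare the per-table JSON insert, query, and delete statements.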
95 |
96 | KeyspaceMetadata ksm = session.getMetadata().getKeyspace(keyspaceName).orElseThrow(() -> new RuntimeException("Keyspace missing: " + keyspaceName));
97 |
98 | for (Map.Entry<CqlIdentifier, TableMetadata> e : ksm.getTables().entrySet())
99 | {
100 | TableMetadata m = e.getValue();
101 | TableDef tableDef = new TableDef();
102 | String tableName = e.getKey().asInternal();
103 |
104 | tableDef.setKeyspaceName(keyspaceName);
105 | tableDef.setTableName("\""+tableName + "\"");
106 | tableDef.setSession(session);
107 |
108 | PreparedStatement jsonPutStatement = stmts.prepare(String.format("INSERT INTO %s.\"%s\" JSON ?", keyspaceName, tableName));
109 | tableDef.setJsonPutStatement(jsonPutStatement);
110 |
111 | List<ColumnMetadata> keys = m.getPartitionKey();
112 | if (keys.size() != 1)
113 | throw new IllegalStateException("Dynamo-like tables must have exactly one Partition Key: " + tableName);
114 |
115 | Map<ColumnMetadata, ClusteringOrder> clustering = m.getClusteringColumns();
116 | if (clustering.size() > 1)
117 | throw new IllegalStateException("Dynamo-like tables can contain at most one Clustering Key: " + tableName);
118 |
119 | ColumnMetadata partitionKey = keys.get(0);
120 | tableDef.setPartitionKey(partitionKey);
121 | String partitionKeyName = partitionKey.getName().asInternal();
122 |
123 | PreparedStatement jsonQueryStatement = stmts.prepare(
124 | String.format("SELECT * from %s.\"%s\" where \"%s\" = ?", keyspaceName, tableName, partitionKeyName)
125 | );
126 | tableDef.setJsonQueryPartitionStatement(jsonQueryStatement);
127 |
128 | if (clustering.isEmpty())
129 | {
130 | PreparedStatement deleteStatement = stmts.prepare(
131 | String.format("DELETE from %s.\"%s\" where \"%s\" = ?", keyspaceName, tableName, partitionKeyName)
132 | );
133 | tableDef.setDeleteStatement(deleteStatement);
134 |
135 | PreparedStatement queryRowStatement = stmts.prepare(
136 | String.format("select * from %s.\"%s\" where \"%s\" = ?", keyspaceName, tableName, partitionKeyName)
137 | );
138 | tableDef.setQueryRowStatement(queryRowStatement);
139 | }
140 | else
141 | {
142 | ColumnMetadata clusteringKey = clustering.keySet().iterator().next();
143 | tableDef.setClusteringKey(clusteringKey);
144 | String clusteringKeyName = clusteringKey.getName().asInternal();
145 |
146 | PreparedStatement deleteStatement = stmts.prepare(
147 | String.format("DELETE from %s.\"%s\" where \"%s\" = ? and \"%s\" = ?", keyspaceName, tableName, partitionKeyName, clusteringKeyName)
148 | );
149 | tableDef.setDeleteStatement(deleteStatement);
150 |
151 | PreparedStatement queryRowStatement = stmts.prepare(
152 | String.format("select * from %s.\"%s\" where \"%s\" = ? and \"%s\" = ?", keyspaceName, tableName, partitionKeyName, clusteringKeyName)
153 | );
154 | tableDef.setQueryRowStatement(queryRowStatement);
155 | }
156 |
157 | tableDefs.put(tableName, tableDef);
158 | }
159 | }
160 |
161 | public void stop() throws Exception {
162 | session.close();
163 |
164 | if (config.isDockerCassandra()) {
165 | DockerHelper dh = new DockerHelper();
166 | dh.stopDSE();
167 | }
168 | }
169 |
170 | public String getKeyspaceName() {
171 | return keyspaceName;
172 | }
173 |
174 | public PreparedStatement getPutStatement(String tableName) {
175 | TableDef tableDef = tableDefs.get(tableName);
176 | if (tableDef == null){
177 | logger.error(String.format("Table %s does not exist", tableName));
178 | return null;
179 | }
180 | return tableDef.getJsonPutStatement();
181 | }
182 |
183 | public boolean hasTable(String tableName)
184 | {
185 | return tableDefs.containsKey(tableName);
186 | }
187 |
188 | public TableDef getTableDef(String tableName) {
189 | TableDef tableDef = tableDefs.get(tableName);
190 | if (tableDef == null)
191 | throw new MissingResourceException("Table not found " + tableName, tableName, tableName);
192 |
193 | return tableDef;
194 | }
195 | }
196 |
--------------------------------------------------------------------------------
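A minimal usage sketch for the CassandraManager above (not part of the repo). It assumes
DCProxyConfiguration can be constructed and populated directly, and that a table named
"test" with a text partition key "hash_key" already exists; both are hypothetical:

    import com.datastax.oss.driver.api.core.cql.PreparedStatement;
    import com.datastax.powertools.dcp.DCProxyConfiguration;
    import com.datastax.powertools.dcp.managed.dse.CassandraManager;

    public class CassandraManagerExample {
        public static void main(String[] args) throws Exception {
            DCProxyConfiguration config = new DCProxyConfiguration(); // assumed no-arg constructor
            CassandraManager manager = new CassandraManager();
            manager.configure(config);
            manager.start(); // connects and prepares per-table statements via refreshSchema()

            // A DynamoDB PutItem maps to INSERT ... JSON with one bind variable: the row as JSON.
            PreparedStatement put = manager.getPutStatement("test");
            manager.getSession().execute(put.bind("{\"hash_key\": \"k1\", \"payload\": \"v1\"}"));

            manager.stop();
        }
    }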
/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <modelVersion>4.0.0</modelVersion>
6 |
7 |     <groupId>com.datastax.powertools.dcp</groupId>
8 |     <artifactId>dynamodb-cassandra-proxy</artifactId>
9 |     <version>0.1.0</version>
10 |
11 |     <repositories>
12 |         <repository>
13 |             <id>dynamodblocal</id>
14 |             <name>AWS DynamoDB Local Release Repository</name>
15 |             <url>https://s3-us-west-2.amazonaws.com/dynamodb-local/release</url>
16 |         </repository>
17 |     </repositories>
18 |
19 |     <dependencies>
20 |         <dependency>
21 |             <groupId>io.dropwizard</groupId>
22 |             <artifactId>dropwizard-core</artifactId>
23 |             <version>${dropwizard.version}</version>
24 |         </dependency>
25 |         <dependency>
26 |             <groupId>com.datastax.oss</groupId>
27 |             <artifactId>java-driver-core</artifactId>
28 |             <version>${java.driver.version}</version>
29 |         </dependency>
30 |         <dependency>
31 |             <groupId>com.datastax.oss</groupId>
32 |             <artifactId>java-driver-query-builder</artifactId>
33 |             <version>${java.driver.version}</version>
34 |         </dependency>
35 |         <dependency>
36 |             <groupId>com.amazonaws</groupId>
37 |             <artifactId>aws-java-sdk-dynamodb</artifactId>
38 |             <version>${dynamodb.sdk.version}</version>
39 |         </dependency>
40 |         <dependency>
41 |             <groupId>com.amazonaws</groupId>
42 |             <artifactId>dynamodb-streams-kinesis-adapter</artifactId>
43 |             <version>${dynamodb.streams.kinesis.version}</version>
44 |             <exclusions>
45 |                 <exclusion>
46 |                     <artifactId>jackson-databind</artifactId>
47 |                     <groupId>com.fasterxml.jackson.core</groupId>
48 |                 </exclusion>
49 |             </exclusions>
50 |         </dependency>
51 |         <dependency>
52 |             <groupId>javax.xml.bind</groupId>
53 |             <artifactId>jaxb-api</artifactId>
54 |             <version>${jaxb.version}</version>
55 |         </dependency>
56 |         <dependency>
57 |             <groupId>com.sun.xml.bind</groupId>
58 |             <artifactId>jaxb-core</artifactId>
59 |             <version>${jaxb.version}</version>
60 |         </dependency>
61 |         <dependency>
62 |             <groupId>com.sun.xml.bind</groupId>
63 |             <artifactId>jaxb-impl</artifactId>
64 |             <version>${jaxb.version}</version>
65 |         </dependency>
66 |         <dependency>
67 |             <groupId>javax.activation</groupId>
68 |             <artifactId>activation</artifactId>
69 |             <version>${activation.version}</version>
70 |         </dependency>
71 |         <dependency>
72 |             <groupId>io.netty</groupId>
73 |             <artifactId>netty-all</artifactId>
74 |             <version>${netty.version}</version>
75 |         </dependency>
76 |         <dependency>
77 |             <groupId>com.github.docker-java</groupId>
78 |             <artifactId>docker-java</artifactId>
79 |             <version>${docker.java.version}</version>
80 |             <exclusions>
81 |                 <exclusion>
82 |                     <groupId>netty.io</groupId>
83 |                     <artifactId>*</artifactId>
84 |                 </exclusion>
85 |             </exclusions>
86 |         </dependency>
87 |         <dependency>
88 |             <groupId>org.testng</groupId>
89 |             <artifactId>testng</artifactId>
90 |             <version>${testng.version}</version>
91 |             <scope>test</scope>
92 |         </dependency>
93 |         <dependency>
94 |             <groupId>io.dropwizard</groupId>
95 |             <artifactId>dropwizard-testing</artifactId>
96 |             <version>${dropwizard.version}</version>
97 |             <scope>test</scope>
98 |         </dependency>
99 |         <dependency>
100 |             <artifactId>jersey-client</artifactId>
101 |             <groupId>org.glassfish.jersey.core</groupId>
102 |             <version>2.25.1</version>
103 |         </dependency>
104 |         <dependency>
105 |             <groupId>com.amazonaws</groupId>
106 |             <artifactId>DynamoDBLocal</artifactId>
107 |             <version>${dynamodb.sdk.version}</version>
108 |             <scope>test</scope>
109 |             <exclusions>
110 |                 <exclusion>
111 |                     <groupId>com.google.guava</groupId>
112 |                     <artifactId>guava</artifactId>
113 |                 </exclusion>
114 |                 <exclusion>
115 |                     <groupId>org.eclipse.jetty</groupId>
116 |                     <artifactId>*</artifactId>
117 |                 </exclusion>
118 |             </exclusions>
119 |         </dependency>
120 |     </dependencies>
121 |
122 |     <properties>
123 |         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
124 |         <dropwizard.version>1.3.14</dropwizard.version>
125 |         <java.version>11</java.version>
126 |         <java.driver.version>4.2.1</java.driver.version>
127 |         <jaxb.version>2.2.11</jaxb.version>
128 |         <docker.java.version>3.1.2</docker.java.version>
129 |         <netty.version>4.0.56.Final</netty.version>
130 |         <testng.version>7.0.0-beta1</testng.version>
131 |         <activation.version>1.1.1</activation.version>
132 |         <dynamodb.sdk.version>1.11.477</dynamodb.sdk.version>
133 |         <dynamodb.streams.kinesis.version>1.4.0</dynamodb.streams.kinesis.version>
134 |     </properties>
135 |
136 |     <build>
137 |         <plugins>
138 |             <plugin>
139 |                 <groupId>org.apache.maven.plugins</groupId>
140 |                 <artifactId>maven-compiler-plugin</artifactId>
141 |                 <version>3.8.0</version>
142 |                 <configuration>
143 |                     <release>${java.version}</release>
144 |                 </configuration>
145 |             </plugin>
146 |             <plugin>
147 |                 <artifactId>maven-surefire-plugin</artifactId>
148 |                 <version>2.18.1</version>
149 |                 <configuration>
150 |                     <argLine>-Dsqlite4java.library.path=${basedir}/target/dependencies</argLine>
151 |                 </configuration>
152 |             </plugin>
153 |             <plugin>
154 |                 <groupId>org.apache.maven.plugins</groupId>
155 |                 <artifactId>maven-resources-plugin</artifactId>
156 |                 <version>2.7</version>
157 |             </plugin>
158 |             <plugin>
159 |                 <groupId>org.apache.maven.plugins</groupId>
160 |                 <artifactId>maven-dependency-plugin</artifactId>
161 |                 <version>2.10</version>
162 |                 <executions>
163 |                     <execution>
164 |                         <id>copy-dependencies</id>
165 |                         <phase>process-test-resources</phase>
166 |                         <goals>
167 |                             <goal>copy-dependencies</goal>
168 |                         </goals>
169 |                         <configuration>
170 |                             <outputDirectory>${project.build.directory}/dependencies</outputDirectory>
171 |                             <overWriteReleases>false</overWriteReleases>
172 |                             <overWriteSnapshots>false</overWriteSnapshots>
173 |                             <overWriteIfNewer>true</overWriteIfNewer>
174 |                         </configuration>
175 |                     </execution>
176 |                 </executions>
177 |             </plugin>
178 |             <plugin>
179 |                 <groupId>org.apache.maven.plugins</groupId>
180 |                 <artifactId>maven-shade-plugin</artifactId>
181 |                 <version>2.3</version>
182 |                 <configuration>
183 |                     <createDependencyReducedPom>true</createDependencyReducedPom>
184 |                     <filters>
185 |                         <filter>
186 |                             <artifact>*:*</artifact>
187 |                             <excludes>
188 |                                 <exclude>META-INF/*.SF</exclude>
189 |                                 <exclude>META-INF/*.DSA</exclude>
190 |                                 <exclude>META-INF/*.RSA</exclude>
191 |                             </excludes>
192 |                         </filter>
193 |                     </filters>
194 |                 </configuration>
195 |                 <executions>
196 |                     <execution>
197 |                         <phase>package</phase>
198 |                         <goals>
199 |                             <goal>shade</goal>
200 |                         </goals>
201 |                         <configuration>
202 |                             <transformers>
203 |                                 <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
204 |                                     <mainClass>com.datastax.powertools.dcp.DCProxyApplication</mainClass>
205 |                                 </transformer>
206 |                             </transformers>
207 |                         </configuration>
208 |                     </execution>
209 |                 </executions>
210 |             </plugin>
211 |         </plugins>
212 |     </build>
213 | </project>
--------------------------------------------------------------------------------
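The shade plugin above produces a single runnable jar whose entry point is
DCProxyApplication. A sketch of launching it programmatically, assuming the standard
Dropwizard "server <config>" convention and a yaml derived from
conf/dynamo-cassandra-proxy.yaml.template (the exact file name is an assumption):

    public class LaunchExample {
        public static void main(String[] args) throws Exception {
            // Equivalent to: java -jar target/dynamodb-cassandra-proxy-0.1.0.jar server conf/dynamo-cassandra-proxy.yaml
            com.datastax.powertools.dcp.DCProxyApplication.main(
                    new String[] {"server", "conf/dynamo-cassandra-proxy.yaml"});
        }
    }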
/src/main/java/com/datastax/powertools/dcp/managed/ddbstreams/DynamoStreamsManager.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp.managed.ddbstreams;
17 |
18 | import com.amazonaws.auth.SystemPropertiesCredentialsProvider;
19 | import com.amazonaws.client.builder.AwsClientBuilder;
20 | import com.amazonaws.regions.Regions;
21 | import com.amazonaws.services.cloudwatch.AmazonCloudWatch;
22 | import com.amazonaws.services.cloudwatch.AmazonCloudWatchClientBuilder;
23 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
24 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
25 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDBStreams;
26 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDBStreamsClientBuilder;
27 | import com.amazonaws.services.dynamodbv2.model.DescribeTableResult;
28 | import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionInStream;
29 | import com.amazonaws.services.kinesis.clientlibrary.lib.worker.KinesisClientLibConfiguration;
30 | import com.amazonaws.services.kinesis.clientlibrary.lib.worker.Worker;
31 | import com.datastax.powertools.dcp.DCProxyConfiguration;
32 | import io.dropwizard.lifecycle.Managed;
33 | import com.amazonaws.services.dynamodbv2.streamsadapter.AmazonDynamoDBStreamsAdapterClient;
34 | import com.amazonaws.services.dynamodbv2.streamsadapter.StreamsWorkerFactory;
35 |
36 |
37 | import java.util.List;
38 | import java.util.Map;
39 | import java.util.Properties;
40 |
41 | public class DynamoStreamsManager implements Managed {
42 | private final AmazonDynamoDB ddbProxy;
43 | private String dynamodbEndpoint;
44 | private String streamsEndpoint;
45 | private String signinRegion;
46 | private String accessKey;
47 | private String secretKey;
48 | private AmazonDynamoDBStreams streamsClient;
49 | private AmazonDynamoDB realDDB;
50 | private String streamArn;
51 |
52 | //private StreamSpecification streamSpec;
53 |
54 | //TODO: support multiple streams from multiple tables
55 | private AmazonDynamoDBStreamsAdapterClient adapterClient;
56 | private StreamsRecordProcessorFactory recordProcessorFactory;
57 | private KinesisClientLibConfiguration workerConfig;
58 |
59 | public DynamoStreamsManager(AmazonDynamoDB ddb) {
60 | ddbProxy = ddb;
61 | }
62 |
63 | @Override
64 | public void start() throws Exception {
65 | }
66 |
67 | @Override
68 | public void stop() throws Exception {
69 | }
70 |
71 | public void configure(DCProxyConfiguration config) {
72 |
73 | //TODO make table name dynamic
74 | String tableName = "test";
75 |
76 | this.dynamodbEndpoint = config.getAwsDynamodbEndpoint();
77 | this.streamsEndpoint = config.getStreamsEndpoint();
78 | this.signinRegion = config.getDynamoRegion();
79 | this.accessKey = config.getDynamoAccessKey();
80 | this.secretKey = config.getDynamoSecretKey();
81 |
82 | Properties props = System.getProperties();
83 | props.setProperty("aws.accessKeyId", accessKey);
84 | props.setProperty("aws.secretKey", secretKey);
85 |
86 | AwsClientBuilder.EndpointConfiguration endpointConfiguration =
87 | new AwsClientBuilder.EndpointConfiguration(streamsEndpoint, signinRegion);
88 | SystemPropertiesCredentialsProvider spcp = new SystemPropertiesCredentialsProvider();
89 |
90 | realDDB = AmazonDynamoDBClientBuilder.standard().
91 | withRegion(Regions.US_EAST_2).
92 | //withEndpointConfiguration(endpointConfiguration).
93 | withCredentials(spcp).build();
94 |
95 | DescribeTableResult tableResult = realDDB.describeTable(tableName);
96 | streamArn = tableResult.getTable().getLatestStreamArn();
97 | //streamSpec = tableResult.getTable().getStreamSpecification();
98 | streamsClient = AmazonDynamoDBStreamsClientBuilder.standard().withEndpointConfiguration(endpointConfiguration).build();
99 |
100 | adapterClient = new AmazonDynamoDBStreamsAdapterClient(streamsClient);
101 |
102 | recordProcessorFactory = new StreamsRecordProcessorFactory(ddbProxy, tableName);
103 |
104 | workerConfig = new KinesisClientLibConfiguration("test-app",
105 | streamArn,
106 | spcp,
107 | "streams-worker")
108 | .withMaxRecords(1000)
109 | .withIdleTimeBetweenReadsInMillis(500)
110 | .withInitialPositionInStream(InitialPositionInStream.TRIM_HORIZON);
111 | AmazonCloudWatch cloudWatchClient;
112 | cloudWatchClient = AmazonCloudWatchClientBuilder.standard()
113 | .withRegion(signinRegion)
114 | .build();
115 |
116 | System.out.println("Creating worker for stream: " + streamArn);
117 |
118 | /*
119 | DescribeStreamRequest request = new DescribeStreamRequest();
120 | DescribeStreamRequestAdapter describeStreamResult = new DescribeStreamRequestAdapter(request);
121 | String id = describeStreamResult.getExclusiveStartShardId();
122 | String id2 = describeStreamResult.withStreamArn(streamArn).getExclusiveStartShardId();
123 | */
124 |
125 | Worker worker = StreamsWorkerFactory.createDynamoDbStreamsWorker(
126 | recordProcessorFactory,
127 | workerConfig,
128 | adapterClient,
129 | realDDB,
130 | cloudWatchClient
131 | );
132 |
133 | System.out.println("Starting worker...");
134 | Thread t = new Thread(worker);
135 | t.start();
136 | }
137 |
138 | /*
139 | public void processStream(){
140 |
141 |
142 | String lastEvaluatedShardId = null;
143 |
144 | do {
145 | DescribeStreamResult describeStreamResult;
146 | if (lastEvaluatedShardId == null){
147 |
148 | describeStreamResult = streamsClient.describeStream(
149 | new DescribeStreamRequest()
150 | .withStreamArn(streamArn));
151 | }else {
152 | describeStreamResult = streamsClient.describeStream(
153 | new DescribeStreamRequest()
154 | .withStreamArn(streamArn)
155 | .withExclusiveStartShardId(lastEvaluatedShardId));
156 | }
157 | List<Shard> shards = describeStreamResult.getStreamDescription().getShards();
158 |
159 | // Process each shard on this page
160 |
161 | for (Shard shard : shards) {
162 | String shardId = shard.getShardId();
163 |
164 | // Get an iterator for the current shard
165 | GetShardIteratorRequest getShardIteratorRequest = new GetShardIteratorRequest()
166 | .withStreamArn(streamArn)
167 | .withShardId(shardId)
168 | .withShardIteratorType(ShardIteratorType.TRIM_HORIZON);
169 | GetShardIteratorResult getShardIteratorResult =
170 | streamsClient.getShardIterator(getShardIteratorRequest);
171 | String currentShardIter = getShardIteratorResult.getShardIterator();
172 |
173 | // Shard iterator is not null until the Shard is sealed (marked as READ_ONLY).
174 | // To prevent running the loop until the Shard is sealed, which will be on average
175 | // 4 hours, we process only the items that were written into DynamoDB and then exit.
176 | int processedRecordCount = 0;
177 | //while (currentShardIter != null && processedRecordCount < maxItemCount) {
178 | while (currentShardIter != null ) {
179 | System.out.println(" Shard iterator: " + currentShardIter.substring(380));
180 |
181 | // Use the shard iterator to read the stream records
182 |
183 | GetRecordsResult getRecordsResult = streamsClient.getRecords(new GetRecordsRequest()
184 | .withShardIterator(currentShardIter));
185 | List<Record> records = getRecordsResult.getRecords();
186 | for (Record record : records) {
187 | StreamRecord streamRecord = record.getDynamodb();
188 |
189 | Map<String, AttributeValue> keys = streamRecord.getKeys();
190 | ddbProxy.putItem("test", keys);
191 | //TODO: write to DSE via dynamo proxy
192 | System.out.println(" " + record.getDynamodb());
193 | }
194 | processedRecordCount += records.size();
195 | currentShardIter = getRecordsResult.getNextShardIterator();
196 | }
197 | }
198 |
199 | // If LastEvaluatedShardId is set, then there is
200 | // at least one more page of shard IDs to retrieve
201 | lastEvaluatedShardId = describeStreamResult.getStreamDescription().getLastEvaluatedShardId();
202 |
203 | } while (lastEvaluatedShardId != null);
204 | }
205 | */
206 | }
207 |
--------------------------------------------------------------------------------
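For reference, a processor handed to StreamsRecordProcessorFactory has roughly the shape
below. This is a sketch, not the project's StreamsRecordProcessor: it assumes the KCL v2
processor interface that StreamsWorkerFactory workers drive, and it only logs each change
instead of replaying it through the proxy:

    import com.amazonaws.services.dynamodbv2.model.Record;
    import com.amazonaws.services.dynamodbv2.streamsadapter.model.RecordAdapter;
    import com.amazonaws.services.kinesis.clientlibrary.interfaces.v2.IRecordProcessor;
    import com.amazonaws.services.kinesis.clientlibrary.types.InitializationInput;
    import com.amazonaws.services.kinesis.clientlibrary.types.ProcessRecordsInput;
    import com.amazonaws.services.kinesis.clientlibrary.types.ShutdownInput;

    public class ExampleStreamsProcessor implements IRecordProcessor {
        @Override
        public void initialize(InitializationInput input) {
            System.out.println("Initialized on shard " + input.getShardId());
        }

        @Override
        public void processRecords(ProcessRecordsInput input) {
            for (com.amazonaws.services.kinesis.model.Record kinesisRecord : input.getRecords()) {
                if (kinesisRecord instanceof RecordAdapter) {
                    // The adapter wraps each DynamoDB stream record in a Kinesis record; unwrap it.
                    Record ddbRecord = ((RecordAdapter) kinesisRecord).getInternalObject();
                    System.out.println(ddbRecord.getEventName() + ": " + ddbRecord.getDynamodb().getKeys());
                }
            }
            try {
                input.getCheckpointer().checkpoint(); // mark this batch as consumed
            } catch (Exception e) {
                e.printStackTrace();
            }
        }

        @Override
        public void shutdown(ShutdownInput input) {
        }
    }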
/src/main/java/com/datastax/powertools/dcp/managed/dse/DockerHelper.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp.managed.dse;
17 |
18 | import com.github.dockerjava.api.DockerClient;
19 | import com.github.dockerjava.api.command.CreateContainerResponse;
20 | import com.github.dockerjava.api.command.ListContainersCmd;
21 | import com.github.dockerjava.api.model.Bind;
22 | import com.github.dockerjava.api.model.Container;
23 | import com.github.dockerjava.api.model.ExposedPort;
24 | import com.github.dockerjava.api.model.HostConfig;
25 | import com.github.dockerjava.api.model.Image;
26 | import com.github.dockerjava.api.model.Info;
27 | import com.github.dockerjava.api.model.PortBinding;
28 | import com.github.dockerjava.api.model.Ports;
29 | import com.github.dockerjava.api.model.Volume;
30 | import com.github.dockerjava.core.DefaultDockerClientConfig;
31 | import com.github.dockerjava.core.DockerClientBuilder;
32 | import com.github.dockerjava.core.DockerClientConfig;
33 | import com.github.dockerjava.core.command.LogContainerResultCallback;
34 | import com.github.dockerjava.core.command.PullImageResultCallback;
35 | import com.google.common.util.concurrent.Uninterruptibles;
36 | import org.slf4j.Logger;
37 | import org.slf4j.LoggerFactory;
38 |
39 | import java.io.IOException;
40 | import java.net.InetSocketAddress;
41 | import java.nio.channels.SocketChannel;
42 | import java.time.Duration;
43 | import java.util.ArrayList;
44 | import java.util.Arrays;
45 | import java.util.List;
46 | import java.util.concurrent.TimeUnit;
47 |
48 | public class DockerHelper {
49 |
50 | private DockerClientConfig config;
51 | private DockerClient dockerClient;
52 | private CreateContainerResponse container;
53 | private Logger logger = LoggerFactory.getLogger(DockerHelper.class);
54 |
55 | public DockerHelper() {
56 | this.config = DefaultDockerClientConfig.createDefaultConfigBuilder().build();
57 | this.dockerClient = DockerClientBuilder.getInstance(config).build();
58 |
59 | }
60 |
61 | public void startDSE() {
62 |
63 | String img = "datastax/ddac";
64 | String tag = "latest";
65 | String name = "ddac";
66 | List<Integer> ports = Arrays.asList(9042);
67 | List<String> volumeDescList = Arrays.asList();
68 | List<String> envList = Arrays.asList("DS_LICENSE=accept");
69 | List<String> cmdList = Arrays.asList();
70 |
71 | String containerId = startDocker(img,tag,name, ports,volumeDescList, envList, cmdList);
72 |
73 | LogContainerResultCallback loggingCallback =
74 | new LogContainerResultCallback(); // currently unused
75 |
76 | waitForPort("localhost", 9042, Duration.ofMillis(50000), logger, true);
77 |
78 | }
79 |
80 | public static boolean waitForPort(String hostname, int port, Duration timeout, Logger logger, boolean quiet)
81 | {
82 | long deadlineNanos = System.nanoTime() + timeout.toNanos();
83 |
84 | while(System.nanoTime() < deadlineNanos)
85 | {
86 | SocketChannel channel = null;
87 |
88 | try
89 | {
90 | logger.info("Checking {}:{}", hostname,port);
91 | channel = SocketChannel.open(new InetSocketAddress(hostname, port));
92 | }
93 | catch(IOException e)
94 | {
95 | Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS);
96 | }
97 |
98 | if (channel != null)
99 | {
100 | try
101 | {
102 | channel.close();
103 | }
104 | catch (IOException e)
105 | {
106 | //Close quietly
107 | }
108 |
109 | logger.info("Connected to {}:{}", hostname,port);
110 | return true;
111 | }
112 | }
113 |
114 | //The port never opened
115 | if (!quiet)
116 | {
117 | logger.warn("Failed to connect to {}:{} after {} sec", hostname, port, timeout.toSeconds());
118 | }
119 |
120 | return false;
121 | }
122 |
123 | private String startDocker(String IMG, String tag, String name, List<Integer> ports, List<String> volumeDescList, List<String> envList, List<String> cmdList) {
124 | ListContainersCmd listContainersCmd = dockerClient.listContainersCmd().withStatusFilter(Arrays.asList("exited"));
125 | listContainersCmd.getFilters().put("name", Arrays.asList(name));
126 | List<Container> stoppedContainers = null;
127 | try {
128 | stoppedContainers = listContainersCmd.exec();
129 | for (Container stoppedContainer : stoppedContainers) {
130 | String id = stoppedContainer.getId();
131 | logger.info("Removing exited container: " + id);
132 | dockerClient.removeContainerCmd(id).exec();
133 | }
134 | } catch (Exception e) {
135 | e.printStackTrace();
136 | logger.error("Unable to contact docker, make sure docker is up and try again.");
137 | logger.error("If docker is installed make sure this user has access to the docker group.");
138 | logger.error("$ sudo gpasswd -a ${USER} docker && newgrp docker");
139 | System.exit(1);
140 | }
141 |
142 | Container containerId = searchContainer(name);
143 | if (containerId != null) {
144 | return containerId.getId();
145 | }
146 |
147 | Info info = dockerClient.infoCmd().exec();
148 | dockerClient.buildImageCmd();
149 |
150 | String term = IMG.split("/")[1];
151 | //List<SearchItem> dockerSearch = dockerClient.searchImagesCmd(term).exec();
152 | List<Image> dockerList = dockerClient.listImagesCmd().withImageNameFilter(IMG).exec();
153 | if (dockerList.size() == 0) {
154 | dockerClient.pullImageCmd(IMG)
155 | .withTag(tag)
156 | .exec(new PullImageResultCallback()).awaitSuccess();
157 |
158 | dockerList = dockerClient.listImagesCmd().withImageNameFilter(IMG).exec();
159 | if (dockerList.size() == 0) {
160 | logger.error(String.format("Image %s not found, unable to automatically pull image." +
161 | " Check `docker images`",
162 | IMG));
163 | System.exit(1);
164 | }
165 | }
166 | logger.info("Search returned " + dockerList);
167 |
168 |
169 | List<ExposedPort> tcpPorts = new ArrayList<>();
170 | List<PortBinding> portBindings = new ArrayList<>();
171 | for (Integer port : ports) {
172 | ExposedPort tcpPort = ExposedPort.tcp(port);
173 | Ports.Binding binding = new Ports.Binding("0.0.0.0", String.valueOf(port));
174 | PortBinding pb = new PortBinding(binding, tcpPort);
175 |
176 | tcpPorts.add(tcpPort);
177 | portBindings.add(pb);
178 | }
179 |
180 | List<Volume> volumeList = new ArrayList<>();
181 | List<Bind> volumeBindList = new ArrayList<>();
182 | for (String volumeDesc : volumeDescList) {
183 | String volFrom = volumeDesc.split(":")[0];
184 | String volTo = volumeDesc.split(":")[1];
185 | Volume vol = new Volume(volTo);
186 | volumeList.add(vol);
187 | volumeBindList.add(new Bind(volFrom, vol));
188 | }
189 |
190 |
191 | CreateContainerResponse containerResponse;
192 | if (envList == null) {
193 | containerResponse = dockerClient.createContainerCmd(IMG + ":" + tag)
194 | .withCmd(cmdList)
195 | .withExposedPorts(tcpPorts)
196 | .withHostConfig(
197 | new HostConfig()
198 | .withPortBindings(portBindings)
199 | .withPublishAllPorts(true)
200 | .withBinds(volumeBindList)
201 | )
202 | .withName(name)
203 | //.withVolumes(volumeList)
204 | .exec();
205 | } else {
206 | containerResponse = dockerClient.createContainerCmd(IMG + ":" + tag)
207 | .withEnv(envList)
208 | .withExposedPorts(tcpPorts)
209 | .withHostConfig(
210 | new HostConfig()
211 | .withPortBindings(portBindings)
212 | .withPublishAllPorts(true)
213 | .withBinds(volumeBindList)
214 | )
215 | .withName(name)
216 | //.withVolumes(volumeList)
217 | .exec();
218 | }
219 | container = containerResponse; // remember the container so stopDSE() can stop it later
220 | dockerClient.startContainerCmd(containerResponse.getId()).exec();
221 |
222 | return containerResponse.getId();
223 | }
224 |
225 |
226 |
227 | private Container searchContainer(String name) {
228 |
229 | ListContainersCmd listContainersCmd = dockerClient.listContainersCmd().withStatusFilter(List.of("running"));
230 | listContainersCmd.getFilters().put("name", Arrays.asList(name));
231 | List<Container> runningContainers = null;
232 | try {
233 | runningContainers = listContainersCmd.exec();
234 | } catch (Exception e) {
235 | e.printStackTrace();
236 | logger.error("Unable to contact docker, make sure docker is up and try again.");
237 | System.exit(1);
238 | }
239 |
240 | if (runningContainers.size() >= 1) {
241 | //Container test = runningContainers.get(0);
242 | logger.info(String.format("The container %s is already running", name));
243 |
244 | return runningContainers.get(0);
245 | }
246 | return null;
247 | }
248 |
249 | public void stopDSE()
250 | {
251 | if (container != null)
252 | dockerClient.stopContainerCmd(container.getId()).exec();
253 | }
254 | }
255 |
--------------------------------------------------------------------------------
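A standalone usage sketch for DockerHelper (not part of the repo); CassandraManager.start()
drives it the same way when dockerCassandra is enabled in the yaml:

    import java.time.Duration;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import com.datastax.powertools.dcp.managed.dse.DockerHelper;

    public class DockerHelperExample {
        private static final Logger logger = LoggerFactory.getLogger(DockerHelperExample.class);

        public static void main(String[] args) {
            DockerHelper dh = new DockerHelper();
            dh.startDSE(); // pulls datastax/ddac:latest if absent, starts it, waits on port 9042

            // waitForPort is also usable on its own; quiet=false logs a warning on timeout.
            boolean up = DockerHelper.waitForPort("localhost", 9042, Duration.ofSeconds(30), logger, false);
            logger.info("Cassandra port open: {}", up);

            dh.stopDSE(); // only stops a container created by this helper instance
        }
    }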
/src/main/java/com/datastax/powertools/dcp/managed/dse/TableDef.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright DataStax, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datastax.powertools.dcp.managed.dse;
17 |
18 | import com.amazonaws.services.dynamodbv2.model.AttributeDefinition;
19 | import com.amazonaws.services.dynamodbv2.model.ComparisonOperator;
20 | import com.amazonaws.services.dynamodbv2.model.ScalarAttributeType;
21 | import com.datastax.oss.driver.api.core.CqlSession;
22 | import com.datastax.oss.driver.api.core.cql.PreparedStatement;
23 | import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata;
24 | import com.datastax.oss.driver.api.querybuilder.select.Select;
25 | import org.slf4j.Logger;
26 | import org.slf4j.LoggerFactory;
27 |
28 | import java.util.HashMap;
29 | import java.util.Map;
30 | import java.util.Optional;
31 |
32 | import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.bindMarker;
33 | import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom;
34 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.ASCII;
35 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.BIGINT;
36 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.BLOB;
37 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.BOOLEAN;
38 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.COUNTER;
39 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.DATE;
40 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.DECIMAL;
41 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.DOUBLE;
42 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.FLOAT;
43 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.INET;
44 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.INT;
45 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.SMALLINT;
46 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.TIME;
47 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.TIMEUUID;
48 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.TINYINT;
49 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.UUID;
50 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.VARCHAR;
51 | import static com.datastax.oss.protocol.internal.ProtocolConstants.DataType.VARINT;
52 |
53 | public class TableDef {
54 | private static final Logger logger = LoggerFactory.getLogger(TableDef.class);
55 |
56 | private AttributeDefinition partitionKey;
57 | private Optional<AttributeDefinition> clusteringKey = Optional.empty();
58 |
59 | private PreparedStatement jsonPutStatement;
60 | private PreparedStatement jsonQueryPartitionStatement;
61 | private PreparedStatement jsonQueryRowStatement;
62 | private PreparedStatement deleteStatement;
63 | private PreparedStatement queryRowStatement;
64 | private PreparedStatement jsonQueryPartitionAndClusteringStatement;
65 | private Map<ComparisonOperator, PreparedStatement> jsonQueryRangeStatementMap = new HashMap<>();
66 | private String keyspaceName;
67 | private CqlSession session;
68 |
69 | public void setTableName(String tableName) {
70 | this.tableName = tableName;
71 | }
72 |
73 | private String tableName;
74 |
75 | public PreparedStatement getQueryRowStatement() {
76 | return queryRowStatement;
77 | }
78 |
79 | public PreparedStatement getJsonQueryRowStatement() {
80 | return jsonQueryRowStatement;
81 | }
82 |
83 | public void setJsonQueryRowStatement(PreparedStatement jsonQueryRowStatement) {
84 | this.jsonQueryRowStatement = jsonQueryRowStatement;
85 | }
86 |
87 | public AttributeDefinition getPartitionKey() {
88 | return partitionKey;
89 | }
90 |
91 | public Optional<AttributeDefinition> getClusteringKey() {
92 | return clusteringKey;
93 | }
94 |
95 |
96 | public PreparedStatement getJsonPutStatement() {
97 | return jsonPutStatement;
98 | }
99 |
100 | public void setJsonPutStatement(PreparedStatement jsonPutStatement) {
101 | this.jsonPutStatement = jsonPutStatement;
102 | }
103 |
104 | public void setPartitionKey(ColumnMetadata pk) {
105 | this.partitionKey = convertToAttribute(pk);
106 | }
107 |
108 | public void setClusteringKey(ColumnMetadata column) {
109 | this.clusteringKey = Optional.of(convertToAttribute(column));
110 | }
111 |
112 | private AttributeDefinition convertToAttribute(ColumnMetadata column)
113 | {
114 | AttributeDefinition ad = new AttributeDefinition();
115 | ad.setAttributeName(column.getName().asInternal());
116 |
117 | switch (column.getType().getProtocolCode())
118 | {
119 | case BLOB:
120 | ad.setAttributeType(ScalarAttributeType.B);
121 | break;
122 | case BIGINT:
123 | case BOOLEAN:
124 | case COUNTER:
125 | case DECIMAL:
126 | case DOUBLE:
127 | case FLOAT:
128 | case INT:
129 | case VARINT:
130 | case TINYINT:
131 | case SMALLINT:
132 | ad.setAttributeType(ScalarAttributeType.N);
133 | break;
134 | case TIMEUUID:
135 | case UUID:
136 | case INET:
137 | case DATE:
138 | case VARCHAR:
139 | case ASCII:
140 | case TIME:
141 | ad.setAttributeType(ScalarAttributeType.S);
142 | break;
143 | default:
144 | throw new IllegalArgumentException("Type not supported: " + column.getName().asInternal() + " " + column.getType());
145 | }
146 |
147 | return ad;
148 | }
149 |
150 |
151 |
152 | public PreparedStatement getJsonQueryPartitionStatement() {
153 | return jsonQueryPartitionStatement;
154 | }
155 |
156 | public void setJsonQueryPartitionStatement(PreparedStatement jsonQueryPartitionStatement) {
157 | this.jsonQueryPartitionStatement = jsonQueryPartitionStatement;
158 | }
159 |
160 | public PreparedStatement getDeleteStatement() {
161 | return deleteStatement;
162 | }
163 |
164 | public void setDeleteStatement(PreparedStatement deleteStatement) {
165 | this.deleteStatement = deleteStatement;
166 | }
167 |
168 | public void setQueryRowStatement(PreparedStatement queryRowStatement) {
169 | this.queryRowStatement = queryRowStatement;
170 | }
171 |
172 | public PreparedStatement getLazyJsonQueryPartitionAndClusteringStatement(ComparisonOperator comparisonOperator) {
173 | PreparedStatement preparedStatement = null;
174 |
175 | if (jsonQueryRangeStatementMap.containsKey(comparisonOperator)) {
176 | preparedStatement = jsonQueryRangeStatementMap.get(comparisonOperator);
177 | }
178 | else {
179 | String clustering = "\"" + clusteringKey.get().getAttributeName() + "\"";
180 | String partition = "\"" + partitionKey.getAttributeName() + "\"";
181 | Select select = selectFrom(keyspaceName, tableName).all().whereColumn(partition).isEqualTo(bindMarker());
182 | switch (comparisonOperator) {
183 | case EQ:
184 | preparedStatement = session.prepare(select.whereColumn(clustering).isEqualTo(bindMarker()).build());
185 | break;
186 | case NE:
187 | preparedStatement = session.prepare(select.whereColumn(clustering).isNotEqualTo(bindMarker()).build());
188 | break;
189 | case IN:
190 | preparedStatement = session.prepare(select.whereColumn(clustering).in(bindMarker()).build());
191 | break;
192 | case LE:
193 | preparedStatement = session.prepare(select.whereColumn(clustering).isLessThanOrEqualTo(bindMarker()).build());
194 | break;
195 | case LT:
196 | preparedStatement = session.prepare(select.whereColumn(clustering).isLessThan(bindMarker()).build());
197 | break;
198 | case GE:
199 | preparedStatement = session.prepare(select.whereColumn(clustering).isGreaterThanOrEqualTo(bindMarker()).build());
200 | break;
201 | case GT:
202 | preparedStatement = session.prepare(select.whereColumn(clustering).isGreaterThan(bindMarker()).build());
203 | break;
204 | case BETWEEN:
205 | preparedStatement = session.prepare(select
206 | .whereColumn(clustering).isGreaterThanOrEqualTo(bindMarker())
207 | .whereColumn(clustering).isLessThanOrEqualTo(bindMarker())
208 | .build()
209 | );
210 | break;
211 | case NOT_NULL:
212 | preparedStatement = session.prepare(select
213 | .whereColumn(clustering).isNotNull()
214 | .build()
215 | );
216 | break;
217 | case NULL:
218 | throw new UnsupportedOperationException("CQL does not support null clustering columns");
219 | case CONTAINS:
220 | throw new UnsupportedOperationException("Contains - feature unsupported");
221 | case NOT_CONTAINS:
222 | throw new UnsupportedOperationException("Not Contains - feature unsupported");
223 | case BEGINS_WITH:
224 | throw new UnsupportedOperationException("Begins With - feature unsupported");
225 | }
226 | jsonQueryRangeStatementMap.put(comparisonOperator, preparedStatement); // cache so each operator is only prepared once
227 | }
228 | return preparedStatement;
229 | }
230 |
231 | public void setKeyspaceName(String keyspaceName) {
232 | this.keyspaceName = keyspaceName;
233 | }
234 |
235 | public void setSession(CqlSession session) {
236 | this.session = session;
237 | }
238 | }
239 |
--------------------------------------------------------------------------------
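A sketch of how the lazily prepared range statements above might be executed (not part of
the repo; table and key values are hypothetical, and bind order follows the generated CQL:
partition key first, then the clustering bound):

    import com.amazonaws.services.dynamodbv2.model.ComparisonOperator;
    import com.datastax.oss.driver.api.core.CqlSession;
    import com.datastax.oss.driver.api.core.cql.PreparedStatement;
    import com.datastax.oss.driver.api.core.cql.ResultSet;
    import com.datastax.oss.driver.api.core.cql.Row;
    import com.datastax.powertools.dcp.managed.dse.TableDef;

    public class TableDefExample {
        // SELECT ... WHERE pk = ? AND ck > ?  via the cached GT statement.
        static void queryGreaterThan(CqlSession session, TableDef table, String pk, long ckLowerBound) {
            PreparedStatement ps = table.getLazyJsonQueryPartitionAndClusteringStatement(ComparisonOperator.GT);
            ResultSet rs = session.execute(ps.bind(pk, ckLowerBound));
            for (Row row : rs) {
                System.out.println(row.getFormattedContents());
            }
        }
    }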
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------