├── scala-driver-4.x
│   ├── project
│   │   └── build.properties
│   ├── src
│   │   └── main
│   │       ├── resources
│   │       │   └── application.conf
│   │       └── scala
│   │           └── com
│   │               └── datastax
│   │                   └── alexott
│   │                       └── demos
│   │                           └── objmapper
│   │                               ├── ObjeMapperTest.scala
│   │                               └── Entitites.scala
│   └── build.sbt
├── .gitignore
├── driver-4.x
│   └── src
│       └── main
│           ├── resources
│           │   └── application.conf
│           ├── java
│           │   └── com
│           │       └── datastax
│           │           └── alexott
│           │               └── demos
│           │                   ├── TestAstra.java
│           │                   ├── TestPointType.java
│           │                   ├── CreateKeyspacesInferTopology.java
│           │                   ├── UdtTest1.java
│           │                   ├── ConnectWithDCDetection.java
│           │                   ├── Commons.java
│           │                   └── DCDetectingLBPolicy.java
│           ├── kotlin
│           │   └── com
│           │       └── datastax
│           │           └── alexott
│           │               └── demos
│           │                   └── KtTestObjMapper.kt
│           └── scala
│               └── com
│                   └── datastax
│                       └── alexott
│                           └── demos
│                               └── UdtScalaTest1.scala
├── driver-1.x
│   └── src
│       ├── test
│       │   └── java
│       │       └── com
│       │           └── datastax
│       │               └── alexott
│       │                   └── demos
│       │                       └── TestUtils.java
│       └── main
│           ├── java
│           │   └── com
│           │       └── datastax
│           │           └── alexott
│           │               └── demos
│           │                   ├── Utils.java
│           │                   ├── objmapper
│           │                   │   ├── STestAccessor.java
│           │                   │   ├── STest.java
│           │                   │   ├── TableObjAccessor.java
│           │                   │   ├── UDTTestType.java
│           │                   │   ├── ExpEntity.java
│           │                   │   ├── TableObjJavaTest.java
│           │                   │   ├── STestMain.java
│           │                   │   ├── TestData.java
│           │                   │   ├── AuditTestType.java
│           │                   │   ├── TableObjAccessorTest.java
│           │                   │   ├── MapperTest1.java
│           │                   │   ├── TableObjJava.java
│           │                   │   ├── UDTTestTableFR.java
│           │                   │   ├── UDTTestTableNonFR.java
│           │                   │   ├── Test4.java
│           │                   │   ├── ExpPopularity.java
│           │                   │   ├── Test4Data.java
│           │                   │   ├── TableObjectClustered.java
│           │                   │   ├── AuditTestMain.java
│           │                   │   ├── AuditTestTable.java
│           │                   │   ├── Test4_2.java
│           │                   │   ├── FRvsNonFRUDTMapping.java
│           │                   │   └── ExpMaps.java
│           │                   ├── product
│           │                   │   ├── Information.java
│           │                   │   ├── App.java
│           │                   │   └── Product.java
│           │                   ├── solr
│           │                   │   ├── DTest.java
│           │                   │   └── DTestMain.java
│           │                   ├── misc
│           │                   │   ├── Test3.java
│           │                   │   └── Test1.java
│           │                   ├── JMXTest.java
│           │                   ├── graph
│           │                   │   ├── GDTest1.java
│           │                   │   └── GraphLoad.java
│           │                   ├── TestPreparedStatements.java
│           │                   ├── metrics
│           │                   │   └── Metrics1.java
│           │                   ├── SessionLimiter.java
│           │                   ├── CassandraHealthCheck.java
│           │                   ├── WhiteListPolicyExample.java
│           │                   ├── TestResultSerializer.java
│           │                   ├── TokenRangesScan.java
│           │                   ├── DumpClusterConfig.java
│           │                   ├── QBuilder.java
│           │                   ├── ResultSetSerializer.java
│           │                   ├── TestBatches.java
│           │                   └── AlterTableWithChecks.java
│           ├── scala
│           │   └── com
│           │       └── datastax
│           │           └── alexott
│           │               ├── GetDCNames.scala
│           │               ├── CodecsTest.scala
│           │               └── ObjMapperTest.scala
│           └── kotlin
│               └── com
│                   └── datastax
│                       └── alexott
│                           └── demos
│                               └── KtTestObjMapper.kt
├── cassandra-join-spark
│   ├── setup.cql
│   ├── src
│   │   └── main
│   │       ├── resources
│   │       │   └── log4j2.xml
│   │       ├── java
│   │       │   └── json
│   │       │       ├── ticks
│   │       │       │   ├── TickData.java
│   │       │       │   └── TickGenerator.java
│   │       │       └── utils
│   │       │           └── ExchangeUtils.java
│   │       └── scala
│   │           └── com
│   │               └── datastax
│   │                   └── alexott
│   │                       └── demos
│   │                           └── streaming
│   │                               ├── StockTickersJoinDataFrames.scala
│   │                               └── StockTickersJoinRDD.scala
│   └── pom.xml
├── spark-dse
│   ├── src
│   │   └── main
│   │       ├── scala
│   │       │   └── com
│   │       │       └── datastax
│   │       │           └── alexott
│   │       │               ├── graphframes
│   │       │               │   └── DGFSubGraph.scala
│   │       │               ├── spark
│   │       │               │   ├── JoinTestsScala.scala
│   │       │               │   └── JoinTestsRDDScala.scala
│   │       │               ├── dsefs
│   │       │               │   ├── DsefsDownloader.scala
│   │       │               │   ├── DsefsUploader.scala
│   │       │               │   └── DsefsGetMerge.scala
│   │       │               └── streaming
│   │       │                   ├── StructuredStreamingKafkaDSE.scala
│   │       │                   └── StructuredStreamingDSE.scala
│   │       ├── java
│   │       │   └── com
│   │       │       └── datastax
│   │       │           └── alexott
│   │       │               └── demos
│   │       │                   └── spark
│   │       │                       ├── SparkTest1.java
│   │       │                       ├── UUIDData.java
│   │       │                       ├── JoinTests.java
│   │       │                       ├── UUIDTest.java
│   │       │                       ├── TableCreate.java
│   │       │                       └── JoinTestsRDD.java
│   │       └── resources
│   │           └── tweets-1.json
│   └── pom.xml
├── spark-oss
│   ├── src
│   │   └── main
│   │       ├── java
│   │       │   └── com
│   │       │       └── datastax
│   │       │           └── alexott
│   │       │               └── demos
│   │       │                   └── spark
│   │       │                       ├── SparkTest1.java
│   │       │                       ├── UUIDData.java
│   │       │                       ├── JoinTests.java
│   │       │                       ├── UUIDTest.java
│   │       │                       ├── TableCreate.java
│   │       │                       ├── JoinTestsRDD.java
│   │       │                       └── streaming
│   │       │                           └── StructuredStreaming.java
│   │       └── scala
│   │           └── com
│   │               └── datastax
│   │                   └── alexott
│   │                       ├── spark
│   │                       │   ├── JoinTestsScala.scala
│   │                       │   └── JoinTestsRDDScala.scala
│   │                       └── streaming
│   │                           ├── StructuredStreamingForEachBatch.scala
│   │                           └── StructuredStreamingForEach.scala
│   └── pom.xml
├── scc-2.5
│   ├── src
│   │   └── main
│   │       └── scala
│   │           └── com
│   │               └── datastax
│   │                   └── alexott
│   │                       ├── spark
│   │                       │   ├── JoinTestsRDDScala.scala
│   │                       │   └── JoinTestsScala.scala
│   │                       └── streaming
│   │                           ├── StructuredStreamingKafkaDSE.scala
│   │                           └── StructuredStreamingDSE.scala
│   └── pom.xml
├── prometheus-java-driver
│   ├── README.md
│   ├── src
│   │   └── main
│   │       ├── resources
│   │       │   └── application.conf
│   │       └── java
│   │           └── com
│   │               └── datastax
│   │                   └── alexott
│   │                       └── demos
│   │                           └── MetricsWithPrometheus.java
│   └── pom.xml
└── README.md
/scala-driver-4.x/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=1.3.12
2 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /.classpath
2 | /.project
3 | /.settings/
4 | /target/
5 | *~
6 | /.idea/
7 | /jdtest1.iml
8 | /*/target/
9 | /*/\.idea/
10 | /*/*.iml
11 | /scala-driver-4.x/project/target/
12 |
--------------------------------------------------------------------------------
/driver-4.x/src/main/resources/application.conf:
--------------------------------------------------------------------------------
1 | datastax-java-driver {
2 | basic.load-balancing-policy {
3 | class = com.datastax.oss.driver.internal.core.loadbalancing.DcInferringLoadBalancingPolicy
4 | }
5 | }
--------------------------------------------------------------------------------
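A hypothetical usage sketch (not a file from this repository): with the `application.conf` above on the classpath, `DcInferringLoadBalancingPolicy` infers the local datacenter from the contact points, so the session can be built without an explicit `.withLocalDatacenter(...)` call.

```java
import com.datastax.oss.driver.api.core.CqlSession;

import java.net.InetSocketAddress;

public class DcInferringSketch {
    public static void main(String[] args) {
        // application.conf is picked up automatically from the classpath
        try (CqlSession session = CqlSession.builder()
                .addContactPoint(new InetSocketAddress("127.0.0.1", 9042))
                .build()) {
            System.out.println(session.execute("select release_version from system.local")
                    .one().getString("release_version"));
        }
    }
}
```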
/scala-driver-4.x/src/main/resources/application.conf:
--------------------------------------------------------------------------------
1 | datastax-java-driver {
2 | basic.load-balancing-policy {
3 | class = com.datastax.oss.driver.internal.core.loadbalancing.DcInferringLoadBalancingPolicy
4 | }
5 | }
--------------------------------------------------------------------------------
/driver-1.x/src/test/java/com/datastax/alexott/demos/TestUtils.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos;
2 |
3 | import com.datastax.driver.core.LocalDate;
4 |
5 | import junit.framework.TestCase;
6 |
7 | public class TestUtils extends TestCase {
8 |
9 | public void testDate() {
10 | assertEquals(LocalDate.fromYearMonthDay(2017,11,22), Utils.convertDate("2017-11-22"));
11 | }
12 |
13 | }
14 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/Utils.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos;
2 |
3 | import com.datastax.driver.core.LocalDate;
4 |
5 | public class Utils {
6 |
7 | public static LocalDate convertDate(final String date) {
8 | String[] arr = date.split("-");
9 | if (arr.length != 3)
10 | return null;
11 | return LocalDate.fromYearMonthDay(Integer.parseInt(arr[0]), Integer.parseInt(arr[1]),
12 | Integer.parseInt(arr[2]));
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/cassandra-join-spark/setup.cql:
--------------------------------------------------------------------------------
1 | create keyspace if not exists test WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'};
2 |
3 | use test;
4 |
5 | create table if not exists stock_info (
6 | symbol text primary key,
7 | exchange text,
8 | industry text,
9 | name text,
10 | base_price double
11 | );
12 |
13 | truncate stock_info;
14 | COPY stock_info (name, symbol, base_price, exchange, industry) FROM './src/main/resources/json/csv/exchangedata.csv';
15 |
16 |
--------------------------------------------------------------------------------
/cassandra-join-spark/src/main/resources/log4j2.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/STestAccessor.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper;
2 |
3 | import com.datastax.driver.mapping.Result;
4 | import com.datastax.driver.mapping.annotations.Accessor;
5 | import com.datastax.driver.mapping.annotations.Param;
6 | import com.datastax.driver.mapping.annotations.Query;
7 |
8 | @Accessor
9 | public interface STestAccessor {
10 | @Query("SELECT * FROM test.stest WHERE solr_query = :solr")
11 | Result<STest> getViaSolr(@Param("solr") String solr);
12 | }
13 |
--------------------------------------------------------------------------------
/spark-dse/src/main/scala/com/datastax/alexott/graphframes/DGFSubGraph.scala:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.graphframes
2 |
3 | import org.apache.spark.SparkContext
4 | import org.apache.spark.sql.SparkSession
5 | //import com.datastax.bdp.graph.spark.graphframe._
6 |
7 | object DGFSubGraph {
8 | def main(args: Array[String]): Unit = {
9 | val sc = new SparkContext()
10 | val spark = SparkSession.builder().config(sc.getConf).getOrCreate()
11 | import spark.implicits._
12 |
13 | // val graphBuilder = spark.dseGraph("GRAPH_NAME")
14 |
15 |
16 | }
17 |
18 | }
19 |
--------------------------------------------------------------------------------
/spark-dse/src/main/java/com/datastax/alexott/demos/spark/SparkTest1.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.spark;
2 |
3 | import org.apache.spark.sql.Dataset;
4 | import org.apache.spark.sql.Row;
5 | import org.apache.spark.sql.SparkSession;
6 |
7 | public class SparkTest1 {
8 |
9 | public static void main(String[] args) {
10 | SparkSession spark = SparkSession
11 | .builder()
12 | .appName("CassandraSpark")
13 | .getOrCreate();
14 |
15 | Dataset<Row> sqlDF = spark.sql("select * from datastax.vehicle limit 1000");
16 | sqlDF.printSchema();
17 | sqlDF.show();
18 | }
19 |
20 | }
21 |
--------------------------------------------------------------------------------
/spark-oss/src/main/java/com/datastax/alexott/demos/spark/SparkTest1.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.spark;
2 |
3 | import org.apache.spark.sql.Dataset;
4 | import org.apache.spark.sql.Row;
5 | import org.apache.spark.sql.SparkSession;
6 |
7 | public class SparkTest1 {
8 |
9 | public static void main(String[] args) {
10 | SparkSession spark = SparkSession
11 | .builder()
12 | .appName("CassandraSpark")
13 | .getOrCreate();
14 |
15 | Dataset<Row> sqlDF = spark.sql("select * from datastax.vehicle limit 1000");
16 | sqlDF.printSchema();
17 | sqlDF.show();
18 | }
19 |
20 | }
21 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/STest.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper;
2 |
3 | import com.datastax.driver.mapping.annotations.PartitionKey;
4 | import com.datastax.driver.mapping.annotations.Table;
5 |
6 | @Table(keyspace = "test",name = "stest")
7 | public class STest {
8 | @PartitionKey
9 | private int id;
10 | private String t;
11 |
12 | public int getId() {
13 | return id;
14 | }
15 |
16 | public void setId(int id) {
17 | this.id = id;
18 | }
19 |
20 | public String getT() {
21 | return t;
22 | }
23 |
24 | public void setT(String t) {
25 | this.t = t;
26 | }
27 |
28 | }
29 |
--------------------------------------------------------------------------------
/spark-dse/src/main/java/com/datastax/alexott/demos/spark/UUIDData.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.spark;
2 |
3 | import java.util.UUID;
4 |
5 | public class UUIDData {
6 | private UUID u;
7 | private int id;
8 |
9 | public UUIDData() {
10 | }
11 | public UUIDData(int id, UUID u) {
12 | this.u = u;
13 | this.id = id;
14 | }
15 |
16 | public UUID getU() {
17 | return u;
18 | }
19 |
20 | public void setU(UUID u) {
21 | this.u = u;
22 | }
23 |
24 | public int getId() {
25 | return id;
26 | }
27 |
28 | public void setId(int id) {
29 | this.id = id;
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/spark-oss/src/main/java/com/datastax/alexott/demos/spark/UUIDData.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.spark;
2 |
3 | import java.util.UUID;
4 |
5 | public class UUIDData {
6 | private UUID u;
7 | private int id;
8 |
9 | public UUIDData() {
10 | }
11 | public UUIDData(int id, UUID u) {
12 | this.u = u;
13 | this.id = id;
14 | }
15 |
16 | public UUID getU() {
17 | return u;
18 | }
19 |
20 | public void setU(UUID u) {
21 | this.u = u;
22 | }
23 |
24 | public int getId() {
25 | return id;
26 | }
27 |
28 | public void setId(int id) {
29 | this.id = id;
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/product/Information.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.product;
2 |
3 | import com.datastax.driver.mapping.annotations.UDT;
4 |
5 | @UDT(keyspace = "test", name = "information")
6 | public class Information {
7 | String info1;
8 | String info2;
9 |
10 | public String getInfo1() {
11 | return info1;
12 | }
13 | public void setInfo1(String info1) {
14 | this.info1 = info1;
15 | }
16 | public String getInfo2() {
17 | return info2;
18 | }
19 | public void setInfo2(String info2) {
20 | this.info2 = info2;
21 | }
22 | @Override
23 | public String toString() {
24 | return "Information [info1=" + info1 + ", info2=" + info2 + "]";
25 | }
26 |
27 | }
28 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/scala/com/datastax/alexott/GetDCNames.scala:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott
2 |
3 | import com.datastax.driver.core.Cluster
4 |
5 | import scala.collection.JavaConverters
6 |
7 | object GetDCNames {
8 |
9 | def main(args: Array[String]): Unit = {
10 |
11 | val cluster = Cluster.builder()
12 | .addContactPoint(System.getProperty("contactPoint", "127.0.0.1"))
13 | .build();
14 | val session = cluster.connect()
15 |
16 | val metadata = cluster.getMetadata
17 | val hosts = JavaConverters.collectionAsScalaIterableConverter(metadata.getAllHosts).asScala.toSeq
18 | val dcs = hosts.map{host => host.getDatacenter}.toSet
19 |
20 | println("All DCs: " + dcs)
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/TableObjAccessor.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper;
2 |
3 | import com.datastax.driver.mapping.Result;
4 | import com.datastax.driver.mapping.annotations.Accessor;
5 | import com.datastax.driver.mapping.annotations.Param;
6 | import com.datastax.driver.mapping.annotations.Query;
7 |
8 | @Accessor
9 | public interface TableObjAccessor {
10 | @Query("SELECT * from test.scala_test_complex where p1 = :p1 and p2 = :p2")
11 | Result<TableObjectClustered> getByPartKey(@Param int p1, @Param int p2);
12 |
13 | @Query("DELETE from test.scala_test_complex where p1 = :p1 and p2 = :p2")
14 | void deleteByPartKey(@Param int p1, @Param int p2);
15 | }
16 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/UDTTestType.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper;
2 |
3 | import com.datastax.driver.mapping.annotations.UDT;
4 |
5 | // CREATE TYPE test.tudt (id int, t text);
6 | @UDT(name = "tudt", keyspace = "test")
7 | public class UDTTestType {
8 | int id;
9 | String t;
10 |
11 | public UDTTestType(int id, String t) {
12 | this.id = id;
13 | this.t = t;
14 | }
15 |
16 | public UDTTestType() {
17 | }
18 |
19 | public int getId() {
20 | return id;
21 | }
22 |
23 | public void setId(int id) {
24 | this.id = id;
25 | }
26 |
27 | public String getT() {
28 | return t;
29 | }
30 |
31 | public void setT(String t) {
32 | this.t = t;
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/spark-dse/src/main/scala/com/datastax/alexott/spark/JoinTestsScala.scala:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.spark
2 |
3 | import org.apache.spark.SparkContext
4 | import org.apache.spark.sql.SparkSession
5 |
6 | object JoinTestsScala {
7 | def main(args: Array[String]): Unit = {
8 |
9 | val sc = new SparkContext()
10 | val spark = SparkSession.builder().config(sc.getConf).getOrCreate()
11 | import spark.implicits._
12 |
13 | val toJoin = spark.range(1, 1000).map(x => x.intValue).withColumnRenamed("value", "id")
14 |
15 | val dataset = spark.read
16 | .format("org.apache.spark.sql.cassandra")
17 | .options(Map("table" -> "jtest", "keyspace" -> "test"))
18 | .load
19 | val joined = toJoin.join(dataset, dataset("id") === toJoin("id"))
20 | joined.explain
21 | joined.show(10)
22 | }
23 |
24 | }
25 |
--------------------------------------------------------------------------------
/spark-oss/src/main/scala/com/datastax/alexott/spark/JoinTestsScala.scala:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.spark
2 |
3 | import org.apache.spark.SparkContext
4 | import org.apache.spark.sql.SparkSession
5 |
6 | object JoinTestsScala {
7 | def main(args: Array[String]): Unit = {
8 |
9 | val sc = new SparkContext()
10 | val spark = SparkSession.builder().config(sc.getConf).getOrCreate()
11 | import spark.implicits._
12 |
13 | val toJoin = spark.range(1, 1000).map(x => x.intValue).withColumnRenamed("value", "id")
14 |
15 | val dataset = spark.read
16 | .format("org.apache.spark.sql.cassandra")
17 | .options(Map("table" -> "jtest", "keyspace" -> "test"))
18 | .load
19 | val joined = toJoin.join(dataset, dataset("id") === toJoin("id"))
20 | joined.explain
21 | joined.show(10)
22 | }
23 |
24 | }
25 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/solr/DTest.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.solr;
2 |
3 | import java.time.Instant;
4 | import java.util.Date;
5 | import org.apache.solr.client.solrj.beans.Field;
6 | import com.fasterxml.jackson.annotation.JsonIgnore;
7 |
8 | public class DTest {
9 | @Field("id")
10 | private int id;
11 |
12 | private Instant t;
13 |
14 | public int getId() {
15 | return id;
16 | }
17 |
18 | public void setId(int id) {
19 | this.id = id;
20 | }
21 |
22 | public Date getT() {
23 | return new Date(t.toEpochMilli());
24 | }
25 |
26 | @Field("t")
27 | public void setT(Date t) {
28 | this.t = t.toInstant();
29 | }
30 |
31 | @JsonIgnore
32 | public void setInstant(Instant t) {
33 | this.t = t;
34 | }
35 |
36 | @JsonIgnore
37 | public Instant getInstant() {
38 | return t;
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/scc-2.5/src/main/scala/com/datastax/alexott/spark/JoinTestsRDDScala.scala:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.spark
2 |
3 | import org.apache.spark.SparkContext
4 | import org.apache.spark.sql.SparkSession
5 | import com.datastax.spark.connector._
6 |
7 | object JoinTestsRDDScala {
8 | def main(args: Array[String]): Unit = {
9 |
10 | val sc = new SparkContext()
11 | val spark = SparkSession.builder().config(sc.getConf).getOrCreate()
12 | import spark.implicits._
13 |
14 | val toJoin = spark.range(1, 100).map(x => x.intValue).withColumnRenamed("value", "id").rdd
15 |
16 | val joined = toJoin.joinWithCassandraTable("test","jtest")
17 | println("Plan: " + joined.toDebugString)
18 | joined.cache()
19 | println("Count: " + joined.count())
20 | print("Data: ")
21 | joined.take(10).foreach(print)
22 | println()
23 |
24 | }
25 |
26 | }
27 |
--------------------------------------------------------------------------------
/spark-dse/src/main/scala/com/datastax/alexott/spark/JoinTestsRDDScala.scala:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.spark
2 |
3 | import org.apache.spark.SparkContext
4 | import org.apache.spark.sql.SparkSession
5 | import com.datastax.spark.connector._
6 |
7 | object JoinTestsRDDScala {
8 | def main(args: Array[String]): Unit = {
9 |
10 | val sc = new SparkContext()
11 | val spark = SparkSession.builder().config(sc.getConf).getOrCreate()
12 | import spark.implicits._
13 |
14 | val toJoin = spark.range(1, 100).map(x => x.intValue).withColumnRenamed("value", "id").rdd
15 |
16 | val joined = toJoin.joinWithCassandraTable("test","jtest")
17 | println("Plan: " + joined.toDebugString)
18 | joined.cache()
19 | println("Count: " + joined.count())
20 | print("Data: ")
21 | joined.take(10).foreach(print)
22 | println()
23 |
24 | }
25 |
26 | }
27 |
--------------------------------------------------------------------------------
/spark-oss/src/main/scala/com/datastax/alexott/spark/JoinTestsRDDScala.scala:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.spark
2 |
3 | import org.apache.spark.SparkContext
4 | import org.apache.spark.sql.SparkSession
5 | import com.datastax.spark.connector._
6 |
7 | object JoinTestsRDDScala {
8 | def main(args: Array[String]): Unit = {
9 |
10 | val sc = new SparkContext()
11 | val spark = SparkSession.builder().config(sc.getConf).getOrCreate()
12 | import spark.implicits._
13 |
14 | val toJoin = spark.range(1, 100).map(x => x.intValue).withColumnRenamed("value", "id").rdd
15 |
16 | val joined = toJoin.joinWithCassandraTable("test","jtest")
17 | println("Plan: " + joined.toDebugString)
18 | joined.cache()
19 | println("Count: " + joined.count())
20 | print("Data: ")
21 | joined.take(10).foreach(print)
22 | println()
23 |
24 | }
25 |
26 | }
27 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/ExpEntity.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper;
2 |
3 | import com.datastax.driver.mapping.annotations.PartitionKey;
4 | import com.datastax.driver.mapping.annotations.Table;
5 |
6 | import java.util.Set;
7 |
8 | @Table(name = "entities_udt", keyspace = "srs")
9 | public class ExpEntity {
10 |
11 | @PartitionKey
12 | int hcom_geo_id;
13 |
14 | Set<ExpPopularity> popularity;
15 |
16 | public int getHcom_geo_id() {
17 | return hcom_geo_id;
18 | }
19 |
20 | public void setHcom_geo_id(int hcom_geo_id) {
21 | this.hcom_geo_id = hcom_geo_id;
22 | }
23 |
24 | public Set<ExpPopularity> getPopularity() {
25 | return popularity;
26 | }
27 |
28 | public void setPopularity(Set<ExpPopularity> popularity) {
29 | this.popularity = popularity;
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/driver-4.x/src/main/java/com/datastax/alexott/demos/TestAstra.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos;
2 |
3 | import com.datastax.oss.driver.api.core.CqlSession;
4 | import com.datastax.oss.driver.api.core.cql.ResultSet;
5 | import com.datastax.oss.driver.api.core.cql.Row;
6 |
7 | import java.nio.file.Paths;
8 |
9 | public class TestAstra {
10 | public static void main(String[] args) {
11 | try (CqlSession session = CqlSession.builder()
12 | .withCloudSecureConnectBundle(Paths.get("/Users/ott/Downloads/secure-connect-test.zip"))
13 | .withAuthCredentials("test", "...")
14 | .build()) {
15 | ResultSet rs = session.execute("select id,v from test.t1");
16 | for (Row row: rs) {
17 | System.out.println("id=" + row.getInt("id") + ", v=" + row.getInt("v"));
18 | }
19 | }
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/TableObjJavaTest.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper;
2 |
3 | import com.datastax.driver.core.Cluster;
4 | import com.datastax.driver.core.Session;
5 | import com.datastax.driver.mapping.Mapper;
6 | import com.datastax.driver.mapping.MappingManager;
7 |
8 | public class TableObjJavaTest {
9 | public static void main(String[] args) {
10 | String server = System.getProperty("contactPoint", "127.0.0.1");
11 | try (Cluster cluster = Cluster.builder().addContactPoint(server).build();
12 | Session session = cluster.connect()) {
13 | MappingManager manager = new MappingManager(session);
14 | Mapper<TableObjJava> mapper = manager.mapper(TableObjJava.class);
15 |
16 | TableObjJava obj = mapper.get(1);
17 | System.out.println("Obj(1)=" + obj);
18 | }
19 | }
20 |
21 | }
22 |
--------------------------------------------------------------------------------
/driver-4.x/src/main/java/com/datastax/alexott/demos/TestPointType.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos;
2 |
3 | import com.datastax.oss.driver.api.core.CqlSession;
4 | import com.datastax.oss.driver.api.core.cql.ResultSet;
5 | import com.datastax.oss.driver.api.core.cql.Row;
6 | import com.datastax.dse.driver.api.core.data.geometry.Point;
7 |
8 | public class TestPointType {
9 | public static void main(String[] args) {
10 | try (CqlSession session = CqlSession.builder()
11 | .addContactPoints(Commons.getContactPoints("10.101.34.176,10.101.34.94"))
12 | .build()) {
13 | ResultSet rs = session.execute("select point from test.gen_events1");
14 | for (Row row: rs) {
15 | Point point = row.get("point", Point.class);
16 | if (point != null)
17 | System.out.println("point = " + point);
18 | }
19 | }
20 |
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/STestMain.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper;
2 |
3 | import com.datastax.driver.core.Cluster;
4 | import com.datastax.driver.core.Session;
5 | import com.datastax.driver.mapping.MappingManager;
6 | import com.datastax.driver.mapping.Result;
7 |
8 | public class STestMain {
9 | public static void main(String[] args) {
10 | String server = System.getProperty("contactPoint", "127.0.0.1");
11 | Cluster cluster = Cluster.builder().addContactPoint(server).build();
12 | Session session = cluster.connect();
13 |
14 | MappingManager manager = new MappingManager(session);
15 |
16 | STestAccessor sa = manager.createAccessor(STestAccessor.class);
17 | Result<STest> rs = sa.getViaSolr("*:*");
18 |
19 | for (STest sTest : rs) {
20 | System.out.println("id=" + sTest.getId() + ", text=" + sTest.getT());
21 | }
22 |
23 | session.close();
24 | cluster.close();
25 | }
26 |
27 | }
28 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/product/App.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.product;
2 |
3 | import com.datastax.driver.core.Cluster;
4 | import com.datastax.driver.core.CodecRegistry;
5 | import com.datastax.driver.core.Session;
6 | import com.datastax.driver.mapping.Mapper;
7 | import com.datastax.driver.mapping.MappingManager;
8 |
9 | public class App {
10 |
11 | public static void main(String[] args) {
12 | CodecRegistry codecRegistry = CodecRegistry.DEFAULT_INSTANCE;
13 | String server = System.getProperty("contactPoint", "127.0.0.1");
14 | Cluster cluster = Cluster.builder().addContactPoint(server).withCodecRegistry(codecRegistry).build();
15 | Session session = cluster.connect();
16 |
17 | MappingManager manager = new MappingManager(session);
18 | Mapper<Product> mapper = manager.mapper(Product.class);
19 | Product product = mapper.get("test");
20 | System.out.println("Product: " + product);
21 |
22 | session.close();
23 | }
24 |
25 | }
26 |
--------------------------------------------------------------------------------
/scala-driver-4.x/src/main/scala/com/datastax/alexott/demos/objmapper/ObjeMapperTest.scala:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper
2 |
3 | import java.net.InetSocketAddress
4 | import java.util
5 |
6 | import com.datastax.oss.driver.api.core.CqlSession
7 |
8 | import collection.JavaConverters._
9 |
10 | object ObjeMapperTest {
11 |
12 | val CQL_PORT: Int = System.getProperty("cqlPort", "9042").toInt
13 |
14 | def getContactPoints(contactPoints: String): util.Collection[InetSocketAddress] = {
15 | contactPoints.split(",")
16 | .map(host => InetSocketAddress.createUnresolved(host, CQL_PORT))
17 | .toSeq.asJava
18 | }
19 |
20 | def main(args: Array[String]): Unit = {
21 | val session = CqlSession.builder.addContactPoints(
22 | getContactPoints("10.101.34.176")).build
23 |
24 | session.execute("select * from system_auth.roles")
25 | .all().asScala.foreach(x => println(x.getFormattedContents))
26 |
27 |
28 | session.close()
29 | }
30 |
31 | }
32 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/TestData.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper;
2 |
3 | import java.time.LocalDate;
4 | import java.util.UUID;
5 |
6 | import com.datastax.driver.extras.codecs.jdk8.LocalDateCodec;
7 | import com.datastax.driver.mapping.annotations.Column;
8 | import com.datastax.driver.mapping.annotations.PartitionKey;
9 | import com.datastax.driver.mapping.annotations.Table;
10 |
11 | @Table(keyspace = "test", name = "dtest", readConsistency = "ONE", writeConsistency = "ONE")
12 | public class TestData {
13 | @PartitionKey
14 | @Column(name = "id")
15 | private UUID id;
16 |
17 | @Column(name = "ddate", codec = LocalDateCodec.class)
18 | LocalDate ddate;
19 |
20 | public UUID getId() {
21 | return id;
22 | }
23 |
24 | public void setId(UUID id) {
25 | this.id = id;
26 | }
27 |
28 | public LocalDate getDdate() {
29 | return ddate;
30 | }
31 |
32 | public void setDdate(LocalDate ddate) {
33 | this.ddate = ddate;
34 | }
35 |
36 | }
37 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/AuditTestType.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper;
2 |
3 | import com.datastax.driver.mapping.annotations.UDT;
4 |
5 | // create type test.audit_type(id int, t text);
6 | @UDT(keyspace="test", name="audit_type")
7 | public class AuditTestType {
8 | int id;
9 | String t;
10 |
11 | public AuditTestType() {
12 | }
13 |
14 | public AuditTestType(int id, String t) {
15 | this.id = id;
16 | this.t = t;
17 | }
18 |
19 | public int getId() {
20 | return id;
21 | }
22 |
23 | public void setId(int id) {
24 | this.id = id;
25 | }
26 |
27 | public String getT() {
28 | return t;
29 | }
30 |
31 | public void setT(String t) {
32 | this.t = t;
33 | }
34 |
35 | @Override
36 | public String toString() {
37 | return "AuditTestType{" +
38 | "id=" + id +
39 | ", t='" + t + '\'' +
40 | '}';
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/scala-driver-4.x/src/main/scala/com/datastax/alexott/demos/objmapper/Entitites.scala:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper
2 |
3 | import com.datastax.oss.driver.api.mapper.annotations.{CqlName, Entity, PartitionKey}
4 |
5 | import scala.annotation.meta.field
6 |
7 | /*
8 |
9 | CREATE TYPE test.udt (
10 | id int,
11 | t1 int,
12 | t2 int,
13 | a2 int
14 | );
15 | CREATE TABLE test.u2 (
16 | id int PRIMARY KEY,
17 | u udt
18 | );
19 | */
20 |
21 |
22 | @Entity
23 | case class udt(@(CqlName @field)("id") id: java.lang.Integer,
24 | @(CqlName @field)("t1") t1: java.lang.Integer,
25 | @(CqlName @field)("t2") t2: java.lang.Integer,
26 | @(CqlName @field)("a2") a2: java.lang.Integer) {
27 | def this() {
28 | this(0,0,0,0)
29 | }
30 | }
31 |
32 | @Entity
33 | case class u2(@(PartitionKey @field) id: java.lang.Integer,
34 | @(CqlName @field)(value = "udt") udt: udt) {
35 | def this() {
36 | this(0, new udt)
37 | }
38 | }
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/TableObjAccessorTest.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper;
2 |
3 | import com.datastax.driver.core.Cluster;
4 | import com.datastax.driver.core.Session;
5 | import com.datastax.driver.mapping.MappingManager;
6 | import com.datastax.driver.mapping.Result;
7 |
8 | public class TableObjAccessorTest {
9 | public static void main(String[] args) {
10 | String server = System.getProperty("contactPoint", "127.0.0.1");
11 | try (Cluster cluster = Cluster.builder().addContactPoint(server).build();
12 | Session session = cluster.connect()) {
13 | MappingManager manager = new MappingManager(session);
14 | TableObjAccessor accessor = manager.createAccessor(TableObjAccessor.class);
15 | Result<TableObjectClustered> objs = accessor.getByPartKey(0, 1);
16 | for (TableObjectClustered obj: objs) {
17 | System.out.println("Obj=" + obj);
18 | }
19 | accessor.deleteByPartKey(0,0);
20 | }
21 |
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/scc-2.5/src/main/scala/com/datastax/alexott/spark/JoinTestsScala.scala:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.spark
2 |
3 | import org.apache.spark.SparkContext
4 | import org.apache.spark.sql.SparkSession
5 |
6 | /**
7 | * Demonstration of Cassandra direct join in the SCC 2.5+
8 | *
9 | * spark-submit needs to be executed with
10 | * --conf spark.sql.extensions=com.datastax.spark.connector.CassandraSparkExtensions
11 | */
12 | object JoinTestsScala {
13 | def main(args: Array[String]): Unit = {
14 |
15 | val sc = new SparkContext()
16 | val spark = SparkSession.builder()
17 | .config(sc.getConf)
18 | .getOrCreate()
19 | import spark.implicits._
20 |
21 | val toJoin = spark.range(1, 1000).map(x => x.intValue).withColumnRenamed("value", "id")
22 |
23 | val dataset = spark.read
24 | .format("org.apache.spark.sql.cassandra")
25 | .options(Map("table" -> "jtest", "keyspace" -> "test"))
26 | .load
27 | val joined = toJoin.join(dataset, dataset("id") === toJoin("id"))
28 | joined.explain
29 | joined.show(10)
30 | }
31 |
32 | }
33 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/MapperTest1.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper;
2 |
3 | import java.util.UUID;
4 |
5 | import com.datastax.driver.core.Cluster;
6 | import com.datastax.driver.core.Session;
7 | import com.datastax.driver.mapping.Mapper;
8 | import com.datastax.driver.mapping.MappingManager;
9 |
10 | public class MapperTest1 {
11 | public static void main(String[] args) {
12 | String server = System.getProperty("contactPoint", "127.0.0.1");
13 | Cluster cluster = Cluster.builder().addContactPoint(server).build();
14 | Session session = cluster.connect();
15 |
16 | MappingManager manager = new MappingManager(session);
17 |
18 | Mapper<TestData> mapper = manager.mapper(TestData.class);
19 |
20 | UUID uuid = UUID.fromString("e7ae5cf3-d358-4d99-b900-85902fda9bb1");
21 | TestData td = mapper.get(uuid);
22 |
23 | if (td == null) {
24 | System.out.println("Can't find given UUID");
25 | } else {
26 | System.out.println("UUID: " + td.getId() + ", date: " + td.getDdate());
27 | }
28 |
29 | session.close();
30 | cluster.close();
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/TableObjJava.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper;
2 |
3 | import com.datastax.driver.mapping.annotations.PartitionKey;
4 | import com.datastax.driver.mapping.annotations.Table;
5 |
6 | import java.util.Date;
7 |
8 | @Table(name="scala_test", keyspace = "test")
9 | public class TableObjJava {
10 | @PartitionKey
11 | int id = 0;
12 | String t = "";
13 | Date tm = new Date();
14 |
15 | public int getId() {
16 | return id;
17 | }
18 |
19 | public void setId(int id) {
20 | this.id = id;
21 | }
22 |
23 | public String getT() {
24 | return t;
25 | }
26 |
27 | public void setT(String t) {
28 | this.t = t;
29 | }
30 |
31 | public Date getTm() {
32 | return tm;
33 | }
34 |
35 | public void setTm(Date tm) {
36 | this.tm = tm;
37 | }
38 |
39 | @Override
40 | public String toString() {
41 | return "TableObjJava{" +
42 | "id=" + id +
43 | ", t='" + t + '\'' +
44 | ", tm=" + tm +
45 | '}';
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/prometheus-java-driver/README.md:
--------------------------------------------------------------------------------
1 | This directory contains an example of how we can export metrics from DataStax Java driver
2 | 4.x to Prometheus via [Prometheus Java Client](https://github.com/prometheus/client_java).
3 |
4 | Exporting [Java driver metrics](https://docs.datastax.com/en/developer/java-driver/4.3/manual/core/metrics/) is simple: we just need to add the following lines:
5 |
6 | ```java
7 | MetricRegistry registry = session.getMetrics()
8 | .orElseThrow(() -> new IllegalStateException("Metrics are disabled"))
9 | .getRegistry();
10 | CollectorRegistry.defaultRegistry.register(new DropwizardExports(registry));
11 | ```
12 |
13 | and then expose the metrics to Prometheus via a specific implementation. This example uses
14 | Prometheus's `HTTPServer`, running on port 9095 (overridable via the `prometheusPort` Java
15 | property).
16 |
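For illustration, a minimal hypothetical sketch of that exposure step (assuming the `simpleclient_dropwizard` and `simpleclient_httpserver` artifacts are on the classpath, and that session-level metrics are enabled in the driver configuration; see `MetricsWithPrometheus.java` for the actual code):

```java
import com.codahale.metrics.MetricRegistry;
import com.datastax.oss.driver.api.core.CqlSession;
import io.prometheus.client.CollectorRegistry;
import io.prometheus.client.dropwizard.DropwizardExports;
import io.prometheus.client.exporter.HTTPServer;

public class MetricsSketch {
    public static void main(String[] args) throws Exception {
        try (CqlSession session = CqlSession.builder().build()) {
            // register the driver's Dropwizard registry with the default Prometheus registry
            MetricRegistry registry = session.getMetrics()
                    .orElseThrow(() -> new IllegalStateException("Metrics are disabled"))
                    .getRegistry();
            CollectorRegistry.defaultRegistry.register(new DropwizardExports(registry));

            // serve /metrics for Prometheus to scrape; the port is overridable
            int port = Integer.parseInt(System.getProperty("prometheusPort", "9095"));
            HTTPServer server = new HTTPServer(port);
            Thread.sleep(Long.MAX_VALUE); // keep the exporter running
        }
    }
}
```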
17 | Run the example with the following command:
18 |
19 | ```sh
20 | mvn clean compile exec:java -Dexec.mainClass="com.datastax.alexott.demos.MetricsWithPrometheus" \
21 | -DcontactPoint=10.101.34.241 -DdcName=dc_datastax
22 | ```
23 |
24 | You need to pass the contact point and data center name as Java properties (`contactPoint` and
25 | `dcName` respectively).
26 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | This repository contains code samples related to the usage of DataStax
2 | Enterprise (DSE). It is the successor of the
3 | [dse-java-playground](https://github.com/alexott/dse-java-playground) repository, but it's
4 | restructured to be more modular, so we can have examples for different versions of the Java
5 | driver, a separate artifact for the Spark code, etc.
6 |
7 | The code is organized as follows:
8 |
9 | * `driver-1.x` - samples that use DSE Java driver 1.x (mostly compatible with DataStax
10 | Java driver 3.x);
11 | * `driver-4.x` - samples that use DataStax Java driver 4.x (mostly compatible with DSE
12 | Java driver 2.x);
13 | * `spark-dse` - samples that use DSE Analytics (should be mostly compatible with OSS
14 | Spark, but there are some differences, such as support for DSE Direct Join for data
15 | frames);
16 | * `spark-oss` - samples that demonstrate the use of Spark with OSS Spark Cassandra
17 | Connector, version < 2.5.0
18 | * `scc-2.5` - samples that demonstrate the use of Spark with OSS Spark Cassandra
19 | Connector, version = 2.5.x
20 | * `scc-3.0` - samples that demonstrate the use of Spark with OSS Spark Cassandra
21 | Connector, version >= 3.0
22 |
23 |
24 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/misc/Test3.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.misc;
2 | import java.util.Collections;
3 |
4 | import com.datastax.driver.core.BoundStatement;
5 | import com.datastax.driver.core.Cluster;
6 | import com.datastax.driver.core.PreparedStatement;
7 | import com.datastax.driver.core.Session;
8 | import com.fasterxml.jackson.core.JsonProcessingException;
9 |
10 | public class Test3 {
11 | public static void main(String[] args) throws JsonProcessingException {
12 | String server = System.getProperty("contactPoint", "127.0.0.1");
13 | Cluster cluster = Cluster.builder().addContactPoint(server).withCredentials("user.0", "password").build();
14 | Session session = cluster.connect();
15 |
16 | PreparedStatement prepared = session.prepare("UPDATE test.st SET cities = cities + ? WHERE zip = ? and state = ?");
17 |
18 | BoundStatement bound = prepared.bind(Collections.singleton("t2"), "2", "1");
19 | session.execute(bound);
20 |
21 | BoundStatement bound2 = prepared.bind();
22 | bound2.setSet(0, Collections.singleton("t3"));
23 | bound2.setString(1, "2");
24 | bound2.setString(2, "1");
25 | session.execute(bound2);
26 |
27 | session.close();
28 | cluster.close();
29 | }
30 |
31 | }
32 |
--------------------------------------------------------------------------------
/driver-4.x/src/main/java/com/datastax/alexott/demos/CreateKeyspacesInferTopology.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos;
2 |
3 | import com.datastax.oss.driver.api.core.CqlSession;
4 |
5 | import java.util.Map;
6 | import java.util.TreeMap;
7 |
8 | import static com.datastax.oss.driver.api.querybuilder.SchemaBuilder.*;
9 |
10 | public class CreateKeyspacesInferTopology {
11 | private static final String KS_NAME = "my_super_ks";
12 | private static final int MAX_RF = 3;
13 |
14 | public static void main(String[] args) {
15 | try (CqlSession session = CqlSession.builder()
16 | .addContactPoints(Commons.getContactPoints())
17 | .build()) {
18 | Commons.executeDDL(session, dropKeyspace(KS_NAME).ifExists().build());
19 | Map<String, Integer> rfPerDC = new TreeMap<>();
20 | for (Map.Entry<String, Integer> e: Commons.getDataCenters(session).entrySet()) {
21 | rfPerDC.put(e.getKey(), Math.min(e.getValue(), MAX_RF));
22 | }
23 |
24 | Commons.executeDDL(session,
25 | createKeyspace(KS_NAME).ifNotExists()
26 | .withNetworkTopologyStrategy(rfPerDC).build());
27 |
28 | } catch (InterruptedException e) {
29 | e.printStackTrace();
30 | }
31 | }
32 |
33 | }
34 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/product/Product.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.product;
2 |
3 | import java.util.List;
4 | import java.util.Map;
5 |
6 | import com.datastax.driver.mapping.annotations.Frozen;
7 | import com.datastax.driver.mapping.annotations.FrozenValue;
8 | import com.datastax.driver.mapping.annotations.PartitionKey;
9 | import com.datastax.driver.mapping.annotations.Table;
10 |
11 | @Table(keyspace = "test", name = "product")
12 | public class Product {
13 | @PartitionKey
14 | String id;
15 |
16 | @FrozenValue
17 | @Frozen
18 | Map<String, Information> details;
19 |
20 | @Frozen
21 | List<Information> moreDetails;
22 |
23 | public String getId() {
24 | return id;
25 | }
26 |
27 | public void setId(String id) {
28 | this.id = id;
29 | }
30 |
31 | public Map<String, Information> getDetails() {
32 | return details;
33 | }
34 |
35 | public void setDetails(Map<String, Information> details) {
36 | this.details = details;
37 | }
38 |
39 | public List<Information> getMoreDetails() {
40 | return moreDetails;
41 | }
42 |
43 | public void setMoreDetails(List<Information> moreDetails) {
44 | this.moreDetails = moreDetails;
45 | }
46 |
47 | @Override
48 | public String toString() {
49 | return "Product [id=" + id + ", details=" + details + ", moreDetails=" + moreDetails + "]";
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
/spark-dse/src/main/scala/com/datastax/alexott/dsefs/DsefsDownloader.scala:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.dsefs
2 |
3 | import java.io._
4 |
5 | import com.datastax.alexott.dsefs.DsefsUploader.getBool
6 | import org.apache.hadoop.fs.{FileSystem, FileUtil, Path}
7 | import org.apache.spark.sql.SparkSession
8 |
9 | object DsefsDownloader {
10 | def main(args: Array[String]): Unit = {
11 | if (args.length < 2) {
12 | println("Usage: DsefsDownloader fileOrDirectoryToDownload destination")
13 | System.exit(1)
14 | }
15 | val spark = SparkSession.builder().getOrCreate()
16 |
17 | // import spark.implicits._
18 |
19 | val remoteFS = FileSystem.get(spark.sparkContext.hadoopConfiguration)
20 | val path = new Path(args(0))
21 | if (!remoteFS.exists(path)) {
22 | println("The file or directory '" + args(0) + "' doesn't exist!")
23 | System.exit(1)
24 | }
25 |
26 | val outfile = new File(args(1))
27 | if (outfile.exists()) {
28 | if (getBool("overwriteMode")) {
29 | outfile.delete()
30 | } else {
31 | println("File '" + args(1) + "' exists on disk! Remove it, or pass -DoverwriteMode=true to the job!")
32 | System.exit(1)
33 | }
34 | }
35 |
36 | FileUtil.copy(remoteFS, path, outfile, false, spark.sparkContext.hadoopConfiguration)
37 |
38 | System.exit(0)
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/cassandra-join-spark/src/main/java/json/ticks/TickData.java:
--------------------------------------------------------------------------------
1 | package json.ticks;
2 |
3 | import java.time.LocalDateTime;
4 | import java.time.temporal.ChronoUnit;
5 | import java.util.concurrent.atomic.AtomicLong;
6 |
7 | public class TickData {
8 | private static final LocalDateTime BASE_TIME = LocalDateTime.now();
9 | private static final AtomicLong TIME_OFFSET = new AtomicLong();
10 |
11 | private String symbol;
12 | private double value;
13 | private String datetime;
14 |
15 | public TickData(String symbol, double value) {
16 | this.symbol = symbol;
17 | this.value = value;
18 | this.datetime = BASE_TIME.plus(TIME_OFFSET.incrementAndGet(), ChronoUnit.SECONDS).toString();
19 | }
20 |
21 | public String getSymbol() {
22 | return symbol;
23 | }
24 |
25 | public double getValue() {
26 | return value;
27 | }
28 |
29 | public String getDatetime() {
30 | return datetime;
31 | }
32 |
33 | public void setDatetime(){
34 | this.datetime = BASE_TIME.plus(TIME_OFFSET.incrementAndGet(), ChronoUnit.SECONDS).toString();
35 | }
36 |
37 | public void setValue(double v){
38 | this.value = v;
39 | }
40 |
41 | @Override
42 | public String toString() {
43 | return "TickData [" +
44 | "symbol=" + symbol + ", " +
45 | "value=" + value + "]";
46 | }
47 |
48 | }
49 |
50 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/UDTTestTableFR.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper;
2 |
3 | import com.datastax.driver.mapping.annotations.ClusteringColumn;
4 | import com.datastax.driver.mapping.annotations.PartitionKey;
5 | import com.datastax.driver.mapping.annotations.Table;
6 |
7 | @Table(name = "udt_test_fr", keyspace = "test")
8 | public class UDTTestTableFR {
9 | @PartitionKey
10 | int id;
11 | @ClusteringColumn
12 | int cid;
13 | UDTTestType udt;
14 |
15 | public UDTTestTableFR(int id, int cid, UDTTestType udt) {
16 | this.id = id;
17 | this.cid = cid;
18 | this.udt = udt;
19 | }
20 |
21 | public UDTTestTableFR() {
22 | }
23 |
24 | @Override
25 | public String toString() {
26 | return "UDTTestTableFR{" +
27 | "id=" + id +
28 | ", cid=" + cid +
29 | ", udt=" + udt +
30 | '}';
31 | }
32 |
33 | public int getId() {
34 | return id;
35 | }
36 |
37 | public void setId(int id) {
38 | this.id = id;
39 | }
40 |
41 | public int getCid() {
42 | return cid;
43 | }
44 |
45 | public void setCid(int cid) {
46 | this.cid = cid;
47 | }
48 |
49 | public UDTTestType getUdt() {
50 | return udt;
51 | }
52 |
53 | public void setUdt(UDTTestType udt) {
54 | this.udt = udt;
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
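A hypothetical usage sketch (not a file from this repository; the repo's own `FRvsNonFRUDTMapping.java`, which presumably exercises these classes, is not shown in this dump). It saves and reads back a row whose `udt` column is the `UDTTestType` UDT shown earlier; the object mapper handles `@UDT`-annotated fields automatically:

```java
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Session;
import com.datastax.driver.mapping.Mapper;
import com.datastax.driver.mapping.MappingManager;

public class UdtMappingSketch {
    public static void main(String[] args) {
        String server = System.getProperty("contactPoint", "127.0.0.1");
        try (Cluster cluster = Cluster.builder().addContactPoint(server).build();
             Session session = cluster.connect()) {
            MappingManager manager = new MappingManager(session);
            // mapper for the table with the frozen UDT column
            Mapper<UDTTestTableFR> mapper = manager.mapper(UDTTestTableFR.class);
            mapper.save(new UDTTestTableFR(1, 1, new UDTTestType(1, "text")));
            System.out.println("Read back: " + mapper.get(1, 1)); // get by id, cid
        }
    }
}
```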
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/UDTTestTableNonFR.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper;
2 |
3 | import com.datastax.driver.mapping.annotations.ClusteringColumn;
4 | import com.datastax.driver.mapping.annotations.PartitionKey;
5 | import com.datastax.driver.mapping.annotations.Table;
6 |
7 | @Table(name = "udt_test", keyspace = "test")
8 | public class UDTTestTableNonFR {
9 | @PartitionKey
10 | int id;
11 | @ClusteringColumn
12 | int cid;
13 | UDTTestType udt;
14 |
15 | public UDTTestTableNonFR(int id, int cid, UDTTestType udt) {
16 | this.id = id;
17 | this.cid = cid;
18 | this.udt = udt;
19 | }
20 |
21 | public UDTTestTableNonFR() {
22 | }
23 |
24 | @Override
25 | public String toString() {
26 | return "UDTTestTableNonFR{" +
27 | "id=" + id +
28 | ", cid=" + cid +
29 | ", udt=" + udt +
30 | '}';
31 | }
32 |
33 | public int getId() {
34 | return id;
35 | }
36 |
37 | public void setId(int id) {
38 | this.id = id;
39 | }
40 |
41 | public int getCid() {
42 | return cid;
43 | }
44 |
45 | public void setCid(int cid) {
46 | this.cid = cid;
47 | }
48 |
49 | public UDTTestType getUdt() {
50 | return udt;
51 | }
52 |
53 | public void setUdt(UDTTestType udt) {
54 | this.udt = udt;
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/spark-dse/src/main/scala/com/datastax/alexott/dsefs/DsefsUploader.scala:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.dsefs
2 |
3 | import java.io.File
4 |
5 | import org.apache.hadoop.fs.{FileSystem, FileUtil, Path}
6 | import org.apache.spark.sql.SparkSession
7 |
8 | object DsefsUploader {
9 | def getBool(name: String): Boolean = {
10 | java.lang.Boolean.getBoolean(name)
11 | }
12 |
13 | def main(args: Array[String]): Unit = {
14 | if (args.length < 2) {
15 | println("Usage: DsefsUploader fileOrDirectoryToUpload destination")
16 | System.exit(1)
17 | }
18 | val spark = SparkSession.builder().getOrCreate()
19 |
20 | val infile = new File(args(0))
21 | if (!infile.exists()) {
22 | println("File '" + args(0) + " doesn't exist!")
23 | }
24 |
25 | val fileSystem = FileSystem.get(spark.sparkContext.hadoopConfiguration)
26 |
27 | val name = if ("/".equals(args(1))) {
28 | "/" + infile.getName
29 | } else {
30 | args(1)
31 | }
32 | val path = new Path(name)
33 | if (fileSystem.exists(path)) {
34 | if (getBool("overwriteMode")) {
35 | fileSystem.delete(path, true)
36 | } else {
37 | println("File or directory '" + args(1) + "' exists on DSEFS! Remove it, or pass -DoverwriteMode=true to the job!")
38 | System.exit(1)
39 | }
40 | }
41 |
42 | FileUtil.copy(infile, fileSystem, path, false, spark.sparkContext.hadoopConfiguration)
43 |
44 | System.exit(0)
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/misc/Test1.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.misc;
2 |
3 | import com.datastax.driver.core.Cluster;
4 | import com.datastax.driver.core.ColumnDefinitions.Definition;
5 | import com.datastax.driver.core.DataType;
6 | import com.datastax.driver.core.ResultSet;
7 | import com.datastax.driver.core.Row;
8 | import com.datastax.driver.core.Session;
9 | import com.fasterxml.jackson.core.JsonProcessingException;
10 |
11 | import java.util.Set;
12 |
13 | public class Test1 {
14 | public static void main(String[] args) throws JsonProcessingException {
15 | String server = System.getProperty("contactPoint", "127.0.0.1");
16 | Cluster cluster = Cluster.builder().addContactPoint(server).build();
17 | Session session = cluster.connect();
18 |
19 | ResultSet rs = session.execute("select * from test.ftest ;");
20 | System.out.print("[");
21 | for (Row row : rs) {
22 | for (Definition key : row.getColumnDefinitions()) {
23 | System.out.println(key.getName() + ", type=" + key.getType());
24 | if (key.getType().equals(DataType.frozenSet(DataType.varchar()))) {
25 | System.out.println("\tbingo!");
26 | Set<String> ts = row.getSet(key.getName(), String.class);
27 | for (String string : ts) {
28 | System.out.println("\tval=" + string);
29 | }
30 | }
31 | }
32 | }
33 |
34 | session.close();
35 | cluster.close();
36 | }
37 |
38 | }
39 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/scala/com/datastax/alexott/CodecsTest.scala:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott
2 |
3 | import com.datastax.driver.core.{Cluster, Row, TypeCodec}
4 | import com.datastax.driver.extras.codecs.jdk8.OptionalCodec
5 |
6 | import scala.collection.JavaConverters._
7 | import scala.compat.java8.OptionConverters._
8 |
9 |
10 | object Optionals {
11 | private val intCodec = TypeCodec.cint()
12 | private val optionalIntCodec = new OptionalCodec[java.lang.Integer](intCodec)
13 | private val javaIntType = optionalIntCodec.getJavaType()
14 |
15 | def registerCodecs(cluster: Cluster): Unit = {
16 | val codecRegistry = cluster.getConfiguration.getCodecRegistry
17 |
18 | codecRegistry.register(optionalIntCodec)
19 | }
20 |
21 | def getInt(row: Row, col: String): Option[java.lang.Integer] = {
22 | row.get(col, javaIntType).asScala
23 | }
24 | def getInt(row: Row, col: Int): Option[java.lang.Integer] = {
25 | row.get(col, javaIntType).asScala
26 | }
27 | }
28 |
29 | object CodecsTest {
30 | def main(args: Array[String]): Unit = {
31 |
32 | val cluster = Cluster.builder().addContactPoint("10.200.176.39").build()
33 | Optionals.registerCodecs(cluster)
34 | val session = cluster.connect()
35 |
36 | for (row <- session.execute("select id, c1, v1 from test.st1 where id = 2").all().asScala) {
37 | println("id=" + Optionals.getInt(row, "id")
38 | + ", c1=" + Optionals.getInt(row, "c1")
39 | + ", v1=" + Optionals.getInt(row, "v1"))
40 | }
41 | session.close()
42 | cluster.close()
43 |
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/solr/DTestMain.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.solr;
2 |
3 | import java.io.IOException;
4 | import java.util.List;
5 |
6 | import org.apache.solr.client.solrj.SolrClient;
7 | import org.apache.solr.client.solrj.SolrQuery;
8 | import org.apache.solr.client.solrj.SolrServerException;
9 | import org.apache.solr.client.solrj.beans.DocumentObjectBinder;
10 | import org.apache.solr.client.solrj.impl.HttpSolrClient;
11 | import org.apache.solr.client.solrj.response.QueryResponse;
12 | import org.apache.solr.common.SolrDocumentList;
13 |
14 | import com.fasterxml.jackson.databind.ObjectMapper;
15 | import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
16 |
17 | public class DTestMain {
18 | public static void main(String[] args) throws SolrServerException, IOException {
19 | String url = "http://localhost:8983/solr/test.dtest";
20 |
21 | ObjectMapper mapper = new ObjectMapper();
22 | mapper.findAndRegisterModules();
23 | mapper.registerModule(new JavaTimeModule());
24 |
25 | SolrClient client = new HttpSolrClient(url);
26 | SolrQuery query = new SolrQuery();
27 | query.setQuery("*:*");
28 | query.addFilterQuery("id:1");
29 | query.setRows(10);
30 | QueryResponse response = client.query(query);
31 | SolrDocumentList list = response.getResults();
32 | DocumentObjectBinder binder = new DocumentObjectBinder();
33 |
34 | List<DTest> lst = binder.getBeans(DTest.class, list);
35 | for (DTest dTest : lst) {
36 | System.out.println("id=" + dTest.getId() + ", t=" + dTest.getT());
37 | }
38 |
39 | }
40 |
41 | }
42 |
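43 | /*
44 |  * Note: on SolrJ 6+ the URL constructor used above is deprecated; prefer
45 |  * new HttpSolrClient.Builder(url).build(), and close the client when done.
46 |  */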
--------------------------------------------------------------------------------
/driver-4.x/src/main/java/com/datastax/alexott/demos/UdtTest1.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos;
2 |
3 | import com.datastax.oss.driver.api.core.CqlSession;
4 | import com.datastax.oss.driver.api.core.cql.BoundStatement;
5 | import com.datastax.oss.driver.api.core.cql.BoundStatementBuilder;
6 | import com.datastax.oss.driver.api.core.cql.PreparedStatement;
7 | import com.datastax.oss.driver.api.core.type.UserDefinedType;
8 |
9 | public class UdtTest1 {
10 | /*
11 |
12 | CREATE TYPE test.udt (
13 | id int,
14 | t1 int,
15 | t2 int,
16 | a2 int
17 | );
18 | CREATE TABLE test.u2 (
19 | id int PRIMARY KEY,
20 | u udt
21 | );
22 | */
23 |
24 | public static void main(String[] args) {
25 | try (CqlSession session = CqlSession.builder()
26 | .addContactPoints(Commons.getContactPoints())
27 | .build()) {
28 | UserDefinedType udtType = session
29 | .getMetadata()
30 | .getKeyspace("test")
31 | .flatMap(ks -> ks.getUserDefinedType("udt"))
32 | .orElseThrow(IllegalStateException::new);
33 | PreparedStatement preparedStatement = session.prepare(
34 | "insert into test.u2(id, u) values(?, ?)");
35 | for (int i = 0; i < 5; i++) {
36 | BoundStatement boundStatement =
37 | preparedStatement.bind(i, udtType.newValue(i, i, i, i));
38 | session.execute(boundStatement);
39 | }
40 |
41 | }
42 | }
43 | }
44 |
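45 | /*
46 |  * Reading the UDT back (a sketch using the same session):
47 |  *
48 |  *   Row row = session.execute("select u from test.u2 where id = 0").one();
49 |  *   UdtValue u = row.getUdtValue("u");
50 |  *   System.out.println("t1 = " + u.getInt("t1"));
51 |  */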
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/JMXTest.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos;
2 |
3 | import java.util.Set;
4 |
5 | import javax.management.JMX;
6 | import javax.management.MBeanServerConnection;
7 | import javax.management.ObjectInstance;
8 | import javax.management.ObjectName;
9 | import javax.management.remote.JMXConnector;
10 | import javax.management.remote.JMXConnectorFactory;
11 | import javax.management.remote.JMXServiceURL;
12 |
13 | import org.apache.cassandra.metrics.CassandraMetricsRegistry;
14 |
15 | public class JMXTest {
16 |
17 | public static void main(String[] args) throws Exception {
18 | JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://[127.0.0.1]:7199/jmxrmi");
19 | JMXConnector jmxc = JMXConnectorFactory.connect(url, null);
20 | MBeanServerConnection mbsc = jmxc.getMBeanServerConnection();
21 |
22 | Set<ObjectInstance> objs = mbsc.queryMBeans(ObjectName
23 | .getInstance("org.apache.cassandra.metrics:type=ClientRequest,scope=Read-ALL,name=TotalLatency"), null);
24 | for (ObjectInstance obj : objs) {
25 | Object proxy = JMX.newMBeanProxy(mbsc, obj.getObjectName(), CassandraMetricsRegistry.JmxCounterMBean.class);
26 | if (proxy instanceof CassandraMetricsRegistry.JmxCounterMBean) {
27 | System.out.println("TotalLatency = " + ((CassandraMetricsRegistry.JmxCounterMBean) proxy).getCount());
28 | }
29 | }
30 | jmxc.close();
31 | }
32 |
33 | }
34 |
35 | /*
36 |  * Listing all registered MBeans:
37 |  *   Set<ObjectName> names = mbsc.queryNames(null, null);
38 |  *   for (ObjectName name : names) { System.out.println("\tObjectName = " + name); }
39 |  */
40 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/Test4.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos;
2 |
3 | import com.datastax.driver.core.BatchStatement;
4 | import com.datastax.driver.core.Cluster;
5 | import com.datastax.driver.core.Session;
6 | import com.datastax.driver.core.Statement;
7 | import com.datastax.driver.mapping.Mapper;
8 | import com.datastax.driver.mapping.MappingManager;
9 | import com.google.common.collect.Lists;
10 | import com.google.common.collect.Maps;
11 | import com.google.common.collect.Sets;
12 |
13 | import java.util.Map;
14 |
15 | public class Test4 {
16 | public static void main(String[] args) {
17 | String server = System.getProperty("contactPoint", "127.0.0.1");
18 | try(Cluster cluster = Cluster.builder().addContactPoint(server).build();
19 | Session session = cluster.connect()) {
20 | MappingManager manager = new MappingManager(session);
21 | Mapper<Test4Data> mapper = manager.mapper(Test4Data.class);
22 |
23 | for (int i = 0; i < 2; i++) {
24 | BatchStatement batchStatement = new BatchStatement(BatchStatement.Type.UNLOGGED);
25 | for (int j = 0; j < 5; j++) {
26 | Statement statement = mapper.saveQuery(new Test4Data(i, j, "t " + i + "," + j));
27 | System.out.println(statement.getClass());
28 | batchStatement.add(statement);
29 | }
30 | session.execute(batchStatement);
31 | }
32 |
33 | Test4Data test4Data = mapper.get(0, 1);
34 | System.out.println(test4Data);
35 | }
36 | }
37 |
38 | }
39 |
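40 | // Note: each unlogged batch above targets a single partition (id is fixed within
41 | // the inner loop), which is the case where batching actually helps; batches that
42 | // span many partitions put extra load on the coordinator.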
--------------------------------------------------------------------------------
/driver-4.x/src/main/java/com/datastax/alexott/demos/ConnectWithDCDetection.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos;
2 |
3 | import com.datastax.oss.driver.api.core.CqlSession;
4 | import com.datastax.oss.driver.api.core.cql.ResultSet;
5 | import com.datastax.oss.driver.api.core.cql.Row;
6 |
7 | import java.net.InetSocketAddress;
8 | import java.util.Arrays;
9 | import java.util.stream.Collectors;
10 |
11 | public class ConnectWithDCDetection {
12 |
13 | public static void main(String[] args) {
14 | String contactPointsStr = System.getProperty("contactPoints", "");
15 | if (contactPointsStr.isEmpty()) {
16 | System.err.println("Please pass Cassandra contact points as Java system property with name 'contactPoints'");
17 | System.exit(1);
18 | }
19 |
20 | // manual DC detection isn't strictly necessary: the driver also provides
21 | // DcInferringLoadBalancingPolicy (JAVA-2459); see the note at the end of this file
22 | String[] contactPoints = contactPointsStr.split(",");
23 | String dcName = DCDetectingLBPolicy.detectDcName(contactPoints);
24 | System.out.println("Detected DC Name: '" + dcName + "'");
25 |
26 | try(CqlSession session = CqlSession.builder()
27 | .addContactPoints(Arrays.stream(contactPoints)
28 | .map(x -> new InetSocketAddress(x, 9042)).collect(Collectors.toList()))
29 | .withLocalDatacenter(dcName)
30 | .build()) {
31 | ResultSet rs = session.execute("select data_center, host_id from system.peers");
32 | for (Row row: rs) {
33 | System.out.println(String.format("Host ID: %s, DC: %s", row.getUuid("host_id"), row.getString("data_center")));
34 | }
35 | }
36 | }
37 | }
38 |
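39 | /*
40 |  * Alternative (a sketch, assuming a driver 4.x version that ships JAVA-2459):
41 |  * skip the manual detection and let the driver infer the local DC via
42 |  * application.conf:
43 |  *
44 |  *   datastax-java-driver.basic.load-balancing-policy.class = DcInferringLoadBalancingPolicy
45 |  */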
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/graph/GDTest1.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.graph;
2 |
3 | import java.util.UUID;
4 |
5 | import com.datastax.driver.dse.DseCluster;
6 | import com.datastax.driver.dse.DseSession;
7 | import com.datastax.driver.dse.graph.GraphOptions;
8 | import com.datastax.driver.dse.graph.SimpleGraphStatement;
9 |
10 | public class GDTest1 {
11 |
12 | public static void main(String[] args) {
13 | String server = System.getProperty("contactPoint", "127.0.0.1");
14 | try (DseCluster dseCluster = DseCluster.builder().addContactPoints(server)
15 | .withGraphOptions(new GraphOptions().setGraphName("test")).build();
16 | DseSession session = dseCluster.connect()) {
17 |
18 | long start = System.nanoTime();
19 | long startL = System.nanoTime();
20 | for (int i = 1; i <= 1000; i++) {
21 | // String s = String.format("g.addV(label, 'person' ,'id', '%s' ,"
22 | // + "'email', 'sample%d@gmail.com')", UUID.randomUUID().toString(), i);
23 | // session.executeGraph(s).one().asVertex();
24 |
25 | SimpleGraphStatement s = new SimpleGraphStatement(
26 | "g.addV(label, 'person' ,'id', idV , 'email', emailV)").set("idV", UUID.randomUUID().toString())
27 | .set("emailV", "sample@gmail.com" + Integer.toString(i));
28 | session.executeGraph(s).one().asVertex();
29 |
30 | if ((i % 100) == 0) {
31 | long endL = System.nanoTime();
32 | System.out.printf("%d time = %d ms\n", i, (endL - startL) / 1000000);
33 | startL = System.nanoTime();
34 | }
35 | }
36 | long end = System.nanoTime();
37 | System.out.printf("Total time = %d ms\n", (end - start) / 1000000);
38 | }
39 | }
40 | }
41 |
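42 | /*
43 |  * Note: the parameterized SimpleGraphStatement above keeps the Gremlin script
44 |  * constant across iterations, so the server can reuse it; the string-formatting
45 |  * variant left in comments forces a fresh script per request.
46 |  */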
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/ExpPopularity.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos;
2 |
3 | import com.datastax.driver.mapping.annotations.UDT;
4 |
5 | import java.util.Objects;
6 |
7 | @UDT(keyspace = "src", name = "popularity")
8 | public class ExpPopularity {
9 |
10 | String locale;
11 | double pop_a;
12 | double pop_b;
13 |
14 | public ExpPopularity(String locale, double pop_a, double pop_b) {
15 | this.locale = locale;
16 | this.pop_a = pop_a;
17 | this.pop_b = pop_b;
18 | }
19 | public ExpPopularity() {
20 | locale = "";
21 | pop_a = 0;
22 | pop_b = 0;
23 | }
24 |
25 | public String getLocale() {
26 | return locale;
27 | }
28 |
29 | public void setLocale(String locale) {
30 | this.locale = locale;
31 | }
32 |
33 | public double getPop_a() {
34 | return pop_a;
35 | }
36 |
37 | public void setPop_a(double pop_a) {
38 | this.pop_a = pop_a;
39 | }
40 |
41 | public double getPop_b() {
42 | return pop_b;
43 | }
44 |
45 | public void setPop_b(double pop_b) {
46 | this.pop_b = pop_b;
47 | }
48 |
49 | @Override
50 | public boolean equals(Object o) {
51 | if (this == o) return true;
52 | if (!(o instanceof ExpPopularity)) return false;
53 | ExpPopularity that = (ExpPopularity) o;
54 | return Double.compare(that.pop_a, pop_a) == 0 &&
55 | Double.compare(that.pop_b, pop_b) == 0 &&
56 | Objects.equals(locale, that.locale);
57 | }
58 |
59 | @Override
60 | public int hashCode() {
61 | return Objects.hash(locale, pop_a, pop_b);
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/Test4Data.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos;
2 |
3 | import com.datastax.driver.mapping.annotations.ClusteringColumn;
4 | import com.datastax.driver.mapping.annotations.Column;
5 | import com.datastax.driver.mapping.annotations.PartitionKey;
6 | import com.datastax.driver.mapping.annotations.Table;
7 |
8 | // create KEYSPACE if not exists test WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 3};
9 | // create table if not exists test.t4(id int, c int, t text, primary key(id, c));
10 | @Table(name = "t4", keyspace = "test")
11 | public class Test4Data {
12 | @PartitionKey
13 | int id;
14 |
15 | @ClusteringColumn
16 | @Column(name = "c")
17 | int clCol;
18 |
19 | @Column(name = "t")
20 | String text;
21 |
22 | public int getId() {
23 | return id;
24 | }
25 |
26 | public Test4Data() {
27 |
28 | }
29 |
30 | public Test4Data(int id, int clCol, String text) {
31 | this.id = id;
32 | this.clCol = clCol;
33 | this.text = text;
34 | }
35 |
36 | public void setId(int id) {
37 | this.id = id;
38 | }
39 |
40 | public int getClCol() {
41 | return clCol;
42 | }
43 |
44 | public void setClCol(int clCol) {
45 | this.clCol = clCol;
46 | }
47 |
48 | public String getText() {
49 | return text;
50 | }
51 |
52 | public void setText(String text) {
53 | this.text = text;
54 | }
55 |
56 | @Override
57 | public String toString() {
58 | return "Test4Data{" +
59 | "id=" + id +
60 | ", clCol=" + clCol +
61 | ", text='" + text + '\'' +
62 | '}';
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/TableObjectClustered.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.objmapper;
2 |
3 | import com.datastax.driver.mapping.annotations.Table;
4 |
5 | import java.util.Date;
6 |
7 | @Table(name = "scala_test_complex", keyspace = "test")
8 | public class TableObjectClustered {
9 | int p1 = 0;
10 | int p2 = 0;
11 | int c1 = 0;
12 | int c2 = 0;
13 | String t = "";
14 | Date tm = new Date();
15 |
16 | public TableObjectClustered() {
17 | }
18 |
19 | public int getP1() {
20 | return p1;
21 | }
22 |
23 | public void setP1(int p1) {
24 | this.p1 = p1;
25 | }
26 |
27 | public int getP2() {
28 | return p2;
29 | }
30 |
31 | public void setP2(int p2) {
32 | this.p2 = p2;
33 | }
34 |
35 | public int getC1() {
36 | return c1;
37 | }
38 |
39 | public void setC1(int c1) {
40 | this.c1 = c1;
41 | }
42 |
43 | public int getC2() {
44 | return c2;
45 | }
46 |
47 | public void setC2(int c2) {
48 | this.c2 = c2;
49 | }
50 |
51 | public String getT() {
52 | return t;
53 | }
54 |
55 | public void setT(String t) {
56 | this.t = t;
57 | }
58 |
59 | public Date getTm() {
60 | return tm;
61 | }
62 |
63 | public void setTm(Date tm) {
64 | this.tm = tm;
65 | }
66 |
67 | @Override
68 | public String toString() {
69 | return "TableObjectClustered{" +
70 | "p1=" + p1 +
71 | ", p2=" + p2 +
72 | ", c1=" + c1 +
73 | ", c2=" + c2 +
74 | ", t='" + t + '\'' +
75 | ", tm=" + tm +
76 | '}';
77 | }
78 | }
79 |
--------------------------------------------------------------------------------
/driver-1.x/src/main/java/com/datastax/alexott/demos/objmapper/AuditTestMain.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos;
2 |
3 | import com.datastax.driver.core.Cluster;
4 | import com.datastax.driver.core.Session;
5 | import com.datastax.driver.core.policies.LoadBalancingPolicy;
6 | import com.datastax.driver.core.policies.RoundRobinPolicy;
7 | import com.datastax.driver.core.policies.WhiteListPolicy;
8 | import com.datastax.driver.mapping.Mapper;
9 | import com.datastax.driver.mapping.MappingManager;
10 | import com.google.common.collect.Lists;
11 | import com.google.common.collect.Maps;
12 | import com.google.common.collect.Sets;
13 |
14 | import java.net.InetSocketAddress;
15 | import java.util.Collections;
16 | import java.util.Map;
17 |
18 | public class AuditTestMain {
19 | public static void main(String[] args) {
20 | LoadBalancingPolicy lbpolicy = new WhiteListPolicy(new RoundRobinPolicy(),
21 | Collections.singletonList(new InetSocketAddress("10.200.180.207", 9042)));
22 | try (Cluster cluster = Cluster.builder().addContactPoint("10.200.180.207")
23 | .withLoadBalancingPolicy(lbpolicy)
24 | .build();
25 | Session session = cluster.connect()) {
26 |
27 | MappingManager manager = new MappingManager(session);
28 | Mapper<AuditTestTable> mapper = manager.mapper(AuditTestTable.class);
29 |
30 | Map<Integer, String> m = Maps.newHashMap();
31 | m.put(1, "m 1");
32 | m.put(2, "m 2");
33 | mapper.save(new AuditTestTable(2, new AuditTestType(2, "test 2"),
34 | Sets.newHashSet("s 1", " s 2"), Lists.newArrayList("l 1", "l 2"),
35 | m));
36 |
37 | System.out.println(mapper.get(2));
38 | }
39 | }
40 |
41 | }
42 |
--------------------------------------------------------------------------------
/prometheus-java-driver/src/main/resources/application.conf:
--------------------------------------------------------------------------------
1 | datastax-java-driver {
2 | basic {
3 | contact-points = ["127.0.0.1:9042"]
4 | session-name = example_app
5 | load-balancing-policy {
6 | local-datacenter = datacenter1
7 | }
8 | }
9 |
10 | // see https://docs.datastax.com/en/developer/java-driver/4.3/manual/core/configuration/reference/
11 | advanced.metrics {
12 | session {
13 | enabled = [ bytes-sent, bytes-received, connected-nodes, cql-requests, cql-client-timeouts,
14 | cql-prepared-cache-size, throttling.delay, throttling.queue-size, throttling.errors
15 | ]
16 | cql-requests {
17 | highest-latency = 3 seconds
18 | significant-digits = 3
19 | refresh-interval = 5 minutes
20 | }
21 | throttling.delay {
22 | highest-latency = 3 seconds
23 | significant-digits = 3
24 | refresh-interval = 5 minutes
25 | }
26 | }
27 |
28 | node {
29 | enabled = [ pool.open-connections, pool.available-streams, pool.in-flight, pool.orphaned-streams,
30 | bytes-sent, bytes-received, cql-messages, errors.request.unsent, errors.request.aborted,
31 | errors.request.write-timeouts, errors.request.read-timeouts, errors.request.unavailables,
32 | errors.request.others, retries.total, retries.aborted, retries.read-timeout,
33 | retries.write-timeout, retries.unavailable, retries.other, ignores.total, ignores.aborted,
34 | ignores.read-timeout, ignores.write-timeout, ignores.unavailable, ignores.other, speculative-executions,
35 | errors.connection.init, errors.connection.auth
36 | ]
37 |
38 | cql-messages {
39 | highest-latency = 3 seconds
40 | significant-digits = 3
41 | refresh-interval = 5 minutes
42 | }
43 | }
44 | }
45 | }
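46 |
47 | // With these settings the driver should register metrics under the session name,
48 | // e.g. "example_app.cql-requests" for session metrics and
49 | // "example_app.nodes.<address>.cql-messages" for node metrics.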
--------------------------------------------------------------------------------
/spark-dse/src/main/java/com/datastax/alexott/demos/spark/JoinTests.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.spark;
2 |
3 | import org.apache.spark.api.java.JavaRDD;
4 | import org.apache.spark.api.java.function.Function;
5 | import org.apache.spark.sql.Dataset;
6 | import org.apache.spark.sql.Row;
7 | import org.apache.spark.sql.RowFactory;
8 | import org.apache.spark.sql.SparkSession;
9 | import org.apache.spark.sql.types.DataTypes;
10 | import org.apache.spark.sql.types.StructField;
11 | import org.apache.spark.sql.types.StructType;
12 | import org.spark_project.guava.collect.ImmutableMap;
13 | import scala.Tuple1;
14 |
15 | import java.util.ArrayList;
16 | import java.util.List;
17 | import java.util.Random;
18 |
19 | // create table if not exists test.jtest (id int primary key, v text);
20 |
21 | public class JoinTests {
22 | public static void main(String[] args) {
23 | SparkSession spark = SparkSession
24 | .builder()
25 | .appName("CassandraSparkWithJoin")
26 | // .config("spark.cassandra.connection.host", "192.168.0.10")
27 | .getOrCreate();
28 |
29 | // Dataset df = spark.sql("select * from test.jtest");
30 | // df.show();
31 | Dataset<Row> toJoin = spark.range(1, 1000).selectExpr("cast(id as int) as id");
32 |
33 | toJoin.printSchema();
34 | // toJoin.show();
35 |
36 | Dataset<Row> dataset = spark.read()
37 | .format("org.apache.spark.sql.cassandra")
38 | .options(ImmutableMap.of("table", "jtest", "keyspace", "test"))
39 | .load();
40 |
41 | Dataset<Row> joined = toJoin.join(dataset,
42 | dataset.col("id").equalTo(toJoin.col("id")));
43 | joined.printSchema();
44 | joined.explain();
45 | joined.show(10);
46 | }
47 | }
48 |
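49 | // Note: with the DSE-bundled connector, joined.explain() above should show a
50 | // "Cassandra Direct Join" when the join condition covers the whole partition key,
51 | // instead of a full table scan followed by a shuffle join.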
--------------------------------------------------------------------------------
/spark-oss/src/main/java/com/datastax/alexott/demos/spark/JoinTests.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.spark;
2 |
3 | import org.apache.spark.api.java.JavaRDD;
4 | import org.apache.spark.api.java.function.Function;
5 | import org.apache.spark.sql.Dataset;
6 | import org.apache.spark.sql.Row;
7 | import org.apache.spark.sql.RowFactory;
8 | import org.apache.spark.sql.SparkSession;
9 | import org.apache.spark.sql.types.DataTypes;
10 | import org.apache.spark.sql.types.StructField;
11 | import org.apache.spark.sql.types.StructType;
12 | import org.spark_project.guava.collect.ImmutableMap;
13 | import scala.Tuple1;
14 |
15 | import java.util.ArrayList;
16 | import java.util.List;
17 | import java.util.Random;
18 |
19 | // create table if not exists test.jtest (id int primary key, v text);
20 |
21 | public class JoinTests {
22 | public static void main(String[] args) {
23 | SparkSession spark = SparkSession
24 | .builder()
25 | .appName("CassandraSparkWithJoin")
26 | // .config("spark.cassandra.connection.host", "192.168.0.10")
27 | .getOrCreate();
28 |
29 | // Dataset df = spark.sql("select * from test.jtest");
30 | // df.show();
31 | Dataset<Row> toJoin = spark.range(1, 1000).selectExpr("cast(id as int) as id");
32 |
33 | toJoin.printSchema();
34 | // toJoin.show();
35 |
36 | Dataset<Row> dataset = spark.read()
37 | .format("org.apache.spark.sql.cassandra")
38 | .options(ImmutableMap.of("table", "jtest", "keyspace", "test"))
39 | .load();
40 |
41 | Dataset<Row> joined = toJoin.join(dataset,
42 | dataset.col("id").equalTo(toJoin.col("id")));
43 | joined.printSchema();
44 | joined.explain();
45 | joined.show(10);
46 | }
47 | }
48 |
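49 | // Note: on OSS Spark this join becomes a "Cassandra Direct Join" only with
50 | // Spark Cassandra Connector 2.5+ and the connector extensions enabled, e.g.
51 | // --conf spark.sql.extensions=com.datastax.spark.connector.CassandraSparkExtensions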
--------------------------------------------------------------------------------
/cassandra-join-spark/src/main/java/json/ticks/TickGenerator.java:
--------------------------------------------------------------------------------
1 | package json.ticks;
2 |
3 | import org.slf4j.Logger;
4 | import org.slf4j.LoggerFactory;
5 |
6 | import java.io.Serializable;
7 | import java.util.List;
8 |
9 | public class TickGenerator {
10 |
11 | static final Logger log = LoggerFactory.getLogger("TickGenerator");
12 |
13 | private final List<TickData> stocksList;
14 |
15 | public TickGenerator(List<TickData> stocks) {
16 | this.stocksList = stocks;
17 | }
18 |
19 | public TickValue getTickValueRandom(int i) {
20 | TickData thisStock = stocksList.get(i);
21 | TickValue tickValue = new TickValue(thisStock.getSymbol(), thisStock.getValue());
22 | tickValue.value = this.createRandomValue(tickValue.value);
23 | return tickValue;
24 | }
25 |
26 | public TickData getStockWithRandomValue(int i) {
27 | TickData thisStock = stocksList.get(i);
28 | thisStock.setValue(this.createRandomValue(thisStock.getValue()));
29 | return thisStock;
30 | }
31 |
32 | public int getStocksCount() {
33 | return stocksList.size();
34 | }
35 |
36 | class TickValue implements Serializable {
37 | String tickSymbol;
38 | double value;
39 |
40 | public TickValue(String tickSymbol, double value) {
41 | super();
42 | this.tickSymbol = tickSymbol;
43 | this.value = value;
44 | }
45 | }
46 |
47 | private double createRandomValue(double lastValue) {
48 |
49 | double up = Math.random() * 2;            // fair coin: < 1 moves the price down, otherwise up
50 | double percentMove = Math.random() / 100; // random move of up to 1%
51 |
52 | if (up < 1) {
53 | lastValue -= percentMove*lastValue;
54 | } else {
55 | lastValue += percentMove*lastValue;
56 | }
57 |
58 | return lastValue;
59 | }
60 |
61 | }
62 |
63 |
--------------------------------------------------------------------------------
/spark-dse/src/main/java/com/datastax/alexott/demos/spark/UUIDTest.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.spark;
2 |
3 | import com.datastax.spark.connector.japi.CassandraJavaUtil;
4 | import com.datastax.spark.connector.japi.RDDJavaFunctions;
5 | import com.datastax.spark.connector.japi.rdd.CassandraJavaPairRDD;
6 | import com.datastax.spark.connector.japi.rdd.CassandraJavaRDD;
7 | import org.apache.spark.api.java.JavaRDD;
8 | import org.apache.spark.api.java.function.Function;
9 | import org.apache.spark.sql.SparkSession;
10 | import scala.Tuple1;
11 | import scala.Tuple2;
12 |
13 | import static com.datastax.spark.connector.japi.CassandraJavaUtil.javaFunctions;
14 | import static com.datastax.spark.connector.japi.CassandraJavaUtil.mapRowTo;
15 | import static com.datastax.spark.connector.japi.CassandraJavaUtil.mapRowToTuple;
16 | import static com.datastax.spark.connector.japi.CassandraJavaUtil.mapToRow;
17 | import static com.datastax.spark.connector.japi.CassandraJavaUtil.mapTupleToRow;
18 | import static com.datastax.spark.connector.japi.CassandraJavaUtil.someColumns;
19 |
20 | // create table if not exists test.utest (id int primary key, u uuid);
21 | public class UUIDTest {
22 | public static void main(String[] args) {
23 | SparkSession spark = SparkSession
24 | .builder()
25 | .appName("UUIDTest")
26 | .getOrCreate();
27 |
28 | CassandraJavaRDD<UUIDData> uuids = javaFunctions(spark.sparkContext())
29 | .cassandraTable("test", "utest", mapRowTo(UUIDData.class));
30 |
31 | uuids.collect().forEach(System.out::println);
32 |
33 | JavaRDD<UUIDData> uuids2 = uuids.map(x -> new UUIDData(x.getId() + 10, x.getU()));
34 |
35 | CassandraJavaUtil.javaFunctions(uuids2)
36 | .writerBuilder("test", "utest", mapToRow(UUIDData.class))
37 | .saveToCassandra();
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/spark-oss/src/main/java/com/datastax/alexott/demos/spark/UUIDTest.java:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos.spark;
2 |
3 | import com.datastax.spark.connector.japi.CassandraJavaUtil;
4 | import com.datastax.spark.connector.japi.RDDJavaFunctions;
5 | import com.datastax.spark.connector.japi.rdd.CassandraJavaPairRDD;
6 | import com.datastax.spark.connector.japi.rdd.CassandraJavaRDD;
7 | import org.apache.spark.api.java.JavaRDD;
8 | import org.apache.spark.api.java.function.Function;
9 | import org.apache.spark.sql.SparkSession;
10 | import scala.Tuple1;
11 | import scala.Tuple2;
12 |
13 | import static com.datastax.spark.connector.japi.CassandraJavaUtil.javaFunctions;
14 | import static com.datastax.spark.connector.japi.CassandraJavaUtil.mapRowTo;
15 | import static com.datastax.spark.connector.japi.CassandraJavaUtil.mapRowToTuple;
16 | import static com.datastax.spark.connector.japi.CassandraJavaUtil.mapToRow;
17 | import static com.datastax.spark.connector.japi.CassandraJavaUtil.mapTupleToRow;
18 | import static com.datastax.spark.connector.japi.CassandraJavaUtil.someColumns;
19 |
20 | // create table if not exists test.utest (id int primary key, u uuid);
21 | public class UUIDTest {
22 | public static void main(String[] args) {
23 | SparkSession spark = SparkSession
24 | .builder()
25 | .appName("UUIDTest")
26 | .getOrCreate();
27 |
28 | CassandraJavaRDD<UUIDData> uuids = javaFunctions(spark.sparkContext())
29 | .cassandraTable("test", "utest", mapRowTo(UUIDData.class));
30 |
31 | uuids.collect().forEach(System.out::println);
32 |
33 | JavaRDD<UUIDData> uuids2 = uuids.map(x -> new UUIDData(x.getId() + 10, x.getU()));
34 |
35 | CassandraJavaUtil.javaFunctions(uuids2)
36 | .writerBuilder("test", "utest", mapToRow(UUIDData.class))
37 | .saveToCassandra();
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/scala-driver-4.x/build.sbt:
--------------------------------------------------------------------------------
1 | scalaVersion := "2.11.12"
2 |
3 | lazy val library = new {
4 | val mapperRuntime = "com.datastax.oss" % "java-driver-mapper-runtime" % "4.7.0"
5 | val mapperProcessor = "com.datastax.oss" % "java-driver-mapper-processor" % "4.7.0" % "provided"
6 | val queryBuilder = "com.datastax.oss" % "java-driver-query-builder" % "4.7.0"
7 | }
8 |
9 | lazy val processAnnotations = taskKey[Unit]("Process annotations")
10 |
11 | processAnnotations := {
12 | val log = streams.value.log
13 | log.info("Processing annotations ...")
14 |
15 | val classpath = ((products in Compile).value ++ ((dependencyClasspath in Compile).value.files)) mkString ":"
16 | val destinationDirectory = (classDirectory in Compile).value
17 |
18 | val processor = "com.datastax.oss.driver.internal.mapper.processor.MapperProcessor"
19 | val classesToProcess = Seq("com.datastax.alexott.demos.objmapper.u2",
20 | "com.datastax.alexott.demos.objmapper.udt") mkString " "
21 |
22 | val command = s"javac -cp $classpath -proc:only -processor $processor -XprintRounds -d $destinationDirectory $classesToProcess"
23 |
24 | runCommand(command, "Failed to process annotations.", log)
25 | log.info("Done processing annotations.")
26 | }
27 |
28 | def runCommand(command: String, message: => String, log: Logger) = {
29 | import scala.sys.process._
30 |
31 | val result = command !
32 |
33 | if (result != 0) {
34 | log.error(message)
35 | sys.error("Failed running command: " + command)
36 | }
37 | }
38 |
39 | packageBin in Compile := (packageBin in Compile dependsOn (processAnnotations in Compile)).value
40 |
41 | organization := "com.datastax.alexott"
42 | version := "1.0"
43 | name := "demos"
44 | scalacOptions += "-target:jvm-1.8"
45 | javacOptions ++= Seq("-source", "1.8", "-target", "1.8")
46 | libraryDependencies ++= Seq(
47 | library.mapperRuntime,
48 | library.queryBuilder,
49 | library.mapperProcessor
50 | )
51 |
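52 | // Note: `sbt package` (packageBin) runs processAnnotations first via the
53 | // dependsOn wiring above. The ":" classpath separator assumes a Unix-like OS;
54 | // java.io.File.pathSeparator would make it portable.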
--------------------------------------------------------------------------------
/driver-1.x/src/main/kotlin/com/datastax/alexott/demos/KtTestObjMapper.kt:
--------------------------------------------------------------------------------
1 | package com.datastax.alexott.demos
2 |
3 | import com.datastax.driver.core.Cluster
4 | import com.datastax.driver.mapping.MappingManager
5 | import com.datastax.driver.mapping.annotations.Column
6 | import com.datastax.driver.mapping.annotations.PartitionKey
7 | import com.datastax.driver.mapping.annotations.Table
8 |
9 | @Table(keyspace = "test", name = "app_category_agg")
10 | class AppCategoryAggData {
11 |
12 | @PartitionKey
13 | lateinit var category: String
14 |
15 | @Column(name = "app_count")
16 | var appCount: Int = 0
17 |
18 | @Column(name = "sp_count")
19 | var spCount: Int = 0
20 |
21 | @Column(name = "subscriber_count")
22 | var subscriberCount: Int = 0
23 |
24 | @Column(name = "window_revenue")
25 | var windowRevenue: Long = 0
26 |
27 | @Column(name = "top_apps")
28 | var topApps: List