├── .gitignore
├── CHANGELOG
├── GROUPS
├── ISSUES
├── LICENSE
├── NOTICE
├── OWNERS
├── README.md
├── bin
└── release
├── cassie-core
├── pom.xml
└── src
│ ├── main
│ ├── java
│ │ └── org
│ │ │ └── apache
│ │ │ └── thrift
│ │ │ └── bootleg
│ │ │ └── Utf8Helper.java
│ ├── scala
│ │ └── com
│ │ │ └── twitter
│ │ │ └── cassie
│ │ │ ├── BaseColumnFamily.scala
│ │ │ ├── BatchMutation.scala
│ │ │ ├── BatchMutationBuilder.scala
│ │ │ ├── Cluster.scala
│ │ │ ├── ClusterRemapper.scala
│ │ │ ├── Column.scala
│ │ │ ├── ColumnFamily.scala
│ │ │ ├── ColumnsIteratee.scala
│ │ │ ├── CounterBatchMutationBuilder.scala
│ │ │ ├── CounterColumn.scala
│ │ │ ├── CounterColumnFamily.scala
│ │ │ ├── CounterColumnsIteratee.scala
│ │ │ ├── CounterRowsIteratee.scala
│ │ │ ├── FNV1A.scala
│ │ │ ├── Keyspace.scala
│ │ │ ├── Order.scala
│ │ │ ├── ReadConsistency.scala
│ │ │ ├── RowsIteratee.scala
│ │ │ ├── SuperColumnFamily.scala
│ │ │ ├── SuperCounterBatchMutationBuilder.scala
│ │ │ ├── SuperCounterColumnFamily.scala
│ │ │ ├── WriteConsistency.scala
│ │ │ ├── clocks
│ │ │ ├── Clock.scala
│ │ │ ├── MicrosecondEpochClock.scala
│ │ │ └── StrictlyIncreasingClock.scala
│ │ │ ├── codecs
│ │ │ ├── ByteArrayCodec.scala
│ │ │ ├── Codec.scala
│ │ │ ├── IntCodec.scala
│ │ │ ├── LegacyUtf8Codec.scala
│ │ │ ├── LexicalUUIDCodec.scala
│ │ │ ├── LongCodec.scala
│ │ │ ├── ThriftCodec.scala
│ │ │ └── Utf8Codec.scala
│ │ │ ├── connection
│ │ │ ├── CCluster.scala
│ │ │ ├── ClientProvider.scala
│ │ │ └── ClusterClientProvider.scala
│ │ │ ├── tests
│ │ │ └── util
│ │ │ │ └── FakeCassandra.scala
│ │ │ ├── types.scala
│ │ │ ├── types
│ │ │ ├── LexicalUUID.scala
│ │ │ └── ThriftEncoded.scala
│ │ │ └── util
│ │ │ ├── ByteBufferUtil.scala
│ │ │ └── FutureUtil.scala
│ └── thrift
│ │ └── cassandra.thrift
│ └── test
│ ├── java
│ └── com
│ │ └── twitter
│ │ └── cassie
│ │ ├── MockCassandraClient.java
│ │ └── jtests
│ │ ├── ClusterTest.java
│ │ ├── ColumnFamilyTest.java
│ │ └── examples
│ │ └── CassieRun.java
│ ├── scala
│ └── com
│ │ └── twitter
│ │ └── cassie
│ │ ├── FNV1ATest.scala
│ │ ├── Mutations.scala
│ │ ├── clocks
│ │ └── tests
│ │ │ └── MicrosecondEpochClockTest.scala
│ │ ├── codecs
│ │ └── tests
│ │ │ ├── ByteArrayCodecTest.scala
│ │ │ ├── ByteBufferLiteral.scala
│ │ │ ├── CodecTest.scala
│ │ │ ├── IntCodecTest.scala
│ │ │ ├── LexicalUUIDCodecTest.scala
│ │ │ ├── LongCodecTest.scala
│ │ │ ├── ThriftCodecTest.scala
│ │ │ └── Utf8CodecTest.scala
│ │ ├── examples
│ │ └── CassieRun.scala
│ │ ├── tests
│ │ ├── BatchMutationBuilderTest.scala
│ │ ├── ClusterRemapperTest.scala
│ │ ├── ClusterTest.scala
│ │ ├── ColumnFamilyTest.scala
│ │ ├── ColumnTest.scala
│ │ ├── ColumnsIterateeTest.scala
│ │ ├── CounterColumnFamilyTest.scala
│ │ ├── CounterRowsIterateeTest.scala
│ │ ├── KeyspaceTest.scala
│ │ ├── ReadConsistencyTest.scala
│ │ ├── RowsIterateeTest.scala
│ │ └── WriteConsistencyTest.scala
│ │ ├── types
│ │ └── tests
│ │ │ └── LexicalUUIDTest.scala
│ │ └── util
│ │ ├── ColumnFamilyTestHelper.scala
│ │ └── FakeCassandraTest.scala
│ └── thrift
│ └── person.thrift
├── cassie-hadoop
├── pom.xml
└── src
│ ├── main
│ ├── java
│ │ └── com
│ │ │ └── twitter
│ │ │ └── cassie
│ │ │ └── hadoop
│ │ │ └── CassieCounters.java
│ └── scala
│ │ └── com
│ │ └── twitter
│ │ └── cassie
│ │ └── hadoop
│ │ ├── CassieReducer.scala
│ │ └── ColumnWritable.scala
│ └── test
│ └── scala
│ └── com
│ └── twitter
│ └── cassie
│ └── hadoop
│ └── CassieReducerTest.scala
├── cassie-serversets
├── pom.xml
└── src
│ └── main
│ └── scala
│ └── com
│ └── twitter
│ └── cassie
│ └── serversets
│ └── ServerSetsCluster.scala
├── cassie-stress
├── build.properties
├── pom.xml
└── src
│ └── main
│ └── scala
│ └── com
│ └── twitter
│ └── cassie
│ └── stress
│ ├── BatchMutationBuilderStresser.scala
│ ├── CounterBatchMutationBuilderStresser.scala
│ ├── Main.scala
│ └── Stresser.scala
└── pom.xml
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | *.iml
3 | out/
4 | lib_managed
5 | project/boot
6 | target
7 | project/plugins/project/
8 | project/plugins/src_managed/
9 | .ivyjars
10 | .classpath
11 | .project
12 | .scala_dependencies
13 | sbt
14 |
--------------------------------------------------------------------------------
/CHANGELOG:
--------------------------------------------------------------------------------
1 | # 0.24.0
2 | * upgrade to finagle 5.3.9
3 | * upgrade to util 5.3.10
4 |
5 | # 0.23.0
6 | * Expose expFailFast on ClientBuilder
7 |
8 | [ missing logs ]
9 |
10 | # 0.20.1
11 | * Use tcpConnectTimeout and connectTimeout for ClientBuilder. This will set the correct timeout for a TCP connection.
12 |
13 | # 0.20.0
14 | * added cassie-stress for doing micro-benchmarks
15 | * refactor to batch mutation builders to reduce garbage
16 | * sanity check on timeouts
17 | * changed default consistency level to LocalQuorum
18 | * finagle 3.0.0
19 | * util 3.0.0
20 |
21 | # 0.19.1
22 | * finagle 1.11.1
23 |
24 | # 0.19.0
25 | * add a ServerSetsCluster constructor that accepts a ZooKeeperClient instance (matthew billoti)
26 | * upgrade to util 1.12.12 and finagle 1.10.0
27 | * support out-of-order operations on FakeCassandra
28 | * numerous cleanups in preparation for open-sourcing
29 |
30 | # 0.18.0
31 | * make FakeCassandra spin up on random port (@kmx)
32 | * Allow the column iteratee to take in order and limit. (@skr)
33 | * Move from RetryingFilter+TimeoutFilter to RetryPolicy (@stuhood)
34 | * make cassie reducer take an optional port (@kmx)
35 | * pass port to ClusterRemapper to preserve it across remapping (@zuercher)
36 | * add super column support to FakeCassandra, handle reversed slice range start/finish (@zuercher)
37 | * make ClusterBase trait (@skr)
38 |
39 | # 0.17.1
40 | * fix bug in FakeCassandra that would cause port collisions
41 | * upgrade to util 1.12.7
42 | * upgrade to finagle 1.9.10
43 |
44 | # 0.17.0
45 | * update to latest finagle, etc.
46 | * switch tracers to factories
47 |
48 | # 0.16.4
49 | * ?
50 |
51 | # 0.16.3
52 | * fixed a bug in SuperCounterColumnFamilyBatchMutationBuilder where we weren't actually doing anything, ever.
53 |
54 | # 0.16.2
55 | * add a zero-arg constructor to ColumnFamily so that kyle can mock it in his tests
56 |
57 | # 0.16.1
58 | * fix multigetslice for FakeCassandra
59 |
60 | # 0.16.0
61 | * add multigetRowSlice for regular super column families
62 |
63 | # 0.15.0
64 | * fix Utf8Codec. If you are using the current codec, you may need to use LegacyUtf8Codec for compatibility with your stored data.
65 | * fix getRowSlice for regular super column families (ignored start/end column, order and size)
66 |
67 | # 0.14.0
68 | * SuperCounterColumnFamily was added
69 | * regular super column families were also added
70 |
71 | # 0.13.0
72 | * upgrade to finagle 1.8.4
73 | * integrated serversets into cassie as a separate project (cassie-serversets)
74 | ServerSetsCluster is introduced for discovering hosts via serversets
75 | Changes in the API include: mapHostsEvery() method moved from Keyspace class to Cluster class
76 | * high level latency and failure counts reported for underlying Cassandra thrift calls:
77 | set a StatsReceiver when instantiating a Cluster to collect the stats
78 |
79 | # 0.11.0
80 | * improved iteration (see RowsIteratee or ColumnsIteratee for examples)
81 | * improved cluster remapping
82 | * separate retry policies for counters and cluster remapping
83 |
84 | # 0.10.2
85 | * fix bug where ColumnFamily#columnIteratee might never finish
86 |
87 | # 0.10.1
88 | * upgrade to finagle 1.7.1
89 |
90 | # 0.10.0
91 | * no more implicits for codecs
92 | * removed redundant codecs. going forward we'll use one codec per datatype.
93 | This means instead of VarLong and FixedLong we just have Long (and its
94 | encoded fixed)
95 | * update to new thrift IDL for counters (should be backwards compat if you
96 | don't use counters)
97 | * make it possible to iterate over the columns in a row (by @iano)
98 | * make it possible to pass in a tracer
99 | * retry on more than just WriteError's
100 | * update to StandardProject 0.12.7 and sbt-thrift 1.4.4
101 | * update to util 1.10.1
102 | * better exception handling in ColumnFamily
103 |
104 | # 0.9.4
105 | * drop finagle-ostrich dependency
106 |
107 | # 0.9.3
108 | * upgrade util-core dependency
109 |
110 | # 0.9.2
111 | * upgrade to finagle 1.6.1
112 |
113 | # 0.9.1
114 | * upgrade to finagle 1.5.3
115 |
116 | # 0.9.0
117 | * switch from Option[StatsReceiver] to NullStatsReceiver as an optional parameter
118 | * improvements to the cassie reducer
119 |
120 | # 0.8.0
121 | * upgrade to finagle 1.5.1
122 |
123 | # 0.7.3
124 | * fix bug in StatsReceiver implementation
125 |
126 | # 0.7.2
127 | * update util to 1.8.12
128 |
129 | # 0.7.1
130 | * make the finagle stats receiver configurable
131 | * fix heterogenous batches [kyle]
132 | * upgrade to finagle 1.4.3
133 |
134 | # 0.7.0
135 | * Add FakeCassandra for testing cassie-based apps
136 |
137 | # 0.6.2
138 | * lots of scaladoc
139 | * some minor interface changes (caused by documenting the code)
140 | * add EachQuorum and LocalQuorum consistency levels
141 | * README update
142 | * made the methods for overriding typing on CF private. if you want to use a different type, copy the CF instance
143 | * added ability for batch mutations across column families
144 |
145 | # 0.5.3
146 | * assorted fixes to build/publishing
147 |
148 | # 0.5.2
149 | * fix bug in remapping clusters
150 |
151 | # 0.5.1
152 | * replace dependency on util with util-core (robey)
153 |
154 | # 0.5.0
155 | * you can now set a connection timeout
156 | * support for counter column families
157 | * clocks are no longer configurable. everyone gets MicrosecondEpochClock
158 | * fix bug where timer thread wouldn't shut down
159 | * better error handling when re-mapping clusters
160 | * better testing around encoding ints and longs
161 | * ostrich4 support
162 | * an api for deletions at a given timestamp (good for backfilling)
163 | * moved to subprojects
164 | * basic hadoop output support
165 |
166 | # 0.3.0 [2011-03-04]
167 |
168 | * ttl support. Column now takes a ttl as a Duration
169 | * retries now defaults to 0. this is both less surprising and works around a bug in finagle where retries >= maxConnectionsPerHost (with 1 host)
170 | * switch to builder pattern for Column
171 |
--------------------------------------------------------------------------------
/GROUPS:
--------------------------------------------------------------------------------
1 | cassandra
--------------------------------------------------------------------------------
/ISSUES:
--------------------------------------------------------------------------------
1 | Herein is a list of known issues and areas for improvement:
2 |
3 | # Mixed Java/Scala data structures
4 |
5 | We currently use a mix of java and scala data structures. We should really standardize on one,
6 | then provide wrappers for others. The plan is to standardize on scala data structures internally and
7 | then provide wrappers for java compatibility/ease-of-use.
8 |
9 | # Code duplication
10 |
11 | We've duplicated code across ColumnFamily, SuperColumnFamily, CounterColumnFamily and SuperCounterColumnFamily.
12 | We need to pull this back together in a more sane way. (also Column and CounterColumn)
13 |
14 | # Incomplete Cassandra feature support
15 |
16 | Not all cassandra operations are supported (we've taken a JIT approach).
17 |
18 | # Iteratee code is fragile and complicated
19 |
20 | We should move to Finagle Spools.
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2010 Coda Hale; 2011-2012 Twitter, Inc.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
--------------------------------------------------------------------------------
/NOTICE:
--------------------------------------------------------------------------------
1 | Cassie is a Scala client for Cassandra
2 | Copyright 2011-2012 Twitter, Inc.
3 |
4 | This work was originally inspired by Coda Hale's client on GitHub
5 | https://github.com/codahale/cassie
6 |
7 | Copyright (c) 2010 Coda Hale
8 |
9 | Permission is hereby granted, free of charge, to any person obtaining
10 | a copy of this software and associated documentation files (the
11 | "Software"), to deal in the Software without restriction, including
12 | without limitation the rights to use, copy, modify, merge, publish,
13 | distribute, sublicense, and/or sell copies of the Software, and to
14 | permit persons to whom the Software is furnished to do so, subject to
15 | the following conditions:
16 |
17 | The above copyright notice and this permission notice shall be
18 | included in all copies or substantial portions of the Software.
19 |
20 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
21 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
22 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
23 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
24 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
25 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
26 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
27 |
--------------------------------------------------------------------------------
/OWNERS:
--------------------------------------------------------------------------------
1 | ryan
2 | stuhood
3 | chrisg
4 | boaz
5 | pyaskevich
6 |
--------------------------------------------------------------------------------
/bin/release:
--------------------------------------------------------------------------------
1 | sbt publish-release && sbt 'project cassie-core' publish-to-git && sbt 'project cassie-hadoop' publish-to-git && sbt 'project cassie-serversets' publish-to-git
--------------------------------------------------------------------------------
/cassie-core/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 4.0.0
4 | com.twitter
5 | cassie-core
6 | jar
7 | 0.25.1-SNAPSHOT
8 |
9 | com.twitter
10 | scala-parent-292
11 | 0.0.4
12 | ../../parents/scala-parent-292/pom.xml
13 |
14 |
15 | ${project.basedir}/../../.git
16 |
17 |
18 |
19 |
20 | com.novocode
21 | junit-interface
22 | 0.7
23 | test
24 |
25 |
26 | commons-codec
27 | commons-codec
28 | 1.5
29 |
30 |
31 | org.scala-tools.testing
32 | scalacheck_2.9.1
33 | 1.9
34 | test
35 |
36 |
37 | org.scalatest
38 | scalatest_2.9.2
39 | 1.7.2
40 | test
41 |
42 |
43 | org.slf4j
44 | slf4j-api
45 | 1.6.1
46 |
47 |
48 |
49 | com.twitter
50 | finagle-core
51 | 6.0.2-SNAPSHOT
52 |
53 |
54 | com.twitter
55 | finagle-thrift
56 | 6.0.2-SNAPSHOT
57 |
58 |
59 | com.twitter
60 | util-core
61 | 6.0.2-SNAPSHOT
62 |
63 |
64 |
65 |
66 |
67 | com.twitter
68 | maven-finagle-thrift-plugin
69 |
70 |
71 | finagle
72 |
73 |
74 |
75 |
76 |
77 |
78 |
--------------------------------------------------------------------------------
/cassie-core/src/main/java/org/apache/thrift/bootleg/Utf8Helper.java:
--------------------------------------------------------------------------------
1 | package org.apache.thrift.bootleg;
2 |
3 | /*
4 | * Licensed to the Apache Software Foundation (ASF) under one
5 | * or more contributor license agreements. See the NOTICE file
6 | * distributed with this work for additional information
7 | * regarding copyright ownership. The ASF licenses this file
8 | * to you under the Apache License, Version 2.0 (the
9 | * "License"); you may not use this file except in compliance
10 | * with the License. You may obtain a copy of the License at
11 | *
12 | * http://www.apache.org/licenses/LICENSE-2.0
13 | *
14 | * Unless required by applicable law or agreed to in writing,
15 | * software distributed under the License is distributed on an
16 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
17 | * KIND, either express or implied. See the License for the
18 | * specific language governing permissions and limitations
19 | * under the License.
20 | */
21 |
22 | /**
23 | * Apache Thrift
24 | * Copyright 2006-2009 The Apache Software Foundation, et al.
25 | *
26 | * This product includes software developed at
27 | * The Apache Software Foundation (http://www.apache.org/).
28 | */
29 |
30 | /**
31 | * Stolen without mercy from https://issues.apache.org/jira/browse/THRIFT-765
32 | */
33 | @SuppressWarnings("all")
34 | @Deprecated
35 | public final class Utf8Helper {
36 | private Utf8Helper() {}
37 |
38 | @Deprecated
39 | public static final int getByteLength(final String s) {
40 | int byteLength = 0;
41 | int codePoint;
42 | for (int i = 0; i < s.length(); i++) {
43 | codePoint = s.charAt(i);
44 | if (codePoint >= 0x07FF) {
45 | codePoint = s.codePointAt(i);
46 | if (Character.isSupplementaryCodePoint(codePoint)) {
47 | i++;
48 | }
49 | }
50 | if (codePoint >= 0 && codePoint <= 0x007F) {
51 | byteLength++;
52 | } else if (codePoint >= 0x80 && codePoint <= 0x07FF) {
53 | byteLength += 2;
54 | } else if ((codePoint >= 0x0800 && codePoint < 0xD800) || (codePoint > 0xDFFF && codePoint <= 0xFFFD)) {
55 | byteLength+=3;
56 | } else if (codePoint >= 0x10000 && codePoint <= 0x10FFFF) {
57 | byteLength+=4;
58 | } else {
59 | throw new RuntimeException("Unknown unicode codepoint in string! "
60 | + Integer.toHexString(codePoint));
61 | }
62 | }
63 | return byteLength;
64 | }
65 |
66 | @Deprecated
67 | public static byte[] encode(String s) {
68 | byte[] buf = new byte[getByteLength(s)];
69 | encode(s, buf, 0);
70 | return buf;
71 | }
72 |
73 | @Deprecated
74 | public static void encode(final String s, final byte[] buf, final int offset) {
75 | int nextByte = 0;
76 | int codePoint;
77 | final int strLen = s.length();
78 | for (int i = 0; i < strLen; i++) {
79 | codePoint = s.charAt(i);
80 | if (codePoint >= 0x07FF) {
81 | codePoint = s.codePointAt(i);
82 | if (Character.isSupplementaryCodePoint(codePoint)) {
83 | i++;
84 | }
85 | }
86 | if (codePoint <= 0x007F) {
87 | buf[offset + nextByte] = (byte)codePoint;
88 | nextByte++;
89 | } else if (codePoint <= 0x7FF) {
90 | buf[offset + nextByte ] = (byte)(0xC0 | ((codePoint >> 6) & 0x1F));
91 | buf[offset + nextByte + 1] = (byte)(0x80 | ((codePoint >> 0) & 0x3F));
92 | nextByte+=2;
93 | } else if ((codePoint < 0xD800) || (codePoint > 0xDFFF && codePoint <= 0xFFFD)) {
94 | buf[offset + nextByte ] = (byte)(0xE0 | ((codePoint >> 12) & 0x0F));
95 | buf[offset + nextByte + 1] = (byte)(0x80 | ((codePoint >> 6) & 0x3F));
96 | buf[offset + nextByte + 2] = (byte)(0x80 | ((codePoint >> 0) & 0x3F));
97 | nextByte+=3;
98 | } else if (codePoint >= 0x10000 && codePoint <= 0x10FFFF) {
99 | buf[offset + nextByte ] = (byte)(0xF0 | ((codePoint >> 18) & 0x07));
100 | buf[offset + nextByte + 1] = (byte)(0x80 | ((codePoint >> 12) & 0x3F));
101 | buf[offset + nextByte + 2] = (byte)(0x80 | ((codePoint >> 6) & 0x3F));
102 | buf[offset + nextByte + 3] = (byte)(0x80 | ((codePoint >> 0) & 0x3F));
103 | nextByte+=4;
104 | } else {
105 | throw new RuntimeException("Unknown unicode codepoint in string! "
106 | + Integer.toHexString(codePoint));
107 | }
108 | }
109 | }
110 |
111 | @Deprecated
112 | public static String decode(byte[] buf) {
113 | char[] charBuf = new char[buf.length];
114 | int charsDecoded = decode(buf, 0, buf.length, charBuf);
115 | return new String(charBuf, 0, charsDecoded);
116 | }
117 |
118 | public static final int UNI_SUR_HIGH_START = 0xD800;
119 | public static final int UNI_SUR_HIGH_END = 0xDBFF;
120 | public static final int UNI_SUR_LOW_START = 0xDC00;
121 | public static final int UNI_SUR_LOW_END = 0xDFFF;
122 | public static final int UNI_REPLACEMENT_CHAR = 0xFFFD;
123 |
124 | private static final int HALF_BASE = 0x0010000;
125 | private static final long HALF_SHIFT = 10;
126 | private static final long HALF_MASK = 0x3FFL;
127 |
128 | @Deprecated
129 | public static int decode(final byte[] buf, final int offset, final int byteLength, final char[] charBuf) {
130 | int curByteIdx = offset;
131 | int endByteIdx = offset + byteLength;
132 |
133 | int curCharIdx = 0;
134 |
135 | while (curByteIdx < endByteIdx) {
136 | final int b = buf[curByteIdx++]&0xff;
137 | final int ch;
138 |
139 | if (b < 0xC0) {
140 | ch = b;
141 | } else if (b < 0xE0) {
142 | ch = ((b & 0x1F) << 6) + (buf[curByteIdx++] & 0x3F);
143 | } else if (b < 0xf0) {
144 | ch = ((b & 0xF) << 12) + ((buf[curByteIdx++] & 0x3F) << 6) + (buf[curByteIdx++] & 0x3F);
145 | } else {
146 | ch = ((b & 0x7) << 18) + ((buf[curByteIdx++]& 0x3F) << 12) + ((buf[curByteIdx++] & 0x3F) << 6) + (buf[curByteIdx++] & 0x3F);
147 | }
148 |
149 | if (ch <= 0xFFFF) {
150 | // target is a character <= 0xFFFF
151 | charBuf[curCharIdx++] = (char) ch;
152 | } else {
153 | // target is a character in range 0xFFFF - 0x10FFFF
154 | final int chHalf = ch - HALF_BASE;
155 | charBuf[curCharIdx++] = (char) ((chHalf >> HALF_SHIFT) + UNI_SUR_HIGH_START);
156 | charBuf[curCharIdx++] = (char) ((chHalf & HALF_MASK) + UNI_SUR_LOW_START);
157 | }
158 | }
159 | return curCharIdx;
160 | }
161 | }
162 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/BaseColumnFamily.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import com.twitter.cassie.codecs.ThriftCodec
18 | import com.twitter.cassie.connection.ClientProvider
19 | import com.twitter.cassie.util.FutureUtil
20 | import com.twitter.finagle.stats.StatsReceiver
21 | import com.twitter.finagle.tracing.Trace
22 | import com.twitter.util.Future
23 | import org.slf4j.Logger
24 | import org.apache.cassandra.finagle.thrift
25 | import org.apache.cassandra.finagle.thrift.Cassandra.ServiceToClient
26 |
27 | object BaseColumnFamily {
28 | val annPredCodec = new ThriftCodec[thrift.SlicePredicate](classOf[thrift.SlicePredicate])
29 | }
30 |
31 | private[cassie] abstract class BaseColumnFamily(keyspace: String, cf: String, provider: ClientProvider, stats: StatsReceiver) {
32 |
33 | import BaseColumnFamily._
34 | import FutureUtil._
35 |
36 | val baseAnnotations = Map("keyspace" -> keyspace, "columnfamily" -> cf)
37 |
38 | protected def trace(annotations: Map[String, Any]) {
39 | Trace.recordBinaries(baseAnnotations)
40 | Trace.recordBinaries(annotations)
41 | }
42 |
43 | /**
44 | * @param name The thrift method name being called
45 | * @param traceAnnotations Optional annotations for tracing
46 | * @param args A lazily constructed list of the arguments to the thrift method, for debug logging
47 | * @param f Function that receives the connection
48 | */
49 | def withConnection[T](
50 | name: String,
51 | traceAnnotations: Map[String, Any] = Map.empty,
52 | args: => Seq[Any] = Nil
53 | )(f: ServiceToClient => Future[T]
54 | )(implicit log: Logger): Future[T] = {
55 | if (log.isDebugEnabled) {
56 | log.debug(args.mkString(name + "(", ", ", ")"))
57 | }
58 | timeFutureWithFailures(stats, name) {
59 | // Associate trace annotations with the client span by using a terminal trace id
60 | Trace.unwind {
61 | Trace.setTerminalId(Trace.nextId)
62 |
63 | trace(traceAnnotations)
64 | provider.map(f)
65 | }
66 | }
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/BatchMutation.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import java.nio.ByteBuffer
18 | import java.util.{ List => JList, Map => JMap, Set => JSet, ArrayList => JArrayList,HashMap => JHashMap}
19 | import org.apache.cassandra.finagle.thrift
20 |
21 |
22 | trait BatchMutation {
23 |
24 | private[cassie] val mutations: JMap[ByteBuffer, JMap[String, JList[thrift.Mutation]]] =
25 | new JHashMap[ByteBuffer, JMap[String, JList[thrift.Mutation]]]()
26 |
27 | // modifies the supplied JHashMap
28 | protected def putMutation(encodedKey: ByteBuffer, cfName: String, mutation: thrift.Mutation) = {
29 | var h = mutations.get(encodedKey)
30 | if (h == null){
31 | h = new JHashMap[String, JList[thrift.Mutation]]
32 | mutations.put(encodedKey, h)
33 | }
34 |
35 | var l = h.get(cfName)
36 | if (l == null) {
37 | l = new JArrayList[thrift.Mutation]
38 | h.put(cfName, l)
39 | }
40 | l.add(mutation)
41 | }
42 |
43 | }
44 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/BatchMutationBuilder.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import com.twitter.util.Future
18 | import java.nio.ByteBuffer
19 | import java.util.Collections.{ singleton => singletonJSet }
20 | import java.util.{ List => JList, Map => JMap, Set => JSet, ArrayList => JArrayList,HashMap => JHashMap}
21 | import org.apache.cassandra.finagle.thrift
22 | import scala.collection.mutable.ListBuffer
23 |
24 | /**
25 | * A ColumnFamily-alike which batches mutations into a single API call.
26 | *
27 | */
28 | class BatchMutationBuilder[Key, Name, Value](private[cassie] val cf: ColumnFamily[Key, Name, Value])
29 | extends BatchMutation {
30 |
31 | type This = BatchMutationBuilder[Key, Name, Value]
32 |
33 | def insert(key: Key, column: Column[Name, Value]): This = synchronized {
34 | val mutation = insertMutation(key, column)
35 | val encodedKey = cf.keyCodec.encode(key)
36 | putMutation(encodedKey, cf.name, mutation)
37 | this
38 | }
39 |
40 | def removeColumn(key: Key, columnName: Name): This =
41 | removeColumns(key, singletonJSet(columnName))
42 |
43 | def removeColumn(key: Key, columnName: Name, timestamp: Long): This =
44 | removeColumns(key, singletonJSet(columnName), timestamp)
45 |
46 | def removeColumns(key: Key, columns: JSet[Name]): This =
47 | removeColumns(key, columns, cf.clock.timestamp)
48 |
49 | def removeColumns(key: Key, columnNames: JSet[Name], timestamp: Long): This = synchronized {
50 | val mutation = deleteMutation(key, columnNames, timestamp)
51 | val encodedKey = cf.keyCodec.encode(key)
52 |
53 | putMutation(encodedKey, cf.name, mutation)
54 | this
55 | }
56 |
57 | /**
58 | * Submits the batch of operations, returning a Future[Void] to allow blocking for success.
59 | */
60 | def execute(): Future[Void] = {
61 | if (mutations.isEmpty) {
62 | Future.Void
63 | } else {
64 | Future {
65 | cf.batch(mutations)
66 | }.flatten
67 | }
68 | }
69 |
70 | private[this] def insertMutation(key: Key, column: Column[Name, Value]): thrift.Mutation = {
71 | val cosc = new thrift.ColumnOrSuperColumn
72 | cosc.setColumn(
73 | Column.convert(
74 | cf.nameCodec,
75 | cf.valueCodec,
76 | cf.clock,
77 | column
78 | )
79 | )
80 | val mutation = new thrift.Mutation
81 | mutation.setColumn_or_supercolumn(cosc)
82 | }
83 |
84 | private[this] def deleteMutation(key: Key, columnNames: JSet[Name], timestamp: Long): thrift.Mutation = {
85 | val pred = new thrift.SlicePredicate
86 | pred.setColumn_names(cf.nameCodec.encodeSet(columnNames))
87 |
88 | val deletion = new thrift.Deletion()
89 | deletion.setTimestamp(timestamp)
90 | deletion.setPredicate(pred)
91 |
92 | val mutation = new thrift.Mutation
93 | mutation.setDeletion(deletion)
94 | }
95 | }
96 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/Cluster.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import com.twitter.cassie.connection.{ CCluster, ClusterClientProvider, RetryPolicy, SocketAddressCluster }
18 | import com.twitter.conversions.time._
19 | import com.twitter.finagle.stats.{ StatsReceiver, NullStatsReceiver }
20 | import com.twitter.finagle.tracing.{ Tracer, NullTracer }
21 | import com.twitter.util.Duration
22 | import org.slf4j.LoggerFactory
23 | import java.net.{ SocketAddress, InetSocketAddress }
24 | import scala.collection.JavaConversions._
25 |
26 | /**
27 | * A Cassandra cluster.
28 | *
29 | * @param seedHosts list of some hosts in the cluster
30 | * @param seedPort the port number for '''all''' hosts in the cluster
31 | * @param tracer a finagle tracer factory
32 | * @param stats a finagle stats receiver
33 | */
34 | class Cluster(seedHosts: Set[String], seedPort: Int, stats: StatsReceiver, tracer: Tracer.Factory) extends ClusterBase {
35 | private var mapHostsEvery: Duration = 10.minutes
36 |
37 | /**
38 | * @param seedHosts A comma separated list of seed hosts for a cluster. The rest of the
39 | * hosts can be found via mapping the cluster. See KeyspaceBuilder.mapHostsEvery.
40 | * The port number is assumed to be 9160.
41 | */
42 | def this(seedHosts: String, stats: StatsReceiver = NullStatsReceiver) =
43 | this(seedHosts.split(',').filter { !_.isEmpty }.toSet, 9160, stats, NullTracer.factory)
44 |
45 | /**
46 | * @param seedHosts A comma separated list of seed hosts for a cluster. The rest of the
47 | * hosts can be found via mapping the cluser. See KeyspaceBuilder.mapHostsEvery.
48 | */
49 | def this(seedHosts: String, port: Int) =
50 | this(seedHosts.split(',').filter { !_.isEmpty }.toSet, port, NullStatsReceiver, NullTracer.factory)
51 |
52 | /**
53 | * @param seedHosts A collection of seed host addresses. The port number is assumed to be 9160
54 | */
55 | def this(seedHosts: java.util.Collection[String]) =
56 | this(collectionAsScalaIterable(seedHosts).toSet, 9160, NullStatsReceiver, NullTracer.factory)
57 |
58 | /**
59 | * Returns a [[com.twitter.cassie.KeyspaceBuilder]] instance.
60 | * @param name the keyspace's name
61 | */
62 | def keyspace(name: String): KeyspaceBuilder = {
63 | val scopedStats = stats.scope("cassie").scope(name)
64 | val seedAddresses = seedHosts.map { host => new InetSocketAddress(host, seedPort) }.toSeq
65 | val cluster = if (mapHostsEvery > 0.seconds)
66 | // either map the cluster for this keyspace
67 | new ClusterRemapper(name, seedAddresses, mapHostsEvery, seedPort, stats.scope("remapper"), tracer)
68 | else
69 | // or connect directly to the hosts that were given as seeds
70 | new SocketAddressCluster(seedAddresses)
71 |
72 | KeyspaceBuilder(cluster, name, scopedStats, tracer)
73 | }
74 |
75 | /**
76 | * @param d Cassie will query the cassandra cluster every [[period]] period
77 | * to refresh its host list.
78 | */
79 | def mapHostsEvery(period: Duration): Cluster = {
80 | mapHostsEvery = period
81 | this
82 | }
83 | }
84 |
/** Minimal cluster interface: a factory for keyspace builders. */
trait ClusterBase {
  /**
   * Returns a [[com.twitter.cassie.KeyspaceBuilder]] instance.
   * @param name the keyspace's name
   */
  def keyspace(name: String): KeyspaceBuilder
}
92 |
object KeyspaceBuilder {
  // Shared logger, used by connect() to warn about suspicious timeout configuration.
  private val log = LoggerFactory.getLogger(this.getClass)
}
96 |
97 |
/**
 * Immutable builder for [[com.twitter.cassie.Keyspace]] connections. Each setter
 * returns a modified copy; call connect() to materialize a Keyspace.
 */
case class KeyspaceBuilder(
  cluster: CCluster[SocketAddress],
  name: String,
  stats: StatsReceiver,
  tracer: Tracer.Factory,
  _retries: Int = 0,
  _timeout: Int = 5000,
  _requestTimeout: Int = 1000,
  _connectTimeout: Int = 1000,
  _minConnectionsPerHost: Int = 1,
  _maxConnectionsPerHost: Int = 5,
  _hostConnectionMaxWaiters: Int = 100,
  _retryPolicy: RetryPolicy = RetryPolicy.Idempotent,
  _failFast: Boolean = false
) {

  import KeyspaceBuilder._

  /**
   * connect to the cluster with the specified parameters
   */
  def connect(): Keyspace = {
    // TODO: move to builder pattern as well
    warnOnSuspectTimeouts()
    val provider = new ClusterClientProvider(
      cluster,
      name,
      _retries,
      _timeout.milliseconds,
      _requestTimeout.milliseconds,
      _connectTimeout.milliseconds,
      _minConnectionsPerHost,
      _maxConnectionsPerHost,
      _hostConnectionMaxWaiters,
      stats,
      tracer,
      _retryPolicy,
      _failFast)
    new Keyspace(name, provider, stats)
  }

  // Log (but do not fail) when the overall timeout cannot cover a single attempt.
  private[this] def warnOnSuspectTimeouts(): Unit = {
    if (_timeout < _requestTimeout)
      log.error("Timeout (for all requests including retries) is less than the per-request timeout.")
    if (_timeout < _connectTimeout)
      log.error("Timeout (for all requests including retries) is less than the connection timeout.")
  }

  /**
   * In general, it is recommended that you set this to true.
   * It is likely to become the default behavior in Finagle in the not too distant future.
   */
  def failFast(ff: Boolean): KeyspaceBuilder = copy(_failFast = ff)

  /** Overall timeout in ms, covering all retries of a request. */
  def timeout(t: Int): KeyspaceBuilder = copy(_timeout = t)

  /** Number of retries per request. */
  def retries(r: Int): KeyspaceBuilder = copy(_retries = r)

  /** Policy deciding which requests may be retried. */
  def retryPolicy(r: RetryPolicy): KeyspaceBuilder = copy(_retryPolicy = r)

  /**
   * @see requestTimeout in [[http://twitter.github.com/finagle/finagle-core/target/doc/main/api/com/twitter/finagle/builder/ClientBuilder.html]]
   */
  def requestTimeout(r: Int): KeyspaceBuilder = copy(_requestTimeout = r)

  /**
   * @see connectionTimeout in [[http://twitter.github.com/finagle/finagle-core/target/doc/main/api/com/twitter/finagle/builder/ClientBuilder.html]]
   */
  def connectTimeout(r: Int): KeyspaceBuilder = copy(_connectTimeout = r)

  /** Lower bound on pooled connections per host. */
  def minConnectionsPerHost(m: Int): KeyspaceBuilder = copy(_minConnectionsPerHost = m)

  /** Upper bound on pooled connections per host. */
  def maxConnectionsPerHost(m: Int): KeyspaceBuilder = copy(_maxConnectionsPerHost = m)

  /** A finagle stats receiver for reporting. */
  def reportStatsTo(r: StatsReceiver): KeyspaceBuilder = copy(stats = r)

  /** Set a tracer to collect request traces. */
  def tracerFactory(t: Tracer.Factory): KeyspaceBuilder = copy(tracer = t)

  /** Maximum number of requests queued waiting for a host connection. */
  def hostConnectionMaxWaiters(i: Int): KeyspaceBuilder = copy(_hostConnectionMaxWaiters = i)
}
177 |
178 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/ClusterRemapper.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import com.twitter.cassie.connection.CCluster
18 | import com.twitter.cassie.connection.{ClusterClientProvider, SocketAddressCluster, RetryPolicy}
19 | import com.twitter.concurrent.Spool
20 | import com.twitter.finagle.builder.{Cluster => FCluster}
21 | import com.twitter.finagle.ServiceFactory
22 | import com.twitter.finagle.stats.StatsReceiver
23 | import com.twitter.finagle.tracing.{ Tracer, NullTracer }
24 | import com.twitter.finagle.WriteException
25 | import org.slf4j.LoggerFactory
26 | import com.twitter.util.{ Duration, Future, Promise, Return, Time, JavaTimer }
27 | import java.io.IOException
28 | import java.net.{ InetSocketAddress, SocketAddress }
29 | import java.util.concurrent.TimeUnit
30 | import scala.collection.JavaConversions._
31 | import scala.collection.SeqProxy
32 | import scala.util.parsing.json.JSON
33 |
34 | /**
35 | * Given a seed host and port, returns a set of nodes in the cluster.
36 | *
37 | * @param keyspace the keyspace to map
38 | * @param seeds seed node addresses
39 | * @param port the Thrift port of client nodes
40 | */
object ClusterRemapper {
  // Shared logger for ring-mapping progress and failures.
  private val log = LoggerFactory.getLogger(this.getClass)
}
/**
 * Periodically re-maps the cluster: asks the current hosts for the ring
 * description and emits finagle cluster Add/Rem change events.
 *
 * @param keyspace the keyspace to map
 * @param seeds initial seed node addresses
 * @param remapPeriod how often to re-fetch the ring
 * @param port the Thrift port used when building node addresses from endpoints
 * @param statsReceiver receives a counter increment per remap failure
 * @param tracerFactory tracer used by the short-lived describe_ring client
 */
private class ClusterRemapper(
  keyspace: String,
  seeds: Seq[InetSocketAddress],
  remapPeriod: Duration,
  port: Int = 9160,
  statsReceiver: StatsReceiver,
  tracerFactory: Tracer.Factory
) extends CCluster[SocketAddress] {
  import ClusterRemapper._

  // Current host list and the tail promise of the change spool; always updated
  // together under `synchronized`.
  private[this] var hosts = seeds
  private[this] var changes = new Promise[Spool[FCluster.Change[SocketAddress]]]

  // Timer keeps updating the host list. Variables "hosts" and "changes" together reflect the cluster consistently
  // at any time
  private[cassie] var timer = new JavaTimer(true)
  timer.schedule(Time.now, remapPeriod) {
    fetchHosts(hosts) onSuccess { ring =>
      // slf4j uses "{}" anchors; "%s" would be logged literally and the argument dropped.
      log.debug("Received: {}", ring)
      val (added, removed) = synchronized {
        val oldSet = hosts.toSet
        hosts = ring.flatMap { h =>
          collectionAsScalaIterable(h.endpoints).map {
            new InetSocketAddress(_, port)
          }
        }.toSeq
        val newSet = hosts.toSet
        (newSet &~ oldSet, oldSet &~ newSet)
      }
      added foreach { host => appendChange(FCluster.Add(host)) }
      removed foreach { host => appendChange(FCluster.Rem(host)) }
    } onFailure { error =>
      log.error("error mapping ring", error)
      statsReceiver.counter("ClusterRemapFailure." + error.getClass().getName()).incr()
    }
  }

  // Push one change onto the spool and advance the tail promise.
  private[this] def appendChange(change: FCluster.Change[SocketAddress]) = {
    val newTail = new Promise[Spool[FCluster.Change[SocketAddress]]]
    changes() = Return(change *:: newTail)
    changes = newTail
  }

  def close = timer.stop()

  /** Snapshot of the current hosts plus a future spool of subsequent changes. */
  def snap: (Seq[SocketAddress], Future[Spool[FCluster.Change[SocketAddress]]]) = (hosts, changes)

  // Ask the current hosts to describe the ring via a throwaway client, closing it afterwards.
  private[this] def fetchHosts(hosts: Seq[SocketAddress]) = {
    val ccp = new ClusterClientProvider(
      new SocketAddressCluster(hosts),
      keyspace,
      retries = 5,
      timeout = Duration(5, TimeUnit.SECONDS),
      requestTimeout = Duration(1, TimeUnit.SECONDS),
      connectTimeout = Duration(1, TimeUnit.SECONDS),
      minConnectionsPerHost = 1,
      maxConnectionsPerHost = 1,
      hostConnectionMaxWaiters = 100,
      statsReceiver = statsReceiver,
      tracerFactory = tracerFactory,
      retryPolicy = RetryPolicy.Idempotent
    )
    ccp map {
      log.info("Mapping cluster...")
      _.describe_ring(keyspace)
    } ensure {
      ccp.close()
    }
  }
}
114 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/Column.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import com.twitter.cassie.clocks.Clock
18 | import com.twitter.cassie.codecs.Codec
19 | import com.twitter.conversions.time._
20 | import com.twitter.util.Duration
21 | import org.apache.cassandra.finagle.thrift
22 |
object Column {
  /** Build a column with neither timestamp nor ttl. */
  def apply[A, B](name: A, value: B): Column[A, B] = new Column(name, value)

  /**
   * Convert from a thrift ColumnOrSuperColumn to a Cassie column.
   * Delegates to the thrift.Column overload — the two previously duplicated logic.
   */
  private[cassie] def convert[A, B](nameCodec: Codec[A], valueCodec: Codec[B], colOrSCol: thrift.ColumnOrSuperColumn): Column[A, B] =
    convert(nameCodec, valueCodec, colOrSCol.column)

  /**
   * Convert from a thrift.Column to a Cassie column, carrying over the
   * timestamp and (when set) the ttl.
   */
  private[cassie] def convert[A, B](nameCodec: Codec[A], valueCodec: Codec[B], column: thrift.Column): Column[A, B] = {
    val c = Column(
      nameCodec.decode(column.name),
      valueCodec.decode(column.value)
    ).timestamp(column.timestamp)

    if (column.isSetTtl) {
      c.ttl(column.getTtl.seconds)
    } else {
      c
    }
  }

  /**
   * Convert from a Cassie Column to a thrift.Column. When the column has no
   * explicit timestamp, the clock supplies one.
   */
  private[cassie] def convert[A, B](nameCodec: Codec[A], valueCodec: Codec[B], clock: Clock, col: Column[A, B]): thrift.Column = {
    val tColumn = new thrift.Column(nameCodec.encode(col.name))
    tColumn.setValue(valueCodec.encode(col.value))
    tColumn.setTimestamp(col.timestamp.getOrElse(clock.timestamp))
    col.ttl.foreach { t => tColumn.setTtl(t.inSeconds) }
    tColumn
  }
}
69 |
/**
 * A column: a name/value pair with an optional timestamp and time-to-live.
 */
case class Column[A, B](name: A, value: B, timestamp: Option[Long], ttl: Option[Duration]) {

  /** Build a column with neither timestamp nor ttl. */
  def this(name: A, value: B) = this(name, value, None, None)

  /** Returns a copy of this column with the given timestamp. Builder-style. */
  def timestamp(ts: Long): Column[A, B] = copy(timestamp = Some(ts))

  /** Returns a copy of this column with the given ttl. Builder-style. */
  def ttl(t: Duration): Column[A, B] = copy(ttl = Some(t))

  /** Name/column pair, handy when building maps of columns. */
  def pair = (name, this)
}
92 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/ColumnsIteratee.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import com.twitter.util.{ Future, Promise }
18 | import java.util.{ Map => JMap, List => JList, ArrayList => JArrayList }
19 | import org.apache.cassandra.finagle.thrift
20 | import scala.collection.JavaConversions._
21 | import scala.collection.mutable.Buffer
22 |
23 | /**
24 | * Async iteration across the columns for a given key.
25 | *
26 | * EXAMPLE
27 | * val cf = new Cluster("127.0.0.1").keyspace("foo")
28 | * .connect().columnFamily("bar", Utf8Codec, Utf8Codec, Utf8Codec)
29 | *
30 | * val done = cf.columnsIteratee.foreach("bam").foreach {col =>
31 | * println(col) // this function is executed asynchronously for each column
32 | * }
33 | * done() // this is a Future[Unit] that will be satisfied when the iteration
34 | * // is done
35 | */
36 |
trait ColumnsIteratee[Key, Name, Value] {
  def hasNext(): Boolean
  def next(): Future[ColumnsIteratee[Key, Name, Value]]

  /** Applies f to every column; the returned future is satisfied when iteration finishes. */
  def foreach(f: Column[Name, Value] => Unit): Future[Unit] = {
    val done = new Promise[Unit]
    next map { iter => iter.visit(done, f) } handle { case t => done.setException(t) }
    done
  }

  /** Collects the results of applying f to every column. */
  def map[A](f: Column[Name, Value] => A): Future[Seq[A]] = {
    val acc = Buffer.empty[A]
    foreach { c => acc += f(c) } map { _ => acc }
  }

  def visit(p: Promise[Unit], f: Column[Name, Value] => Unit): Unit
}
56 |
object ColumnsIteratee {
  /**
   * Build the initial iteratee for paging over one row's columns.
   * @param batchSize columns fetched per round trip
   * @param limit total number of columns to deliver
   */
  def apply[Key, Name, Value](cf: ColumnFamily[Key, Name, Value], key: Key,
    start: Option[Name], end: Option[Name], batchSize: Int,
    limit: Int, order: Order = Order.Normal) = {
    new InitialColumnsIteratee(cf, key, start, end, batchSize, limit, order)
  }
}
64 |
/**
 * First step of column iteration: nothing buffered yet; next() fetches the first slice.
 */
private[cassie] class InitialColumnsIteratee[Key, Name, Value](
  val cf: ColumnFamily[Key, Name, Value], key: Key, start: Option[Name], end: Option[Name],
  batchSize: Int, remaining: Int, order: Order) extends ColumnsIteratee[Key, Name, Value] {

  def hasNext() = true

  def next() = {
    // Never fetch more than the caller's remaining limit, even if batchSize is larger.
    val fetchSize = math.min(batchSize, remaining)

    cf.getRowSlice(key, start, end, fetchSize, order).map { buf =>
      // A short slice means the row is exhausted; remaining == batchSize means the limit is met.
      if (buf.size < batchSize || batchSize == remaining) {
        new FinalColumnsIteratee(buf)
      } else {
        // Continue from the last column seen; the subsequent iteratee re-fetches and skips it.
        new SubsequentColumnsIteratee(cf, key, batchSize, buf.last.name, end, remaining - buf.size, order, buf)
      }
    }
  }

  def visit(p: Promise[Unit], f: Column[Name, Value] => Unit) {
    throw new UnsupportedOperationException("no need to visit the initial Iteratee")
  }
}
88 |
/**
 * Middle step of column iteration: drains an already-fetched slice and pages in
 * the next one. `start` is the last column already delivered; each fetch requests
 * one extra column and drops the first to avoid re-delivering it.
 */
private[cassie] class SubsequentColumnsIteratee[Key, Name, Value](val cf: ColumnFamily[Key, Name, Value],
  val key: Key, val batchSize: Int, val start: Name, val end: Option[Name],
  val remaining: Int, val order: Order, val buffer: JList[Column[Name, Value]])
  extends ColumnsIteratee[Key, Name, Value] {

  def hasNext = true

  def next() = {
    // +1 accounts for the overlapping `start` column that is skipped below.
    val fetchSize = math.min(batchSize + 1, remaining + 1)

    cf.getRowSlice(key, Some(start), end, fetchSize, order).map { buf =>
      // NOTE(review): assumes the slice contains at least the `start` column; an
      // empty slice would make subList throw — confirm upstream guarantees.
      val skipped = buf.subList(1, buf.length)
      if (skipped.size() < batchSize || batchSize == remaining) {
        new FinalColumnsIteratee(skipped)
      } else {
        new SubsequentColumnsIteratee(cf, key, batchSize, skipped.last.name, end, remaining - skipped.size, order, skipped)
      }
    }
  }

  def visit(p: Promise[Unit], f: Column[Name, Value] => Unit) {
    // Deliver the buffered columns, then chain iteration; any failure fails the promise.
    for (c <- buffer) {
      f(c)
    }
    if (hasNext) {
      next map (_.visit(p, f)) handle { case e => p.setException(e) }
    } else {
      p.setValue(Unit)
    }
  }
}
120 |
/**
 * Last step of column iteration: only drains the already-fetched buffer and
 * completes the promise.
 */
private[cassie] class FinalColumnsIteratee[Key, Name, Value](val buffer: JList[Column[Name, Value]])
  extends ColumnsIteratee[Key, Name, Value] {
  def hasNext = false
  def next = Future.exception(new UnsupportedOperationException("no next for the final iteratee"))

  def visit(p: Promise[Unit], f: Column[Name, Value] => Unit) {
    for (c <- buffer) {
      f(c)
    }
    // Satisfy with the unit value `()`, not the `Unit` companion object (which
    // only works via value discarding).
    p.setValue(())
  }
}
133 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/CounterBatchMutationBuilder.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import com.twitter.cassie.codecs.Codec
18 | import com.twitter.util.Future
19 | import java.util.Collections.{ singleton => singletonJSet }
20 | import java.util.{Set => JSet}
21 | import org.apache.cassandra.finagle.thrift
22 |
23 | /**
24 | * A ColumnFamily-alike which batches mutations into a single API call for counters.
25 | */
class CounterBatchMutationBuilder[Key, Name](cf: CounterColumnFamily[Key, Name])
  extends BatchMutation {

  type This = CounterBatchMutationBuilder[Key, Name]

  /** Queue an increment of `column` under `key`. */
  def insert(key: Key, column: CounterColumn[Name]): This = synchronized {
    putMutation(cf.keyCodec.encode(key), cf.name, insertMutation(key, column))
    this
  }

  /** Queue removal of a single column under `key`. */
  def removeColumn(key: Key, columnName: Name): This =
    removeColumns(key, singletonJSet(columnName))

  /** Queue removal of several columns under `key`. */
  def removeColumns(key: Key, columnNames: JSet[Name]): This = synchronized {
    putMutation(cf.keyCodec.encode(key), cf.name, deleteMutation(key, columnNames))
    this
  }

  /**
   * Submits the batch of operations, returning a future to allow blocking for success.
   */
  def execute(): Future[Void] = {
    if (mutations.isEmpty) Future.Void
    else Future { cf.batch(mutations) }.flatten
  }

  // Build a thrift Mutation wrapping a counter-column insert.
  private[this] def insertMutation(key: Key, column: CounterColumn[Name]): thrift.Mutation = {
    val counter = new thrift.CounterColumn(cf.nameCodec.encode(column.name), column.value)
    val cosc = new thrift.ColumnOrSuperColumn()
    cosc.setCounter_column(counter)
    val mutation = new thrift.Mutation
    mutation.setColumn_or_supercolumn(cosc)
    mutation
  }

  // Build a thrift Mutation deleting the named columns.
  private[this] def deleteMutation(key: Key, columnNames: JSet[Name]): thrift.Mutation = {
    val predicate = new thrift.SlicePredicate
    predicate.setColumn_names(cf.nameCodec.encodeSet(columnNames))
    val deletion = new thrift.Deletion
    deletion.setPredicate(predicate)
    val mutation = new thrift.Mutation
    mutation.setDeletion(deletion)
    mutation
  }
}
76 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/CounterColumn.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import com.twitter.cassie.codecs.Codec
18 | import org.apache.cassandra.finagle.thrift
19 |
object CounterColumn {

  /**
   * Convert a thrift.CounterColumn into a cassie CounterColumn.
   */
  private[cassie] def convert[A](nameCodec: Codec[A], counter: thrift.CounterColumn): CounterColumn[A] =
    CounterColumn(nameCodec.decode(counter.name), counter.value)

  /**
   * Extract the counter column of a thrift ColumnOrSuperColumn and convert it.
   */
  private[cassie] def convert[A](nameCodec: Codec[A], cosc: thrift.ColumnOrSuperColumn): CounterColumn[A] =
    convert(nameCodec, cosc.getCounter_column)

  /**
   * Convert a cassie CounterColumn into its thrift representation.
   */
  private[cassie] def convert[A](nameCodec: Codec[A], col: CounterColumn[A]): thrift.CounterColumn =
    new thrift.CounterColumn(nameCodec.encode(col.name), col.value)
}
53 |
54 | /**
55 | * A counter column in a Cassandra. Belongs to a row in a column family.
56 | */
57 | case class CounterColumn[A](name: A, value: Long) {
58 | def pair = name -> this
59 | }
60 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/CounterColumnsIteratee.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import com.twitter.util.{ Future, Promise }
18 | import java.util.{ Map => JMap, List => JList, ArrayList => JArrayList }
19 | import org.apache.cassandra.finagle.thrift
20 | import scala.collection.JavaConversions._
21 | import scala.collection.mutable.Buffer
22 |
23 | /**
24 | * Async iteration across the columns for a given key.
25 | *
26 | * EXAMPLE
27 | * val cf = new Cluster("127.0.0.1").keyspace("foo")
28 | * .connect().columnFamily("bar", Utf8Codec, Utf8Codec, Utf8Codec)
29 | *
30 | * val done = cf.CounterColumnsIteratee.foreach("bam").foreach {col =>
31 | * println(col) // this function is executed asynchronously for each column
32 | * }
33 | * done() // this is a Future[Unit] that will be satisfied when the iteration
34 | * // is done
35 | */
36 |
trait CounterColumnsIteratee[Key, Name] {

  def hasNext(): Boolean
  def next(): Future[CounterColumnsIteratee[Key, Name]]

  /** Applies f to every counter column; the future is satisfied when iteration finishes. */
  def foreach(f: CounterColumn[Name] => Unit): Future[Unit] = {
    val done = new Promise[Unit]
    next map { iter => iter.visit(done, f) } handle { case t => done.setException(t) }
    done
  }

  /** Collects the results of applying f to every counter column. */
  def map[A](f: CounterColumn[Name] => A): Future[Iterable[A]] = {
    val acc = Buffer.empty[A]
    foreach { c => acc += f(c) } map { _ => acc }
  }

  def visit(p: Promise[Unit], f: CounterColumn[Name] => Unit): Unit
}
56 |
object CounterColumnsIteratee {
  /**
   * Build the initial iteratee for paging over one row's counter columns.
   * @param batchSize columns fetched per round trip
   * @param limit total number of columns to deliver
   */
  def apply[Key, Name](cf: CounterColumnFamily[Key, Name], key: Key,
    start: Option[Name], end: Option[Name], batchSize: Int,
    limit: Int, order: Order = Order.Normal) = {
    new InitialCounterColumnsIteratee(cf, key, start, end, batchSize, limit, order)
  }
}
64 |
/**
 * First step of counter-column iteration: nothing buffered yet; next() fetches
 * the first slice.
 */
private[cassie] class InitialCounterColumnsIteratee[Key, Name](
  val cf: CounterColumnFamily[Key, Name],
  val key: Key,
  val start: Option[Name],
  val end: Option[Name],
  val batchSize: Int,
  val remaining: Int,
  val order: Order)
  extends CounterColumnsIteratee[Key, Name] {

  def hasNext() = true

  def next() = {
    // Never fetch more than the caller's remaining limit, even if batchSize is larger.
    val fetchSize = math.min(batchSize, remaining)

    cf.getRowSlice(key, start, end, fetchSize, order).map { buf =>
      // A short slice means the row is exhausted; remaining == batchSize means the limit is met.
      if (buf.size < batchSize || batchSize == remaining) {
        new FinalCounterColumnsIteratee(buf)
      } else {
        // Continue from the last column seen; the subsequent iteratee re-fetches and skips it.
        new SubsequentCounterColumnsIteratee(cf, key, batchSize, buf.last.name, end, remaining - buf.size, order, buf)
      }
    }
  }

  def visit(p: Promise[Unit], f: CounterColumn[Name] => Unit) {
    throw new UnsupportedOperationException("no need to visit the initial Iteratee")
  }
}
93 |
/**
 * Middle step of counter-column iteration: drains an already-fetched slice and
 * pages in the next one, re-fetching the last delivered column and dropping it.
 */
private[cassie] class SubsequentCounterColumnsIteratee[Key, Name](
  val cf: CounterColumnFamily[Key, Name],
  val key: Key,
  val batchSize: Int,
  val start: Name,
  val end: Option[Name],
  val remaining: Int,
  val order: Order,
  val buffer: JList[CounterColumn[Name]])
  extends CounterColumnsIteratee[Key, Name] {

  def hasNext = true

  def next() = {
    // +1 accounts for the overlapping `start` column that is skipped below.
    val fetchSize = math.min(batchSize + 1, remaining + 1)
    cf.getRowSlice(key, Some(start), end, fetchSize, order).map { buf =>
      // NOTE(review): assumes the slice contains at least the `start` column; an
      // empty slice would make subList throw — confirm upstream guarantees.
      val skipped = buf.subList(1, buf.length)
      if (skipped.size() < batchSize || batchSize == remaining) {
        new FinalCounterColumnsIteratee(skipped)
      } else {
        new SubsequentCounterColumnsIteratee(cf, key, batchSize, skipped.last.name, end, remaining - skipped.size, order, skipped)
      }
    }
  }

  def visit(p: Promise[Unit], f: CounterColumn[Name] => Unit) {
    // Deliver the buffered columns, then chain iteration; any failure fails the promise.
    for (c <- buffer) {
      f(c)
    }
    if (hasNext) {
      next map (_.visit(p, f)) handle { case e => p.setException(e) }
    } else {
      p.setValue(Unit)
    }
  }
}
130 |
/**
 * Last step of counter-column iteration: only drains the already-fetched
 * buffer and completes the promise.
 */
private[cassie] class FinalCounterColumnsIteratee[Key, Name](
  val buffer: JList[CounterColumn[Name]])
  extends CounterColumnsIteratee[Key, Name] {
  def hasNext = false
  def next = Future.exception(new UnsupportedOperationException("no next for the final iteratee"))

  def visit(p: Promise[Unit], f: CounterColumn[Name] => Unit) {
    for (c <- buffer) {
      f(c)
    }
    // Satisfy with the unit value `()`, not the `Unit` companion object (which
    // only works via value discarding).
    p.setValue(())
  }
}
144 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/CounterRowsIteratee.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import com.twitter.cassie.util.ByteBufferUtil
18 | import com.twitter.util.{ Future, Promise }
19 | import java.util.{ List => JList }
20 | import org.apache.cassandra.finagle.thrift
21 | import scala.collection.JavaConversions._
22 | import scala.collection.mutable.Buffer
23 |
24 | /**
25 | * Given a column family, a key range, a batch size, a slice predicate,
26 | * iterates through slices of each matching row until a cycle is detected
27 | * (e.g., Cassandra returns the last slice a second time) or until an empty
28 | * slice is returned (e.g., no more slices).
29 | * Provides a sequence of (row key, columns).
30 | *
31 | * EXAMPLE:
32 | * val cluster = new Cluster("127.0.0.1").keyspace("foo")
 * .connect().counterColumnFamily("bar", Utf8Codec, Utf8Codec, Utf8Codec)
 * val finished = cf.rowsIteratee(100).foreach { case(key, columns) =>
 * println(key) //this function is executed async for each row
 * println(columns)
37 | * }
38 | * finished() //this is a Future[Unit]. wait on it to know when the iteration is done
39 | */
40 |
trait CounterRowsIteratee[Key, Name] {
  /** Applies f to each (key, columns) row; the future is satisfied when iteration finishes. */
  def foreach(f: (Key, JList[CounterColumn[Name]]) => Unit): Future[Unit] = {
    val done = new Promise[Unit]
    next map { iter => iter.visit(done, f) } handle { case t => done.setException(t) }
    done
  }

  /** Collects the results of applying f to each (key, columns) row. */
  def map[A](f: (Key, JList[CounterColumn[Name]]) => A): Future[Seq[A]] = {
    val acc = Buffer.empty[A]
    foreach { (key, columns) => acc += f(key, columns) } map { _ => acc }
  }

  def hasNext(): Boolean
  def next(): Future[CounterRowsIteratee[Key, Name]]
  def visit(p: Promise[Unit], f: (Key, JList[CounterColumn[Name]]) => Unit): Unit
}
57 |
object CounterRowsIteratee {
  /** Iterate over the entire key range. */
  def apply[Key, Name](cf: CounterColumnFamily[Key, Name], batchSize: Int, pred: thrift.SlicePredicate) = {
    new CounterInitialRowsIteratee(cf, batchSize, pred)
  }

  /** Iterate over rows with keys between start and end. */
  def apply[Key, Name](cf: CounterColumnFamily[Key, Name], start: Key, end: Key, batchSize: Int, pred: thrift.SlicePredicate) = {
    new CounterInitialRowsIteratee(cf, start, end, batchSize, pred)
  }
}
67 |
/**
 * First step of row iteration over a counter column family: next() fetches the
 * first range slice between `start` and `end`.
 */
private[cassie] class CounterInitialRowsIteratee[Key, Name](
  val cf: CounterColumnFamily[Key, Name],
  val start: Key,
  val end: Key,
  val batchSize: Int,
  val predicate: thrift.SlicePredicate) extends CounterRowsIteratee[Key, Name] {

  // Iterate the whole range by decoding empty buffers as the unbounded start/end
  // keys. NOTE(review): assumes keyCodec maps an empty buffer to the ring's
  // "unbounded" key; confirm for custom key codecs.
  def this(cf: CounterColumnFamily[Key, Name], batchSize: Int, pred: thrift.SlicePredicate) = {
    this(cf, cf.keyCodec.decode(ByteBufferUtil.EMPTY), cf.keyCodec.decode(ByteBufferUtil.EMPTY),
      batchSize, pred)
  }

  def visit(p: Promise[Unit], f: (Key, JList[CounterColumn[Name]]) => Unit): Unit = {
    throw new UnsupportedOperationException("no need to visit the initial Iteratee")
  }

  override def hasNext() = true

  def next(): Future[CounterRowsIteratee[Key, Name]] = {
    cf.getRangeSlice(start, end, batchSize, predicate) map { buf =>
      // the last found key, or the end key if the slice was empty
      buf.lastOption match {
        case None => new CounterFinalRowsIteratee(buf)
        case Some(row) => new CounterSubsequentRowsIteratee(cf, row._1, end, batchSize, predicate, buf)
      }
    }
  }
}
96 |
/**
 * Iteratee holding one fetched batch of counter rows plus the state needed to
 * fetch the next batch (resuming from the last key seen).
 */
private[cassie] class CounterSubsequentRowsIteratee[Key, Name](
  val cf: CounterColumnFamily[Key, Name],
  val start: Key,
  val end: Key,
  val batchSize: Int,
  val predicate: thrift.SlicePredicate,
  val buffer: JList[(Key, JList[CounterColumn[Name]])]) extends CounterRowsIteratee[Key, Name] {
  override def hasNext = true

  /** Applies `f` to the buffered rows, then fetches and visits the next batch. */
  def visit(p: Promise[Unit], f: (Key, JList[CounterColumn[Name]]) => Unit): Unit = {
    for ((key, columns) <- buffer) {
      f(key, columns)
    }
    next map { n =>
      n.visit(p, f)
    } handle { case e => p.setException(e) }
  }

  /**
   * Fetches batchSize + 1 rows starting at the last key seen: the range scan is
   * start-inclusive, so the first row duplicates one we already yielded and is dropped.
   */
  def next() = {
    cf.getRangeSlice(start, end, batchSize + 1, predicate).map { buf =>
      // Guard against an empty slice (e.g. the start row was removed between
      // batches): subList(1, 0) would throw IndexOutOfBoundsException.
      val skipped = if (buf.isEmpty) buf else buf.subList(1, buf.size)
      skipped.lastOption match {
        case None => new CounterFinalRowsIteratee(skipped)
        case Some(r) => new CounterSubsequentRowsIteratee(cf, r._1, end, batchSize, predicate, skipped)
      }
    }
  }
}
125 |
/**
 * Terminal iteratee: yields its remaining buffered rows, then fulfills the
 * promise. `next` must not be called (hasNext is false).
 */
private[cassie] class CounterFinalRowsIteratee[Key, Name](
  val buffer: JList[(Key, JList[CounterColumn[Name]])]) extends CounterRowsIteratee[Key, Name] {
  override def hasNext = false
  def next = Future.exception(new UnsupportedOperationException("No more results."))
  def visit(p: Promise[Unit], f: (Key, JList[CounterColumn[Name]]) => Unit) = {
    for ((key, columns) <- buffer) {
      f(key, columns)
    }
    // Fulfil with the unit value (). The previous code passed the `Unit`
    // companion object and relied on value discarding to coerce it.
    p.setValue(())
  }
}
137 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/FNV1A.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | /**
18 | * The FNV1-A 64-bit hashing algorithm.
19 | */
object FNV1A extends (Array[Byte] => Long) {
  // Standard 64-bit FNV-1a parameters.
  private val OffsetBasis = 0xcbf29ce484222325L
  private val Prime = 0x100000001b3L

  /**
   * Hashes `bytes` with FNV-1a (64-bit). Bytes are sign-extended when XORed
   * into the Long accumulator — NOTE(review): for octets >= 0x80 this differs
   * from the canonical unsigned-octet FNV-1a; preserved for hash compatibility.
   */
  def apply(bytes: Array[Byte]): Long = {
    var hash = OffsetBasis
    var i = 0
    val len = bytes.length
    while (i < len) {
      hash = (hash ^ bytes(i)) * Prime
      i += 1
    }
    hash
  }
}
34 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/Keyspace.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import com.twitter.cassie.codecs.{ ThriftCodec, Codec }
18 | import com.twitter.cassie.connection.ClientProvider
19 | import com.twitter.cassie.util.FutureUtil.timeFutureWithFailures
20 | import com.twitter.finagle.stats.{ StatsReceiver, NullStatsReceiver }
21 | import com.twitter.util.Future
22 | import java.nio.ByteBuffer
23 | import java.util.{ HashMap => JHashMap, Map => JMap, List => JList, ArrayList => JArrayList }
24 | import org.apache.cassandra.finagle.thrift
25 | import scala.collection.JavaConversions._
26 |
27 | /**
28 | * A Cassandra keyspace, which maintains a connection pool.
29 | *
30 | * @param provider a [[com.twitter.cassie.connection.ClientProvider]] instance
31 | */
class Keyspace(val name: String, val provider: ClientProvider, val stats: StatsReceiver) {

  /**
   * Returns a ColumnFamily with the given name and column/value codecs.
   */
  def columnFamily[Key, Name, Value](
    name: String,
    keyCodec: Codec[Key],
    nameCodec: Codec[Name],
    valueCodec: Codec[Value]) =
    new ColumnFamily(this.name, name, provider, keyCodec, nameCodec, valueCodec, stats.scope(name))

  /**
   * Returns a CounterColumnFamily with the given name and column codecs
   */
  def counterColumnFamily[Key, Name](
    name: String,
    keyCodec: Codec[Key],
    nameCodec: Codec[Name]) =
    new CounterColumnFamily(this.name, name, provider, keyCodec, nameCodec, stats.scope(name))

  /** Returns a SuperColumnFamily with the given name and codecs. */
  def superColumnFamily[Key, Name, SubName, Value](
    name: String,
    keyCodec: Codec[Key],
    nameCodec: Codec[Name],
    subNameCodec: Codec[SubName],
    valueCodec: Codec[Value]) = new SuperColumnFamily(this.name, name, provider, keyCodec, nameCodec, subNameCodec, valueCodec, stats.scope(name))

  /** Returns a SuperCounterColumnFamily with the given name and codecs. */
  def superCounterColumnFamily[Key, Name, SubName](
    name: String,
    keyCodec: Codec[Key],
    nameCodec: Codec[Name],
    subNameCodec: Codec[SubName]) = new SuperCounterColumnFamily(this.name, name, provider, keyCodec, nameCodec, subNameCodec, stats.scope(name))

  /**
   * Execute batch mutations across column families. To use this, build a separate BatchMutationBuilder
   * for each CF, then send them all to this method.
   * @return a future that can contain [[org.apache.cassandra.finagle.thrift.TimedOutException]],
   *   [[org.apache.cassandra.finagle.thrift.UnavailableException]] or [[org.apache.cassandra.finagle.thrift.InvalidRequestException]]
   * @param batches a Seq of BatchMutationBuilders, each for a different CF. Their mutations will be merged and
   *   sent as one operation
   * @param writeConsistency to write this at
   */
  def execute(batches: Iterable[BatchMutation], writeConsistency: WriteConsistency): Future[Void] = {
    // isEmpty instead of size == 0: Iterable#size may be O(n); also avoids an
    // early `return`, keeping the method a single expression.
    if (batches.isEmpty) {
      Future.Void
    } else {
      // Merge the per-builder maps into one row -> (cf -> mutations) map so the
      // whole batch goes to Cassandra as a single batch_mutate call.
      val mutations = new JHashMap[ByteBuffer, JMap[String, JList[thrift.Mutation]]]

      batches.map(_.mutations).foreach { ms =>
        for ((row, inner) <- ms) {
          if (!mutations.containsKey(row)) {
            mutations.put(row, new JHashMap[String, JList[thrift.Mutation]])
          }
          val rowMap = mutations.get(row)
          for ((cf, mutationList) <- inner) {
            if (!rowMap.containsKey(cf)) {
              rowMap.put(cf, new JArrayList[thrift.Mutation])
            }
            // Appends preserve the order mutations were queued in each builder.
            rowMap.get(cf).addAll(mutationList)
          }
        }
      }

      timeFutureWithFailures(stats, "batch_execute") {
        provider.map { _.batch_mutate(mutations, writeConsistency.level) }
      }
    }
  }

  /**
   * Closes connections to the cluster for this keyspace.
   */
  def close() = provider.close()
}
106 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/Order.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | /**
18 | * An ordering of columns.
19 | */
sealed case class Order(normal: Boolean) {
  /** True when columns are to be returned in reverse order. */
  val reversed = !normal
  override def toString = {
    val direction = if (normal) "normal" else "reversed"
    "Order(%s)".format(direction)
  }
}
24 |
object Order {
  /**
   * Return the columns in normal order. Shared singleton instance.
   */
  val Normal = Order(true)

  /**
   * Return the columns in reverse order. Shared singleton instance.
   */
  val Reversed = Order(false)
}
36 |
37 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/ReadConsistency.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import org.apache.cassandra.finagle.thrift
18 |
19 | /**
20 | * The level of consistency required for a read operation.
21 | */
sealed case class ReadConsistency(level: thrift.ConsistencyLevel) {
  /** Renders as e.g. "ReadConsistency.Quorum". */
  override def toString =
    s"ReadConsistency.${level.toString.toLowerCase.capitalize}"
}
26 |
object ReadConsistency {

  /**
   * Will return the record returned by the first node to respond. A consistency
   * check is sometimes done in a background thread to fix any consistency issues
   * when ReadConsistency.One is used (see read_repair_chance in cassandra). This
   * means eventually subsequent calls will have correct data even if the initial read gets
   * an older value. (This is called read repair.)
   */
  val One = ReadConsistency(thrift.ConsistencyLevel.ONE)

  /**
   * Will query all nodes and return the record with the most recent timestamp
   * once it has at least a majority of replicas reported. Again, the remaining
   * replicas will be checked in the background.
   */
  val Quorum = ReadConsistency(thrift.ConsistencyLevel.QUORUM)

  /**
   * Returns the record with the most recent timestamp once a majority of replicas within
   * the local datacenter have replied. Requires NetworkTopologyStrategy on the server side.
   */
  val LocalQuorum = ReadConsistency(thrift.ConsistencyLevel.LOCAL_QUORUM)

  /**
   * Returns the record with the most recent timestamp once a majority of replicas within
   * each datacenter have replied.
   */
  val EachQuorum = ReadConsistency(thrift.ConsistencyLevel.EACH_QUORUM)

  /**
   * Will query all nodes and return the record with the most recent timestamp
   * once all nodes have replied. Any unresponsive nodes will fail the
   * operation.
   */
  val All = ReadConsistency(thrift.ConsistencyLevel.ALL)
}
64 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/RowsIteratee.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import com.twitter.cassie.util.ByteBufferUtil
18 | import com.twitter.util.{ Future, Promise }
19 | import java.util.{ List => JList }
20 | import org.apache.cassandra.finagle.thrift
21 | import scala.collection.JavaConversions._
22 | import scala.collection.mutable.Buffer
23 |
24 | /**
25 | * Given a column family, a key range, a batch size, a slice predicate,
26 | * iterates through slices of each matching row until a cycle is detected
27 | * (e.g., Cassandra returns the last slice a second time) or until an empty
28 | * slice is returned (e.g., no more slices).
29 | * Provides a sequence of (row key, columns).
30 | *
31 | * EXAMPLE:
32 | * val cluster = new Cluster("127.0.0.1").keyspace("foo")
33 | * .connect().columnFamily("bar", Utf8Codec, Utf8Codec, Utf8Codec)
34 | * val finished = cf.rowsIteratee(100).foreach { case(key, columns) =>
35 | * println(key) //this function is executed async for each row
36 |  * println(columns)
37 | * }
38 | * finished() //this is a Future[Unit]. wait on it to know when the iteration is done
39 | */
40 |
trait RowsIteratee[Key, Name, Value] {
  /**
   * Applies `f` to every (row key, columns) pair. The returned future is
   * satisfied when iteration completes, or fails with the first error.
   */
  def foreach(f: (Key, JList[Column[Name, Value]]) => Unit): Future[Unit] = {
    val done = new Promise[Unit]
    next map { iteratee => iteratee.visit(done, f) } handle { case e => done.setException(e) }
    done
  }

  /** Collects the result of applying `f` to every row, in iteration order. */
  def map[A](f: (Key, JList[Column[Name, Value]]) => A): Future[Seq[A]] = {
    val results = Buffer.empty[A]
    foreach { (key, columns) =>
      results += f(key, columns)
    } map { _ => results }
  }

  /** True if another batch of rows may be available. */
  def hasNext(): Boolean

  /** Fetches the next batch, yielding an iteratee positioned after it. */
  def next(): Future[RowsIteratee[Key, Name, Value]]

  /** Applies `f` to buffered rows, then chains to the next batch, fulfilling `p` at the end. */
  def visit(p: Promise[Unit], f: (Key, JList[Column[Name, Value]]) => Unit): Unit
}
57 |
object RowsIteratee {
  /** Iteratee over the entire key range of `cf`. */
  def apply[Key, Name, Value](cf: ColumnFamily[Key, Name, Value], batchSize: Int, pred: thrift.SlicePredicate) =
    new InitialRowsIteratee(cf, batchSize, pred)

  /** Iteratee over the key range [start, end] of `cf`. */
  def apply[Key, Name, Value](cf: ColumnFamily[Key, Name, Value], start: Key, end: Key, batchSize: Int, pred: thrift.SlicePredicate) =
    new InitialRowsIteratee(cf, start, end, batchSize, pred)
}
67 |
private[cassie] class InitialRowsIteratee[Key, Name, Value](
  val cf: ColumnFamily[Key, Name, Value],
  val start: Key,
  val end: Key,
  val batchSize: Int,
  val predicate: thrift.SlicePredicate) extends RowsIteratee[Key, Name, Value] {

  /** Iterates the full key range: both bounds decode the empty byte buffer. */
  def this(cf: ColumnFamily[Key, Name, Value], batchSize: Int, pred: thrift.SlicePredicate) =
    this(cf, cf.keyCodec.decode(ByteBufferUtil.EMPTY), cf.keyCodec.decode(ByteBufferUtil.EMPTY), batchSize, pred)

  def visit(p: Promise[Unit], f: (Key, JList[Column[Name, Value]]) => Unit): Unit =
    throw new UnsupportedOperationException("no need to visit the initial Iteratee")

  override def hasNext() = true

  /** Fetches the first slice and hands off to a subsequent or final iteratee. */
  def next(): Future[RowsIteratee[Key, Name, Value]] =
    cf.getRangeSlice(start, end, batchSize, predicate) map { rows =>
      rows.lastOption match {
        // Continue from the last key returned by this slice.
        case Some(lastRow) => new SubsequentRowsIteratee(cf, lastRow._1, end, batchSize, predicate, rows)
        // Empty slice: nothing in the range at all.
        case None => new FinalRowsIteratee(rows)
      }
    }
}
95 |
/**
 * Iteratee holding one fetched batch of rows plus the state needed to fetch
 * the next batch (resuming from the last key seen).
 */
private[cassie] class SubsequentRowsIteratee[Key, Name, Value](
  cf: ColumnFamily[Key, Name, Value],
  start: Key,
  end: Key,
  batchSize: Int,
  predicate: thrift.SlicePredicate,
  buffer: JList[(Key, JList[Column[Name, Value]])]) extends RowsIteratee[Key, Name, Value] {
  override def hasNext = true

  /** Applies `f` to the buffered rows, then fetches and visits the next batch. */
  def visit(p: Promise[Unit], f: (Key, JList[Column[Name, Value]]) => Unit): Unit = {
    for ((key, columns) <- buffer) {
      f(key, columns)
    }
    next map { n =>
      n.visit(p, f)
    } handle { case e => p.setException(e) }
  }

  /**
   * Fetches batchSize + 1 rows starting at the last key seen: the range scan is
   * start-inclusive, so the first row duplicates one we already yielded and is dropped.
   */
  def next() = {
    cf.getRangeSlice(start, end, batchSize + 1, predicate).map { buf =>
      // Guard against an empty slice (e.g. the start row was removed between
      // batches): subList(1, 0) would throw IndexOutOfBoundsException.
      val skipped = if (buf.isEmpty) buf else buf.subList(1, buf.size)
      skipped.lastOption match {
        case None => new FinalRowsIteratee(skipped)
        case Some(r) => new SubsequentRowsIteratee(cf, r._1, end, batchSize, predicate, skipped)
      }
    }
  }
}
124 |
/**
 * Terminal iteratee: yields its remaining buffered rows, then fulfills the
 * promise. `next` must not be called (hasNext is false).
 */
private[cassie] class FinalRowsIteratee[Key, Name, Value](buffer: JList[(Key, JList[Column[Name, Value]])]) extends RowsIteratee[Key, Name, Value] {
  override def hasNext = false
  def next = Future.exception(new UnsupportedOperationException("No more results."))
  def visit(p: Promise[Unit], f: (Key, JList[Column[Name, Value]]) => Unit) = {
    for ((key, columns) <- buffer) {
      f(key, columns)
    }
    // Fulfil with the unit value (). The previous code passed the `Unit`
    // companion object and relied on value discarding to coerce it.
    p.setValue(())
  }
}
135 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/SuperColumnFamily.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import com.twitter.cassie.clocks.{ MicrosecondEpochClock, Clock }
18 | import com.twitter.cassie.codecs.{ Codec }
19 | import com.twitter.cassie.connection.ClientProvider
20 | import com.twitter.cassie.util.ByteBufferUtil.EMPTY
21 | import com.twitter.cassie.util.FutureUtil.timeFutureWithFailures
22 | import com.twitter.finagle.stats.StatsReceiver
23 | import org.slf4j.LoggerFactory
24 | import com.twitter.util.Future
25 | import java.nio.ByteBuffer
26 | import java.util.Collections.{ singleton => singletonJSet }
27 | import java.util.{ArrayList => JArrayList, HashMap => JHashMap, List => JList, Map => JMap, Set => JSet }
28 | import org.apache.cassandra.finagle.thrift
29 | import scala.collection.JavaConversions._ // TODO get rid of this
30 |
31 | /**
32 | * A readable, writable column family with batching capabilities. This is a
33 | * lightweight object: it inherits a connection pool from the Keyspace.
34 | *
35 | * Note that this implementation is the minimal set we've needed in production. We've done this
36 | * because we hope that SuperColumns can be obsoleted in the future.
37 | */
object SuperColumnFamily {
  // Class-wide logger shared by all instances. Declared implicit — NOTE(review):
  // presumably consumed by helpers taking an implicit Logger; confirm it is needed.
  private implicit val log = LoggerFactory.getLogger(this.getClass)
}
41 |
case class SuperColumnFamily[Key, Name, SubName, Value](
  keyspace: String,
  name: String,
  provider: ClientProvider,
  keyCodec: Codec[Key],
  nameCodec: Codec[Name],
  subNameCodec: Codec[SubName],
  valueCodec: Codec[Value],
  stats: StatsReceiver,
  readConsistency: ReadConsistency = ReadConsistency.Quorum,
  writeConsistency: WriteConsistency = WriteConsistency.Quorum
) extends BaseColumnFamily(keyspace, name, provider, stats) {

  import SuperColumnFamily._
  import BaseColumnFamily._

  type This = SuperColumnFamily[Key, Name, SubName, Value]

  // Timestamp source for writes; a var so tests can substitute a deterministic clock.
  private[cassie] var clock: Clock = MicrosecondEpochClock

  /** Returns a copy of this CF that reads at consistency `rc` (case-class copy; original unchanged). */
  def consistency(rc: ReadConsistency): This = copy(readConsistency = rc)
  /** Returns a copy of this CF that writes at consistency `wc`. */
  def consistency(wc: WriteConsistency): This = copy(writeConsistency = wc)

  /** Writes `column` under super column `superColumn` in row `key`, timestamped from `clock`. */
  def insert(key: Key, superColumn: Name, column: Column[SubName, Value]): Future[Void] = {
    // Future { ... }.flatten keeps encoding errors inside the returned Future.
    Future {
      val cp = (new thrift.ColumnParent(name)).setSuper_column(nameCodec.encode(superColumn))
      val col = Column.convert(subNameCodec, valueCodec, clock, column)
      val keyEncoded = keyCodec.encode(key)
      withConnection(
        "insert",
        Map("key" -> keyEncoded, "col" -> col.name, "writeconsistency" -> writeConsistency.toString),
        Seq(keyspace, key, cp, col, writeConsistency.level)
      ) {
        _.insert(keyEncoded, cp, col, writeConsistency.level)
      }
    }.flatten
  }

  /** Reads the entire row `key`: every super column with all of its sub columns. */
  def getRow(key: Key): Future[Seq[(Name, Seq[Column[SubName, Value]])]] = {
    getRowSlice(key, None, None, Int.MaxValue, Order.Normal)
  }

  /** Reads up to `count` super columns of row `key` between optional bounds, in `order`. */
  def getRowSlice(key: Key, start: Option[Name], end: Option[Name], count: Int,
    order: Order): Future[Seq[(Name, Seq[Column[SubName, Value]])]] = {
    Future {
      getOrderedSlice(key, start, end, count, order)
    }.flatten
  }

  /** Reads the entire row for each key in `keys`. */
  def multigetRow(keys: JSet[Key]): Future[JMap[Key, Seq[(Name, Seq[Column[SubName, Value]])]]] = {
    multigetRowSlice(keys, None, None, Int.MaxValue, Order.Normal)
  }

  /** Reads a bounded slice of super columns for each key in `keys`. */
  def multigetRowSlice(keys: JSet[Key], start: Option[Name], end: Option[Name], count: Int,
    order: Order): Future[JMap[Key, Seq[(Name, Seq[Column[SubName, Value]])]]] = {
    Future {
      multigetSlice(keys, start, end, count, order)
    }.flatten
  }

  // Issues a single-row get_slice and decodes each ColumnOrSuperColumn into
  // (super column name, decoded sub columns).
  private def getOrderedSlice(key: Key, start: Option[Name], end: Option[Name], size: Int, order: Order): Future[Seq[(Name, Seq[Column[SubName, Value]])]] = {
    Future {
      val pred = sliceRangePredicate(start, end, order, size)
      val cp = new thrift.ColumnParent(name)
      val keyEncoded = keyCodec.encode(key)
      withConnection(
        "get_slice",
        Map("key" -> keyEncoded, "predicate" -> annPredCodec.encode(pred), "readconsistency" -> readConsistency.toString),
        Seq(keyspace, key, cp, pred, readConsistency.level)
      ) {
        _.get_slice(keyEncoded, cp, pred, readConsistency.level)
      } map { result =>
        result.map { cosc =>
          val sc = cosc.getSuper_column()
          (nameCodec.decode(sc.name), sc.columns.map(Column.convert(subNameCodec, valueCodec, _)))
        }
      }
    }.flatten
  }

  // Issues a multiget_slice for all keys at once and decodes the result map.
  private def multigetSlice(keys: JSet[Key], start: Option[Name], end: Option[Name],size: Int,
    order: Order): Future[JMap[Key, Seq[(Name, Seq[Column[SubName, Value]])]]] = {
    val pred = sliceRangePredicate(start, end, order, size)
    val cp = new thrift.ColumnParent(name)
    val keyEncoded = keyCodec.encodeSet(keys)
    withConnection(
      "multiget_slice",
      Map("key" -> keyEncoded, "predicate" -> annPredCodec.encode(pred), "readconsistency" -> readConsistency.toString),
      Seq(keyspace, keys, cp, pred, readConsistency.level)
    ) {
      _.multiget_slice(keyEncoded, cp, pred, readConsistency.level)
    } map { result =>
      val rows: JMap[Key, Seq[(Name, Seq[Column[SubName, Value]])]] = new JHashMap(result.size)
      // Fold re-keys the thrift map by decoded Key; memo is the same JHashMap throughout.
      result.foldLeft(rows) {
        case (memo, (key, coscList)) =>
          memo(keyCodec.decode(key)) = coscList.map { cosc =>
            val sc = cosc.getSuper_column()
            (nameCodec.decode(sc.name), sc.columns.map(Column.convert(subNameCodec, valueCodec, _)))
          }
          memo
      }
    }
  }

  /** Removes the entire row `key`, timestamped from `clock`. */
  def removeRow(key: Key): Future[Void] = {
    val cp = new thrift.ColumnPath(name)
    val ts = clock.timestamp
    val keyEncoded = keyCodec.encode(key)
    withConnection(
      "remove",
      Map("key" -> keyEncoded, "timestamp" -> ts, "writeconsistency" -> writeConsistency.toString),
      Seq(keyspace, key, cp, ts, writeConsistency.level)
    ) {
      _.remove(keyEncoded, cp, ts, writeConsistency.level)
    }
  }

  // Builds a slice-range predicate; unset bounds encode as the empty buffer
  // (unbounded). The thrift setter returns the predicate, which is the result.
  private def sliceRangePredicate(startColumnName: Option[Name], endColumnName: Option[Name], order: Order, count: Int) = {
    val startBytes = startColumnName.map { c => nameCodec.encode(c) }.getOrElse(EMPTY)
    val endBytes = endColumnName.map { c => nameCodec.encode(c) }.getOrElse(EMPTY)
    val pred = new thrift.SlicePredicate()
    pred.setSlice_range(new thrift.SliceRange(startBytes, endBytes, order.reversed, count))
  }

  // Predicate selecting explicitly named columns. NOTE(review): appears unused
  // within this file — confirm callers before removing.
  private def sliceRangePredicate(columnNames: JSet[Name]) = {
    new thrift.SlicePredicate().setColumn_names(nameCodec.encodeSet(columnNames))
  }
}
170 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/SuperCounterBatchMutationBuilder.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import com.twitter.util.Future
18 | import java.util.{ArrayList => JArrayList}
19 | import org.apache.cassandra.finagle.thrift
20 |
/** Accumulates counter increments for a super counter CF and submits them as one batch. */
class SuperCounterBatchMutationBuilder[Key, Name, SubName](cf: SuperCounterColumnFamily[Key, Name, SubName]) extends BatchMutation {

  /** Queues an increment of `column` under super column `name` in row `key`; returns this builder. */
  def insert(key: Key, name: Name, column: CounterColumn[SubName]) = synchronized {
    putMutation(cf.keyCodec.encode(key), cf.name, insertMutation(key, name, column))
    this
  }

  /**
   * Submits the batch of operations, returning a future to allow blocking for success.
   */
  def execute(): Future[Void] = {
    if (mutations.isEmpty) {
      Future.Void
    } else {
      Future {
        cf.batch(mutations)
      }.flatten
    }
  }

  /** Wraps one counter column in the thrift super-column mutation envelope. */
  private[this] def insertMutation(key: Key, name: Name, column: CounterColumn[SubName]): thrift.Mutation = {
    val counter = new thrift.CounterColumn(cf.subNameCodec.encode(column.name), column.value)
    val columns = new JArrayList[thrift.CounterColumn]()
    columns.add(counter)
    val superColumn = new thrift.CounterSuperColumn(cf.nameCodec.encode(name), columns)
    // Thrift setters return the struct itself, so these chain.
    val cosc = new thrift.ColumnOrSuperColumn().setCounter_super_column(superColumn)
    new thrift.Mutation().setColumn_or_supercolumn(cosc)
  }
}
52 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/SuperCounterColumnFamily.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import com.twitter.cassie.codecs.Codec
18 | import com.twitter.cassie.connection.ClientProvider
19 | import com.twitter.cassie.util.ByteBufferUtil.EMPTY
20 | import com.twitter.cassie.util.FutureUtil.timeFutureWithFailures
21 | import com.twitter.finagle.stats.{ StatsReceiver, NullStatsReceiver }
22 | import org.slf4j.LoggerFactory
23 | import com.twitter.util.Future
24 | import java.nio.ByteBuffer
25 | import java.util.Collections.{ singleton => singletonJSet }
26 | import java.util.{ArrayList => JArrayList,HashMap => JHashMap,Iterator => JIterator,List => JList,Map => JMap,Set => JSet}
27 | import org.apache.cassandra.finagle.thrift
28 | import scala.collection.JavaConversions._
29 |
object SuperCounterColumnFamily {
  // Class-wide logger shared by all instances. Declared implicit — NOTE(review):
  // presumably consumed by helpers taking an implicit Logger; confirm it is needed.
  private implicit val log = LoggerFactory.getLogger(this.getClass)
}
33 |
34 | /*
35 | * Note that this implementation is the minimal set we've needed in production. We've done this
36 | * because we hope that SuperColumns can be obsoleted in the future.
37 | */
case class SuperCounterColumnFamily[Key, Name, SubName](
  keyspace: String,
  name: String,
  provider: ClientProvider,
  keyCodec: Codec[Key],
  nameCodec: Codec[Name],
  subNameCodec: Codec[SubName],
  stats: StatsReceiver,
  readConsistency: ReadConsistency = ReadConsistency.Quorum,
  writeConsistency: WriteConsistency = WriteConsistency.One
) extends BaseColumnFamily(keyspace, name, provider, stats) {

  import SuperCounterColumnFamily._
  import BaseColumnFamily._

  type This = SuperCounterColumnFamily[Key, Name, SubName]

  /** Returns a copy of this CF that reads at consistency `rc`. */
  def consistency(rc: ReadConsistency): This = copy(readConsistency = rc)
  /** Returns a copy of this CF that writes at consistency `wc`. */
  def consistency(wc: WriteConsistency): This = copy(writeConsistency = wc)

  /** Reads, for each key, all super columns named in [start, end] with their counter columns. */
  def multigetSlices(keys: JSet[Key], start: Name, end: Name): Future[JMap[Key, JMap[Name, JMap[SubName, CounterColumn[SubName]]]]] = {
    Future {
      val pred = sliceRangePredicate(Some(start), Some(end), Order.Normal, Int.MaxValue)
      multigetSlice(keys, pred)
    }.flatten
  }

  // Builds a slice-range predicate; unset bounds encode as the empty buffer
  // (unbounded). The thrift setter returns the predicate, which is the result.
  private def sliceRangePredicate(startColumnName: Option[Name], endColumnName: Option[Name], order: Order, count: Int) = {
    val startBytes = startColumnName.map { c => nameCodec.encode(c) }.getOrElse(EMPTY)
    val endBytes = endColumnName.map { c => nameCodec.encode(c) }.getOrElse(EMPTY)
    val pred = new thrift.SlicePredicate()
    pred.setSlice_range(new thrift.SliceRange(startBytes, endBytes, order.reversed, count))
  }

  // Issues the multiget_slice and decodes the nested thrift structures into
  // key -> super column name -> sub column name -> CounterColumn.
  private def multigetSlice(keys: JSet[Key], pred: thrift.SlicePredicate): Future[JMap[Key, JMap[Name, JMap[SubName, CounterColumn[SubName]]]]] = {
    val cp = new thrift.ColumnParent(name)
    val encodedKeys = keyCodec.encodeSet(keys)
    withConnection(
      "multiget_slice",
      Map("keys" -> encodedKeys, "predicate" -> annPredCodec.encode(pred), "readconsistency" -> readConsistency.toString),
      Seq(keyspace, keys, cp, pred, readConsistency.level)
    ) {
      _.multiget_slice(encodedKeys, cp, pred, readConsistency.level)
    }.map { result =>
      val rows: JMap[Key, JMap[Name, JMap[SubName, CounterColumn[SubName]]]] = new JHashMap(result.size)
      for (rowEntry <- collectionAsScalaIterable(result.entrySet)) {
        val sCols: JMap[Name, JMap[SubName, CounterColumn[SubName]]] = new JHashMap(rowEntry.getValue.size)
        for (scol <- collectionAsScalaIterable(rowEntry.getValue)) {
          val cols: JMap[SubName, CounterColumn[SubName]] = new JHashMap(scol.getCounter_super_column.columns.size)
          for (counter <- collectionAsScalaIterable(scol.getCounter_super_column().columns)) {
            val col = CounterColumn.convert(subNameCodec, counter)
            cols.put(col.name, col)
          }
          // FIX: the thrift-generated ByteBuffer accessor is bufferForName(),
          // not BufferForName() — the capitalized form does not exist.
          sCols.put(nameCodec.decode(scol.getCounter_super_column.bufferForName()), cols)
        }
        rows.put(keyCodec.decode(rowEntry.getKey), sCols)
      }
      rows
    }
  }

  /** Returns a new batch builder targeting this column family. */
  def batch(): SuperCounterBatchMutationBuilder[Key, Name, SubName] =
    new SuperCounterBatchMutationBuilder(this)

  // Sends an already-merged mutation map as one batch_mutate call.
  private[cassie] def batch(mutations: JMap[ByteBuffer, JMap[String, JList[thrift.Mutation]]]) = {
    withConnection(
      "batch_mutate",
      Map("writeconsistency" -> writeConsistency.toString),
      Seq(keyspace, mutations, writeConsistency.level)
    ) {
      _.batch_mutate(mutations, writeConsistency.level)
    }
  }
}
112 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/WriteConsistency.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import org.apache.cassandra.finagle.thrift
18 |
19 | /**
20 | * The level of consistency required for a write operation.
21 | */
22 | sealed case class WriteConsistency(level: thrift.ConsistencyLevel) {
23 | override def toString = "WriteConsistency." +
24 | level.toString.toLowerCase.capitalize
25 | }
26 |
object WriteConsistency {
  /**
   * Ensure that the write has been written to at least 1 node, including hinted
   * recipients.
   */
  val Any = WriteConsistency(thrift.ConsistencyLevel.ANY)

  /**
   * Ensure that the write has been written to at least 1 node's commit log and
   * memory table before responding to the client.
   */
  val One = WriteConsistency(thrift.ConsistencyLevel.ONE)

  /**
   * Ensure that the write has been written to ReplicationFactor / 2 + 1 nodes
   * before responding to the client.
   */
  val Quorum = WriteConsistency(thrift.ConsistencyLevel.QUORUM)

  /**
   * Ensure that the write has been written to a quorum of replicas within the
   * local datacenter before responding to the client. Requires
   * NetworkTopologyStrategy on the server side.
   */
  val LocalQuorum = WriteConsistency(thrift.ConsistencyLevel.LOCAL_QUORUM)

  /**
   * Ensure that the write has been written to a quorum of replicas within each
   * datacenter before responding to the client.
   */
  val EachQuorum = WriteConsistency(thrift.ConsistencyLevel.EACH_QUORUM)

  /**
   * Ensure that the write is written to all ReplicationFactor nodes before
   * responding to the client. Any unresponsive nodes will fail the operation.
   */
  val All = WriteConsistency(thrift.ConsistencyLevel.ALL)
}
64 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/clocks/Clock.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.clocks
16 |
17 | /**
18 | * A clock which returns a 64-bit timestamp.
19 | */
20 | trait Clock {
21 | def timestamp: Long
22 |
23 | /** To conveniently get the singleton/Object from Java. */
24 | def get(): Clock = this
25 | }
26 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/clocks/MicrosecondEpochClock.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.clocks
16 |
17 | /**
18 | * A clock which returns the time since Jan 1, 1970 UTC in microseconds.
19 | *
20 | * N.B.: This doesn't actually return microseconds, since few platforms actually
21 | * have reliable access to microsecond-accuracy clocks. What it does return is
22 | * the time in milliseconds, multiplied by 1000. That said, it *is* strictly
23 | * increasing, so that even if your calls to MicrosecondEpochClock#timestamp
24 | * occur within a single millisecond, the timestamps will be ordered
25 | * appropriately.
26 | */
27 | object MicrosecondEpochClock extends StrictlyIncreasingClock {
28 | protected def tick = System.currentTimeMillis * 1000
29 | }
30 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/clocks/StrictlyIncreasingClock.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.clocks
16 |
17 | import java.util.concurrent.atomic.AtomicLong
18 |
19 | /**
20 | * A concurrent, strictly-increasing clock.
21 | */
22 | abstract class StrictlyIncreasingClock extends Clock {
23 | private val counter = new AtomicLong(tick)
24 |
25 | def timestamp: Long = {
26 | var newTime: Long = 0
27 | while (newTime == 0) {
28 | val last = counter.get
29 | val current = tick
30 | val next = if (current > last) current else last + 1
31 | if (counter.compareAndSet(last, next)) {
32 | newTime = next
33 | }
34 | }
35 | return newTime
36 | }
37 |
38 | protected def tick: Long
39 | }
40 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/codecs/ByteArrayCodec.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.codecs
16 |
17 | import java.nio.ByteBuffer
18 |
19 | /**
20 | * An identity encoding.
21 | *
22 | * TODO: Fix name.
23 | */
24 | object ByteArrayCodec extends Codec[ByteBuffer] {
25 | def encode(obj: ByteBuffer) = obj
26 | def decode(ary: ByteBuffer) = ary
27 | }
28 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/codecs/Codec.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.codecs
16 |
17 | import java.nio.ByteBuffer
18 | import scala.collection.JavaConversions.collectionAsScalaIterable
19 | import java.util.{ ArrayList => JArrayList, Set => JSet, List => JList }
20 |
21 | /**
22 | * A bidirection encoding for column names or values.
23 | */
24 | trait Codec[A] {
25 | def encode(obj: A): ByteBuffer
26 | def decode(ary: ByteBuffer): A
27 |
28 | /** To conveniently get the singleton/Object from Java. */
29 | def get() = this
30 |
31 | /** Helpers for conversion from ByteBuffers to byte arrays. Keep explicit! */
32 | def b2b(buff: ByteBuffer): Array[Byte] = {
33 | val bytes = new Array[Byte](buff.remaining)
34 | buff.duplicate.get(bytes)
35 | bytes
36 | }
37 | def b2b(array: Array[Byte]): ByteBuffer = ByteBuffer.wrap(array)
38 |
39 | def encodeSet(values: JSet[A]): JList[ByteBuffer] = {
40 | val output = new JArrayList[ByteBuffer](values.size)
41 | for (value <- collectionAsScalaIterable(values))
42 | output.add(encode(value))
43 | output
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/codecs/IntCodec.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.codecs
16 |
17 | import java.nio.ByteBuffer
18 |
19 | /**
20 | * Encodes and decodes 32-bit integers as 4-byte, big-endian byte buffers.
21 | */
22 | object IntCodec extends Codec[Int] {
23 | private val length = 4
24 |
25 | def encode(v: Int) = {
26 | val buf = ByteBuffer.allocate(length)
27 | buf.putInt(v)
28 | buf.rewind
29 | buf
30 | }
31 |
32 | def decode(buf: ByteBuffer) = {
33 | require(buf.remaining == length)
34 | buf.duplicate().getInt
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/codecs/LegacyUtf8Codec.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.codecs
16 |
17 | import java.nio.ByteBuffer
18 | import java.lang.String
19 | import org.apache.thrift.bootleg.Utf8Helper
20 |
21 | /**
22 | * Encodes and decodes values as UTF-8 strings.
23 | */
24 | @deprecated("""Use the new Utf8Codec if you can. You may need to use this for backwards
25 | compatability with your stored data. This should only be a problem if you
26 | use codepoints outside the BMP.""", "0.15.0")
27 | object LegacyUtf8Codec extends Codec[String] {
28 | @deprecated("""Use the new Utf8Codec if you can. You may need to use this for backwards
29 | compatability with your stored data. This should only be a problem if you
30 | use codepoints outside the BMP.""", "0.15.0")
31 | def encode(s: String) = b2b(Utf8Helper.encode(s))
32 | @deprecated("""Use the new Utf8Codec if you can. You may need to use this for backwards
33 | compatability with your stored data. This should only be a problem if you
34 | use codepoints outside the BMP.""", "0.15.0")
35 | def decode(ary: ByteBuffer) = Utf8Helper.decode(b2b(ary))
36 | }
37 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/codecs/LexicalUUIDCodec.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.codecs
16 |
17 | import com.twitter.cassie.types.LexicalUUID
18 | import java.nio.ByteBuffer
19 |
20 | /**
21 | * Encodes and decodes UUIDs as 128-bit values.
22 | */
23 | object LexicalUUIDCodec extends Codec[LexicalUUID] {
24 | private val length = 16
25 |
26 | def encode(uuid: LexicalUUID) = {
27 | val b = ByteBuffer.allocate(length)
28 | b.putLong(uuid.timestamp)
29 | b.putLong(uuid.workerID)
30 | b.rewind
31 | b
32 | }
33 |
34 | def decode(buf: ByteBuffer) = {
35 | require(buf.remaining == length)
36 | val dupe = buf.duplicate
37 | LexicalUUID(dupe.getLong(), dupe.getLong())
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/codecs/LongCodec.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.codecs
16 |
17 | import java.nio.ByteBuffer
18 |
19 | /**
20 | * Encodes and decodes 64-bit integers as 8-byte, big-endian byte arrays.
21 | */
22 | object LongCodec extends Codec[Long] {
23 | private val length = 8
24 |
25 | def encode(v: Long) = {
26 | val b = ByteBuffer.allocate(length)
27 | b.putLong(v)
28 | b.rewind
29 | b
30 | }
31 |
32 | def decode(buf: ByteBuffer) = {
33 | require(buf.remaining == length)
34 | buf.duplicate.getLong
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/codecs/ThriftCodec.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.codecs
16 |
17 | import org.apache.thrift._
18 | import org.apache.thrift.protocol._
19 | import org.apache.thrift.transport._
20 | import java.nio.ByteBuffer
21 | import java.io._
22 |
// TODO move to util-thrift
/**
 * Encodes and decodes thrift structs of type `T` using the thrift binary
 * protocol. Streams and protocol objects are cached per-thread in
 * thread-locals, so a single codec instance can be shared across threads
 * without synchronization.
 */
class ThriftCodec[T <: TBase[_, _]](klass: Class[T]) extends Codec[T] {

  // java.lang.ThreadLocal with a by-name initializer.
  // NOTE: this type parameter T shadows the outer class's T.
  class ThreadLocal[T](init: => T) extends java.lang.ThreadLocal[T] {
    override def initialValue: T = init
  }
  // Implicitly unwraps a ThreadLocal to its per-thread value (calls .get),
  // letting the vals below be used as if they were the contained objects.
  implicit def getThreadLocal[S](tl: ThreadLocal[S]): S = tl.get

  val thriftProtocolFactory = new ThreadLocal(new TBinaryProtocol.Factory())
  val outputStream = new ThreadLocal(new ByteArrayOutputStream())
  val outputProtocol = new ThreadLocal(thriftProtocolFactory.getProtocol(new TIOStreamTransport(outputStream)))
  // Input stream that can be re-pointed at a new byte array in place,
  // reusing the per-thread stream/protocol rather than reallocating.
  val inputStream = new ThreadLocal(new ByteArrayInputStream(Array.empty[Byte]) {
    def refill(ary: Array[Byte]) {
      buf = ary
      pos = 0
      mark = 0
      count = buf.length
    }
  })
  val inputProtocol = new ThreadLocal(thriftProtocolFactory.getProtocol(new TIOStreamTransport(inputStream)))

  // Serializes `t` through this thread's output stream, then copies the bytes out.
  def encode(t: T) = {
    outputStream.reset
    t.write(outputProtocol)
    b2b(outputStream.toByteArray)
  }

  // Deserializes a fresh instance of `klass` from the remaining bytes of `ary`.
  def decode(ary: ByteBuffer) = {
    inputStream.refill(b2b(ary))
    val out = klass.newInstance
    out.read(inputProtocol)
    out
  }
}
57 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/codecs/Utf8Codec.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.codecs
16 |
17 | import java.nio.ByteBuffer
18 | import org.apache.thrift.bootleg.Utf8Helper
19 |
20 | /**
21 | * Encodes and decodes values as UTF-8 strings.
22 | */
23 | object Utf8Codec extends Codec[String] {
24 | def encode(s: String) = b2b(s.getBytes("UTF-8"))
25 | def decode(ary: ByteBuffer) = new String(ary.array, ary.position, ary.remaining, "UTF-8")
26 | }
27 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/connection/CCluster.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.connection
16 |
17 | import com.twitter.finagle.builder.{ StaticCluster => FStaticCluster, Cluster => FCluster }
18 | import com.twitter.finagle.ServiceFactory
19 | import java.net.SocketAddress
20 |
/**
 * A finagle cluster of Cassandra hosts that can additionally be shut down
 * (releasing any resources the implementation holds).
 */
trait CCluster[T] extends FCluster[T] {
  def close
}
24 |
25 | /**
26 | * A cassandra cluster specified by socket addresses. No remapping.
27 | */
28 | class SocketAddressCluster(private[this] val underlying: Seq[SocketAddress])
29 | extends FStaticCluster[SocketAddress](underlying) with CCluster[SocketAddress] {
30 | def close() = ()
31 | }
32 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/connection/ClientProvider.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.connection
16 |
17 | import com.twitter.util.Future
18 | import org.apache.cassandra.finagle.thrift.Cassandra.ServiceToClient
19 |
20 | /**
21 | * A utility interface for classes which pass a Cassandra `Client` instance to
22 | * a function and return the result.
23 | */
24 | trait ClientProvider {
25 | /**
26 | * Passes a Cassandra `ServiceToClient` instance to the given function and returns a
27 | * future which will be the client's response.
28 | *
29 | * @tparam A the result type
30 | * @param f the function to which the `ServiceToClient` is passed
31 | * @return `f(client)`
32 | */
33 | def map[A](f: ServiceToClient => Future[A]): Future[A]
34 |
35 | /**
36 | * Releases any resources held by the provider.
37 | */
38 | def close() = {}
39 | }
40 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/connection/ClusterClientProvider.scala:
--------------------------------------------------------------------------------
1 | package com.twitter.cassie.connection
2 |
3 | // Copyright 2012 Twitter, Inc.
4 |
5 | // Licensed under the Apache License, Version 2.0 (the "License");
6 | // you may not use this file except in compliance with the License.
7 | // You may obtain a copy of the License at
8 |
9 | // http://www.apache.org/licenses/LICENSE-2.0
10 |
11 | // Unless required by applicable law or agreed to in writing, software
12 | // distributed under the License is distributed on an "AS IS" BASIS,
13 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | // See the License for the specific language governing permissions and
15 | // limitations under the License.
16 |
17 | import com.twitter.finagle.builder.ClientBuilder
18 | import com.twitter.finagle.ServiceFactory
19 | import com.twitter.finagle.service.{ Backoff, RetryPolicy => FinagleRetryPolicy }
20 | import com.twitter.finagle.stats.StatsReceiver
21 | import com.twitter.finagle.thrift.{ ThriftClientRequest, ThriftClientFramedCodec }
22 | import com.twitter.finagle.tracing.Tracer
23 | import com.twitter.finagle.{ ChannelException, CodecFactory, ClientCodecConfig, RequestTimeoutException, WriteException }
24 | import com.twitter.util.{ Duration, Future, Throw, Timer, TimerTask, Time, Try }
25 | import java.net.SocketAddress
26 | import java.util.concurrent.TimeUnit
27 | import org.apache.cassandra.finagle.thrift.Cassandra.ServiceToClient
28 | import org.apache.cassandra.finagle.thrift.{ UnavailableException, TimedOutException }
29 | import org.apache.thrift.protocol.{ TBinaryProtocol, TProtocolFactory }
30 |
/**
 * Determines which request failures may be retried.
 *
 * Implemented as a sealed abstract class with two distinct singleton
 * instances rather than the previous zero-field `sealed case class
 * RetryPolicy()`: a case class with no fields makes ALL instances
 * structurally equal, so `RetryPolicy.Idempotent == RetryPolicy.NonIdempotent`
 * held and stable-identifier pattern matches on one policy also matched the
 * other. Plain classes compare by reference, keeping the two policies distinct.
 */
sealed abstract class RetryPolicy

object RetryPolicy {
  /** Requests may be retried on any retryable failure, including server-side timeouts. */
  val Idempotent: RetryPolicy = new RetryPolicy {}

  /** Only failures known to precede delivery (write failures, unavailability) are retried. */
  val NonIdempotent: RetryPolicy = new RetryPolicy {}
}
37 |
/**
 * A ClientProvider backed by a single finagle thrift client over a cluster of
 * Cassandra hosts. Configures pooling, timeouts, retries (per the given
 * RetryPolicy), stats and tracing, and sets the keyspace once on each newly
 * established connection.
 */
private[cassie] class ClusterClientProvider(
  val hosts: CCluster[SocketAddress],
  val keyspace: String,
  val retries: Int,
  val timeout: Duration,
  val requestTimeout: Duration,
  val connectTimeout: Duration,
  val minConnectionsPerHost: Int,
  val maxConnectionsPerHost: Int,
  val hostConnectionMaxWaiters: Int,
  val statsReceiver: StatsReceiver,
  val tracerFactory: Tracer.Factory,
  val retryPolicy: RetryPolicy = RetryPolicy.Idempotent,
  val failFast: Boolean = true
) extends ClientProvider {

  // RetryPolicy.backoff requires an implicit timer, but with constant
  // zero-duration backoffs it is not expected to schedule anything
  // (NOTE(review): assumption — the timer throws loudly if it ever is used).
  implicit val fakeTimer = new Timer {
    def schedule(when: Time)(f: => Unit): TimerTask = throw new Exception("illegal use!")
    def schedule(when: Time, period: Duration)(f: => Unit): TimerTask = throw new Exception("illegal use!")
    def stop() { throw new Exception("illegal use!") }
  }

  /** Record the given exception, and return true. */
  private def recordRetryable(e: Exception): Boolean = {
    statsReceiver.counter(e.getClass.getSimpleName()).incr()
    true
  }

  // Which failures are retried, up to `retries` attempts with no delay between them.
  val finagleRetryPolicy: FinagleRetryPolicy[Try[Nothing]] = retryPolicy match {
    case RetryPolicy.Idempotent =>
      FinagleRetryPolicy.backoff(Backoff.const(Duration(0, TimeUnit.MILLISECONDS)) take (retries)) {
        case Throw(x: WriteException) => recordRetryable(x)
        case Throw(x: RequestTimeoutException) => recordRetryable(x)
        case Throw(x: ChannelException) => recordRetryable(x)
        case Throw(x: UnavailableException) => recordRetryable(x)
        // TODO: if this is a legit serverside timeout, then we should be careful about retrying, since the
        // serverside timeout is ideally set to just a smidgeon below our client timeout, and we would thus
        // wait a lot of extra time
        case Throw(x: TimedOutException) => recordRetryable(x)
        // TODO: do we need to retry IndividualRequestTimeoutException?
      }
    case RetryPolicy.NonIdempotent =>
      // Non-idempotent requests may have partially applied; only retry failures
      // that occurred before the request could have reached a node.
      FinagleRetryPolicy.backoff(Backoff.const(Duration(0, TimeUnit.MILLISECONDS)) take (retries)) {
        case Throw(x: WriteException) => recordRetryable(x)
        case Throw(x: UnavailableException) => recordRetryable(x)
      }
  }

  // The shared finagle service through which all requests to the cluster flow.
  private val service = ClientBuilder()
    .cluster(hosts)
    .name("cassie")
    .codec(CassandraThriftFramedCodec())
    .retryPolicy(finagleRetryPolicy)
    .timeout(timeout)
    .requestTimeout(requestTimeout)
    .connectTimeout(connectTimeout)
    .tcpConnectTimeout(connectTimeout)
    .hostConnectionCoresize(minConnectionsPerHost)
    .hostConnectionLimit(maxConnectionsPerHost)
    .reportTo(statsReceiver)
    .tracerFactory(tracerFactory)
    .hostConnectionMaxWaiters(hostConnectionMaxWaiters)
    .expFailFast(failFast)
    .build()

  // Thrift client facade over the finagle service; one instance serves all callers.
  private val client = new ServiceToClient(service, new TBinaryProtocol.Factory())

  def map[A](f: ServiceToClient => Future[A]) = f(client)

  // Shuts down the host cluster first, then releases the finagle service.
  override def close(): Unit = {
    hosts.close
    service.release()
    ()
  }

  /**
   * Convenience methods for passing in a codec factory.
   */
  object CassandraThriftFramedCodec {
    def apply() = new CassandraThriftFramedCodecFactory
    def get() = apply()
  }

  /**
   * Create a CassandraThriftFramedCodec with a BinaryProtocol
   */
  class CassandraThriftFramedCodecFactory
    extends CodecFactory[ThriftClientRequest, Array[Byte]]#Client {
    def apply(config: ClientCodecConfig) = {
      new CassandraThriftFramedCodec(new TBinaryProtocol.Factory(), config)
    }
  }

  // Framed thrift codec that issues set_keyspace on every new connection
  // before handing it to the request pipeline.
  class CassandraThriftFramedCodec(protocolFactory: TProtocolFactory, config: ClientCodecConfig)
    extends ThriftClientFramedCodec(protocolFactory: TProtocolFactory, config: ClientCodecConfig, None, false) {
    override def prepareConnFactory(factory: ServiceFactory[ThriftClientRequest, Array[Byte]]) = {
      val keyspacedSetFactory = factory flatMap { service =>
        val client = new ServiceToClient(service, new TBinaryProtocol.Factory())
        client.set_keyspace(keyspace) map { _ => service }
      }
      // set up tracing
      super.prepareConnFactory(keyspacedSetFactory)
    }
  }
}
143 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/types.scala:
--------------------------------------------------------------------------------
1 | package com.twitter.cassie
2 |
3 | import types._
4 |
5 | /**
6 | * Implicit conversions for all of Cassie's special types.
7 | */
8 | package object types {
9 | implicit def String2LexicalUUID(s: String): LexicalUUID = LexicalUUID(s)
10 | implicit def LexicalUUID2String(uuid: LexicalUUID): String = uuid.toString
11 | }
12 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/types/LexicalUUID.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.types
16 |
17 | import com.twitter.cassie.clocks.Clock
18 | import com.twitter.cassie.FNV1A
19 | import java.net.InetAddress.{ getLocalHost => localHost }
20 | import java.nio.ByteBuffer
21 | import org.apache.commons.codec.binary.Hex.decodeHex
22 |
object LexicalUUID {
  // Worker ID derived once from a hash of this machine's hostname.
  private val defaultWorkerID = FNV1A(localHost.getHostName.getBytes)

  /**
   * Given a clock, generates a new LexicalUUID, using a hash of the machine's
   * hostname as a worker ID.
   */
  def apply(clock: Clock): LexicalUUID =
    new LexicalUUID(clock, defaultWorkerID)

  /**
   * Parses a hex-formatted UUID string (dashes optional) into a LexicalUUID:
   * the first 8 bytes are the timestamp, the last 8 the worker ID.
   */
  def apply(uuid: String): LexicalUUID = {
    val raw = ByteBuffer.wrap(decodeHex(uuid.replace("-", "").toCharArray))
    new LexicalUUID(raw.getLong(), raw.getLong())
  }
}
41 |
42 | /**
43 | * A 128-bit UUID, composed of a 64-bit timestamp and a 64-bit worker ID.
44 | */
45 | case class LexicalUUID(timestamp: Long, workerID: Long) extends Ordered[LexicalUUID] {
46 |
47 | /**
48 | * Given a worker ID and a clock, generates a new LexicalUUID. If each node
49 | * has unique worker ID and a clock which is guaranteed to never go backwards,
50 | * then each generated UUID will be unique.
51 | */
52 | def this(clock: Clock, workerID: Long) = this(clock.timestamp, workerID)
53 |
54 | /**
55 | * Given a clock, generates a new LexicalUUID, using a hash of the machine's
56 | * hostname as a worker ID.
57 | */
58 | def this(clock: Clock) = this(clock.timestamp, LexicalUUID.defaultWorkerID)
59 |
60 | /**
61 | * Sort by timestamp, then by worker ID.
62 | */
63 | def compare(that: LexicalUUID) = {
64 | val res = timestamp.compare(that.timestamp)
65 | if (res == 0) {
66 | workerID.compare(that.workerID)
67 | } else {
68 | res
69 | }
70 | }
71 |
72 | override def toString = {
73 | val hex = "%016x".format(timestamp)
74 | "%s-%s-%s-%016x".format(hex.substring(0, 8),
75 | hex.substring(8, 12),
76 | hex.substring(12, 16), workerID)
77 | }
78 | }
79 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/types/ThriftEncoded.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.types
16 |
17 | import org.apache.thrift._
18 |
/** Wraps a Thrift-generated struct (any `TBase`) in a value type; presumably consumed by `ThriftCodec` for (de)serialization — TODO confirm against callers. */
case class ThriftEncoded[T <: TBase[_, _]](value: T)
20 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/util/ByteBufferUtil.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.util
16 | import java.nio.ByteBuffer
17 |
object ByteBufferUtil {
  // Shared zero-capacity buffer. Safe to share despite ByteBuffer's mutability
  // because there are no bytes to read/write and position can never advance.
  val EMPTY = ByteBuffer.allocate(0)
}
21 |
--------------------------------------------------------------------------------
/cassie-core/src/main/scala/com/twitter/cassie/util/FutureUtil.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.util
16 |
17 | import com.twitter.util.Future
18 | import com.twitter.finagle.stats.StatsReceiver
19 |
/** Helpers for instrumenting `Future`s with stats. */
object FutureUtil {

  /**
   * Times the future produced by `f` under `name` via the given stats receiver
   * and, whenever it fails, increments a counter named
   * "errors_<name>_<ExceptionSimpleName>".
   */
  def timeFutureWithFailures[T](stats: StatsReceiver, name: String)(f: => Future[T]): Future[T] = {
    val timed = stats.timeFuture(name)(f)
    timed.onFailure { t =>
      stats.counter("errors_%s_%s".format(name, t.getClass.getSimpleName)).incr()
    }
  }
}
29 |
--------------------------------------------------------------------------------
/cassie-core/src/test/java/com/twitter/cassie/MockCassandraClient.java:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie;
16 |
17 | import com.twitter.cassie.codecs.Utf8Codec;
18 | import com.twitter.cassie.ColumnFamily;
19 | import com.twitter.cassie.connection.ClientProvider;
20 | import com.twitter.cassie.ReadConsistency;
21 | import com.twitter.cassie.WriteConsistency;
22 | import com.twitter.util.Future;
23 | import com.twitter.util.Promise;
24 | import java.nio.ByteBuffer;
25 | import java.util.ArrayList;
26 | import java.util.HashMap;
27 | import java.util.List;
28 | import org.apache.cassandra.finagle.thrift.*;
29 | import org.junit.Before;
30 | import org.junit.Test;
31 | import static junit.framework.Assert.assertEquals;
32 | import static org.mockito.Mockito.*;
33 |
34 | import org.apache.cassandra.finagle.thrift.Cassandra.ServiceToClient;
35 |
36 | public final class MockCassandraClient {
37 | public final ServiceToClient client;
38 |
39 | public MockCassandraClient() {
40 | this.client = mock(ServiceToClient.class);
41 | }
42 |
43 | public static final class SimpleProvider implements ClientProvider {
44 | public final ServiceToClient client;
45 | public boolean closed = false;
46 | public SimpleProvider(ServiceToClient client) {
47 | this.client = client;
48 | }
49 | @Override
50 | public Future map(scala.Function1> func) {
51 | assert !closed;
52 | return func.apply(client);
53 | }
54 | @Override
55 | public void close() { closed = true; }
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/cassie-core/src/test/java/com/twitter/cassie/jtests/ClusterTest.java:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.jtests;
16 |
17 | import com.twitter.cassie.Cluster;
18 | import com.twitter.cassie.Keyspace;
19 | import com.twitter.finagle.stats.NullStatsReceiver$;
20 | import com.twitter.util.Duration;
21 | import java.nio.ByteBuffer;
22 | import org.junit.Before;
23 | import org.junit.Test;
24 | import static junit.framework.Assert.assertEquals;
25 |
26 | public class ClusterTest {
27 | public Cluster cluster;
28 |
29 | @Before
30 | public void before() throws Exception {
31 | cluster = new Cluster("host1,host2", NullStatsReceiver$.MODULE$).mapHostsEvery(new Duration(0));
32 | }
33 |
34 | @Test
35 | public void test() {
36 | Keyspace ks = cluster.keyspace("blah").connect();
37 | assertEquals(ks.name(), "blah");
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/cassie-core/src/test/java/com/twitter/cassie/jtests/ColumnFamilyTest.java:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.jtests;
16 |
17 | import com.twitter.cassie.codecs.Codec;
18 | import com.twitter.cassie.codecs.Utf8Codec;
19 | import com.twitter.cassie.ColumnFamily;
20 | import com.twitter.cassie.MockCassandraClient;
21 | import com.twitter.cassie.ReadConsistency;
22 | import com.twitter.cassie.WriteConsistency;
23 | import com.twitter.finagle.stats.NullStatsReceiver$;
24 | import java.nio.ByteBuffer;
25 | import org.apache.cassandra.finagle.thrift.*;
26 | import org.junit.Before;
27 | import org.junit.Test;
28 | import org.mockito.ArgumentCaptor;
29 | import static junit.framework.Assert.assertEquals;
30 | import static org.mockito.Matchers.*;
31 | import static org.mockito.Mockito.*;
32 |
33 | public class ColumnFamilyTest {
34 | protected MockCassandraClient mock;
35 |
36 | public final Codec codec = Utf8Codec.get();
37 |
38 | @Before
39 | public void before() throws Exception {
40 | mock = new MockCassandraClient();
41 | }
42 |
43 | @Test
44 | public void test() {
45 | ColumnFamily cf = new ColumnFamily("ks", "cf", new MockCassandraClient.SimpleProvider(mock.client),
46 | Utf8Codec.get(), Utf8Codec.get(), Utf8Codec.get(), NullStatsReceiver$.MODULE$,
47 | ReadConsistency.Quorum(), WriteConsistency.Quorum());
48 | cf.getColumn("key", "name");
49 | ColumnParent cp = new ColumnParent("cf");
50 | ArgumentCaptor pred = ArgumentCaptor.forClass(SlicePredicate.class);
51 | verify(mock.client).get_slice(eq(codec.encode("key")), eq(cp),
52 | pred.capture(), eq(ConsistencyLevel.QUORUM));
53 | for (ByteBuffer name : pred.getValue().getColumn_names()) {
54 | assertEquals(codec.decode(name), "name");
55 | }
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/cassie-core/src/test/java/com/twitter/cassie/jtests/examples/CassieRun.java:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.jtests.examples;
16 |
17 | import com.twitter.cassie.*;
18 | import com.twitter.cassie.clocks.MicrosecondEpochClock;
19 | import com.twitter.cassie.codecs.*;
20 | import com.twitter.cassie.types.*;
21 | import com.twitter.finagle.stats.NullStatsReceiver$;
22 | import com.twitter.util.Function2;
23 | import com.twitter.util.Function;
24 | import com.twitter.util.Future;
25 | import java.util.Arrays;
26 | import java.util.HashSet;
27 | import java.util.List;
28 |
29 | public final class CassieRun {
30 | public static HashSet Set(V... values) {
31 | return new HashSet(Arrays.asList(values));
32 | }
33 |
34 | public static void info(Object o) {
35 | System.out.println(o);
36 | }
37 |
38 | public static void main(String[] args) throws Exception {
39 | // create a cluster with a single seed from which to map keyspaces
40 | Cluster cluster = new Cluster("localhost", NullStatsReceiver$.MODULE$);
41 |
42 | // create a keyspace
43 | Keyspace keyspace = cluster.keyspace("Keyspace1").connect();
44 |
45 | // create a column family
46 | ColumnFamily cass = keyspace.columnFamily("Standard1", Utf8Codec.get(), Utf8Codec.get(), Utf8Codec.get());
47 |
48 | info("inserting some columns");
49 | //note that these calls are async, the apply() is where the waiting happens
50 | cass.insert("yay for me", cass.newColumn("name", "Coda")).apply();
51 | cass.insert("yay for me", cass.newColumn("motto", "Moar lean.")).apply();
52 |
53 | cass.insert("yay for you", cass.newColumn("name", "Niki")).apply();
54 | cass.insert("yay for you", cass.newColumn("motto", "Told ya.")).apply();
55 |
56 | cass.insert("yay for us", cass.newColumn("name", "Biscuit")).apply();
57 | cass.insert("yay for us", cass.newColumn("motto", "Mlalm.")).apply();
58 |
59 | cass.insert("yay for everyone", cass.newColumn("name", "Louie")).apply();
60 | cass.insert("yay for everyone", cass.newColumn("motto", "Swish!")).apply();
61 |
62 | info("getting a column: " + cass.getColumn("yay for me", "name").apply());
63 | info("getting a column that doesn't exist: " + cass.getColumn("yay for no one", "name").apply());
64 | info("getting a column that doesn't exist #2: " + cass.getColumn("yay for no one", "oink").apply());
65 | info("getting a set of columns: " + cass.getColumns("yay for me", Set("name", "motto")).apply());
66 | info("getting a whole row: " + cass.getRow("yay for me").apply());
67 | info("getting a column from a set of keys: " + cass.multigetColumn(Set("yay for me", "yay for you"), "name").apply());
68 | info("getting a set of columns from a set of keys: " + cass.multigetColumns(Set("yay for me", "yay for you"), Set("name", "motto")).apply());
69 |
70 | info("Iterating!");
71 | Future f = cass.rowsIteratee(2).foreach(new scala.runtime.AbstractFunction2>, scala.runtime.BoxedUnit>() {
72 | public scala.runtime.BoxedUnit apply(String key, List> columns) {
73 | info("Found: " + key);
74 | return null;
75 | }
76 | });
77 |
78 | f.apply();
79 |
80 | Future f2 = cass.columnsIteratee(2, "yay for me").foreach(new Function, scala.runtime.BoxedUnit>() {
81 | public scala.runtime.BoxedUnit apply(Column column){
82 | info("Found Columns Iteratee: " + column);
83 | return null;
84 | }
85 | });
86 |
87 | f2.apply();
88 |
89 | info("removing a column");
90 | cass.removeColumn("yay for me", "motto").apply();
91 |
92 | info("removing a row");
93 | cass.removeRow("yay for me").apply();
94 |
95 | info("Batching up some stuff");
96 | cass.batch()
97 | .removeColumn("yay for you", "name")
98 | .removeColumns("yay for us", Set("name", "motto"))
99 | .insert("yay for nobody", cass.newColumn("name", "Burt"))
100 | .insert("yay for nobody", cass.newColumn("motto", "'S funny."))
101 | .execute().apply();
102 |
103 | info("Wrappin' up");
104 | keyspace.close();
105 | }
106 | }
107 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/FNV1ATest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import org.junit.runner.RunWith
18 | import org.scalatest.junit.JUnitRunner
19 | import org.scalatest.matchers.MustMatchers
20 | import org.scalatest.FunSpec
21 |
@RunWith(classOf[JUnitRunner])
class FNV1ATest extends FunSpec with MustMatchers {
  describe("the FNV1A hash function") {
    it("matches up with existing implementations") {
      // 0x85944171f73967e8 is the published 64-bit FNV-1a digest of "foobar".
      FNV1A("foobar".getBytes) must equal(0x85944171f73967e8L)
    }
  }
}
30 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/Mutations.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | /**
18 | * Just here to poke a hole through BatchMutationBuilder's package-level privacy
19 | * for its mutations.
20 | */
object Mutations {
  /** Test-only accessor: exposes the builder's package-private mutation map for assertions. */
  def apply(builder: BatchMutationBuilder[_, _, _]) = builder.mutations
}
24 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/clocks/tests/MicrosecondEpochClockTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.clocks.tests
16 |
17 | import com.twitter.cassie.clocks.MicrosecondEpochClock
18 | import org.junit.runner.RunWith
19 | import org.scalatest.junit.JUnitRunner
20 | import org.scalatest.matchers.MustMatchers
21 | import org.scalatest.FunSpec
22 |
@RunWith(classOf[JUnitRunner])
class MicrosecondEpochClockTest extends FunSpec with MustMatchers {
  describe("the microseconds clock") {
    it("uses the Java epoch milliseconds clock") {
      // Allow 1ms of slack between reading the system clock and the clock under test.
      MicrosecondEpochClock.timestamp must be((System.currentTimeMillis * 1000) plusOrMinus (1000))
    }

    it("is strictly increasing, even beyond the precision of the clock") {
      val timestamps = (1 to 40).map { _ => MicrosecondEpochClock.timestamp }

      // Already sorted and all distinct => strictly increasing.
      timestamps.sorted must equal(timestamps)
      timestamps.toSet.size must equal(timestamps.size)
    }
  }
}
38 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/codecs/tests/ByteArrayCodecTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.codecs.tests
16 |
17 | import com.twitter.cassie.codecs.ByteArrayCodec
18 | import com.twitter.cassie.codecs.tests.ByteBufferLiteral._
19 | import java.nio.ByteBuffer
20 | import org.scalacheck.Prop
21 | import org.scalacheck._
22 | import org.junit.runner.RunWith
23 | import org.scalatest.junit.JUnitRunner
24 |
@RunWith(classOf[JUnitRunner])
class ByteArrayCodecTest extends CodecTest {
  describe("encoding an array of bytes") {
    it("produces an array of bytes") {
      ByteArrayCodec.encode(bb(1, 2, 3)) must equal(bb(1, 2, 3))
    }
  }

  describe("decoding an array of bytes") {
    it("produces an array of bytes") {
      ByteArrayCodec.decode(bb(49, 50, 51)) must equal(bb(49, 50, 51))
    }
  }

  // Property: encode followed by decode is the identity on arbitrary buffers.
  check(Prop.forAll(randomBuffer) { buf: ByteBuffer =>
    ByteArrayCodec.decode(ByteArrayCodec.encode(buf)) == buf
  })
}
41 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/codecs/tests/ByteBufferLiteral.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.codecs.tests
16 |
17 | import java.nio.ByteBuffer
18 |
19 | /** Sugar for ByteBuffer literals: obviously not intended to be performant. */
object ByteBufferLiteral {
  /** Builds a ByteBuffer whose readable contents are exactly the given bytes, positioned at 0. */
  def bb(arr: Byte*): ByteBuffer = ByteBuffer.wrap(arr.toArray)
}
28 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/codecs/tests/CodecTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.codecs.tests
16 |
17 | import com.twitter.cassie.codecs.tests.ByteBufferLiteral._
18 | import java.nio.charset.Charset
19 | import java.nio.{ ByteBuffer, CharBuffer }
20 | import org.scalacheck.util.Buildable.buildableArray
21 | import org.scalacheck._
22 | import org.junit.runner.RunWith
23 | import org.scalatest.junit.JUnitRunner
24 | import org.scalatest.matchers.MustMatchers
25 | import org.scalatest.prop.Checkers
26 | import org.scalatest.FunSpec
27 |
28 | // lifted from http://blog.zilverline.com/2011/04/07/serializing-strings-unicode-and-randomized-testing-using-scalacheck/
@RunWith(classOf[JUnitRunner])
class CodecTest extends FunSpec with MustMatchers with Checkers {
  // Char ranges: the two surrogate halves, and the BMP with surrogates removed.
  val UnicodeLeadingSurrogate = '\uD800' to '\uDBFF'
  val UnicodeTrailingSurrogate = '\uDC00' to '\uDFFF'
  val UnicodeBasicMultilingualPlane =
    ('\u0000' to '\uFFFF').diff(UnicodeLeadingSurrogate).diff(UnicodeTrailingSurrogate)

  // One BMP character, as a single-char string.
  val unicodeCharacterBasicMultilingualPlane: Gen[String] =
    Gen.oneOf(UnicodeBasicMultilingualPlane).map(_.toString)

  // One supplementary-plane character: a leading surrogate followed by a trailing one.
  val unicodeCharacterSupplementaryPlane: Gen[String] =
    Gen.oneOf(UnicodeLeadingSurrogate).flatMap { lead =>
      Gen.oneOf(UnicodeTrailingSurrogate).map { trail =>
        lead.toString + trail.toString
      }
    }

  // Weighted 9:1 towards BMP characters.
  val unicodeCharacter = Gen.frequency(
    9 -> unicodeCharacterBasicMultilingualPlane,
    1 -> unicodeCharacterSupplementaryPlane)

  val unicodeString = Gen.listOf(unicodeCharacter).map(_.mkString)

  // Full unsigned byte range 0..255, narrowed to Byte.
  val bytesGen: Gen[Byte] = Gen.choose(0, 255).map(_.toByte)

  val randomBuffer = Gen.containerOf[Array, Byte](bytesGen).map { bytes => ByteBuffer.wrap(bytes) }

  implicit override val generatorDrivenConfig =
    PropertyCheckConfig(minSuccessful = 10000)
}
58 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/codecs/tests/IntCodecTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.codecs.tests
16 |
17 | import com.twitter.cassie.codecs.IntCodec
18 | import com.twitter.cassie.codecs.tests.ByteBufferLiteral._
19 | import org.junit.runner.RunWith
20 | import org.scalatest.junit.JUnitRunner
21 |
@RunWith(classOf[JUnitRunner])
class IntCodecTest extends CodecTest {
  describe("encoding an int") {
    // The expected bytes (0x00, 0x03, 0x0A, 0x0D == 199181) show a fixed-width
    // big-endian encoding, not the variable-length zig-zag the old description
    // claimed; description corrected to match the asserted behavior.
    it("produces a big-endian 4-byte array") {
      IntCodec.encode(199181) must equal(bb(0, 3, 10, 13))
    }
  }

  describe("decoding an array of bytes") {
    it("produces an int") {
      IntCodec.decode(bb(0, 3, 10, 13)) must equal(199181)
    }
  }

  // Property: decode is the left inverse of encode for arbitrary ints.
  check { i: Int => IntCodec.decode(IntCodec.encode(i)) == i }
}
38 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/codecs/tests/LexicalUUIDCodecTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.codecs.tests
16 |
17 | import com.twitter.cassie.codecs.LexicalUUIDCodec
18 | import com.twitter.cassie.codecs.tests.ByteBufferLiteral._
19 | import com.twitter.cassie.types.LexicalUUID
20 | import org.junit.runner.RunWith
21 | import org.scalatest.junit.JUnitRunner
22 |
@RunWith(classOf[JUnitRunner])
class LexicalUUIDCodecTest extends CodecTest {
  // 16 bytes: big-endian timestamp followed by big-endian worker ID.
  val uuid = LexicalUUID(0x990213812L, 0x899813298123L)
  val bytes = bb(0, 0, 0, 9, -112, 33, 56, 18, 0, 0, -119, -104, 19, 41, -127, 35)

  describe("encoding a UUID") {
    it("produces a 16-byte array") {
      LexicalUUIDCodec.encode(uuid) must equal(bytes)
    }
  }

  describe("decoding a UUID") {
    it("produces a LexicalUUID") {
      LexicalUUIDCodec.decode(bytes) must equal(uuid)
    }
  }

  // Property: round-trip through the codec preserves any (timestamp, workerID) pair.
  check { (ts: Long, wid: Long) =>
    val original = LexicalUUID(ts, wid)
    LexicalUUIDCodec.decode(LexicalUUIDCodec.encode(original)) == original
  }
}
44 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/codecs/tests/LongCodecTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.codecs.tests
16 |
17 | import com.twitter.cassie.codecs.LongCodec
18 | import com.twitter.cassie.codecs.tests.ByteBufferLiteral._
19 | import org.junit.runner.RunWith
20 | import org.scalatest.junit.JUnitRunner
21 |
@RunWith(classOf[JUnitRunner])
class LongCodecTest extends CodecTest {
  describe("encoding a long") {
    // The expected 8 bytes (0x02C3A2F6BA06BFD4 == 199181989101092820L) show a
    // fixed-width big-endian encoding, not the variable-length zig-zag the old
    // description claimed; description corrected to match the asserted behavior.
    it("produces a big-endian 8-byte array") {
      LongCodec.encode(199181989101092820L) must equal(bb(2, -61, -94, -10, -70, 6, -65, -44))
    }
  }

  describe("decoding an array of bytes") {
    it("produces a long") {
      LongCodec.decode(bb(2, -61, -94, -10, -70, 6, -65, -44)) must equal(199181989101092820L)
    }
  }

  // Property: decode is the left inverse of encode for arbitrary longs.
  check { i: Long => LongCodec.decode(LongCodec.encode(i)) == i }
}
38 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/codecs/tests/ThriftCodecTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.codecs.tests
16 |
17 | import com.twitter.cassie.codecs.tests.ByteBufferLiteral._
18 | import com.twitter.cassie.codecs.ThriftCodec
19 | import com.twitter.cassie.test.thrift.Person
20 | import org.scalacheck._
21 | import org.junit.runner.RunWith
22 | import org.scalatest.junit.JUnitRunner
23 |
@RunWith(classOf[JUnitRunner])
class ThriftCodecTest extends CodecTest {
  describe("encoding a person") {
    it("must be decodable") {
      val codec = new ThriftCodec(classOf[Person])

      val first = new Person("joe", "doe")
      codec.decode(codec.encode(first)) must equal(first)

      // We do this 2x to verify that we aren't introducing bugs with object reuse
      val second = new Person("john", "doe")
      codec.decode(codec.encode(second)) must equal(second)
    }
  }

  // Property: any unicode first/last name survives a round trip through the codec.
  check(Prop.forAll(unicodeString, unicodeString) { (fname: String, lname: String) =>
    val codec = new ThriftCodec(classOf[Person])
    val person = new Person(fname, lname)
    codec.decode(codec.encode(person)) == person
  })
}
46 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/codecs/tests/Utf8CodecTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.codecs.tests
16 |
17 | import com.twitter.cassie.codecs.tests.ByteBufferLiteral._
18 | import com.twitter.cassie.codecs.Utf8Codec
19 | import org.scalacheck.Prop
20 | import org.junit.runner.RunWith
21 | import org.scalatest.junit.JUnitRunner
22 |
@RunWith(classOf[JUnitRunner])
class Utf8CodecTest extends CodecTest {
  describe("encoding a string") {
    it("produces a UTF-8 encoded array of bytes") {
      // "123" is ASCII, so its UTF-8 bytes are simply 0x31 0x32 0x33.
      val encoded = Utf8Codec.encode("123")
      encoded must equal(bb(49, 50, 51))
    }
  }

  describe("decoding an array of bytes") {
    it("produces a string") {
      val decoded = Utf8Codec.decode(bb(49, 50, 51))
      decoded must equal("123")
    }
  }

  // Property: decode is a left inverse of encode for arbitrary unicode input.
  check(Prop.forAll(unicodeString) { s: String => Utf8Codec.decode(Utf8Codec.encode(s)) == s })
}
39 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/examples/CassieRun.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.tests.examples
16 |
17 | import com.twitter.cassie._
18 | import com.twitter.cassie.codecs.Utf8Codec
19 | import com.twitter.cassie.types.LexicalUUID
20 | // TODO: unfortunate
21 | import scala.collection.JavaConversions._
22 |
23 | import org.slf4j.LoggerFactory
24 |
/**
 * End-to-end example that exercises the cassie client API against a locally
 * running Cassandra server: inserts, reads, iteration, removal, and batching.
 */
object CassieRun {
  private val log = LoggerFactory.getLogger(this.getClass)

  def main(args: Array[String]) {
    // create a cluster with a single seed from which to map keyspaces
    val cluster = new Cluster("localhost")

    // create a keyspace object (does nothing on the server)
    val keyspace = cluster.keyspace("Keyspace1").connect()

    // create a column family object (does nothing on the server)
    val cass = keyspace.columnFamily("Standard1", Utf8Codec, Utf8Codec, Utf8Codec)

    log.info("inserting some columns")
    cass.insert("yay for me", Column("name", "Coda")).apply()
    cass.insert("yay for me", Column("motto", "Moar lean.")).apply()

    cass.insert("yay for you", Column("name", "Niki")).apply()
    cass.insert("yay for you", Column("motto", "Told ya.")).apply()

    cass.insert("yay for us", Column("name", "Biscuit")).apply()
    cass.insert("yay for us", Column("motto", "Mlalm.")).apply()

    cass.insert("yay for everyone", Column("name", "Louie")).apply()
    cass.insert("yay for everyone", Column("motto", "Swish!")).apply()

    // NB: slf4j substitutes "{}" placeholders, not printf-style "%s" — with
    // "%s" the arguments were silently dropped from the log output.
    log.info("getting a column: {}", cass.getColumn("yay for me", "name").apply())
    log.info("getting a column that doesn't exist: {}", cass.getColumn("yay for no one", "name").apply())
    log.info("getting a column that doesn't exist #2: {}", cass.getColumn("yay for no one", "oink").apply())
    log.info("getting a set of columns: {}", cass.getColumns("yay for me", Set("name", "motto")).apply())
    log.info("getting a whole row: {}", cass.getRow("yay for me").apply())
    log.info("getting a column from a set of keys: {}", cass.multigetColumn(Set("yay for me", "yay for you"), "name").apply())
    log.info("getting a set of columns from a set of keys: {}", cass.multigetColumns(Set("yay for me", "yay for you"), Set("name", "motto")).apply())

    log.info("Iterating!")
    val f = cass.rowsIteratee(2).foreach {
      case (key, cols) =>
        log.info("Found: {} {}", key, cols)
    }
    f()

    val f2 = cass.columnsIteratee(2, "yay for me").foreach { col =>
      log.info("Found Columns Iteratee: {}", col)
    }
    // Wait for the columns iteratee to complete before mutating the row it
    // is reading; previously this future was created but never awaited.
    f2()

    log.info("removing a column")
    cass.removeColumn("yay for me", "motto").apply()

    log.info("removing a row")
    cass.removeRow("yay for me").apply()

    log.info("Batching up some stuff")
    cass.batch()
      .removeColumn("yay for you", "name")
      .removeColumns("yay for us", Set("name", "motto"))
      .insert("yay for nobody", Column("name", "Burt"))
      .insert("yay for nobody", Column("motto", "'S funny."))
      .execute().apply()

    log.info("Wrappin' up")
    keyspace.close()
  }
}
88 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/tests/BatchMutationBuilderTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.tests
16 |
17 | import com.twitter.cassie.codecs.Utf8Codec
18 | import com.twitter.cassie.util.ColumnFamilyTestHelper
19 | import com.twitter.cassie._
20 | import com.twitter.conversions.time._
21 | import org.junit.runner.RunWith
22 | import org.scalatest.junit.JUnitRunner
23 | import org.scalatest.matchers.MustMatchers
24 | import org.scalatest.mock.MockitoSugar
25 | import org.scalatest.FunSpec
26 | import scala.collection.JavaConversions._
27 |
@RunWith(classOf[JUnitRunner])
class BatchMutationBuilderTest extends FunSpec with MustMatchers with MockitoSugar with ColumnFamilyTestHelper {

  val (client, cf) = setup

  // Fresh builder per example so mutations don't leak between describes.
  def setupBuilder() = new BatchMutationBuilder(cf)
  def enc(string: String) = Utf8Codec.encode(string)

  describe("inserting a column") {
    val builder = setupBuilder()
    builder.insert("key", Column("name", "value").timestamp(234).ttl(999.seconds))
    val mutations = Mutations(builder)

    it("adds an insertion mutation") {
      val insertion = mutations.get(enc("key")).get("cf").get(0)
      val column = insertion.getColumn_or_supercolumn.getColumn
      Utf8Codec.decode(column.name) must equal("name")
      Utf8Codec.decode(column.value) must equal("value")
      column.getTimestamp must equal(234)
      column.ttl must equal(999)
    }
  }

  describe("removing a column with an implicit timestamp") {
    val builder = setupBuilder()
    builder.removeColumn("key", "column")
    val mutations = Mutations(builder)

    it("adds a deletion mutation") {
      val removal = mutations.get(enc("key")).get("cf").get(0)
      val names = removal.getDeletion.getPredicate.getColumn_names.map { Utf8Codec.decode(_) }
      names must equal(List("column"))
    }
  }

  describe("removing a set of columns with an implicit timestamp") {
    val builder = setupBuilder()
    builder.removeColumns("key", Set("one", "two"))
    val mutations = Mutations(builder)

    it("adds a deletion mutation") {
      val removal = mutations.get(enc("key")).get("cf").get(0)
      // Set iteration order is unspecified, so sort before comparing.
      val names = removal.getDeletion.getPredicate.getColumn_names.map { Utf8Codec.decode(_) }.sortWith { _ < _ }
      names must equal(List("one", "two"))
    }
  }

  describe("removing a set of columns with an explicit timestamp") {
    val builder = setupBuilder()
    builder.removeColumns("key", Set("one", "two"), 17)
    val mutations = Mutations(builder)

    it("adds a deletion mutation at the specified time") {
      val removal = mutations.get(enc("key")).get("cf").get(0)
      removal.getDeletion.getTimestamp must equal(17)
    }
  }
}
90 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/tests/ClusterRemapperTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.tests
16 |
17 | import com.twitter.cassie.ClusterRemapper
18 | import com.twitter.conversions.time._
19 | import java.net.{ SocketAddress, InetSocketAddress }
20 | import org.apache.cassandra.finagle.thrift
21 | import org.junit.runner.RunWith
22 | import org.mockito.Mockito.when
23 | import org.scalatest.junit.JUnitRunner
24 | import org.scalatest.matchers.MustMatchers
25 | import org.scalatest.{ BeforeAndAfterAll, FunSpec }
26 | import scala.collection.JavaConversions._
27 |
@RunWith(classOf[JUnitRunner])
class ClusterRemapperTest extends FunSpec with MustMatchers with BeforeAndAfterAll {
  // NOTE(review): this entire suite is disabled (commented out), so the class
  // currently registers no examples. The code below is kept for reference; it
  // appears to depend on a MockCassandraServer helper that is not present.
  // TODO: either restore the suite against a working mock server or delete it.

  // val server = new MockCassandraServer(MockCassandraServer.choosePort())
  // val ring = tr("start", "end", "c1.example.com") ::
  //   tr("start", "end", "c2.example.com") :: Nil
  // when(server.cassandra.describe_ring("keyspace")).thenReturn(seqAsJavaList(ring))
  //
  // def tr(start: String, end: String, endpoints: String*): thrift.TokenRange = {
  //   val tr = new thrift.TokenRange()
  //   tr.setStart_token(start)
  //   tr.setEnd_token(end)
  //   tr.setEndpoints(seqAsJavaList(endpoints))
  // }
  //
  // override protected def beforeAll() {
  //   server.start()
  // }
  //
  // override protected def afterAll() {
  //   server.stop()
  // }

  // describe("mapping a cluster") {
  //   it("returns the set of nodes in the cluster") {
  //     val mapper = new ClusterRemapper("keyspace", "127.0.0.1", 10.minutes, server.port)
  //
  //     val mapped = mapper.fetchHosts(Seq(new InetSocketAddress("127.0.0.1", server.port)))
  //
  //     mapped must equal(List(
  //       addr("c1.example.com", server.port), addr("c2.example.com", server.port)
  //     ))
  //   }
  // }
  //
  // def addr(host: String, port: Int) = new InetSocketAddress(host, port)
}
64 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/tests/ClusterTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.tests
16 |
17 | import com.twitter.cassie.Cluster
18 | import com.twitter.conversions.time._
19 | import org.junit.runner.RunWith
20 | import org.scalatest.junit.JUnitRunner
21 | import org.scalatest.matchers.MustMatchers
22 | import org.scalatest.mock.MockitoSugar
23 | import org.scalatest.FunSpec
24 |
@RunWith(classOf[JUnitRunner])
class ClusterTest extends FunSpec with MustMatchers with MockitoSugar {
  describe("a cluster") {
    // mapHostsEvery(0.minutes) — presumably disables periodic host remapping
    // so no network access is attempted; confirm against Cluster's contract.
    val cluster = new Cluster("nonhost").mapHostsEvery(0.minutes)

    it("creates a keyspace with the given name and provider") {
      val keyspace = cluster.keyspace("ks").connect()
      keyspace.name must equal("ks")
    }
  }
}
37 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/tests/ColumnTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.tests
16 |
17 | import com.twitter.cassie.Column
18 | import com.twitter.conversions.time._
19 | import org.junit.runner.RunWith
20 | import org.scalatest.junit.JUnitRunner
21 | import org.scalatest.matchers.MustMatchers
22 | import org.scalatest.FunSpec
23 |
@RunWith(classOf[JUnitRunner])
class ColumnTest extends FunSpec with MustMatchers {
  describe("a column with an explicit ttl") {
    // Fluent setters: timestamp and ttl are optional and set after construction.
    val column = Column("id", 300).timestamp(400L).ttl(1.minute)

    it("has a name") {
      column.name must equal("id")
    }

    it("has a value") {
      column.value must equal(300)
    }

    it("has a timestamp") {
      column.timestamp must equal(Some(400L))
    }

    it("has a ttl") {
      // 1.minute is normalized to seconds.
      column.ttl must equal(Some(60.seconds))
    }

  }
}
47 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/tests/CounterRowsIterateeTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.tests
16 |
17 | import com.twitter.cassie.util.ColumnFamilyTestHelper
18 | import com.twitter.cassie._
19 | import com.twitter.util.Future
20 | import java.util.{ List => JList, HashSet => JHashSet, ArrayList => JArrayList }
21 | import org.apache.cassandra.finagle.thrift
22 | import org.mockito.Matchers.{ eq => matchEq }
23 | import org.mockito.Mockito.{ when, inOrder => inOrderVerify }
24 | import org.junit.runner.RunWith
25 | import org.scalatest.junit.JUnitRunner
26 | import org.scalatest.matchers.MustMatchers
27 | import org.scalatest.mock.MockitoSugar
28 | import org.scalatest.{ OneInstancePerTest, FunSpec }
29 | import scala.collection.JavaConversions._
30 | import scala.collection.mutable.ListBuffer
31 |
32 |
@RunWith(classOf[JUnitRunner])
class CounterRowsIterateeTest extends FunSpec with MustMatchers with MockitoSugar
  with OneInstancePerTest with ColumnFamilyTestHelper {

  // NOTE(review): despite the class name, this suite drives the plain
  // Column/rowsIteratee path (same as RowsIterateeTest), not counter columns —
  // confirm whether it was meant to exercise a counter-specific iteratee.

  // Builds a Column fixture with an explicit timestamp and no ttl.
  def c(name: String, value: String, timestamp: Long) = {
    new Column(name, value, Some(timestamp), None)
  }

  // Builds the thrift KeyRange the iteratee is expected to request.
  def keyRange(start: String, end: String, count: Int) = {
    new thrift.KeyRange().setStart_key(b(start)).setEnd_key(b(end)).setCount(count)
  }

  // Wraps `columns` for `key` in a thrift KeySlice, encoded via the column
  // family's own codecs so it matches a real server response.
  def keySlice(cf: ColumnFamily[String, String, String], key: String, columns: Seq[Column[String, String]]) = {
    new thrift.KeySlice()
      .setKey(b(key))
      .setColumns(
        seqAsJavaList(columns.map(c => new thrift.ColumnOrSuperColumn().setColumn(Column.convert(cf.nameCodec, cf.valueCodec, cf.clock, c))))
      )
  }

  describe("iterating through an empty column family") {
    val (client, cf) = setup

    // The mocked server returns no slices at all.
    when(client.get_range_slices(anyColumnParent, anySlicePredicate, anyKeyRange, anyConsistencyLevel)).thenReturn(
      Future.value(new JArrayList[thrift.KeySlice]())
    )

    val iteratee = cf.rowsIteratee(5, new JHashSet[String]())

    it("doesn't throw an error") {
      val f = iteratee.foreach { case (key, columns) => () }
      f()
    }
  }

  describe("iterating through the columns of a range of keys") {
    val (client, cf) = setup

    // Two consecutive stubbed responses: the first page has 4 rows; the
    // second returns only the previous last key ("start3"), which signals
    // the end of the range to the iteratee.
    when(client.get_range_slices(anyColumnParent, anySlicePredicate, anyKeyRange, anyConsistencyLevel)).thenReturn(
      Future.value(
        seqAsJavaList(List(
          keySlice(cf, "start", List(c("name", "value", 1), c("name1", "value1", 2))),
          keySlice(cf, "start1", List(c("name", "value", 1), c("name1", "value1", 2))),
          keySlice(cf, "start2", List(c("name", "value", 1), c("name1", "value1", 2))),
          keySlice(cf, "start3", List(c("name", "value", 1), c("name1", "value1", 2)))))),
      Future.value(seqAsJavaList(List(keySlice(cf, "start3", List(c("name", "value", 1), c("name1", "value1", 2))))))
    )

    val iterator = cf.rowsIteratee("start", "end", 5, new JHashSet())

    // Collect every (key, columns) pair the iteratee yields.
    val data = new ListBuffer[(String, JList[Column[String, String]])]()
    val f = iterator.foreach {
      case (key, columns) =>
        data += ((key, columns))
    }
    f()

    it("does a buffered iteration over the columns in the rows in the range") {
      data must equal(ListBuffer(
        ("start", seqAsJavaList(List(c("name", "value", 1), c("name1", "value1", 2)))),
        ("start1", seqAsJavaList(List(c("name", "value", 1), c("name1", "value1", 2)))),
        ("start2", seqAsJavaList(List(c("name", "value", 1), c("name1", "value1", 2)))),
        ("start3", seqAsJavaList(List(c("name", "value", 1), c("name1", "value1", 2))))
      ))
    }

    it("requests data using the last key as the start key until the end is detected") {
      val f = iterator.foreach { case (key, columns) => () }
      f()
      val cp = new thrift.ColumnParent(cf.name)
      val inOrder = inOrderVerify(client)
      // Second request starts at the last-seen key and asks for count+1
      // (6 = 5 + 1 to account for the overlapping boundary row).
      inOrder.verify(client).get_range_slices(matchEq(cp), anySlicePredicate, matchEq(keyRange("start", "end", 5)), anyConsistencyLevel)
      inOrder.verify(client).get_range_slices(matchEq(cp), anySlicePredicate, matchEq(keyRange("start3", "end", 6)), anyConsistencyLevel)
    }
  }

  describe("map") {
    val (client, cf) = setup

    // Same two-page stubbing as above.
    when(client.get_range_slices(anyColumnParent, anySlicePredicate, anyKeyRange, anyConsistencyLevel)).thenReturn(
      Future.value(
        seqAsJavaList(List(
          keySlice(cf, "start", List(c("name", "value", 1), c("name1", "value1", 2))),
          keySlice(cf, "start1", List(c("name", "value", 1), c("name1", "value1", 2))),
          keySlice(cf, "start2", List(c("name", "value", 1), c("name1", "value1", 2))),
          keySlice(cf, "start3", List(c("name", "value", 1), c("name1", "value1", 2)))))),
      Future.value(seqAsJavaList(List(keySlice(cf, "start3", List(c("name", "value", 1), c("name1", "value1", 2))))))
    )

    // map transforms each yielded (key, columns) pair; here the keys get a
    // "foo" suffix while the column lists pass through unchanged.
    val data = cf.rowsIteratee("start", "end", 5, new JHashSet()).map{ case(key, columns) => (key + "foo", columns)}.apply

    it("does a buffered iteration over the columns in the rows in the range") {
      data must equal(ListBuffer(
        ("startfoo", seqAsJavaList(List(c("name", "value", 1), c("name1", "value1", 2)))),
        ("start1foo", seqAsJavaList(List(c("name", "value", 1), c("name1", "value1", 2)))),
        ("start2foo", seqAsJavaList(List(c("name", "value", 1), c("name1", "value1", 2)))),
        ("start3foo", seqAsJavaList(List(c("name", "value", 1), c("name1", "value1", 2))))
      ))
    }
  }
}
134 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/tests/KeyspaceTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.tests
16 |
17 | import com.twitter.cassie.clocks.Clock
18 | import com.twitter.cassie.codecs.Utf8Codec
19 | import com.twitter.cassie.Column
20 | import com.twitter.cassie.connection.ClientProvider
21 | import com.twitter.cassie.{ WriteConsistency, ReadConsistency, Keyspace }
22 | import com.twitter.finagle.stats.NullStatsReceiver
23 | import com.twitter.util.Future
24 | import java.nio.ByteBuffer
25 | import java.util.{ HashMap, Map => JMap, List => JList, ArrayList => JArrayList }
26 | import org.apache.cassandra.finagle.thrift
27 | import org.apache.cassandra.finagle.thrift.Cassandra.ServiceToClient
28 | import org.junit.runner.RunWith
29 | import org.mockito.Matchers.{ anyObject }
30 | import org.mockito.Mockito._
31 | import org.scalatest.junit.JUnitRunner
32 | import org.scalatest.matchers.MustMatchers
33 | import org.scalatest.mock.MockitoSugar
34 | import org.scalatest.{ BeforeAndAfterEach, FunSpec }
35 |
@RunWith(classOf[JUnitRunner])
class KeyspaceTest extends FunSpec with MustMatchers with MockitoSugar with BeforeAndAfterEach {

  // Minimal ClientProvider that routes every request to a single mock client.
  case class DumbClientProvider(stc: ServiceToClient) extends ClientProvider {
    def map[A](f: ServiceToClient => Future[A]) = f(stc)
  }

  // Frozen clock so separately-built mutations carry identical timestamps.
  object StaticClock extends Clock {
    def timestamp: Long = 123456
  }

  // Re-created before each example (see beforeEach).
  var stc: ServiceToClient = null
  var provider: ClientProvider = null
  var keyspace: Keyspace = null

  override def beforeEach {
    stc = mock[ServiceToClient]
    provider = DumbClientProvider(stc)
    keyspace = new Keyspace("MyApp", provider, NullStatsReceiver)
  }

  describe("a keyspace") {

    it("builds a column family with the same ClientProvider") {
      val cf = keyspace.columnFamily[String, String, String]("People", Utf8Codec, Utf8Codec, Utf8Codec)
      cf.keyspace must equal("MyApp")
      cf.name must equal("People")
      // LocalQuorum is the default consistency for both reads and writes.
      cf.readConsistency must equal(ReadConsistency.LocalQuorum)
      cf.writeConsistency must equal(WriteConsistency.LocalQuorum)
      cf.keyCodec must equal(Utf8Codec)
      cf.nameCodec must equal(Utf8Codec)
      cf.valueCodec must equal(Utf8Codec)
      cf.provider must equal(provider)
    }

    it("executes empty batch") {
      // Must complete without ever touching the (unstubbed) client.
      keyspace.execute(Seq(), WriteConsistency.One).get()
    }

    it("executes multiple batches") {
      val void = Future(null.asInstanceOf[Void])
      val a = keyspace.columnFamily[String, String, String]("People", Utf8Codec, Utf8Codec, Utf8Codec)
      val b = keyspace.columnFamily[String, String, String]("Dogs", Utf8Codec, Utf8Codec, Utf8Codec)

      // Hard to check equality of separately constructed mutations while the clock is moving
      // out from under us
      a.clock = StaticClock
      b.clock = StaticClock

      val aBatch = a.batch()
      val bBatch = b.batch()

      // Build the expected combined mutation map by hand: insert into a
      // throwaway "People" batch, then alias the same mutation list under
      // "Dogs" so the map covers both column families.
      val tmp = a.batch()
      tmp.insert("foo", Column("bar", "baz"))

      // java.util.Map[ByteBuffer, java.util.Map[String, java.util.List[Mutation]]]
      val expectedMutations = tmp.mutations
      val tmpMap = new JArrayList[JMap[String, JList[thrift.Mutation]]](expectedMutations.values).get(0)
      val col = new JArrayList[JList[thrift.Mutation]](tmpMap.values).get(0)
      tmpMap.put("Dogs", col)

      aBatch.insert("foo", Column("bar", "baz"))
      bBatch.insert("foo", Column("bar", "baz"))
      when(stc.batch_mutate(anyObject(), anyObject())).thenReturn(void);
      // Executing both batches must merge them into one batch_mutate call.
      keyspace.execute(Seq(aBatch, bBatch), WriteConsistency.Quorum).get()
      verify(stc).batch_mutate(expectedMutations, WriteConsistency.Quorum.level)
    }
  }
}
105 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/tests/ReadConsistencyTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.tests
16 |
17 | import com.twitter.cassie.ReadConsistency
18 | import org.apache.cassandra.finagle.thrift.ConsistencyLevel
19 | import org.junit.runner.RunWith
20 | import org.scalatest.junit.JUnitRunner
21 | import org.scalatest.matchers.MustMatchers
22 | import org.scalatest.FunSpec
23 |
@RunWith(classOf[JUnitRunner])
class ReadConsistencyTest extends FunSpec with MustMatchers {
  // Table-driven: each row pairs a ReadConsistency with its expected name
  // and its corresponding Thrift ConsistencyLevel. Registration order matches
  // the original hand-written describes (One, Quorum, All).
  Seq(
    (ReadConsistency.One, "One", ConsistencyLevel.ONE),
    (ReadConsistency.Quorum, "Quorum", ConsistencyLevel.QUORUM),
    (ReadConsistency.All, "All", ConsistencyLevel.ALL)
  ) foreach {
    case (consistency, name, level) =>
      describe("a read consistency of " + name) {
        it("is human readable") {
          consistency.toString must equal("ReadConsistency." + name)
        }

        it("has a corresponding Thrift ConsistencyLevel of " + level) {
          consistency.level must equal(level)
        }
      }
  }
}
56 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/tests/RowsIterateeTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.tests
16 |
17 | import com.twitter.cassie.util.ColumnFamilyTestHelper
18 | import com.twitter.cassie._
19 | import com.twitter.util.Future
20 | import java.util.{ List => JList, HashSet => JHashSet, ArrayList => JArrayList }
21 | import org.apache.cassandra.finagle.thrift
22 | import org.junit.runner.RunWith
23 | import org.mockito.Matchers.{ eq => matchEq }
24 | import org.mockito.Mockito.{ when, inOrder => inOrderVerify }
25 | import org.scalatest.junit.JUnitRunner
26 | import org.scalatest.matchers.MustMatchers
27 | import org.scalatest.mock.MockitoSugar
28 | import org.scalatest.{ FunSpec, OneInstancePerTest }
29 | import scala.collection.JavaConversions._
30 | import scala.collection.mutable.ListBuffer
31 |
@RunWith(classOf[JUnitRunner])
class RowsIterateeTest extends FunSpec with MustMatchers with MockitoSugar with OneInstancePerTest with ColumnFamilyTestHelper {

  // Builds a cassie Column with an explicit timestamp and no TTL.
  def co(name: String, value: String, timestamp: Long) = {
    new Column(name, value, Some(timestamp), None)
  }

  // Builds the thrift KeyRange the iteratee is expected to send to Cassandra.
  def keyRange(start: String, end: String, count: Int) = {
    new thrift.KeyRange().setStart_key(b(start)).setEnd_key(b(end)).setCount(count)
  }

  // Wraps a row key and its columns into the thrift KeySlice shape that
  // get_range_slices returns, converting each column with the cf's codecs and clock.
  def keySlice(cf: ColumnFamily[String, String, String], key: String, columns: Seq[Column[String, String]]) = {
    new thrift.KeySlice()
      .setKey(b(key))
      .setColumns(
        seqAsJavaList(columns.map(c => new thrift.ColumnOrSuperColumn().setColumn(Column.convert(cf.nameCodec, cf.valueCodec, cf.clock, c))))
      )
  }

  describe("iterating through an empty column family") {
    val (client, cf) = setup

    // The mock always returns an empty slice list, i.e. no rows at all.
    when(client.get_range_slices(anyColumnParent, anySlicePredicate, anyKeyRange, anyConsistencyLevel)).thenReturn(
      Future.value(new JArrayList[thrift.KeySlice]())
    )

    val iteratee = cf.rowsIteratee(5, new JHashSet[String]())

    it("doesn't throw an error") {
      val f = iteratee.foreach { case (key, columns) => () }
      f() // force the Future; any exception would surface here
    }
  }

  describe("iterating through the columns of a range of keys") {
    val (client, cf) = setup

    // First stubbed response yields four rows ("start".."start3"); the second
    // response — requested starting from the last seen key — repeats only that
    // last key, which is how the iteratee detects the end of the range.
    when(client.get_range_slices(anyColumnParent, anySlicePredicate, anyKeyRange, anyConsistencyLevel)).thenReturn(
      Future.value(
        seqAsJavaList(List(
          keySlice(cf, "start", List(co("name", "value", 1), co("name1", "value1", 2))),
          keySlice(cf, "start1", List(co("name", "value", 1), co("name1", "value1", 2))),
          keySlice(cf, "start2", List(co("name", "value", 1), co("name1", "value1", 2))),
          keySlice(cf, "start3", List(co("name", "value", 1), co("name1", "value1", 2)))))),
      Future.value(seqAsJavaList(List(keySlice(cf, "start3", List(co("name", "value", 1), co("name1", "value1", 2))))))
    )

    val iterator = cf.rowsIteratee("start", "end", 5, new JHashSet())

    // Collect every (key, columns) pair the iteratee hands us.
    val data = new ListBuffer[(String, JList[Column[String, String]])]()
    val f = iterator.foreach {
      case (key, columns) =>
        data += ((key, columns))
    }
    f()

    it("does a buffered iteration over the columns in the rows in the range") {
      data must equal(ListBuffer(
        ("start", seqAsJavaList(List(co("name", "value", 1), co("name1", "value1", 2)))),
        ("start1", seqAsJavaList(List(co("name", "value", 1), co("name1", "value1", 2)))),
        ("start2", seqAsJavaList(List(co("name", "value", 1), co("name1", "value1", 2)))),
        ("start3", seqAsJavaList(List(co("name", "value", 1), co("name1", "value1", 2))))
      ))
    }

    it("requests data using the last key as the start key until the end is detected") {
      val f = iterator.foreach { case (key, columns) => () }
      f()
      val cp = new thrift.ColumnParent(cf.name)
      // The second request must start at the last key of the first page
      // ("start3") and ask for count + 1 (6), since its first row is a repeat.
      val inOrder = inOrderVerify(client)
      inOrder.verify(client).get_range_slices(matchEq(cp), anySlicePredicate, matchEq(keyRange("start", "end", 5)), anyConsistencyLevel)
      inOrder.verify(client).get_range_slices(matchEq(cp), anySlicePredicate, matchEq(keyRange("start3", "end", 6)), anyConsistencyLevel)
    }
  }

  describe("map") {
    val (client, cf) = setup

    when(client.get_range_slices(anyColumnParent, anySlicePredicate, anyKeyRange, anyConsistencyLevel)).thenReturn(
      Future.value(
        seqAsJavaList(List(
          keySlice(cf, "start", List(co("name", "value", 1), co("name1", "value1", 2))),
          keySlice(cf, "start1", List(co("name", "value", 1), co("name1", "value1", 2))),
          keySlice(cf, "start2", List(co("name", "value", 1), co("name1", "value1", 2))),
          keySlice(cf, "start3", List(co("name", "value", 1), co("name1", "value1", 2)))))),
      Future.value(seqAsJavaList(List(keySlice(cf, "start3", List(co("name", "value", 1), co("name1", "value1", 2))))))
    )

    // Appends "foo" to every key via the iteratee's map, then forces the result.
    val data = cf.rowsIteratee("start", "end", 5, new JHashSet()).map{(k, c) => (k + "foo", c)}()

    it("it maps over the data") {
      data must equal(ListBuffer(
        ("startfoo", seqAsJavaList(List(co("name", "value", 1), co("name1", "value1", 2)))),
        ("start1foo", seqAsJavaList(List(co("name", "value", 1), co("name1", "value1", 2)))),
        ("start2foo", seqAsJavaList(List(co("name", "value", 1), co("name1", "value1", 2)))),
        ("start3foo", seqAsJavaList(List(co("name", "value", 1), co("name1", "value1", 2))))
      ))
    }
  }
}
132 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/tests/WriteConsistencyTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.tests
16 |
17 | import com.twitter.cassie.WriteConsistency
18 | import org.apache.cassandra.finagle.thrift.ConsistencyLevel
19 | import org.junit.runner.RunWith
20 | import org.scalatest.junit.JUnitRunner
21 | import org.scalatest.matchers.MustMatchers
22 | import org.scalatest.FunSpec
23 |
24 |
@RunWith(classOf[JUnitRunner])
class WriteConsistencyTest extends FunSpec with MustMatchers {

  /** One row per consistency level: the value under test, its short label
    * (used both in the expected toString and the spec names), and the Thrift
    * ConsistencyLevel it must map to. */
  private val expectations = Seq(
    (WriteConsistency.Any, "Any", ConsistencyLevel.ANY),
    (WriteConsistency.One, "One", ConsistencyLevel.ONE),
    (WriteConsistency.Quorum, "Quorum", ConsistencyLevel.QUORUM),
    (WriteConsistency.All, "All", ConsistencyLevel.ALL)
  )

  // Register an identical pair of examples for every consistency level.
  expectations.foreach {
    case (consistency, label, thriftLevel) =>
      describe("a write consistency of " + label) {
        it("is human readable") {
          consistency.toString must equal("WriteConsistency." + label)
        }

        it("has a corresponding Thrift ConsistencyLevel of " + label.toUpperCase) {
          consistency.level must equal(thriftLevel)
        }
      }
  }
}
67 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/types/tests/LexicalUUIDTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.types.tests
16 |
17 | import com.twitter.cassie.clocks.Clock
18 | import com.twitter.cassie.types.LexicalUUID
19 | import org.junit.runner.RunWith
20 | import org.scalatest.junit.JUnitRunner
21 | import org.scalatest.matchers.MustMatchers
22 | import org.scalatest.FunSpec
23 |
@RunWith(classOf[JUnitRunner])
class LexicalUUIDTest extends FunSpec with MustMatchers {
  describe("a lexical UUID") {
    // Built from an explicit (timestamp, workerID) pair.
    val uuid = LexicalUUID(0xFF9281, 0xA0091991)

    it("has a timestamp") {
      uuid.timestamp must equal(0xFF9281)
    }

    it("has a worker ID") {
      uuid.workerID must equal(0xA0091991)
    }

    it("is human-readable") {
      uuid.toString must equal("00000000-00ff-9281-ffffffffa0091991")
    }

    // Exercises the implicit LexicalUUID -> String conversion; the explicit
    // String ascription is what makes the implicit fire.
    it("is convertible to a String") {
      val s: String = uuid

      s must equal("00000000-00ff-9281-ffffffffa0091991")
    }

    // The reverse implicit: String -> LexicalUUID round-trips to an equal value.
    it("is convertible from a String") {
      val u: LexicalUUID = "00000000-00ff-9281-ffffffffa0091991"

      u must equal(uuid)
    }
  }

  describe("generating a lexical UUID") {
    // Fixed clock so the generated UUID is deterministic.
    val clock = new Clock {
      def timestamp = 19910019L
    }

    val uuid = new LexicalUUID(clock, 1001)

    it("uses the timestamp from the clock and the provided worker ID") {
      uuid.toString must equal("00000000-012f-cd83-00000000000003e9")
    }
  }

  describe("ordering lexical UUIDs") {
    // uuid1 and uuid2 share a timestamp and differ in worker ID; uuid3 has a
    // later timestamp, so the expected order is uuid1, uuid2, uuid3.
    val uuid1 = LexicalUUID(0xFF9281, 0xA0091991)
    val uuid2 = LexicalUUID(0xFF9281, 0xA0091992)
    val uuid3 = LexicalUUID(0xFF9282, 0xA0091991)

    it("orders by timestamp, then worker ID") {
      val ordered = Seq(uuid2, uuid3, uuid1).sortWith { _ < _ }

      ordered must equal(Seq(uuid1, uuid2, uuid3))
    }
  }
}
78 |
--------------------------------------------------------------------------------
/cassie-core/src/test/scala/com/twitter/cassie/util/ColumnFamilyTestHelper.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.util
16 |
17 | import com.twitter.cassie.clocks.MicrosecondEpochClock
18 | import com.twitter.cassie.codecs.Utf8Codec
19 | import com.twitter.cassie.MockCassandraClient.SimpleProvider
20 | import com.twitter.cassie._
21 | import com.twitter.finagle.stats.NullStatsReceiver
22 | import java.nio.ByteBuffer
23 | import org.apache.cassandra.finagle.thrift
24 | import org.mockito.Matchers.any
25 |
/** Shared fixtures for column-family tests: thrift object builders, a mocked
  * client + column family pair, and Mockito matchers for the thrift API. */
trait ColumnFamilyTestHelper {
  type ColumnList = java.util.List[thrift.ColumnOrSuperColumn]
  type KeyColumnMap = java.util.Map[java.nio.ByteBuffer, ColumnList]

  /** Wraps a cassie Column into a thrift ColumnOrSuperColumn using the
    * column family's own codecs and clock. */
  def cosc(cf: ColumnFamily[String, String, String], c: Column[String, String]) = {
    val converted = Column.convert(cf.nameCodec, cf.valueCodec, cf.clock, c)
    new thrift.ColumnOrSuperColumn().setColumn(converted)
  }

  /** Builds a thrift ColumnOrSuperColumn for a freshly created column with
    * the given name, value, and timestamp. */
  def c(cf: ColumnFamily[String, String, String], name: String, value: String, timestamp: Long) = {
    val converted = Column.convert(
      Utf8Codec,
      Utf8Codec,
      cf.clock,
      cf.newColumn(name, value, timestamp)
    )
    new thrift.ColumnOrSuperColumn().setColumn(converted)
  }

  /** Builds a thrift ColumnOrSuperColumn holding a counter column. */
  def cc(name: String, value: Long) = {
    val counter = new thrift.CounterColumn(Utf8Codec.encode(name), value)
    new thrift.ColumnOrSuperColumn().setCounter_column(counter)
  }

  /** Wraps a string's bytes in a ByteBuffer. */
  def b(keyString: String) = {
    val bytes = keyString.getBytes
    ByteBuffer.wrap(bytes)
  }

  /** A mock client plus a String-keyed ColumnFamily wired to it. */
  def setup = {
    val mock = new MockCassandraClient
    val columnFamily = new ColumnFamily("ks", "cf", new SimpleProvider(mock.client),
      Utf8Codec, Utf8Codec, Utf8Codec, NullStatsReceiver)
    (mock.client, columnFamily)
  }

  /** A mock client plus a CounterColumnFamily wired to it. */
  def setupCounters = {
    val mock = new MockCassandraClient
    val counters = new CounterColumnFamily("ks", "cf", new SimpleProvider(mock.client),
      Utf8Codec, Utf8Codec, NullStatsReceiver)
    (mock.client, counters)
  }

  // Mockito "any" matchers for the thrift API parameter types.
  def anyByteBuffer() = any(classOf[ByteBuffer])
  def anyColumnParent() = any(classOf[thrift.ColumnParent])
  def anyColumnPath() = any(classOf[thrift.ColumnPath])
  def anySlicePredicate() = any(classOf[thrift.SlicePredicate])
  def anyColumn() = any(classOf[thrift.Column])
  def anyConsistencyLevel() = any(classOf[thrift.ConsistencyLevel])
  def anyCounterColumn() = any(classOf[thrift.CounterColumn])
  def anyKeyRange() = any(classOf[thrift.KeyRange])
  def anyInt() = any(classOf[Int])

  /** Builds a SlicePredicate over [start, end] with the given count and order. */
  def pred(start: String, end: String, count: Int, order: Order = Order.Normal) = {
    val range = new thrift.SliceRange()
      .setStart(b(start))
      .setFinish(b(end))
      .setReversed(order.reversed)
      .setCount(count)
    new thrift.SlicePredicate().setSlice_range(range)
  }

}
85 |
--------------------------------------------------------------------------------
/cassie-core/src/test/thrift/person.thrift:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | namespace java com.twitter.cassie.test.thrift
16 |
// Minimal struct used as a test fixture for Thrift-encoded column values.
struct Person {
  1: string first_name,
  2: string last_name
}
--------------------------------------------------------------------------------
/cassie-hadoop/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 4.0.0
4 | com.twitter
5 | cassie-hadoop
6 | jar
7 | 0.25.1-SNAPSHOT
8 |
9 | com.twitter
10 | scala-parent-292
11 | 0.0.4
12 | ../../parents/scala-parent-292/pom.xml
13 |
14 |
15 | ${project.basedir}/../../.git
16 |
17 |
18 |
19 |
20 | com.novocode
21 | junit-interface
22 | 0.7
23 | test
24 |
25 |
26 | org.apache.hadoop
27 | hadoop-core
28 | 0.20.2
29 |
30 |
31 | org.scalatest
32 | scalatest_2.9.2
33 | 1.7.2
34 | test
35 |
36 |
37 |
38 | com.twitter
39 | cassie-core
40 | 0.25.1-SNAPSHOT
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/cassie-hadoop/src/main/java/com/twitter/cassie/hadoop/CassieCounters.java:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.hadoop;
16 |
17 | import org.apache.hadoop.mapreduce.Counters;
18 |
/**
 * Hadoop job counters recorded by CassieReducer while flushing batches to
 * Cassandra: SUCCESS per flushed batch, RETRY per backed-off re-attempt,
 * FAILURE per batch that exhausted its backoff.
 */
public class CassieCounters {
  // NOTE(review): this nested enum shares its simple name with the imported
  // org.apache.hadoop.mapreduce.Counters, which appears unused here.
  public static enum Counters { SUCCESS, RETRY, FAILURE }
}
--------------------------------------------------------------------------------
/cassie-hadoop/src/main/scala/com/twitter/cassie/hadoop/CassieReducer.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.hadoop
16 |
17 | import com.twitter.cassie.clocks._
18 | import com.twitter.cassie.codecs._
19 | import com.twitter.cassie.hadoop._
20 | import com.twitter.cassie._
21 | import com.twitter.conversions.time._
22 | import com.twitter.util._
23 | import java.nio.ByteBuffer
24 | import org.apache.hadoop.io._
25 | import org.apache.hadoop.mapreduce._
26 | import scala.collection.JavaConversions._
27 | import scala.collection.mutable.ListBuffer
28 | import scala.math._
29 |
object CassieReducer {
  /** Default number of buffered inserts before a batch is flushed. */
  val DEFAULT_PAGE_SIZE = 100
  // Hadoop Configuration keys read in CassieReducer.setup.
  val PAGE_SIZE = "page_size"
  val KEYSPACE = "keyspace"
  val COLUMN_FAMILY = "column_family"
  val HOSTS = "hosts"
  val PORT = "port"
  val MIN_BACKOFF = "min_backoff"
  val MAX_BACKOFF = "max_backoff"
  val IGNORE_FAILURES = "ignore_failures"
}
41 |
42 | import CassieReducer._
43 |
/**
 * A Reducer that writes its input as columns into a Cassandra column family,
 * buffering inserts into batches of `page` and flushing with retry/backoff.
 */
class CassieReducer extends Reducer[BytesWritable, ColumnWritable, BytesWritable, BytesWritable] {

  val defaultReadConsistency = ReadConsistency.One
  val defaultWriteConsistency = WriteConsistency.One

  // Initialized in setup(); mutated per record. Hadoop runs one Reducer
  // instance per task, single-threaded, so unsynchronized vars are fine here.
  var cluster: Cluster = null
  var keyspace: Keyspace = null
  var columnFamily: ColumnFamily[ByteBuffer, ByteBuffer, ByteBuffer] = null
  var page: Int = CassieReducer.DEFAULT_PAGE_SIZE
  var i = 0
  var consecutiveFailures = 0
  var consecutiveSuccesses = 0

  var minBackoff = 1000
  var maxBackoff = 30000
  var ignoreFailures = true

  var batch: BatchMutationBuilder[ByteBuffer, ByteBuffer, ByteBuffer] = null

  type ReducerContext = Reducer[BytesWritable, ColumnWritable, BytesWritable, BytesWritable]#Context

  /** Connects to the cluster/keyspace/column family named in the job
    * configuration and opens the first mutation batch. */
  override def setup(context: ReducerContext) = {
    def conf(key: String) = context.getConfiguration.get(key)
    val port = if (conf(PORT) == null) 9160 else conf(PORT).toInt
    cluster = new Cluster(conf(HOSTS), port)
    cluster = configureCluster(cluster)
    if (conf(MIN_BACKOFF) != null) minBackoff = Integer.valueOf(conf(MIN_BACKOFF)).intValue
    if (conf(MAX_BACKOFF) != null) maxBackoff = Integer.valueOf(conf(MAX_BACKOFF)).intValue
    if (conf(IGNORE_FAILURES) != null) ignoreFailures = conf(IGNORE_FAILURES) == "true"
    if (conf(PAGE_SIZE) != null) page = Integer.valueOf(conf(PAGE_SIZE)).intValue

    keyspace = configureKeyspace(cluster.keyspace(conf(KEYSPACE))).connect()
    columnFamily = keyspace.columnFamily[ByteBuffer, ByteBuffer, ByteBuffer](conf(COLUMN_FAMILY),
      ByteArrayCodec, ByteArrayCodec, ByteArrayCodec)
    batch = columnFamily.batch
  }

  /** Hook for subclasses; the default adds two retries to the builder. */
  def configureKeyspace(c: KeyspaceBuilder): KeyspaceBuilder = {
    c.retries(2)
  }

  /** Hook for subclasses (e.g. tests disable periodic host mapping). */
  def configureCluster(cluster: Cluster): Cluster = {
    cluster
  }

  /** Buffers one insert per value; flushes every `page` inserts. */
  override def reduce(key: BytesWritable, values: java.lang.Iterable[ColumnWritable], context: ReducerContext) = {
    for (value <- values) {
      // Copy key/name/value out of Hadoop's reused backing arrays.
      val bufKey = bufCopy(ByteBuffer.wrap(key.getBytes, 0, key.getLength))
      batch.insert(bufKey, new Column(bufCopy(value.name), bufCopy(value.value)))
      i += 1
      if (i % page == 0) {
        execute(context)
        consecutiveSuccesses += 1
        consecutiveFailures = 0
        batch = columnFamily.batch
      }
    }
  }

  /**
   * Flushes the current batch, retrying with exponential backoff on failure.
   *
   * FIX: the failure counters are now updated *before* the recursive retry.
   * Previously they were incremented only after the recursion unwound, so
   * every nested retry saw the same `consecutiveFailures`: the sleep never
   * grew and a persistently failing batch retried forever at minBackoff.
   */
  private def execute(context: ReducerContext): Unit = try {
    batch.execute.get()
    context.getCounter(CassieCounters.Counters.SUCCESS).increment(1)
  } catch {
    case t: Throwable => {
      t.printStackTrace
      // Backoff doubles per consecutive failure: min, 2*min, 4*min, ...
      val toSleep = minBackoff * (1 << consecutiveFailures)
      consecutiveSuccesses = 0
      consecutiveFailures += 1
      if (toSleep < maxBackoff) {
        context.progress() // keep the task alive while we sleep
        Thread.sleep(toSleep)
        context.getCounter(CassieCounters.Counters.RETRY).increment(1)
        execute(context)
      } else {
        context.getCounter(CassieCounters.Counters.FAILURE).increment(1)
        if (ignoreFailures) {
          System.err.println("Ignoring......")
          t.printStackTrace
          // fall through and continue with the next batch
        } else {
          throw t
        }
      }
    }
  }

  /** Copies a buffer's remaining bytes into a fresh heap buffer.
    * FIX: includes arrayOffset so sliced/offset buffers copy correctly. */
  private def bufCopy(old: ByteBuffer) = {
    val n = ByteBuffer.allocate(old.remaining)
    n.put(old.array, old.arrayOffset + old.position, old.remaining)
    n.rewind
    n
  }

  /** Flushes whatever remains in the final partial batch. */
  override def cleanup(context: ReducerContext) = execute(context)
}
139 |
--------------------------------------------------------------------------------
/cassie-hadoop/src/main/scala/com/twitter/cassie/hadoop/ColumnWritable.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.hadoop
16 |
17 | import com.twitter.cassie.codecs._
18 | import java.nio.ByteBuffer
19 | import org.apache.hadoop.io._
20 | import org.apache.hadoop.mapreduce._
21 | import scala.collection.JavaConversions._
22 |
/**
 * A Writable carrying one Cassandra column as three BytesWritable fields:
 * name, value, and a LongCodec-encoded timestamp.
 */
class ColumnWritable extends ArrayWritable(classOf[BytesWritable]) {
  /** Builds a column stamped with the current wall-clock time in millis. */
  def this(name: ByteBuffer, value: ByteBuffer) = {
    this()
    set(name, value, System.currentTimeMillis)
  }

  def set(name: ByteBuffer, value: ByteBuffer, timestamp: Long) {
    val a = writable(name)
    val b = writable(value)
    val c = writable(LongCodec.encode(timestamp))
    set(Array(a, b, c))
  }

  def name = getBuf(0)
  def value = getBuf(1)
  def timestamp = LongCodec.decode(getBuf(2))

  /** Reads field `i` back as a ByteBuffer over the Writable's valid bytes. */
  def getBuf(i: Int) = {
    val bw = get()(i).asInstanceOf[BytesWritable]
    ByteBuffer.wrap(bw.getBytes(), 0, bw.getLength())
  }

  /** Copies a ByteBuffer's remaining bytes into a BytesWritable.
    * FIX: include arrayOffset so buffers that are slices/views of a larger
    * backing array copy the right region (remaining == limit - position). */
  private def writable(buf: ByteBuffer) = {
    val out = new BytesWritable
    out.set(buf.array, buf.arrayOffset + buf.position, buf.remaining)
    out
  }
}
51 |
--------------------------------------------------------------------------------
/cassie-hadoop/src/test/scala/com/twitter/cassie/hadoop/CassieReducerTest.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie.hadoop
16 |
17 | import com.twitter.cassie.clocks._
18 | import com.twitter.cassie.codecs._
19 | import com.twitter.cassie.tests.util._
20 | import com.twitter.cassie._
21 | import com.twitter.conversions.time._
22 | import com.twitter.util._
23 | import java.io._
24 | import java.net._
25 | import java.nio.ByteBuffer
26 | import java.util._
27 | import org.apache.hadoop.conf._
28 | import org.apache.hadoop.fs.Path
29 | import org.apache.hadoop.io._
30 | import org.apache.hadoop.mapreduce.lib.input._
31 | import org.apache.hadoop.mapreduce.lib.output._
32 | import org.apache.hadoop.mapreduce._
33 | import org.apache.hadoop.mapreduce.{ Mapper, Reducer, Job }
34 | import org.apache.hadoop.util._
35 | import org.scalatest.matchers.MustMatchers
36 | import org.scalatest.FunSpec
37 | import scala.collection.JavaConversions._
38 | import scala.collection.mutable.ListBuffer
39 | import scala.math._
40 |
41 |
42 | import CassieReducer._
43 |
object Fake {
  /** A trivial Mapper: each input line becomes one column named "default"
    * whose value is the line text, keyed by the line number. */
  class Map extends Mapper[LongWritable, Text, BytesWritable, ColumnWritable] {

    type MapperContext = Mapper[LongWritable, Text, BytesWritable, ColumnWritable]#Context

    override def map(lineNumber: LongWritable, line: Text, context: MapperContext): Unit = {
      val keyBuf = bb(lineNumber.get)
      val outKey = new BytesWritable
      outKey.set(keyBuf.array(), keyBuf.position, keyBuf.remaining)
      val outColumn = new ColumnWritable(bb("default"), bb(line))
      context.write(outKey, outColumn)
    }

    // Renders any value via toString and wraps its bytes (platform-default
    // charset) in a ByteBuffer.
    private def bb(a: Any) = ByteBuffer.wrap(a.toString.getBytes)
  }
}
64 |
/** A CassieReducer for tests running against a single local FakeCassandra.
  * NOTE(review): presumably mapHostsEvery(0.seconds) disables periodic host
  * remapping (per the class name) — confirm against Cluster.mapHostsEvery. */
class NonMappingCassieReducer extends CassieReducer {
  override def configureCluster(cluster: Cluster): Cluster = {
    cluster.mapHostsEvery(0.seconds)
  }
}
70 |
/**
 * Writes each command-line arg as one line of a local input file, then runs a
 * single-reducer MapReduce job (Fake.Map -> NonMappingCassieReducer) that
 * loads those lines into the Cassandra listening on `port`.
 */
class TestScript(port: Int) extends Configured with Tool {

  /** Runs the job synchronously. Always returns 0. */
  def run(args: Array[String]): Int = {
    val path = "/tmp/cassie-test"
    // FIX: close the PrintStream even if writing throws.
    // (Also removed an unused FileSystem lookup on the input path.)
    val writer = new PrintStream(new File(path))
    try {
      for (arg <- args) writer.println(arg)
    } finally {
      writer.close()
    }

    val inputPath = new Path(path)

    val job = new Job(getConf())
    val jc = job.getConfiguration()

    // Point the reducer at the locally running (fake) Cassandra.
    jc.set(HOSTS, "127.0.0.1")
    jc.set(PORT, port.toString)
    jc.set(KEYSPACE, "ks")
    jc.set(COLUMN_FAMILY, "cf")

    job.setJarByClass(getClass)
    job.setJobName(getClass.getName)

    job.setMapperClass(classOf[Fake.Map])

    job.setMapOutputKeyClass(classOf[BytesWritable])
    job.setMapOutputValueClass(classOf[ColumnWritable])
    job.setReducerClass(classOf[NonMappingCassieReducer])
    job.setNumReduceTasks(1)

    job.setInputFormatClass(classOf[TextInputFormat])
    job.setOutputFormatClass(classOf[NullOutputFormat[_, _]])

    FileInputFormat.setInputPaths(job, inputPath)

    job.waitForCompletion(true)
    0
  }

}
110 |
class CassieReducerTest extends FunSpec with MustMatchers {

  describe("CassieReducer") {
    it("should go through a lifecycle") {
      val fake = new FakeCassandra
      try {
        fake.start()
        ToolRunner.run(new Configuration(), new TestScript(fake.port.get), Array("hello", "world"))
        implicit val keyCodec = Utf8Codec
        val cluster = new Cluster("127.0.0.1", fake.port.get)
        val ks = cluster.mapHostsEvery(0.seconds).keyspace("ks").connect()
        val cf = ks.columnFamily[String, String, String]("cf", Utf8Codec, Utf8Codec, Utf8Codec)

        // Line 0 of the input ("hello") must have landed in the "default" column.
        cf.getRow("0")().get("default").value must equal("hello")
      } finally {
        fake.stop()
      }
    }

    it("should not blow up when empty input data") {
      val fake = new FakeCassandra
      // FIX: stop the fake server in a finally block (as the first test does)
      // so a failing job no longer leaks the server.
      try {
        fake.start()
        // NOTE(review): the first test does not sleep after start(); confirm
        // whether start() already blocks until ready, in which case this can go.
        Thread.sleep(1000)
        ToolRunner.run(new Configuration(), new TestScript(fake.port.get), Array())
        implicit val keyCodec = Utf8Codec
        val cluster = new Cluster("127.0.0.1", fake.port.get)
        val ks = cluster.mapHostsEvery(0.seconds).keyspace("ks").connect()
        val cf = ks.columnFamily[String, String, String]("cf", Utf8Codec, Utf8Codec, Utf8Codec)
      } finally {
        fake.stop()
      }
    }
  }
}
144 |
--------------------------------------------------------------------------------
/cassie-serversets/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 4.0.0
4 | com.twitter
5 | cassie-serversets
6 | jar
7 | 0.25.1-SNAPSHOT
8 |
9 | com.twitter
10 | scala-parent-292
11 | 0.0.4
12 | ../../parents/scala-parent-292/pom.xml
13 |
14 |
15 | ${project.basedir}/../../.git
16 |
17 |
18 |
19 |
20 |
21 | com.twitter
22 | cassie-core
23 | 0.25.1-SNAPSHOT
24 |
25 |
26 | com.twitter
27 | finagle-serversets
28 | 6.0.2-SNAPSHOT
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/cassie-serversets/src/main/scala/com/twitter/cassie/serversets/ServerSetsCluster.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package com.twitter.cassie
16 |
17 | import com.google.common.collect.ImmutableSet
18 | import com.twitter.cassie.connection.CCluster
19 | import com.twitter.common.net.pool.DynamicHostSet._
20 | import com.twitter.common.quantity.{Amount, Time}
21 | import com.twitter.common.zookeeper.{ServerSet, ServerSetImpl, ZooKeeperClient}
22 | import com.twitter.finagle.stats.{ StatsReceiver, NullStatsReceiver }
23 | import com.twitter.finagle.tracing.{ Tracer, NullTracer }
24 | import com.twitter.finagle.zookeeper.ZookeeperServerSetCluster
25 | import com.twitter.thrift.ServiceInstance
26 | import java.net.{SocketAddress, InetSocketAddress}
27 | import scala.collection.JavaConversions
28 |
/** Adapts a finagle ZookeeperServerSetCluster to cassie's CCluster interface. */
class ZookeeperServerSetCCluster(serverSet: ServerSet)
  extends ZookeeperServerSetCluster(serverSet) with CCluster[SocketAddress] {
  // CCluster requires close; nothing here needs explicit teardown.
  def close {}
}
33 |
/**
 * A Cassandra cluster where nodes are discovered using ServerSets.
 *
 * {{{
 * import com.twitter.conversions.time._
 * val clusterName = "cluster"
 * val keyspace = "KeyspaceName"
 * val zkPath = "/twitter/service/cassandra/%s".format(clusterName)
 * val zkHosts = Seq(new InetSocketAddress("zookeeper.example.com", 2181))
 * val timeoutMillis = 1.minute.inMilliseconds.toInt
 * val stats = NullStatsReceiver // or OstrichStatsReceiver or whatever
 *
 * val cluster = new ServerSetsCluster(zkHosts, zkPath, timeoutMillis, stats)
 * val keyspace = cluster.keyspace(keyspace).connect()
 * }}}
 *
 * @param serverSet zookeeper ServerSet
 * @param stats a finagle stats receiver
 */
class ServerSetsCluster(serverSet: ServerSet, stats: StatsReceiver, tracer: Tracer.Factory) extends ClusterBase {

  // Used only to force an initial serverset resolution in keyspace().
  private class NoOpMonitor extends HostChangeMonitor[ServiceInstance] {
    override def onChange(hostSet: ImmutableSet[ServiceInstance]) = {}
  }

  /**
   * Constructor that takes an existing ZooKeeperClient and explicit zk path to a list of servers
   *
   * @param zkClient existing ZooKeeperClient
   * @param zkPath path to node where Cassandra hosts will exist under
   * @param stats a finagle stats receiver
   */
  def this(zkClient: ZooKeeperClient, zkPath: String, stats: StatsReceiver, tracer: Tracer.Factory) =
    this(new ServerSetImpl(zkClient, zkPath), stats, tracer)

  // Same as above, with a no-op tracer.
  def this(zkClient: ZooKeeperClient, zkPath: String, stats: StatsReceiver) =
    this(zkClient, zkPath, stats, NullTracer.factory)

  /**
   * Convenience constructor that creates a ZooKeeperClient using the specified hosts and timeout.
   *
   * @param zkAddresses list of some ZooKeeper hosts
   * @param zkPath path to node where Cassandra hosts will exist under
   * @param timeoutMillis timeout for ZooKeeper connection
   * @param stats a finagle stats receiver
   */
  def this(zkAddresses: Iterable[InetSocketAddress], zkPath: String, timeoutMillis: Int,
    stats: StatsReceiver = NullStatsReceiver) =
    this(new ZooKeeperClient(Amount.of(timeoutMillis, Time.MILLISECONDS),
      JavaConversions.asJavaIterable(zkAddresses)), zkPath, stats, NullTracer.factory)

  /**
   * Returns a [[com.twitter.cassie.KeyspaceBuilder]] instance.
   * @param name the keyspace's name
   */
  def keyspace(name: String): KeyspaceBuilder = {
    serverSet.monitor(new NoOpMonitor()) // will block until serverset ready
    val cluster = new ZookeeperServerSetCCluster(serverSet)
    KeyspaceBuilder(cluster, name, stats.scope("cassie").scope(name), tracer)
  }
}
93 |
--------------------------------------------------------------------------------
/cassie-stress/build.properties:
--------------------------------------------------------------------------------
1 | #Project properties
2 | #Mon May 09 12:21:31 PDT 2011
project.name=cassie-stress
4 |
--------------------------------------------------------------------------------
/cassie-stress/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 4.0.0
4 | com.twitter
5 | cassie-stress
6 | jar
7 | 0.25.1-SNAPSHOT
8 |
9 | com.twitter
10 | scala-parent-292
11 | 0.0.4
12 | ../../parents/scala-parent-292/pom.xml
13 |
14 |
15 | ${project.basedir}/../../.git
16 |
17 |
18 |
19 |
20 |
21 | com.twitter
22 | cassie-core
23 | 0.25.1-SNAPSHOT
24 |
25 |
26 | com.twitter
27 | finagle-ostrich4
28 | 6.0.2-SNAPSHOT
29 |
30 |
31 | com.twitter
32 | finagle-stress
33 | 6.0.2-SNAPSHOT
34 |
35 |
36 |
37 |
--------------------------------------------------------------------------------
/cassie-stress/src/main/scala/com/twitter/cassie/stress/BatchMutationBuilderStresser.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 | package com.twitter.cassie.stress
15 |
16 | import com.twitter.cassie.codecs.{LongCodec, Utf8Codec}
17 | import com.twitter.cassie._
18 | import com.twitter.conversions.time._
19 | import com.twitter.finagle.stats.OstrichStatsReceiver
20 | import com.twitter.finagle.stress.Stats
21 | import com.twitter.util.{Time, ScheduledThreadPoolTimer}
22 |
/**
 * Stresses batch mutations: repeatedly writes a 100-column batch to a single
 * row of the "standard" column family in the local "test" keyspace, printing
 * finagle stats to stdout every ten seconds.
 */
class BatchMutationBuilderStresser extends Stresser {

  private[this] val cluster = new Cluster("localhost", new OstrichStatsReceiver())
  private[this] val keyspace = cluster.keyspace("test").connect()
  private[this] val cf = keyspace.columnFamily("standard", Utf8Codec, LongCodec, LongCodec)

  private[this] val beginTime = Time.now
  private[this] val timer = new ScheduledThreadPoolTimer()

  // Dump stats every 10 seconds, tagged with seconds elapsed since start.
  timer.schedule(10.seconds) {
    println("@@ %ds".format(beginTime.untilNow.inSeconds))
    Stats.prettyPrintStats()
  }

  private[this] val row = "foo"
  // Explicit dot call instead of postfix `toSeq`: postfix operators are
  // discouraged and require `scala.language.postfixOps` on newer compilers.
  private[this] val data: Seq[Int] = (0 until 100).toSeq

  /** Builds one 100-column batch, executes it, and reschedules itself when done. */
  def dispatchLoop() {
    val batch = cf.batch
    data foreach { col =>
      batch.insert(row, Column(col, col))
    }

    // `ensure` reschedules regardless of success or failure, keeping load constant.
    batch.execute ensure {
      dispatchLoop()
    }
  }

}
52 |
--------------------------------------------------------------------------------
/cassie-stress/src/main/scala/com/twitter/cassie/stress/CounterBatchMutationBuilderStresser.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 | package com.twitter.cassie.stress
15 |
16 | import com.twitter.cassie.codecs.{LongCodec, Utf8Codec}
17 | import com.twitter.cassie._
18 | import com.twitter.conversions.time._
19 | import com.twitter.finagle.stats.OstrichStatsReceiver
20 | import com.twitter.util.{Time, ScheduledThreadPoolTimer}
21 | import com.twitter.finagle.stress.Stats
22 |
/**
 * Stresses counter batch mutations: repeatedly increments 100 counter columns
 * of a single row in the "counters" counter column family of the local "test"
 * keyspace, printing finagle stats to stdout every ten seconds.
 */
class CounterBatchMutationBuilderStresser extends Stresser {

  private[this] val cluster = new Cluster("localhost", new OstrichStatsReceiver())
  private[this] val keyspace = cluster.keyspace("test").connect()
  private[this] val cf = keyspace.counterColumnFamily("counters", Utf8Codec, LongCodec)

  private[this] val beginTime = Time.now
  private[this] val timer = new ScheduledThreadPoolTimer()

  // Dump stats every 10 seconds, tagged with seconds elapsed since start.
  timer.schedule(10.seconds) {
    println("@@ %ds".format(beginTime.untilNow.inSeconds))
    Stats.prettyPrintStats()
  }

  private[this] val row = "foo"
  // Explicit dot call instead of postfix `toSeq`: postfix operators are
  // discouraged and require `scala.language.postfixOps` on newer compilers.
  private[this] val data: Seq[Int] = (0 until 100).toSeq

  /** Builds one batch of 100 counter increments, executes it, and reschedules itself. */
  def dispatchLoop() {
    val batch = cf.batch
    data foreach { col =>
      batch.insert(row, CounterColumn(col, 1))
    }

    // `ensure` reschedules regardless of success or failure, keeping load constant.
    batch.execute ensure {
      dispatchLoop()
    }
  }

}
52 |
--------------------------------------------------------------------------------
/cassie-stress/src/main/scala/com/twitter/cassie/stress/Main.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 | package com.twitter.cassie.stress
15 |
/**
 * Entry point for the cassie stress tool.
 *
 * Recognized flags (first argument only):
 *  -batchCounters : stress counter batch mutations
 *  -batch         : stress standard batch mutations (also the default for
 *                   no/unrecognized arguments)
 */
object Main {
  def main(args: Array[String]) {

    // Use `_` for the unused remainder of the argument list (was bound to an
    // unused `tail`, which trips unused-binding lint).
    val stresser = args.toList match {
      case "-batchCounters" :: _ => new CounterBatchMutationBuilderStresser()
      case "-batch" :: _ => new BatchMutationBuilderStresser()
      case _ => new BatchMutationBuilderStresser()
    }

    // Ten concurrent dispatch loops per stresser.
    stresser.run(10)
  }
}
28 |
--------------------------------------------------------------------------------
/cassie-stress/src/main/scala/com/twitter/cassie/stress/Stresser.scala:
--------------------------------------------------------------------------------
1 | // Copyright 2012 Twitter, Inc.
2 |
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 |
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 | package com.twitter.cassie.stress
15 |
/**
 * A stress driver: `run` kicks off a number of independent dispatch loops.
 */
trait Stresser {

  /** Issues one unit of work; implementations reschedule themselves. */
  def dispatchLoop()

  /** Starts `concurrency` dispatch loops. */
  def run(concurrency: Int) {
    var started = 0
    while (started < concurrency) {
      dispatchLoop()
      started += 1
    }
  }
}
24 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 4.0.0
4 | com.twitter
5 | cassie
6 | pom
7 | 0.25.0-SNAPSHOT
8 |
9 |
10 | artifactory-release
11 | Twitter Artifactory Releases
12 | http://artifactory.local.twitter.com/libs-releases-local
13 |
14 |
15 | artifactory-snapshot
16 | Twitter Artifactory Snapshots
17 | http://artifactory.local.twitter.com/libs-snapshots-local
18 |
19 |
20 |
21 | cassie-core
22 | cassie-hadoop
23 | cassie-serversets
24 | cassie-stress
25 |
26 |
27 |
--------------------------------------------------------------------------------