├── .gitignore ├── .gitmodules ├── README.md ├── README_DEMO.md ├── admin ├── .gitignore ├── README.md ├── build.gradle ├── config-example │ ├── commands.json │ ├── config.properties │ └── jaas.conf └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── nucypher │ │ │ └── kafka │ │ │ └── admin │ │ │ ├── AdminHandler.java │ │ │ ├── AdminZooKeeperHandler.java │ │ │ ├── Console.java │ │ │ └── databind │ │ │ ├── Command.java │ │ │ ├── CommandFactory.java │ │ │ └── CommandType.java │ └── resources │ │ └── log4j2.xml │ └── test │ ├── java │ └── com │ │ └── nucypher │ │ └── kafka │ │ └── admin │ │ ├── AdminHandlerTest.java │ │ ├── CommandFactoryTest.java │ │ └── ZooKeeperHandlerTest.java │ └── resources │ ├── command.json │ ├── commands.json │ ├── jaas_test.conf │ └── wrong_commands.json ├── build.gradle ├── build_project.sh ├── clients ├── build.gradle └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── nucypher │ │ │ └── kafka │ │ │ └── clients │ │ │ ├── MessageSerDeConfig.java │ │ │ ├── StructuredMessageSerDeConfig.java │ │ │ ├── decrypt │ │ │ ├── AesMessageDeserializer.java │ │ │ ├── AesMessageDeserializerConfig.java │ │ │ ├── AesStructuredMessageDeserializer.java │ │ │ └── AesStructuredMessageDeserializerConfig.java │ │ │ └── encrypt │ │ │ ├── AesMessageSerializer.java │ │ │ ├── AesMessageSerializerConfig.java │ │ │ ├── AesStructuredMessageSerializer.java │ │ │ └── AesStructuredMessageSerializerConfig.java │ └── resources │ │ ├── P521.pem │ │ ├── consumer.properties │ │ └── producer.properties │ └── test │ └── groovy │ └── com │ └── nucypher │ └── kafka │ └── clients │ ├── AesMessageEncryptorDecryptorSpec.groovy │ └── StructuredMessageSerializeDeserializeSpec.groovy ├── commons ├── build.gradle └── src │ ├── main │ └── java │ │ ├── com │ │ └── nucypher │ │ │ └── kafka │ │ │ ├── Constants.java │ │ │ ├── DefaultProvider.java │ │ │ ├── INamed.java │ │ │ ├── cipher │ │ │ ├── CipherFactory.java │ │ │ ├── ICipher.java │ │ │ ├── JCECipher.java │ │ │ └── OpenSSLCipher.java │ │ 
│ ├── clients │ │ │ ├── EncryptedDataEncryptionKey.java │ │ │ ├── Message.java │ │ │ ├── MessageHandler.java │ │ │ ├── ReEncryptionHandler.java │ │ │ ├── ReEncryptionHandlerConfigs.java │ │ │ └── granular │ │ │ │ ├── AbstractAvroDataAccessor.java │ │ │ │ ├── AvroDataAccessor.java │ │ │ │ ├── AvroSchemaLessDataAccessor.java │ │ │ │ ├── DataFormat.java │ │ │ │ ├── JsonDataAccessor.java │ │ │ │ ├── OneMessageDataAccessor.java │ │ │ │ ├── StructuredDataAccessor.java │ │ │ │ └── StructuredMessageHandler.java │ │ │ ├── encrypt │ │ │ ├── DataEncryptionKeyManager.java │ │ │ └── ReEncryptionKeyManager.java │ │ │ ├── errors │ │ │ └── CommonException.java │ │ │ ├── utils │ │ │ ├── AESKeyGenerators.java │ │ │ ├── AvroUtils.java │ │ │ ├── ByteUtils.java │ │ │ ├── EncryptionAlgorithmUtils.java │ │ │ ├── GranularUtils.java │ │ │ ├── KeyType.java │ │ │ ├── KeyUtils.java │ │ │ ├── StringUtils.java │ │ │ ├── SubkeyGenerator.java │ │ │ └── WrapperReEncryptionKey.java │ │ │ └── zk │ │ │ ├── BaseZooKeeperHandler.java │ │ │ ├── Channel.java │ │ │ ├── ClientType.java │ │ │ ├── EncryptionType.java │ │ │ └── KeyHolder.java │ │ └── org │ │ └── apache │ │ └── avro │ │ └── file │ │ └── GenericDataFileWriter.java │ └── test │ ├── groovy │ └── com │ │ └── nucypher │ │ └── kafka │ │ ├── cipher │ │ └── CipherSpec.groovy │ │ └── clients │ │ ├── MessageHandlerSpec.groovy │ │ ├── MessageSpec.groovy │ │ └── granular │ │ ├── AvroDataAccessorSpec.groovy │ │ ├── AvroSchemaLessDataAccessorSpec.groovy │ │ ├── JsonDataAccessorSpec.groovy │ │ └── StructuredMessageHandlerSpec.groovy │ ├── java │ └── com │ │ └── nucypher │ │ └── kafka │ │ ├── TestUtils.java │ │ ├── clients │ │ └── granular │ │ │ ├── AvroTestUtils.java │ │ │ └── StructuredDataAccessorStub.java │ │ ├── encrypt │ │ ├── DataEncryptionKeyManagerTest.java │ │ └── ReEncryptionKeyManagerTest.java │ │ ├── utils │ │ ├── GranularUtilsTest.java │ │ ├── KeyUtilsAlgorithmTest.java │ │ ├── KeyUtilsTest.java │ │ └── SubkeyGeneratorTest.java │ │ └── zk │ │ 
├── BaseZooKeeperHandlerTest.java │ │ ├── DataUtils.java │ │ └── ZooKeeperSASLResource.java │ └── resources │ ├── P521.pem │ ├── jaas_test.conf │ ├── log4j2.xml │ ├── private-key-prime256v1-1.pem │ ├── private-key-prime256v1-2.pem │ ├── private-key-secp521r1-1.pem │ ├── private-key-secp521r1-2.pem │ ├── public-key-prime256v1-1.pem │ ├── public-key-prime256v1-2.pem │ ├── public-key-secp521r1-1.pem │ └── public-key-secp521r1-2.pem ├── dependencies_libs.gradle ├── examples ├── build.gradle └── src │ └── main │ ├── java │ └── com │ │ └── nucypher │ │ └── kafka │ │ ├── clients │ │ └── example │ │ │ ├── general │ │ │ ├── StringConsumer.java │ │ │ └── StringProducer.java │ │ │ ├── granular │ │ │ ├── AvroConsumer.java │ │ │ ├── AvroProducer.java │ │ │ ├── AvroSchemaLessConsumer.java │ │ │ ├── AvroSchemaLessProducer.java │ │ │ ├── JsonConsumer.java │ │ │ ├── JsonProducer.java │ │ │ └── SchemaRegistry.java │ │ │ └── utils │ │ │ └── JaasUtils.java │ │ └── proxy │ │ ├── ProxyFromProperties.java │ │ ├── TransparentProxyFromInstance.java │ │ └── benchmark │ │ ├── ConsumerBenchmark.java │ │ └── ProducerBenchmark.java │ └── resources │ ├── P521.pem │ ├── consumer.properties │ ├── generated.reduced.json │ ├── jaas.conf │ ├── jaas_proxy.conf │ ├── log4j2.xml │ ├── producer.properties │ ├── proxy.properties │ └── schema-registry.properties ├── gradle.properties ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── initialize_project.sh ├── proxy ├── build.gradle ├── config-example │ ├── jaas_proxy.conf │ └── proxy-broker.properties └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── nucypher │ │ │ └── kafka │ │ │ └── proxy │ │ │ ├── Acceptor.java │ │ │ ├── BrokerChannelBuilders.java │ │ │ ├── ClientBrokerChannelBuilder.java │ │ │ ├── Processor.java │ │ │ ├── ProxyServer.java │ │ │ ├── ProxyType.java │ │ │ ├── Utils.java │ │ │ ├── config │ │ │ ├── AbstractProxyConfig.java │ │ │ └── ProxyConfig.java │ │ │ └── handler │ │ │ ├── 
MessageHandler.java │ │ │ ├── MessageHandlerRouter.java │ │ │ └── MessageTransformer.java │ └── resources │ │ └── log4j2.xml │ └── test │ └── resources │ └── jaas.conf ├── screenshots ├── EC_Keys.png ├── Full_1.png ├── Full_2.png ├── Full_3.png ├── Full_4.png ├── Full_5.png ├── Full_6.png ├── Granular_1.png ├── Granular_10.png ├── Granular_11.png ├── Granular_12.png ├── Granular_13.png ├── Granular_2.png ├── Granular_3.png ├── Granular_4.png ├── Granular_5.png ├── Granular_6.png ├── Granular_7.png ├── Granular_8.png ├── Granular_9.png ├── Kafka.png ├── Simple.png └── ZooKeeper.png ├── settings.gradle └── tools ├── aes.256.java.files ├── UnlimitedJCEPolicyJDK7.zip ├── UnlimitedJCEPolicyJDK8 │ └── README.txt ├── jce_policy-8.zip └── original │ └── security │ ├── blacklist │ ├── blacklisted.certs │ ├── cacerts │ ├── java.policy │ ├── java.security │ ├── javaws.policy │ └── trusted.libraries └── create.patch ├── apply_patch.sh ├── create_patch.sh └── kafka-run-class.sh /.gitignore: -------------------------------------------------------------------------------- 1 | docs/ 2 | 3 | # Created by .ignore support plugin (hsz.mobi) 4 | ### JetBrains template 5 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion 6 | 7 | *.iml 8 | 9 | ## Directory-based project format: 10 | .idea/ 11 | # if you remove the above rule, at least ignore the following: 12 | 13 | # User-specific stuff: 14 | # .idea/workspace.xml 15 | # .idea/tasks.xml 16 | # .idea/dictionaries 17 | 18 | # Sensitive or high-churn files: 19 | # .idea/dataSources.ids 20 | # .idea/dataSources.xml 21 | # .idea/sqlDataSources.xml 22 | # .idea/dynamic.xml 23 | # .idea/uiDesigner.xml 24 | 25 | # Gradle: 26 | # .idea/gradle.xml 27 | # .idea/libraries 28 | 29 | # Mongo Explorer plugin: 30 | # .idea/mongoSettings.xml 31 | 32 | ## File-based project format: 33 | *.ipr 34 | *.iws 35 | 36 | ## Plugin-specific files: 37 | 38 | # IntelliJ 39 | /out/ 40 | 41 | # mpeltonen/sbt-idea plugin 42 | 
.idea_modules/ 43 | 44 | # JIRA plugin 45 | atlassian-ide-plugin.xml 46 | 47 | # Crashlytics plugin (for Android Studio and IntelliJ) 48 | com_crashlytics_export_strings.xml 49 | crashlytics.properties 50 | crashlytics-build.properties 51 | 52 | 53 | ### Java template 54 | *.class 55 | 56 | # Mobile Tools for Java (J2ME) 57 | .mtj.tmp/ 58 | 59 | # Package Files # 60 | *.jar 61 | *.war 62 | *.ear 63 | 64 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 65 | hs_err_pid* 66 | 67 | .metadata 68 | .recommenders 69 | .project 70 | .classpath 71 | .settings 72 | target 73 | 74 | classes 75 | *.agent 76 | 77 | *main.log 78 | 79 | .vscode 80 | bower_components 81 | node_modules 82 | *.log 83 | 84 | 85 | # Created by https://www.gitignore.io/api/gradle,eclipse,maven,jetbrains 86 | 87 | ### Eclipse ### 88 | 89 | .metadata 90 | bin/ 91 | tmp/ 92 | *.tmp 93 | *.bak 94 | *.swp 95 | *~.nib 96 | local.properties 97 | .settings/ 98 | .loadpath 99 | .recommenders 100 | 101 | # Eclipse Core 102 | .project 103 | 104 | # External tool builders 105 | .externalToolBuilders/ 106 | 107 | # Locally stored "Eclipse launch configurations" 108 | *.launch 109 | 110 | # PyDev specific (Python IDE for Eclipse) 111 | *.pydevproject 112 | 113 | # CDT-specific (C/C++ Development Tooling) 114 | .cproject 115 | 116 | # JDT-specific (Eclipse Java Development Tools) 117 | .classpath 118 | 119 | # Java annotation processor (APT) 120 | .factorypath 121 | 122 | # PDT-specific (PHP Development Tools) 123 | .buildpath 124 | 125 | # sbteclipse plugin 126 | .target 127 | 128 | # Tern plugin 129 | .tern-project 130 | 131 | # TeXlipse plugin 132 | .texlipse 133 | 134 | # STS (Spring Tool Suite) 135 | .springBeans 136 | 137 | # Code Recommenders 138 | .recommenders/ 139 | 140 | 141 | ### Maven ### 142 | target/ 143 | pom.xml.tag 144 | pom.xml.releaseBackup 145 | pom.xml.versionsBackup 146 | pom.xml.next 147 | release.properties 148 | dependency-reduced-pom.xml 149 | 
buildNumber.properties 150 | .mvn/timing.properties 151 | 152 | # Exclude maven wrapper 153 | !/.mvn/wrapper/maven-wrapper.jar 154 | 155 | 156 | ### JetBrains ### 157 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm 158 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 159 | 160 | # User-specific stuff: 161 | .idea/workspace.xml 162 | .idea/tasks.xml 163 | 164 | # Sensitive or high-churn files: 165 | .idea/dataSources/ 166 | .idea/dataSources.ids 167 | .idea/dataSources.xml 168 | .idea/dataSources.local.xml 169 | .idea/sqlDataSources.xml 170 | .idea/dynamic.xml 171 | .idea/uiDesigner.xml 172 | 173 | # Gradle: 174 | .idea/gradle.xml 175 | .idea/libraries 176 | 177 | # Mongo Explorer plugin: 178 | .idea/mongoSettings.xml 179 | 180 | ## File-based project format: 181 | *.iws 182 | 183 | ## Plugin-specific files: 184 | 185 | # IntelliJ 186 | /out/ 187 | 188 | # mpeltonen/sbt-idea plugin 189 | .idea_modules/ 190 | 191 | # JIRA plugin 192 | atlassian-ide-plugin.xml 193 | 194 | # Crashlytics plugin (for Android Studio and IntelliJ) 195 | com_crashlytics_export_strings.xml 196 | crashlytics.properties 197 | crashlytics-build.properties 198 | fabric.properties 199 | 200 | ### JetBrains Patch ### 201 | # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 202 | 203 | # *.iml 204 | # modules.xml 205 | # .idea/misc.xml 206 | # *.ipr 207 | 208 | 209 | ### Gradle ### 210 | .gradle 211 | build/ 212 | bin/ 213 | 214 | # Ignore Gradle GUI config 215 | gradle-app.setting 216 | 217 | # Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) 218 | !gradle-wrapper.jar 219 | 220 | # Cache of project 221 | .gradletasknamecache 222 | 223 | # # Work around https://youtrack.jetbrains.com/issue/IDEA-116898 224 | # gradle/wrapper/gradle-wrapper.properties 225 | 226 | keys/*.pem 227 | out/ 
-------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "kafka"] 2 | path = kafka 3 | url = https://github.com/nucypher/kafka-oss 4 | [submodule "crypto"] 5 | path = crypto 6 | url = https://github.com/nucypher/nucypher-crypto-oss 7 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Run initialize_project.sh 2 | 3 | After git clone run: initialize_project.sh 4 | 5 | # Project structure 6 | 7 | 8 | ## External submodules 9 | 10 | ### nucypher-crypto-oss 11 | ### kafka-oss (0.10.1-encrypted branch) 12 | 13 | 14 | ## Java features for AES 256 bit 15 | 16 | http://docs.oracle.com/javase/8/docs/technotes/guides/security/SunProviders.html#importlimits 17 | If stronger algorithms are needed (for example, AES with 256-bit keys), the JCE Unlimited Strength Jurisdiction Policy Files must be obtained and installed in the JDK/JRE. 18 | 19 | Need to download: 20 | Java Cryptography Extension (JCE) Unlimited Strength Jurisdiction Policy Files for JDK/JRE 21 | 22 | and install (overwrite) files in 23 | $JAVA_HOME/jre/lib/security 24 | 25 | 26 | ## Create patch for Kafka 27 | 28 | 1. 
Run 29 | ```bash 30 | tools/create.patch/create_patch.sh 31 | ``` 32 | directory *patch* will be created with the following structure: 33 | 34 | ```bash 35 | bin/ 36 | libs/ 37 | 38 | bin/kafka-run-class.sh - added NuCypher jar's to Kafka class path 39 | libs/kafka_2.10-1.0-SNAPSHOT.jar - patched core Kafka jar 40 | 41 | libs/nucypher/ - core NuCypher jar's 42 | nucypher-kafka-admin-1.0-SNAPSHOT.jar 43 | nucypher-kafka-clients-1.0-SNAPSHOT.jar 44 | nucypher-kafka-commons-1.0-SNAPSHOT.jar 45 | 46 | libs/nucypher/lib - 3rd party jar's for NuCypher core jar's 47 | ``` 48 | 49 | and a tar.gz archive nucypher-patch-kafka_2.10-1.0-SNAPSHOT.tar.gz 50 | it will contains also a script to patch Kafka 51 | tools/create.patch/apply_patch.sh 52 | 53 | 54 | 2. Run 55 | 56 | ```bash 57 | tools/create.patch/apply_patch.sh /opt/kafka 58 | ``` 59 | 60 | Need to specify a path to Kafka directory, for instance: Kafka is located in /opt/kafka so 61 | 62 | ```bash 63 | tools/create.patch/apply_patch.sh /opt/kafka 64 | ``` 65 | 66 | 67 | -------------------------------------------------------------------------------- /admin/.gitignore: -------------------------------------------------------------------------------- 1 | *.html 2 | /build/ 3 | -------------------------------------------------------------------------------- /admin/build.gradle: -------------------------------------------------------------------------------- 1 | group 'com.nucypher.kafka' 2 | 3 | apply plugin: 'java' 4 | apply plugin: 'idea' 5 | apply plugin: 'eclipse' 6 | apply plugin: 'maven' 7 | apply plugin: 'application' 8 | 9 | sourceCompatibility = 1.8 10 | targetCompatibility = 1.8 11 | 12 | mainClassName = 'com.nucypher.kafka.admin.Console' 13 | applicationName = 'nucypher-kafka-admin' 14 | 15 | dependencies { 16 | compile project(':commons') 17 | 18 | compile "org.apache.logging.log4j:log4j-slf4j-impl:$log4jVersion" 19 | compile "org.apache.logging.log4j:log4j-1.2-api:$log4jVersion" 20 | compile 
"net.sf.jopt-simple:jopt-simple:$joptSimpleVersion" 21 | compile("com.101tec:zkclient:$zkclientVersion") { 22 | exclude group: "org.slf4j", module: 'slf4j-api' 23 | exclude group: "org.slf4j", module: 'slf4j-log4j12' 24 | exclude group: "log4j", module: 'log4j' 25 | } 26 | 27 | testCompile project(path: ':commons', configuration: 'testArchives') 28 | testCompile "junit:junit:$junitVersion" 29 | testCompile "org.apache.curator:curator-test:$curatorVersion" 30 | testCompile "org.apache.curator:curator-framework:$curatorVersion" 31 | testCompile "org.mockito:mockito-core:$mockitoVersion" 32 | testCompile "org.powermock:powermock-api-mockito:$powermockVersion" 33 | testCompile "org.powermock:powermock-module-junit4:$powermockVersion" 34 | testCompile "org.powermock:powermock-module-junit4-rule:$powermockVersion" 35 | } 36 | 37 | compileJava { 38 | options.encoding = 'UTF-8' 39 | } 40 | 41 | task copyToLib(type: Copy) { 42 | 43 | into "$buildDir/libs/lib" 44 | from configurations.runtime 45 | 46 | doLast { 47 | jar { 48 | archiveName = "$applicationName-${version}.${extension}" 49 | 50 | manifest { 51 | attributes("Main-Class": mainClassName) 52 | attributes("Class-Path": configurations.runtime.collect { "lib/$it.name" }.join(' ')) 53 | } 54 | } 55 | } 56 | 57 | } 58 | 59 | jar.dependsOn copyToLib 60 | 61 | applicationDistribution.from("config-example/") { 62 | into "config-example" 63 | } 64 | 65 | startScripts { 66 | doLast { 67 | def windowsScriptFile = file getWindowsScript() 68 | def unixScriptFile = file getUnixScript() 69 | windowsScriptFile.text = windowsScriptFile.text.replaceAll( 70 | 'CLASSPATH=\\S*', 'CLASSPATH=%APP_HOME%\\\\lib\\\\*') 71 | unixScriptFile.text = unixScriptFile.text.replaceAll( 72 | 'CLASSPATH=\\S*\n', 'CLASSPATH=\\$APP_HOME/lib/*\n') 73 | } 74 | } -------------------------------------------------------------------------------- /admin/config-example/commands.json: 
-------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "command-type": "generate", 4 | "curve-name": "secp521r1", 5 | "private-key-path": "keys/master-private-key.pem", 6 | "public-key-path": "keys/master-public-key.pem" 7 | }, 8 | { 9 | "command-type": "generate", 10 | "curve-name": "secp521r1", 11 | "private-key-path": "keys/producer-private-key.pem", 12 | "public-key-path": "keys/producer-public-key.pem" 13 | }, 14 | { 15 | "command-type": "generate", 16 | "curve-name": "secp521r1", 17 | "private-key-path": "keys/consumer-private-key.pem", 18 | "public-key-path": "keys/consumer-public-key.pem" 19 | }, 20 | { 21 | "command-type": "add_channel", 22 | "channel-name": "full", 23 | "channel-type": "full" 24 | }, 25 | { 26 | "command-type": "add_key", 27 | "master-key": "keys/master-private-key.pem", 28 | "client-key": "keys/producer-private-key.pem", 29 | "client-type": "producer", 30 | "client-name": "alice", 31 | "channel-name": "full", 32 | "expired-days": "365" 33 | }, 34 | { 35 | "command-type": "add_key", 36 | "master-key": "keys/master-private-key.pem", 37 | "client-key": "keys/consumer-private-key.pem", 38 | "client-type": "consumer", 39 | "client-name": "alice", 40 | "channel-name": "full", 41 | "expired-days": "365" 42 | }, 43 | { 44 | "command-type": "add_key", 45 | "client-type": "producer", 46 | "client-name": "alice", 47 | "channel-name": "full2" 48 | }, 49 | { 50 | "command-type": "add_key", 51 | "master-key": "keys/master-private-key.pem", 52 | "client-key": "keys/consumer-public-key.pem", 53 | "client-type": "consumer", 54 | "client-name": "alice", 55 | "channel-name": "full2", 56 | "expired-days": "365" 57 | }, 58 | { 59 | "command-type": "add_key", 60 | "client-type": "producer", 61 | "client-name": "alice", 62 | "channel-name": "full3" 63 | }, 64 | { 65 | "command-type": "add_key", 66 | "client-type": "consumer", 67 | "client-name": "alice", 68 | "channel-name": "full3" 69 | }, 70 | { 71 | "command-type": 
"add_channel", 72 | "channel-name": "granular", 73 | "channel-type": "granular", 74 | "channel-data-format": "json" 75 | }, 76 | { 77 | "command-type": "add_key", 78 | "master-key": "keys/master-private-key.pem", 79 | "client-key": "keys/producer-private-key.pem", 80 | "client-type": "producer", 81 | "client-name": "alice", 82 | "channel-name": "granular", 83 | "expired-days": "365", 84 | "fields": ["a.1", "b.b", "c", "d", "e.e.e.1"] 85 | }, 86 | { 87 | "command-type": "add_key", 88 | "master-key": "keys/master-private-key.pem", 89 | "client-key": "keys/consumer-private-key.pem", 90 | "client-type": "consumer", 91 | "client-name": "alice", 92 | "channel-name": "granular", 93 | "expired-days": "365", 94 | "fields": ["a.1", "b.b", "d", "e.e.e.1"] 95 | }, 96 | { 97 | "command-type": "add_key", 98 | "master-key": "keys/master-private-key.pem", 99 | "client-key": "keys/master-private-key.pem", 100 | "client-type": "producer", 101 | "client-name": "alice", 102 | "channel-name": "granular2", 103 | "expired-days": "365", 104 | "fields": ["a.1", "b.b", "c", "d", "e.e.e.1"], 105 | "channel-data-format": "json" 106 | }, 107 | { 108 | "command-type": "add_key", 109 | "master-key": "keys/master-private-key.pem", 110 | "client-key": "keys/consumer-public-key.pem", 111 | "client-type": "consumer", 112 | "client-name": "alice", 113 | "channel-name": "granular2", 114 | "expired-days": "365", 115 | "fields": ["a.1", "b.b", "d", "e.e.e.1"] 116 | } 117 | ] -------------------------------------------------------------------------------- /admin/config-example/config.properties: -------------------------------------------------------------------------------- 1 | # ZooKeeper host and port 2 | zookeeper.server=localhost:2181 3 | 4 | # Scheme for admin authorization. Available values are sasl and digest. 
5 | # See https://cwiki.apache.org/confluence/display/ZOOKEEPER/Zookeeper+and+SASL 6 | admin.scheme=sasl 7 | # Admin user name 8 | admin.user=admin 9 | # Admin password for digest authorization 10 | #admin.password=admin-password 11 | 12 | # Scheme for Kafka authorization. For now available only sasl 13 | kafka.scheme=sasl 14 | # Kafka user name 15 | kafka.user=kafka 16 | 17 | # Root path for keys in ZooKeeper 18 | keys.path=/keys/admin -------------------------------------------------------------------------------- /admin/config-example/jaas.conf: -------------------------------------------------------------------------------- 1 | Client { 2 | org.apache.zookeeper.server.auth.DigestLoginModule required 3 | username="admin" 4 | password="admin-password"; 5 | }; -------------------------------------------------------------------------------- /admin/src/main/java/com/nucypher/kafka/admin/databind/CommandType.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.admin.databind; 2 | 3 | import com.fasterxml.jackson.annotation.JsonCreator; 4 | 5 | /** 6 | * Command type 7 | */ 8 | public enum CommandType { 9 | 10 | /** 11 | * Generate and save the re-encryption key to the storage 12 | */ 13 | ADD_KEY, 14 | /** 15 | * Delete the re-encryption key from the storage 16 | */ 17 | DELETE_KEY, 18 | /** 19 | * Create the channel in the storage 20 | */ 21 | ADD_CHANNEL, 22 | /** 23 | * Delete the channel from the storage 24 | */ 25 | DELETE_CHANNEL, 26 | /** 27 | * Generate key pair and save it to the file or files 28 | */ 29 | GENERATE; 30 | 31 | @JsonCreator 32 | public static CommandType fromString(String key) { 33 | return key == null ? 
null : 34 | CommandType.valueOf(key.toUpperCase()); 35 | } 36 | 37 | } 38 | -------------------------------------------------------------------------------- /admin/src/main/resources/log4j2.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /admin/src/test/resources/command.json: -------------------------------------------------------------------------------- 1 | { 2 | "command-type": "add_key", 3 | "master-key": "masterKey", 4 | "client-key": "clientKey", 5 | "curve-name": "curveName", 6 | "client-type": "consumer", 7 | "client-name": "clientName", 8 | "channel-name": "channelName" 9 | } -------------------------------------------------------------------------------- /admin/src/test/resources/commands.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "command-type": "add_key", 4 | "encryption-algorithm": "elgamal", 5 | "master-key": "masterKey", 6 | "client-key": "clientKey", 7 | "curve-name": "curveName", 8 | "client-type": "cons", 9 | "key-type": "private", 10 | "client-name": "clientName", 11 | "channel-name": "channelName", 12 | "fields": ["a.c", "b"] 13 | }, 14 | { 15 | "command-type": "add_key", 16 | "master-key": "masterKey", 17 | "client-key": "clientKey", 18 | "client-type": "prod", 19 | "key-type": "pub", 20 | "client-name": "clientName", 21 | "channel-name": "channelName", 22 | "expired-date": "2017-01-01T00:00:00Z" 23 | }, 24 | { 25 | "command-type": "add_key", 26 | "master-key": "masterKey", 27 | "client-key": "clientKey", 28 | "client-type": "producer", 29 | "client-name": "clientName", 30 | "channel-name": "channelName", 31 | "expired-date": "2017-01-01T00:00:00Z", 32 | "fields": ["a.c", "b"], 33 | "channel-data-accessor": "com.nucypher.kafka.clients.granular.StructuredDataAccessorStub" 34 | }, 35 
| { 36 | "command-type": "add_key", 37 | "master-key": "masterKey", 38 | "client-key": "clientKey", 39 | "client-type": "producer", 40 | "client-name": "clientName", 41 | "channel-name": "channelName", 42 | "expired-days": "10", 43 | "fields": ["a.c", "b"], 44 | "channel-data-format": "json" 45 | }, 46 | { 47 | "command-type": "delete_key", 48 | "client-type": "producer", 49 | "client-name": "clientName", 50 | "channel-name": "channelName" 51 | }, 52 | { 53 | "command-type": "delete_key", 54 | "client-type": "consumer", 55 | "client-name": "clientName", 56 | "channel-name": "channelName", 57 | "fields": ["a.c"] 58 | }, 59 | { 60 | "command-type": "delete_channel", 61 | "channel-name": "channelName" 62 | }, 63 | { 64 | "command-type": "add_channel", 65 | "channel-name": "channelName" 66 | }, 67 | { 68 | "command-type": "add_channel", 69 | "channel-name": "channelName", 70 | "channel-type": "granular", 71 | "channel-data-accessor": "com.nucypher.kafka.clients.granular.StructuredDataAccessorStub" 72 | }, 73 | { 74 | "command-type": "add_channel", 75 | "channel-name": "channelName", 76 | "channel-type": "granular", 77 | "channel-data-format": "json" 78 | }, 79 | { 80 | "command-type": "add_key", 81 | "client-type": "consumer", 82 | "client-name": "clientName", 83 | "channel-name": "channelName" 84 | }, 85 | { 86 | "command-type": "generate", 87 | "curve-name": "curveName", 88 | "private-key-path": "privateKey" 89 | }, 90 | { 91 | "command-type": "generate", 92 | "curve-name": "curveName", 93 | "private-key-path": "privateKey", 94 | "public-key-path": "publicKey" 95 | } 96 | ] -------------------------------------------------------------------------------- /admin/src/test/resources/jaas_test.conf: -------------------------------------------------------------------------------- 1 | Server { 2 | org.apache.zookeeper.server.auth.DigestLoginModule required 3 | user_kafka="test" 4 | user_zkAdmin1="123"; 5 | }; 6 | 7 | Client { 8 | 
org.apache.zookeeper.server.auth.DigestLoginModule required 9 | username="zkAdmin1" 10 | password="123"; 11 | }; -------------------------------------------------------------------------------- /admin/src/test/resources/wrong_commands.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "command-type": "add_key", 4 | "master-key": "masterKey", 5 | "expired-date": "2017-01-01T00:00:00Z", 6 | "expired-days": "10" 7 | }, 8 | { 9 | "command-type": "add_key", 10 | "master-key": "masterKey", 11 | "client-key": "clientKey", 12 | "client-type": "prod", 13 | "key-type": "pub", 14 | "client-name": "clientName", 15 | "channel-name": "channelName", 16 | "channel-data-accessor": "com.nucypher.kafka.clients.granular.StructuredDataAccessorStub", 17 | "channel-data-format": "json" 18 | }, 19 | { 20 | "command-type": "delete_key", 21 | "fields": ["a.c", "b"] 22 | }, 23 | { 24 | "command-type": "delete_channel" 25 | }, 26 | { 27 | "command-type": "add_channel", 28 | "channel-type": "granular", 29 | "channel-data-accessor": "com.nucypher.kafka.clients.granular.StructuredDataAccessorStub", 30 | "channel-data-format": "json" 31 | }, 32 | { 33 | "command-type": "generate" 34 | } 35 | ] -------------------------------------------------------------------------------- /build.gradle: -------------------------------------------------------------------------------- 1 | description = 'NuCypher Kafka' 2 | 3 | group 'nucypher-kafka-as-module' 4 | 5 | task wrapper(type: Wrapper) { 6 | gradleVersion = "$gradleRootVersion" 7 | } 8 | 9 | buildscript { 10 | repositories { 11 | mavenLocal() 12 | mavenCentral() 13 | maven { 14 | url "https://plugins.gradle.org/m2/" 15 | } 16 | jcenter() 17 | } 18 | 19 | dependencies { 20 | classpath "gradle.plugin.gradle-plugins:jartest:1.0" 21 | classpath 'com.github.jengelman.gradle.plugins:shadow:1.2.4' 22 | } 23 | 24 | } 25 | 26 | allprojects { 27 | group = 'com.nucypher.kafka.encrypted' 28 | 29 | repositories { 30 
| mavenLocal() 31 | mavenCentral() 32 | maven { 33 | url "http://packages.confluent.io/maven/" 34 | } 35 | } 36 | 37 | ext { 38 | 39 | rootKafkaSubPath = ':kafka' 40 | 41 | forceGroup = [ 42 | // 'org.slf4j' : "$slf4jVersion", 43 | // 'ch.qos.logback' : "$logbackVersion" 44 | ] 45 | 46 | replaceArtifact = [ 47 | // 'commons-logging:commons-logging': "org.slf4j:jcl-over-slf4j:$slf4jVersion", 48 | // 'log4j:log4j' : "org.slf4j:log4j-over-slf4j:$slf4jVersion", 49 | // 'org.slf4j:slf4j-log4j12' : "org.slf4j:slf4j-api:$slf4jVersion" 50 | ] 51 | } 52 | 53 | apply plugin: 'java' 54 | apply plugin: 'groovy' 55 | apply plugin: 'idea' 56 | apply plugin: 'eclipse' 57 | 58 | } 59 | 60 | subprojects { subProject -> 61 | 62 | apply from: file("${rootProject.projectDir}/dependencies_libs.gradle") 63 | 64 | dependencies { 65 | compile "org.bouncycastle:bcprov-jdk15on:$bouncyCastleVersion" 66 | compile "org.bouncycastle:bcpkix-jdk15on:$bouncyCastleVersion" 67 | } 68 | 69 | compileJava.mustRunAfter clean 70 | 71 | [compileJava, compileTestJava, javadoc]*.options*.encoding = 'UTF-8' 72 | } 73 | 74 | 75 | -------------------------------------------------------------------------------- /build_project.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # totally build project with all submodules 4 | #./gradlew clean :commons:build :admin:build :clients:build :examples:build -x test 5 | #&& 6 | # extra keys forced by kafka build 7 | ./gradlew clean build -x test -x checkstyleMain -x checkstyleTest -------------------------------------------------------------------------------- /clients/build.gradle: -------------------------------------------------------------------------------- 1 | group 'com.nucypher.kafka' 2 | 3 | sourceCompatibility = 1.7 4 | targetCompatibility = 1.7 5 | 6 | jar { 7 | archiveName = "nucypher-kafka-clients-${version}.${extension}" 8 | } 9 | 10 | dependencies { 11 | compile project(':commons') 12 | 13 | 
testCompile project(path: ':commons', configuration: 'testArchives') 14 | testCompile "org.spockframework:spock-core:$spockVersion" 15 | } 16 | -------------------------------------------------------------------------------- /clients/src/main/java/com/nucypher/kafka/clients/MessageSerDeConfig.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients; 2 | 3 | 4 | import com.nucypher.crypto.impl.ElGamalEncryptionAlgorithm; 5 | import com.nucypher.kafka.cipher.CipherFactory; 6 | import org.apache.kafka.common.config.AbstractConfig; 7 | import org.apache.kafka.common.config.ConfigDef; 8 | 9 | import java.util.Map; 10 | 11 | /** 12 | * Base class for configs for message serializers and deserializers 13 | */ 14 | public class MessageSerDeConfig extends AbstractConfig { 15 | 16 | public static final String DEK_ENCRYPTION_ALGORITHM_CONFIG = "encryption.dek.algorithm"; 17 | public static final String DEK_ENCRYPTION_ALGORITHM_DOC = 18 | "Encryption algorithm used for DEK encryption"; 19 | public static final String DEK_ENCRYPTION_ALGORITHM_DEFAULT = 20 | ElGamalEncryptionAlgorithm.class.getCanonicalName(); 21 | 22 | public static final String DATA_ENCRYPTION_PROVIDER_CONFIG = "encryption.data.provider"; 23 | public static final String DATA_ENCRYPTION_PROVIDER_DOC = "Provider used for data encryption"; 24 | public static final String DATA_ENCRYPTION_PROVIDER_DEFAULT = 25 | CipherFactory.CipherProvider.BOUNCY_CASTLE.toString(); 26 | 27 | public static final String DATA_ENCRYPTION_TRANFORMATION_CONFIG = "encryption.data.transformation"; 28 | public static final String DATA_ENCRYPTION_TRANFORMATION_DOC = "Transformation used for data encryption"; 29 | public static final String DATA_ENCRYPTION_TRANFORMATION_DEFAULT = "AES/GCM/NoPadding"; 30 | 31 | public static ConfigDef baseConfigDef() { 32 | return new ConfigDef() 33 | .define(DEK_ENCRYPTION_ALGORITHM_CONFIG, 34 | ConfigDef.Type.STRING, 35 | 
DEK_ENCRYPTION_ALGORITHM_DEFAULT, 36 | ConfigDef.Importance.HIGH, 37 | DEK_ENCRYPTION_ALGORITHM_DOC) 38 | .define(DATA_ENCRYPTION_PROVIDER_CONFIG, 39 | ConfigDef.Type.STRING, 40 | DATA_ENCRYPTION_PROVIDER_DEFAULT, 41 | ConfigDef.Importance.HIGH, 42 | DATA_ENCRYPTION_PROVIDER_DOC) 43 | .define(DATA_ENCRYPTION_TRANFORMATION_CONFIG, 44 | ConfigDef.Type.STRING, 45 | DATA_ENCRYPTION_TRANFORMATION_DEFAULT, 46 | ConfigDef.Importance.HIGH, 47 | DATA_ENCRYPTION_TRANFORMATION_DOC); 48 | } 49 | 50 | public MessageSerDeConfig(ConfigDef config, Map props) { 51 | super(config, props); 52 | } 53 | 54 | } 55 | -------------------------------------------------------------------------------- /clients/src/main/java/com/nucypher/kafka/clients/StructuredMessageSerDeConfig.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients; 2 | 3 | 4 | import org.apache.kafka.common.config.ConfigDef; 5 | 6 | /** 7 | * Base class for configs for granular serializers and deserializers 8 | */ 9 | public class StructuredMessageSerDeConfig { 10 | 11 | public static final String GRANULAR_DATA_ACCESSOR_CONFIG = 12 | "encryption.granular.data.accessor"; 13 | public static final String GRANULAR_DATA_ACCESSOR_DOC = 14 | "Structured data accessor for granular encryption"; 15 | 16 | 17 | public static ConfigDef addGranularConfigDef(ConfigDef configDef) { 18 | return configDef 19 | .define(GRANULAR_DATA_ACCESSOR_CONFIG, ConfigDef.Type.CLASS, 20 | ConfigDef.Importance.HIGH, GRANULAR_DATA_ACCESSOR_DOC); 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /clients/src/main/java/com/nucypher/kafka/clients/decrypt/AesMessageDeserializerConfig.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients.decrypt; 2 | 3 | import com.nucypher.kafka.clients.MessageSerDeConfig; 4 | import org.apache.kafka.clients.consumer.ConsumerConfig; 
5 | import org.apache.kafka.common.config.ConfigDef; 6 | import org.apache.kafka.common.serialization.ByteArrayDeserializer; 7 | 8 | import java.util.Map; 9 | 10 | /** 11 | * Configuration for {@link AesMessageDeserializer} 12 | */ 13 | public class AesMessageDeserializerConfig extends MessageSerDeConfig { 14 | 15 | public static final String PRIVATE_KEY_CONFIG = "encryption.private.key"; 16 | public static final String PRIVATE_KEY_DOC = "Path to the EC private key"; 17 | 18 | public static final String VALUE_DESERIALIZER_CLASS_CONFIG = "encryption.value.deserializer"; 19 | public static final String VALUE_DESERIALIZER_CLASS_DOC = 20 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_DOC; 21 | 22 | public static final String KEY_DESERIALIZER_CLASS_CONFIG = "encryption.key.deserializer"; 23 | public static final String KEY_DESERIALIZER_CLASS_DOC = 24 | ConsumerConfig.KEY_DESERIALIZER_CLASS_DOC; 25 | 26 | public static final String CACHE_DECRYPTION_CAPACITY_CONFIG = "cache.decryption.capacity"; 27 | public static final String CACHE_DECRYPTION_CAPACITY_DOC = "Decryption cache capacity"; 28 | public static final int CACHE_DECRYPTION_CAPACITY_DEFAULT = 200000; 29 | 30 | private static final ConfigDef CONFIG; 31 | 32 | static { 33 | CONFIG = MessageSerDeConfig.baseConfigDef() 34 | .define(PRIVATE_KEY_CONFIG, 35 | ConfigDef.Type.STRING, 36 | ConfigDef.Importance.HIGH, 37 | PRIVATE_KEY_DOC) 38 | .define(VALUE_DESERIALIZER_CLASS_CONFIG, 39 | ConfigDef.Type.CLASS, 40 | ByteArrayDeserializer.class, 41 | ConfigDef.Importance.HIGH, 42 | VALUE_DESERIALIZER_CLASS_DOC) 43 | .define(KEY_DESERIALIZER_CLASS_CONFIG, 44 | ConfigDef.Type.CLASS, 45 | ByteArrayDeserializer.class, 46 | ConfigDef.Importance.HIGH, 47 | KEY_DESERIALIZER_CLASS_DOC) 48 | .define(CACHE_DECRYPTION_CAPACITY_CONFIG, 49 | ConfigDef.Type.INT, 50 | CACHE_DECRYPTION_CAPACITY_DEFAULT, 51 | ConfigDef.Importance.LOW, 52 | CACHE_DECRYPTION_CAPACITY_DOC); 53 | } 54 | 55 | public static ConfigDef baseConfigDef() { 56 | return CONFIG; 
57 | } 58 | 59 | public AesMessageDeserializerConfig(Map props) { 60 | super(CONFIG, props); 61 | } 62 | 63 | public AesMessageDeserializerConfig(ConfigDef config, Map props) { 64 | super(config, props); 65 | } 66 | 67 | } 68 | -------------------------------------------------------------------------------- /clients/src/main/java/com/nucypher/kafka/clients/decrypt/AesStructuredMessageDeserializer.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients.decrypt; 2 | 3 | import com.nucypher.crypto.EncryptionAlgorithm; 4 | import com.nucypher.kafka.cipher.CipherFactory; 5 | import com.nucypher.kafka.clients.granular.DataFormat; 6 | import com.nucypher.kafka.clients.granular.StructuredDataAccessor; 7 | import com.nucypher.kafka.clients.granular.StructuredMessageHandler; 8 | import com.nucypher.kafka.errors.CommonException; 9 | import org.apache.kafka.common.config.AbstractConfig; 10 | import org.apache.kafka.common.serialization.Deserializer; 11 | 12 | import java.security.PrivateKey; 13 | import java.util.Map; 14 | 15 | /** 16 | * The structured message {@link Deserializer} which uses AES and encryption algorithm 17 | * 18 | * @param type to be deserialized into. 
19 | */ 20 | public class AesStructuredMessageDeserializer extends AesMessageDeserializer { 21 | 22 | private StructuredDataAccessor accessor; 23 | private StructuredMessageHandler structuredMessageHandler; 24 | private boolean isConfigured; 25 | 26 | /** 27 | * Constructor used by Kafka consumer 28 | */ 29 | public AesStructuredMessageDeserializer() { 30 | super(); 31 | isConfigured = false; 32 | } 33 | 34 | /** 35 | * @param deserializer Kafka deserializer 36 | * @param algorithmClass class of encryption algorithm 37 | * @param privateKey EC private key 38 | * @param format data format 39 | */ 40 | public AesStructuredMessageDeserializer( 41 | Deserializer deserializer, 42 | Class algorithmClass, 43 | PrivateKey privateKey, 44 | DataFormat format) { 45 | this(deserializer, algorithmClass, privateKey, format.getAccessorClass()); 46 | } 47 | 48 | /** 49 | * @param deserializer Kafka deserializer 50 | * @param algorithmClass class of encryption algorithm 51 | * @param privateKey EC private key 52 | * @param dataAccessorClass data accessor class 53 | */ 54 | public AesStructuredMessageDeserializer( 55 | Deserializer deserializer, 56 | Class algorithmClass, 57 | PrivateKey privateKey, 58 | Class dataAccessorClass) { 59 | this(deserializer, algorithmClass, privateKey, dataAccessorClass, 60 | null, null, null); 61 | } 62 | 63 | /** 64 | * @param deserializer Kafka deserializer 65 | * @param algorithmClass class of encryption algorithm 66 | * @param privateKey EC private key 67 | * @param dataAccessorClass data accessor class 68 | * @param decryptionCacheCapacity decryption cache capacity 69 | * @param provider data encryption provider 70 | * @param transformation data transformation 71 | */ 72 | public AesStructuredMessageDeserializer( 73 | Deserializer deserializer, 74 | Class algorithmClass, 75 | PrivateKey privateKey, 76 | Class dataAccessorClass, 77 | Integer decryptionCacheCapacity, 78 | CipherFactory.CipherProvider provider, 79 | String transformation) { 80 | 
super(deserializer, algorithmClass, privateKey, decryptionCacheCapacity, provider, transformation); 81 | try { 82 | accessor = dataAccessorClass.newInstance(); 83 | } catch (InstantiationException | IllegalAccessException e) { 84 | throw new CommonException(e); 85 | } 86 | structuredMessageHandler = new StructuredMessageHandler(messageHandler); 87 | isConfigured = true; 88 | } 89 | 90 | @Override 91 | public void configure(Map configs, boolean isKey) { 92 | super.configure(configs, isKey); 93 | if (!isConfigured) { 94 | AbstractConfig config = new AesStructuredMessageDeserializerConfig(configs); 95 | accessor = config.getConfiguredInstance( 96 | AesStructuredMessageDeserializerConfig.GRANULAR_DATA_ACCESSOR_CONFIG, 97 | StructuredDataAccessor.class); 98 | structuredMessageHandler = new StructuredMessageHandler(messageHandler); 99 | isConfigured = true; 100 | } 101 | accessor.configure(configs, isKey); 102 | } 103 | 104 | @Override 105 | public T deserialize(String topic, byte[] data) { 106 | byte[] decrypted = structuredMessageHandler.decrypt(topic, data, accessor); 107 | return deserializer.deserialize(topic, decrypted); 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /clients/src/main/java/com/nucypher/kafka/clients/decrypt/AesStructuredMessageDeserializerConfig.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients.decrypt; 2 | 3 | import com.nucypher.kafka.clients.StructuredMessageSerDeConfig; 4 | import org.apache.kafka.common.config.ConfigDef; 5 | 6 | import java.util.Map; 7 | 8 | /** 9 | * Configuration for {@link AesStructuredMessageDeserializer} 10 | */ 11 | public class AesStructuredMessageDeserializerConfig extends AesMessageDeserializerConfig { 12 | 13 | public static final String GRANULAR_DATA_ACCESSOR_CONFIG = 14 | StructuredMessageSerDeConfig.GRANULAR_DATA_ACCESSOR_CONFIG; 15 | 16 | private static final ConfigDef CONFIG; 17 | 
18 | static { /* NOTE(review): baseConfigDef() returns the parent's shared static CONFIG instance, and addGranularConfigDef() mutates it in place — this also alters AesMessageDeserializerConfig's CONFIG for every other user. Consider copying the ConfigDef before extending it; confirm against ConfigDef semantics. */ 19 | CONFIG = baseConfigDef(); 20 | StructuredMessageSerDeConfig.addGranularConfigDef(CONFIG); 21 | } 22 | 23 | public AesStructuredMessageDeserializerConfig(Map props) { 24 | super(CONFIG, props); 25 | } 26 | 27 | } 28 | -------------------------------------------------------------------------------- /clients/src/main/java/com/nucypher/kafka/clients/encrypt/AesMessageSerializerConfig.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients.encrypt; 2 | 3 | import com.nucypher.kafka.clients.MessageSerDeConfig; 4 | import org.apache.kafka.clients.producer.ProducerConfig; 5 | import org.apache.kafka.common.config.ConfigDef; 6 | import org.apache.kafka.common.serialization.ByteArraySerializer; 7 | 8 | import java.util.Map; 9 | 10 | /** 11 | * Configuration for {@link AesMessageSerializer} 12 | */ 13 | public class AesMessageSerializerConfig extends MessageSerDeConfig { 14 | 15 | public static final String PUBLIC_KEY_CONFIG = "encryption.public.key"; 16 | public static final String PUBLIC_KEY_DOC = "Path to the EC public key"; 17 | 18 | public static final String VALUE_SERIALIZER_CLASS_CONFIG = "encryption.value.serializer"; 19 | public static final String VALUE_SERIALIZER_CLASS_DOC = 20 | ProducerConfig.VALUE_SERIALIZER_CLASS_DOC; 21 | 22 | public static final String KEY_SERIALIZER_CLASS_CONFIG = "encryption.key.serializer"; 23 | public static final String KEY_SERIALIZER_CLASS_DOC = 24 | ProducerConfig.KEY_SERIALIZER_CLASS_DOC; 25 | 26 | public static final String CACHE_ENCRYPTION_CAPACITY_CONFIG = "cache.encryption.capacity"; 27 | public static final String CACHE_ENCRYPTION_CAPACITY_DOC = "Encryption cache capacity"; 28 | public static final int CACHE_ENCRYPTION_CAPACITY_DEFAULT = 200000; 29 | 30 | public static final String MAX_USING_DEK_CONFIG = "encryption.dek.max.using"; 31 | public static final String MAX_USING_DEK_DOC = "Max number of using each DEK"; 32 | public static final int
MAX_USING_DEK_DEFAULT = 1000; 33 | 34 | private static final ConfigDef CONFIG; 35 | 36 | static { 37 | CONFIG = MessageSerDeConfig.baseConfigDef() 38 | .define(PUBLIC_KEY_CONFIG, 39 | ConfigDef.Type.STRING, 40 | ConfigDef.Importance.HIGH, 41 | PUBLIC_KEY_DOC) 42 | .define(VALUE_SERIALIZER_CLASS_CONFIG, 43 | ConfigDef.Type.CLASS, 44 | ByteArraySerializer.class, 45 | ConfigDef.Importance.HIGH, 46 | VALUE_SERIALIZER_CLASS_DOC) 47 | .define(KEY_SERIALIZER_CLASS_CONFIG, 48 | ConfigDef.Type.CLASS, 49 | ByteArraySerializer.class, 50 | ConfigDef.Importance.HIGH, 51 | KEY_SERIALIZER_CLASS_DOC) 52 | .define(CACHE_ENCRYPTION_CAPACITY_CONFIG, 53 | ConfigDef.Type.INT, 54 | CACHE_ENCRYPTION_CAPACITY_DEFAULT, 55 | ConfigDef.Importance.LOW, 56 | CACHE_ENCRYPTION_CAPACITY_DOC) 57 | .define(MAX_USING_DEK_CONFIG, 58 | ConfigDef.Type.INT, 59 | MAX_USING_DEK_DEFAULT, 60 | ConfigDef.Importance.LOW, 61 | MAX_USING_DEK_DOC); 62 | } 63 | 64 | public static ConfigDef baseConfigDef() { 65 | return CONFIG; 66 | } 67 | 68 | public AesMessageSerializerConfig(Map props) { 69 | super(CONFIG, props); 70 | } 71 | 72 | public AesMessageSerializerConfig(ConfigDef config, Map props) { 73 | super(config, props); 74 | } 75 | 76 | } 77 | -------------------------------------------------------------------------------- /clients/src/main/java/com/nucypher/kafka/clients/encrypt/AesStructuredMessageSerializerConfig.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients.encrypt; 2 | 3 | import com.nucypher.kafka.clients.StructuredMessageSerDeConfig; 4 | import org.apache.kafka.common.config.ConfigDef; 5 | 6 | import java.util.Collections; 7 | import java.util.Map; 8 | 9 | /** 10 | * Configuration for {@link AesStructuredMessageSerializer} 11 | */ 12 | public class AesStructuredMessageSerializerConfig extends AesMessageSerializerConfig { 13 | 14 | public static final String GRANULAR_DATA_ACCESSOR_CONFIG = 15 | 
StructuredMessageSerDeConfig.GRANULAR_DATA_ACCESSOR_CONFIG; 16 | 17 | public static final String FIELDS_LIST_CONFIG = "encryption.granular.fields"; 18 | public static final String FIELDS_LIST_DOC = "List of fields for encryption"; 19 | 20 | public static final String USE_DERIVED_KEYS_CONFIG = "encryption.granular.use.derived.keys"; 21 | public static final String USE_DERIVED_KEYS_DOC = "Use derived keys for DEK encryption"; 22 | public static final boolean USE_DERIVED_KEYS_DEFAULT = false; 23 | 24 | private static final ConfigDef CONFIG; 25 | 26 | static { 27 | CONFIG = baseConfigDef() 28 | .define(FIELDS_LIST_CONFIG, 29 | ConfigDef.Type.LIST, 30 | Collections.emptyList(), 31 | ConfigDef.Importance.HIGH, 32 | FIELDS_LIST_DOC) 33 | .define(USE_DERIVED_KEYS_CONFIG, 34 | ConfigDef.Type.BOOLEAN, 35 | USE_DERIVED_KEYS_DEFAULT, 36 | ConfigDef.Importance.LOW, 37 | USE_DERIVED_KEYS_DOC); 38 | StructuredMessageSerDeConfig.addGranularConfigDef(CONFIG); 39 | } 40 | 41 | public AesStructuredMessageSerializerConfig(Map props) { 42 | super(CONFIG, props); 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /clients/src/main/resources/P521.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN EC PARAMETERS----- 2 | MIIBwwIBATBNBgcqhkjOPQEBAkIB//////////////////////////////////// 3 | //////////////////////////////////////////////////8wgZ8EQgH///// 4 | //////////////////////////////////////////////////////////////// 5 | /////////////////ARCAFGVPrlhjhyaH5KaIaC2hUDuotpyW5mzFfO4tImRjvEJ 6 | 4VYZOVHsfpN7FlLAvTuxvwc1c9+IPSw08e9FH9RrUD8AAxUA0J6IACkcuFOWzGcX 7 | OTKEqqDaZLoEgYUEAMaFjga3BATpzZ4+y2YjlbRCnGSBOQU/tSH4KK9ga009uqFL 8 | Xnfv51ko/h3BJ6L/qN4zSLPBhWpCm/l+fjHC5b1mARg5KWp4mjvABFyKX7QsfRvZ 9 | mPVESVebRGgXr70XJz5mLJfucple9CZAxVC5AT+tB2E1PHCGonLCQIi+lHaf0WZQ 10 | AkIB///////////////////////////////////////////6UYaHg78vlmt/zAFI 11 | 9wml0Du1ybiJnEeuu2+3HpE4ZAkCAQE= 12 | -----END EC 
PARAMETERS----- 13 | -----BEGIN EC PRIVATE KEY----- 14 | MIHcAgEBBEIAfzzIW+AJhTtmFSp3qqLWajKIDj1cYodPac0GYO8ku217TNUy6KmX 15 | WDmc3nY3snVf29G2cZdb8JU9aHs/+b541aagBwYFK4EEACOhgYkDgYYABAFeHVTy 16 | WkYVAACm2xj8OM5o4YkqTgzq4eSs0gT7knuoQb0dRS3LKtwfg89h7LRA3VKmRk38 17 | oHdQln0J3ZfjfM+8ogFC1j+l0bFQmbNqDSan2HaqLobrfMw/1j4Jsz5hkwCjQEoU 18 | GOlHBIJVZU0QZL3ByFXUe2ouGNXFvHpW/tPgofTR9Q== 19 | -----END EC PRIVATE KEY----- 20 | -----BEGIN PUBLIC KEY----- 21 | MIGbMBAGByqGSM49AgEGBSuBBAAjA4GGAAQBXh1U8lpGFQAAptsY/DjOaOGJKk4M 22 | 6uHkrNIE+5J7qEG9HUUtyyrcH4PPYey0QN1SpkZN/KB3UJZ9Cd2X43zPvKIBQtY/ 23 | pdGxUJmzag0mp9h2qi6G63zMP9Y+CbM+YZMAo0BKFBjpRwSCVWVNEGS9wchV1Htq 24 | LhjVxbx6Vv7T4KH00fU= 25 | -----END PUBLIC KEY----- 26 | -------------------------------------------------------------------------------- /clients/src/main/resources/consumer.properties: -------------------------------------------------------------------------------- 1 | bootstrap.servers=localhost:9092 2 | group.id=test 3 | client.id=100500 4 | enable.auto.commit=true 5 | #key.deserializer=org.apache.kafka.common.serialization.StringDeserializer 6 | #value.deserializer=org.apache.kafka.common.serialization.StringDeserializer 7 | 8 | # fast session timeout makes it more fun to play with failover 9 | session.timeout.ms=10000 10 | 11 | # These buffer sizes seem to be needed to avoid consumer switching to 12 | # a mode where it processes one bufferful every 5 seconds with multiple 13 | # timeouts along the way. No idea why this happens. 
14 | fetch.min.bytes=50000 15 | receive.buffer.bytes=262144 16 | max.partition.fetch.bytes=2097152 -------------------------------------------------------------------------------- /clients/src/main/resources/producer.properties: -------------------------------------------------------------------------------- 1 | bootstrap.servers=localhost:9092 2 | acks=all 3 | retries=0 4 | batch.size=16384 5 | auto.commit.interval.ms=1000 6 | linger.ms=0 7 | block.on.buffer.full=true 8 | # define it inside the code 9 | #key.serializer=org.apache.kafka.common.serialization.StringSerializer 10 | #value.serializer=org.apache.kafka.common.serialization.StringSerializer 11 | -------------------------------------------------------------------------------- /clients/src/test/groovy/com/nucypher/kafka/clients/AesMessageEncryptorDecryptorSpec.groovy: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients 2 | 3 | import com.nucypher.crypto.EncryptionAlgorithm 4 | import com.nucypher.kafka.TestUtils 5 | import com.nucypher.kafka.clients.decrypt.AesMessageDeserializer 6 | import com.nucypher.kafka.clients.encrypt.AesMessageSerializer 7 | import com.nucypher.kafka.utils.KeyUtils 8 | import org.apache.kafka.common.serialization.StringDeserializer 9 | import org.apache.kafka.common.serialization.StringSerializer 10 | import spock.lang.Specification 11 | 12 | import java.security.KeyPair 13 | 14 | import static TestUtils.PEM 15 | 16 | /** 17 | * Test for {@link AesMessageSerializer} and {@link AesMessageDeserializer} 18 | */ 19 | class AesMessageEncryptorDecryptorSpec extends Specification { 20 | 21 | static final Class ALGORITHM = 22 | TestUtils.ENCRYPTION_ALGORITHM_CLASS 23 | 24 | def 'encrypt and decrypt message'() { 25 | setup: 'initialization' 26 | 27 | String topic = "topic" 28 | Random random = new Random() 29 | String data = new BigInteger(130, random).toString(32) 30 | 31 | File file = new 
File(this.getClass().getClassLoader() 32 | .getResource(PEM).getFile()) 33 | KeyPair keyPair = KeyUtils.getECKeyPairFromPEM(file.getAbsolutePath()) 34 | 35 | AesMessageSerializer messageSerializer = 36 | new AesMessageSerializer<>( 37 | new StringSerializer(), 38 | ALGORITHM, 39 | keyPair.public, 40 | null 41 | ) 42 | 43 | AesMessageDeserializer messageDeserializer = 44 | new AesMessageDeserializer<>( 45 | new StringDeserializer(), 46 | ALGORITHM, 47 | keyPair.private 48 | ) 49 | 50 | when: 'encrypt and decrypt' 51 | byte[] encrypted = messageSerializer.serialize(topic, data) 52 | String result = messageDeserializer.deserialize(topic, encrypted) 53 | 54 | then: 'should be initial data' 55 | result == data 56 | } 57 | 58 | } 59 | -------------------------------------------------------------------------------- /commons/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'com.github.hauner.jarTest' 2 | 3 | group 'com.nucypher.kafka' 4 | 5 | sourceCompatibility = 1.7 6 | targetCompatibility = 1.7 7 | 8 | jar { 9 | archiveName = "nucypher-kafka-commons-${version}.${extension}" 10 | } 11 | 12 | dependencies { 13 | compile project(':crypto') 14 | compile project(':kafka:clients') 15 | 16 | compile "org.slf4j:slf4j-api:$slf4jVersion" 17 | compile("org.apache.logging.log4j:log4j-slf4j-impl:$log4jVersion") 18 | compile("org.apache.logging.log4j:log4j-1.2-api:$log4jVersion") 19 | 20 | compile "org.bouncycastle:bcpkix-jdk15on:$bouncyCastleVersion" 21 | 22 | compile("com.101tec:zkclient:$zkclientVersion") { 23 | exclude group: "org.slf4j", module: 'slf4j-api' 24 | exclude group: "org.slf4j", module: 'slf4j-log4j12' 25 | exclude group: "log4j", module: 'log4j' 26 | } 27 | 28 | compile "com.fasterxml.jackson.core:jackson-databind:$jacksonVersion" 29 | compile "com.jayway.jsonpath:json-path:$jsonPathVersion" 30 | compile "org.apache.avro:avro:$avroVersion" 31 | 
compile("io.confluent:kafka-avro-serializer:$confluentVersion") { 32 | exclude group: "org.slf4j", module: 'slf4j-api' 33 | exclude group: "org.slf4j", module: 'slf4j-log4j12' 34 | exclude group: "org.apache.avro", module: 'avro' 35 | exclude group: "com.fasterxml.jackson.core", module: 'jackson-databind' 36 | } 37 | 38 | compile "com.google.guava:guava:$guavaVersion" 39 | compile "org.reflections:reflections:$reflectionsVersion" 40 | compile "org.apache.commons:commons-crypto:$commonsCryptoVersion" 41 | 42 | testCompile "junit:junit:$junitVersion" 43 | testCompile "org.apache.curator:curator-test:$curatorVersion" 44 | testCompile "org.apache.curator:curator-framework:$curatorVersion" 45 | testCompile "org.spockframework:spock-core:$spockVersion" 46 | testCompile "cglib:cglib-nodep:$cglibVersion" 47 | } 48 | -------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/Constants.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka; 2 | 3 | import org.bouncycastle.jce.provider.BouncyCastleProvider; 4 | 5 | import java.io.Serializable; 6 | 7 | /** 8 | * Commons constants 9 | */ 10 | public class Constants { 11 | 12 | public static final String BOUNCY_CASTLE_PROVIDER_NAME = 13 | new BouncyCastleProvider().getName(); 14 | public static final String AES_ALGORITHM_NAME = "AES"; 15 | public static final String KEY_FACTORY_ALGORITHM = "ECDSA"; 16 | 17 | } 18 | -------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/DefaultProvider.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka; 2 | 3 | import org.bouncycastle.jce.provider.BouncyCastleProvider; 4 | 5 | import java.security.Security; 6 | 7 | /** 8 | * Class for provider initialization 9 | */ 10 | public class DefaultProvider { 11 | 12 | private static volatile 
boolean isInitialized = false; 13 | 14 | /** 15 | * Initialize once per JVM 16 | */ 17 | public static void initializeProvider() { 18 | if (!isInitialized) { 19 | synchronized (DefaultProvider.class) { 20 | // double-checked locking: re-check under the lock so only one thread registers the provider 21 | if (!isInitialized) { 22 | Security.addProvider(new BouncyCastleProvider()); 23 | isInitialized = true; 24 | } 25 | } 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/INamed.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka; 2 | 3 | /** 4 | * Interface for objects with names 5 | * 6 | * @author szotov 7 | */ 8 | public interface INamed { 9 | 10 | /** 11 | * @return object name 12 | */ 13 | public String getName(); 14 | 15 | /** 16 | * @return short object name 17 | */ 18 | public String getShortName(); 19 | 20 | } 21 | -------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/cipher/CipherFactory.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.cipher; 2 | 3 | import com.nucypher.kafka.Constants; 4 | import com.nucypher.kafka.errors.CommonException; 5 | import org.slf4j.Logger; 6 | import org.slf4j.LoggerFactory; 7 | 8 | /** 9 | * Factory for {@link ICipher} 10 | */ 11 | public class CipherFactory { 12 | 13 | private static final Logger LOGGER = LoggerFactory.getLogger(CipherFactory.class); 14 | 15 | /** 16 | * Cipher provider 17 | */ 18 | public enum CipherProvider { 19 | /** 20 | * BouncyCastle provider 21 | */ 22 | BOUNCY_CASTLE, 23 | /** 24 | * OpenSSL provider by JNI 25 | */ 26 | OPENSSL 27 | } 28 | 29 | /** 30 | * Get {@link ICipher} instance 31 | * 32 | * @param provider {@link CipherProvider} 33 | * @param transformation transformation 34 | * @return instance of {@link ICipher} 35 | */ 36 | public static ICipher
getCipher(CipherProvider provider, 37 | String transformation) { 38 | LOGGER.info("Creating cipher using provider '{}' and transformation '{}'", 39 | provider, transformation); 40 | String algorithm = transformation.split("/")[0]; 41 | switch (provider) { 42 | case BOUNCY_CASTLE: 43 | return new JCECipher( 44 | Constants.BOUNCY_CASTLE_PROVIDER_NAME, 45 | algorithm, 46 | transformation); 47 | case OPENSSL: 48 | return new OpenSSLCipher(algorithm, transformation); 49 | default: 50 | throw new CommonException("Unknown provider '%s'", provider); 51 | } 52 | } 53 | 54 | } 55 | -------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/cipher/ICipher.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.cipher; 2 | 3 | import java.security.Key; 4 | 5 | /** 6 | * Cipher 7 | */ 8 | public interface ICipher { 9 | 10 | /** 11 | * Encrypt data using DEK and IV 12 | * 13 | * @param data data for encryption 14 | * @param key Data Encryption Key 15 | * @param IV initialization vector 16 | * @return encrypted data 17 | */ 18 | public byte[] encrypt(byte[] data, Key key, byte[] IV); 19 | 20 | /** 21 | * Decrypt data using DEK and IV 22 | * 23 | * @param data data for decryption 24 | * @param key Data Encryption Key 25 | * @param IV initialization vector 26 | * @return decrypted data 27 | */ 28 | public byte[] decrypt(byte[] data, Key key, byte[] IV); 29 | 30 | } 31 | -------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/cipher/JCECipher.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.cipher; 2 | 3 | import com.nucypher.kafka.errors.CommonException; 4 | 5 | import javax.crypto.BadPaddingException; 6 | import javax.crypto.Cipher; 7 | import javax.crypto.IllegalBlockSizeException; 8 | import 
javax.crypto.NoSuchPaddingException; 9 | import javax.crypto.SecretKey; 10 | import javax.crypto.ShortBufferException; 11 | import javax.crypto.spec.IvParameterSpec; 12 | import javax.crypto.spec.SecretKeySpec; 13 | import java.security.InvalidAlgorithmParameterException; 14 | import java.security.InvalidKeyException; 15 | import java.security.Key; 16 | import java.security.NoSuchAlgorithmException; 17 | import java.security.NoSuchProviderException; 18 | import java.security.spec.AlgorithmParameterSpec; 19 | import java.util.Arrays; 20 | 21 | /** 22 | * JCE cipher 23 | */ 24 | public class JCECipher implements ICipher { 25 | 26 | private String transformation; 27 | private String algorithm; 28 | private String provider; 29 | 30 | /** 31 | * @param provider provider 32 | * @param algorithm algorithm 33 | * @param transformation transformation string 34 | */ 35 | public JCECipher(String provider, String algorithm, String transformation) { 36 | this.provider = provider; 37 | this.algorithm = algorithm; 38 | this.transformation = transformation; 39 | } 40 | 41 | private Cipher getCipher(boolean isEncryption, Key key, byte[] IV) { 42 | Cipher cipher; 43 | try { 44 | cipher = Cipher.getInstance(transformation, provider); 45 | } catch (NoSuchAlgorithmException | NoSuchProviderException | NoSuchPaddingException ex) { 46 | throw new CommonException( 47 | ex, 48 | "Unable to get instance of Cipher for %s for security provider: %s", 49 | transformation, provider); 50 | } 51 | 52 | SecretKey keyValue = new SecretKeySpec(key.getEncoded(), algorithm); 53 | AlgorithmParameterSpec IVspec = new IvParameterSpec(IV); 54 | 55 | try { 56 | cipher.init(isEncryption ? 
Cipher.ENCRYPT_MODE : Cipher.DECRYPT_MODE, 57 | keyValue, IVspec); 58 | } catch (InvalidKeyException | InvalidAlgorithmParameterException ex) { 59 | throw new CommonException("Unable to initialize Cipher", ex); 60 | } 61 | return cipher; 62 | } 63 | 64 | @Override 65 | public byte[] encrypt(byte[] data, Key key, byte[] IV) { 66 | return translate(true, data, key, IV); 67 | } 68 | 69 | @Override 70 | public byte[] decrypt(byte[] data, Key key, byte[] IV) { 71 | return translate(false, data, key, IV); 72 | } 73 | 74 | private byte[] translate(boolean isEncryption, byte[] data, Key key, byte[] IV) { 75 | Cipher cipher = getCipher(isEncryption, key, IV); 76 | byte[] output = new byte[cipher.getOutputSize(data.length)]; 77 | try { 78 | int updateBytes = cipher.update(data, 0, data.length, output, 0); 79 | int finalBytes = cipher.doFinal(data, 0, 0, output, updateBytes); 80 | if (updateBytes + finalBytes < output.length) { 81 | output = Arrays.copyOf(output, updateBytes + finalBytes); 82 | } 83 | } catch (ShortBufferException | IllegalBlockSizeException | BadPaddingException e) { 84 | throw new CommonException(e); 85 | } 86 | return output; 87 | } 88 | 89 | } 90 | -------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/cipher/OpenSSLCipher.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.cipher; 2 | 3 | import com.nucypher.kafka.errors.CommonException; 4 | import org.apache.commons.crypto.cipher.CryptoCipher; 5 | import org.apache.commons.crypto.cipher.CryptoCipherFactory; 6 | import org.apache.commons.crypto.utils.Utils; 7 | 8 | import javax.crypto.BadPaddingException; 9 | import javax.crypto.Cipher; 10 | import javax.crypto.IllegalBlockSizeException; 11 | import javax.crypto.SecretKey; 12 | import javax.crypto.ShortBufferException; 13 | import javax.crypto.spec.IvParameterSpec; 14 | import javax.crypto.spec.SecretKeySpec; 15 | import 
java.io.IOException; 16 | import java.security.InvalidAlgorithmParameterException; 17 | import java.security.InvalidKeyException; 18 | import java.security.Key; 19 | import java.security.spec.AlgorithmParameterSpec; 20 | import java.util.Arrays; 21 | import java.util.Properties; 22 | 23 | /** 24 | * OpenSSL cipher 25 | */ 26 | public class OpenSSLCipher implements ICipher { 27 | 28 | static { 29 | try { 30 | System.loadLibrary("crypto"); 31 | } catch (UnsatisfiedLinkError e) { 32 | System.loadLibrary("libcrypto"); 33 | } 34 | } 35 | 36 | private String transformation; 37 | private String algorithm; 38 | 39 | /** 40 | * @param algorithm algorithm 41 | * @param transformation transformation string 42 | */ 43 | public OpenSSLCipher(String algorithm, String transformation) { 44 | this.algorithm = algorithm; 45 | this.transformation = transformation; 46 | } 47 | 48 | private CryptoCipher getCipher(boolean isEncryption, Key key, byte[] IV) { 49 | Properties properties = new Properties(); 50 | properties.setProperty(CryptoCipherFactory.CLASSES_KEY, 51 | CryptoCipherFactory.CipherProvider.OPENSSL.getClassName()); 52 | CryptoCipher cipher; 53 | try { 54 | cipher = Utils.getCipherInstance(transformation, properties); 55 | } catch (IOException ex) { 56 | throw new CommonException(ex); 57 | } 58 | 59 | SecretKey keyValue = new SecretKeySpec(key.getEncoded(), algorithm); 60 | AlgorithmParameterSpec IVspec = new IvParameterSpec(IV); 61 | 62 | try { 63 | cipher.init(isEncryption ? 
Cipher.ENCRYPT_MODE : Cipher.DECRYPT_MODE, 64 | keyValue, IVspec); 65 | } catch (InvalidKeyException | InvalidAlgorithmParameterException ex) { 66 | throw new CommonException("Unable to initialize Cipher", ex); 67 | } 68 | return cipher; 69 | } 70 | 71 | /** 72 | * Encrypt data using DEK and IV 73 | * 74 | * @param data data for encryption 75 | * @param key Data Encryption Key 76 | * @param IV initialization vector 77 | * @return encrypted data 78 | */ 79 | public byte[] encrypt(byte[] data, Key key, byte[] IV) { 80 | return translate(true, data, key, IV); 81 | } 82 | 83 | /** 84 | * Decrypt data using DEK and IV 85 | * 86 | * @param data data for decryption 87 | * @param key Data Encryption Key 88 | * @param IV initialization vector 89 | * @return decrypted data 90 | */ 91 | public byte[] decrypt(byte[] data, Key key, byte[] IV) { 92 | return translate(false, data, key, IV); 93 | } 94 | 95 | private byte[] translate(boolean isEncryption, byte[] data, Key key, byte[] IV) { 96 | byte[] output = new byte[2 * data.length]; 97 | try (CryptoCipher cipher = getCipher(isEncryption, key, IV)) { 98 | int updateBytes = cipher.update(data, 0, data.length, output, 0); 99 | int finalBytes = cipher.doFinal(data, 0, 0, output, updateBytes); 100 | return Arrays.copyOf(output, updateBytes + finalBytes); 101 | } catch (IOException | 102 | ShortBufferException | 103 | BadPaddingException | 104 | IllegalBlockSizeException ex) { 105 | throw new CommonException("Unable to cipher", ex); 106 | } 107 | } 108 | 109 | } 110 | -------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/clients/EncryptedDataEncryptionKey.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients; 2 | 3 | import com.google.common.primitives.Ints; 4 | 5 | import java.util.Arrays; 6 | import java.util.Objects; 7 | 8 | /** 9 | * Encrypted Data Encryption Key (EDEK) 10 | */ 11 | public 
class EncryptedDataEncryptionKey { 12 | 13 | private static final byte IS_COMPLEX_TRUE = 1; 14 | private static final byte IS_COMPLEX_FALSE = 0; 15 | 16 | private byte[] bytes; 17 | private boolean isComplex; 18 | 19 | /** 20 | * @param bytes EDEK bytes 21 | * @param isComplex is EDEK complex 22 | */ 23 | public EncryptedDataEncryptionKey(byte[] bytes, boolean isComplex) { 24 | this.bytes = bytes; 25 | this.isComplex = isComplex; 26 | } 27 | 28 | /** 29 | * @param bytes EDEK bytes 30 | */ 31 | public EncryptedDataEncryptionKey(byte[] bytes) { 32 | this(bytes, false); 33 | } 34 | 35 | /** 36 | * @return EDEK bytes 37 | */ 38 | public byte[] getBytes() { 39 | return bytes; 40 | } 41 | 42 | 43 | /** 44 | * @return is EDEK complex 45 | */ 46 | public boolean isComplex() { 47 | return isComplex; 48 | } 49 | 50 | /** 51 | * @return size of serialized EDEK 52 | */ 53 | public int size() { 54 | return bytes.length + 5; 55 | } 56 | 57 | /** 58 | * Serialize {@link EncryptedDataEncryptionKey} 59 | * 60 | * @return serialized data 61 | */ 62 | public byte[] serialize() { 63 | return serialize(new byte[size()], 0); 64 | } 65 | 66 | /** 67 | * Serialize {@link EncryptedDataEncryptionKey} into buffer 68 | * 69 | * @param buffer buffer byte array 70 | * @param offset start offset 71 | * @return serialized data 72 | */ 73 | public byte[] serialize(byte[] buffer, int offset) { 74 | buffer[offset] = !isComplex ? 
IS_COMPLEX_FALSE : IS_COMPLEX_TRUE; 75 | byte[] edekLengthBytes = Ints.toByteArray(bytes.length); 76 | System.arraycopy( 77 | edekLengthBytes, 0, buffer, offset + 1, edekLengthBytes.length); 78 | System.arraycopy( 79 | bytes, 0, buffer, offset + edekLengthBytes.length + 1, bytes.length); 80 | return buffer; 81 | } 82 | 83 | /** 84 | * Deserialize {@link EncryptedDataEncryptionKey} from byte array 85 | * 86 | * @param bytes byte array 87 | * @return {@link EncryptedDataEncryptionKey} instance 88 | */ 89 | public static EncryptedDataEncryptionKey deserialize(byte[] bytes) { 90 | return deserialize(bytes, 0); 91 | } 92 | 93 | /** 94 | * Deserialize {@link EncryptedDataEncryptionKey} from byte array 95 | * 96 | * @param bytes byte array 97 | * @param offset start offset 98 | * @return {@link EncryptedDataEncryptionKey} instance 99 | */ 100 | public static EncryptedDataEncryptionKey deserialize(byte[] bytes, int offset) { 101 | byte isComplexByte = bytes[offset]; 102 | byte[] edek = new byte[Ints.fromBytes( 103 | bytes[offset + 1], bytes[offset + 2], bytes[offset + 3], bytes[offset + 4])]; 104 | System.arraycopy(bytes, offset + 5, edek, 0, edek.length); 105 | return new EncryptedDataEncryptionKey(edek, isComplexByte == IS_COMPLEX_TRUE); 106 | } 107 | 108 | @Override 109 | public boolean equals(Object o) { 110 | if (this == o) return true; 111 | if (o == null || getClass() != o.getClass()) return false; 112 | EncryptedDataEncryptionKey that = (EncryptedDataEncryptionKey) o; 113 | return isComplex == that.isComplex && 114 | Arrays.equals(bytes, that.bytes); 115 | } 116 | 117 | @Override 118 | public int hashCode() { 119 | return Objects.hash(bytes, isComplex); 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/clients/Message.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients; 2 | 3 | import 
com.google.common.primitives.Ints; 4 | 5 | import java.util.Arrays; 6 | import java.util.Objects; 7 | 8 | /** 9 | * Message 10 | */ 11 | public class Message { 12 | 13 | private byte[] payload; 14 | private byte[] iv; 15 | private EncryptedDataEncryptionKey edek; 16 | 17 | /** 18 | * @param payload data 19 | * @param iv IV bytes 20 | * @param edek EDEK 21 | */ 22 | public Message(byte[] payload, byte[] iv, EncryptedDataEncryptionKey edek) { 23 | this.payload = payload; 24 | this.edek = edek; 25 | this.iv = iv; 26 | } 27 | 28 | /** 29 | * @param payload data 30 | * @param iv IV bytes 31 | * @param edek EDEK bytes 32 | */ 33 | public Message(byte[] payload, byte[] iv, byte[] edek) { 34 | this(payload, iv, new EncryptedDataEncryptionKey(edek)); 35 | } 36 | 37 | /** 38 | * @param payload data 39 | * @param iv IV bytes 40 | * @param edek EDEK bytes 41 | * @param isComplex is EDEK complex 42 | */ 43 | public Message(byte[] payload, byte[] iv, byte[] edek, boolean isComplex) { 44 | this(payload, iv, new EncryptedDataEncryptionKey(edek, isComplex)); 45 | } 46 | 47 | /** 48 | * @param payload data 49 | * @param iv IV bytes 50 | */ 51 | public Message(byte[] payload, byte[] iv) { 52 | this(payload, iv, (EncryptedDataEncryptionKey) null); 53 | } 54 | 55 | /** 56 | * @return data 57 | */ 58 | public byte[] getPayload() { 59 | return payload; 60 | } 61 | 62 | /** 63 | * @return EDEK 64 | */ 65 | public EncryptedDataEncryptionKey getEDEK() { 66 | return edek; 67 | } 68 | 69 | /** 70 | * @param edek EDEK 71 | */ 72 | public void setEDEK(EncryptedDataEncryptionKey edek) { 73 | this.edek = edek; 74 | } 75 | 76 | /** 77 | * @return IV 78 | */ 79 | public byte[] getIV() { 80 | return iv; 81 | } 82 | 83 | /** 84 | * Serialize {@link Message} 85 | * 86 | * @return serialized data 87 | */ 88 | public byte[] serialize() { 89 | int length = 8 + payload.length + iv.length + (edek != null ? 
edek.size() : 0); 90 | byte[] data = new byte[length]; 91 | byte[] payloadLengthBytes = Ints.toByteArray(payload.length); 92 | byte[] ivLengthBytes = Ints.toByteArray(iv.length); 93 | System.arraycopy(payloadLengthBytes, 0, data, 0, payloadLengthBytes.length); 94 | System.arraycopy(payload, 0, data, 4, payload.length); 95 | System.arraycopy( 96 | ivLengthBytes, 0, data, 4 + payload.length, ivLengthBytes.length); 97 | System.arraycopy(iv, 0, data, 8 + payload.length, iv.length); 98 | if (edek != null) { 99 | data = edek.serialize(data, 8 + payload.length + iv.length); 100 | } 101 | return data; 102 | } 103 | 104 | /** 105 | * Deserialize {@link Message} from byte array 106 | * 107 | * @param bytes byte array 108 | * @return {@link Message} instance 109 | */ 110 | public static Message deserialize(byte[] bytes) { 111 | byte[] payload = new byte[ 112 | Ints.fromBytes(bytes[0], bytes[1], bytes[2], bytes[3])]; 113 | System.arraycopy(bytes, 4, payload, 0, payload.length); 114 | int i = 4 + payload.length; 115 | byte[] iv = new byte[ 116 | Ints.fromBytes(bytes[i], bytes[i + 1], bytes[i + 2], bytes[i + 3])]; 117 | System.arraycopy(bytes, i + 4, iv, 0, iv.length); 118 | EncryptedDataEncryptionKey edek = null; 119 | if (8 + payload.length + iv.length < bytes.length) { 120 | edek = EncryptedDataEncryptionKey.deserialize( 121 | bytes, 8 + payload.length + iv.length); 122 | } 123 | 124 | return new Message(payload, iv, edek); 125 | } 126 | 127 | @Override 128 | public boolean equals(Object o) { 129 | if (this == o) return true; 130 | if (o == null || getClass() != o.getClass()) return false; 131 | Message message = (Message) o; 132 | return Arrays.equals(payload, message.payload) && 133 | Arrays.equals(iv, message.iv) && 134 | Objects.equals(edek, message.edek); 135 | } 136 | 137 | @Override 138 | public int hashCode() { 139 | return Objects.hash(payload, iv, edek); 140 | } 141 | } 142 | -------------------------------------------------------------------------------- 
/commons/src/main/java/com/nucypher/kafka/clients/granular/DataFormat.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients.granular; 2 | 3 | import com.nucypher.kafka.INamed; 4 | import com.nucypher.kafka.errors.CommonException; 5 | 6 | /** 7 | * Data format enum 8 | * 9 | * @author szotov 10 | */ 11 | public enum DataFormat implements INamed { 12 | 13 | /** 14 | * Avro 15 | */ 16 | AVRO("avro", AvroDataAccessor.class), 17 | /** 18 | * Avro Schema Less 19 | */ 20 | AVRO_SCHEMA_LESS("avro_schema_less", AvroSchemaLessDataAccessor.class), 21 | /** 22 | * JSON 23 | */ 24 | JSON("json", JsonDataAccessor.class); 25 | 26 | private String shortName; 27 | private Class accessorClass; 28 | 29 | DataFormat(String shortName, Class accessorClass) { 30 | this.shortName = shortName; 31 | this.accessorClass = accessorClass; 32 | } 33 | 34 | @Override 35 | public String getShortName() { 36 | return shortName; 37 | } 38 | 39 | @Override 40 | public String getName() { 41 | return name(); 42 | } 43 | 44 | /** 45 | * @return accessor class 46 | */ 47 | public Class getAccessorClass() { 48 | return accessorClass; 49 | } 50 | 51 | /** 52 | * Check whether one of the formats contains the accessor class 53 | * 54 | * @param accessorClass accessor class to check 55 | * @return result of checking 56 | */ 57 | public static boolean contains(Class accessorClass) { 58 | for (DataFormat format : values()) { 59 | if (format.getAccessorClass().equals(accessorClass)) { 60 | return true; 61 | } 62 | } 63 | return false; 64 | } 65 | 66 | /** 67 | * Get data format from accessor class 68 | * 69 | * @param accessorClass accessor class 70 | * @return data format 71 | */ 72 | public static DataFormat valueOf(Class accessorClass) { 73 | for (DataFormat format : DataFormat.values()) { 74 | if (format.getAccessorClass().equals(accessorClass)) { 75 | return format; 76 | } 77 | } 78 | throw new CommonException( 79 | "No such data format 
with accessor class '%s'", accessorClass.getName()); 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/clients/granular/OneMessageDataAccessor.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients.granular; 2 | 3 | import java.util.NoSuchElementException; 4 | 5 | /** 6 | * Abstract {@link StructuredDataAccessor} with only one message 7 | */ 8 | public abstract class OneMessageDataAccessor implements StructuredDataAccessor { 9 | 10 | private boolean hasNext = true; 11 | private boolean isEmpty = false; 12 | 13 | /** 14 | * @param isEmpty is message empty 15 | */ 16 | protected void setEmpty(boolean isEmpty) { 17 | hasNext = !isEmpty; 18 | this.isEmpty = isEmpty; 19 | } 20 | 21 | @Override 22 | public boolean hasNext() { 23 | return hasNext; 24 | } 25 | 26 | @Override 27 | public void seekToNext() throws NoSuchElementException { 28 | if (!hasNext()) { 29 | throw new NoSuchElementException(); 30 | } 31 | hasNext = false; 32 | } 33 | 34 | @Override 35 | public void reset() { 36 | hasNext = !isEmpty; 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/clients/granular/StructuredDataAccessor.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients.granular; 2 | 3 | import java.util.Map; 4 | import java.util.NoSuchElementException; 5 | import java.util.Set; 6 | 7 | /** 8 | * The structured data accessor. The implementation must have default constructor 9 | */ 10 | public interface StructuredDataAccessor { 11 | 12 | /** 13 | * Configure this class. 
14 | * 15 | * @param configs configs in key/value pairs 16 | * @param isKey whether is for key or value 17 | */ 18 | public void configure(Map configs, boolean isKey); 19 | 20 | /** 21 | * Deserialize data into internal object 22 | * 23 | * @param topic topic associated with data 24 | * @param data input data 25 | */ 26 | public void deserialize(String topic, byte[] data); 27 | 28 | /** 29 | * Serialize internal data into byte array 30 | * 31 | * @return serialized data 32 | */ 33 | public byte[] serialize(); 34 | 35 | /** 36 | * @return all fields which available for encryption 37 | */ 38 | public Set getAllFields(); 39 | 40 | /** 41 | * @return all encrypted fields and their EDEK 42 | */ 43 | public Map getAllEDEKs(); 44 | 45 | /** 46 | * Get encrypted fields data 47 | * 48 | * @param field input field 49 | * @return field data 50 | */ 51 | public byte[] getEncrypted(String field); 52 | 53 | /** 54 | * Get unencrypted fields data 55 | * 56 | * @param field input field 57 | * @return field data 58 | */ 59 | public byte[] getUnencrypted(String field); 60 | 61 | /** 62 | * Add encrypted field 63 | * 64 | * @param field field name 65 | * @param data encrypted data 66 | */ 67 | public void addEncrypted(String field, byte[] data); 68 | 69 | /** 70 | * Add EDEK 71 | * 72 | * @param field field name 73 | * @param edek EDEK 74 | */ 75 | public void addEDEK(String field, byte[] edek); 76 | 77 | /** 78 | * Add unencrypted field 79 | * 80 | * @param field field name 81 | * @param data non-encrypted data 82 | */ 83 | public void addUnencrypted(String field, byte[] data); 84 | 85 | /** 86 | * Remove EDEK 87 | * 88 | * @param field field name 89 | */ 90 | public void removeEDEK(String field); 91 | 92 | /** 93 | * @return {@code true} if the iteration has more elements 94 | */ 95 | public boolean hasNext(); 96 | 97 | /** 98 | * Seek to the next element in the iteration 99 | * 100 | * @throws NoSuchElementException if the iteration has no more elements 101 | */ 102 | public void 
/**
 * {@link RuntimeException} wrapper
 *
 * <p>The varargs constructors run the message through
 * {@link String#format(String, Object...)}; callers passing a pre-built
 * message that may contain '%' characters should use
 * {@link #CommonException(String)} instead (Java overload resolution picks
 * the non-varargs constructor for a single String argument).
 */
public class CommonException extends RuntimeException {

    private static final long serialVersionUID = 487684204074816608L;

    /**
     * @param cause  the cause
     * @param format see format in {@link String#format(String, Object...)}
     * @param args   see args in {@link String#format(String, Object...)}
     */
    public CommonException(Throwable cause, String format, Object... args) {
        super(String.format(format, args), cause);
    }

    /**
     * @param format see format in {@link String#format(String, Object...)}
     * @param args   see args in {@link String#format(String, Object...)}
     */
    public CommonException(String format, Object... args) {
        super(String.format(format, args));
    }

    /**
     * See {@link RuntimeException#RuntimeException()}
     */
    public CommonException() {
        super();
    }

    /**
     * See {@link RuntimeException#RuntimeException(String, Throwable, boolean, boolean)}
     */
    protected CommonException(String message, Throwable cause,
                              boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }

    /**
     * See {@link RuntimeException#RuntimeException(String, Throwable)}
     */
    public CommonException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * See {@link RuntimeException#RuntimeException(String)}
     */
    public CommonException(String message) {
        super(message);
    }

    /**
     * See {@link RuntimeException#RuntimeException(Throwable)}
     */
    public CommonException(Throwable cause) {
        super(cause);
    }
}
keyGenerators = new HashMap<>(); 24 | 25 | static { 26 | DefaultProvider.initializeProvider(); 27 | getKeyGenerator(128); 28 | getKeyGenerator(192); 29 | getKeyGenerator(256); 30 | } 31 | 32 | private static KeyGenerator getKeyGenerator(int size) { 33 | KeyGenerator keyGenerator = keyGenerators.get(size); 34 | if (keyGenerator == null) { 35 | synchronized (AESKeyGenerators.class) { 36 | keyGenerator = keyGenerators.get(size); 37 | if (keyGenerator == null) { 38 | try { 39 | keyGenerator = KeyGenerator.getInstance( 40 | AES_ALGORITHM_NAME, BOUNCY_CASTLE_PROVIDER_NAME); 41 | } catch (NoSuchAlgorithmException | NoSuchProviderException e) { 42 | throw new CommonException(e); 43 | } 44 | keyGenerator.init(size); 45 | keyGenerators.put(size, keyGenerator); 46 | } 47 | } 48 | } 49 | return keyGenerator; 50 | } 51 | 52 | /** 53 | * Generate DEK - AES 54 | * 55 | * @param size key size in bytes 56 | * @return DEK 57 | */ 58 | public static Key generateDEK(int size) { 59 | size = BITS_IN_BYTE * size; 60 | KeyGenerator keyGenerator = getKeyGenerator(size); 61 | return keyGenerator.generateKey(); 62 | } 63 | 64 | /** 65 | * Create Key from byte array 66 | * 67 | * @param keyBytes - byte[] 68 | * @param symmetricAlgorithmName - "AES" 69 | * @return Key 70 | */ 71 | public static Key create(byte[] keyBytes, String symmetricAlgorithmName) { 72 | return new SecretKeySpec(keyBytes, 0, keyBytes.length, symmetricAlgorithmName); 73 | } 74 | 75 | } 76 | -------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/utils/AvroUtils.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.utils; 2 | 3 | import com.nucypher.kafka.errors.CommonException; 4 | import org.apache.avro.Schema; 5 | import org.apache.avro.generic.GenericDatumReader; 6 | import org.apache.avro.generic.GenericDatumWriter; 7 | import org.apache.avro.io.BinaryDecoder; 8 | import 
org.apache.avro.io.BinaryEncoder; 9 | import org.apache.avro.io.DatumReader; 10 | import org.apache.avro.io.DatumWriter; 11 | import org.apache.avro.io.DecoderFactory; 12 | import org.apache.avro.io.EncoderFactory; 13 | import org.apache.avro.specific.SpecificDatumReader; 14 | 15 | import java.io.ByteArrayOutputStream; 16 | import java.io.IOException; 17 | 18 | /** 19 | * Class for working with Avro format 20 | * 21 | * @author szotov 22 | */ 23 | public class AvroUtils { 24 | 25 | /** 26 | * Get object from bytes 27 | * 28 | * @param schema schema 29 | * @param data byte array 30 | * @return deserialized object 31 | */ 32 | public static Object deserialize(Schema schema, byte[] data) { 33 | return deserialize(schema, data, false); 34 | } 35 | 36 | 37 | /** 38 | * Get object from bytes 39 | * 40 | * @param schema schema 41 | * @param data byte array 42 | * @param useSpecificAvroReader use specific reader 43 | * @return deserialized object 44 | */ 45 | public static Object deserialize(Schema schema, byte[] data, boolean useSpecificAvroReader) { 46 | BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(data, null); 47 | DatumReader reader; 48 | if (!useSpecificAvroReader) { 49 | reader = new GenericDatumReader<>(schema); 50 | } else { 51 | reader = new SpecificDatumReader<>(schema); 52 | } 53 | Object value; 54 | try { 55 | value = reader.read(null, decoder); 56 | } catch (IOException e) { 57 | throw new CommonException(e); 58 | } 59 | return value; 60 | } 61 | 62 | /** 63 | * Serialize object to bytes 64 | * 65 | * @param schema schema 66 | * @param object object 67 | * @return bytes 68 | */ 69 | public static byte[] serialize(Schema schema, Object object) { 70 | ByteArrayOutputStream output = new ByteArrayOutputStream(); 71 | BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(output, null); 72 | DatumWriter writer = new GenericDatumWriter<>(schema); 73 | try { 74 | writer.write(object, encoder); 75 | encoder.flush(); 76 | output.flush(); 77 | } catch 
(IOException e) { 78 | throw new CommonException(e); 79 | } 80 | 81 | return output.toByteArray(); 82 | } 83 | 84 | } 85 | -------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/utils/ByteUtils.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.utils; 2 | 3 | import org.apache.avro.Schema; 4 | import org.apache.avro.SchemaBuilder; 5 | import org.apache.avro.generic.GenericContainer; 6 | import org.apache.avro.reflect.ReflectData; 7 | 8 | /** 9 | * Basically for test only 10 | */ 11 | public class ByteUtils { 12 | 13 | /** 14 | * Serialize any valid Object into byte array 15 | * 16 | * @param value any object 17 | * @return byte array 18 | */ 19 | public static byte[] serialize(final Object value) { 20 | Schema schema; 21 | if (value == null) { 22 | schema = SchemaBuilder.builder().nullType(); 23 | } else if (value instanceof GenericContainer) { 24 | schema = ((GenericContainer) value).getSchema(); 25 | } else { 26 | schema = ReflectData.get().getSchema(value.getClass()); 27 | } 28 | return AvroUtils.serialize(schema, value); 29 | } 30 | 31 | /** 32 | * Deserialize byte array into Object 33 | * 34 | * @param bytes bytes 35 | * @param clazz class of object 36 | * @return deserialized object 37 | */ 38 | @SuppressWarnings("unchecked") 39 | public static T deserialize(byte[] bytes, Class clazz) { 40 | Schema schema; 41 | if (bytes == null) { 42 | schema = SchemaBuilder.builder().nullType(); 43 | } else { 44 | schema = ReflectData.get().getSchema(clazz); 45 | } 46 | return (T) AvroUtils.deserialize(schema, bytes); 47 | } 48 | 49 | } 50 | -------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/utils/EncryptionAlgorithmUtils.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.utils; 2 | 3 | import 
com.nucypher.crypto.AlgorithmName; 4 | import com.nucypher.crypto.EncryptionAlgorithm; 5 | import com.nucypher.kafka.errors.CommonException; 6 | import org.reflections.Reflections; 7 | 8 | import java.util.HashSet; 9 | import java.util.Set; 10 | 11 | /** 12 | * Utils for working with {@link EncryptionAlgorithm} implementations 13 | */ 14 | public class EncryptionAlgorithmUtils { 15 | 16 | private static final String PACKAGE = 17 | EncryptionAlgorithm.class.getPackage().getName(); 18 | 19 | /** 20 | * @return all available algorithms and their names 21 | */ 22 | public static Set getAvailableAlgorithms() { 23 | Set algorithms = new HashSet<>(); 24 | Reflections reflections = new Reflections(PACKAGE); 25 | Set> classes = 26 | reflections.getSubTypesOf(EncryptionAlgorithm.class); 27 | for (Class clazz : classes) { 28 | AlgorithmName algorithmName = clazz.getAnnotation(AlgorithmName.class); 29 | String name = algorithmName != null ? 30 | algorithmName.value() : clazz.getCanonicalName(); 31 | algorithms.add(name); 32 | } 33 | return algorithms; 34 | } 35 | 36 | /** 37 | * Get instance of {@link EncryptionAlgorithm} 38 | * 39 | * @param name algorithm name or full class name 40 | * @return instance of {@link EncryptionAlgorithm} 41 | */ 42 | public static EncryptionAlgorithm getEncryptionAlgorithm(String name) { 43 | String nameLowerCase = name.toLowerCase(); 44 | Reflections reflections = new Reflections(PACKAGE); 45 | Set> classes = 46 | reflections.getSubTypesOf(EncryptionAlgorithm.class); 47 | Class algorithmClass = null; 48 | for (Class clazz : classes) { 49 | AlgorithmName algorithmName = clazz.getAnnotation(AlgorithmName.class); 50 | if (algorithmName != null && 51 | algorithmName.value().toLowerCase().equals(nameLowerCase) || 52 | clazz.getCanonicalName().equals(name)) { 53 | algorithmClass = clazz; 54 | break; 55 | } 56 | } 57 | if (algorithmClass == null) { 58 | throw new CommonException("Algorithm '%s' not found", name); 59 | } 60 | try { 61 | return 
algorithmClass.newInstance(); 62 | } catch (InstantiationException | IllegalAccessException e) { 63 | throw new CommonException(e); 64 | } 65 | } 66 | 67 | /** 68 | * Get instance of {@link EncryptionAlgorithm} by class name 69 | * 70 | * @param className full class name 71 | * @return instance of {@link EncryptionAlgorithm} 72 | */ 73 | @SuppressWarnings("unchecked") 74 | public static EncryptionAlgorithm getEncryptionAlgorithmByClass(String className) { 75 | try { 76 | Class algorithmClass = 77 | (Class) Class.forName(className); 78 | return algorithmClass.newInstance(); 79 | } catch (ClassNotFoundException | IllegalAccessException | InstantiationException e) { 80 | throw new CommonException(e); 81 | } 82 | } 83 | 84 | /** 85 | * Get instance of {@link EncryptionAlgorithm} by class 86 | * 87 | * @param algorithmClass class of encryption algorithm 88 | * @return instance of {@link EncryptionAlgorithm} 89 | */ 90 | public static EncryptionAlgorithm getEncryptionAlgorithmByClass( 91 | Class algorithmClass) { 92 | try { 93 | return algorithmClass.newInstance(); 94 | } catch (IllegalAccessException | InstantiationException e) { 95 | throw new CommonException(e); 96 | } 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/utils/KeyType.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.utils; 2 | 3 | import com.nucypher.kafka.INamed; 4 | 5 | /** 6 | * Key type 7 | */ 8 | public enum KeyType implements INamed { 9 | 10 | PRIVATE("priv"), 11 | PUBLIC("pub"), 12 | PRIVATE_AND_PUBLIC("all"), 13 | DEFAULT("def"); 14 | 15 | private String shortName; 16 | 17 | KeyType(String shortName) { 18 | this.shortName = shortName; 19 | } 20 | 21 | @Override 22 | public String getShortName() { 23 | return shortName; 24 | } 25 | 26 | @Override 27 | public String getName() { 28 | return name(); 29 | } 30 | } 31 | 
-------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/utils/StringUtils.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.utils; 2 | 3 | /** 4 | * 5 | */ 6 | public class StringUtils { 7 | 8 | /** 9 | * Is String blank 10 | * "" - true 11 | * " " - true 12 | * "\n\t" - true 13 | * "wfwef " - false 14 | * 15 | * @param string - 16 | * @return - 17 | */ 18 | public static boolean isBlank(String string) { 19 | return string == null || "".equals(string.trim()); 20 | } 21 | 22 | /** 23 | * vs to isBlank 24 | * 25 | * @param string - 26 | * @return - 27 | */ 28 | public static boolean isNotBlank(String string) { 29 | return !isBlank(string); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /commons/src/main/java/com/nucypher/kafka/utils/SubkeyGenerator.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.utils; 2 | 3 | import com.nucypher.kafka.Constants; 4 | import com.nucypher.kafka.errors.CommonException; 5 | 6 | import org.bouncycastle.crypto.Digest; 7 | import org.bouncycastle.crypto.digests.SHA256Digest; 8 | import org.bouncycastle.crypto.generators.HKDFBytesGenerator; 9 | import org.bouncycastle.crypto.params.HKDFParameters; 10 | import org.bouncycastle.jce.interfaces.ECPrivateKey; 11 | import org.bouncycastle.jce.spec.ECParameterSpec; 12 | import org.bouncycastle.jce.spec.ECPrivateKeySpec; 13 | 14 | import java.math.BigInteger; 15 | import java.nio.charset.Charset; 16 | import java.security.KeyFactory; 17 | import java.security.PrivateKey; 18 | 19 | /** 20 | * EC key generator using HKDF instantiated with HMAC-SHA256. It is initialized 21 | * by a master EC private key. 
public class SubkeyGenerator {

    // HKDF is instantiated with HMAC-SHA256
    public final static Digest DIGEST = new SHA256Digest();

    // /** 
    // * Random, non-secret, re-usable salt, as described in RFC5869, Section
    // 3.1.
    // */
    // private final static byte[] SALT =
    // { (byte) 0x07, (byte) 0xfc, (byte) 0xc0, (byte) 0xf0,
    // (byte) 0xeb, (byte) 0xce, (byte) 0xd2, (byte) 0x17,
    // (byte) 0x10, (byte) 0x62, (byte) 0x6b, (byte) 0xcc,
    // (byte) 0xa2, (byte) 0x5b, (byte) 0x9c, (byte) 0x2a };


    /**
     * Derive a child EC private key from a master EC private key using
     * HKDF (HMAC-SHA256), with the encoded master key as the input key
     * material and {@code data} as the HKDF info parameter.
     *
     * @param privateKey master EC private key
     * @param data       context/info bytes for the derivation
     * @param salt       HKDF salt, may be {@code null}
     * @return derived EC private key on the same curve
     */
    public static PrivateKey deriveKey(PrivateKey privateKey, byte[] data, byte[] salt) {

        byte[] inputKeyMaterial = privateKey.getEncoded();

        HKDFParameters params = new HKDFParameters(inputKeyMaterial, salt, data);

        HKDFBytesGenerator hkdf = new HKDFBytesGenerator(DIGEST);

        hkdf.init(params);

        ECParameterSpec parameters = ((ECPrivateKey) privateKey).getParameters();
        BigInteger n = parameters.getN();
        int bitLength = n.bitLength();
        // round the curve order's bit length up to whole bytes
        int byteLength = bitLength / 8 + (bitLength % 8 == 0 ? 0 : 1);

        byte[] bytes = new byte[byteLength];
        hkdf.generateBytes(bytes, 0, byteLength);
        // NOTE(review): new BigInteger(bytes) interprets the HKDF output as
        // SIGNED (first bit is a sign bit); mod(n) still yields a value in
        // [0, n), but an unsigned interpretation (new BigInteger(1, bytes))
        // may have been intended. Changing it now would alter every derived
        // key, so it must stay as-is unless re-derivation is acceptable.
        BigInteger generatedD = new BigInteger(bytes).mod(n);

        ECPrivateKeySpec privateKeySpec = new ECPrivateKeySpec(generatedD, parameters);

        try {
            KeyFactory keyFactory = KeyFactory.getInstance(Constants.KEY_FACTORY_ALGORITHM,
                    Constants.BOUNCY_CASTLE_PROVIDER_NAME);
            return keyFactory.generatePrivate(privateKeySpec);
        } catch (Exception e) {
            throw new CommonException(e);
        }

    }

    /**
     * Derive a child key using a UTF-8 string as the info parameter.
     *
     * @param privateKey master EC private key
     * @param data       context string
     * @param salt       HKDF salt, may be {@code null}
     * @return derived EC private key
     */
    public static PrivateKey deriveKey(PrivateKey privateKey, String data, byte[] salt) {
        return deriveKey(privateKey, data.getBytes(Charset.forName("UTF8")), salt);
    }

    /**
     * Derive a child key with a {@code null} salt.
     *
     * @param privateKey master EC private key
     * @param data       context string
     * @return derived EC private key
     */
    public static PrivateKey deriveKey(PrivateKey privateKey, String data) {
        return deriveKey(privateKey, data.getBytes(Charset.forName("UTF8")), null);
    }

    /**
     * Derive a child key with a {@code null} salt.
     *
     * @param privateKey master EC private key
     * @param data       context/info bytes
     * @return derived EC private key
     */
    public static PrivateKey deriveKey(PrivateKey privateKey, byte[] data) {
        return deriveKey(privateKey, data, null);
    }

}
this.fields = new HashSet<>(); 31 | } 32 | 33 | /** 34 | * Create channel with partial encryption 35 | * 36 | * @param name channel name 37 | * @param fields list of fields 38 | * @param accessorClass structured data accessor class 39 | */ 40 | public Channel(String name, 41 | Set fields, 42 | Class accessorClass) { 43 | this.name = name; 44 | this.type = EncryptionType.GRANULAR; 45 | this.fields = fields; 46 | this.accessorClass = accessorClass; 47 | } 48 | 49 | /** 50 | * @param name channel name 51 | * @param type encryption type 52 | * @param fields list of fields 53 | * @param accessorClass structured data accessor class 54 | */ 55 | public Channel(String name, 56 | EncryptionType type, 57 | Set fields, 58 | Class accessorClass) { 59 | this.name = name; 60 | this.type = type; 61 | this.fields = fields; 62 | this.accessorClass = accessorClass; 63 | } 64 | 65 | /** 66 | * @return channel name 67 | */ 68 | public String getName() { 69 | return name; 70 | } 71 | 72 | /** 73 | * @return collection of fields 74 | */ 75 | public Set getFields() { 76 | return fields; 77 | } 78 | 79 | /** 80 | * @return encryption type 81 | */ 82 | public EncryptionType getType() { 83 | return type; 84 | } 85 | 86 | /** 87 | * @return structured data accessor class 88 | */ 89 | public Class getAccessorClass() { 90 | return accessorClass; 91 | } 92 | 93 | @Override 94 | public boolean equals(Object o) { 95 | if (this == o) return true; 96 | if (o == null || getClass() != o.getClass()) return false; 97 | Channel channel = (Channel) o; 98 | return Objects.equals(name, channel.name) && 99 | type == channel.type && 100 | Objects.equals(fields, channel.fields) && 101 | Objects.equals(accessorClass, channel.accessorClass); 102 | } 103 | 104 | @Override 105 | public int hashCode() { 106 | return Objects.hash(name, type, fields, accessorClass); 107 | } 108 | 109 | @Override 110 | public String toString() { 111 | StringBuilder result = new StringBuilder("Channel ["); 112 | 
result.append("name=").append(name).append(", ");
        result.append("type=").append(type);
        // Field and accessor details are only meaningful for granular (per-field) encryption
        if (type == EncryptionType.GRANULAR) {
            result.append(", ").append("fields=").append(fields);
            if (DataFormat.contains(accessorClass)) {
                DataFormat format = DataFormat.valueOf(accessorClass);
                result.append(", ").append("format=").append(format);
            }
            result.append(", ").append("accessor=").append(accessorClass.getCanonicalName());
        }
        result.append("]");
        return result.toString();
    }
}
--------------------------------------------------------------------------------
/commons/src/main/java/com/nucypher/kafka/zk/ClientType.java:
--------------------------------------------------------------------------------
package com.nucypher.kafka.zk;

import com.nucypher.kafka.INamed;

/**
 * Client type enum
 *
 * @author szotov
 */
public enum ClientType implements INamed {

    /**
     * Consumer type
     */
    CONSUMER("cons"),
    /**
     * Producer type
     */
    PRODUCER("prod");

    private String shortName;

    ClientType(String shortName) {
        this.shortName = shortName;
    }

    @Override
    public String getShortName() {
        return shortName;
    }

    @Override
    public String getName() {
        return name();
    }

    /**
     * Check whether the enum contains the given value
     *
     * @param value value to check
     * @return result of checking
     */
    public static boolean contains(String value) {
        for (ClientType type : values()) {
            if (type.name().equals(value)) {
                return true;
            }
        }
        return false;
    }

}
--------------------------------------------------------------------------------
/commons/src/main/java/com/nucypher/kafka/zk/EncryptionType.java:
--------------------------------------------------------------------------------
package com.nucypher.kafka.zk;

import
com.nucypher.kafka.INamed;
import com.nucypher.kafka.errors.CommonException;

/**
 * Encryption type enum
 *
 * @author szotov
 */
public enum EncryptionType implements INamed {

    /**
     * Message is fully encrypted
     */
    FULL("full", (byte) 0),
    /**
     * Message is encrypted in the fields
     */
    GRANULAR("gran", (byte) 1);

    private String shortName;
    private byte code;

    EncryptionType(String shortName, byte code) {
        this.shortName = shortName;
        this.code = code;
    }

    @Override
    public String getShortName() {
        return shortName;
    }

    @Override
    public String getName() {
        return name();
    }

    /**
     * @return byte code
     */
    public byte getCode() {
        return code;
    }

    /**
     * Get encryption type from code
     *
     * @param code byte code
     * @return encryption type
     */
    public static EncryptionType valueOf(byte code) {
        for (EncryptionType type : EncryptionType.values()) {
            if (type.getCode() == code) {
                return type;
            }
        }
        // NOTE(review): assumes CommonException exposes a (format, args...) constructor — confirm
        throw new CommonException("No such encryption type with code '%d'", code);
    }

}
--------------------------------------------------------------------------------
/commons/src/test/groovy/com/nucypher/kafka/cipher/CipherSpec.groovy:
--------------------------------------------------------------------------------
package com.nucypher.kafka.cipher

import com.nucypher.kafka.utils.AESKeyGenerators
import spock.lang.Shared
import spock.lang.Specification

import java.security.Key

/**
 * Test for ciphers
 */
class CipherSpec extends Specification {

    @Shared
    byte[] originalData
    @Shared
    byte[] iv

    def setupSpec() {
        Random random = new Random()
        originalData = new byte[1024]
        random.nextBytes(originalData)
        iv = new byte[16]
random.nextBytes(iv)
    }

    def 'encrypt and decrypt data using BouncyCastle'() {
        setup: 'initialize'
        Key key = AESKeyGenerators.generateDEK(keySize)
        ICipher cipher = CipherFactory.getCipher(
                CipherFactory.CipherProvider.BOUNCY_CASTLE,
                transformation)

        when: 'encrypt and decrypt data'
        byte[] encryptedData = cipher.encrypt(originalData, key, iv)
        byte[] decryptedData = cipher.decrypt(encryptedData, key, iv)

        then: 'compare original and decrypted data'
        decryptedData == originalData

        where:
        // Cartesian product: every key size (128/192/256-bit) with every transformation
        [keySize, transformation] << [
                [16, 24, 32],
                ["AES/GCM/NoPadding",
                 "AES/CBC/PKCS5Padding",
                 "AES/CBC/PKCS7Padding"]].combinations()
    }

    def 'encrypt and decrypt data using OpenSSL'() {
        setup: 'initialize'
        Key key = AESKeyGenerators.generateDEK(keySize)
        ICipher cipher = CipherFactory.getCipher(
                CipherFactory.CipherProvider.OPENSSL,
                transformation)

        when: 'encrypt and decrypt data'
        byte[] encryptedData = cipher.encrypt(originalData, key, iv)
        byte[] decryptedData = cipher.decrypt(encryptedData, key, iv)

        then: 'compare original and decrypted data'
        decryptedData == originalData

        where:
        // Other transformations were disabled for the OpenSSL provider (kept for reference)
        [keySize, transformation] << [
                [16, 24, 32],
                // ["AES/GCM/NoPadding",
                // "AES/CBC/PKCS5Padding",
                // "AES/CBC/PKCS7Padding"]
                ["AES/CBC/PKCS5Padding"]].combinations()
    }

}
--------------------------------------------------------------------------------
/commons/src/test/groovy/com/nucypher/kafka/clients/MessageHandlerSpec.groovy:
--------------------------------------------------------------------------------
package com.nucypher.kafka.clients

import com.nucypher.kafka.cipher.ICipher
import com.nucypher.kafka.encrypt.DataEncryptionKeyManager
import com.nucypher.kafka.utils.AESKeyGenerators
import com.nucypher.kafka.utils.WrapperReEncryptionKey
import
spock.lang.Specification

import java.security.Key

/**
 * Test for {@link MessageHandler}
 */
class MessageHandlerSpec extends Specification {

    // Shared 256-bit data encryption key used across features
    static final DEK = AESKeyGenerators.generateDEK(32)

    def 'test encryption'() {
        setup: 'initialize'
        Random random = new Random()
        byte[] data = new byte[1024]
        random.nextBytes(data)
        String topic = "TOPIC"
        Key key = DEK

        DataEncryptionKeyManager keyManager = Mock()
        ICipher cipher = Mock()
        MessageHandler messageHandler = new MessageHandler(cipher, keyManager)

        when: 'encrypt message'
        byte[] serialized = messageHandler.encrypt(topic, data)
        Message message = Message.deserialize(serialized)

        then: 'should be message object'
        message.payload == data
        message.iv != null
        message.EDEK.bytes == key.getEncoded()
        1 * keyManager.getDEK(topic) >> key
        1 * keyManager.encryptDEK(key, topic) >> key.getEncoded()
        1 * cipher.encrypt(data, key, _) >> data
    }

    def 'test decryption'() {
        setup: 'initialize'
        Random random = new Random()
        byte[] data = new byte[1024]
        random.nextBytes(data)
        byte[] iv = new byte[data.length]
        random.nextBytes(iv)
        Key key = DEK
        Message message = new Message(
                data, iv, new EncryptedDataEncryptionKey(key.getEncoded()))

        DataEncryptionKeyManager keyManager = Mock()
        ICipher cipher = Mock()
        MessageHandler messageHandler = new MessageHandler(cipher, keyManager)

        when: 'decrypt message'
        byte[] decrypted = messageHandler.decrypt(message.serialize())

        then: 'should be initial data'
        decrypted == data
        1 * keyManager.decryptEDEK(key.getEncoded(), false) >> key
        1 * cipher.decrypt(data, key, iv) >> data
    }

    def 'test re-encryption'() {
        setup: 'initialize'
        Random random = new Random()
        byte[] data = new byte[1024]
        random.nextBytes(data)
byte[] iv = new byte[data.length]
        random.nextBytes(iv)
        String topic = "TOPIC"
        Key key = DEK
        Message message = new Message(
                data, iv, new EncryptedDataEncryptionKey(key.getEncoded()))

        DataEncryptionKeyManager keyManager = Mock()
        WrapperReEncryptionKey reKey = Mock()
        MessageHandler messageHandler = new MessageHandler(keyManager)

        when: 'simple re-encrypt message'
        byte[] reEncrypted = messageHandler.reEncrypt(topic, message.serialize(), reKey)
        message = Message.deserialize(reEncrypted)

        then: 'should be right message object'
        message.payload == data
        message.iv == iv
        message.EDEK.bytes == key.getEncoded()
        !message.EDEK.isComplex()
        1 * keyManager.reEncryptEDEK(topic, key.getEncoded(), reKey) >> key.getEncoded()

        when: 'complex re-encrypt message'
        reEncrypted = messageHandler.reEncrypt(topic, message.serialize(), reKey)
        message = Message.deserialize(reEncrypted)

        then: 'should be right message object'
        message.payload == data
        message.iv == iv
        message.EDEK.bytes == key.getEncoded()
        message.EDEK.isComplex()
        keyManager.reEncryptEDEK(topic, key.getEncoded(), reKey) >> key.getEncoded()
        reKey.isComplex() >> true
    }
}
--------------------------------------------------------------------------------
/commons/src/test/groovy/com/nucypher/kafka/clients/MessageSpec.groovy:
--------------------------------------------------------------------------------
package com.nucypher.kafka.clients

import spock.lang.Specification

/**
 * Test for {@link Message}
 */
class MessageSpec extends Specification {

    def 'test serialization and deserialization'() {
        setup: 'initialize parameters'

        Random random = new Random()
        byte[] edek = "123456789".getBytes()
        byte[] iv = "123456789".getBytes()
        byte[] payload = new byte[1024]
        random.nextBytes(payload)
when: 'serialize and deserialize message'
        Message message = new Message(payload, iv, new EncryptedDataEncryptionKey(edek))
        byte[] serialized = message.serialize()
        message = Message.deserialize(serialized)

        then: 'compare deserialized with original data'
        message.payload == payload
        message.EDEK.bytes == edek
        message.IV == iv
        !message.EDEK.complex
    }
}
--------------------------------------------------------------------------------
/commons/src/test/java/com/nucypher/kafka/TestUtils.java:
--------------------------------------------------------------------------------
package com.nucypher.kafka;

import com.nucypher.crypto.EncryptionAlgorithm;
import com.nucypher.crypto.impl.ElGamalEncryptionAlgorithm;
import com.nucypher.kafka.utils.EncryptionAlgorithmUtils;

import java.util.Arrays;
import java.util.Collection;
import java.util.Set;

/**
 * Utils for tests
 */
public class TestUtils {

    /**
     * Test file which contains EC key pair
     */
    public static final String PEM = "P521.pem";

    /**
     * Encryption algorithm for tests
     */
    public static final EncryptionAlgorithm ENCRYPTION_ALGORITHM =
            new ElGamalEncryptionAlgorithm();

    /**
     * Class of encryption algorithm for tests
     */
    public static final Class ENCRYPTION_ALGORITHM_CLASS =
            ENCRYPTION_ALGORITHM.getClass();

    /**
     * @return available encryption algorithms
     */
    // NOTE(review): generic type parameters appear stripped by extraction (raw Set/Collection);
    // tokens preserved verbatim — confirm against the original repository.
    public static Collection getEncryptionAlgorithms() {
        Set algorithms = EncryptionAlgorithmUtils.getAvailableAlgorithms();
        Object[][] data = new Object[algorithms.size()][];
        int i = 0;
        for (String algorithm : algorithms) {
            data[i] = new Object[]{
                    EncryptionAlgorithmUtils.getEncryptionAlgorithm(algorithm)};
            i++;
        }
        // Object[][] data = new Object[][]{
        //         {new BBS98EncryptionAlgorithm()},
        //         {new
ElGamalEncryptionAlgorithm()}
        // };
        return Arrays.asList(data);
    }

}
--------------------------------------------------------------------------------
/commons/src/test/java/com/nucypher/kafka/utils/GranularUtilsTest.java:
--------------------------------------------------------------------------------
package com.nucypher.kafka.utils;

import com.nucypher.kafka.errors.CommonException;
import org.hamcrest.core.StringContains;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import java.security.PrivateKey;

import static org.junit.Assert.assertNotNull;

/**
 * Test for {@link GranularUtils}
 *
 * @author szotov
 */
public class GranularUtilsTest {

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    /**
     * Test generating private keys from messages
     */
    @Test
    public void testGeneratePrivateKeysFromMessages() throws Exception {
        String privateKey = getClass().getResource("/private-key-prime256v1-1.pem").getPath();
        PrivateKey key = GranularUtils.deriveKeyFromData(privateKey, "a.c");
        assertNotNull(key);
    }

    /**
     * Test generating private keys from messages with public key
     */
    @Test
    public void testGeneratePrivateKeysFromMessagesWithException() throws Exception {
        expectedException.expect(CommonException.class);
        expectedException.expectMessage(StringContains.containsString("must contain private key"));
        // A public-key PEM must be rejected because key derivation needs the private part
        String key = getClass().getResource("/public-key-secp521r1-2.pem").getPath();
        GranularUtils.deriveKeyFromData(key, null);
    }

}
--------------------------------------------------------------------------------
/commons/src/test/java/com/nucypher/kafka/utils/KeyUtilsAlgorithmTest.java:
--------------------------------------------------------------------------------
package
com.nucypher.kafka.utils;

import com.nucypher.crypto.EncryptionAlgorithm;
import com.nucypher.kafka.TestUtils;
import org.bouncycastle.jce.ECNamedCurveTable;
import org.bouncycastle.jce.spec.ECParameterSpec;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.IOException;
import java.util.Collection;
import java.util.Random;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;

/**
 * Parameterized test for {@link KeyUtils} using encryption algorithms
 *
 * @author szotov
 */
@RunWith(value = Parameterized.class)
public final class KeyUtilsAlgorithmTest {

    @Rule
    public TemporaryFolder testFolder = new TemporaryFolder();

    private Random random = new Random();
    private EncryptionAlgorithm algorithm;

    /**
     * @param algorithm {@link EncryptionAlgorithm}
     */
    public KeyUtilsAlgorithmTest(EncryptionAlgorithm algorithm) {
        this.algorithm = algorithm;
    }

    /**
     * @return collection of {@link EncryptionAlgorithm} values
     */
    @Parameterized.Parameters
    public static Collection data() {
        return TestUtils.getEncryptionAlgorithms();
    }

    /**
     * Test converting byte array to {@link WrapperReEncryptionKey} and back
     */
    @Test
    public void testWrapperReKey() throws IOException {
        ECParameterSpec ecSpec = ECNamedCurveTable.getParameterSpec("secp521r1");
        byte[] key = new byte[16];
        random.nextBytes(key);

        // Round-trip: wrapper -> bytes -> wrapper must preserve all components
        WrapperReEncryptionKey wrapper = new WrapperReEncryptionKey(algorithm, key, ecSpec);
        byte[] serializedData = wrapper.toByteArray();
        wrapper = WrapperReEncryptionKey.getInstance(serializedData);

        assertEquals(algorithm.getClass(),
wrapper.getAlgorithm().getClass());
        assertArrayEquals(key, wrapper.getReEncryptionKey().toByteArray());
        assertEquals(ecSpec, wrapper.getECParameterSpec());

        byte[] encryptedRandomKey = new byte[32];
        random.nextBytes(encryptedRandomKey);

        // Same round-trip, now including the optional encrypted random key component
        wrapper = new WrapperReEncryptionKey(algorithm, key, ecSpec, encryptedRandomKey);
        serializedData = wrapper.toByteArray();
        wrapper = WrapperReEncryptionKey.getInstance(serializedData);

        assertEquals(algorithm.getClass(), wrapper.getAlgorithm().getClass());
        assertArrayEquals(key, wrapper.getReEncryptionKey().toByteArray());
        assertEquals(ecSpec, wrapper.getECParameterSpec());
        assertArrayEquals(encryptedRandomKey, wrapper.getEncryptedRandomKey());
    }

}
--------------------------------------------------------------------------------
/commons/src/test/java/com/nucypher/kafka/utils/SubkeyGeneratorTest.java:
--------------------------------------------------------------------------------
package com.nucypher.kafka.utils;

import com.nucypher.crypto.EncryptionAlgorithm;
import com.nucypher.kafka.TestUtils;
import org.bouncycastle.jce.ECNamedCurveTable;
import org.bouncycastle.jce.spec.ECParameterSpec;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.IOException;
import java.security.PrivateKey;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;

/**
 * Test for {@link SubkeyGenerator}
 *
 * @author szotov
 */
@RunWith(Parameterized.class)
public final class SubkeyGeneratorTest {

    private static final EncryptionAlgorithm ALGORITHM = TestUtils.ENCRYPTION_ALGORITHM;

    private ECParameterSpec ecParameterSpec;

    /**
     * @return array of
available EC curve names
     */
    @Parameterized.Parameters(name = "{index}: Curve name {0}")
    public static Object[] data() {
        // Deduplicate curve names via a Set before returning them as parameters
        Set set = new HashSet<>();
        set.addAll(Collections.list(org.bouncycastle.asn1.x9.ECNamedCurveTable.getNames()));
        return set.toArray();
    }

    public SubkeyGeneratorTest(String curveName) {
        ecParameterSpec = ECNamedCurveTable.getParameterSpec(curveName);
    }

    /**
     * Test generating key using HMAC
     */
    @Test
    public void testGenerateKey() throws IOException {
        for (int i = 0; i < 20; i++) {
            PrivateKey basePrivateKey = ALGORITHM.generateECKeyPair(ecParameterSpec).getPrivate();

            // Different derivation data must yield different subkeys
            String message = "a.b.2";
            PrivateKey generatedPrivateKey1 = SubkeyGenerator.deriveKey(basePrivateKey, message, null);

            message = "a.b";
            PrivateKey generatedPrivateKey2 = SubkeyGenerator.deriveKey(basePrivateKey, message, null);
            assertNotEquals(generatedPrivateKey1, generatedPrivateKey2);

            // Same base key and data must derive the same subkey (determinism)
            PrivateKey generatedPrivateKey3 = SubkeyGenerator.deriveKey(basePrivateKey, message, null);
            assertEquals(generatedPrivateKey2, generatedPrivateKey3);

            // A different base key must derive a different subkey
            basePrivateKey = ALGORITHM.generateECKeyPair(ecParameterSpec).getPrivate();
            PrivateKey generatedPrivateKey4 = SubkeyGenerator.deriveKey(basePrivateKey, message, null);
            assertNotEquals(generatedPrivateKey3, generatedPrivateKey4);
        }
    }

    // @Test
    // public void testDeterministic() {
    //     ECKeyPairGenerator keyGenerator = new ECKeyPairGenerator(ecSpec);
    //     PrivateKey basePrivateKey = keyGenerator.generateKeyPair().getPrivate();
    //
    //     String message = "a.b.2";
    //     HmacKeyGenerator hmacGenerator = new HmacKeyGenerator(basePrivateKey);
    //     PrivateKey generatedPrivateKey1 = hmacGenerator.generatePrivateKey(message);
    //
    //     hmacGenerator = new HmacKeyGenerator(basePrivateKey);
    //     PrivateKey generatedPrivateKey2 =
hmacGenerator.generatePrivateKey(message);
    //
    //     assertEquals(generatedPrivateKey1, generatedPrivateKey2);
    //
    // }
    //
    // @Test
    // public void testStateless() {
    //     ECKeyPairGenerator keyGenerator = new ECKeyPairGenerator(ecSpec);
    //     PrivateKey basePrivateKey = keyGenerator.generateKeyPair().getPrivate();
    //
    //     String message = "a.b.2";
    //     HmacKeyGenerator hmacGenerator = new HmacKeyGenerator(basePrivateKey);
    //     PrivateKey generatedPrivateKey1 = hmacGenerator.generatePrivateKey(message);
    //
    //     hmacGenerator = new HmacKeyGenerator(basePrivateKey);
    //     PrivateKey generatedPrivateKey2 = hmacGenerator.generatePrivateKey(message);
    //
    //     assertEquals(generatedPrivateKey1, generatedPrivateKey2);
    //
    // }
}
--------------------------------------------------------------------------------
/commons/src/test/java/com/nucypher/kafka/zk/DataUtils.java:
--------------------------------------------------------------------------------
package com.nucypher.kafka.zk;

import com.nucypher.crypto.EncryptionAlgorithm;
import com.nucypher.kafka.TestUtils;
import com.nucypher.kafka.clients.granular.StructuredDataAccessorStub;
import com.nucypher.kafka.utils.WrapperReEncryptionKey;
import org.bouncycastle.jce.ECNamedCurveTable;
import org.bouncycastle.jce.spec.ECParameterSpec;

import java.math.BigInteger;
import java.util.Random;
import java.util.concurrent.TimeUnit;

/**
 * Class with data generators for tests
 *
 * @author szotov
 */
public class DataUtils {

    private static final EncryptionAlgorithm ALGORITHM = TestUtils.ENCRYPTION_ALGORITHM;
    private static final Random RANDOM = new Random();
    private static final ECParameterSpec EC_SPEC =
            ECNamedCurveTable.getParameterSpec("secp521r1");

    /**
     * @return empty re-encryption key
     */
    public static
WrapperReEncryptionKey getReEncryptionKeyEmpty() { 30 | return new WrapperReEncryptionKey(); 31 | } 32 | 33 | /** 34 | * @return random re-encryption key 35 | */ 36 | public static WrapperReEncryptionKey getReEncryptionKeySimple() { 37 | byte[] data1 = new byte[7]; 38 | RANDOM.nextBytes(data1); 39 | return new WrapperReEncryptionKey(ALGORITHM, data1, EC_SPEC); 40 | } 41 | 42 | /** 43 | * @return random re-encryption key with additional key 44 | */ 45 | public static WrapperReEncryptionKey getReEncryptionKeyComplex() { 46 | byte[] data1 = new byte[7]; 47 | RANDOM.nextBytes(data1); 48 | byte[] data2 = new byte[14]; 49 | RANDOM.nextBytes(data2); 50 | return new WrapperReEncryptionKey(ALGORITHM, data1, EC_SPEC, data2); 51 | } 52 | 53 | /** 54 | * @return random date after current date 55 | */ 56 | public static Long getExpiredMillis() { 57 | return System.currentTimeMillis() + TimeUnit.DAYS.toMillis(RANDOM.nextInt()); 58 | } 59 | 60 | /** 61 | * Convert expired date to byte array 62 | * 63 | * @param expired expired date 64 | * @return converted date 65 | */ 66 | public static byte[] getByteArrayFromExpired(Long expired) { 67 | return BigInteger.valueOf(expired).toByteArray(); 68 | } 69 | 70 | /** 71 | * @return bytes for {@link EncryptionType#FULL} 72 | */ 73 | public static byte[] getFullEncryptedChannel() { 74 | return new byte[]{EncryptionType.FULL.getCode()}; 75 | } 76 | 77 | /** 78 | * @return bytes for {@link EncryptionType#GRANULAR} and {@link StructuredDataAccessorStub} 79 | */ 80 | public static byte[] getPartialEncryptedChannel() { 81 | byte[] classNameBytes = StructuredDataAccessorStub.class.getCanonicalName().getBytes(); 82 | byte[] data = new byte[classNameBytes.length + 1]; 83 | data[0] = EncryptionType.GRANULAR.getCode(); 84 | System.arraycopy(classNameBytes, 0, data, 1, classNameBytes.length); 85 | return data; 86 | } 87 | 88 | } 89 | -------------------------------------------------------------------------------- 
/commons/src/test/java/com/nucypher/kafka/zk/ZooKeeperSASLResource.java:
--------------------------------------------------------------------------------
package com.nucypher.kafka.zk;

import com.nucypher.kafka.errors.CommonException;
import org.apache.curator.RetryPolicy;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.RetryOneTime;
import org.apache.curator.test.InstanceSpec;
import org.apache.curator.test.TestingServer;
import org.junit.rules.ExternalResource;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

/**
 * Curator testing server with SASL as resource
 *
 * @author szotov
 *
 */
public final class ZooKeeperSASLResource extends ExternalResource {
    private TestingServer testingServer;
    private CuratorFramework curatorFramework;
    private CuratorFramework adminCuratorFramework;
    private RetryPolicy retryPolicy;

    public ZooKeeperSASLResource() {
        // Default policy: retry once after 1000 ms
        this(new RetryOneTime(1000));
    }

    public ZooKeeperSASLResource(RetryPolicy curatorRetryPolicy) {
        this.retryPolicy = curatorRetryPolicy;
    }

    @Override
    protected void before() throws Exception {
        // Point JAAS at the test configuration and install a superuser digest for "zkAdmin"
        String jaas = getClass().getResource("/jaas_test.conf").getPath();
        System.setProperty("java.security.auth.login.config", jaas);
        System.setProperty("zookeeper.DigestAuthenticationProvider.superDigest",
                "zkAdmin:AjgEq8UnRSdnG/GV6d2ONoelKZc=");

        Map customProperties = new HashMap<>();
        customProperties.put("authProvider.1",
                "org.apache.zookeeper.server.auth.SASLAuthenticationProvider");
        InstanceSpec spec = new InstanceSpec(null, -1, -1, -1, true, 1, -1, -1, customProperties);
        testingServer = new TestingServer(spec, true);
        curatorFramework = CuratorFrameworkFactory.newClient(
testingServer.getConnectString(), retryPolicy);
        curatorFramework.start();
        adminCuratorFramework = CuratorFrameworkFactory.builder()
                .authorization("digest", "zkAdmin:123".getBytes())
                .connectString(testingServer.getConnectString())
                .retryPolicy(retryPolicy)
                .build();
        adminCuratorFramework.start();
        //curatorFramework.blockUntilConnected();
        // blockUntilConnected() did not work for a plain ZooKeeper connection,
        // so issue a cheap request to force the connection instead
        curatorFramework.checkExists().forPath("/");
        System.out.println("ZooKeeper server was started");
    }

    @Override
    protected void after() {
        try {
            curatorFramework.close();
            testingServer.stop();
        } catch (IOException e) {
            throw new CommonException(e);
        }
        System.out.println("ZooKeeper server was stopped");
    }

    /**
     * @return {@link TestingServer}
     */
    public TestingServer getZooKeeperTestingServer() {
        return testingServer;
    }

    /**
     * @return {@link CuratorFramework}
     */
    public CuratorFramework getApacheCuratorFramework() {
        return curatorFramework;
    }

    /**
     * @return {@link CuratorFramework} with admin privileges
     */
    public CuratorFramework getAdminApacheCuratorFramework() {
        return adminCuratorFramework;
    }
}
--------------------------------------------------------------------------------
/commons/src/test/resources/P521.pem:
--------------------------------------------------------------------------------
-----BEGIN EC PARAMETERS-----
MIIBwwIBATBNBgcqhkjOPQEBAkIB////////////////////////////////////
//////////////////////////////////////////////////8wgZ8EQgH/////
////////////////////////////////////////////////////////////////
/////////////////ARCAFGVPrlhjhyaH5KaIaC2hUDuotpyW5mzFfO4tImRjvEJ
4VYZOVHsfpN7FlLAvTuxvwc1c9+IPSw08e9FH9RrUD8AAxUA0J6IACkcuFOWzGcX
OTKEqqDaZLoEgYUEAMaFjga3BATpzZ4+y2YjlbRCnGSBOQU/tSH4KK9ga009uqFL
Xnfv51ko/h3BJ6L/qN4zSLPBhWpCm/l+fjHC5b1mARg5KWp4mjvABFyKX7QsfRvZ 9 | mPVESVebRGgXr70XJz5mLJfucple9CZAxVC5AT+tB2E1PHCGonLCQIi+lHaf0WZQ 10 | AkIB///////////////////////////////////////////6UYaHg78vlmt/zAFI 11 | 9wml0Du1ybiJnEeuu2+3HpE4ZAkCAQE= 12 | -----END EC PARAMETERS----- 13 | -----BEGIN EC PRIVATE KEY----- 14 | MIHcAgEBBEIAfzzIW+AJhTtmFSp3qqLWajKIDj1cYodPac0GYO8ku217TNUy6KmX 15 | WDmc3nY3snVf29G2cZdb8JU9aHs/+b541aagBwYFK4EEACOhgYkDgYYABAFeHVTy 16 | WkYVAACm2xj8OM5o4YkqTgzq4eSs0gT7knuoQb0dRS3LKtwfg89h7LRA3VKmRk38 17 | oHdQln0J3ZfjfM+8ogFC1j+l0bFQmbNqDSan2HaqLobrfMw/1j4Jsz5hkwCjQEoU 18 | GOlHBIJVZU0QZL3ByFXUe2ouGNXFvHpW/tPgofTR9Q== 19 | -----END EC PRIVATE KEY----- 20 | -----BEGIN PUBLIC KEY----- 21 | MIGbMBAGByqGSM49AgEGBSuBBAAjA4GGAAQBXh1U8lpGFQAAptsY/DjOaOGJKk4M 22 | 6uHkrNIE+5J7qEG9HUUtyyrcH4PPYey0QN1SpkZN/KB3UJZ9Cd2X43zPvKIBQtY/ 23 | pdGxUJmzag0mp9h2qi6G63zMP9Y+CbM+YZMAo0BKFBjpRwSCVWVNEGS9wchV1Htq 24 | LhjVxbx6Vv7T4KH00fU= 25 | -----END PUBLIC KEY----- 26 | -------------------------------------------------------------------------------- /commons/src/test/resources/jaas_test.conf: -------------------------------------------------------------------------------- 1 | Server { 2 | org.apache.zookeeper.server.auth.DigestLoginModule required 3 | user_kafka="test" 4 | user_zkAdmin1="123"; 5 | }; 6 | 7 | Client { 8 | org.apache.zookeeper.server.auth.DigestLoginModule required 9 | username="zkAdmin1" 10 | password="123"; 11 | }; -------------------------------------------------------------------------------- /commons/src/test/resources/log4j2.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /commons/src/test/resources/private-key-prime256v1-1.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN EC PARAMETERS----- 2 | 
MIH3AgEBMCwGByqGSM49AQECIQD/////AAAAAQAAAAAAAAAAAAAAAP////////// 3 | /////zBbBCD/////AAAAAQAAAAAAAAAAAAAAAP///////////////AQgWsY12Ko6 4 | k+ez671VdpiGvGUdBrDMU7D2O848PifSYEsDFQDEnTYIhucEk2pmeOETnSa3gZ9+ 5 | kARBBGsX0fLhLEJH+Lzm5WOkQPJ3A32BLeszoPShOUXYmMKWT+NC4v4af5uO5+tK 6 | fA+eFivOM1drMV7Oy7ZAaDe/UfUCIQD/////AAAAAP//////////vOb6racXnoTz 7 | ucrC/GMlUQIBAQ== 8 | -----END EC PARAMETERS----- 9 | -----BEGIN EC PRIVATE KEY----- 10 | MIIBaAIBAQQgNmTBnRu+FXWNrR50qv70AnXIvNsYyMiKYv9AxVX3zGeggfowgfcC 11 | AQEwLAYHKoZIzj0BAQIhAP////8AAAABAAAAAAAAAAAAAAAA//////////////// 12 | MFsEIP////8AAAABAAAAAAAAAAAAAAAA///////////////8BCBaxjXYqjqT57Pr 13 | vVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMVAMSdNgiG5wSTamZ44ROdJreBn36QBEEE 14 | axfR8uEsQkf4vOblY6RA8ncDfYEt6zOg9KE5RdiYwpZP40Li/hp/m47n60p8D54W 15 | K84zV2sxXs7LtkBoN79R9QIhAP////8AAAAA//////////+85vqtpxeehPO5ysL8 16 | YyVRAgEBoUQDQgAEdo3beXtMvGdZluLHUamYnzpiiRAFjmty1ua+CsouuhPaAy3J 17 | WT2mRyd01PPaHH14YOJqwqPifYm+JT99OGRgmw== 18 | -----END EC PRIVATE KEY----- 19 | -------------------------------------------------------------------------------- /commons/src/test/resources/private-key-prime256v1-2.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN EC PRIVATE KEY----- 2 | MIIBaAIBAQQgacWDiWtPaaIDU9MF2q+DvxykRGiSw9znddG/goZdfaaggfowgfcC 3 | AQEwLAYHKoZIzj0BAQIhAP////8AAAABAAAAAAAAAAAAAAAA//////////////// 4 | MFsEIP////8AAAABAAAAAAAAAAAAAAAA///////////////8BCBaxjXYqjqT57Pr 5 | vVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMVAMSdNgiG5wSTamZ44ROdJreBn36QBEEE 6 | axfR8uEsQkf4vOblY6RA8ncDfYEt6zOg9KE5RdiYwpZP40Li/hp/m47n60p8D54W 7 | K84zV2sxXs7LtkBoN79R9QIhAP////8AAAAA//////////+85vqtpxeehPO5ysL8 8 | YyVRAgEBoUQDQgAE+KPkN8huxdPrBNMScrUXg1SI9rafSbpnS6O5oZThESDMmbr8 9 | Nm1ZzrHyRmZW8Vhma/mlHd0xAgACmqrmw7qGvQ== 10 | -----END EC PRIVATE KEY----- 11 | -------------------------------------------------------------------------------- /commons/src/test/resources/private-key-secp521r1-1.pem: 
-------------------------------------------------------------------------------- 1 | -----BEGIN EC PARAMETERS----- 2 | MIIBwgIBATBNBgcqhkjOPQEBAkIB//////////////////////////////////// 3 | //////////////////////////////////////////////////8wgZ4EQgH///// 4 | //////////////////////////////////////////////////////////////// 5 | /////////////////ARBUZU+uWGOHJofkpohoLaFQO6i2nJbmbMV87i0iZGO8Qnh 6 | Vhk5Uex+k3sWUsC9O7G/BzVz34g9LDTx70Uf1GtQPwADFQDQnogAKRy4U5bMZxc5 7 | MoSqoNpkugSBhQQAxoWOBrcEBOnNnj7LZiOVtEKcZIE5BT+1Ifgor2BrTT26oUte 8 | d+/nWSj+HcEnov+o3jNIs8GFakKb+X5+McLlvWYBGDkpaniaO8AEXIpftCx9G9mY 9 | 9URJV5tEaBevvRcnPmYsl+5ymV70JkDFULkBP60HYTU8cIaicsJAiL6Udp/RZlAC 10 | QgH///////////////////////////////////////////pRhoeDvy+Wa3/MAUj3 11 | CaXQO7XJuImcR667b7cekThkCQIBAQ== 12 | -----END EC PARAMETERS----- 13 | -----BEGIN EC PRIVATE KEY----- 14 | MIICnQIBAQRCAR6VVy6PiJEkgeJgaldralPppdWjUxQY5Ga0XR6BwVm21oudThpi 15 | Tcjob8RlevHIJwEMkx0mkcL17isZeXHv5aA5oIIBxjCCAcICAQEwTQYHKoZIzj0B 16 | AQJCAf////////////////////////////////////////////////////////// 17 | ////////////////////////////MIGeBEIB//////////////////////////// 18 | //////////////////////////////////////////////////////////wEQVGV 19 | PrlhjhyaH5KaIaC2hUDuotpyW5mzFfO4tImRjvEJ4VYZOVHsfpN7FlLAvTuxvwc1 20 | c9+IPSw08e9FH9RrUD8AAxUA0J6IACkcuFOWzGcXOTKEqqDaZLoEgYUEAMaFjga3 21 | BATpzZ4+y2YjlbRCnGSBOQU/tSH4KK9ga009uqFLXnfv51ko/h3BJ6L/qN4zSLPB 22 | hWpCm/l+fjHC5b1mARg5KWp4mjvABFyKX7QsfRvZmPVESVebRGgXr70XJz5mLJfu 23 | cple9CZAxVC5AT+tB2E1PHCGonLCQIi+lHaf0WZQAkIB//////////////////// 24 | ///////////////////////6UYaHg78vlmt/zAFI9wml0Du1ybiJnEeuu2+3HpE4 25 | ZAkCAQGhgYkDgYYABAH88mtkVmCPYoaH4yUg3L9RSVsp1Z6o795fWs3ssp4NrR3A 26 | nPMLWJbM0eA2HvMYxNAN+jgIQYJq3W1lJXD7cAEvRgFJssgZGphhH3z5bYDTzfxb 27 | 9UAFqBjj1tqp+l4vJvMUIbT3lIcKViy7LwtWjgp60pbZySE3gFpS6GaYpq/QpSqh 28 | xg== 29 | -----END EC PRIVATE KEY----- 30 | -------------------------------------------------------------------------------- 
/commons/src/test/resources/private-key-secp521r1-2.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN EC PRIVATE KEY----- 2 | MIICnQIBAQRCATQ2TXo4qsdPz3HgLJ+VOsK/wLn2qS1VPxze6VgczfNKhAU5RYAa 3 | 9ZzbZ/ynRkGWNHJ4TYEdea2uyF6vQEayLqttoIIBxjCCAcICAQEwTQYHKoZIzj0B 4 | AQJCAf////////////////////////////////////////////////////////// 5 | ////////////////////////////MIGeBEIB//////////////////////////// 6 | //////////////////////////////////////////////////////////wEQVGV 7 | PrlhjhyaH5KaIaC2hUDuotpyW5mzFfO4tImRjvEJ4VYZOVHsfpN7FlLAvTuxvwc1 8 | c9+IPSw08e9FH9RrUD8AAxUA0J6IACkcuFOWzGcXOTKEqqDaZLoEgYUEAMaFjga3 9 | BATpzZ4+y2YjlbRCnGSBOQU/tSH4KK9ga009uqFLXnfv51ko/h3BJ6L/qN4zSLPB 10 | hWpCm/l+fjHC5b1mARg5KWp4mjvABFyKX7QsfRvZmPVESVebRGgXr70XJz5mLJfu 11 | cple9CZAxVC5AT+tB2E1PHCGonLCQIi+lHaf0WZQAkIB//////////////////// 12 | ///////////////////////6UYaHg78vlmt/zAFI9wml0Du1ybiJnEeuu2+3HpE4 13 | ZAkCAQGhgYkDgYYABAH6R65G+fDSK9MYuoTPv1wu25MSC7dy5mQxIUl9vUG5T40m 14 | itmKuFSjql3n2r34Bl0l4xLVQ3GckmvJ2vsC25LaKgApgQSdvsSKpNfe876ciIRg 15 | //XwYXec21yrx+T3QdfsZ2meYuM+O+49AgokobYXho+tO8Y5dB63Po5WJQNnQPGg 16 | RQ== 17 | -----END EC PRIVATE KEY----- 18 | -------------------------------------------------------------------------------- /commons/src/test/resources/public-key-prime256v1-1.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN PUBLIC KEY----- 2 | MIIBSzCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAABAAAA 3 | AAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA//// 4 | ///////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMVAMSd 5 | NgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg9KE5 6 | RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8AAAAA 7 | //////////+85vqtpxeehPO5ysL8YyVRAgEBA0IABHaN23l7TLxnWZbix1GpmJ86 8 | YokQBY5rctbmvgrKLroT2gMtyVk9pkcndNTz2hx9eGDiasKj4n2JviU/fThkYJs= 9 | -----END PUBLIC KEY----- 10 | 
-------------------------------------------------------------------------------- /commons/src/test/resources/public-key-prime256v1-2.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN PUBLIC KEY----- 2 | MIIBSzCCAQMGByqGSM49AgEwgfcCAQEwLAYHKoZIzj0BAQIhAP////8AAAABAAAA 3 | AAAAAAAAAAAA////////////////MFsEIP////8AAAABAAAAAAAAAAAAAAAA//// 4 | ///////////8BCBaxjXYqjqT57PrvVV2mIa8ZR0GsMxTsPY7zjw+J9JgSwMVAMSd 5 | NgiG5wSTamZ44ROdJreBn36QBEEEaxfR8uEsQkf4vOblY6RA8ncDfYEt6zOg9KE5 6 | RdiYwpZP40Li/hp/m47n60p8D54WK84zV2sxXs7LtkBoN79R9QIhAP////8AAAAA 7 | //////////+85vqtpxeehPO5ysL8YyVRAgEBA0IABPij5DfIbsXT6wTTEnK1F4NU 8 | iPa2n0m6Z0ujuaGU4REgzJm6/DZtWc6x8kZmVvFYZmv5pR3dMQIAApqq5sO6hr0= 9 | -----END PUBLIC KEY----- 10 | -------------------------------------------------------------------------------- /commons/src/test/resources/public-key-secp521r1-1.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN PUBLIC KEY----- 2 | MIICXDCCAc8GByqGSM49AgEwggHCAgEBME0GByqGSM49AQECQgH///////////// 3 | //////////////////////////////////////////////////////////////// 4 | /////////zCBngRCAf////////////////////////////////////////////// 5 | ///////////////////////////////////////8BEFRlT65YY4cmh+SmiGgtoVA 6 | 7qLacluZsxXzuLSJkY7xCeFWGTlR7H6TexZSwL07sb8HNXPfiD0sNPHvRR/Ua1A/ 7 | AAMVANCeiAApHLhTlsxnFzkyhKqg2mS6BIGFBADGhY4GtwQE6c2ePstmI5W0Qpxk 8 | gTkFP7Uh+CivYGtNPbqhS1537+dZKP4dwSei/6jeM0izwYVqQpv5fn4xwuW9ZgEY 9 | OSlqeJo7wARcil+0LH0b2Zj1RElXm0RoF6+9Fyc+ZiyX7nKZXvQmQMVQuQE/rQdh 10 | NTxwhqJywkCIvpR2n9FmUAJCAf////////////////////////////////////// 11 | ////+lGGh4O/L5Zrf8wBSPcJpdA7tcm4iZxHrrtvtx6ROGQJAgEBA4GGAAQB/PJr 12 | ZFZgj2KGh+MlINy/UUlbKdWeqO/eX1rN7LKeDa0dwJzzC1iWzNHgNh7zGMTQDfo4 13 | CEGCat1tZSVw+3ABL0YBSbLIGRqYYR98+W2A0838W/VABagY49baqfpeLybzFCG0 14 | 95SHClYsuy8LVo4KetKW2ckhN4BaUuhmmKav0KUqocY= 15 | -----END PUBLIC KEY----- 16 | 
-------------------------------------------------------------------------------- /commons/src/test/resources/public-key-secp521r1-2.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN PUBLIC KEY----- 2 | MIICXDCCAc8GByqGSM49AgEwggHCAgEBME0GByqGSM49AQECQgH///////////// 3 | //////////////////////////////////////////////////////////////// 4 | /////////zCBngRCAf////////////////////////////////////////////// 5 | ///////////////////////////////////////8BEFRlT65YY4cmh+SmiGgtoVA 6 | 7qLacluZsxXzuLSJkY7xCeFWGTlR7H6TexZSwL07sb8HNXPfiD0sNPHvRR/Ua1A/ 7 | AAMVANCeiAApHLhTlsxnFzkyhKqg2mS6BIGFBADGhY4GtwQE6c2ePstmI5W0Qpxk 8 | gTkFP7Uh+CivYGtNPbqhS1537+dZKP4dwSei/6jeM0izwYVqQpv5fn4xwuW9ZgEY 9 | OSlqeJo7wARcil+0LH0b2Zj1RElXm0RoF6+9Fyc+ZiyX7nKZXvQmQMVQuQE/rQdh 10 | NTxwhqJywkCIvpR2n9FmUAJCAf////////////////////////////////////// 11 | ////+lGGh4O/L5Zrf8wBSPcJpdA7tcm4iZxHrrtvtx6ROGQJAgEBA4GGAAQB+keu 12 | Rvnw0ivTGLqEz79cLtuTEgu3cuZkMSFJfb1BuU+NJorZirhUo6pd59q9+AZdJeMS 13 | 1UNxnJJrydr7AtuS2ioAKYEEnb7EiqTX3vO+nIiEYP/18GF3nNtcq8fk90HX7Gdp 14 | nmLjPjvuPQIKJKG2F4aPrTvGOXQetz6OViUDZ0DxoEU= 15 | -----END PUBLIC KEY----- 16 | -------------------------------------------------------------------------------- /dependencies_libs.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'java' 2 | 3 | task copyDependenciesToLib(type: Copy) { 4 | into "$buildDir/libs/lib" 5 | from configurations.runtime 6 | } 7 | 8 | build.dependsOn copyDependenciesToLib -------------------------------------------------------------------------------- /examples/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'java' 2 | 3 | group 'com.nucypher.kafka' 4 | 5 | sourceCompatibility = 1.8 6 | targetCompatibility = 1.8 7 | 8 | dependencies { 9 | 10 | compile project(':clients') 11 | compile project(':proxy') 12 | compile project(path: ':commons', configuration: 
'testArchives') 13 | 14 | compile "org.hdrhistogram:HdrHistogram:2.1.9" 15 | compile("io.confluent:kafka-schema-registry:$confluentVersion") { 16 | exclude group: "org.slf4j", module: 'slf4j-api' 17 | exclude group: "org.slf4j", module: 'slf4j-log4j12' 18 | exclude group: "org.apache.avro", module: 'avro' 19 | exclude group: "com.fasterxml.jackson.core", module: 'jackson-databind' 20 | } 21 | compile "org.openjdk.jmh:jmh-core:$jmhVersion" 22 | compile "org.openjdk.jmh:jmh-generator-annprocess:$jmhVersion" 23 | } 24 | 25 | jar { 26 | baseName = 'nucypher-kafka-examples' 27 | } 28 | 29 | copyDependenciesToLib { 30 | 31 | into "$buildDir/libs/lib" 32 | from configurations.runtime 33 | 34 | doLast { 35 | jar { 36 | 37 | manifest { 38 | attributes("Class-Path": configurations.runtime.collect { "lib/$it.name" }.join(' ')) 39 | } 40 | } 41 | } 42 | 43 | } 44 | 45 | jar.dependsOn copyDependenciesToLib 46 | 47 | def projectName = "${jar.baseName}-${version}" 48 | 49 | task zip(type: Zip) { 50 | archivesBaseName = "${jar.baseName}" 51 | from("$buildDir/libs") { 52 | include '**/*.jar' 53 | into("${projectName}") 54 | } 55 | } 56 | 57 | task tar(type: Tar) { 58 | archivesBaseName = "${jar.baseName}" 59 | from("$buildDir/libs") { 60 | include '**/*.jar' 61 | into("${projectName}") 62 | } 63 | } 64 | 65 | zip.dependsOn jar 66 | zip.dependsOn copyDependenciesToLib 67 | tar.dependsOn jar 68 | tar.dependsOn copyDependenciesToLib 69 | build.dependsOn zip 70 | build.dependsOn tar 71 | 72 | -------------------------------------------------------------------------------- /examples/src/main/java/com/nucypher/kafka/clients/example/granular/AvroConsumer.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients.example.granular; 2 | 3 | import com.google.common.io.Resources; 4 | import com.nucypher.kafka.TestUtils; 5 | import com.nucypher.kafka.clients.decrypt.AesStructuredMessageDeserializer; 6 | import 
com.nucypher.kafka.clients.example.utils.JaasUtils; 7 | import com.nucypher.kafka.clients.granular.DataFormat; 8 | import com.nucypher.kafka.utils.KeyUtils; 9 | import org.HdrHistogram.Histogram; 10 | import org.apache.avro.file.DataFileReader; 11 | import org.apache.avro.file.SeekableByteArrayInput; 12 | import org.apache.avro.file.SeekableInput; 13 | import org.apache.avro.generic.GenericDatumReader; 14 | import org.apache.avro.generic.GenericRecord; 15 | import org.apache.avro.io.DatumReader; 16 | import org.apache.kafka.clients.consumer.ConsumerRecord; 17 | import org.apache.kafka.clients.consumer.ConsumerRecords; 18 | import org.apache.kafka.clients.consumer.KafkaConsumer; 19 | import org.apache.kafka.common.config.SaslConfigs; 20 | import org.apache.kafka.common.protocol.SecurityProtocol; 21 | import org.apache.kafka.common.serialization.ByteArrayDeserializer; 22 | import org.apache.kafka.common.serialization.StringDeserializer; 23 | 24 | import java.io.File; 25 | import java.io.InputStream; 26 | import java.security.PrivateKey; 27 | import java.util.Collections; 28 | import java.util.Properties; 29 | import java.util.Random; 30 | 31 | /** 32 | * Avro granular consumer 33 | */ 34 | public class AvroConsumer { 35 | 36 | public static void main(String[] args) throws Exception { 37 | JaasUtils.initializeConfiguration(); 38 | 39 | Histogram stats = new Histogram(1, 10000000, 2); 40 | KafkaConsumer consumer; 41 | try (InputStream props = Resources.getResource("consumer.properties").openStream()) { 42 | Properties properties = new Properties(); 43 | properties.load(props); 44 | if (properties.getProperty("group.id") == null) { 45 | properties.setProperty("group.id", "group-" + new Random().nextInt(100000)); 46 | } 47 | properties.put("security.protocol", SecurityProtocol.SASL_PLAINTEXT.toString()); 48 | properties.put(SaslConfigs.SASL_MECHANISM, "PLAIN"); 49 | 50 | // load PEM file from resources 51 | File file = new File(AvroConsumer.class.getClassLoader() 52 | 
.getResource(TestUtils.PEM).getFile()); 53 | final PrivateKey privateKey = 54 | KeyUtils.getECKeyPairFromPEM(file.getAbsolutePath()).getPrivate(); 55 | 56 | consumer = new KafkaConsumer<>( 57 | properties, 58 | new StringDeserializer(), 59 | new AesStructuredMessageDeserializer<>( 60 | new ByteArrayDeserializer(), 61 | TestUtils.ENCRYPTION_ALGORITHM_CLASS, 62 | privateKey, 63 | DataFormat.AVRO 64 | ) 65 | ); 66 | 67 | } 68 | 69 | consumer.subscribe(Collections.singletonList("granular-avro")); 70 | int timeouts = 0; 71 | while (true) { 72 | ConsumerRecords records = consumer.poll(200); 73 | System.out.println("records.count():" + records.count()); 74 | if (records.count() == 0) { 75 | timeouts++; 76 | } else { 77 | System.out.printf("Got %d records after %d timeouts\n", records.count(), timeouts); 78 | timeouts = 0; 79 | } 80 | for (ConsumerRecord record : records) { 81 | SeekableInput seekableInput = new SeekableByteArrayInput(record.value()); 82 | DatumReader datumReader = new GenericDatumReader<>(); 83 | DataFileReader dataFileReader = new DataFileReader<>(seekableInput, datumReader); 84 | 85 | GenericRecord genericRecord = dataFileReader.next(); 86 | long latency = (long) ((System.nanoTime() * 1e-9 - (double) genericRecord.get("t")) * 1000); 87 | stats.recordValue(latency); 88 | } 89 | } 90 | } 91 | } 92 | 93 | -------------------------------------------------------------------------------- /examples/src/main/java/com/nucypher/kafka/clients/example/granular/AvroProducer.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients.example.granular; 2 | 3 | import com.google.common.io.Resources; 4 | import com.nucypher.kafka.TestUtils; 5 | import com.nucypher.kafka.clients.encrypt.AesStructuredMessageSerializer; 6 | import com.nucypher.kafka.clients.example.utils.JaasUtils; 7 | import com.nucypher.kafka.clients.granular.DataFormat; 8 | import com.nucypher.kafka.utils.KeyUtils; 9 | import 
org.apache.avro.Schema; 10 | import org.apache.avro.SchemaBuilder; 11 | import org.apache.avro.file.DataFileWriter; 12 | import org.apache.avro.generic.GenericData; 13 | import org.apache.avro.generic.GenericDatumWriter; 14 | import org.apache.avro.generic.GenericRecord; 15 | import org.apache.avro.io.DatumWriter; 16 | import org.apache.kafka.clients.producer.KafkaProducer; 17 | import org.apache.kafka.clients.producer.ProducerRecord; 18 | import org.apache.kafka.common.config.SaslConfigs; 19 | import org.apache.kafka.common.protocol.SecurityProtocol; 20 | import org.apache.kafka.common.serialization.ByteArraySerializer; 21 | import org.apache.kafka.common.serialization.StringSerializer; 22 | 23 | import java.io.ByteArrayOutputStream; 24 | import java.io.File; 25 | import java.io.InputStream; 26 | import java.security.PublicKey; 27 | import java.util.Properties; 28 | 29 | /** 30 | * JSON granular producer 31 | */ 32 | public class AvroProducer { 33 | 34 | public static void main(String[] args) throws Exception { 35 | Schema schema = SchemaBuilder.record("record") 36 | .fields() 37 | .name("type").type().stringType().stringDefault("test") 38 | .name("t").type().doubleType().noDefault() 39 | .name("k").type().intType().noDefault() 40 | .endRecord(); 41 | 42 | JaasUtils.initializeConfiguration(); 43 | 44 | KafkaProducer producer; 45 | try (InputStream props = Resources.getResource("producer.properties").openStream()) { 46 | Properties properties = new Properties(); 47 | properties.load(props); 48 | properties.put("security.protocol", SecurityProtocol.SASL_PLAINTEXT.toString()); 49 | properties.put(SaslConfigs.SASL_MECHANISM, "PLAIN"); 50 | 51 | File file = new File(AvroProducer.class.getClassLoader() 52 | .getResource(TestUtils.PEM).getFile()); 53 | PublicKey publicKey = KeyUtils.getECKeyPairFromPEM(file.getAbsolutePath()).getPublic(); 54 | 55 | producer = new KafkaProducer<>( 56 | properties, 57 | new StringSerializer(), 58 | new AesStructuredMessageSerializer<>( 59 
| new ByteArraySerializer(), 60 | TestUtils.ENCRYPTION_ALGORITHM_CLASS, 61 | publicKey, 62 | null, 63 | DataFormat.AVRO 64 | ) 65 | ); 66 | } 67 | try { 68 | GenericRecord record = new GenericData.Record(schema); 69 | 70 | for (int i = 0; i < 1000000; i++) { 71 | record.put("type", "test"); 72 | record.put("t", System.nanoTime() * 1e-9); 73 | record.put("k", i); 74 | ByteArrayOutputStream dataOutputStream = new ByteArrayOutputStream(); 75 | DatumWriter writer = new GenericDatumWriter<>(); 76 | DataFileWriter dataFileWriter = new DataFileWriter<>(writer); 77 | dataFileWriter.create(schema, dataOutputStream); 78 | dataFileWriter.append(record); 79 | dataFileWriter.close(); 80 | byte[] bytes = dataOutputStream.toByteArray(); 81 | 82 | // send lots of messages 83 | producer.send(new ProducerRecord<>("granular-avro", bytes)); 84 | Thread.sleep(1000); 85 | } 86 | } catch (Throwable throwable) { 87 | throwable.printStackTrace(); 88 | } finally { 89 | producer.close(); 90 | } 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /examples/src/main/java/com/nucypher/kafka/clients/example/granular/AvroSchemaLessConsumer.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients.example.granular; 2 | 3 | import com.google.common.io.Resources; 4 | import com.nucypher.kafka.TestUtils; 5 | import com.nucypher.kafka.clients.decrypt.AesStructuredMessageDeserializer; 6 | import com.nucypher.kafka.clients.example.utils.JaasUtils; 7 | import com.nucypher.kafka.clients.granular.DataFormat; 8 | import com.nucypher.kafka.utils.KeyUtils; 9 | import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig; 10 | import io.confluent.kafka.serializers.KafkaAvroDeserializer; 11 | import org.HdrHistogram.Histogram; 12 | import org.apache.avro.generic.GenericRecord; 13 | import org.apache.kafka.clients.consumer.ConsumerRecord; 14 | import 
org.apache.kafka.clients.consumer.ConsumerRecords; 15 | import org.apache.kafka.clients.consumer.KafkaConsumer; 16 | import org.apache.kafka.common.config.SaslConfigs; 17 | import org.apache.kafka.common.protocol.SecurityProtocol; 18 | import org.apache.kafka.common.serialization.Deserializer; 19 | import org.apache.kafka.common.serialization.StringDeserializer; 20 | 21 | import java.io.File; 22 | import java.io.InputStream; 23 | import java.security.PrivateKey; 24 | import java.util.Collections; 25 | import java.util.HashMap; 26 | import java.util.Map; 27 | import java.util.Properties; 28 | import java.util.Random; 29 | 30 | /** 31 | * Avro granular consumer 32 | */ 33 | public class AvroSchemaLessConsumer { 34 | 35 | public static void main(String[] args) throws Exception { 36 | JaasUtils.initializeConfiguration(); 37 | 38 | Histogram stats = new Histogram(1, 10000000, 2); 39 | KafkaConsumer consumer; 40 | try (InputStream props = Resources.getResource("consumer.properties").openStream()) { 41 | Properties properties = new Properties(); 42 | properties.load(props); 43 | if (properties.getProperty("group.id") == null) { 44 | properties.setProperty("group.id", "group-" + new Random().nextInt(100000)); 45 | } 46 | properties.put("security.protocol", SecurityProtocol.SASL_PLAINTEXT.toString()); 47 | properties.put(SaslConfigs.SASL_MECHANISM, "PLAIN"); 48 | properties.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081"); 49 | 50 | File file = new File(AvroSchemaLessConsumer.class.getClassLoader() 51 | .getResource(TestUtils.PEM).getFile()); 52 | final PrivateKey privateKey = KeyUtils.getECKeyPairFromPEM(file.getAbsolutePath()).getPrivate(); 53 | 54 | Deserializer deserializer = new AesStructuredMessageDeserializer<>( 55 | new KafkaAvroDeserializer(), 56 | TestUtils.ENCRYPTION_ALGORITHM_CLASS, 57 | privateKey, 58 | DataFormat.AVRO_SCHEMA_LESS 59 | ); 60 | Map configs = new HashMap<>(); 61 | for (final String name : 
properties.stringPropertyNames()) { 62 | configs.put(name, properties.getProperty(name)); 63 | } 64 | deserializer.configure(configs, false); 65 | consumer = new KafkaConsumer<>( 66 | properties, 67 | new StringDeserializer(), 68 | deserializer 69 | ); 70 | } 71 | 72 | consumer.subscribe(Collections.singletonList("granular-avro-schema-less")); 73 | int timeouts = 0; 74 | while (true) { 75 | ConsumerRecords records = consumer.poll(200); 76 | System.out.println("records.count():" + records.count()); 77 | if (records.count() == 0) { 78 | timeouts++; 79 | } else { 80 | System.out.printf("Got %d records after %d timeouts\n", records.count(), timeouts); 81 | timeouts = 0; 82 | } 83 | for (ConsumerRecord record : records) { 84 | GenericRecord genericRecord = (GenericRecord) record.value(); 85 | long latency = (long) ((System.nanoTime() * 1e-9 - (double) genericRecord.get("t")) * 1000); 86 | stats.recordValue(latency); 87 | } 88 | } 89 | } 90 | } 91 | 92 | -------------------------------------------------------------------------------- /examples/src/main/java/com/nucypher/kafka/clients/example/granular/AvroSchemaLessProducer.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients.example.granular; 2 | 3 | import com.google.common.io.Resources; 4 | import com.nucypher.kafka.TestUtils; 5 | import com.nucypher.kafka.clients.encrypt.AesStructuredMessageSerializer; 6 | import com.nucypher.kafka.clients.example.utils.JaasUtils; 7 | import com.nucypher.kafka.clients.granular.DataFormat; 8 | import com.nucypher.kafka.utils.KeyUtils; 9 | import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig; 10 | import io.confluent.kafka.serializers.KafkaAvroSerializer; 11 | import org.apache.avro.Schema; 12 | import org.apache.avro.SchemaBuilder; 13 | import org.apache.avro.generic.GenericData; 14 | import org.apache.avro.generic.GenericRecord; 15 | import org.apache.kafka.clients.producer.KafkaProducer; 16 | 
import org.apache.kafka.clients.producer.ProducerRecord; 17 | import org.apache.kafka.common.config.SaslConfigs; 18 | import org.apache.kafka.common.protocol.SecurityProtocol; 19 | import org.apache.kafka.common.serialization.Serializer; 20 | import org.apache.kafka.common.serialization.StringSerializer; 21 | 22 | import java.io.File; 23 | import java.io.InputStream; 24 | import java.security.PublicKey; 25 | import java.util.HashMap; 26 | import java.util.Map; 27 | import java.util.Properties; 28 | 29 | /** 30 | * JSON granular producer 31 | */ 32 | public class AvroSchemaLessProducer { 33 | 34 | public static void main(String[] args) throws Exception { 35 | Schema schema = SchemaBuilder.record("record") 36 | .fields() 37 | .name("type").type().stringType().stringDefault("test") 38 | .name("t").type().doubleType().noDefault() 39 | .name("k").type().intType().noDefault() 40 | .endRecord(); 41 | 42 | JaasUtils.initializeConfiguration(); 43 | 44 | // set up the producer 45 | KafkaProducer producer; 46 | try (InputStream props = Resources.getResource("producer.properties").openStream()) { 47 | Properties properties = new Properties(); 48 | properties.load(props); 49 | properties.put("security.protocol", SecurityProtocol.SASL_PLAINTEXT.toString()); 50 | properties.put(SaslConfigs.SASL_MECHANISM, "PLAIN"); 51 | properties.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081"); 52 | 53 | // load PEM file from resources 54 | File file = new File(AvroSchemaLessProducer.class.getClassLoader() 55 | .getResource(TestUtils.PEM).getFile()); 56 | PublicKey publicKey = KeyUtils.getECKeyPairFromPEM(file.getAbsolutePath()).getPublic(); 57 | 58 | Serializer serializer = new AesStructuredMessageSerializer<>( 59 | new KafkaAvroSerializer(), 60 | TestUtils.ENCRYPTION_ALGORITHM_CLASS, 61 | publicKey, 62 | null, 63 | DataFormat.AVRO_SCHEMA_LESS 64 | ); 65 | Map configs = new HashMap<>(); 66 | for (final String name : properties.stringPropertyNames()) { 67 | 
configs.put(name, properties.getProperty(name)); 68 | } 69 | serializer.configure(configs, false); 70 | producer = new KafkaProducer<>( 71 | properties, 72 | new StringSerializer(), 73 | serializer 74 | ); 75 | } 76 | try { 77 | GenericRecord record = new GenericData.Record(schema); 78 | for (int i = 0; i < 1000000; i++) { 79 | record.put("type", "test"); 80 | record.put("t", System.nanoTime() * 1e-9); 81 | record.put("k", i); 82 | // send lots of messages 83 | producer.send(new ProducerRecord<>("granular-avro-schema-less", record)); 84 | Thread.sleep(1000); 85 | } 86 | } catch (Throwable throwable) { 87 | throwable.printStackTrace(); 88 | } finally { 89 | producer.close(); 90 | } 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /examples/src/main/java/com/nucypher/kafka/clients/example/granular/JsonConsumer.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients.example.granular; 2 | 3 | import com.fasterxml.jackson.databind.JsonNode; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.google.common.io.Resources; 6 | import com.nucypher.kafka.TestUtils; 7 | import com.nucypher.kafka.clients.decrypt.AesStructuredMessageDeserializer; 8 | import com.nucypher.kafka.clients.example.utils.JaasUtils; 9 | import com.nucypher.kafka.clients.granular.DataFormat; 10 | import com.nucypher.kafka.utils.KeyUtils; 11 | import org.HdrHistogram.Histogram; 12 | import org.apache.kafka.clients.consumer.ConsumerRecord; 13 | import org.apache.kafka.clients.consumer.ConsumerRecords; 14 | import org.apache.kafka.clients.consumer.KafkaConsumer; 15 | import org.apache.kafka.common.config.SaslConfigs; 16 | import org.apache.kafka.common.protocol.SecurityProtocol; 17 | import org.apache.kafka.common.serialization.StringDeserializer; 18 | 19 | import java.io.File; 20 | import java.io.InputStream; 21 | import java.security.PrivateKey; 22 | import 
java.util.Collections; 23 | import java.util.Properties; 24 | import java.util.Random; 25 | 26 | /** 27 | * JSON granular consumer 28 | */ 29 | public class JsonConsumer { 30 | 31 | public static void main(String[] args) throws Exception { 32 | JaasUtils.initializeConfiguration(); 33 | 34 | ObjectMapper mapper = new ObjectMapper(); 35 | Histogram stats = new Histogram(1, 10000000, 2); 36 | // and the consumer 37 | KafkaConsumer consumer; 38 | try (InputStream props = Resources.getResource("consumer.properties").openStream()) { 39 | Properties properties = new Properties(); 40 | properties.load(props); 41 | if (properties.getProperty("group.id") == null) { 42 | properties.setProperty("group.id", "group-" + new Random().nextInt(100000)); 43 | } 44 | properties.put("security.protocol", SecurityProtocol.SASL_PLAINTEXT.toString()); 45 | properties.put(SaslConfigs.SASL_MECHANISM, "PLAIN"); 46 | 47 | // load PEM file from resources 48 | File file = new File(JsonConsumer.class.getClassLoader() 49 | .getResource(TestUtils.PEM).getFile()); 50 | final PrivateKey privateKey = 51 | KeyUtils.getECKeyPairFromPEM(file.getAbsolutePath()).getPrivate(); 52 | 53 | consumer = new KafkaConsumer<>( 54 | properties, 55 | new StringDeserializer(), 56 | new AesStructuredMessageDeserializer<>( 57 | new StringDeserializer(), 58 | TestUtils.ENCRYPTION_ALGORITHM_CLASS, 59 | privateKey, 60 | DataFormat.JSON 61 | ) 62 | ); 63 | 64 | } 65 | 66 | consumer.subscribe(Collections.singletonList("granular-json")); 67 | int timeouts = 0; 68 | //noinspection InfiniteLoopStatement 69 | while (true) { 70 | // Thread.sleep(200); 71 | // read records with a short timeout. If we time out, we don't really care. 
72 | ConsumerRecords records = consumer.poll(200); 73 | System.out.println("records.count():" + records.count()); 74 | if (records.count() == 0) { 75 | timeouts++; 76 | } else { 77 | System.out.printf("Got %d records after %d timeouts\n", records.count(), timeouts); 78 | timeouts = 0; 79 | } 80 | for (ConsumerRecord record : records) { 81 | // the send time is encoded inside the message 82 | JsonNode message = mapper.readTree(record.value()); 83 | long latency = (long) ((System.nanoTime() * 1e-9 - message.get("t").asDouble()) * 1000); 84 | stats.recordValue(latency); 85 | } 86 | } 87 | } 88 | } 89 | 90 | -------------------------------------------------------------------------------- /examples/src/main/java/com/nucypher/kafka/clients/example/granular/JsonProducer.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients.example.granular; 2 | 3 | import com.google.common.io.Resources; 4 | import com.nucypher.kafka.TestUtils; 5 | import com.nucypher.kafka.clients.encrypt.AesStructuredMessageSerializer; 6 | import com.nucypher.kafka.clients.example.utils.JaasUtils; 7 | import com.nucypher.kafka.clients.granular.DataFormat; 8 | import com.nucypher.kafka.utils.KeyUtils; 9 | import org.apache.kafka.clients.producer.KafkaProducer; 10 | import org.apache.kafka.clients.producer.ProducerRecord; 11 | import org.apache.kafka.common.config.SaslConfigs; 12 | import org.apache.kafka.common.protocol.SecurityProtocol; 13 | import org.apache.kafka.common.serialization.StringSerializer; 14 | 15 | import java.io.File; 16 | import java.io.InputStream; 17 | import java.security.PublicKey; 18 | import java.util.Locale; 19 | import java.util.Properties; 20 | 21 | /** 22 | * JSON granular producer 23 | */ 24 | public class JsonProducer { 25 | 26 | public static void main(String[] args) throws Exception { 27 | JaasUtils.initializeConfiguration(); 28 | 29 | // set up the producer 30 | KafkaProducer producer; 31 | try 
(InputStream props = Resources.getResource("producer.properties").openStream()) { 32 | Properties properties = new Properties(); 33 | properties.load(props); 34 | properties.put("security.protocol", SecurityProtocol.SASL_PLAINTEXT.toString()); 35 | properties.put(SaslConfigs.SASL_MECHANISM, "PLAIN"); 36 | 37 | // load PEM file from resources 38 | File file = new File(JsonProducer.class.getClassLoader() 39 | .getResource(TestUtils.PEM).getFile()); 40 | PublicKey publicKey = 41 | KeyUtils.getECKeyPairFromPEM(file.getAbsolutePath()).getPublic(); 42 | 43 | producer = new KafkaProducer<>( 44 | properties, 45 | new StringSerializer(), 46 | new AesStructuredMessageSerializer<>( 47 | new StringSerializer(), 48 | TestUtils.ENCRYPTION_ALGORITHM_CLASS, 49 | publicKey, 50 | null, 51 | DataFormat.JSON 52 | ) 53 | ); 54 | 55 | } 56 | try { 57 | for (int i = 0; i < 1000000; i++) { 58 | // send lots of messages 59 | producer.send(new ProducerRecord<>( 60 | "granular-json", 61 | String.format(Locale.US, 62 | "{\"type\":\"test\", \"t\":%.3f, \"k\":%d}", 63 | System.nanoTime() * 1e-9, 64 | i))); 65 | 66 | Thread.sleep(1000); 67 | } 68 | } catch (Throwable throwable) { 69 | throwable.printStackTrace(); 70 | } finally { 71 | producer.close(); 72 | } 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /examples/src/main/java/com/nucypher/kafka/clients/example/granular/SchemaRegistry.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients.example.granular; 2 | 3 | import com.nucypher.kafka.clients.example.utils.JaasUtils; 4 | import io.confluent.kafka.schemaregistry.rest.SchemaRegistryMain; 5 | 6 | import java.io.File; 7 | import java.io.IOException; 8 | 9 | /** 10 | * {@link SchemaRegistryMain} runner 11 | */ 12 | public class SchemaRegistry { 13 | 14 | public static void main(String[] args) throws IOException { 15 | JaasUtils.initializeConfiguration(); 16 | 
System.setProperty("zookeeper.sasl.client", "false"); 17 | 18 | File file = new File(SchemaRegistry.class.getClassLoader() 19 | .getResource("schema-registry.properties").getFile()); 20 | SchemaRegistryMain.main(new String[]{file.getAbsolutePath()}); 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /examples/src/main/java/com/nucypher/kafka/clients/example/utils/JaasUtils.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.clients.example.utils; 2 | 3 | import com.nucypher.kafka.errors.CommonException; 4 | 5 | import java.net.URL; 6 | 7 | /** 8 | * Utils for JAAS configuration 9 | */ 10 | public class JaasUtils { 11 | 12 | /** 13 | * Initialize JAAS using jaas.conf 14 | */ 15 | public static void initializeConfiguration() { 16 | initializeConfiguration("jaas.conf"); 17 | } 18 | 19 | /** 20 | * Initialize JAAS using configuration 21 | * 22 | * @param fileName configuration file 23 | */ 24 | public static void initializeConfiguration(String fileName) { 25 | URL jaas = JaasUtils.class.getClassLoader().getResource(fileName); 26 | if (jaas == null) { 27 | throw new CommonException("%s file not found", fileName); 28 | } 29 | System.setProperty(org.apache.kafka.common.security.JaasUtils.JAVA_LOGIN_CONFIG_PARAM, 30 | jaas.toExternalForm()); 31 | } 32 | 33 | } 34 | -------------------------------------------------------------------------------- /examples/src/main/java/com/nucypher/kafka/proxy/ProxyFromProperties.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.proxy; 2 | 3 | import com.nucypher.kafka.clients.example.utils.JaasUtils; 4 | 5 | import java.io.IOException; 6 | import java.net.URL; 7 | 8 | /** 9 | * Start proxy using configuration file 10 | */ 11 | public class ProxyFromProperties { 12 | 13 | public static void main(String[] args) throws IOException { 14 | 
JaasUtils.initializeConfiguration("jaas_proxy.conf"); 15 | URL configs = JaasUtils.class.getClassLoader().getResource("proxy.properties"); 16 | ProxyServer.main(new String[]{configs.getFile().toString()}); 17 | } 18 | 19 | } 20 | -------------------------------------------------------------------------------- /examples/src/main/java/com/nucypher/kafka/proxy/TransparentProxyFromInstance.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.proxy; 2 | 3 | import com.nucypher.kafka.clients.example.utils.JaasUtils; 4 | import org.apache.kafka.clients.CommonClientConfigs; 5 | import org.apache.kafka.common.config.SaslConfigs; 6 | import org.apache.kafka.common.protocol.SecurityProtocol; 7 | import org.apache.kafka.common.serialization.ByteArrayDeserializer; 8 | import org.apache.kafka.common.serialization.ByteArraySerializer; 9 | 10 | import java.io.IOException; 11 | import java.util.Collections; 12 | import java.util.HashMap; 13 | import java.util.Map; 14 | 15 | /** 16 | * Start proxy by creating new instance from code 17 | */ 18 | public class TransparentProxyFromInstance { 19 | 20 | public static void main(String[] args) throws IOException { 21 | JaasUtils.initializeConfiguration("jaas_proxy.conf"); 22 | Map configs = new HashMap<>(); 23 | configs.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, 24 | SecurityProtocol.SASL_PLAINTEXT.name); 25 | configs.put(SaslConfigs.SASL_ENABLED_MECHANISMS, 26 | Collections.singletonList("PLAIN")); 27 | final ProxyServer server = new ProxyServer( 28 | null, 29 | 9192, 30 | 1, 31 | 1, 32 | "localhost", 33 | 9092, 34 | SecurityProtocol.SASL_PLAINTEXT, 35 | new ByteArraySerializer(), 36 | new ByteArrayDeserializer(), 37 | null, 38 | configs); 39 | Runtime.getRuntime().addShutdownHook(new Thread(server::close)); 40 | server.start(); 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- 
/examples/src/main/java/com/nucypher/kafka/proxy/benchmark/ConsumerBenchmark.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.proxy.benchmark; 2 | 3 | import com.nucypher.kafka.clients.decrypt.AesStructuredMessageDeserializer; 4 | import com.nucypher.kafka.clients.decrypt.AesStructuredMessageDeserializerConfig; 5 | import com.nucypher.kafka.clients.granular.JsonDataAccessor; 6 | import org.apache.kafka.clients.consumer.ConsumerConfig; 7 | import org.apache.kafka.clients.consumer.ConsumerRecord; 8 | import org.apache.kafka.clients.consumer.ConsumerRecords; 9 | import org.apache.kafka.clients.consumer.KafkaConsumer; 10 | import org.apache.kafka.common.config.SaslConfigs; 11 | import org.apache.kafka.common.protocol.SecurityProtocol; 12 | import org.apache.kafka.common.security.JaasUtils; 13 | import org.apache.kafka.common.serialization.StringDeserializer; 14 | import org.openjdk.jmh.annotations.Benchmark; 15 | import org.openjdk.jmh.annotations.BenchmarkMode; 16 | import org.openjdk.jmh.annotations.Measurement; 17 | import org.openjdk.jmh.annotations.Mode; 18 | import org.openjdk.jmh.annotations.OutputTimeUnit; 19 | import org.openjdk.jmh.annotations.Param; 20 | import org.openjdk.jmh.annotations.Scope; 21 | import org.openjdk.jmh.annotations.Setup; 22 | import org.openjdk.jmh.annotations.State; 23 | import org.openjdk.jmh.annotations.TearDown; 24 | import org.openjdk.jmh.infra.Blackhole; 25 | import org.openjdk.jmh.runner.Runner; 26 | import org.openjdk.jmh.runner.options.Options; 27 | import org.openjdk.jmh.runner.options.OptionsBuilder; 28 | 29 | import java.util.Collections; 30 | import java.util.Properties; 31 | import java.util.UUID; 32 | import java.util.concurrent.TimeUnit; 33 | 34 | /** 35 | * Consumer-proxy benchmark. 
Run Kafka and Proxy before benchmarking 36 | */ 37 | @State(Scope.Benchmark) 38 | @BenchmarkMode(Mode.Throughput) 39 | @OutputTimeUnit(TimeUnit.SECONDS) 40 | @Measurement(iterations = 100) 41 | public class ConsumerBenchmark { 42 | 43 | public static final String TOPIC = "benchmark"; 44 | 45 | private KafkaConsumer consumer; 46 | 47 | @Param({"localhost:9092;false", "localhost:9092;true", "localhost:9192;false"}) 48 | public String parameters; 49 | 50 | private long count; 51 | 52 | @Setup 53 | public void setup() { 54 | String[] parts = parameters.split(";"); 55 | String server = parts[0]; 56 | boolean clientEncryption = Boolean.parseBoolean(parts[1]); 57 | System.setProperty(JaasUtils.JAVA_LOGIN_CONFIG_PARAM, "src/main/resources/jaas.conf"); 58 | Properties properties = new Properties(); 59 | properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, server); 60 | properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, server + "-" + UUID.randomUUID().toString()); 61 | properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); 62 | properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); 63 | properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); 64 | properties.put("security.protocol", SecurityProtocol.SASL_PLAINTEXT.toString()); 65 | properties.put(SaslConfigs.SASL_MECHANISM, "PLAIN"); 66 | if (clientEncryption) { 67 | properties.put(AesStructuredMessageDeserializerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, 68 | properties.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG)); 69 | properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, 70 | AesStructuredMessageDeserializer.class); 71 | properties.put(AesStructuredMessageDeserializerConfig.GRANULAR_DATA_ACCESSOR_CONFIG, 72 | JsonDataAccessor.class); 73 | properties.put(AesStructuredMessageDeserializerConfig.PRIVATE_KEY_CONFIG, 74 | "src/main/resources/P521.pem"); 75 | } 76 | consumer = new KafkaConsumer<>(properties); 77 | 
consumer.subscribe(Collections.singleton(TOPIC)); 78 | } 79 | 80 | @TearDown 81 | public void close() { 82 | consumer.close(); 83 | System.out.println("Count = " + count); 84 | } 85 | 86 | @Benchmark 87 | public void testConsumer(Blackhole blackhole) { 88 | ConsumerRecords records = consumer.poll(Long.MAX_VALUE); 89 | for (ConsumerRecord record : records) { 90 | blackhole.consume(record.value()); 91 | } 92 | count += records.count(); 93 | } 94 | 95 | public static void main(String[] args) throws Exception { 96 | Options options = new OptionsBuilder() 97 | .include(ConsumerBenchmark.class.getCanonicalName()) 98 | .forks(1) 99 | .build(); 100 | 101 | new Runner(options).run(); 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /examples/src/main/java/com/nucypher/kafka/proxy/benchmark/ProducerBenchmark.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.proxy.benchmark; 2 | 3 | import com.nucypher.kafka.clients.encrypt.AesStructuredMessageSerializer; 4 | import com.nucypher.kafka.clients.encrypt.AesStructuredMessageSerializerConfig; 5 | import com.nucypher.kafka.clients.granular.JsonDataAccessor; 6 | import org.apache.kafka.clients.producer.KafkaProducer; 7 | import org.apache.kafka.clients.producer.ProducerConfig; 8 | import org.apache.kafka.clients.producer.ProducerRecord; 9 | import org.apache.kafka.common.config.SaslConfigs; 10 | import org.apache.kafka.common.protocol.SecurityProtocol; 11 | import org.apache.kafka.common.security.JaasUtils; 12 | import org.apache.kafka.common.serialization.StringSerializer; 13 | import org.openjdk.jmh.annotations.Benchmark; 14 | import org.openjdk.jmh.annotations.BenchmarkMode; 15 | import org.openjdk.jmh.annotations.Measurement; 16 | import org.openjdk.jmh.annotations.Mode; 17 | import org.openjdk.jmh.annotations.OutputTimeUnit; 18 | import org.openjdk.jmh.annotations.Param; 19 | import 
org.openjdk.jmh.annotations.Scope; 20 | import org.openjdk.jmh.annotations.Setup; 21 | import org.openjdk.jmh.annotations.State; 22 | import org.openjdk.jmh.annotations.TearDown; 23 | import org.openjdk.jmh.annotations.Warmup; 24 | import org.openjdk.jmh.runner.Runner; 25 | import org.openjdk.jmh.runner.options.Options; 26 | import org.openjdk.jmh.runner.options.OptionsBuilder; 27 | 28 | import java.util.Collections; 29 | import java.util.Properties; 30 | import java.util.concurrent.ExecutionException; 31 | import java.util.concurrent.TimeUnit; 32 | 33 | /** 34 | * Producer-proxy benchmark. Run Kafka and Proxy before benchmarking 35 | */ 36 | @State(Scope.Benchmark) 37 | @BenchmarkMode(Mode.Throughput) 38 | @OutputTimeUnit(TimeUnit.SECONDS) 39 | @Measurement(iterations = 100) 40 | @Warmup(iterations = 30) 41 | public class ProducerBenchmark { 42 | 43 | private static final String TOPIC = "benchmark"; 44 | 45 | private KafkaProducer producer; 46 | 47 | @Param({"localhost:9092;false", "localhost:9092;true", "localhost:9192;false"}) 48 | public String parameters; 49 | 50 | @Setup 51 | public void setup() { 52 | String[] parts = parameters.split(";"); 53 | String server = parts[0]; 54 | boolean clientEncryption = Boolean.parseBoolean(parts[1]); 55 | System.setProperty(JaasUtils.JAVA_LOGIN_CONFIG_PARAM, "src/main/resources/jaas.conf"); 56 | Properties properties = new Properties(); 57 | properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, server); 58 | properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); 59 | properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); 60 | properties.put("security.protocol", SecurityProtocol.SASL_PLAINTEXT.toString()); 61 | properties.put(SaslConfigs.SASL_MECHANISM, "PLAIN"); 62 | if (clientEncryption) { 63 | properties.put(AesStructuredMessageSerializerConfig.VALUE_SERIALIZER_CLASS_CONFIG, 64 | properties.get(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG)); 65 | 
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, 66 | AesStructuredMessageSerializer.class); 67 | properties.put(AesStructuredMessageSerializerConfig.GRANULAR_DATA_ACCESSOR_CONFIG, 68 | JsonDataAccessor.class); 69 | properties.put(AesStructuredMessageSerializerConfig.PUBLIC_KEY_CONFIG, 70 | "src/main/resources/P521.pem"); 71 | properties.put(AesStructuredMessageSerializerConfig.FIELDS_LIST_CONFIG, 72 | Collections.singletonList("a")); 73 | } 74 | producer = new KafkaProducer<>(properties); 75 | } 76 | 77 | @TearDown 78 | public void close() { 79 | producer.close(); 80 | } 81 | 82 | @Benchmark 83 | public void testProducer() { 84 | long time = System.nanoTime(); 85 | producer.send(new ProducerRecord<>(TOPIC, 86 | "{\"a\":" + time + 87 | ",\"b\":" + time + "}")); 88 | } 89 | 90 | @Benchmark 91 | public void testProducerSync() throws ExecutionException, InterruptedException { 92 | long time = System.nanoTime(); 93 | producer.send(new ProducerRecord<>(TOPIC, 94 | "{\"a\":" + time + 95 | ",\"b\":" + time + "}")).get(); 96 | } 97 | 98 | public static void main(String[] args) throws Exception { 99 | Options options = new OptionsBuilder() 100 | .include(ProducerBenchmark.class.getCanonicalName()) 101 | .forks(1) 102 | .build(); 103 | 104 | new Runner(options).run(); 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /examples/src/main/resources/P521.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN EC PARAMETERS----- 2 | MIIBwwIBATBNBgcqhkjOPQEBAkIB//////////////////////////////////// 3 | //////////////////////////////////////////////////8wgZ8EQgH///// 4 | //////////////////////////////////////////////////////////////// 5 | /////////////////ARCAFGVPrlhjhyaH5KaIaC2hUDuotpyW5mzFfO4tImRjvEJ 6 | 4VYZOVHsfpN7FlLAvTuxvwc1c9+IPSw08e9FH9RrUD8AAxUA0J6IACkcuFOWzGcX 7 | OTKEqqDaZLoEgYUEAMaFjga3BATpzZ4+y2YjlbRCnGSBOQU/tSH4KK9ga009uqFL 8 | 
Xnfv51ko/h3BJ6L/qN4zSLPBhWpCm/l+fjHC5b1mARg5KWp4mjvABFyKX7QsfRvZ 9 | mPVESVebRGgXr70XJz5mLJfucple9CZAxVC5AT+tB2E1PHCGonLCQIi+lHaf0WZQ 10 | AkIB///////////////////////////////////////////6UYaHg78vlmt/zAFI 11 | 9wml0Du1ybiJnEeuu2+3HpE4ZAkCAQE= 12 | -----END EC PARAMETERS----- 13 | -----BEGIN EC PRIVATE KEY----- 14 | MIHcAgEBBEIAfzzIW+AJhTtmFSp3qqLWajKIDj1cYodPac0GYO8ku217TNUy6KmX 15 | WDmc3nY3snVf29G2cZdb8JU9aHs/+b541aagBwYFK4EEACOhgYkDgYYABAFeHVTy 16 | WkYVAACm2xj8OM5o4YkqTgzq4eSs0gT7knuoQb0dRS3LKtwfg89h7LRA3VKmRk38 17 | oHdQln0J3ZfjfM+8ogFC1j+l0bFQmbNqDSan2HaqLobrfMw/1j4Jsz5hkwCjQEoU 18 | GOlHBIJVZU0QZL3ByFXUe2ouGNXFvHpW/tPgofTR9Q== 19 | -----END EC PRIVATE KEY----- 20 | -----BEGIN PUBLIC KEY----- 21 | MIGbMBAGByqGSM49AgEGBSuBBAAjA4GGAAQBXh1U8lpGFQAAptsY/DjOaOGJKk4M 22 | 6uHkrNIE+5J7qEG9HUUtyyrcH4PPYey0QN1SpkZN/KB3UJZ9Cd2X43zPvKIBQtY/ 23 | pdGxUJmzag0mp9h2qi6G63zMP9Y+CbM+YZMAo0BKFBjpRwSCVWVNEGS9wchV1Htq 24 | LhjVxbx6Vv7T4KH00fU= 25 | -----END PUBLIC KEY----- 26 | -------------------------------------------------------------------------------- /examples/src/main/resources/consumer.properties: -------------------------------------------------------------------------------- 1 | bootstrap.servers=localhost:9092 2 | group.id=test 3 | client.id=100500 4 | enable.auto.commit=true 5 | #key.deserializer=org.apache.kafka.common.serialization.StringDeserializer 6 | #value.deserializer=org.apache.kafka.common.serialization.StringDeserializer 7 | 8 | # fast session timeout makes it more fun to play with failover 9 | session.timeout.ms=10000 10 | 11 | # These buffer sizes seem to be needed to avoid consumer switching to 12 | # a mode where it processes one bufferful every 5 seconds with multiple 13 | # timeouts along the way. No idea why this happens. 
14 | fetch.min.bytes=50000 15 | receive.buffer.bytes=262144 16 | max.partition.fetch.bytes=2097152 -------------------------------------------------------------------------------- /examples/src/main/resources/generated.reduced.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "one", 3 | "level": 1, 4 | "levelTwo": { 5 | "name": "two", 6 | "level": 2, 7 | "levelThree": { 8 | "name": "three.2", 9 | "level": 3 10 | } 11 | } 12 | } -------------------------------------------------------------------------------- /examples/src/main/resources/jaas.conf: -------------------------------------------------------------------------------- 1 | KafkaClient { 2 | org.apache.kafka.common.security.plain.PlainLoginModule required 3 | username="alice" 4 | password="alice-secret"; 5 | }; -------------------------------------------------------------------------------- /examples/src/main/resources/jaas_proxy.conf: -------------------------------------------------------------------------------- 1 | KafkaServer { 2 | org.apache.kafka.common.security.plain.PlainLoginModule required 3 | user_alice="alice-secret"; 4 | }; -------------------------------------------------------------------------------- /examples/src/main/resources/log4j2.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /examples/src/main/resources/producer.properties: -------------------------------------------------------------------------------- 1 | bootstrap.servers=localhost:9092 2 | acks=all 3 | retries=0 4 | batch.size=16384 5 | auto.commit.interval.ms=1000 6 | linger.ms=0 7 | block.on.buffer.full=true 8 | # define it inside the code 9 | #key.serializer=org.apache.kafka.common.serialization.StringSerializer 10 | 
#value.serializer=org.apache.kafka.common.serialization.StringSerializer 11 | -------------------------------------------------------------------------------- /examples/src/main/resources/proxy.properties: -------------------------------------------------------------------------------- 1 | bootstrap.servers=localhost:9092 2 | security.protocol=SASL_PLAINTEXT 3 | sasl.mechanism=PLAIN 4 | sasl.enabled.mechanisms=PLAIN 5 | 6 | value.serializer=com.nucypher.kafka.clients.encrypt.AesStructuredMessageSerializer 7 | encryption.granular.data.accessor=com.nucypher.kafka.clients.granular.JsonDataAccessor 8 | encryption.public.key=src/main/resources/P521.pem 9 | encryption.granular.fields=a 10 | encryption.value.serializer=org.apache.kafka.common.serialization.ByteArraySerializer 11 | 12 | value.deserializer=com.nucypher.kafka.clients.decrypt.AesStructuredMessageDeserializer 13 | encryption.private.key=src/main/resources/P521.pem 14 | encryption.value.deserializer=org.apache.kafka.common.serialization.ByteArrayDeserializer -------------------------------------------------------------------------------- /examples/src/main/resources/schema-registry.properties: -------------------------------------------------------------------------------- 1 | # Copyright 2014 Confluent Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | listeners=http://0.0.0.0:8081 16 | kafkastore.connection.url=localhost:2181 17 | kafkastore.topic=_schemas 18 | debug=true 19 | 20 | kafkastore.sasl.mechanism=PLAIN 21 | kafkastore.security.protocol=SASL_PLAINTEXT -------------------------------------------------------------------------------- /gradle.properties: -------------------------------------------------------------------------------- 1 | version = 1.0-SNAPSHOT 2 | 3 | bouncyCastleVersion = 1.56 4 | curatorVersion = 2.11.1 5 | gradleRootVersion = 3.2.1 6 | guavaVersion = 20.0 7 | jacksonVersion = 2.8.5 8 | joptSimpleVersion = 6.0-alpha-1 9 | jsonPathVersion = 2.2.0 10 | junitVersion = 4.12 11 | log4jVersion = 2.7 12 | lombokVersion = 1.16.10 13 | mockitoVersion = 1.10.19 14 | powermockVersion = 1.6.5 15 | scalaVersion = 2.11.8 16 | slf4jVersion = 1.7.21 17 | zkclientVersion = 0.10 18 | spockVersion = 1.1-groovy-2.4-rc-3 19 | cglibVersion = 3.2.4 20 | avroVersion = 1.8.1 21 | confluentVersion = 3.1.1 22 | commonsIOVersion = 2.5 23 | jmhVersion = 1.17.4 24 | reflectionsVersion = 0.9.11 25 | commonsCryptoVersion = 1.0.0 -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | #Mon May 15 19:51:03 MSK 2017 2 | distributionBase=GRADLE_USER_HOME 3 | distributionPath=wrapper/dists 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | distributionUrl=https\://services.gradle.org/distributions/gradle-3.2.1-bin.zip 7 | -------------------------------------------------------------------------------- /gradlew.bat: 
-------------------------------------------------------------------------------- 1 | @if "%DEBUG%" == "" @echo off 2 | @rem ########################################################################## 3 | @rem 4 | @rem Gradle startup script for Windows 5 | @rem 6 | @rem ########################################################################## 7 | 8 | @rem Set local scope for the variables with windows NT shell 9 | if "%OS%"=="Windows_NT" setlocal 10 | 11 | set DIRNAME=%~dp0 12 | if "%DIRNAME%" == "" set DIRNAME=. 13 | set APP_BASE_NAME=%~n0 14 | set APP_HOME=%DIRNAME% 15 | 16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 17 | set DEFAULT_JVM_OPTS= 18 | 19 | @rem Find java.exe 20 | if defined JAVA_HOME goto findJavaFromJavaHome 21 | 22 | set JAVA_EXE=java.exe 23 | %JAVA_EXE% -version >NUL 2>&1 24 | if "%ERRORLEVEL%" == "0" goto init 25 | 26 | echo. 27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 28 | echo. 29 | echo Please set the JAVA_HOME variable in your environment to match the 30 | echo location of your Java installation. 31 | 32 | goto fail 33 | 34 | :findJavaFromJavaHome 35 | set JAVA_HOME=%JAVA_HOME:"=% 36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 37 | 38 | if exist "%JAVA_EXE%" goto init 39 | 40 | echo. 41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 42 | echo. 43 | echo Please set the JAVA_HOME variable in your environment to match the 44 | echo location of your Java installation. 45 | 46 | goto fail 47 | 48 | :init 49 | @rem Get command-line arguments, handling Windows variants 50 | 51 | if not "%OS%" == "Windows_NT" goto win9xME_args 52 | 53 | :win9xME_args 54 | @rem Slurp the command line arguments. 
55 | set CMD_LINE_ARGS= 56 | set _SKIP=2 57 | 58 | :win9xME_args_slurp 59 | if "x%~1" == "x" goto execute 60 | 61 | set CMD_LINE_ARGS=%* 62 | 63 | :execute 64 | @rem Setup the command line 65 | 66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 67 | 68 | @rem Execute Gradle 69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 70 | 71 | :end 72 | @rem End local scope for the variables with windows NT shell 73 | if "%ERRORLEVEL%"=="0" goto mainEnd 74 | 75 | :fail 76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 77 | rem the _cmd.exe /c_ return code! 78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 79 | exit /b 1 80 | 81 | :mainEnd 82 | if "%OS%"=="Windows_NT" endlocal 83 | 84 | :omega 85 | -------------------------------------------------------------------------------- /initialize_project.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # 1 4 | # Download submodules: Apache Kafka, bbs98-java 5 | git submodule init 6 | git submodule update 7 | 8 | # 2 9 | # Download gradle wrapper 10 | gradle wrapper -------------------------------------------------------------------------------- /proxy/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'java' 2 | apply plugin: 'application' 3 | 4 | group 'com.nucypher.kafka' 5 | 6 | sourceCompatibility = 1.7 7 | targetCompatibility = 1.7 8 | 9 | mainClassName = 'com.nucypher.kafka.proxy.ProxyServer' 10 | applicationName = 'nucypher-kafka-proxy' 11 | 12 | dependencies { 13 | 14 | compile project(':clients') 15 | compile project(':kafka:clients') 16 | compile (project(':kafka:core')) { 17 | exclude group: "org.slf4j", module: 'slf4j-api' 18 | exclude group: "org.slf4j", module: 'slf4j-log4j12' 19 | exclude group: "log4j", module: 'log4j' 
20 | } 21 | compile "commons-io:commons-io:$commonsIOVersion" 22 | 23 | testCompile "org.spockframework:spock-core:$spockVersion" 24 | testCompile "junit:junit:$junitVersion" 25 | testCompile "org.mockito:mockito-core:$mockitoVersion" 26 | } 27 | 28 | task copyToLib(type: Copy) { 29 | 30 | into "$buildDir/libs/lib" 31 | from configurations.runtime 32 | 33 | doLast { 34 | jar { 35 | archiveName = "$applicationName-${version}.${extension}" 36 | 37 | manifest { 38 | attributes("Main-Class": mainClassName) 39 | attributes("Class-Path": configurations.runtime.collect { "lib/$it.name" }.join(' ')) 40 | } 41 | } 42 | } 43 | 44 | } 45 | 46 | jar.dependsOn copyToLib 47 | 48 | applicationDistribution.from("config-example/") { 49 | into "config-example" 50 | } 51 | 52 | startScripts { 53 | doLast { 54 | def windowsScriptFile = file getWindowsScript() 55 | def unixScriptFile = file getUnixScript() 56 | windowsScriptFile.text = windowsScriptFile.text.replaceAll( 57 | 'CLASSPATH=\\S*', 'CLASSPATH=%APP_HOME%\\\\lib\\\\*') 58 | unixScriptFile.text = unixScriptFile.text.replaceAll( 59 | 'CLASSPATH=\\S*\n', 'CLASSPATH=\\$APP_HOME/lib/*\n') 60 | } 61 | } -------------------------------------------------------------------------------- /proxy/config-example/jaas_proxy.conf: -------------------------------------------------------------------------------- 1 | KafkaServer { 2 | org.apache.kafka.common.security.plain.PlainLoginModule required 3 | user_alice="alice-secret"; 4 | }; 5 | 6 | Client { 7 | org.apache.kafka.common.security.plain.PlainLoginModule required 8 | username="kafka" 9 | password="kafka-password"; 10 | }; -------------------------------------------------------------------------------- /proxy/config-example/proxy-broker.properties: -------------------------------------------------------------------------------- 1 | bootstrap.servers=localhost:9092 2 | security.protocol=SASL_PLAINTEXT 3 | sasl.mechanism=PLAIN 4 | sasl.enabled.mechanisms=PLAIN 5 | 6 | proxy.type=BROKER 7 | 
reencryption.keys.path=/keys 8 | -------------------------------------------------------------------------------- /proxy/src/main/java/com/nucypher/kafka/proxy/Acceptor.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.proxy; 2 | 3 | import org.apache.commons.io.IOUtils; 4 | import org.slf4j.Logger; 5 | import org.slf4j.LoggerFactory; 6 | 7 | import java.io.Closeable; 8 | import java.io.IOException; 9 | import java.net.InetSocketAddress; 10 | import java.nio.channels.SelectionKey; 11 | import java.nio.channels.Selector; 12 | import java.nio.channels.ServerSocketChannel; 13 | import java.nio.channels.SocketChannel; 14 | import java.util.Iterator; 15 | 16 | /** 17 | * Acceptor thread that handles new connections 18 | */ 19 | public class Acceptor extends Thread implements Closeable { 20 | 21 | private static final Logger LOGGER = LoggerFactory.getLogger(Acceptor.class); 22 | 23 | private final ServerSocketChannel serverSocketChannel; 24 | private final String serverHost; 25 | private final int port; 26 | private final Processor[] processors; 27 | 28 | private int currentProcessor = 0; 29 | 30 | /** 31 | * @param serverHost server host 32 | * @param port local port 33 | * @param processors array of processors 34 | * @throws IOException when error while opening socket 35 | */ 36 | public Acceptor(String serverHost, int port, Processor[] processors) throws IOException { 37 | setName("acceptor"); 38 | serverSocketChannel = ServerSocketChannel.open(); 39 | serverSocketChannel.configureBlocking(false); 40 | serverSocketChannel.socket().bind(new InetSocketAddress(serverHost, port)); 41 | this.port = serverSocketChannel.socket().getLocalPort(); 42 | this.processors = processors; 43 | this.serverHost = serverHost; 44 | } 45 | 46 | /** 47 | * @return local port 48 | */ 49 | public int getPort() { 50 | return port; 51 | } 52 | 53 | @Override 54 | public void run() { 55 | try { 56 | java.nio.channels.Selector 
acceptSelector = java.nio.channels.Selector.open(); 57 | serverSocketChannel.register(acceptSelector, SelectionKey.OP_ACCEPT); 58 | LOGGER.info("Acceptor listens on {}:{}", serverHost, port); 59 | while (!isInterrupted() && serverSocketChannel.isOpen()) { 60 | if (acceptSelector.select(1000) > 0) { 61 | accept(acceptSelector); 62 | } 63 | } 64 | } catch (IOException e) { 65 | LOGGER.error("Error in acceptor thread", e); 66 | } finally { 67 | close(); 68 | } 69 | } 70 | 71 | private void accept(Selector acceptSelector) throws IOException { 72 | Iterator it = acceptSelector.selectedKeys().iterator(); 73 | while (it.hasNext()) { 74 | SelectionKey key = it.next(); 75 | if (key.isAcceptable()) { 76 | SocketChannel socketChannel = ((ServerSocketChannel) key.channel()).accept(); 77 | socketChannel.configureBlocking(false); 78 | if (LOGGER.isDebugEnabled()) { 79 | LOGGER.debug("Accepted new client connection {}:{}", 80 | socketChannel.socket().getInetAddress().getHostAddress(), 81 | socketChannel.socket().getPort()); 82 | } 83 | processors[currentProcessor].accept(socketChannel); 84 | currentProcessor = (currentProcessor + 1) % processors.length; 85 | } 86 | it.remove(); 87 | } 88 | } 89 | 90 | @Override 91 | public void close() { 92 | if (!isInterrupted()) { 93 | interrupt(); 94 | } 95 | IOUtils.closeQuietly(serverSocketChannel); 96 | LOGGER.info("Acceptor is stopped"); 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /proxy/src/main/java/com/nucypher/kafka/proxy/BrokerChannelBuilders.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.proxy; 2 | 3 | import org.apache.kafka.common.config.SaslConfigs; 4 | import org.apache.kafka.common.config.types.Password; 5 | import org.apache.kafka.common.network.ChannelBuilder; 6 | import org.apache.kafka.common.network.Mode; 7 | import org.apache.kafka.common.network.SaslChannelBuilder; 8 | import 
org.apache.kafka.common.protocol.SecurityProtocol; 9 | import org.apache.kafka.common.security.JaasContext; 10 | 11 | import java.util.HashMap; 12 | import java.util.Map; 13 | 14 | /** 15 | * Modified {@link org.apache.kafka.common.network.ChannelBuilders} 16 | * for creating channels from proxy to broker 17 | */ 18 | public class BrokerChannelBuilders { 19 | 20 | private BrokerChannelBuilders() { 21 | } 22 | 23 | /** 24 | * @param securityProtocol the securityProtocol 25 | * @param configs client config 26 | * @param clientSaslMechanism SASL mechanism 27 | * @param jaasConfig JAAS configuration 28 | * @return the configured `ChannelBuilder` 29 | * @throws IllegalArgumentException if `clientSaslMechanism` is not null 30 | * for SASL security protocol 31 | */ 32 | public static ChannelBuilder brokerChannelBuilder(SecurityProtocol securityProtocol, 33 | Map configs, 34 | String clientSaslMechanism, 35 | Password jaasConfig) { 36 | if (securityProtocol == SecurityProtocol.SASL_PLAINTEXT || 37 | securityProtocol == SecurityProtocol.SASL_SSL) { 38 | if (clientSaslMechanism == null) 39 | throw new IllegalArgumentException( 40 | "`clientSaslMechanism` must be non-null in client mode " + 41 | "if `securityProtocol` is `" + securityProtocol + "`"); 42 | } 43 | return create(securityProtocol, configs, clientSaslMechanism, jaasConfig); 44 | } 45 | 46 | private static ChannelBuilder create(SecurityProtocol securityProtocol, 47 | Map configs, 48 | String clientSaslMechanism, 49 | Password jaasConfig) { 50 | Mode mode = Mode.CLIENT; 51 | ChannelBuilder channelBuilder; 52 | switch (securityProtocol) { 53 | case SASL_SSL: 54 | case SASL_PLAINTEXT: 55 | Map jaasConfigs = new HashMap<>(1); 56 | jaasConfigs.put(SaslConfigs.SASL_JAAS_CONFIG, jaasConfig); 57 | JaasContext jaasContext = JaasContext.load( 58 | JaasContext.Type.CLIENT, null, jaasConfigs); 59 | channelBuilder = new SaslChannelBuilder(mode, jaasContext, securityProtocol, 60 | clientSaslMechanism, true, null); 61 | break; 62 
| default: 63 | throw new IllegalArgumentException("Unexpected securityProtocol " + securityProtocol); 64 | } 65 | 66 | channelBuilder.configure(configs); 67 | return channelBuilder; 68 | } 69 | 70 | } 71 | -------------------------------------------------------------------------------- /proxy/src/main/java/com/nucypher/kafka/proxy/ProxyType.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.proxy; 2 | 3 | /** 4 | * Type of proxy-server 5 | */ 6 | public enum ProxyType { 7 | 8 | /** 9 | * Client proxy. Proxy works as serializer/deserializer 10 | */ 11 | CLIENT, 12 | /** 13 | * Broker proxy. Proxy works as re-encryptor 14 | */ 15 | BROKER; 16 | 17 | } 18 | -------------------------------------------------------------------------------- /proxy/src/main/java/com/nucypher/kafka/proxy/Utils.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.proxy; 2 | 3 | import com.nucypher.kafka.errors.CommonException; 4 | 5 | import java.lang.reflect.Field; 6 | import java.nio.channels.SocketChannel; 7 | 8 | /** 9 | * Proxy utils 10 | */ 11 | public class Utils { 12 | 13 | private Utils() { 14 | 15 | } 16 | 17 | /** 18 | * Get id for channel 19 | * 20 | * @param channel channel 21 | * @return id 22 | */ 23 | public static String id(SocketChannel channel) { 24 | return channel.socket().getLocalAddress().getHostAddress() + ":" + 25 | channel.socket().getLocalPort() + "-" + 26 | channel.socket().getInetAddress().getHostAddress() + ":" + 27 | channel.socket().getPort(); 28 | } 29 | 30 | /** 31 | * Get destination id from source id 32 | * 33 | * @param source source id 34 | * @return destination id 35 | */ 36 | public static String getDestination(String source) { 37 | if (source.contains("-broker")) { 38 | return source.substring(0, source.indexOf("-broker")); 39 | } else { 40 | return source + "-broker"; 41 | } 42 | } 43 | 44 | /** 45 | * Checks if the 
destination is broker 46 | * 47 | * @param destination destination id 48 | * @return result of checking 49 | */ 50 | public static boolean isToBroker(String destination) { 51 | return destination.endsWith("-broker"); 52 | } 53 | 54 | /** 55 | * Get field value 56 | * 57 | * @param object object 58 | * @param fieldName field name 59 | * @return value 60 | */ 61 | @SuppressWarnings("unchecked") 62 | public static T getField(Object object, String fieldName) { 63 | Field hostNameField; 64 | try { 65 | hostNameField = object.getClass().getDeclaredField(fieldName); 66 | hostNameField.setAccessible(true); 67 | return (T) hostNameField.get(object); 68 | } catch (NoSuchFieldException | IllegalAccessException e) { 69 | throw new CommonException(e); 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /proxy/src/main/java/com/nucypher/kafka/proxy/config/AbstractProxyConfig.java: -------------------------------------------------------------------------------- 1 | package com.nucypher.kafka.proxy.config; 2 | 3 | import com.nucypher.kafka.clients.ReEncryptionHandlerConfigs; 4 | import org.apache.kafka.clients.CommonClientConfigs; 5 | import org.apache.kafka.common.config.AbstractConfig; 6 | import org.apache.kafka.common.config.ConfigDef; 7 | import org.apache.kafka.common.config.SaslConfigs; 8 | 9 | import java.util.Map; 10 | 11 | /** 12 | * Abstract proxy configuration 13 | */ 14 | public class AbstractProxyConfig extends AbstractConfig { 15 | 16 | private static final ConfigDef CONFIG; 17 | 18 | /** 19 | * security.protocol 20 | */ 21 | public static final String SECURITY_PROTOCOL_CONFIG = CommonClientConfigs.SECURITY_PROTOCOL_CONFIG; 22 | 23 | /** 24 | * sasl.enabled.mechanisms 25 | */ 26 | public static final String SASL_ENABLED_MECHANISMS = SaslConfigs.SASL_ENABLED_MECHANISMS; 27 | 28 | /** 29 | * sasl.kerberos.principal.to.local.rules 30 | */ 31 | public static final String SASL_KERBEROS_PRINCIPAL_TO_LOCAL_RULES = 32 | 
SaslConfigs.SASL_KERBEROS_PRINCIPAL_TO_LOCAL_RULES; 33 | 34 | static { 35 | CONFIG = ReEncryptionHandlerConfigs.getConfigDef() 36 | // security support 37 | .define(SASL_ENABLED_MECHANISMS, 38 | ConfigDef.Type.LIST, 39 | SaslConfigs.DEFAULT_SASL_ENABLED_MECHANISMS, 40 | ConfigDef.Importance.MEDIUM, 41 | SaslConfigs.SASL_ENABLED_MECHANISMS_DOC) 42 | .define(SASL_KERBEROS_PRINCIPAL_TO_LOCAL_RULES, 43 | ConfigDef.Type.LIST, 44 | SaslConfigs.DEFAULT_SASL_KERBEROS_PRINCIPAL_TO_LOCAL_RULES, 45 | ConfigDef.Importance.MEDIUM, 46 | SaslConfigs.SASL_KERBEROS_PRINCIPAL_TO_LOCAL_RULES_DOC) 47 | .define(SECURITY_PROTOCOL_CONFIG, 48 | ConfigDef.Type.STRING, 49 | CommonClientConfigs.DEFAULT_SECURITY_PROTOCOL, 50 | ConfigDef.Importance.MEDIUM, 51 | CommonClientConfigs.SECURITY_PROTOCOL_DOC) 52 | .withClientSslSupport() 53 | .withClientSaslSupport(); 54 | } 55 | 56 | public static ConfigDef getConfigDef() { 57 | return CONFIG; 58 | } 59 | 60 | public AbstractProxyConfig(Map props) { 61 | super(CONFIG, props); 62 | } 63 | 64 | AbstractProxyConfig(ConfigDef definition, Map originals) { 65 | super(definition, originals, true); 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /proxy/src/main/resources/log4j2.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /proxy/src/test/resources/jaas.conf: -------------------------------------------------------------------------------- 1 | KafkaServer { 2 | org.apache.kafka.common.security.plain.PlainLoginModule required 3 | username="alice" 4 | password="alice-secret" 5 | user_alice="alice-secret"; 6 | }; -------------------------------------------------------------------------------- /screenshots/EC_Keys.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/EC_Keys.png -------------------------------------------------------------------------------- /screenshots/Full_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Full_1.png -------------------------------------------------------------------------------- /screenshots/Full_2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Full_2.png -------------------------------------------------------------------------------- /screenshots/Full_3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Full_3.png -------------------------------------------------------------------------------- /screenshots/Full_4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Full_4.png -------------------------------------------------------------------------------- /screenshots/Full_5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Full_5.png -------------------------------------------------------------------------------- /screenshots/Full_6.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Full_6.png -------------------------------------------------------------------------------- /screenshots/Granular_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Granular_1.png -------------------------------------------------------------------------------- /screenshots/Granular_10.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Granular_10.png -------------------------------------------------------------------------------- /screenshots/Granular_11.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Granular_11.png -------------------------------------------------------------------------------- /screenshots/Granular_12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Granular_12.png -------------------------------------------------------------------------------- /screenshots/Granular_13.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Granular_13.png -------------------------------------------------------------------------------- /screenshots/Granular_2.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Granular_2.png -------------------------------------------------------------------------------- /screenshots/Granular_3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Granular_3.png -------------------------------------------------------------------------------- /screenshots/Granular_4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Granular_4.png -------------------------------------------------------------------------------- /screenshots/Granular_5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Granular_5.png -------------------------------------------------------------------------------- /screenshots/Granular_6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Granular_6.png -------------------------------------------------------------------------------- /screenshots/Granular_7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Granular_7.png -------------------------------------------------------------------------------- /screenshots/Granular_8.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Granular_8.png -------------------------------------------------------------------------------- /screenshots/Granular_9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Granular_9.png -------------------------------------------------------------------------------- /screenshots/Kafka.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Kafka.png -------------------------------------------------------------------------------- /screenshots/Simple.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/Simple.png -------------------------------------------------------------------------------- /screenshots/ZooKeeper.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/screenshots/ZooKeeper.png -------------------------------------------------------------------------------- /settings.gradle: -------------------------------------------------------------------------------- 1 | include 'admin' 2 | include 'commons' 3 | include 'clients' 4 | include 'examples' 5 | include 'proxy' 6 | 7 | /////////////////////////////////////////////////// 8 | // 9 | // As git submodules 10 | // 11 | 12 | // https://github.com/nucypher/nucypher-crypto-oss.git 13 | include 'crypto' 14 | project(':crypto').projectDir= new File('crypto') 15 | 16 | // https://github.com/nucypher/kafka.git 17 | include 'kafka' 
18 | project(':kafka').projectDir = new File('kafka') 19 | 20 | include 'kafka:core' 21 | project(':kafka:core').projectDir = new File('kafka/core') 22 | 23 | include 'kafka:clients' 24 | project(':kafka:clients').projectDir = new File('kafka/clients') 25 | 26 | include 'kafka:examples' 27 | project(':kafka:examples').projectDir = new File('kafka/examples') 28 | 29 | include 'kafka:streams' 30 | project(':kafka:streams').projectDir = new File('kafka/streams') 31 | 32 | include 'kafka:streams:examples' 33 | project(':kafka:streams:examples').projectDir = new File('kafka/streams/examples') 34 | 35 | include 'kafka:tools' 36 | project(':kafka:tools').projectDir = new File('kafka/tools') 37 | 38 | include 'kafka:connect:api' 39 | project(':kafka:connect:api').projectDir = new File('kafka/connect/api') 40 | 41 | include 'kafka:connect:file' 42 | project(':kafka:connect:file').projectDir = new File('kafka/connect/file') 43 | 44 | include 'kafka:connect:json' 45 | project(':kafka:connect:json').projectDir = new File('kafka/connect/json') 46 | 47 | include 'kafka:connect:runtime' 48 | project(':kafka:connect:runtime').projectDir = new File('kafka/connect/runtime') 49 | 50 | include 'kafka:connect:transforms' 51 | project(':kafka:connect:transforms').projectDir = new File('kafka/connect/transforms') 52 | 53 | include 'kafka:jmh-benchmarks' 54 | project(':kafka:jmh-benchmarks').projectDir = new File('kafka/jmh-benchmarks') 55 | 56 | include 'kafka:log4j-appender' 57 | project(':kafka:log4j-appender').projectDir = new File('kafka/log4j-appender') -------------------------------------------------------------------------------- /tools/aes.256.java.files/UnlimitedJCEPolicyJDK7.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/tools/aes.256.java.files/UnlimitedJCEPolicyJDK7.zip 
-------------------------------------------------------------------------------- /tools/aes.256.java.files/jce_policy-8.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/tools/aes.256.java.files/jce_policy-8.zip -------------------------------------------------------------------------------- /tools/aes.256.java.files/original/security/blacklist: -------------------------------------------------------------------------------- 1 | # JNLPAppletLauncher applet-launcher.jar 2 | SHA1-Digest-Manifest: 5Bo5/eg892hQ9mgbUW56iDmsp1k= 3 | 4 | # 7066583 5 | SHA1-Digest-Manifest: x17xGEFzBRXY2pLtXiIbp8J7U9M= 6 | SHA1-Digest-Manifest: ya6YNTzMCFYUO4lwhmz9OWhhIz8= 7 | SHA1-Digest-Manifest: YwuPyF/KMcxcQhgxilzNybFM2+8= 8 | 9 | # 7066809 10 | SHA1-Digest-Manifest: dBKbNW1PZSjJ0lGcCeewcCrYx5g= 11 | SHA1-Digest-Manifest: lTYCkD1wm5uDcp2G2PNPcADG/ds= 12 | SHA1-Digest-Manifest: GKwQJtblDEuSVf3LdC1ojpUJRGg= 13 | 14 | # 7186931 15 | SHA1-Digest-Manifest: 0CUppG7J6IL8xHqPCnA377Koahw= 16 | SHA1-Digest-Manifest: 3aJU1qSK6IYmt5MSh2IIIj5G1XE= 17 | SHA1-Digest-Manifest: 8F4F0TXA4ureZbfEXWIFm76QGg4= 18 | SHA1-Digest-Manifest: B1NaDg834Bgg+VE9Ca+tDZOd2BI= 19 | SHA1-Digest-Manifest: bOoQga+XxC3j0HiP552+fYCdswo= 20 | SHA1-Digest-Manifest: C4mtepHAyIKiAjjqOm6xYMo8TkM= 21 | SHA1-Digest-Manifest: cDXEH+bR01R8QVxL+KFKYqFgsR0= 22 | SHA1-Digest-Manifest: cO2ccW2cckTvpR0HVgQa362PyHI= 23 | SHA1-Digest-Manifest: D/TyRle6Sl+CDuBFmdOPy03ERaw= 24 | SHA1-Digest-Manifest: eJfWm86yHp2Oz5U8WrMKbpv6GGA= 25 | SHA1-Digest-Manifest: g3mA5HqcRBlKaUVQsapnKhOSEas= 26 | SHA1-Digest-Manifest: HSPXCvBNG/PaSXg8thDGqSeZlR8= 27 | SHA1-Digest-Manifest: iMHjGyv5gEnTi8uj68yzalml8XQ= 28 | SHA1-Digest-Manifest: jd5MvBoWh7aHiUa1v15lqXVQ9y4= 29 | SHA1-Digest-Manifest: L0l3WOuMNWujmXo5+O/GtmGyyYk= 30 | SHA1-Digest-Manifest: l4meuozuSFLkTZTS6xW3sixdlBI= 31 | SHA1-Digest-Manifest: 
l93uYyDZGyynzYTknp31yyuNivU= 32 | SHA1-Digest-Manifest: lbhLWSopUIqPQ08UVIA927Y7jZQ= 33 | SHA1-Digest-Manifest: nv5+0eBNHpRIsB9D6TmEbWoNCTs= 34 | SHA1-Digest-Manifest: obWCTaz3uOZwDBDZUsbrrTKoDig= 35 | SHA1-Digest-Manifest: odqJCMnKdgvQLOCAMSWEj1EPQTc= 36 | SHA1-Digest-Manifest: OfQZHjo8GK14bHD4z4dDIp4ZFjE= 37 | SHA1-Digest-Manifest: PVAkXuUCgiDQI19GPrw01Vz4rGQ= 38 | SHA1-Digest-Manifest: Q9HXbUcSCjhwkgpk5NNVG/sArVA= 39 | SHA1-Digest-Manifest: qMVUh9i3yJcTKpuZYSFZH9dspqE= 40 | SHA1-Digest-Manifest: TFYT30IirbYk89l/uKykM6g2cVQ= 41 | SHA1-Digest-Manifest: trhKo6XiSGxRrS//rCL9e3Ca6D4= 42 | SHA1-Digest-Manifest: uWffvhFaWVw3lrER/SJH7Hl4yFg= 43 | SHA1-Digest-Manifest: vSd+kv1p+3jrVK9FjDCBJcoy5us= 44 | SHA1-Digest-Manifest: WX77FlRyFyeUriu+xi/PE1uLALU= 45 | SHA1-Digest-Manifest: WyqHV02O4PYZkcbidH4HKlp/8hY= 46 | SHA1-Digest-Manifest: Y82nn7CFTu1XAOCDjemWwyPLssg= 47 | 48 | # 8002218 49 | SHA1-Digest-Manifest: saJIx2dYM4ac+paV09g7hyvF7rs= 50 | SHA1-Digest-Manifest: hYV/Hs2xEIHeidB+9ZVjAtTBr/g= 51 | 52 | # 8016260 53 | SHA1-Digest-Manifest: 0smdeWASG13zex3fGDxo83K7qYo= 54 | SHA1-Digest-Manifest: 4WnMPDBMLBjDfKvAj69W0KuSS8w= 55 | SHA1-Digest-Manifest: 8j/GuWpBhLDCVZBHU1MQNOiDsvM= 56 | SHA1-Digest-Manifest: AR4m0ypEY28WDWxcbuFu9kj5Zjc= 57 | SHA1-Digest-Manifest: GfE+OoJReiuOJ+pKLOZu+Vu0WEM= 58 | SHA1-Digest-Manifest: X3VMVm3b45zxF3/ADtQHa/uIKJU= 59 | SHA1-Digest-Manifest: arC0JJpAy9ZvSEXRYy49Ye2CibQ= 60 | SHA1-Digest-Manifest: pkjJdE2tSwj6et5Bi4mMWF5acSc= 61 | SHA1-Digest-Manifest: sn3aY301ITBSofNBRNcr7ThjaGI= 62 | 63 | # 8017544 64 | SHA1-Digest-Manifest: fkUYOYVHwIrvWEVwB1HAcovhNEc= 65 | 66 | # 8021173 67 | SHA1-Digest-Manifest: Dz2BT/+NVo3sql0U3m5sPFdH/VA= 68 | 69 | # 8037056 70 | SHA1-Digest-Manifest: MVlhreomxYWnvgDArNM90zet5W0= 71 | SHA1-Digest-Manifest: w5irJZDYfOvvHy+O/VIIz2cKw3w= 72 | SHA1-Digest-Manifest: KPRzEcU95ksGJcok+QoDFLuH1QQ= 73 | SHA1-Digest-Manifest: ez41Owng2PUbr2C5rioCQQsm/b0= 74 | SHA-256-Digest-Manifest: VLL/4H0q9NvvtmcFOJ4Xby/5A8CLvniP17VCOFDh7sU= 75 
| SHA1-Digest-Manifest: pmEfDqcnm+9D5OfuJbCdyl/5KhQ= 76 | SHA1-Digest-Manifest: HG+pOMGiD4nq7aBST8PmLBRXYSo= 77 | SHA1-Digest-Manifest: 44zEyySUXrZjoYLatw4tftx8clM= 78 | SHA1-Digest-Manifest: fK87QwgSCJzZ/6qEPFG3AkrKvZg= 79 | SHA1-Digest-Manifest: jElS0V6NAE1eAGRcoipqAh4m3gE= 80 | SHA1-Digest-Manifest: XBjfIsdA7VHbWJhwq2jQtm82bL0= 81 | SHA1-Digest-Manifest: /pFoxkI0CBIj8WqLDYR5jpaQ3uU= 82 | SHA1-Digest-Manifest: LbWvEAjCRxp5eJGBM23aT+HIkD4= 83 | SHA1-Digest-Manifest: GIOU0xrFoKvwp/fe0MV5BeQG2dQ= 84 | SHA1-Digest-Manifest: Tb/mRuIKYF7NBYRSZycBgpHaj6s= 85 | SHA1-Digest-Manifest: wD5oyERifHf8PXCgztVoSicsEHc= 86 | SHA1-Digest-Manifest: rWsJWE9ROgb01ZMvLIboUaKFdYg= 87 | SHA1-Digest-Manifest: BYVBeMAT5dhNcv11cG8w00udM2I= 88 | SHA1-Digest-Manifest: 3+g4kzj8ObkUJBTmbbgrXInfziU= 89 | SHA-256-Digest-Manifest: Kx3zq14JuV5ByE35cwkegUnCsiAvBRa4Q6iNQ/JO7I0= 90 | SHA1-Digest-Manifest: L+0tzPoloVGMtoykhAuZASD3g0g= 91 | 92 | # 8067454 93 | SHA1-Digest-Manifest: mF8yk1Hxc1uH9UorvfG2GJ+ScqY= 94 | SHA1-Digest-Manifest: yUcLgsHB7H6rf04gLNe0ikKrmfI= 95 | SHA1-Digest-Manifest: UcdnWBajIuVvJjoGHAPA11Gkg7E= 96 | -------------------------------------------------------------------------------- /tools/aes.256.java.files/original/security/blacklisted.certs: -------------------------------------------------------------------------------- 1 | Algorithm=SHA-256 2 | 14E6D2764A4B06701C6CBC376A253775F79C782FBCB6C0EE6F99DE4BA1024ADD 3 | 31C8FD37DB9B56E708B03D1F01848B068C6DA66F36FB5D82C008C6040FA3E133 4 | 3946901F46B0071E90D78279E82FABABCA177231A704BE72C5B0E8918566EA66 5 | 450F1B421BB05C8609854884559C323319619E8B06B001EA2DCBB74A23AA3BE2 6 | 4CBBF8256BC9888A8007B2F386940A2E394378B0D903CBB3863C5A6394B889CE 7 | 4FEE0163686ECBD65DB968E7494F55D84B25486D438E9DE558D629D28CD4D176 8 | 5E83124D68D24E8E177E306DF643D5EA99C5A94D6FC34B072F7544A1CABB7C7B 9 | 76A45A496031E4DD2D7ED23E8F6FF97DBDEA980BAAC8B0BA94D7EDB551348645 10 | 8A1BD21661C60015065212CC98B1ABB50DFD14C872A208E66BAE890F25C448AF 11 | 
9ED8F9B0E8E42A1656B8E1DD18F42BA42DC06FE52686173BA2FC70E756F207DC 12 | A686FEE577C88AB664D0787ECDFFF035F4806F3DE418DC9E4D516324FFF02083 13 | B8686723E415534BC0DBD16326F9486F85B0B0799BF6639334E61DAAE67F36CD 14 | D24566BF315F4E597D6E381C87119FB4198F5E9E2607F5F4AB362EF7E2E7672F 15 | D3A936E1A7775A45217C8296A1F22AC5631DCDEC45594099E78EEEBBEDCBA967 16 | DF21016B00FC54F9FE3BC8B039911BB216E9162FAD2FD14D990AB96E951B49BE 17 | F5B6F88F75D391A4B1EB336F9E201239FB6B1377DB8CFA7B84736216E5AFFFD7 18 | FC02FD48DB92D4DCE6F11679D38354CF750CFC7F584A520EB90BDE80E241F2BD 19 | FDEDB5BDFCB67411513A61AEE5CB5B5D7C52AF06028EFC996CC1B05B1D6CEA2B 20 | -------------------------------------------------------------------------------- /tools/aes.256.java.files/original/security/cacerts: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/tools/aes.256.java.files/original/security/cacerts -------------------------------------------------------------------------------- /tools/aes.256.java.files/original/security/java.policy: -------------------------------------------------------------------------------- 1 | 2 | // Standard extensions get all permissions by default 3 | 4 | grant codeBase "file:${{java.ext.dirs}}/*" { 5 | permission java.security.AllPermission; 6 | }; 7 | 8 | // default permissions granted to all domains 9 | 10 | grant { 11 | // Allows any thread to stop itself using the java.lang.Thread.stop() 12 | // method that takes no argument. 13 | // Note that this permission is granted by default only to remain 14 | // backwards compatible. 15 | // It is strongly recommended that you either remove this permission 16 | // from this policy file or further restrict it to code sources 17 | // that you specify, because Thread.stop() is potentially unsafe. 18 | // See the API specification of java.lang.Thread.stop() for more 19 | // information. 
20 | permission java.lang.RuntimePermission "stopThread"; 21 | 22 | // allows anyone to listen on dynamic ports 23 | permission java.net.SocketPermission "localhost:0", "listen"; 24 | 25 | // "standard" properies that can be read by anyone 26 | 27 | permission java.util.PropertyPermission "java.version", "read"; 28 | permission java.util.PropertyPermission "java.vendor", "read"; 29 | permission java.util.PropertyPermission "java.vendor.url", "read"; 30 | permission java.util.PropertyPermission "java.class.version", "read"; 31 | permission java.util.PropertyPermission "os.name", "read"; 32 | permission java.util.PropertyPermission "os.version", "read"; 33 | permission java.util.PropertyPermission "os.arch", "read"; 34 | permission java.util.PropertyPermission "file.separator", "read"; 35 | permission java.util.PropertyPermission "path.separator", "read"; 36 | permission java.util.PropertyPermission "line.separator", "read"; 37 | 38 | permission java.util.PropertyPermission "java.specification.version", "read"; 39 | permission java.util.PropertyPermission "java.specification.vendor", "read"; 40 | permission java.util.PropertyPermission "java.specification.name", "read"; 41 | 42 | permission java.util.PropertyPermission "java.vm.specification.version", "read"; 43 | permission java.util.PropertyPermission "java.vm.specification.vendor", "read"; 44 | permission java.util.PropertyPermission "java.vm.specification.name", "read"; 45 | permission java.util.PropertyPermission "java.vm.version", "read"; 46 | permission java.util.PropertyPermission "java.vm.vendor", "read"; 47 | permission java.util.PropertyPermission "java.vm.name", "read"; 48 | }; 49 | 50 | -------------------------------------------------------------------------------- /tools/aes.256.java.files/original/security/javaws.policy: -------------------------------------------------------------------------------- 1 | 2 | grant codeBase "file:${jnlpx.home}/javaws.jar" { 3 | permission java.security.AllPermission; 4 
| }; 5 | 6 | -------------------------------------------------------------------------------- /tools/aes.256.java.files/original/security/trusted.libraries: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nucypher/kafka-as-module-oss/637451e622f7d9f2f47acf83be07ebf4865a516d/tools/aes.256.java.files/original/security/trusted.libraries -------------------------------------------------------------------------------- /tools/create.patch/apply_patch.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # right now make patch for particular version of Kafka and Scala 4 | # kafka_2.10-0.10.1.1.jar 5 | 6 | KAFKA_DIR=$1 7 | 8 | echo "Trying to patch a Kafka directory:"$KAFKA_DIR 9 | 10 | # Check that the Kafka directory exists 11 | if [ -d "$KAFKA_DIR" ] 12 | then 13 | echo "Kafka directory is present. Trying to apply a patch!" 14 | else 15 | echo "Kafka directory is missing. Unable to patch Kafka!" 16 | exit 1 17 | fi 18 | 19 | 20 | KAFKA_JAR=kafka_2.10-0.10.1.1.jar 21 | PATCH_DIR=tools/create.patch/kafka-patch 22 | 23 | # validate that correct version of Kafka and Scala are used 24 | if [ -f "$KAFKA_DIR/libs/$KAFKA_JAR" ] 25 | then 26 | echo "Kafka original '$KAFKA_JAR' file is present. Trying to apply a patch!" 27 | else 28 | echo "Kafka original '$KAFKA_JAR' file is missing. Unable to patch Kafka!" 
29 | exit 1 30 | fi 31 | 32 | # patched kafka-run-class.sh with bunch of NuCypher *.jar's 33 | cp $PATCH_DIR/bin/kafka-run-class.sh $KAFKA_DIR/bin 34 | 35 | # clean up previous patch (must match the libs/ install path used below) 36 | if [ -d "$KAFKA_DIR/libs/nucypher" ] 37 | then 38 | rm -rf $KAFKA_DIR/libs/nucypher 39 | fi 40 | 41 | cp -avr $PATCH_DIR/libs/nucypher $KAFKA_DIR/libs/ 42 | 43 | # copy patched Kafka file 44 | cp $PATCH_DIR/libs/kafka_2.10-1.0-SNAPSHOT.jar $KAFKA_DIR/libs/$KAFKA_JAR 45 | 46 | -------------------------------------------------------------------------------- /tools/create.patch/create_patch.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # 4 | ./build_project.sh 5 | 6 | PATCH_DIR=tools/create.patch/kafka-patch 7 | 8 | # clean up previous patch 9 | if [ -d "$PATCH_DIR" ] 10 | then 11 | rm -rf $PATCH_DIR/ 12 | fi 13 | 14 | ## 15 | mkdir $PATCH_DIR 16 | mkdir $PATCH_DIR/libs 17 | mkdir $PATCH_DIR/libs/nucypher 18 | mkdir $PATCH_DIR/libs/nucypher/lib 19 | mkdir $PATCH_DIR/bin 20 | 21 | # 22 | cp -avr admin/build/libs/lib/*.jar $PATCH_DIR/libs/nucypher/lib/ 23 | cp -avr clients/build/libs/lib/*.jar $PATCH_DIR/libs/nucypher/lib/ 24 | cp -avr commons/build/libs/lib/*.jar $PATCH_DIR/libs/nucypher/lib/ 25 | 26 | # 27 | cp -av admin/build/libs/*.jar $PATCH_DIR/libs/nucypher 28 | cp -av clients/build/libs/*.jar $PATCH_DIR/libs/nucypher 29 | cp -av commons/build/libs/*.jar $PATCH_DIR/libs/nucypher 30 | 31 | # patched kafka-run-class.sh with bunch of NuCypher .jar's 32 | cp tools/create.patch/kafka-run-class.sh $PATCH_DIR/bin 33 | 34 | 35 | rm -rf $PATCH_DIR/libs/nucypher/lib/nucypher*.jar 36 | 37 | 38 | # kafka patched jar 39 | cp kafka/core/build/libs/kafka_2.10-1.0-SNAPSHOT.jar $PATCH_DIR/libs/ 40 | 41 | 42 | tar cvzf nucypher-patch-kafka_2.10-1.0-SNAPSHOT.tar.gz tools/create.patch/apply_patch.sh ./README.md tools/create.patch/kafka-patch/ 43 | 44 | 45 | 46 | 
--------------------------------------------------------------------------------