├── .gitignore ├── .scalafmt.conf ├── LICENSE ├── README.md ├── aerospike-scala-domain └── src │ └── main │ └── scala │ └── ru │ └── tinkoff │ └── aerospikescala │ └── domain │ └── ABin.scala ├── aerospike-scala-example └── src │ └── main │ ├── protobuf │ └── designers.proto │ └── scala │ └── ru │ └── tinkoff │ └── aerospikeexamples │ └── example │ ├── AClient.scala │ ├── CleanUp.scala │ ├── KBSampleApp.scala │ ├── KBSampleScheme.scala │ ├── ProtoSample.scala │ ├── ProtoScheme.scala │ ├── SampleApp.scala │ ├── SampleCollectionsApp.scala │ └── scheme.scala ├── aerospike-scala-macros └── src │ └── main │ └── scala │ └── ru │ └── tinkoff │ └── aerospikemacro │ ├── cast │ └── Caster.scala │ ├── converters │ ├── BinWrapper.scala │ ├── KeyWrapper.scala │ └── Utils.scala │ ├── domain │ └── DBCredentials.scala │ └── printer │ └── Printer.scala ├── aerospike-scala-proto └── src │ ├── main │ ├── protobuf │ │ └── designers.proto │ └── scala │ │ └── ru │ │ └── tinkoff │ │ └── aerospikeproto │ │ └── wrapper │ │ └── ProtoBinWrapper.scala │ └── test │ └── scala │ └── ru.tinkoff.aerospikeproto.wrapper │ └── ProtoBinTest.scala ├── build.sbt ├── cookbook ├── BinMagic.md ├── KeyMagic.md ├── SpikeImplMethods.md ├── domain.md ├── protobuf │ └── ProtoBinWrapper.md └── schemes │ ├── anyBinTypes.md │ └── oneBinType.md ├── project ├── Dependencies.scala ├── assembly.sbt ├── build.properties ├── plugins.sbt └── scalapb.sbt └── src ├── main ├── resources │ └── reference.conf └── scala │ └── ru │ └── tinkoff │ └── aerospike │ └── dsl │ ├── Actions.scala │ ├── AdminProvider.scala │ ├── CollectionsProvider.scala │ ├── GetProvider.scala │ ├── HeaderProvider.scala │ ├── MainProvider.scala │ ├── NodeProvider.scala │ ├── PolicyProvider.scala │ ├── Spike.scala │ ├── SpikeImpl.scala │ ├── batchread │ └── BatchReadWrapper.scala │ ├── domain.scala │ ├── errors │ └── AerospikeDSLError.scala │ └── scheme │ └── Scheme.scala └── test └── scala └── ru └── tinkoff ├── ACMock.scala ├── SpikeImplTest.scala └── 
aerospikemacro └── converters ├── BatchReadWrapperTest.scala ├── BinTest.scala ├── CasterTest.scala └── KeyTest.scala /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | project/target 3 | project/project/target 4 | /test-output 5 | 6 | # jira plugin 7 | /atlassian-ide-plugin.xml 8 | 9 | # Intellij Idea 10 | *.iml 11 | .idea/** 12 | .idea 13 | !/.idea/copyright/ 14 | !/.idea/inspectionProfiles/ 15 | !/.idea/runConfigurations/ 16 | !/.idea/scopes/ 17 | !/.idea/codeStyleSettings.xml 18 | !/.idea/dataSources.ids 19 | !/.idea/dataSources.xml 20 | !/.idea/encodings.xml 21 | !/.idea/highlighting.xml 22 | !/.idea/vcs.xml 23 | /.idea_modules 24 | **/project/target 25 | **/project/project/target 26 | 27 | # WorkSheet files 28 | *.sc 29 | -------------------------------------------------------------------------------- /.scalafmt.conf: -------------------------------------------------------------------------------- 1 | style = defaultWithAlign 2 | maxColumn = 120 3 | // http://scalameta.org/scalafmt/#align.openParenCallSite 4 | align.openParenCallSite = true 5 | align.openParenDefnSite = false 6 | // http://scalameta.org/scalafmt/#align.arrowEnumeratorGenerator 7 | align.arrowEnumeratorGenerator = true 8 | danglingParentheses = true 9 | // http://scalameta.org/scalafmt/#optIn.breakChainOnFirstMethodDot 10 | optIn.breakChainOnFirstMethodDot = true 11 | // http://scalameta.org/scalafmt/#lineEndings 12 | lineEndings = unix 13 | // http://scalameta.org/scalafmt/#newlines.penalizeSingleSelectMultiArgList 14 | newlines.penalizeSingleSelectMultiArgList = true 15 | project.git = true 16 | // http://scalameta.org/scalafmt/#binPack.literalArgumentLists 17 | binPack.literalArgumentLists = false 18 | // http://scalameta.org/scalafmt/#runner.optimizer.forceConfigStyleOnOffset 19 | runner.optimizer.forceConfigStyleMinArgCount = 3 20 | // http://scalameta.org/scalafmt/#RedundantBraces 21 | rewrite.redundantBraces.maxLines = 100 22 | 
rewrite.redundantBraces.includeUnitMethods = true 23 | rewrite.redundantBraces.stringInterpolation = true -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright (c) 2016 Tinkoff 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # aerospike-scala-dsl 2 | ![N|Solid](https://avatars0.githubusercontent.com/u/5486989?v=3&s=100) 3 | 4 | To start working with Aerospike using this DSL, add the following sbt dependencies: 5 | ```sh 6 | "ru.tinkoff" %% "aerospike-scala" % "1.1.14", 7 | "com.aerospike" % "aerospike-client" % "3.3.1", // in case you don't have it 8 | "ru.tinkoff" %% "aerospike-scala-example" % "1.1.14" // usage examples 9 | "ru.tinkoff" %% "aerospike-scala-proto" % "1.1.14" // protobuf serialization support 10 | ``` 11 | Since I'm using the Aerospike Java Client (version 3.3.1, recommended on www.aerospike.com), 12 | you need to create a com.aerospike.client.async.AsyncClient to pass it into the `ru.tinkoff.aerospike.dsl.SpikeImpl` class. 13 | `SpikeImpl` has methods to operate with Aerospike for specified types of `Keys` and `Bins`, which is the most common case.
14 | Example for that object creation you can find in `ru.tinkoff.aerospikeexamples.example.AClient` (```sh "ru.tinkoff" % "aerospike-scala-example"```). 15 | Or just follow the `quickstart` instructions. 16 | 17 | 18 | #Protobuf 19 | Serialization is here! [Check it out!](./cookbook/protobuf/ProtoBinWrapper.md) 20 | 21 | # Quickstart 22 | 23 | Add this settings to application.conf file and specify your own host, port, namespace and setName: 24 | 25 | ```sh 26 | ru-tinkoff-aerospike-dsl { 27 | keyWrapper-namespace = "test" 28 | keyWrapper-setName = "test" 29 | example-host = "somehost.com" 30 | example-port = 3000 31 | } 32 | ``` 33 | after that call `ru.tinkoff.aerospikeexamples.example.AClient.client` in your service (or something where you need to communicate with Aerospike) 34 | and pass result of that call into `ru.tinkoff.aerospike.dsl.SpikeImpl`: 35 | ```scala 36 | import scala.concurrent.ExecutionContext.Implicits.global 37 | 38 | val client = AClient.client 39 | val spike = new SpikeImpl(client) 40 | ``` 41 | or you can call `AClient.spikeImpl`, which gives an example of `SpikeImpl` with host and port from `ru-tinkoff-aerospike-dsl` settings 42 | ```scala 43 | val spike = AClient.spikeImpl 44 | ``` 45 | **Note: don't forget to add host and port in application.conf, or you will get an exception from com.aerospike.client lib:** 46 | ```sh 47 | Exception in thread "main" com.aerospike.client.AerospikeException$Connection: Error Code 11: Failed to connect to host(s): 48 | host 3000 Error Code 11: Invalid host: host 3000 49 | ``` 50 | For namespace and setName parameters add 51 | ```scala 52 | implicit val dbc = AClient.dbc 53 | ``` 54 | Now you can use it like this: 55 | ```scala 56 | import ru.tinkoff.aerospike.dsl.{CallKB, SpikeImpl} 57 | import ru.tinkoff.aerospikeexamples.example.AClient 58 | import ru.tinkoff.aerospikescala.domain.SingleBin 59 | import ru.tinkoff.aerospikemacro.converters._ 60 | 61 | import scala.concurrent.Await 62 | import 
scala.concurrent.duration.Duration 63 | import scala.concurrent.ExecutionContext.Implicits.global 64 | 65 | object HelloAerospike extends App { 66 | 67 | val client = AClient.client 68 | val spike = new SpikeImpl(client) 69 | implicit val dbc = AClient.dbc 70 | 71 | Await.result(spike.callKB(CallKB.Put, "testKey", SingleBin("helloName", "helloValue")), Duration.Inf) 72 | } 73 | ``` 74 | This will `Put` the string value "helloValue" with name "helloName" into Aerospike under the `Key` "testKey". For that call, converters will be generated 75 | to convert the passed values into the inner `com.aerospike.clients` values. 76 | ```js 77 | aql> select * from test.test 78 | [ 79 | { 80 | "helloName": "helloValue" 81 | } 82 | ] 83 | 84 | ``` 85 | You can see all available methods in the `SpikeImpl` class yourself. For more information and usage examples, see the [cookbook](./cookbook). 86 | 87 | # DSL schema options 88 | 89 | In `ru.tinkoff.aerospike.dsl.scheme` we have two traits: 90 | - for work with one key type and different types of `Bins` 91 | 92 | ```scala 93 | trait Scheme[K] 94 | ``` 95 | 96 | - for work with one key type and one `Bin` type 97 | 98 | ```scala 99 | trait KBScheme[K, B] 100 | ``` 101 | 102 | If you want to work with one type of Key and different types of Bins - [any Bin types](./cookbook/schemes/anyBinTypes.md) 103 | If you want to work with one type of Key and one type of Bins - [one Bin type](./cookbook/schemes/oneBinType.md) 104 | 105 | # Application 106 | Implemented for AsyncClient. 107 | Recommended to use with `Aerospike 3`. -------------------------------------------------------------------------------- /aerospike-scala-domain/src/main/scala/ru/tinkoff/aerospikescala/domain/ABin.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package ru.tinkoff.aerospikescala.domain 18 | 19 | /** 20 | * @author MarinaSigaeva 21 | * @since 15.11.16 22 | */ 23 | trait ABin[B] 24 | 25 | case class SingleBin[B](name: String, value: B) extends ABin[B] 26 | 27 | case class MBin[B](values: Map[String, B]) extends ABin[B] { 28 | def asOne: List[SingleBin[B]] = values.view.map(v => SingleBin(v._1, v._2)).toList 29 | 30 | } 31 | 32 | case class ByteSegment(bytes: Array[Byte], offset: Int, length: Int) -------------------------------------------------------------------------------- /aerospike-scala-example/src/main/protobuf/designers.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package ru.tinkoff.aerospikeexamples.designers; 4 | 5 | option java_multiple_files = true; 6 | option java_outer_classname = "DesignerProto"; 7 | 8 | message Designer { 9 | string name = 1; 10 | int32 age = 2; 11 | } 12 | 13 | message Designers { 14 | repeated Designer designers = 1; 15 | } -------------------------------------------------------------------------------- /aerospike-scala-example/src/main/scala/ru/tinkoff/aerospikeexamples/example/AClient.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package ru.tinkoff.aerospikeexamples.example 18 | 19 | import com.aerospike.client.Host 20 | import com.aerospike.client.async.{AsyncClient, AsyncClientPolicy} 21 | import com.typesafe.config.{Config, ConfigFactory} 22 | import ru.tinkoff.aerospike.dsl.SpikeImpl 23 | import ru.tinkoff.aerospikemacro.domain.DBCredentials 24 | 25 | import scala.concurrent.ExecutionContext 26 | import scala.util.{Failure, Success, Try} 27 | 28 | /** 29 | * @author MarinaSigaeva 30 | * @since 20.10.16 31 | */ 32 | object AClient { 33 | 34 | val config: Config = ConfigFactory.load() 35 | val hosts: List[String] = Try(List(config.getString("ru-tinkoff-aerospike-dsl.example-host"))) 36 | .getOrElse(throw new Exception("Add host for aerospike in application.conf file")) 37 | val port: Int = Try(config.getInt("ru-tinkoff-aerospike-dsl.example-port")) 38 | .getOrElse(throw new Exception("Add host for aerospike in application.conf file")) 39 | 40 | val namespace: String = Try(config.getString("ru-tinkoff-aerospike-dsl.keyWrapper-namespace")) 41 | .getOrElse(throw new Exception("Add namespace for aerospike in application.conf file")) 42 | val setName: String = Try(config.getString("ru-tinkoff-aerospike-dsl.keyWrapper-setName")) 43 | .getOrElse(throw new Exception("Add setName for aerospike in application.conf file")) 44 | 45 | def dbc = DBCredentials(namespace, setName) 46 | 47 | def client: AsyncClient = create(hosts) 48 | 49 | def create(hs: List[String]): AsyncClient = 50 | Try(new AsyncClient(new AsyncClientPolicy, hs.map(new 
Host(_, port)): _*)) match { 51 | case Success(c) => c 52 | case Failure(th) => throw th 53 | } 54 | 55 | def spikeImpl(implicit ex: ExecutionContext) = new SpikeImpl(client)(ex) 56 | } 57 | -------------------------------------------------------------------------------- /aerospike-scala-example/src/main/scala/ru/tinkoff/aerospikeexamples/example/CleanUp.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package ru.tinkoff.aerospikeexamples.example 18 | 19 | import ru.tinkoff.aerospike.dsl.SpikeImpl 20 | import ru.tinkoff.aerospikemacro.converters.KeyWrapper 21 | 22 | import scala.concurrent.ExecutionContext.Implicits.global 23 | import scala.concurrent.duration.Duration.Inf 24 | import scala.concurrent.{Await, Future} 25 | import scala.language.experimental.macros 26 | 27 | 28 | /** 29 | * @author MarinaSigaeva 30 | * @since 20.10.16 31 | * 32 | * This will delete all data stored in SampleApp 33 | */ 34 | object CleanUp extends App { 35 | 36 | val client = AClient.client 37 | val spike = new SpikeImpl(client) 38 | implicit val dbc = AClient.dbc 39 | 40 | val keys = List("mapKey", "mapSimpleString", "mapStringString", "mapIntString", "mapStringInt", "mapLong", "mapFloat", "mapDouble", "stringKey", 41 | "intBinKey", "floatBinKey", "doubleBinKey", "boolBinKey", "shortBinKey", "longBinKey", "charBinKey", "byteBinKey", 42 | "listStBinKey", "listIntKey", "listLongKey", "listFloatKey", "listDoubleKey", "sampleKey", "hListKey", "oneKey", 43 | "manyKey", "hListKey2", "tuple3Key", "arrayByteKey", "byteSegmKey", "arrayStKey", 44 | "arrayDoubleKey", "arrayFloatKey", "arrayLongKey", "arrayIntKey", "protoDesigner", "protoDesigners", "seqArrBuff", 45 | "arrayBoolKey", "listBoolKey") 46 | 47 | val result = for (key <- keys) yield spike.deleteK(key) 48 | Await.result(Future.sequence(result), Inf) 49 | 50 | } 51 | -------------------------------------------------------------------------------- /aerospike-scala-example/src/main/scala/ru/tinkoff/aerospikeexamples/example/KBSampleApp.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package ru.tinkoff.aerospikeexamples.example 18 | 19 | import ru.tinkoff.aerospike.dsl.SpikeImpl 20 | import ru.tinkoff.aerospikemacro.printer.Printer 21 | import ru.tinkoff.aerospikescala.domain.{MBin, SingleBin} 22 | 23 | import scala.concurrent.Await 24 | import scala.concurrent.duration.Duration.Inf 25 | import scala.language.experimental.macros 26 | 27 | /** 28 | * @author MarinaSigaeva 29 | * @since 26.10.16 30 | */ 31 | object KBSampleApp extends App { 32 | 33 | import scala.concurrent.ExecutionContext.Implicits.global 34 | 35 | val client = AClient.client 36 | val spike = new SpikeImpl(client) 37 | 38 | val myObj = KBSampleScheme(spike) 39 | 40 | private val oneValue = 2 41 | private val manyValues = Map("aName" -> 2, "bName" -> 13) 42 | myObj.putOne("oneKey", SingleBin("oneName", oneValue)) 43 | myObj.putMany("manyKey", MBin(manyValues)) 44 | 45 | val one = Await.result(myObj.getOne("oneKey"), Inf) 46 | assert(one._2.contains(oneValue)) 47 | Printer.printNameValue(one) 48 | 49 | val many = Await.result(myObj.getMany("manyKey"), Inf) 50 | assert(many == manyValues.mapValues(Some.apply)) 51 | Printer.printNameValue(many) 52 | } 53 | -------------------------------------------------------------------------------- /aerospike-scala-example/src/main/scala/ru/tinkoff/aerospikeexamples/example/KBSampleScheme.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the 
"License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package ru.tinkoff.aerospikeexamples.example 18 | 19 | import ru.tinkoff.aerospike.dsl.scheme.KBScheme 20 | import ru.tinkoff.aerospike.dsl.{CallKB, SpikeImpl} 21 | import ru.tinkoff.aerospikemacro.converters.{BinWrapper, KeyWrapper} 22 | import ru.tinkoff.aerospikemacro.domain.DBCredentials 23 | import ru.tinkoff.aerospikescala.domain.{MBin, SingleBin} 24 | 25 | import scala.concurrent.{ExecutionContext, Future} 26 | 27 | /** 28 | * @author MarinaSigaeva 29 | * @since 26.10.16 30 | */ 31 | case class KBSampleScheme(spike: SpikeImpl) extends KBScheme[String, Int] { 32 | implicit val dbc: DBCredentials = AClient.dbc 33 | 34 | def putOne(k: String, a: SingleBin[Int])(implicit e: ExecutionContext): Future[Unit] = 35 | spike.callKB[String, Int](CallKB.Put, k, a) 36 | 37 | /** 38 | * Map represents Bin as list of pairs of (bin name -> value). 39 | * This will put your Map[String, Int] to Aerospike like Key -> List[Bin]. 
40 | * For example Map("a" -> 2, "b" -> 13) for key = "k1" will look like this: 41 | * k1 -> List(Bin("a", 2), Bin("b", 13)) 42 | * */ 43 | def putMany(k: String, a: MBin[Int])(implicit e: ExecutionContext): Future[Unit] = 44 | spike.callKB[String, Int](CallKB.Put, k, a) 45 | 46 | def getOne(k: String)(implicit e: ExecutionContext): Future[(String, Option[Int])] = 47 | spike 48 | .getByKey[String, Int](k, Nil) 49 | .map { optValue => 50 | optValue.flatMap(_._1.find(_._2.nonEmpty)).getOrElse(throw new Exception("No data found")) 51 | } 52 | 53 | def getMany(k: String)(implicit e: ExecutionContext): Future[Map[String, Option[Int]]] = 54 | spike 55 | .getByKey[String, Int](k, Nil) 56 | .map(optValue => optValue.map(_._1).getOrElse(throw new Exception("No data found"))) 57 | } 58 | -------------------------------------------------------------------------------- /aerospike-scala-example/src/main/scala/ru/tinkoff/aerospikeexamples/example/ProtoSample.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2017 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package ru.tinkoff.aerospikeexamples.example 17 | 18 | import ru.tinkoff.aerospikemacro.printer.Printer.{printNameValue => show} 19 | import ru.tinkoff.aerospikemacro.converters.KeyWrapper 20 | import ru.tinkoff.aerospikeproto.wrapper.ProtoBinWrapper 21 | import ru.tinkoff.aerospikescala.domain.SingleBin 22 | 23 | import scala.concurrent.Await 24 | import scala.concurrent.duration.Duration.Inf 25 | import scala.concurrent.ExecutionContext.Implicits.global 26 | import scala.language.experimental.macros 27 | import ru.tinkoff.aerospikeexamples.designers.designers.Designer 28 | import ru.tinkoff.aerospikeexamples.designers.designers.Designers 29 | import ProtoBinWrapper._ 30 | 31 | /** 32 | * @author MarinaSigaeva 33 | * @since 23.03.17 34 | */ 35 | object ProtoSample extends App { 36 | 37 | val db = new ProtoScheme 38 | implicit val dbc = AClient.dbc 39 | 40 | val one = Designer("Karl Lagerfeld", 83) 41 | val many = Designers( 42 | List( 43 | one, 44 | Designer("Diane von Furstenberg", 70), 45 | Designer("Donatella Versace", 61) 46 | ) 47 | ) 48 | 49 | /* '''aql> select * from test.test''' 50 | * [ 51 | * { 52 | * "pDesigner": "0A 0E 4B 61 72 6C 20 4C 61 67 65 72 66 65 6C 64 10 53" 53 | * }, 54 | * { 55 | * "pDesigners": "0A 12 0A 0E 4B 61 72 6C 20 4C 61 67 65 72 66 65 6C 64 10 53 0A 19 0A 15 44 69 61 6E 65 20 76 6F 6E 20 46 75 72 73 74 65 6E 62 65 72 67 10 46 0A 15 0A 11 44 6F 6E 61 74 65 6C 6C 61 20 56 65 72 73 61 63 65 10 3D" 56 | * } 57 | * ] 58 | */ 59 | 60 | db.put("protoDesigner", SingleBin("pDesigner", one)) 61 | db.put("protoDesigners", SingleBin("pDesigners", many)) 62 | 63 | val oneDesigner = Await.result(db.get[Designer]("protoDesigner"), Inf) 64 | val manyDesigners = Await.result(db.get[Designers]("protoDesigners"), Inf) 65 | 66 | /* -------------------- 67 | * '''oneDesigner''' => Map(pDesigner -> Some(name: "Karl Lagerfeld" 68 | * age: 83 69 | * )) 70 | * -------------------- 71 | * '''manyDesigners''' => Map(pDesigners -> Some(designers { 
72 | * name: "Karl Lagerfeld" 73 | * age: 83 74 | * } 75 | * designers { 76 | * name: "Diane von Furstenberg" 77 | * age: 70 78 | * } 79 | * designers { 80 | * name: "Donatella Versace" 81 | * age: 61 82 | * } 83 | * )) 84 | */ 85 | assert(oneDesigner.values.exists(_.contains(one))) 86 | assert(manyDesigners.values.exists(_.contains(many))) 87 | show(oneDesigner) 88 | show(manyDesigners) 89 | } 90 | -------------------------------------------------------------------------------- /aerospike-scala-example/src/main/scala/ru/tinkoff/aerospikeexamples/example/ProtoScheme.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2017 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | // @formatter:off 18 | package ru.tinkoff.aerospikeexamples.example 19 | 20 | import com.aerospike.client.policy.WritePolicy 21 | import com.trueaccord.lenses.Updatable 22 | import com.trueaccord.scalapb.{GeneratedMessage, Message} 23 | import ru.tinkoff.aerospike.dsl.CallKB 24 | import ru.tinkoff.aerospikemacro.converters.KeyWrapper 25 | import ru.tinkoff.aerospikescala.domain.SingleBin 26 | 27 | import scala.concurrent.{ExecutionContext, Future} 28 | import ru.tinkoff.aerospikeproto.wrapper.ProtoBinWrapper 29 | 30 | /** 31 | * @author MarinaSigaeva 32 | * @since 23.03.17 33 | */ 34 | class ProtoScheme { 35 | 36 | // Crutch. 
See https://github.com/TinkoffCreditSystems/aerospike-scala/issues/15 37 | private def client(implicit ex: ExecutionContext) = AClient.spikeImpl 38 | 39 | def put[K, I <: GeneratedMessage with Message[I] with Updatable[I], R <: ProtoBinWrapper[I]] 40 | (k: K, bin: SingleBin[I])(implicit 41 | kw: KeyWrapper[K], 42 | bw: R, 43 | e: ExecutionContext, 44 | pw: Option[WritePolicy] = None): Future[Unit] = { 45 | client.callKB[K, I](CallKB.Put, k, bin)(kw, bw, pw) 46 | } 47 | 48 | def absGet[K, I <: GeneratedMessage with Message[I] with Updatable[I], R <: ProtoBinWrapper[I]] 49 | (k: K)(implicit 50 | kw: KeyWrapper[K], 51 | bw: R, 52 | e: ExecutionContext, 53 | pw: Option[WritePolicy] = None): Future[Map[String, Option[I]]] = { 54 | client 55 | .getByKey[K, I](k) 56 | .map(r => r.map(_._1).getOrElse(throw new Exception("No data found"))) 57 | } 58 | 59 | def get[I <: GeneratedMessage with Message[I] with Updatable[I]] 60 | (k: String)(implicit 61 | kw: KeyWrapper[String], 62 | bw: ProtoBinWrapper[I], 63 | e: ExecutionContext, 64 | pw: Option[WritePolicy] = None): Future[Map[String, Option[I]]] = 65 | absGet[String, I, ProtoBinWrapper[I]](k)(kw, bw, e, pw) 66 | } 67 | -------------------------------------------------------------------------------- /aerospike-scala-example/src/main/scala/ru/tinkoff/aerospikeexamples/example/SampleApp.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package ru.tinkoff.aerospikeexamples.example 18 | 19 | import ru.tinkoff.aerospikemacro.printer.Printer 20 | import ru.tinkoff.aerospikescala.domain.{ByteSegment, SingleBin} 21 | import shapeless._ 22 | 23 | import scala.concurrent.Await 24 | import scala.concurrent.duration.Duration.Inf 25 | import scala.language.experimental.macros 26 | 27 | /** 28 | * @author MarinaSigaeva 29 | * @since 20.10.16 30 | */ 31 | object SampleApp extends App { 32 | 33 | import scala.concurrent.ExecutionContext.Implicits.global 34 | 35 | val spike = AClient.spikeImpl 36 | 37 | val myObj = SampleScheme(spike) 38 | 39 | myObj.putMapSimpleString( 40 | "mapSimpleString", 41 | SingleBin("TmapBinName", Map(Sample("t1", 3) -> "v1", Sample("t2", 2) -> "v2", Sample("t3", 1) -> "v3")) 42 | ) 43 | // @formatter:off 44 | myObj.putMap("mapKey", SingleBin("mapBinName", Map("a" -> "v1", "b" -> "v2", "c" -> "v3"))) 45 | myObj.putMapIS("mapIntString", SingleBin("mapISName", Map(9 -> "v1", 2 -> "v2", 3 -> "v3"))) 46 | myObj.putMapSI("mapStringInt", SingleBin("mapSIName", Map("a" -> 33, "b" -> 44, "c" -> 99))) 47 | myObj.putMapLong("mapLong", SingleBin("mapLongName", Map("a" -> 30030L, "b" -> 40004L, "c" -> 90009L))) 48 | myObj.putMapFloat("mapFloat", SingleBin("mapFloatName", Map("a" -> 30.3F, "b" -> 400.04F, "c" -> 9.01F))) 49 | myObj.putMapDouble("mapDouble", SingleBin("mapDoubleName", Map("a" -> 300.30, "b" -> 4000.4, "c" -> 90.09))) 50 | // @formatter:on 51 | 52 | myObj.putString("stringKey", SingleBin("stringBinName", "strBinValue")) 53 | myObj.putInt("intBinKey", SingleBin("intBinName", 202)) 54 | myObj.putFloat("floatBinKey", SingleBin("floatBinName", 1.11F)) 55 | myObj.putDouble("doubleBinKey", SingleBin("doubleBinName", 3.3)) 56 | myObj.putBoolean("boolBinKey", SingleBin("boolBinName", true)) 57 | myObj.putShort("shortBinKey", SingleBin("shortBinName", 2)) 58 | 
myObj.putLong("longBinKey", SingleBin("longBinName", 9000900L)) 59 | myObj.putChar("charBinKey", SingleBin("charBinName", 'h')) 60 | myObj.putByte("byteBinKey", SingleBin("byteBinName", Byte.MinValue)) 61 | 62 | myObj.putSample("sampleKey", SingleBin("sampleBin", Sample("sampleName", 2))) 63 | myObj.putHList("hListKey", SingleBin("hListBin", "hlist" :: 2 :: 3 :: HNil)) 64 | myObj.putHList2("hListKey2", SingleBin("hListBin2", "hlist" :: 2 :: 3.12F :: List("a", "b") :: List(12, 23) :: HNil)) 65 | myObj.putTuple("tuple3Key", SingleBin("tuple3Bin", ("abc", 2L, 3.12))) 66 | 67 | val mapSimpleString = Await.result(myObj.getMapSimpleString("mapSimpleString"), Inf) 68 | Printer.printNameValue(mapSimpleString) 69 | 70 | val mapStringString = Await.result(myObj.getMap("mapKey"), Inf) 71 | Printer.printNameValue(mapStringString) 72 | 73 | val mapIntString = Await.result(myObj.getMapIS("mapIntString"), Inf) 74 | Printer.printNameValue(mapIntString) 75 | 76 | val mapStringInt = Await.result(myObj.getMapSI("mapStringInt"), Inf) 77 | Printer.printNameValue(mapStringInt) 78 | 79 | val mapLong = Await.result(myObj.getMapLong("mapLong"), Inf) 80 | Printer.printNameValue(mapLong) 81 | 82 | val mapFloat = Await.result(myObj.getMapFloat("mapFloat"), Inf) 83 | Printer.printNameValue(mapFloat) 84 | 85 | val mapDouble = Await.result(myObj.getMapLong("mapDouble"), Inf) 86 | Printer.printNameValue(mapDouble) 87 | 88 | val strings = Await.result(myObj.getString("stringKey"), Inf) 89 | Printer.printNameValue(strings) 90 | 91 | val ints = Await.result(myObj.getInt("intBinKey"), Inf) 92 | Printer.printNameValue(ints) 93 | 94 | val floats = Await.result(myObj.getFloat("floatBinKey"), Inf) 95 | Printer.printNameValue(floats) 96 | 97 | val doubles = Await.result(myObj.getDouble("doubleBinKey"), Inf) 98 | Printer.printNameValue(doubles) 99 | 100 | val booleans = Await.result(myObj.getBoolean("boolBinKey"), Inf) 101 | Printer.printNameValue(booleans) 102 | 103 | val shorts = 
Await.result(myObj.getShort("shortBinKey"), Inf) 104 | Printer.printNameValue(shorts) 105 | 106 | val longs = Await.result(myObj.getLong("longBinKey"), Inf) 107 | Printer.printNameValue(longs) 108 | 109 | val chars = Await.result(myObj.getChar("charBinKey"), Inf) 110 | Printer.printNameValue(chars) 111 | 112 | val bytes = Await.result(myObj.getByte("byteBinKey"), Inf) 113 | Printer.printNameValue(bytes) 114 | 115 | val sample = Await.result(myObj.getSample("sampleKey"), Inf) 116 | Printer.printNameValue(sample) 117 | 118 | val hlist = Await.result(myObj.getHList("hListKey"), Inf) 119 | Printer.printNameValue(hlist) 120 | 121 | val hlist2 = Await.result(myObj.getHList2("hListKey2"), Inf) 122 | Printer.printNameValue(hlist2) 123 | 124 | /* val byteSegment = Await.result(myObj.getArrayByte("byteSegmKey"), Inf) 125 | Printer.printNameValue(byteSegment)*/ 126 | 127 | /* val tuple3 = Await.result(myObj.getTuple("tuple3Key"), Inf) 128 | Printer.printNameValue(tuple3)*/ 129 | } 130 | -------------------------------------------------------------------------------- /aerospike-scala-example/src/main/scala/ru/tinkoff/aerospikeexamples/example/SampleCollectionsApp.scala: -------------------------------------------------------------------------------- 1 | package ru.tinkoff.aerospikeexamples.example 2 | 3 | import ru.tinkoff.aerospikemacro.printer.Printer 4 | import ru.tinkoff.aerospikescala.domain.{ByteSegment, SingleBin} 5 | import shapeless._ 6 | 7 | import scala.collection.mutable.ArrayBuffer 8 | import scala.concurrent.Await 9 | import scala.concurrent.duration.Duration.Inf 10 | import scala.language.experimental.macros 11 | 12 | /** 13 | * @author MarinaSigaeva 14 | * @since 04.04.17 15 | */ 16 | object SampleCollectionsApp extends App { 17 | 18 | import scala.concurrent.ExecutionContext.Implicits.global 19 | 20 | val spike = AClient.spikeImpl 21 | 22 | val myObj = SampleScheme(spike) 23 | 24 | myObj.putListSt("listStBinKey", SingleBin("listStringBin", List("a", "b"))) 25 
| myObj.putListInt("listIntKey", SingleBin("listIntBin", List(1, 2, 3, 4))) 26 | myObj.putListLong("listLongKey", SingleBin("listLongBin", List(1000L, 2000L, 3000L, 4000L))) 27 | myObj.putListFloat("listFloatKey", SingleBin("listFloatBin", List(1.12F, 2.13F, 3.5F, 4.5F))) 28 | myObj.putListDouble("listDoubleKey", SingleBin("listDoubleBin", List(12.11, 12.13, 23.5, 46.5))) 29 | myObj.putListBoolean("listBoolKey", SingleBin("listBoolBin", List(true, false, false, true))) 30 | myObj.putArrayString("arrayStKey", SingleBin("arrayStBin", Array("abcd", "efgh", "ijkl"))) 31 | myObj.putArrayInt("arrayIntKey", SingleBin("arrayInt", Array(3, 6, 8))) 32 | myObj.putArrayLong("arrayLongKey", SingleBin("arrayLong", Array(1L, 56L, 98L))) 33 | myObj.putArrayFloat("arrayFloatKey", SingleBin("arrayFloat", Array(1.12F, 2.13F, 3.5F))) 34 | myObj.putArrayDouble("arrayDoubleKey", SingleBin("arrayDouble", Array(12.13, 23.5, 46.5))) 35 | myObj.putArrayByte("arrayByteKey", SingleBin("arrayByteBin", Array(Byte.MinValue, Byte.MaxValue, Byte.MinValue))) 36 | myObj.putArrayBoolean("arrayBoolKey", SingleBin("arrayBoolBin", Array(true, false, true))) 37 | myObj.putSeqArrayBuffer("seqArrBuff", SingleBin("ww", Seq(ArrayBuffer(1.2, 3.1, 5.6)))) 38 | // myObj.putByteSegment("byteSegmKey", SingleBin("byteSegment", ByteSegment(Array(Byte.MinValue, Byte.MaxValue), 12, 33))) 39 | 40 | val listStrs = Await.result(myObj.getListSt("listStBinKey"), Inf) 41 | Printer.printNameValue(listStrs) 42 | 43 | val listInt = Await.result(myObj.getListInt("listIntKey"), Inf) 44 | Printer.printNameValue(listInt) 45 | 46 | val listLong = Await.result(myObj.getListLong("listLongKey"), Inf) 47 | Printer.printNameValue(listLong) 48 | 49 | val listFloat = Await.result(myObj.getListFloat("listFloatKey"), Inf) 50 | Printer.printNameValue(listFloat) 51 | 52 | val listDouble = Await.result(myObj.getListDouble("listDoubleKey"), Inf) 53 | Printer.printNameValue(listDouble) 54 | 55 | val listBoolean = 
Await.result(myObj.getListBoolean("listBoolKey"), Inf) 56 | Printer.printNameValue(listBoolean) 57 | 58 | val arrayString = Await.result(myObj.getArrayString("arrayStKey"), Inf) 59 | Printer.printNameValue(arrayString) 60 | 61 | val arrayInt = Await.result(myObj.getArrayInt("arrayIntKey"), Inf) 62 | Printer.printNameValue(arrayInt) 63 | 64 | val arrayLong = Await.result(myObj.getArrayLong("arrayLongKey"), Inf) 65 | Printer.printNameValue(arrayLong) 66 | 67 | val arrayFloat = Await.result(myObj.getArrayFloat("arrayFloatKey"), Inf) 68 | Printer.printNameValue(arrayFloat) 69 | 70 | val arrayDouble = Await.result(myObj.getArrayDouble("arrayDoubleKey"), Inf) 71 | Printer.printNameValue(arrayDouble) 72 | 73 | val arrayBoolean = Await.result(myObj.getArrayBoolean("arrayBoolKey"), Inf) 74 | Printer.printNameValue(arrayBoolean) 75 | 76 | val arrayByteBin = Await.result(myObj.getArrayByte("arrayByteKey"), Inf) 77 | Printer.printNameValue(arrayByteBin) 78 | 79 | /* val seqArrBuff = Await.result(myObj.getSeqArrayBuffer("seqArrBuff"), Inf) 80 | Printer.printNameValue(seqArrBuff)*/ 81 | } 82 | -------------------------------------------------------------------------------- /aerospike-scala-example/src/main/scala/ru/tinkoff/aerospikeexamples/example/scheme.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package ru.tinkoff.aerospikeexamples.example 18 | 19 | import com.aerospike.client.Value.MapValue 20 | import ru.tinkoff.aerospike.dsl.scheme.Scheme 21 | import ru.tinkoff.aerospike.dsl.{CallKB, SpikeImpl} 22 | import ru.tinkoff.aerospikemacro.converters.{BinWrapper, KeyWrapper} 23 | import ru.tinkoff.aerospikescala.domain.{ByteSegment, SingleBin} 24 | import shapeless._ 25 | 26 | import scala.collection.JavaConverters._ 27 | import com.aerospike.client.Value 28 | import ru.tinkoff.aerospikemacro.domain.DBCredentials 29 | 30 | import scala.collection.mutable.ArrayBuffer 31 | import scala.concurrent.{ExecutionContext, Future} 32 | import scala.language.experimental.macros 33 | import scala.util.matching.Regex 34 | 35 | /** 36 | * @author MarinaSigaeva 37 | * @since 26.09.16 38 | * 39 | * That Scheme example demonstrates how to work with Key of type String and different Bin types 40 | */ 41 | case class Sample(name: String, i: Int) 42 | 43 | case class SampleScheme(spike: SpikeImpl) extends Scheme[String] { 44 | implicit val dbc: DBCredentials = AClient.dbc 45 | 46 | /* This is custom wrapper for Bin of type Simple. By default all case classes stored in Aerospike as Map[String, Any], 47 | where keys are names of parameters. 
For example Simple(name = "sampleName", i = 2) will be stored as Map("name" -> "sampleName", "i" -> 2)& 48 | Inside com.aerospike.client.Record it will look like this: 49 | bins:(sampleBin:{name=sampleName, i=2}) 50 | To get your Sample value properly you need to write fetch() function as it is shown below 51 | */ 52 | implicit val sampleWrap = new BinWrapper[Sample] { 53 | override def fetch(any: Any): Option[Sample] = 54 | scala.util.Try { 55 | Value.getFromRecordObject(any) match { 56 | case m: MapValue => 57 | m.getObject match { 58 | case ms: java.util.Map[String @unchecked, Any @unchecked] => 59 | Sample(ms.get("name").toString, ms.get("i").toString.toInt) 60 | } 61 | } 62 | }.toOption 63 | } 64 | 65 | /* This is custom wrapper for Bin of type Map[Sample, String] 66 | For example for Map(Sample(t1,3) -> v1, Sample(t2,2) -> v2, Sample(t3,1) -> v3) Bin will 67 | be stored like com.aerospike.client.Value.MapValue, where keys are Sample(...).toString() 68 | So inside com.aerospike.client.Record it will look like this: 69 | bins:(BIN_NAME:{Sample(t1,3)=v1, Sample(t2,2)=v2, Sample(t3,1)=v3}) 70 | And if you want to get your Sample type keys to be unwrapped properly you need to write fetch() function as it is shown below 71 | */ 72 | implicit val sampleMapWrap = new BinWrapper[Map[Sample, String]] { 73 | val rex = "Sample\\((\\w+)\\,(\\d+)\\)" 74 | val trRex: Regex = rex.r 75 | 76 | override def toValue(v: Map[Sample, String]): MapValue = 77 | new MapValue(v.map { case (sample, value) => sample.toString -> value }.asJava) 78 | 79 | override def fetch(any: Any): Option[Map[Sample, String]] = 80 | Value.getFromRecordObject(any) match { 81 | case m: MapValue => 82 | m.getObject match { 83 | case ms: java.util.Map[Any @unchecked, String @unchecked] => 84 | scala.util 85 | .Try(ms.asScala.map { 86 | case (tr, v) if tr.toString.matches("Sample\\((\\w+)\\,(\\d+)\\)") => 87 | tr.toString match { 88 | case trRex(n, i) => Sample(n, i.toInt) -> v 89 | } 90 | }.toMap) 91 | 
.toOption 92 | case _ => None 93 | } 94 | case _ => None 95 | } 96 | } 97 | 98 | // Here is how to put basic types (you can use it as example and do the same for different kind of operations like Prepend, Add and so on) 99 | def putString(k: String, a: SingleBin[String])(implicit e: ExecutionContext): Future[Unit] = 100 | spike.callKB[String, String](CallKB.Put, k, a) 101 | 102 | def putInt(k: String, a: SingleBin[Int])(implicit e: ExecutionContext): Future[Unit] = 103 | spike.callKB[String, Int](CallKB.Put, k, a) 104 | 105 | def putFloat(k: String, a: SingleBin[Float])(implicit e: ExecutionContext): Future[Unit] = 106 | spike.callKB[String, Float](CallKB.Put, k, a) 107 | 108 | def putDouble(k: String, a: SingleBin[Double])(implicit e: ExecutionContext): Future[Unit] = 109 | spike.callKB[String, Double](CallKB.Put, k, a) 110 | 111 | def putBoolean(k: String, a: SingleBin[Boolean])(implicit e: ExecutionContext): Future[Unit] = 112 | spike.callKB[String, Boolean](CallKB.Put, k, a) 113 | 114 | def putShort(k: String, a: SingleBin[Short])(implicit e: ExecutionContext): Future[Unit] = 115 | spike.callKB[String, Short](CallKB.Put, k, a) 116 | 117 | def putLong(k: String, a: SingleBin[Long])(implicit e: ExecutionContext): Future[Unit] = 118 | spike.callKB[String, Long](CallKB.Put, k, a) 119 | 120 | def putChar(k: String, a: SingleBin[Char])(implicit e: ExecutionContext): Future[Unit] = 121 | spike.callKB[String, Char](CallKB.Put, k, a) 122 | 123 | def putByte(k: String, a: SingleBin[Byte])(implicit e: ExecutionContext): Future[Unit] = 124 | spike.callKB[String, Byte](CallKB.Put, k, a) 125 | 126 | def putMap(k: String, a: SingleBin[Map[String, String]])( 127 | implicit e: ExecutionContext 128 | ): Future[Unit] = 129 | spike.callKB[String, Map[String, String]](CallKB.Put, k, a) 130 | 131 | def putMapSimpleString(k: String, a: SingleBin[Map[Sample, String]])( 132 | implicit e: ExecutionContext 133 | ): Future[Unit] = 134 | spike.callKB[String, Map[Sample, 
String]](CallKB.Put, k, a) 135 | 136 | /* Note, Aerospikes AQL is good for values with String types. So if you want to store Map with key of any other type - you will see nothing in terminal. 137 | But you can use function, which gets that value by key (for example getMapIS(...) below) and print it to be sure it all works fine. 138 | An example for Map[Int, String] is in SampleApp.scala: 139 | myObj.putMapIS("mapIntString", SingleBin("mapISName", Map(9 -> "v1", 2 -> "v2", 3 -> "v3"))) 140 | */ 141 | 142 | def putMapIS(k: String, a: SingleBin[Map[Int, String]])( 143 | implicit e: ExecutionContext 144 | ): Future[Unit] = 145 | spike.callKB[String, Map[Int, String]](CallKB.Put, k, a) 146 | 147 | def putMapSI(k: String, a: SingleBin[Map[String, Int]])( 148 | implicit e: ExecutionContext 149 | ): Future[Unit] = 150 | spike.callKB[String, Map[String, Int]](CallKB.Put, k, a) 151 | 152 | def putMapLong(k: String, a: SingleBin[Map[String, Long]])( 153 | implicit e: ExecutionContext 154 | ): Future[Unit] = 155 | spike.callKB[String, Map[String, Long]](CallKB.Put, k, a) 156 | 157 | def putMapFloat(k: String, a: SingleBin[Map[String, Float]])( 158 | implicit e: ExecutionContext 159 | ): Future[Unit] = 160 | spike.callKB[String, Map[String, Float]](CallKB.Put, k, a) 161 | 162 | def putMapDouble(k: String, a: SingleBin[Map[String, Double]])( 163 | implicit e: ExecutionContext 164 | ): Future[Unit] = 165 | spike.callKB[String, Map[String, Double]](CallKB.Put, k, a) 166 | 167 | /* HList is stored as Map[String, Any] in Aerospike's MapValue. 
168 | For example "hlist" :: 2 :: 3 :: HNil will be stored as Map("0" -> "hlist", "1" -> 2, "2" -> 3) 169 | */ 170 | def putHList(k: String, a: SingleBin[String :: Int :: Int :: HNil])( 171 | implicit e: ExecutionContext 172 | ): Future[Unit] = 173 | spike.callKB[String, String :: Int :: Int :: HNil](CallKB.Put, k, a) 174 | 175 | def putHList2( 176 | k: String, 177 | a: SingleBin[String :: Int :: Float :: List[String] :: List[Int] :: HNil] 178 | )(implicit e: ExecutionContext): Future[Unit] = 179 | spike.callKB[String, String :: Int :: Float :: List[String] :: List[Int] :: HNil](CallKB.Put, k, a) 180 | 181 | def putListSt(k: String, a: SingleBin[List[String]])( 182 | implicit e: ExecutionContext 183 | ): Future[Unit] = 184 | spike.callKB[String, List[String]](CallKB.Put, k, a) 185 | 186 | def putListInt(k: String, a: SingleBin[List[Int]])(implicit e: ExecutionContext): Future[Unit] = 187 | spike.callKB[String, List[Int]](CallKB.Put, k, a) 188 | 189 | def putListLong(k: String, a: SingleBin[List[Long]])( 190 | implicit e: ExecutionContext 191 | ): Future[Unit] = 192 | spike.callKB[String, List[Long]](CallKB.Put, k, a) 193 | 194 | def putListFloat(k: String, a: SingleBin[List[Float]])( 195 | implicit e: ExecutionContext 196 | ): Future[Unit] = 197 | spike.callKB[String, List[Float]](CallKB.Put, k, a) 198 | 199 | def putListDouble(k: String, a: SingleBin[List[Double]])( 200 | implicit e: ExecutionContext 201 | ): Future[Unit] = 202 | spike.callKB[String, List[Double]](CallKB.Put, k, a) 203 | 204 | def putListBoolean(k: String, a: SingleBin[List[Boolean]])( 205 | implicit e: ExecutionContext 206 | ): Future[Unit] = 207 | spike.callKB[String, List[Boolean]](CallKB.Put, k, a) 208 | 209 | def putSample(k: String, a: SingleBin[Sample])(implicit e: ExecutionContext): Future[Unit] = 210 | spike.callKB[String, Sample](CallKB.Put, k, a) 211 | 212 | def putTuple(k: String, a: SingleBin[(String, Long, Double)])( 213 | implicit e: ExecutionContext 214 | ): Future[Unit] = 215 | 
spike.callKB[String, (String, Long, Double)](CallKB.Put, k, a) 216 | 217 | def putSeqArrayBuffer(k: String, a: SingleBin[Seq[ArrayBuffer[Double]]])( 218 | implicit e: ExecutionContext 219 | ): Future[Unit] = 220 | spike.callKB[String, Seq[ArrayBuffer[Double]]](CallKB.Put, k, a) 221 | 222 | def putArrayByte(k: String, a: SingleBin[Array[Byte]])( 223 | implicit e: ExecutionContext 224 | ): Future[Unit] = 225 | spike.callKB[String, Array[Byte]](CallKB.Put, k, a) 226 | 227 | def putArrayBoolean(k: String, a: SingleBin[Array[Boolean]])( 228 | implicit e: ExecutionContext 229 | ): Future[Unit] = 230 | spike.callKB[String, Array[Boolean]](CallKB.Put, k, a) 231 | 232 | def putArrayString(k: String, a: SingleBin[Array[String]])( 233 | implicit e: ExecutionContext 234 | ): Future[Unit] = 235 | spike.callKB[String, Array[String]](CallKB.Put, k, a) 236 | 237 | def putArrayInt(k: String, a: SingleBin[Array[Int]])( 238 | implicit e: ExecutionContext 239 | ): Future[Unit] = 240 | spike.callKB[String, Array[Int]](CallKB.Put, k, a) 241 | 242 | def putArrayLong(k: String, a: SingleBin[Array[Long]])( 243 | implicit e: ExecutionContext 244 | ): Future[Unit] = 245 | spike.callKB[String, Array[Long]](CallKB.Put, k, a) 246 | 247 | def putArrayFloat(k: String, a: SingleBin[Array[Float]])( 248 | implicit e: ExecutionContext 249 | ): Future[Unit] = 250 | spike.callKB[String, Array[Float]](CallKB.Put, k, a) 251 | 252 | def putArrayDouble(k: String, a: SingleBin[Array[Double]])( 253 | implicit e: ExecutionContext 254 | ): Future[Unit] = 255 | spike.callKB[String, Array[Double]](CallKB.Put, k, a) 256 | 257 | def putByteSegment(k: String, a: SingleBin[ByteSegment])( 258 | implicit e: ExecutionContext 259 | ): Future[Unit] = 260 | spike.callKB[String, ByteSegment](CallKB.Put, k, a) 261 | 262 | def getString(k: String)(implicit e: ExecutionContext): Future[String] = 263 | spike 264 | .getByKey[String, String](k) 265 | .map( 266 | _.flatMap(e => e._1.values.filter(_.nonEmpty).head) 267 | 
.getOrElse(throw new Exception("No data found")) 268 | ) 269 | 270 | def getInt(k: String)(implicit e: ExecutionContext): Future[Int] = 271 | spike 272 | .getByKey[String, Int](k) 273 | .map( 274 | _.flatMap(e => e._1.values.filter(_.nonEmpty).head) 275 | .getOrElse(throw new Exception("No data found")) 276 | ) 277 | 278 | def getFloat(k: String)(implicit e: ExecutionContext): Future[Float] = 279 | spike 280 | .getByKey[String, Float](k) 281 | .map( 282 | _.flatMap(v => v._1.values.filter(_.nonEmpty).head) 283 | .getOrElse(throw new Exception("No data found")) 284 | ) 285 | 286 | def getDouble(k: String)(implicit e: ExecutionContext): Future[Double] = 287 | spike 288 | .getByKey[String, Double](k) 289 | .map( 290 | _.flatMap(e => e._1.values.filter(_.nonEmpty).head) 291 | .getOrElse(throw new Exception("No data found")) 292 | ) 293 | 294 | def getBoolean(k: String)(implicit e: ExecutionContext): Future[Boolean] = 295 | spike 296 | .getByKey[String, Boolean](k) 297 | .map( 298 | _.flatMap(e => e._1.values.filter(_.nonEmpty).head) 299 | .getOrElse(throw new Exception("No data found")) 300 | ) 301 | 302 | def getShort(k: String)(implicit e: ExecutionContext): Future[Short] = 303 | spike 304 | .getByKey[String, Short](k) 305 | .map( 306 | _.flatMap(e => e._1.values.filter(_.nonEmpty).head) 307 | .getOrElse(throw new Exception("No data found")) 308 | ) 309 | 310 | def getLong(k: String)(implicit e: ExecutionContext): Future[Long] = 311 | spike 312 | .getByKey[String, Long](k) 313 | .map( 314 | _.flatMap(e => e._1.values.filter(_.nonEmpty).head) 315 | .getOrElse(throw new Exception("No data found")) 316 | ) 317 | 318 | def getChar(k: String)(implicit e: ExecutionContext): Future[Char] = 319 | spike 320 | .getByKey[String, Char](k) 321 | .map( 322 | _.flatMap(e => e._1.values.filter(_.nonEmpty).head) 323 | .getOrElse(throw new Exception("No data found")) 324 | ) 325 | 326 | def getByte(k: String)(implicit e: ExecutionContext): Future[Byte] = 327 | spike 328 | 
// Continuation of a getter whose signature starts above this chunk: reads the
// record stored under key `k` and returns the first non-empty bin value.
.getByKey[String, Byte](k)
  .map(
    _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
      .getOrElse(throw new Exception("No data found"))
  )

  // All getters below share one pattern: fetch the record for key `k`, take the
  // first non-empty bin value of the requested type, or fail the Future with an
  // exception if nothing usable was stored. A BinWrapper[T] implicit is
  // materialized by macro at each call site for the concrete value type.
  // NOTE(review): `.head` on the filtered values assumes at least one non-empty
  // bin exists once the Option is non-empty — TODO confirm against getByKey.

  /** Reads an HList of (String, Int, Int) stored as a map-valued bin. */
  def getHList(k: String)(implicit e: ExecutionContext): Future[String :: Int :: Int :: HNil] =
    spike
      .getByKey[String, String :: Int :: Int :: HNil](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  /** Reads a longer mixed HList, including collection elements. */
  def getHList2(k: String)(
      implicit e: ExecutionContext
  ): Future[String :: Int :: Float :: List[String] :: List[Int] :: HNil] =
    spike
      .getByKey[String, String :: Int :: Float :: List[String] :: List[Int] :: HNil](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  /** Reads a case-class value (stored as a MapValue by BinWrapper). */
  def getSample(k: String)(implicit e: ExecutionContext): Future[Sample] =
    spike
      .getByKey[String, Sample](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  /** Reads a 3-tuple (stored as a MapValue keyed by element index). */
  def getTuple(k: String)(implicit e: ExecutionContext): Future[(String, Long, Double)] =
    spike
      .getByKey[String, (String, Long, Double)](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  // --- List-typed getters ---

  def getListSt(k: String)(implicit e: ExecutionContext): Future[List[String]] =
    spike
      .getByKey[String, List[String]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  def getListInt(k: String)(implicit e: ExecutionContext): Future[List[Int]] =
    spike
      .getByKey[String, List[Int]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  def getListLong(k: String)(implicit e: ExecutionContext): Future[List[Long]] =
    spike
      .getByKey[String, List[Long]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  def getListFloat(k: String)(implicit e: ExecutionContext): Future[List[Float]] =
    spike
      .getByKey[String, List[Float]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  def getListDouble(k: String)(implicit e: ExecutionContext): Future[List[Double]] =
    spike
      .getByKey[String, List[Double]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  def getListBoolean(k: String)(implicit e: ExecutionContext): Future[List[Boolean]] =
    spike
      .getByKey[String, List[Boolean]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  /** Reads a nested mutable-buffer collection. */
  def getSeqArrayBuffer(
      k: String
  )(implicit e: ExecutionContext): Future[Seq[ArrayBuffer[Double]]] =
    spike
      .getByKey[String, Seq[ArrayBuffer[Double]]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  // --- Array-typed getters ---

  def getArrayByte(k: String)(implicit e: ExecutionContext): Future[Array[Byte]] =
    spike
      .getByKey[String, Array[Byte]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  def getArrayString(k: String)(implicit e: ExecutionContext): Future[Array[String]] =
    spike
      .getByKey[String, Array[String]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  def getArrayInt(k: String)(implicit e: ExecutionContext): Future[Array[Int]] =
    spike
      .getByKey[String, Array[Int]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  def getArrayLong(k: String)(implicit e: ExecutionContext): Future[Array[Long]] =
    spike
      .getByKey[String, Array[Long]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  def getArrayFloat(k: String)(implicit e: ExecutionContext): Future[Array[Float]] =
    spike
      .getByKey[String, Array[Float]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  def getArrayDouble(k: String)(implicit e: ExecutionContext): Future[Array[Double]] =
    spike
      .getByKey[String, Array[Double]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  def getArrayBoolean(k: String)(implicit e: ExecutionContext): Future[Array[Boolean]] =
    spike
      .getByKey[String, Array[Boolean]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  /** Reads a raw byte segment (offset/length view over a byte array). */
  def getByteSegment(k: String)(implicit e: ExecutionContext): Future[ByteSegment] =
    spike
      .getByKey[String, ByteSegment](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  // --- Map-typed getters ---

  def getMap(k: String)(implicit e: ExecutionContext): Future[Map[String, String]] =
    spike
      .getByKey[String, Map[String, String]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  def getMapIS(k: String)(implicit e: ExecutionContext): Future[Map[Int, String]] =
    spike
      .getByKey[String, Map[Int, String]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  def getMapSI(k: String)(implicit e: ExecutionContext): Future[Map[String, Int]] =
    spike
      .getByKey[String, Map[String, Int]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  def getMapLong(k: String)(implicit e: ExecutionContext): Future[Map[String, Long]] =
    spike
      .getByKey[String, Map[String, Long]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  def getMapFloat(k: String)(implicit e: ExecutionContext): Future[Map[String, Float]] =
    spike
      .getByKey[String, Map[String, Float]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  def getMapDouble(k: String)(implicit e: ExecutionContext): Future[Map[String, Double]] =
    spike
      .getByKey[String, Map[String, Double]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

  /**
    * Reads a map keyed by a case class. Unlike the getters above, the
    * BinWrapper is taken explicitly because no default macro materialization
    * covers a `Map[Sample, String]` key type.
    */
  def getMapSimpleString(k: String)(
      implicit bC: BinWrapper[Map[Sample, String]],
      e: ExecutionContext
  ): Future[Map[Sample, String]] =
    spike
      .getByKey[String, Map[Sample, String]](k)
      .map(
        _.flatMap(e => e._1.values.filter(_.nonEmpty).head)
          .getOrElse(throw new Exception("No data found"))
      )

}
--------------------------------------------------------------------------------
/aerospike-scala-macros/src/main/scala/ru/tinkoff/aerospikemacro/cast/Caster.scala:
--------------------------------------------------------------------------------
/*
 * Copyright (c) 2016 Tinkoff
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ru.tinkoff.aerospikemacro.cast

import shapeless.syntax.std.tuple._

/**
  * Runtime helpers used by the BinWrapper macro to rebuild typed HLists and
  * tuples from the untyped values Aerospike returns (Long/Double/String).
  *
  * @author MarinaSigaeva
  * @since 27.10.16
  */
object Caster {
  type TP = Any with Product

  /**
    * Casts each HList slot back to its declared Scala type.
    *
    * @param allElems the raw values fetched from the record, in HList order
    * @param typeStr  the rendered HList type, e.g. "shapeless.::[Int,shapeless.::[Boolean,shapeless.HNil]]"
    */
  def castHListElements(allElems: List[Any], typeStr: String): List[Any] = {
    // Only these types come back from Aerospike in a widened representation
    // (Long/Double/String) and need narrowing; everything else is kept as-is.
    val need = List("Boolean", "Float", "Char", "Int", "Short", "Byte")
    val types = typeStr
      .replaceAll("""shapeless.::""", "")
      .replace(",shapeless.HNil", "")
      .toCharArray
      .filter(e => e != '[' && e != ']') // FIX: was bitwise `&`; use short-circuit `&&`
      .mkString
      .split(",")

    (for (i <- types.indices) yield {
      if (need.contains(types(i))) cast(allElems(i), types(i)) else allElems(i)
    }).toList
  }

  /**
    * Narrows a single widened value (`Long`/`Double`/`String`) to the Scala
    * type named by `desc`; unknown combinations are returned unchanged.
    */
  def cast(elem: Any, desc: String): Any = {
    elem match {
      case long: java.lang.Long if desc == "Boolean" => long == 1
      case long: java.lang.Long if desc == "Int"     => long.toInt
      case long: java.lang.Long if desc == "Short"   => long.toShort
      case long: java.lang.Long if desc == "Byte"    => long.toByte
      case dbl: java.lang.Double if desc == "Float"  => dbl.toFloat
      case str: java.lang.String if desc == "Char" && str.nonEmpty =>
        // FIX: return the Char itself; the old code returned Option[Char]
        // (headOption), which broke reconstruction of Char-typed slots.
        str.charAt(0)
      case _ => elem
    }
  }

  /**
    * Rebuilds a TupleN from a fetched MapValue keyed by element index ("0",
    * "1", ...). Returns None when the arity is unsupported or a cast fails.
    */
  def castTuple(elems: Map[Any, Any], types: List[String]): Option[TP] = {
    val casted = types.indices.map(i => cast(elems(i.toString), types(i))).toList
    casted.length match {
      case l if l > 0 && l < 23 => scala.util.Try(buildTuple(casted)).toOption
      case _                    => None
    }
  }

  /**
    * Builds a real TupleN from the element list.
    *
    * FIX: the previous implementation yielded a List of Tuple2 pairs
    * (`Tuple1(head) :+ elems(i)` for each index), never the TupleN the macro
    * pattern-matches on, so tuple decoding could not succeed. Scala has no way
    * to construct a tuple of dynamic arity, so each supported arity (1..22)
    * is enumerated explicitly.
    *
    * @throws IllegalArgumentException for arities outside 1..22
    */
  def buildTuple(elems: List[Any]): TP = elems match {
    case List(a) => Tuple1(a)
    case List(a, b) => (a, b)
    case List(a, b, c) => (a, b, c)
    case List(a, b, c, d) => (a, b, c, d)
    case List(a, b, c, d, e) => (a, b, c, d, e)
    case List(a, b, c, d, e, f) => (a, b, c, d, e, f)
    case List(a, b, c, d, e, f, g) => (a, b, c, d, e, f, g)
    case List(a, b, c, d, e, f, g, h) => (a, b, c, d, e, f, g, h)
    case List(a, b, c, d, e, f, g, h, i) => (a, b, c, d, e, f, g, h, i)
    case List(a, b, c, d, e, f, g, h, i, j) => (a, b, c, d, e, f, g, h, i, j)
    case List(a, b, c, d, e, f, g, h, i, j, k) => (a, b, c, d, e, f, g, h, i, j, k)
    case List(a, b, c, d, e, f, g, h, i, j, k, l) => (a, b, c, d, e, f, g, h, i, j, k, l)
    case List(a, b, c, d, e, f, g, h, i, j, k, l, m) => (a, b, c, d, e, f, g, h, i, j, k, l, m)
    case List(a, b, c, d, e, f, g, h, i, j, k, l, m, n) => (a, b, c, d, e, f, g, h, i, j, k, l, m, n)
    case List(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) => (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o)
    case List(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p) => (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p)
    case List(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q) => (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q)
    case List(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r) => (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r)
    case List(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s) => (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s)
    case List(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t) => (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t)
    case List(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u) => (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u)
    case List(a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v) => (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v)
    case _ =>
      throw new IllegalArgumentException(s"Cannot build a tuple of arity ${elems.length}")
  }
}
--------------------------------------------------------------------------------
/aerospike-scala-macros/src/main/scala/ru/tinkoff/aerospikemacro/converters/BinWrapper.scala:
--------------------------------------------------------------------------------
/*
 * Copyright (c) 2016 Tinkoff
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ru.tinkoff.aerospikemacro.converters

import ru.tinkoff.aerospikescala.domain.{ByteSegment, MBin, SingleBin}

import scala.language.experimental.macros
import scala.reflect.macros.blackbox

/**
  * Type class converting values of type `BT` to Aerospike `Bin`s and decoding
  * fetched `Record`s back. Instances are materialized by the macro in the
  * companion object, which overrides `fetch` per concrete type.
  *
  * @author MarinaSigaeva
  * @since 08.09.16
  */
trait BinWrapper[BT] {

  import com.aerospike.client.Value._
  import com.aerospike.client.{Bin, Record, Value}
  import shapeless.HList.hlistOps
  import shapeless.{HList, _}

  import scala.collection.JavaConverters._
  import scala.collection.immutable.{List, ListMap, Map}
  import scala.reflect.ClassTag
  import scala.reflect.runtime.universe._
  import scala.util.{Failure, Success}

  // Supported input/output shapes.
  type One = (String, BT)                        // single named bin
  type Singleton = SingleBin[BT]                 // domain wrapper around one bin
  type Many = Map[String, BT]                    // several named bins
  type Multi = MBin[BT]                          // domain wrapper around several bins
  type Out = (Map[String, Option[BT]], Int, Int) // (decoded bins, generation, expiration)

  // ToDo Is it needed?
  val comma = ","

  /** Converts a name->value map to bins, silently dropping entries whose conversion throws. */
  def apply(many: Many): List[Bin] =
    many.view.flatMap(one => scala.util.Try(apply(one)).toOption).toList

  def apply(many: Multi): List[Bin] = many.asOne.view.map(apply).toList

  /** Converts one named value to a Bin; bin names longer than 14 characters are rejected. */
  def apply(one: One): Bin =
    if (one._1.length > 14)
      throwE("Current limit for bin name is 14 characters")
    else gen(one)

  def apply(one: Singleton): Bin = apply((one.name, one.value))

  /** Decodes a fetched Record into typed bin values plus generation/expiration counters. */
  def apply(r: Record): Out = {
    val outValue: Map[String, Option[BT]] = {
      r.bins.asScala
        .collect {
          case (name, bt: Any) => name -> fetch(bt)
        }
        .iterator
        .toMap
    }
    // NOTE(review): `outValue.values.isEmpty` only holds when outValue itself
    // is empty, which cannot happen when r.bins is non-empty — presumably
    // `forall(_.isEmpty)` was intended; verify before relying on this guard.
    if (outValue.values.isEmpty && !r.bins.isEmpty)
      throw new ClassCastException(
        s"Failed to cast ${weakTypeOf[BT]}. Please, implement fetch function in BinWrapper"
      )
    else (outValue, r.generation, r.expiration)
  }

  /**
   saving as BlobValue, GeoJSONValue, ValueArray or NullValue not implemented here
   Your case classes will be saved as Map[String, Any] in com.aerospike.client.MapValue.
   If you want another format just override toValue function
    */
  def toValue(v: BT): Value = v match {
    case h: HList =>
      // HLists are stored as a MapValue keyed by element index.
      val m = fromHList(h, 0, h.runtimeLength - 1)
      new MapValue(m.asJava)
    case ByteSegment(bytes, offset, length) =>
      new ByteSegmentValue(bytes, offset, length)
    case b: Int     => new IntegerValue(b)
    case b: String  => new StringValue(b)
    case b: Short   => new IntegerValue(b)
    case b: Char    => new StringValue(b.toString)
    case b: Byte    => new IntegerValue(b)
    case b: Long    => new LongValue(b)
    case b: Boolean => new BooleanValue(b)
    case b: Float   => new FloatValue(b)
    case b: Double  => new DoubleValue(b)
    case b: Array[Byte]          => new BytesValue(b)
    case jl: java.util.List[_]   => new ListValue(jl)
    case s: List[_]              => new ListValue(s.asJava)
    case a: Array[_]             => new ListValue(a.toList.asJava)
    case jm: java.util.Map[_, _] => new MapValue(jm)
    case m: Map[_, _]            => new MapValue(m.asJava)
    case t: Any with Product if isTuple(t) =>
      // Tuples are stored as a MapValue keyed by element index.
      new MapValue(tupleMapped(t).asJava)
    case yourCaseClass =>
      // Fallback: reflectively turn a case class into field-name -> value.
      scala.util.Try(defaultToValue(yourCaseClass)) match {
        case Success(m) => new MapValue(m.asJava)
        case Failure(_) =>
          throwE(
            s"You need to write your own function toValue(v: ${v.getClass}): " +
              "com.aerospike.client.Value function in BinWrapper implicit"
          )
      }
  }

  def typed[T](x: T)(implicit tag: WeakTypeTag[T]): T = x.asInstanceOf[T]

  /** Maps a tuple to "index -> element", indices rendered as strings and sorted. */
  def tupleMapped[TPL <: Any with Product](tpl: TPL): Map[String, Any] = {
    val i = tpl.productArity
    val m = optTuple(tpl).map(mapify(i, _)).getOrElse(Map.empty)
    ListMap(m.toSeq.sortBy(_._1): _*).view.map {
      case (k, v) => k.toString -> v
    }.toMap
  }

  /** Index -> productElement map for the first `i` elements of a Product. */
  def mapify[H <: Any with Product](i: Int, t: H): Map[Int, Any] = {
    (for (e <- 0 until i) yield (e, t.productElement(e)))(collection.breakOut)
  }

  /** Recursively flattens an HList into "index -> head element" entries. */
  def fromHList[L <: HList](hList: L, i: Int, maxIndex: Int): Map[String, Any] = {
    val h0 = Map(i.toString -> typed(hList.productElement(0)))
    hList match {
      case _ :: tail if i < maxIndex =>
        h0 ++ fromHList(tail, i + 1, maxIndex)
      case _ => h0
    }
  }

  /** Reflectively extracts public getter name -> value pairs from a case class. */
  def defaultToValue[T](x: T): Map[String, Any] = {
    val clazz = weakTypeOf[T].getClass
    val classTag = ClassTag[T](clazz)
    val rm = scala.reflect.runtime.currentMirror
    val accessors =
      rm.classSymbol(x.getClass).toType.decls.sorted.view.collect {
        case m: MethodSymbol if m.isGetter && m.isPublic => m
      }
    val instanceMirror = rm.reflect(x)(classTag)
    (for (acc <- accessors.view) yield {
      acc.name.toString -> instanceMirror.reflectMethod(acc).apply()
    }).toMap
  }

  // Default decoder: plain cast. The macro overrides this per concrete type.
  def fetch(any: Any): Option[BT] =
    scala.util.Try(any.asInstanceOf[BT]).toOption

  def gen(b: One): Bin = new Bin(b._1, toValue(b._2))

  def throwE(msg: String) = throw new IllegalArgumentException(msg)

  def throwClassCast(tpe: String) =
    throw new ClassCastException(
      s"Failed to cast $tpe. Please, implement fetch function in BinWrapper"
    )

  /**
    * Parses a stringified map value ("k=v, k=v, ...") back into a typed Map.
    * `getView` splits the rendered object into "k=v" chunks; `k`/`v` convert
    * each side. Non-Value inputs yield an empty map.
    */
  def toKVmap[K, V](
      any: Any,
      getView: String => Array[String] = plain
  )(implicit k: String => K, v: String => V): Map[K, V] = any match {
    case a: Value =>
      val objString = a.getObject.toString
      val anyView = getView(objString).view
      (for {
        elem <- anyView
        kvs = elem.split("=") if kvs.length > 1
      } yield k(kvs(0)) -> v(kvs(1))).toMap
    case _ => Map.empty
  }

  /** Parses a rendered List ("List(a, b, ...)"): drops the 5-char prefix, splits, converts. */
  def toLs[T](s: String)(implicit to: String => T): List[T] =
    s.view(5, s.length).mkString.split(", ").view.map(to).toList

  /** Strips one leading/trailing char (the surrounding parens) and splits on ", ". */
  def plain(s: String): Array[String] =
    s.view(1, s.length - 1).mkString.split(", ")

  /** Like `plain`, but for values that are themselves rendered collections ("k=List(..), ..."). */
  def coll(s: String): Array[String] = {
    val all = s.view(1, s.length - 1).mkString.split("\\), ", s.length).view
    all.dropRight(1).toArray ++ Array(all.last.dropRight(1))
  }

  // NOTE(review): name-based check; matches any class whose simple name contains "Tuple".
  def isTuple[T](x: T): Boolean = x.getClass.getSimpleName.contains("Tuple")

  // Statically-typed wrapping needed so shapeless tuple syntax applies; one case per arity.
  private def optTuple[TPL <: Any with Product](tpl: TPL) = tpl match {
    case t2: Tuple2[_, _] => Option(t2)
    case t3: Tuple3[_, _, _] => Option(t3)
    case t4: Tuple4[_, _, _, _] => Option(t4)
    case t5: Tuple5[_, _, _, _, _] => Option(t5)
    case t6: Tuple6[_, _, _, _, _, _] => Option(t6)
    case t7: Tuple7[_, _, _, _, _, _, _] => Option(t7)
    case t8: Tuple8[_, _, _, _, _, _, _, _] => Option(t8)
    case t9: Tuple9[_, _, _, _, _, _, _, _, _] => Option(t9)
    case t10: Tuple10[_, _, _, _, _, _, _, _, _, _] => Option(t10)
    case t11: Tuple11[_, _, _, _, _, _, _, _, _, _, _] => Option(t11)
    case t12: Tuple12[_, _, _, _, _, _, _, _, _, _, _, _] => Option(t12)
    case t13: Tuple13[_, _, _, _, _, _, _, _, _, _, _, _, _] => Option(t13)
    case t14: Tuple14[_, _, _, _, _, _, _, _, _, _, _, _, _, _] => Option(t14)
    case t15: Tuple15[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => Option(t15)
    case t16: Tuple16[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => Option(t16)
    case t17: Tuple17[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => Option(t17)
    case t18: Tuple18[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => Option(t18)
    case t19: Tuple19[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => Option(t19)
    case t20: Tuple20[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => Option(t20)
    case t21: Tuple21[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => Option(t21)
    case t22: Tuple22[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => Option(t22)
    case _ => None
  }
}

object BinWrapper {

  /** Materializes a BinWrapper[T] with a type-specific `fetch` override. */
  implicit def materializeBinWrapper[T]: BinWrapper[T] =
    macro materializeBinWrapperImpl[T]

  def materializeBinWrapperImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[BinWrapper[T]] = {
    import c.universe._
    val tpe = weakTypeOf[T]
    // val one = weakTypeOf[(String, T)]
    // val singleton = weakTypeOf[SingleBin[T]]
    // val multi = weakTypeOf[MBin[T]]
    // val many = weakTypeOf[Map[String, T]]
    val out = weakTypeOf[(Map[String, Option[T]], Int, Int)]
    val tpeSt = q"${tpe.toString}"

    // Generic fetch: direct cast first, then a caller-supplied conversion `add`.
    def mp(add: Tree) =
      q"""override def fetch(any: Any): Option[$tpe] = any match {
            case v: $tpe => Option(v)
            case any: Any => scala.util.Try{$add}.toOption
            case oth => None
          }
       """

    // Tuples were stored as MapValue keyed by element index; rebuild via Caster.castTuple.
    def tupleFetch(ts: List[String]) =
      q"""override def fetch(any: Any): Option[$tpe] =
            Value.getFromRecordObject(any) match {
              case m: MapValue => m.getObject match {
                case ms: java.util.Map[Any @unchecked, Any @unchecked] =>
                  val res = castTuple(ms.asScala.iterator.toMap, $ts)
                  res.collect { case t: $tpe => t }
                case _ => None
              }
              case _ => None
            }"""

    // HLists were stored as MapValue; re-cast the slots and rebuild with toHList.
    val mh =
      q"""override def fetch(any: Any): Option[$tpe] =
            Value.getFromRecordObject(any) match {
              case m: MapValue => m.getObject match {
                case ms: java.util.Map[Any @unchecked, Any @unchecked] =>
                  val newList = castHListElements(ms.asScala.values.toList, $tpeSt)
                  newList.toHList[$tpe]
                case _ => None
              }
              case _ => None
            }"""

    // Decode a ListValue into List[pType]; a non-list value falls out via Try -> None.
    def typedList(pType: Type): Tree =
      q"""override def fetch(any: Any): Option[$tpe] = Try{Value.getFromRecordObject(any) match {
            case lv: ListValue => lv.getObject match {
              case ls: java.util.List[$pType @unchecked] => ls.asScala.toList
            }
          }
        }.toOption """

    /*
  Example. Remove exception and Try allocation to improve performance.

  def typedList(pType: Type): Tree =
    q"""override def fetch(any: Any): Option[$tpe] =
      Value.getFromRecordObject(any) match {
        case lv: ListValue => lv.getObject match {
          case ls: java.util.List[$pType @unchecked] => Some(ls.asScala.toList)
          case _ => None
        }
      }
      case _ => None
    }
    """

  def typedList(pType: Type): Tree =
    q"""override def fetch(any: Any): Option[$tpe] = {
      val value = Value.getFromRecordObject(any)
      if (value.isInstanceOf[ListValue]) {
        val ls = value.getObject
        if (ls.isInstanceOf[java.util.List[$pType @unchecked]]) Some(ls.asScala.toList)
        else None
      } else None
    }
    """*/

    // Decode a ListValue into Array[pType].
    def typedArray(pType: Type): Tree =
      q"""override def fetch(any: Any): Option[$tpe] = Try {
            Value.getFromRecordObject(any) match {
              case lv: ListValue => lv.getObject match {
                case ls: java.util.List[$pType @unchecked] =>
                  ls.asScala.toArray
              }
            }
          }.toOption """

    // Like typedArray, but maps each stored element through `to` (e.g. Long -> Int).
    def streamedArray(pType: Type, to: Tree): Tree =
      q"""override def fetch(any: Any): Option[$tpe] = Try {
            Value.getFromRecordObject(any) match {
              case lv: ListValue => lv.getObject match {
                case ls: java.util.List[$pType @unchecked] =>
                  ls.asScala.map($to).toArray
              }
            }
          }.toOption """

    // Decode a MapValue into Map[k, v].
    def typedMap(k: Type, v: Type): Tree =
      q""" override def fetch(any: Any): Option[$tpe] = Try{Value.getFromRecordObject(any) match {
            case m: MapValue => m.getObject match {
              case ms: java.util.Map[$k @unchecked, $v @unchecked] => ms.asScala.iterator.toMap
            }
          }
        }.toOption"""

    // Extracts N from "TupleN"; 0 when the type is not a tuple (or arity 1).
    def tupleArity(tpe: Type): Int = {
      val rex = "Tuple(\\d{1,2})".r
      tpe.typeSymbol.name.encodedName.toString match {
        case rex(n) if n.toInt > 1 && n.toInt < 23 => n.toInt
        case _ => 0
      }
    }

    // Selects the fetch override matching the requested type T.
    val fetchValue = tpe match {
      case t if t.toString.contains("HNil") || t.toString.contains("HList") => mh
      case t if t =:= weakTypeOf[String] =>
        q"""override def fetch(any: Any): Option[$tpe] =
              Try(Value.getFromRecordObject(any).getObject.toString).toOption
         """
      // ToDo Choose between String and Long. Take storage overhead into account. Implement test.
      case t if t =:= weakTypeOf[Char] =>
        q"""override def fetch(any: Any): Option[$tpe] = any match {
              case v: String => v.toString.toCharArray.headOption
              case _ => None
            } """
      case t if t =:= weakTypeOf[Boolean] =>
        q"""override def fetch(any: Any): Option[$tpe] = any match {
              case v: java.lang.Long => Option(v == 1)
              case _ => None
            } """
      // ToDo Use strict checking of Value.UseDoubleType. Implement test.
      case t if t =:= weakTypeOf[Float] =>
        q"""override def fetch(any: Any): Option[$tpe] = any match {
              case v: java.lang.Double => Option(v.toFloat)
              case v: java.lang.Long =>
                Option(java.lang.Double.longBitsToDouble(v).toFloat)
              case _ => None
            } """
      // ToDo Use strict checking of Value.UseDoubleType. Implement test.
      case t if t =:= weakTypeOf[Double] =>
        q"""override def fetch(any: Any): Option[$tpe] = any match {
              case v: java.lang.Double => Option(v)
              case v: java.lang.Long =>
                Option(java.lang.Double.longBitsToDouble(v))
              case v => None
            } """
      case t if t =:= weakTypeOf[Int] =>
        q"""override def fetch(any: Any): Option[$tpe] = any match {
              case v: java.lang.Long => Option(v.toInt)
              case _ => None
            } """
      case t if t =:= weakTypeOf[Short] =>
        q"""override def fetch(any: Any): Option[$tpe] = any match {
              case v: java.lang.Long => Option(v.toShort)
              case _ => None
            } """
      case t if t =:= weakTypeOf[Byte] =>
        q"""override def fetch(any: Any): Option[$tpe] = any match {
              case v: java.lang.Long => Option(v.toByte)
              case _ => None
            }"""
      case t if t =:= weakTypeOf[List[String]] => typedList(weakTypeOf[String])
      case t if t =:= weakTypeOf[List[Int]] => typedList(weakTypeOf[Int])
      case t if t =:= weakTypeOf[List[Long]] => typedList(weakTypeOf[Long])
      case t if t =:= weakTypeOf[List[Float]] => typedList(weakTypeOf[Float])
      case t if t =:= weakTypeOf[List[Double]] => typedList(weakTypeOf[Double])
      case t if t =:= weakTypeOf[List[Boolean]] => typedList(weakTypeOf[Boolean])
      case t if t =:= weakTypeOf[Array[String]] => typedArray(weakTypeOf[String])
      // Ints/Floats come back widened (Long/Double) and are narrowed per element.
      case t if t =:= weakTypeOf[Array[Int]] => streamedArray(weakTypeOf[Long], q"_.toInt")
      case t if t =:= weakTypeOf[Array[Long]] => typedArray(weakTypeOf[Long])
      case t if t =:= weakTypeOf[Array[Float]] => streamedArray(weakTypeOf[Double], q"_.toFloat")
      case t if t =:= weakTypeOf[Array[Double]] => typedArray(weakTypeOf[Double])
      case t if t =:= weakTypeOf[Array[Boolean]] => typedArray(weakTypeOf[Boolean])
      case t if t =:= weakTypeOf[Map[Int, String]] => typedMap(weakTypeOf[Int], weakTypeOf[String])
      case t if t =:= weakTypeOf[Map[String, String]] => typedMap(weakTypeOf[String], weakTypeOf[String])
      case t if t =:= weakTypeOf[Map[String, Int]] => typedMap(weakTypeOf[String], weakTypeOf[Int])
      case t if t =:= weakTypeOf[Map[String, Long]] => typedMap(weakTypeOf[String], weakTypeOf[Long])
      case t if t =:= weakTypeOf[Map[String, Float]] => typedMap(weakTypeOf[String], weakTypeOf[Float])
      case t if t =:= weakTypeOf[Map[String, Double]] => typedMap(weakTypeOf[String], weakTypeOf[Double])
      case t if t =:= weakTypeOf[Map[String, List[Int]]] =>
        mp(q"""toKVmap[String, List[Int]](any, coll)(_.toString, toLs(_)(_.toInt))""")
      case t if t =:= weakTypeOf[Map[String, List[String]]] =>
        mp(q"""toKVmap[String, List[String]](any, coll)(_.toString, toLs(_))""")
      case t if t =:= weakTypeOf[Map[String, Any]] => typedMap(weakTypeOf[String], weakTypeOf[Any])
      case t if tupleArity(t) != 0 =>
        val tplArity = tupleArity(t)
        if (tplArity > 0 && tplArity < 23) tupleFetch(t.typeArgs.map(_.toString))
        else q"""None"""
      // Unknown types fall back to the trait's default cast-based fetch.
      case _ => q""""""
    }

    c.Expr[BinWrapper[T]] {
      q"""

      import java.util.{List => JList, Map => JMap}
      import com.aerospike.client.{Bin, Record, Value}
      import com.aerospike.client.Value.{BlobValue, ListValue, MapValue, ValueArray}
      import scala.collection.JavaConverters._
      import scala.collection.mutable.{Seq => mSeq}
      import scala.language.experimental.macros
      import shapeless.{HList, _}
      import shapeless.HList.hlistOps
      import syntax.std.traversable._
      import scala.collection.immutable.ListMap
      import ru.tinkoff.aerospikemacro.cast.Caster._
      import ru.tinkoff.aerospikemacro.converters._
      import com.aerospike.client.Value
      import scala.util.Try

      new BinWrapper[$tpe] {
        override def apply(r: Record): $out = {
          val outValue: collection.immutable.Map[String, Option[$tpe]] = {
            r.bins.asScala.collect {
              case (name, bt: Any) =>
                val res = fetch(bt)
                if (res.isEmpty && !r.bins.isEmpty) throwClassCast($tpeSt) else name -> res
            }.iterator.toMap
          }

          (outValue, r.generation, r.expiration)
        }
        $fetchValue
      }

      """
    }
  }
}
--------------------------------------------------------------------------------
/aerospike-scala-macros/src/main/scala/ru/tinkoff/aerospikemacro/converters/KeyWrapper.scala:
--------------------------------------------------------------------------------
/*
 * Copyright (c) 2016 Tinkoff
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ru.tinkoff.aerospikemacro.converters

import com.aerospike.client.Value._
import com.aerospike.client.{Key, Value}
import ru.tinkoff.aerospikemacro.domain.{DBCredentials, WrapperException}

import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context

/**
  * Type class building an Aerospike `Key` (namespace + set + value) from a
  * Scala key value of type `KT`. Instances are materialized by the macro in
  * the companion object from implicit `DBCredentials`.
  *
  * @author MarinaSigaeva
  * @since 19.09.16
  */
trait KeyWrapper[KT] {

  // Namespace and set name; overridden by the macro from DBCredentials.
  val dbName: String = ""
  val tableName: String = ""

  def apply(k: KT): Key = new Key(dbName, tableName, toValue(k))

  /**
    * Default conversion via `Value.get`; a NullValue result means the key type
    * is unsupported and the caller must supply a custom toValue.
    */
  def toValue(v: KT): Value = Value.get(v) match {
    case _: NullValue =>
      throw new WrapperException {
        val msg = "You need to write your own toValue function in KeyWrapper"
      }
    case other => other
  }
}

object KeyWrapper {
  import Utils._

  implicit def materializeK[T](implicit dbc: DBCredentials): KeyWrapper[T] = macro implK[T]

  /** Macro: generates a KeyWrapper[T] wired to the given credentials, with a type-specific toValue. */
  def implK[T: c.WeakTypeTag](c: Context)(dbc: c.Expr[DBCredentials]): c.Expr[KeyWrapper[T]] = {
    import c.universe._
    val tpe = weakTypeOf[T]

    // Splice the credentials expression so namespace/setname are evaluated at the call site.
    val db = reify(dbc.splice.namespace)
    val tableName = reify(dbc.splice.setname)

    // Type-specific Value-construction tree (see Utils.pickValue).
    val toDBValue = pickValue(c)

    c.Expr[KeyWrapper[T]] {
      q"""
      import com.aerospike.client.{Key, Value}
      import com.aerospike.client.Value._
      import scala.collection.immutable.Seq
      import ru.tinkoff.aerospikescala.domain.ByteSegment
      import scala.util.{Failure, Success, Try}

      new KeyWrapper[$tpe] {
        override val dbName = $db
        override val tableName = $tableName
        override def toValue(v: $tpe): Value = $toDBValue
      }
      """
    }
  }

  /** Non-implicit variant for building a wrapper explicitly. */
  def create[T](dbc: DBCredentials): KeyWrapper[T] = macro implK[T]

}
--------------------------------------------------------------------------------
/aerospike-scala-macros/src/main/scala/ru/tinkoff/aerospikemacro/converters/Utils.scala:
--------------------------------------------------------------------------------
package ru.tinkoff.aerospikemacro.converters

import ru.tinkoff.aerospikescala.domain.ByteSegment

import scala.reflect.macros.blackbox.Context

/**
  * @author MarinaSigaeva
  * @since 04.04.17
  */
object Utils {

  /**
    * Builds the tree for `toValue(v)` used by the KeyWrapper macro: for each
    * supported key type T it produces a direct Aerospike Value constructor
    * call; anything else falls back to `Value.get(v)` wrapped in Try, throwing
    * a descriptive error when that also fails.
    */
  def pickValue[T: c.WeakTypeTag](c: Context): c.universe.Tree = {
    import c.universe._
    val tpe = weakTypeOf[T]
    val tpeName = q"${tpe.typeSymbol.fullName}"

    val err =
      q"""throw new IllegalArgumentException(
            "You need to write your own toValue function in KeyWrapper implicit for type " + $tpeName) """

    tpe match {
      case t if t =:= weakTypeOf[ByteSegment] => q"""v match {
          case ByteSegment(bytes, offset, length) => new ByteSegmentValue(bytes, offset, length)
          case _ => $err
        }"""
      case t if t =:= weakTypeOf[Int] => q"""new IntegerValue(v)"""
      case t if t =:= weakTypeOf[Long] => q"""new LongValue(v)"""
      case t if t =:= weakTypeOf[String] => q"""new StringValue(v)"""
      case t if t =:= weakTypeOf[Boolean] => q"""new BooleanValue(v)"""
      case t if t =:= weakTypeOf[Float] => q"""new FloatValue(v)"""
      case t if t =:= weakTypeOf[Double] => q"""new DoubleValue(v)"""
      case t if t =:= weakTypeOf[Array[Byte]] => q"""new BytesValue(v)"""
      // NOTE(review): `=:=` against existential Seq[_]/Map[_, _] rarely matches
      // concrete instantiations like Seq[Int] — these two branches likely fall
      // through to the default case; verify intent.
      case t if t =:= weakTypeOf[scala.collection.immutable.Seq[_]] => q"""new ListValue(v)"""
      case t if t =:= weakTypeOf[Map[_, _]] => q"""new MapValue(v)"""
      case _ => q"""Try(Value.get(v)) match {
          case Failure(th) => $err
          case Success(s) => s
        }"""
    }
  }
}
--------------------------------------------------------------------------------
/aerospike-scala-macros/src/main/scala/ru/tinkoff/aerospikemacro/domain/DBCredentials.scala:
--------------------------------------------------------------------------------
package ru.tinkoff.aerospikemacro.domain

/**
  * @author MarinaSigaeva
  *
@since 22.11.16
 */
/** Aerospike coordinates (namespace + set name) used when building `Key`s. */
case class DBCredentials(namespace: String, setname: String)

// Marker for wrapper-level failures; implementors carry a human-readable message.
trait WrapperException extends Exception { val msg: String }
--------------------------------------------------------------------------------
/aerospike-scala-macros/src/main/scala/ru/tinkoff/aerospikemacro/printer/Printer.scala:
--------------------------------------------------------------------------------
/*
 * Copyright (c) 2016 Tinkoff
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
15 | */ 16 | 17 | package ru.tinkoff.aerospikemacro.printer 18 | 19 | import scala.language.experimental.macros 20 | import scala.reflect.macros.blackbox 21 | 22 | /** 23 | * @author MarinaSigaeva 24 | * @since 21.10.16 25 | */ 26 | object Printer { 27 | def printNameValue[T](x: T): Unit = macro impl[T] 28 | 29 | def impl[R](c: blackbox.Context)(x: c.Tree): c.Tree = { 30 | import c.universe._ 31 | val tpe = weakTypeOf[R] 32 | 33 | val name = x match { 34 | case Select(_, TermName(s)) => s 35 | case _ => "" 36 | } 37 | 38 | val isArray = tpe.widen.typeSymbol.name.eq(TypeName("Array")) 39 | 40 | q""" 41 | println("-"*20) 42 | println($name + " => " + $x) 43 | """ 44 | } 45 | } -------------------------------------------------------------------------------- /aerospike-scala-proto/src/main/protobuf/designers.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package ru.tinkoff.aerospikeproto.designers; 4 | 5 | option java_multiple_files = true; 6 | option java_outer_classname = "DesignerProto"; 7 | 8 | message Designer { 9 | string name = 1; 10 | int32 age = 2; 11 | } 12 | 13 | message Designers { 14 | repeated Designer designers = 1; 15 | } -------------------------------------------------------------------------------- /aerospike-scala-proto/src/main/scala/ru/tinkoff/aerospikeproto/wrapper/ProtoBinWrapper.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2017 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ru.tinkoff.aerospikeproto.wrapper

import com.aerospike.client.Value
import com.aerospike.client.Value.BytesValue
import com.trueaccord.lenses.Updatable
import com.trueaccord.scalapb.{GeneratedMessage, Message}
import ru.tinkoff.aerospikemacro.converters.BinWrapper

import scala.language.experimental.macros
import scala.reflect.macros.blackbox

/**
 * BinWrapper for ScalaPB-generated protobuf messages: a message is stored as
 * its serialized byte array and parsed back via the companion's `parseFrom`.
 *
 * @author MarinaSigaeva
 * @since 23.03.17
 */

trait ProtoBinWrapper[T <: GeneratedMessage with Message[T] with Updatable[T]] extends BinWrapper[T] {
  // Serialize the whole message into a single BytesValue bin.
  override def toValue(v: T): Value = new BytesValue(v.toByteArray)

  // Deserialize: anything that is not a BytesValue wrapping an Array[Byte]
  // makes one of the inner (deliberately partial) matches throw, and the
  // surrounding Try turns that into None.
  override def fetch(any: Any): Option[T] = scala.util.Try {
    Value.getFromRecordObject(any) match {
      case b: BytesValue => b.getObject match {
        case arr: Array[Byte] => parse(arr)
      }
    }
  }.toOption

  // Deserializer; filled in by the materializer with `Companion.parseFrom`.
  def parse: Array[Byte] => T
}

object ProtoBinWrapper {
  // Implicit materializer: summons a ProtoBinWrapper[T] for any ScalaPB message type.
  implicit def materialize[T <: GeneratedMessage
    with Message[T] with Updatable[T]]: ProtoBinWrapper[T] = macro impl[T]

  def impl[T <: GeneratedMessage with Message[T] with Updatable[T] : c.WeakTypeTag]
    (c: blackbox.Context): c.Expr[ProtoBinWrapper[T]] = {
    import c.universe._
    val tpe = weakTypeOf[T]

    // ScalaPB generates a companion object with the message's simple name;
    // refer to it by that name so `parseFrom` resolves at the expansion site.
    // NOTE(review): this assumes the companion is reachable by its simple
    // name where the macro expands — confirm for nested/packaged messages.
    val simpleName = tpe.typeSymbol.fullName.split('.').last
    val termName = q"${TermName(simpleName)}"

    c.Expr[ProtoBinWrapper[T]] {
      q"""
      import com.aerospike.client.Value
      import com.aerospike.client.Value.BytesValue
      import com.trueaccord.lenses.Updatable
      import com.trueaccord.scalapb.{GeneratedMessage, Message}
      import ru.tinkoff.aerospikemacro.converters.BinWrapper

      new ProtoBinWrapper[$tpe] {
        override def parse: Array[Byte] => $tpe = $termName.parseFrom
      }
      """
    }
  }
}
--------------------------------------------------------------------------------
/aerospike-scala-proto/src/test/scala/ru.tinkoff.aerospikeproto.wrapper/ProtoBinTest.scala:
--------------------------------------------------------------------------------
package ru.tinkoff.aerospikeproto.wrapper

import com.aerospike.client.Value
import com.aerospike.client.Value.BytesValue
import com.trueaccord.lenses.Updatable
import com.trueaccord.scalapb.{GeneratedMessage, Message}
import ru.tinkoff.aerospikeproto.wrapper.ProtoBinWrapper
import org.scalatest.{FlatSpec, Matchers}
import ru.tinkoff.aerospikeproto.designers.designers.{Designer, Designers}

/**
 * Checks that the materialized ProtoBinWrapper serializes a message (single
 * and repeated) to exactly its protobuf byte representation.
 *
 * @author MarinaSigaeva
 * @since 03.04.17
 */
class ProtoBinTest extends FlatSpec with Matchers {

  it should "transfer value into BytesValue" in new mocks {

    // Expected bins are the raw protobuf encodings of the fixtures.
    val expected0 = new BytesValue(one.toByteArray)
    val expected1 = new BytesValue(many.toByteArray)

    value(one) shouldBe expected0
    value(many) shouldBe expected1

  }

  trait mocks {

    // Fixtures: one message and a repeated-message container.
    val one = Designer("Karl Lagerfeld", 83)
    val many = Designers(List(one, Designer("Diane von Furstenberg", 70), Designer("Donatella Versace", 61)))

    // Converts via whatever ProtoBinWrapper the macro materializes implicitly.
    def value[I <: GeneratedMessage with Message[I] with Updatable[I], R <: ProtoBinWrapper[I]]
      (v: I)(implicit bw: R): Value = bw.toValue(v)

  }

}

--------------------------------------------------------------------------------
/build.sbt:
--------------------------------------------------------------------------------
import
de.heikoseeberger.sbtheader.HeaderPattern
import Dependencies._

autoScalaLibrary := false

// License header applied to every .scala file by sbt-header.
val copyright = headers := Map(
  "scala" -> (
    HeaderPattern.cStyleBlockComment,
    """|/*
       | * Copyright (c) 2016 Tinkoff
       | *
       | * Licensed under the Apache License, Version 2.0 (the "License");
       | * you may not use this file except in compliance with the License.
       | * You may obtain a copy of the License at
       | *
       | * http://www.apache.org/licenses/LICENSE-2.0
       | *
       | * Unless required by applicable law or agreed to in writing, software
       | * distributed under the License is distributed on an "AS IS" BASIS,
       | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
       | * See the License for the specific language governing permissions and
       | * limitations under the License.
       | */
       |
       | """.stripMargin
  )
)

// Settings shared by every module: coordinates, cross-build, publishing.
val setts = Seq(
  organization := "ru.tinkoff",
  version := "1.1.14",
  scalaVersion := Versions.scala,
  crossScalaVersions := Versions.scalas,
  // Doge
  releaseCrossBuild := false,
  scalacOptions := Seq("-unchecked", "-deprecation", "-encoding", "utf8"),
  copyright,
  licenses := Seq(
    ("Apache License, Version 2.0", url("https://www.apache.org/licenses/LICENSE-2.0"))
  ),
  homepage := Some(url("http://tinkoff.ru")),
  sonatypeProfileName := "ru.tinkoff",
  pgpReadOnly := false,
  publishMavenStyle := true,
  publishArtifact in Test := false,
  pomIncludeRepository := { _ =>
    false
  },
  // Snapshots and releases go to different Sonatype repositories.
  // NOTE(review): `<<=` is the deprecated sbt 0.13 operator (removed in
  // sbt 1.x); migrate to `publishTo := { ... version.value ... }`.
  publishTo <<= version { (v: String) =>
    val nexus = "https://oss.sonatype.org/"
    if (v.trim.endsWith("SNAPSHOT"))
      Some("snapshots" at nexus + "content/repositories/snapshots")
    else
      Some("releases" at nexus + "service/local/staging/deploy/maven2")
  },
  // NOTE(review): the XML element tags of this <developers> block appear to
  // have been stripped (scrape artifact) — restore from VCS before building.
  pomExtra in Global := {


        DanyMariaLee
        Marina Sigaeva
        http://twitter.com/besseifunction


  },
  scmInfo := Some(
    ScmInfo(
      url("http://github.com/TinkoffCreditSystems"),
      "scm:git:github.com/TinkoffCreditSystems/aerospike-scala",
      Some("scm:git:git@github.com:TinkoffCreditSystems/aerospike-scala.git")
    )
  ),
  // Empty user/password: placeholders, expected to be overridden locally.
  credentials += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", "", "")
)

// Generate ScalaPB sources for modules that carry .proto files.
lazy val protoSetting = PB.targets in Compile := Seq(
  scalapb.gen() -> (sourceManaged in Compile).value
)

// Module graph: domain <- macros <- root (aerospike-scala) <- protoBin <- example.
lazy val macros =
  Project(id = "aerospike-scala-macros", base = file("aerospike-scala-macros"), dependencies = Seq(domain))
    .settings(setts)
    .settings(libraryDependencies ++= mainLibs(scalaVersion.value))

lazy val domain = Project(id = "aerospike-scala-domain", base = file("aerospike-scala-domain"))
  .settings(setts)
  .settings(moduleName := "aerospike-scala-domain")

lazy val protoBin =
  Project(id = "aerospike-scala-proto", base = file("aerospike-scala-proto"), dependencies = Seq(root))
    .settings(setts)
    .settings(protoSetting)
    .settings(
      Seq(
        moduleName := "aerospike-scala-proto",
        libraryDependencies ++= commonLibs(scalaVersion.value)
      )
    )

lazy val example =
  Project(id = "aerospike-scala-example", base = file("aerospike-scala-example"), dependencies = Seq(root, protoBin))
    .settings(setts)
    .settings(protoSetting)
    .settings(
      Seq(
        moduleName := "aerospike-scala-example",
        libraryDependencies ++= exampleLibs(scalaVersion.value)
      )
    )

lazy val root =
  Project(id = "aerospike-scala", base = file("."), dependencies = Seq(domain, macros))
    .settings(setts)
    .settings(libraryDependencies ++= commonLibs(scalaVersion.value))

/**
 * Helpers: aggregate clean/compile/test/publish tasks that run over the
 * modules in dependency order (Def.sequential keeps the ordering explicit).
 */
lazy val cleanAll = taskKey[Unit](s"Clean all subprojects")
cleanAll in ThisBuild := clean
  .all(ScopeFilter(inAnyProject))
  .value
  .foreach(identity)

lazy val compileLibraries = taskKey[Unit](s"compile all libraries")
compileLibraries in ThisBuild := Def
  .sequential(
    compile in (macros, Compile),
    compile in (domain, Compile),
    compile in (root, Compile),
    compile in (protoBin, Compile)
  )
  .value

lazy val compileAll = taskKey[Unit](s"compile all subprojects")
compileAll in ThisBuild := Def
  .sequential(
    compileLibraries,
    compile in (example, Compile)
  )
  .value

lazy val recompileAll = taskKey[Unit](s"clean and compile all subprojects")
recompileAll in ThisBuild := Def
  .sequential(
    cleanAll,
    compileLibraries,
    compile in (example, Compile)
  )
  .value

lazy val compileAndTestAll = taskKey[Unit](s"compile all subprojects and test ${root.id}")
compileAndTestAll in ThisBuild := Def
  .sequential(
    compileAll,
    test in (root, Test)
  )
  .value

lazy val recompileAndTestAll = taskKey[Unit](s"clean, compile all subprojects and test ${root.id}")
recompileAndTestAll in ThisBuild := Def
  .sequential(
    recompileAll,
    test in (root, Test)
  )
  .value

lazy val publishLibrariesLocal = taskKey[Unit](s"publish all libraries locally")
publishLibrariesLocal in ThisBuild := Def
  .sequential(
    publishLocal in macros,
    publishLocal in domain,
    publishLocal in root,
    publishLocal in protoBin
  )
  .value

lazy val publishLibraries = taskKey[Unit](s"publish all libraries")
publishLibraries in ThisBuild := Def
  .sequential(
    publish in macros,
    publish in domain,
    publish in root,
    publish in protoBin
  )
  .value

import PgpKeys.publishSigned
lazy val publishSignedLibraries = taskKey[Unit](s"publishSigned all libraries")
publishSignedLibraries in ThisBuild := Def
  .sequential(
    publishSigned in macros,
    publishSigned in domain,
    publishSigned in root,
    publishSigned in protoBin
  )
  .value

lazy val publishSignedAll = taskKey[Unit](s"publishSigned all subprojects")
publishSignedAll in ThisBuild := Def
  .sequential(
    publishSignedLibraries,
    publishSigned in example
  )
  .value
--------------------------------------------------------------------------------
/cookbook/BinMagic.md:
--------------------------------------------------------------------------------
# BinWrapper

Types of created keys detected from passed value. I recommend to use `Int, Long, String, Boolean, Float, Double, Array[Byte], Seq, List or Map`.
Also there is `ru.tinkoff.aerospikescala.domain.ByteSegment` case class which corresponds to `com.aerospike.client.ByteSegmentValue`.
Converter for `Bin` supports a lot of types, including `HLists` and your own `case classes`.
For using some case class as a `Bin` you will have to show (```scala override def fetch(any: Any): Option[YourType] = { ... }``` ) how to store it in Aerospike.

Usage is very simple:
```scala
def getBin[B](b: SingleBin[B])(implicit bC: BinWrapper[B]): Bin = bC.apply(b)

getBin(SingleBin("name", 3)) shouldBe new Bin("name", 3)
getBin(SingleBin("name", List(1, 2, 3))) shouldBe new Bin("name", Seq(1, 2, 3))
```
`HLists` and `tuples` are stored as `Maps` (both examples below produce the same resulting `Map`):
```scala
getBin(SingleBin("name", 2.toDouble :: List("a","b") :: 2 :: "dsdsds" :: HNil)) shouldBe
  new Bin("name", Map(0 -> 2.0, 1 -> List("a", "b"), 2 -> 2, 3 -> "dsdsds"))
getBin(SingleBin("name", (2.toDouble, List("a","b"), 2, "dsdsds"))) shouldBe
  new Bin("name", Map(0 -> 2.0, 1 -> List("a", "b"), 2 -> 2, 3 -> "dsdsds"))
```
If you need to use a `case class` as a value:
```scala
case class Truck(name: String, number: Int, color: List[Int])
```
Assume we have two different ideas for how to store and (which is more important) how to get our `Truck` value from Aerospike.
27 | Option one - let it store as a `Map` (which is a default case, means no need to do anything else), but then I have to ```scala override fetch``` function like this: 28 | ```scala 29 | implicit val tc2 = new BinWrapper[Truck] { 30 | override def fetch(any: Any): Option[Truck] = { 31 | scala.util.Try{ any match { 32 | case m: java.util.Map[String, Any] => val nm = m.asScala.toMap 33 | Truck(nm("name").toString, nm("number").toString.toInt, 34 | nm("color").toString.split(",").view.map(_.toInt).toList) 35 | }}.toOption 36 | } 37 | } 38 | ``` 39 | Option two: store it like a `json` 40 | ```scala 41 | implicit val tc1 = new BinWrapper[Truck] { 42 | import MyJsonProtocol._ 43 | 44 | override def toValue(truck: Truck) = { 45 | val j = truck.toJson(truckFormat) 46 | new BlobValue(j) 47 | } 48 | } 49 | 50 | object MyJsonProtocol extends DefaultJsonProtocol { 51 | implicit val truckFormat = jsonFormat(Truck, "mega-name", "mega-number", "mega-color") 52 | } 53 | ``` 54 | usage: 55 | ```scala 56 | getBin(SingleBin("name", Truck("truck", 4, List(1, 2, 3))))(tc1) in Aerospike it will look like {"mega-name":"truck","mega-number":4,"mega-color":[1,2,3]} 57 | getBin(SingleBin("name", Truck("truck", 4, List(1, 2, 3))))(tc2) in Aerospike it will look like {name=truck, number=4, color=1,2,3} 58 | ``` 59 | `Wrappers` passed explicitly, because we have two of them in this scope. 60 | 61 | **Note**: saving as `BlobValue, GeoJSONValue, ValueArray` or `NullValue` not implemented in `BinWrapper`. Your case classes will be saved as `Map[String, Any]` in `com.aerospike.client.MapValue`. 62 | If you want another format just override `toValue()` function in `BinWrapper` creation. -------------------------------------------------------------------------------- /cookbook/KeyMagic.md: -------------------------------------------------------------------------------- 1 | # KeyWrapper 2 | 3 | This wrapper converts passed `key` value into `com.aerospike.client.Key`. 
If you want to use the **namespace** and **setName** specified in the
**application.conf** file - then add
```scala
val config = ConfigFactory.load()
val namespace = config.getString("ru-tinkoff-aerospike-dsl.keyWrapper-namespace")
val setName = config.getString("ru-tinkoff-aerospike-dsl.keyWrapper-setName")

implicit val dbs = DBCredentials(namespace, setName)
```
Or if you want to use different namespaces/setNames - call the function
```scala ru.tinkoff.aerospikemacro.converters.KeyWrapper.create[T](DBCredentials("ns", "setName"))```
to create each of them and pass them explicitly.

Check it out with a small example:
```scala
def getKey[K](k: K)(implicit kC: KeyWrapper[K]): Key = kC(k)
def withCustomKey[T](any: T, dbs: DBCredentials): Key = create(dbs)(any)
```
getKey("StringKey") should give ```scala new com.aerospike.client.Key("test", "test", "StringKey")```
withCustomKey("StringKey", DBCredentials("dbName01", "tableName1")) should give ```scala new com.aerospike.client.Key("dbName01", "tableName1", "StringKey")```

Types of created keys are detected from the passed value. I recommend to use `Int, Long, String, Boolean, Float, Double, Array[Byte], Seq, List or Map`.
Also there is the [ru.tinkoff.aerospikescala.domain.ByteSegment](./domain.md) case class which corresponds to `com.aerospike.client.ByteSegmentValue`.

For using some case class as a `Key` you will have to show how to store it in `Aerospike`.

For example:
```scala
case class Cat(name: String, age: Int)
```
We can store it as a `String`:
```scala
implicit val bValue = new KeyWrapper[Cat] {
  override def apply(cat: Cat): Key = new Key("test", "test", cat.toString)
}
```
`Note` it's recommended to use simple key values. Be careful! If you wrote some serialization for a key value - use that exact value when calling one of the `Get functions`.
-------------------------------------------------------------------------------- /cookbook/SpikeImplMethods.md: -------------------------------------------------------------------------------- 1 | # SpikeImpl with methods 2 | 3 | Create an instance of `SpikeImpl` 4 | ```scala 5 | import ru.tinkoff.aerospike.dsl.SpikeImpl 6 | import ru.tinkoff.aerospikeexamples.example.AClient 7 | 8 | val client = AClient.client 9 | val spike = new SpikeImpl(client) 10 | implicit val dbc = DBCredentials("ns", "setName") 11 | ``` 12 | `Put, Append, Prepend, Add` 13 | ```scala 14 | import ru.tinkoff.aerospike.dsl.CallKB._ 15 | import com.aerospike.client.{AerospikeException, Key} 16 | import com.aerospike.client.listener.{ExistsArrayListener, ExistsSequenceListener} 17 | import ru.tinkoff.aerospike.dsl.{CallKB, SpikeImpl} 18 | import ru.tinkoff.aerospikeexamples.example.AClient 19 | import ru.tinkoff.aerospikemacro.converters._ 20 | import ru.tinkoff.aerospikescala.domain.{MBin, SingleBin} 21 | 22 | import scala.concurrent.Await 23 | import scala.concurrent.ExecutionContext.Implicits.global 24 | import scala.concurrent.duration.Duration 25 | 26 | val client = AClient.client 27 | val spike = new SpikeImpl(client) 28 | implicit val dbc = DBCredentials("ns", "setName") 29 | 30 | case class ExistsArrayHandler(keys: Array[Key] = Array(new Key("kName", "ns", 1)), exists: Array[Boolean] = 31 | Array(true)) extends ExistsArrayListener { 32 | def onSuccess(keys: Array[Key], exists: Array[Boolean]): Unit = {} 33 | def onFailure(e: AerospikeException): Unit = e.printStackTrace() 34 | } 35 | case class ExistsSequenceHandler(k: Key = new Key("kName", "ns", 1), exists: Boolean = true) extends ExistsSequenceListener { 36 | def onExists(key: Key, exists: Boolean): Unit = {} 37 | def onSuccess(): Unit = {} 38 | def onFailure(e: AerospikeException): Unit = e.printStackTrace() 39 | } 40 | 41 | spike.callKB(Put, "StrKey", SingleBin("binName", "binValue")) 42 | spike.callKB(Put, "StrKey", MBin(Map("n1"-> 
"v1", "n2" -> "v2"))) 43 | spike.callKB(Append, "StrKey", SingleBin("binName", "binValue")) 44 | spike.callKB(Append, "StrKey", MBin(Map("n1"-> "v1", "n2" -> "v2"))) 45 | spike.callKB(Prepend, "StrKey", SingleBin("binName", "binValue")) 46 | spike.callKB(Prepend, "StrKey", MBin(Map("n1"-> "v1", "n2" -> "v2"))) 47 | spike.callKB(Add, "StrKey", SingleBin("binName", "binValue")) 48 | spike.callKB(Add, "StrKey", MBin(Map("n1"-> "v1", "n2" -> "v2"))) 49 | ``` 50 | `Execute` 51 | ```scala 52 | import com.aerospike.client.query.Statement 53 | import ru.tinkoff.aerospike.dsl.SpikeImpl 54 | import ru.tinkoff.aerospikeexamples.example.AClient 55 | import ru.tinkoff.aerospikemacro.converters._ 56 | import ru.tinkoff.aerospike.dsl.Call.Execute 57 | import ru.tinkoff.aerospike.dsl.Param1 58 | import scala.concurrent.ExecutionContext.Implicits.global 59 | 60 | val client = AClient.client 61 | val spike = new SpikeImpl(client) 62 | implicit val dbc = DBCredentials("ns", "setName") 63 | 64 | spike.call(Execute, Param1("zzz","zzz", List(...), Some(new Statement), None)) 65 | ``` 66 | `Query` 67 | ```scala 68 | import ru.tinkoff.aerospike.dsl.Call.Query 69 | import ru.tinkoff.aerospike.dsl.Param2 70 | import com.aerospike.client.query.Statement 71 | import ru.tinkoff.aerospike.dsl.SpikeImpl 72 | import ru.tinkoff.aerospikeexamples.example.AClient 73 | import ru.tinkoff.aerospikemacro.converters._ 74 | import scala.concurrent.ExecutionContext.Implicits.global 75 | 76 | val client = AClient.client 77 | val spike = new SpikeImpl(client) 78 | implicit val dbc = DBCredentials("ns", "setName") 79 | 80 | spike.call(Query, Param2(new Statement, None)) //or Some(com.aerospike.client.listener.RecordSequenceListener) 81 | ``` 82 | `QueryAggregate` 83 | ```scala 84 | import ru.tinkoff.aerospike.dsl.Call.QueryAggregate 85 | import ru.tinkoff.aerospike.dsl.Param1 86 | import com.aerospike.client.query.Statement 87 | import ru.tinkoff.aerospike.dsl.SpikeImpl 88 | import 
ru.tinkoff.aerospikeexamples.example.AClient 89 | import ru.tinkoff.aerospikemacro.converters._ 90 | import scala.concurrent.ExecutionContext.Implicits.global 91 | 92 | val client = AClient.client 93 | val spike = new SpikeImpl(client) 94 | implicit val dbc = DBCredentials("ns", "setName") 95 | 96 | spike.call(QueryAggregate, Param1("zzz","zzz", List(), Some(new Statement), None)) 97 | spike.call(QueryAggregate, new Statement) 98 | ``` 99 | `ScanAll` 100 | ```scala 101 | import ru.tinkoff.aerospike.dsl.Call.ScanAll 102 | import ru.tinkoff.aerospike.dsl.{Param3, Param4} 103 | import ru.tinkoff.aerospike.dsl.SpikeImpl 104 | import ru.tinkoff.aerospikeexamples.example.AClient 105 | import ru.tinkoff.aerospikemacro.converters._ 106 | import scala.concurrent.ExecutionContext.Implicits.global 107 | 108 | val client = AClient.client 109 | val spike = new SpikeImpl(client) 110 | implicit val dbc = DBCredentials("ns", "setName") 111 | 112 | spike.call(ScanAll, Param3("namespace", "setName", List(), None)) // or Some(com.aerospike.client.ScanCallback) 113 | spike.call(ScanAll, Param4("zzz","zzz", List(), None)) // or Some(com.aerospike.client.listener.RecordSequenceListener) 114 | ``` 115 | `RemoveUdf` 116 | ```scala 117 | import ru.tinkoff.aerospike.dsl.Call.RemoveUdf 118 | import ru.tinkoff.aerospike.dsl.SpikeImpl 119 | import ru.tinkoff.aerospikeexamples.example.AClient 120 | import ru.tinkoff.aerospikemacro.converters._ 121 | import scala.concurrent.ExecutionContext.Implicits.global 122 | 123 | val client = AClient.client 124 | val spike = new SpikeImpl(client) 125 | implicit val dbc = DBCredentials("ns", "setName") 126 | 127 | spike.call(RemoveUdf, "serverPath") 128 | ``` 129 | `RegisterUdfString` 130 | ```scala 131 | import ru.tinkoff.aerospike.dsl.Call.RegisterUdfString 132 | import com.aerospike.client.Language 133 | import ru.tinkoff.aerospike.dsl.SpikeImpl 134 | import ru.tinkoff.aerospikeexamples.example.AClient 135 | import ru.tinkoff.aerospikemacro.converters._ 
136 | import ru.tinkoff.aerospike.dsl.Param5 137 | import scala.concurrent.ExecutionContext.Implicits.global 138 | 139 | val client = AClient.client 140 | val spike = new SpikeImpl(client) 141 | implicit val dbc = DBCredentials("ns", "setName") 142 | 143 | spike.call(RegisterUdfString, Param5("code", "serverPath", new Language())) 144 | ``` 145 | `Operate` 146 | ```scala 147 | import ru.tinkoff.aerospike.dsl.CallK 148 | import com.aerospike.client.Operation 149 | import com.aerospike.client.Operation.Type 150 | import com.aerospike.client.Value.StringValue 151 | import com.aerospike.client.listener.RecordListener 152 | import ru.tinkoff.aerospike.dsl.SpikeImpl 153 | import ru.tinkoff.aerospikeexamples.example.AClient 154 | import com.aerospike.client.{AerospikeException, Key, Record} 155 | import scala.collection.JavaConverters._ 156 | import ru.tinkoff.aerospikemacro.converters._ 157 | import ru.tinkoff.aerospikemacro.converters.KeyWrapper.create 158 | import scala.concurrent.ExecutionContext.Implicits.global 159 | 160 | val client = AClient.client 161 | val spike = new SpikeImpl(client) 162 | implicit val dbc = DBCredentials("ns", "setName") 163 | 164 | val stKeyWrapper = create[String]("ns", "setName") 165 | val record1 = new Record(m1, 100, 12) 166 | 167 | case class ReadHandler(key: Key = new Key("kName", "ns", 1), 168 | record: Record = new Record(Map[String, AnyRef]("k" -> new StringValue("v")).asJava, 100, 12)) extends RecordListener { 169 | def onSuccess(key: Key, record: Record): Unit = {} 170 | def onFailure(e: AerospikeException): Unit = e.printStackTrace() 171 | } 172 | 173 | val ops = List(new Operation(Type.WRITE, "operateBinName", new StringValue("operate")), Operation.get("operateBinName")) 174 | 175 | spike.callK(CallK.Operate, "strOperateKey", ops) 176 | spike.callK(CallK.Operate, "strOperateKey", any = (ops, ReadHandler(stKeyWrapper("strOperateKey"), record1))) 177 | ``` 178 | `Delete` 179 | ```scala 180 | import 
ru.tinkoff.aerospike.dsl.CallK.Delete 181 | import com.aerospike.client.{AerospikeException, Key} 182 | import com.aerospike.client.listener.DeleteListener 183 | import ru.tinkoff.aerospike.dsl.SpikeImpl 184 | import ru.tinkoff.aerospikeexamples.example.AClient 185 | import ru.tinkoff.aerospikemacro.converters._ 186 | import scala.concurrent.ExecutionContext.Implicits.global 187 | 188 | val client = AClient.client 189 | val spike = new SpikeImpl(client) 190 | implicit val dbc = DBCredentials("ns", "setName") 191 | 192 | case class DeleteHandler(key: Key = new Key("kName", "ns", 1)) extends DeleteListener { 193 | def onSuccess(key: Key, existed: Boolean): Unit = {} 194 | def onFailure(e: AerospikeException): Unit = e.printStackTrace() 195 | } 196 | 197 | spike.callK(Delete, "strDeleteKey", DeleteHandler()) 198 | spike.callK(Delete, "strDeleteKey") 199 | ``` 200 | `Touch` 201 | ```scala 202 | import ru.tinkoff.aerospike.dsl.CallK.Touch 203 | import com.aerospike.client.{AerospikeException, Key} 204 | import com.aerospike.client.listener.WriteListener 205 | import ru.tinkoff.aerospike.dsl.SpikeImpl 206 | import ru.tinkoff.aerospikeexamples.example.AClient 207 | import ru.tinkoff.aerospikemacro.converters._ 208 | import scala.concurrent.ExecutionContext.Implicits.global 209 | 210 | val client = AClient.client 211 | val spike = new SpikeImpl(client) 212 | implicit val dbc = DBCredentials("ns", "setName") 213 | 214 | case class WriteHandler(k: Key = new Key("kName", "ns", 1)) extends WriteListener { 215 | def onSuccess(key: Key): Unit = {} 216 | def onFailure(e: AerospikeException): Unit = e.printStackTrace() 217 | } 218 | 219 | spike.callK(Touch, "strTouchKey", WriteHandler()) 220 | spike.callK(Touch, 3) 221 | ``` 222 | `Execute` 223 | ```scala 224 | import ru.tinkoff.aerospike.dsl.CallK.Execute 225 | import com.aerospike.client.{AerospikeException, Key} 226 | import com.aerospike.client.listener.ExecuteListener 227 | import ru.tinkoff.aerospike.dsl.SpikeImpl 228 | 
import ru.tinkoff.aerospikeexamples.example.AClient 229 | import ru.tinkoff.aerospikemacro.converters._ 230 | import com.aerospike.client.Value.StringValue 231 | import ru.tinkoff.aerospike.dsl.Param1 232 | import scala.concurrent.ExecutionContext.Implicits.global 233 | 234 | val client = AClient.client 235 | val spike = new SpikeImpl(client) 236 | implicit val dbc = DBCredentials("ns", "setName") 237 | 238 | case class ExecuteHandler(k: Key = new Key("kName", "ns", 1), obj: Object = "") extends ExecuteListener { 239 | def onSuccess(key: Key, obj: Object): Unit = {} 240 | def onFailure(e: AerospikeException): Unit = e.printStackTrace() 241 | } 242 | 243 | spike.callK(Execute, "strExecKey", Param1("pkg", "fName", List(new StringValue("str")), None, Option(ExecuteHandler()))) 244 | spike.callK(Execute, "strExecKey", Param1("pkg", "fName", List(new StringValue("str")))) 245 | ``` 246 | `Exists` 247 | ```scala 248 | import ru.tinkoff.aerospike.dsl.CallK.Exists 249 | import com.aerospike.client.{AerospikeException, Key} 250 | import ru.tinkoff.aerospike.dsl.SpikeImpl 251 | import ru.tinkoff.aerospikeexamples.example.AClient 252 | import ru.tinkoff.aerospikemacro.converters._ 253 | import com.aerospike.client.listener.ExistsListener 254 | import scala.concurrent.ExecutionContext.Implicits.global 255 | 256 | val client = AClient.client 257 | val spike = new SpikeImpl(client) 258 | implicit val dbc = DBCredentials("ns", "setName") 259 | 260 | case class ExistsHandler(k: Key = new Key("kName", "ns", 1), exists: Boolean = true) extends ExistsListener { 261 | def onSuccess(key: Key, exists: Boolean): Unit = {} 262 | def onFailure(e: AerospikeException): Unit = e.printStackTrace() 263 | } 264 | 265 | spike.callK(Exists, "strKey", ExistsHandler()) 266 | spike.callK(Exists, "strKey") 267 | ``` 268 | # Array[Key] 269 | `Exists` 270 | ```scala 271 | import ru.tinkoff.aerospike.dsl.CallKs.Exists 272 | import com.aerospike.client.{AerospikeException, Key} 273 | import 
ru.tinkoff.aerospike.dsl.SpikeImpl 274 | import ru.tinkoff.aerospikeexamples.example.AClient 275 | import ru.tinkoff.aerospikemacro.converters._ 276 | import com.aerospike.client.listener.{ExistsArrayListener, ExistsSequenceListener} 277 | import scala.concurrent.ExecutionContext.Implicits.global 278 | 279 | val client = AClient.client 280 | val spike = new SpikeImpl(client) 281 | implicit val dbc = DBCredentials("ns", "setName") 282 | 283 | case class ExistsArrayHandler(keys: Array[Key] = Array(new Key("kName", "ns", 1)), exists: Array[Boolean] = 284 | Array(true)) extends ExistsArrayListener { 285 | def onSuccess(keys: Array[Key], exists: Array[Boolean]): Unit = {} 286 | def onFailure(e: AerospikeException): Unit = e.printStackTrace() 287 | } 288 | case class ExistsSequenceHandler(k: Key = new Key("kName", "ns", 1), exists: Boolean = true) extends ExistsSequenceListener { 289 | def onExists(key: Key, exists: Boolean): Unit = {} 290 | def onSuccess(): Unit = {} 291 | def onFailure(e: AerospikeException): Unit = e.printStackTrace() 292 | } 293 | 294 | spike.callKs(Exists, Array("sk1", "sk2"), ExistsArrayHandler()) 295 | spike.callKs(Exists, Array("sk1", "sk2"), ExistsSequenceHandler()) 296 | spike.callKs(Exists, Array("sk1", "sk2")) 297 | ``` 298 | -------------------------------------------------------------------------------- /cookbook/domain.md: -------------------------------------------------------------------------------- 1 | # ByteSegment 2 | 3 | Package: `ru.tinkoff.aerospikescala.domain` 4 | ```scala 5 | case class ByteSegment(bytes: Array[Byte], offset: Int, length: Int) 6 | ``` 7 | corresponds to `com.aerospike.client.Value` 8 | ```java 9 | ByteSegmentValue(byte[] bytes, int offset, int length) 10 | ``` 11 | 12 | # Params for Call function 13 | 14 | ```scala 15 | case class Param1(packageName: String, functionName: String, functionArgs: List[Value], 16 | statement: Option[Statement] = None, listener: Option[ExecuteListener] = None) extends Param 17 | 18 
| case class Param2(statement: Statement, listener: Option[RecordSequenceListener] = None) extends Param 19 | 20 | case class Param3(namespace: String, setName: String, binNames: List[String], callback: Option[ScanCallback] = None) 21 | 22 | case class Param4(namespace: String, setName: String, binNames: List[String], listener: Option[RecordSequenceListener]) 23 | 24 | case class Param5(code: String, serverPath: String, language: Language) 25 | ``` 26 | 27 | # SingleBin[T] 28 | This type works with the `callKB` function. Available for any type of `Bin` and the operations `Put, Append, Prepend, Add`. 29 | ```scala 30 | case class SingleBin[B](name: String, value: B) 31 | ``` 32 | 33 | # MBin[T] 34 | This type works with the `callKB` function. Applicable when you need to pass more than one value in one operation. Available for any type of `Bin` and the operations `Put, Append, Prepend, Add`. 35 | ```scala 36 | case class MBin[B](values: Map[String, B]) 37 | ``` 38 | 39 | # DBCredentials 40 | Package: `ru.tinkoff.aerospikemacro.domain` 41 | Contains database credentials - namespace and setName. 42 | ```scala 43 | case class DBCredentials(namespace: String, setname: String) 44 | ``` 45 | `Note` for test usage it's recommended to use ```scala DBCredentials("test", "test")``` -------------------------------------------------------------------------------- /cookbook/protobuf/ProtoBinWrapper.md: -------------------------------------------------------------------------------- 1 | # ProtoBinWrapper 2 | 3 | Protobuf serialization is available only for types in bounds: 4 | ```scala 5 | I <: GeneratedMessage with Message[I] with Updatable[I] 6 | ``` 7 | 8 | To do that, you need to get generated models of your data with the scalaPB plugin ```https://github.com/scalapb/ScalaPB``` 9 | 10 | Once you've generated everything you need, you'll be able 11 | to create functions for operating with a `protobuffed` model. 
12 | 13 | For example, to put protobuffed data: 14 | 15 | ```scala 16 | def put[K, I <: GeneratedMessage with Message[I] with Updatable[I], R <: ProtoBinWrapper[I]] 17 | (k: K, bin: SingleBin[I])(implicit kw: KeyWrapper[K], 18 | bw: R, e: ExecutionContext, 19 | pw: Option[WritePolicy] = None): Future[Unit] = { 20 | client.callKB[K, I](CallKB.Put, k, bin)(kw, bw, pw) 21 | } 22 | ``` 23 | 24 | ...and to get it: 25 | 26 | ```scala 27 | def get[I <: GeneratedMessage with Message[I] with Updatable[I]] 28 | (k: String)(implicit kw: KeyWrapper[String], bw: ProtoBinWrapper[I], 29 | e: ExecutionContext, pw: Option[WritePolicy] = None): Future[Map[String, Option[I]]] = { 30 | absGet[String, I, ProtoBinWrapper[I]](k)(kw, bw, e, pw) 31 | } 32 | ``` 33 | 34 | ...where ```def absGet``` is: 35 | 36 | ```scala 37 | def absGet[K, I <: GeneratedMessage with Message[I] with Updatable[I], R <: ProtoBinWrapper[I]] 38 | (k: K)(implicit kw: KeyWrapper[K], bw: R, e: ExecutionContext, 39 | pw: Option[WritePolicy] = None): Future[Map[String, Option[I]]] = { 40 | client.getByKey[K, I](k)(kw, bw, e, pw).map(r => r.map(_._1).getOrElse(throw new Exception("No data found"))) 41 | } 42 | ``` 43 | 44 | After that you can call those functions to operate with Aerospike: 45 | 46 | ```scala 47 | val one = Designer("Karl Lagerfeld", 83) 48 | val many = Designers(List(one, Designer("Diane von Furstenberg", 70), Designer("Donatella Versace", 61))) 49 | 50 | db.put("protoDesigner", SingleBin("pDesigner", one)) 51 | db.put("protoDesigners", SingleBin("pDesigners", many)) 52 | 53 | db.get[Designer]("protoDesigner") 54 | db.get[Designers]("protoDesigners") 55 | ``` 56 | 57 | PS. 58 | And don't forget to add `import ProtoBinWrapper._` so it can create all wrappers for you 59 | 60 | Full example is in the `example` module. 
-------------------------------------------------------------------------------- /cookbook/schemes/anyBinTypes.md: -------------------------------------------------------------------------------- 1 | # One Key and Any-Bin-types schema 2 | 3 | Let's try to use `Scheme[String]` where `Key` is `String` and `Bin` can be `any type` you like. 4 | ```scala 5 | import com.aerospike.client.Value.MapValue 6 | import ru.tinkoff.aerospike.dsl.errors.AerospikeDSLError 7 | import ru.tinkoff.aerospike.dsl.scheme.Scheme 8 | import ru.tinkoff.aerospike.dsl.{CallKB, SpikeImpl} 9 | import ru.tinkoff.aerospikemacro.converters.{BinWrapper, KeyWrapper} 10 | import ru.tinkoff.aerospikescala.domain.SingleBin 11 | import shapeless._ 12 | 13 | import scala.collection.JavaConverters._ 14 | import scala.concurrent.{ExecutionContext, Future} 15 | import scala.language.experimental.macros 16 | 17 | 18 | case class Sample(name: String, i: Int) 19 | 20 | case class SampleScheme(spike: SpikeImpl) extends Scheme[String] { 21 | implicit val dbc = AClient.dbc 22 | 23 | implicit val sampleWrap = new BinWrapper[Sample] { 24 | override def fetch(any: Any): Option[Sample] = any match { 25 | case m: java.util.HashMap[Any, Any] => scala.util.Try(Sample(m.asScala("name").toString, m.asScala("i").toString.toInt)).toOption 26 | case _ => None 27 | } 28 | } 29 | 30 | implicit val sampleMapWrap = new BinWrapper[Map[Sample, String]] { 31 | val rex = "Sample\\((\\w+)\\,(\\d+)\\)" 32 | val trRex = rex.r 33 | 34 | override def toValue(v: Map[Sample, String]): MapValue = 35 | new MapValue(v.map { case (sample, value) => sample.toString -> value }.asJava) 36 | 37 | override def fetch(any: Any): Option[Map[Sample, String]] = any match { 38 | case m: java.util.HashMap[Any, String] => scala.util.Try(m.asScala.view.map { 39 | case (tr, v) if tr.toString.matches("Sample\\((\\w+)\\,(\\d+)\\)") => 40 | tr.toString match { 41 | case trRex(n, i) => Sample(n, i.toInt) -> v 42 | } 43 | }.toMap).toOption 44 | case _ => None 45 | 
} 46 | } 47 | 48 | // Here is how to put basic types (you can use it as example and do the same for different kind of operations like Prepend, Add and so on) 49 | def putString(k: String, a: SingleBin[String])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, String](CallKB.Put, k, a) 50 | 51 | def putInt(k: String, a: SingleBin[Int])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Int](CallKB.Put, k, a) 52 | 53 | def putFloat(k: String, a: SingleBin[Float])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Float](CallKB.Put, k, a) 54 | 55 | def putDouble(k: String, a: SingleBin[Double])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Double](CallKB.Put, k, a) 56 | 57 | def putBoolean(k: String, a: SingleBin[Boolean])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Boolean](CallKB.Put, k, a) 58 | 59 | def putShort(k: String, a: SingleBin[Short])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Short](CallKB.Put, k, a) 60 | 61 | def putLong(k: String, a: SingleBin[Long])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Long](CallKB.Put, k, a) 62 | 63 | def putChar(k: String, a: SingleBin[Char])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Char](CallKB.Put, k, a) 64 | 65 | def putByte(k: String, a: SingleBin[Byte])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Byte](CallKB.Put, k, a) 66 | 67 | def putMap(k: String, a: SingleBin[Map[String, String]])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Map[String, String]](CallKB.Put, k, a) 68 | 69 | def putMapSimpleString(k: String, a: SingleBin[Map[Sample, String]])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Map[Sample, String]](CallKB.Put, k, a) 70 | 71 | def putMapIS(k: String, a: SingleBin[Map[Int, String]])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Map[Int, String]](CallKB.Put, k, a) 
72 | 73 | def putMapSI(k: String, a: SingleBin[Map[String, Int]])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Map[String, Int]](CallKB.Put, k, a) 74 | 75 | def putMapLong(k: String, a: SingleBin[Map[String, Long]])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Map[String, Long]](CallKB.Put, k, a) 76 | 77 | def putMapFloat(k: String, a: SingleBin[Map[String, Float]])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Map[String, Float]](CallKB.Put, k, a) 78 | 79 | def putMapDouble(k: String, a: SingleBin[Map[String, Double]])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Map[String, Double]](CallKB.Put, k, a) 80 | 81 | def putHList(k: String, a: SingleBin[String :: Int :: Int :: HNil])(implicit e: ExecutionContext): Future[Unit] = 82 | spike.callKB[String, String :: Int :: Int :: HNil](CallKB.Put, k, a) 83 | 84 | def putHList2(k: String, a: SingleBin[String :: Int :: Float :: List[String] :: List[Int] :: HNil])(implicit e: ExecutionContext): Future[Unit] = 85 | spike.callKB[String, String :: Int :: Float :: List[String] :: List[Int] :: HNil](CallKB.Put, k, a) 86 | 87 | def putListSt(k: String, a: SingleBin[List[String]])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, List[String]](CallKB.Put, k, a) 88 | 89 | def putListInt(k: String, a: SingleBin[List[Int]])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, List[Int]](CallKB.Put, k, a) 90 | 91 | def putListLong(k: String, a: SingleBin[List[Long]])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, List[Long]](CallKB.Put, k, a) 92 | 93 | def putListFloat(k: String, a: SingleBin[List[Float]])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, List[Float]](CallKB.Put, k, a) 94 | 95 | def putListDouble(k: String, a: SingleBin[List[Double]])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, List[Double]](CallKB.Put, k, a) 96 | 97 | def putSample(k: String, 
a: SingleBin[Sample])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Sample](CallKB.Put, k, a) 98 | 99 | def putTuple(k: String, a: SingleBin[(String, Long, Double)])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, (String, Long, Double)](CallKB.Put, k, a) 100 | 101 | def getString(k: String)(implicit e: ExecutionContext): Future[String] = spike.getByKey[String, String](k, Nil).map(_._1).map(_.values.head.getOrElse("no cat found")) 102 | 103 | def getInt(k: String)(implicit e: ExecutionContext): Future[Int] = spike.getByKey[String, Int](k, Nil).map(_._1).map(_.values.head.getOrElse(0)) 104 | 105 | def getFloat(k: String)(implicit e: ExecutionContext): Future[Float] = spike.getByKey[String, Float](k, Nil).map(_._1).map(_.values.head.getOrElse(0F)) 106 | 107 | def getDouble(k: String)(implicit e: ExecutionContext): Future[Double] = spike.getByKey[String, Double](k, Nil).map(_._1).map(_.values.head.getOrElse(0)) 108 | 109 | def getBoolean(k: String)(implicit e: ExecutionContext): Future[Boolean] = spike.getByKey[String, Boolean](k, Nil).map(_._1).map(_.values.head.getOrElse(false)) 110 | 111 | def getShort(k: String)(implicit e: ExecutionContext): Future[Short] = spike.getByKey[String, Short](k, Nil).map(_._1).map(_.values.head.getOrElse(0)) 112 | 113 | def getLong(k: String)(implicit e: ExecutionContext): Future[Long] = spike.getByKey[String, Long](k, Nil).map(_._1).map(_.values.head.getOrElse(0L)) 114 | 115 | def getChar(k: String)(implicit e: ExecutionContext): Future[Char] = spike.getByKey[String, Char](k, Nil).map(_._1).map(_.values.head.getOrElse('a')) 116 | 117 | def getByte(k: String)(implicit e: ExecutionContext): Future[Byte] = spike.getByKey[String, Byte](k, Nil).map(_._1).map(_.values.head.getOrElse(Byte.MaxValue)) 118 | 119 | def getHList(k: String)(implicit e: ExecutionContext): Future[String :: Int :: Int :: HNil] = spike 120 | .getByKey[String, String :: Int :: Int :: HNil](k, Nil).map(_._1).map(_.values.head.get) 
121 | 122 | def getHList2(k: String)(implicit e: ExecutionContext): Future[String :: Int :: Float :: List[String] :: List[Int] :: HNil] = spike 123 | .getByKey[String, String :: Int :: Float :: List[String] :: List[Int] :: HNil](k, Nil).map(_._1).map(_.values.head.get) 124 | 125 | def getSample(k: String)(implicit e: ExecutionContext): Future[Sample] = spike 126 | .getByKey[String, Sample](k, Nil).map(_._1).map(_.values.head.getOrElse(throw AerospikeDSLError("Failed to get Sample value from Aerospike"))) 127 | 128 | def getTuple(k: String)(implicit e: ExecutionContext): Future[(String, Long, Double)] = spike 129 | .getByKey[String, (String, Long, Double)](k, Nil).map(_._1).map(_.values.head.getOrElse(throw AerospikeDSLError("Failed to get Sample value from Aerospike"))) 130 | 131 | def getListSt(k: String)(implicit e: ExecutionContext): Future[List[String]] = spike.getByKey[String, List[String]](k, Nil).map(_._1).map(_.values.head.getOrElse(Nil)) 132 | 133 | def getListInt(k: String)(implicit e: ExecutionContext): Future[List[Int]] = spike.getByKey[String, List[Int]](k, Nil).map(_._1).map(_.values.head.getOrElse(Nil)) 134 | 135 | def getListLong(k: String)(implicit e: ExecutionContext): Future[List[Long]] = spike.getByKey[String, List[Long]](k, Nil).map(_._1).map(_.values.head.getOrElse(Nil)) 136 | 137 | def getListFloat(k: String)(implicit e: ExecutionContext): Future[List[Float]] = spike.getByKey[String, List[Float]](k, Nil).map(_._1).map(_.values.head.getOrElse(Nil)) 138 | 139 | def getListDouble(k: String)(implicit e: ExecutionContext): Future[List[Double]] = spike.getByKey[String, List[Double]](k, Nil).map(_._1).map(_.values.head.getOrElse(Nil)) 140 | 141 | def getMap(k: String)(implicit e: ExecutionContext): Future[Map[String, String]] = spike.getByKey[String, Map[String, String]](k, Nil).map(_._1.values.head.getOrElse(Map())) 142 | 143 | def getMapIS(k: String)(implicit e: ExecutionContext): Future[Map[Int, String]] = spike.getByKey[String, Map[Int, 
String]](k, Nil).map(_._1.values.head.getOrElse(Map())) 144 | 145 | def getMapSI(k: String)(implicit e: ExecutionContext): Future[Map[String, Int]] = spike.getByKey[String, Map[String, Int]](k, Nil).map(_._1.values.head.getOrElse(Map())) 146 | 147 | def getMapLong(k: String)(implicit e: ExecutionContext): Future[Map[String, Long]] = spike.getByKey[String, Map[String, Long]](k, Nil).map(_._1.values.head.getOrElse(Map())) 148 | 149 | def getMapFloat(k: String)(implicit e: ExecutionContext): Future[Map[String, Float]] = spike.getByKey[String, Map[String, Float]](k, Nil).map(_._1.values.head.getOrElse(Map())) 150 | 151 | def getMapDouble(k: String)(implicit e: ExecutionContext): Future[Map[String, Double]] = spike.getByKey[String, Map[String, Double]](k, Nil).map(_._1.values.head.getOrElse(Map())) 152 | 153 | def getMapSimpleString(k: String)(implicit e: ExecutionContext): Future[Map[Sample, String]] = spike
 154 | .getByKey[String, Map[Sample, String]](k, Nil)(kC = kw, bC = sampleMapWrap, None, e).map(_._1.values.head.getOrElse(Map())) 155 | 156 | } 157 | ``` 158 | `implicit val sampleWrap` - This is a custom wrapper for a Bin of type `Sample`. By default all case classes are stored in Aerospike as `Map[String, Any]`, where keys are the names of the parameters. 159 | For example 160 | ```scala 161 | Sample(name = "sampleName", i = 2) 162 | ``` 163 | will be stored as `Map("name" -> "sampleName", "i" -> 2)`. Inside `com.aerospike.client.Record` it will look like this: 164 | ```sh 165 | bins:(sampleBin:{name=sampleName, i=2}) 166 | ``` 167 | To get your `Sample` value back properly you need to write a fetch() function as shown above. 168 | `implicit val sampleMapWrap` - This is a custom wrapper for a Bin of type `Map[Sample, String]`. For example for: 169 | ```scala 170 | Map(Sample(t1,3) -> v1, Sample(t2,2) -> v2, Sample(t3,1) -> v3) 171 | ``` 172 | `Bin` will be stored like `com.aerospike.client.Value.MapValue`, where keys are `Sample(...).toString()`. 
173 | So inside `com.aerospike.client.Record` it will look like this: 174 | ```sh 175 | bins:(BIN_NAME:{Sample(t1,3)=v1, Sample(t2,2)=v2, Sample(t3,1)=v3}) 176 | ``` 177 | And if you want to get your `Sample` type keys to be unwrapped properly you need to write fetch() function as it is shown above. 178 | 179 | **Note**, Aerospikes `AQL` is good for values with `String` key types. So if you want to store `Map` with key of any other type - 180 | you will see nothing in terminal. But you can use function, which gets that value by key (for example `getMapIS(...)` above) and 181 | print it to be sure it all works fine. An example for `Map[Int, String]` is in `ru.tinkoff.aerospikeexamples.example.SampleApp.scala`: 182 | ```scala 183 | myObj.putMapIS("mapIntString", SingleBin("mapISName", Map(9 -> "v1", 2 -> "v2", 3 -> "v3"))) 184 | ``` 185 | `HList` is stored as `Map[String, Any]` in Aerospike's `MapValue`. For example 186 | ```scala "hlist" :: 2 :: 3 :: HNil``` will be stored as `Map("0" -> "hlist", "1" -> 2, "2" -> 3)`. 
187 | 188 | Now we can use that scheme in App: 189 | ```scala 190 | import ru.tinkoff.aerospikemacro.printer.Printer 191 | import ru.tinkoff.aerospikescala.domain.SingleBin 192 | import shapeless._ 193 | 194 | import scala.concurrent.Await 195 | import scala.concurrent.duration.Duration.Inf 196 | import scala.language.experimental.macros 197 | 198 | 199 | object SampleApp extends App { 200 | 201 | import scala.concurrent.ExecutionContext.Implicits.global 202 | 203 | val spike = AClient.spikeImpl 204 | 205 | val myObj = SampleScheme(spike) 206 | 207 | myObj.putMapSimpleString("mapSimpleString", SingleBin("TmapBinName", Map(Sample("t1", 3) -> "v1", Sample("t2", 2) -> "v2", Sample("t3", 1) -> "v3"))) 208 | myObj.putMap("mapKey", SingleBin("mapBinName", Map("a" -> "v1", "b" -> "v2", "c" -> "v3"))) 209 | myObj.putMapIS("mapIntString", SingleBin("mapISName", Map(9 -> "v1", 2 -> "v2", 3 -> "v3"))) 210 | myObj.putMapSI("mapStringInt", SingleBin("mapSIName", Map("a" -> 33, "b" -> 44, "c" -> 99))) 211 | myObj.putMapLong("mapLong", SingleBin("mapLongName", Map("a" -> 30030L, "b" -> 40004L, "c" -> 90009L))) 212 | myObj.putMapFloat("mapFloat", SingleBin("mapFloatName", Map("a" -> 30.3F, "b" -> 400.04F, "c" -> 9.01F))) 213 | myObj.putMapDouble("mapDouble", SingleBin("mapDoubleName", Map("a" -> 300.30, "b" -> 4000.4, "c" -> 90.09))) 214 | 215 | myObj.putString("stringKey", SingleBin("stringBinName", "strBinValue")) 216 | myObj.putInt("intBinKey", SingleBin("intBinName", 202)) 217 | myObj.putFloat("floatBinKey", SingleBin("floatBinName", 1.11F)) 218 | myObj.putDouble("doubleBinKey", SingleBin("doubleBinName", 3.3)) 219 | myObj.putBoolean("boolBinKey", SingleBin("boolBinName", true)) 220 | myObj.putShort("shortBinKey", SingleBin("shortBinName", 2)) 221 | myObj.putLong("longBinKey", SingleBin("longBinName", 9000900L)) 222 | myObj.putChar("charBinKey", SingleBin("charBinName", 'h')) 223 | myObj.putByte("byteBinKey", SingleBin("byteBinName", Byte.MinValue)) 224 | 225 | 
myObj.putListSt("listStBinKey", SingleBin("listStringBin", List("a", "b"))) 226 | myObj.putListInt("listIntKey", SingleBin("listIntBin", List(1, 2, 3, 4))) 227 | myObj.putListLong("listLongKey", SingleBin("listLongBin", List(1000L, 2000L, 3000L, 4000L))) 228 | myObj.putListFloat("listFloatKey", SingleBin("listFloatBin", List(1.12F, 2.13F, 3.5F, 4.5F))) 229 | myObj.putListDouble("listDoubleKey", SingleBin("listDoubleBin", List(12.11, 12.13, 23.5, 46.5))) 230 | 231 | myObj.putSample("sampleKey", SingleBin("sampleBin", Sample("sampleName", 2))) 232 | myObj.putHList("hListKey", SingleBin("hListBin", "hlist" :: 2 :: 3 :: HNil)) 233 | myObj.putHList2("hListKey2", SingleBin("hListBin2", "hlist" :: 2 :: 3.12F :: List("a", "b") :: List(12, 23) :: HNil)) 234 | } 235 | ``` 236 | ...and now we can see in Aerospike: 237 | ```js 238 | aql> select * from test.test33 239 | [ 240 | { 241 | "TmapBinName": {} // explanation is below 242 | }, 243 | { 244 | "intBinName": 202 245 | }, 246 | { 247 | "longBinName": 9000900 248 | }, 249 | { 250 | "shortBinName": 2 251 | }, 252 | { 253 | "boolBinName": 1 254 | }, 255 | { 256 | "listStringBin": [ 257 | "a", 258 | "b" 259 | ] 260 | }, 261 | { 262 | "charBinName": "h" 263 | }, 264 | { 265 | "mapLongName": { 266 | "a": 30030, 267 | "b": 40004, 268 | "c": 90009 269 | } 270 | }, 271 | { 272 | "listFloatBin": [ 273 | 1.1200000047683716, 274 | 2.130000114440918, 275 | 3.5, 276 | 4.5 277 | ] 278 | }, 279 | { 280 | "mapBinName": { 281 | "a": "v1", 282 | "b": "v2", 283 | "c": "v3" 284 | } 285 | }, 286 | { 287 | "byteBinName": -128 288 | }, 289 | { 290 | "doubleBinName": 3.2999999999999998 291 | }, 292 | { 293 | "mapISName": {} 294 | }, 295 | { 296 | "listLongBin": [ 297 | 1000, 298 | 2000, 299 | 3000, 300 | 4000 301 | ] 302 | }, 303 | { 304 | "listIntBin": [ 305 | 1, 306 | 2, 307 | 3, 308 | 4 309 | ] 310 | }, 311 | { 312 | "listDoubleBin": [ 313 | 12.109999999999999, 314 | 12.130000000000001, 315 | 23.5, 316 | 46.5 317 | ] 318 | }, 319 | { 320 | 
"hListBin2": { 321 | "0": "hlist", 322 | "1": 2, 323 | "2": 3.119999885559082, 324 | "3": "AC ED 00 05 73 72 00 32 73 63 61 6C 61 2E 63 6F 6C 6C 65 63 74 69 6F 6E 2E 69 6D 6D 75 74 61 62 6C 65 2E 4C 69 73 74 24 53 65 72 69 61 6C 69 7A 61 74 69 6F 6E 50 72 6F 78 79 00 00 00 00 00 00 00 01 03 00 00 78 70 74 00 01 61 74 00 01 62 73 72 00 2C 73 63 61 6C 61 2E 63 6F 6C 6C 65 63 74 69 6F 6E 2E 69 6D 6D 75 74 61 62 6C 65 2E 4C 69 73 74 53 65 72 69 61 6C 69 7A 65 45 6E 64 24 8A 5C 63 5B F7 53 0B 6D 02 00 00 78 70 78", 325 | "4": "AC ED 00 05 73 72 00 32 73 63 61 6C 61 2E 63 6F 6C 6C 65 63 74 69 6F 6E 2E 69 6D 6D 75 74 61 62 6C 65 2E 4C 69 73 74 24 53 65 72 69 61 6C 69 7A 61 74 69 6F 6E 50 72 6F 78 79 00 00 00 00 00 00 00 01 03 00 00 78 70 73 72 00 11 6A 61 76 61 2E 6C 61 6E 67 2E 49 6E 74 65 67 65 72 12 E2 A0 A4 F7 81 87 38 02 00 01 49 00 05 76 61 6C 75 65 78 72 00 10 6A 61 76 61 2E 6C 61 6E 67 2E 4E 75 6D 62 65 72 86 AC 95 1D 0B 94 E0 8B 02 00 00 78 70 00 00 00 0C 73 71 00 7E 00 02 00 00 00 17 73 72 00 2C 73 63 61 6C 61 2E 63 6F 6C 6C 65 63 74 69 6F 6E 2E 69 6D 6D 75 74 61 62 6C 65 2E 4C 69 73 74 53 65 72 69 61 6C 69 7A 65 45 6E 64 24 8A 5C 63 5B F7 53 0B 6D 02 00 00 78 70 78" 326 | } 327 | }, 328 | { 329 | "hListBin": { 330 | "0": "hlist", 331 | "1": 2, 332 | "2": 3 333 | } 334 | }, 335 | { 336 | "stringBinName": "strBinValue" 337 | }, 338 | { 339 | "mapFloatName": { 340 | "a": 30.299999237060547, 341 | "b": 400.04000854492188, 342 | "c": 9.0100002288818359 343 | } 344 | }, 345 | { 346 | "sampleBin": { 347 | "i": "2", 348 | "name": "sampleName" 349 | } 350 | }, 351 | { 352 | "mapDoubleName": { 353 | "a": 300.30000000000001, 354 | "b": 4000.4000000000001, 355 | "c": 90.090000000000003 356 | } 357 | }, 358 | { 359 | "floatBinName": 1.1100000143051147 360 | }, 361 | { 362 | "mapSIName": { 363 | "a": 33, 364 | "b": 44, 365 | "c": 99 366 | } 367 | } 368 | ] 369 | ``` 370 | `AQL` has an issue, it can't show us complicated keys stored, but using current DSL we can 
`getMapSimpleString(...)` and look: 371 | ```scala 372 | //"TmapBinName": {} 373 | 374 | mapSimpleString => Map(Sample(t1,3) -> v1, Sample(t2,2) -> v2, Sample(t3,1) -> v3) 375 | ``` 376 | all information is here! 377 | 378 | To clean up all data stored during testing with apps made above, write some cleaner app and add there all keys. 379 | For example: 380 | ```scala 381 | import ru.tinkoff.aerospike.dsl.SpikeImpl 382 | import ru.tinkoff.aerospikemacro.converters.KeyWrapper 383 | 384 | import scala.concurrent.ExecutionContext.Implicits.global 385 | import scala.concurrent.duration.Duration.Inf 386 | import scala.concurrent.{Await, Future} 387 | import scala.language.experimental.macros 388 | 389 | 390 | object CleanUp extends App { 391 | 392 | val client = AClient.client 393 | val spike = new SpikeImpl(client) 394 | implicit val dbc = AClient.dbc 395 | 396 | val keys = List("mapKey", "mapSimpleString", "mapStringString", "mapIntString", "mapStringInt", "mapLong", "mapFloat", "mapDouble", "stringKey", 397 | "intBinKey", "floatBinKey", "doubleBinKey", "boolBinKey", "shortBinKey", "longBinKey", "charBinKey", "byteBinKey", 398 | "listStBinKey", "listIntKey", "listLongKey", "listFloatKey", "listDoubleKey", "sampleKey", "hListKey", "oneKey", "manyKey", "hListKey2", "tuple3Key") 399 | 400 | val result = for (key <- keys) yield spike.deleteK(key) 401 | Await.result(Future.sequence(result), Inf) 402 | 403 | } 404 | ``` 405 | Now set is empty: 406 | ```js 407 | aql> select * from test.test33 408 | [ 409 | ] 410 | ``` -------------------------------------------------------------------------------- /cookbook/schemes/oneBinType.md: -------------------------------------------------------------------------------- 1 | # One Key-Bin type schema 2 | 3 | Let's try to use `KBScheme[String, Int]` where `Key` is `String` and `Bin` is `Int`. 
4 | ```scala 5 | import ru.tinkoff.aerospike.dsl.{CallKB, SpikeImpl} 6 | import ru.tinkoff.aerospike.dsl.scheme.KBScheme 7 | import ru.tinkoff.aerospikemacro.converters._ 8 | import scala.concurrent.{ExecutionContext, Future} 9 | import ru.tinkoff.aerospike.dsl.SpikeImpl 10 | import ru.tinkoff.aerospikeexamples.example.AClient 11 | import ru.tinkoff.aerospikescala.domain.{MBin, SingleBin} 12 | import scala.concurrent.ExecutionContext.Implicits.global 13 | 14 | case class SampleKBScheme(spike: SpikeImpl) extends KBScheme[String, Int] { 15 | implicit val dbc = AClient.dbc 16 | 17 | def putOne(k: String, a: SingleBin[Int])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Int](CallKB.Put, k, a) 18 | 19 | def putMany(k: String, a: MBin[Int])(implicit e: ExecutionContext): Future[Unit] = spike.callKB[String, Int](CallKB.Put, k, a) 20 | 21 | def getOne(k: String)(implicit e: ExecutionContext): Future[(String, Option[Int])] = spike.getByKey[String, Int](k, Nil).map(_._1.head) 22 | 23 | def getMany(k: String)(implicit e: ExecutionContext): Future[Map[String, Option[Int]]] = spike.getByKey[String, Int](k, Nil).map(_._1) 24 | } 25 | ``` 26 | `def putOne` - this function will put one `Bin` with `Int` value, 27 | `def putMany` - this one will put your `Map[String, Int]` to Aerospike like ```scala Key -> List[Bin] ```. 
28 | For example ```scala Map("a" -> 2, "b" -> 13)``` for key = "k1" will look like this: 29 | ```scala k1 -> List(Bin("a", 2), Bin("b", 13)) ``` 30 | 31 | Run application to test that scheme: 32 | ```scala 33 | import ru.tinkoff.aerospike.dsl.SpikeImpl 34 | import ru.tinkoff.aerospikemacro.printer.Printer 35 | import ru.tinkoff.aerospikescala.domain.{MBin, SingleBin} 36 | import scala.concurrent.Await 37 | import scala.concurrent.duration.Duration.Inf 38 | import scala.language.experimental.macros 39 | import scala.concurrent.ExecutionContext.Implicits.global 40 | 41 | object KBSampleApp extends App { 42 | 43 | val client = AClient.client 44 | val spike = new SpikeImpl(client) 45 | 46 | val myObj = SampleKBScheme(spike) 47 | 48 | myObj.putOne("oneKey", SingleBin("oneName", 2)) 49 | myObj.putMany("manyKey", MBin(Map("aName" -> 2, "bName" -> 13))) 50 | 51 | } 52 | ``` 53 | After running `KBSampleApp` select all data from that set: 54 | ```js 55 | aql> select * from test.test33 56 | [ 57 | { 58 | "aName": 2, 59 | "bName": 13 60 | }, 61 | { 62 | "oneName": 2 63 | } 64 | ] 65 | ``` 66 | -------------------------------------------------------------------------------- /project/Dependencies.scala: -------------------------------------------------------------------------------- 1 | import sbt._ 2 | 3 | //noinspection TypeAnnotation 4 | object Dependencies { 5 | 6 | object Versions { 7 | val scala = "2.12.1" 8 | val scalas = Seq("2.11.8", "2.12.1") 9 | 10 | val aerospikeClient = "3.3.1" 11 | val aerospikeMock = "1.0.4" 12 | val akkaHttp = "10.0.0" 13 | val junit = "4.12" 14 | val mockito = "2.2.26" 15 | val scalatest = "3.0.1" 16 | val shapeless = "2.3.2" 17 | val typesafeConfig = "1.3.1" 18 | } 19 | 20 | val testLibs = Seq( 21 | "org.scalatest" %% "scalatest" % Versions.scalatest, 22 | "org.mockito" % "mockito-core" % Versions.mockito, 23 | "junit" % "junit" % Versions.junit, 24 | "com.typesafe.akka" %% "akka-http-spray-json" % Versions.akkaHttp, 25 | 
"com.github.danymarialee" %% "aerospike-mock" % Versions.aerospikeMock 26 | ).map(_ % "test") 27 | 28 | def mainLibs(scalaVersion: String) = Seq( 29 | "com.aerospike" % "aerospike-client" % Versions.aerospikeClient, 30 | "com.chuusai" %% "shapeless" % Versions.shapeless, 31 | "org.scala-lang" % "scala-reflect" % scalaVersion 32 | ) 33 | 34 | def commonLibs(scalaVersion: String) = mainLibs(scalaVersion) ++ testLibs 35 | 36 | def exampleLibs(scalaVersion: String) = 37 | mainLibs(scalaVersion) ++ 38 | Seq( 39 | "com.typesafe" % "config" % Versions.typesafeConfig 40 | ) 41 | 42 | } 43 | -------------------------------------------------------------------------------- /project/assembly.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.3") -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version = 0.13.13 -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | logLevel := Level.Warn 2 | 3 | addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.3") 4 | addSbtPlugin("de.heikoseeberger" % "sbt-header" % "1.6.0") 5 | addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.0") 6 | addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0") 7 | addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "1.1") -------------------------------------------------------------------------------- /project/scalapb.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("com.thesamet" % "sbt-protoc" % "0.99.6") 2 | 3 | libraryDependencies += "com.trueaccord.scalapb" %% "compilerplugin" % "0.6.0-pre2" -------------------------------------------------------------------------------- 
/src/main/resources/reference.conf: -------------------------------------------------------------------------------- 1 | 2 | ru-tinkoff-aerospike-dsl { 3 | keyWrapper-namespace = "test" 4 | keyWrapper-setName = "test33" 5 | example-host = "127.0.0.1" 6 | example-port = 3000 7 | } -------------------------------------------------------------------------------- /src/main/scala/ru/tinkoff/aerospike/dsl/Actions.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package ru.tinkoff.aerospike.dsl 18 | 19 | /** 20 | * @author MarinaSigaeva 21 | * @since 08.09.16 22 | */ 23 | 24 | trait Call { 25 | def is(other: Call): Boolean = this.toString.equalsIgnoreCase(other.toString) 26 | } 27 | 28 | object Call extends Call { 29 | val all = List(Execute, Query, QueryAggregate, ScanAll, RemoveUdf, RegisterUdfString).mkString(", ") 30 | 31 | case object Execute extends Call 32 | 33 | case object Query extends Call 34 | 35 | case object QueryAggregate extends Call 36 | 37 | case object ScanAll extends Call 38 | 39 | case object RemoveUdf extends Call 40 | 41 | case object RegisterUdfString extends Call 42 | 43 | } 44 | 45 | trait CallKs { 46 | def is(other: CallKs): Boolean = this.toString.equalsIgnoreCase(other.toString) 47 | } 48 | 49 | object CallKs extends CallKs { 50 | 51 | case object Exists extends CallKs 52 | 53 | } 54 | 55 | trait CallKB { 56 | def is(other: CallKB): Boolean = this.toString.equalsIgnoreCase(other.toString) 57 | } 58 | 59 | object CallKB extends CallKB { 60 | val all = List(Append, Put, Prepend, Add).mkString(", ") 61 | 62 | case object Put extends CallKB 63 | 64 | case object Append extends CallKB 65 | 66 | case object Prepend extends CallKB 67 | 68 | case object Add extends CallKB 69 | 70 | } 71 | 72 | trait CallK { 73 | def is(other: CallK): Boolean = this.toString.equalsIgnoreCase(other.toString) 74 | } 75 | 76 | object CallK extends CallK { 77 | val all = List(Operate, Delete, Touch, Execute, Exists).mkString(", ") 78 | 79 | case object Operate extends CallK 80 | 81 | case object Delete extends CallK 82 | 83 | case object Touch extends CallK 84 | 85 | case object Exists extends CallK 86 | 87 | case object Execute extends CallK 88 | 89 | } -------------------------------------------------------------------------------- /src/main/scala/ru/tinkoff/aerospike/dsl/AdminProvider.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 
Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package ru.tinkoff.aerospike.dsl 18 | 19 | import java.util 20 | 21 | import com.aerospike.client.Language 22 | import com.aerospike.client.admin.{Privilege, Role, User} 23 | import com.aerospike.client.async.IAsyncClient 24 | import com.aerospike.client.policy.{AdminPolicy, Policy} 25 | import com.aerospike.client.query.{IndexCollectionType, IndexType} 26 | import com.aerospike.client.task.{IndexTask, RegisterTask} 27 | 28 | import scala.concurrent.{ExecutionContext, Future} 29 | 30 | /** 31 | * Created by danylee on 11/09/16. 
32 | */ 33 | trait AdminProvider { 34 | 35 | def client: IAsyncClient 36 | 37 | implicit val ec: ExecutionContext 38 | 39 | def createRole(policy: AdminPolicy, roleName: String, privileges: util.List[Privilege]): Future[Unit] = 40 | Future(client.createRole(policy, roleName, privileges)) 41 | 42 | def createUser(policy: AdminPolicy, user: String, password: String, roles: util.List[String]): Future[Unit] = 43 | Future(client.createUser(policy, user, password, roles)) 44 | 45 | def createIndex(policy: Policy, namespace: String, setName: String, indexName: String, 46 | binName: String, indexType: IndexType): Future[IndexTask] = 47 | Future(client.createIndex(policy, namespace, setName, indexName, binName, indexType)) 48 | 49 | def createIndex(policy: Policy, namespace: String, setName: String, indexName: String, 50 | binName: String, indexType: IndexType, indexCollectionType: IndexCollectionType): Future[IndexTask] = 51 | Future(client.createIndex(policy, namespace, setName, indexName, binName, indexType, indexCollectionType)) 52 | 53 | def queryUser(policy: AdminPolicy, user: String): Future[User] = 54 | Future(client.queryUser(policy, user)) 55 | 56 | def dropUser(policy: AdminPolicy, user: String): Future[Unit] = 57 | Future(client.dropUser(policy, user)) 58 | 59 | //def close(): Future[Unit] = Future(client.clone()) //todo 60 | 61 | def queryRoles(policy: AdminPolicy): Future[util.List[Role]] = 62 | Future(client.queryRoles(policy)) 63 | 64 | def grantPrivileges(policy: AdminPolicy, roleName: String, privileges: util.List[Privilege]): Future[Unit] = 65 | Future(client.grantPrivileges(policy, roleName, privileges)) 66 | 67 | def register(policy: Policy, clientPath: String, 68 | serverPath: String, language: Language): Future[RegisterTask] = 69 | Future(client.register(policy, clientPath, serverPath, language)) 70 | 71 | def register(policy: Policy, resourceLoader: ClassLoader, resourcePath: String, 72 | serverPath: String, language: Language): Future[RegisterTask] 
= 73 | Future(client.register(policy, resourceLoader, resourcePath, serverPath, language)) 74 | 75 | def dropRole(policy: AdminPolicy, roleName: String): Future[Unit] = 76 | Future(client.dropRole(policy, roleName)) 77 | 78 | def isConnected: Future[Boolean] = Future(client.isConnected) 79 | 80 | def queryRole(policy: AdminPolicy, roleName: String): Future[Role] = Future(client.queryRole(policy, roleName)) 81 | 82 | def grantRoles(policy: AdminPolicy, user: String, roles: util.List[String]): Future[Unit] = 83 | Future(client.grantRoles(policy, user, roles)) 84 | 85 | def dropIndex(policy: Policy, namespace: String, setName: String, indexName: String): Future[Unit] = 86 | Future(client.dropIndex(policy, namespace, setName, indexName)) 87 | 88 | def changePassword(policy: AdminPolicy, user: String, password: String): Future[Unit] = 89 | Future(client.changePassword(policy, user, password)) 90 | 91 | def queryUsers(policy: AdminPolicy): Future[util.List[User]] = 92 | Future(client.queryUsers(policy)) 93 | 94 | def revokePrivileges(policy: AdminPolicy, roleName: String, privileges: util.List[Privilege]): Future[Unit] = 95 | Future(client.revokePrivileges(policy, roleName, privileges)) 96 | 97 | def revokeRoles(policy: AdminPolicy, user: String, roles: util.List[String]): Future[Unit] = 98 | Future(client.revokeRoles(policy, user, roles)) 99 | 100 | } 101 | 102 | 103 | 104 | 105 | -------------------------------------------------------------------------------- /src/main/scala/ru/tinkoff/aerospike/dsl/CollectionsProvider.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package ru.tinkoff.aerospike.dsl 18 | 19 | import com.aerospike.client.Key 20 | import com.aerospike.client.async.IAsyncClient 21 | import com.aerospike.client.large.{LargeList, LargeMap, LargeSet, LargeStack} 22 | import com.aerospike.client.policy.WritePolicy 23 | 24 | import scala.concurrent.{ExecutionContext, Future} 25 | 26 | /** 27 | * Created by danylee on 11/09/16. 28 | */ 29 | trait CollectionsProvider { 30 | 31 | def client: IAsyncClient 32 | 33 | implicit val ec: ExecutionContext 34 | 35 | def getLargeList(policy: WritePolicy, key: Key, binName: String): Future[LargeList] = 36 | Future(client.getLargeList(policy, key, binName)) 37 | 38 | def getLargeSet(policy: WritePolicy, key: Key, binName: String, userModule: String): Future[LargeSet] = 39 | Future(client.getLargeSet(policy, key, binName, userModule)) 40 | 41 | def getLargeStack(policy: WritePolicy, key: Key, binName: String, userModule: String): Future[LargeStack] = 42 | Future(client.getLargeStack(policy, key, binName, userModule)) 43 | 44 | def getLargeMap(policy: WritePolicy, key: Key, binName: String, userModule: String): Future[LargeMap] = 45 | Future(client.getLargeMap(policy, key, binName, userModule)) 46 | 47 | } 48 | -------------------------------------------------------------------------------- /src/main/scala/ru/tinkoff/aerospike/dsl/GetProvider.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 
(the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package ru.tinkoff.aerospike.dsl 18 | 19 | import java.util 20 | 21 | import com.aerospike.client.async.IAsyncClient 22 | import com.aerospike.client.listener._ 23 | import com.aerospike.client.policy.{BatchPolicy, Policy} 24 | import com.aerospike.client.{BatchRead, Key, Record} 25 | 26 | import scala.concurrent.{ExecutionContext, Future} 27 | 28 | /** 29 | * Created by danylee on 11/09/16. 30 | */ 31 | trait GetProvider { 32 | 33 | def client: IAsyncClient 34 | 35 | implicit val ec: ExecutionContext 36 | 37 | def getS(policy: BatchPolicy, listener: BatchSequenceListener, records: util.List[BatchRead]): Future[Unit] = 38 | Future(client.get(policy, listener, records)) 39 | 40 | def get(policy: BatchPolicy, listener: RecordSequenceListener, keys: Array[Key], binNames: String*): Future[Unit] = 41 | Future(client.get(policy, listener, keys, binNames: _*)) 42 | 43 | def get(policy: BatchPolicy, listener: RecordArrayListener, keys: Array[Key], binNames: String*): Future[Unit] = 44 | Future(client.get(policy, listener, keys, binNames: _*)) 45 | 46 | def get(policy: BatchPolicy, listener: RecordSequenceListener, keys: Array[Key]): Future[Unit] = 47 | Future(client.get(policy, listener, keys)) 48 | 49 | def get(policy: BatchPolicy, listener: RecordArrayListener, keys: Array[Key]): Future[Unit] = 50 | Future(client.get(policy, listener, keys)) 51 | 52 | def get(policy: Policy, listener: RecordListener, key: Key, binNames: 
String*): Future[Unit] = 53 | Future(client.get(policy, listener, key, binNames: _*)) 54 | 55 | def get(policy: Policy, listener: RecordListener, key: Key): Future[Unit] = 56 | Future(client.get(policy, listener, key)) 57 | 58 | def get(policy: BatchPolicy, listener: BatchListListener, records: util.List[BatchRead]): Future[Unit] = 59 | Future(client.get(policy, listener, records)) 60 | 61 | def get(policy: BatchPolicy, records: util.List[BatchRead]): Future[Unit] = 62 | Future(client.get(policy, records)) 63 | 64 | def get(policy: BatchPolicy, keys: Array[Key], binNames: String*): Array[Option[Record]] = 65 | client.get(policy, keys, binNames: _*).map(Option.apply) 66 | 67 | def get(policy: BatchPolicy, keys: Array[Key]): Array[Option[Record]] = 68 | client.get(policy, keys).map(Option.apply) 69 | 70 | def get(policy: Policy, key: Key, binNames: String*): Option[Record] = 71 | Option(client.get(policy, key, binNames: _*)) 72 | 73 | def get(policy: Policy, key: Key): Option[Record] = Option(client.get(policy, key)) 74 | } 75 | -------------------------------------------------------------------------------- /src/main/scala/ru/tinkoff/aerospike/dsl/HeaderProvider.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package ru.tinkoff.aerospike.dsl 18 | 19 | import com.aerospike.client.async.IAsyncClient 20 | import com.aerospike.client.listener.{RecordArrayListener, RecordListener, RecordSequenceListener} 21 | import com.aerospike.client.policy.{BatchPolicy, Policy} 22 | import com.aerospike.client.{Key, Record} 23 | 24 | import scala.concurrent.{ExecutionContext, Future} 25 | 26 | /** 27 | * Created by danylee on 11/09/16. 28 | */ 29 | trait HeaderProvider { 30 | 31 | def client: IAsyncClient 32 | 33 | implicit val ec: ExecutionContext 34 | 35 | def getHeader(policy: Policy, key: Key): Future[Record] = Future(client.getHeader(policy, key)) 36 | 37 | def getHeader(policy: BatchPolicy, keys: Array[Key]): Future[Array[Record]] = Future(client.getHeader(policy, keys)) 38 | 39 | def getHeader(policy: Policy, listener: RecordListener, key: Key): Future[Unit] = Future(client.getHeader(policy, listener, key)) 40 | 41 | def getHeader(policy: BatchPolicy, listener: RecordArrayListener, keys: Array[Key]): Future[Unit] = 42 | Future(client.getHeader(policy, listener, keys)) 43 | 44 | def getHeader(policy: BatchPolicy, listener: RecordSequenceListener, keys: Array[Key]): Future[Unit] = 45 | Future(client.getHeader(policy, listener, keys)) 46 | 47 | } 48 | -------------------------------------------------------------------------------- /src/main/scala/ru/tinkoff/aerospike/dsl/MainProvider.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package ru.tinkoff.aerospike.dsl 18 | 19 | import com.aerospike.client.async.IAsyncClient 20 | import com.aerospike.client.listener.{RecordSequenceListener, _} 21 | import com.aerospike.client.policy.{QueryPolicy, WritePolicy, _} 22 | import com.aerospike.client.query._ 23 | import com.aerospike.client.task.{ExecuteTask, RegisterTask} 24 | import com.aerospike.client.{Bin, Key, Record, _} 25 | 26 | import scala.concurrent.{ExecutionContext, Future} 27 | 28 | /** 29 | * Created by danylee on 11/09/16. 30 | */ 31 | trait MainProvider { 32 | 33 | def client: IAsyncClient 34 | 35 | implicit val ec: ExecutionContext 36 | 37 | def scanAll(policy: ScanPolicy, listener: RecordSequenceListener, namespace: String, 38 | setName: String, binNames: String*): Future[Unit] = 39 | Future(client.scanAll(policy, listener, namespace, setName, binNames: _*)) 40 | 41 | 42 | def scanAll(policy: ScanPolicy, namespace: String, setName: String, 43 | callback: ScanCallback, binNames: String*): Future[Unit] = 44 | Future(client.scanAll(policy, namespace, setName, callback, binNames: _*)) 45 | 46 | def operate(policy: WritePolicy, listener: RecordListener, 47 | key: Key, operations: Operation*): Future[Unit] = 48 | Future(client.operate(policy, listener, key, operations: _*)) 49 | 50 | def prepend(policy: WritePolicy, listener: WriteListener, key: Key, bins: Bin*): Future[Unit] = 51 | Future(client.prepend(policy, listener, key, bins: _*)) 52 | 53 | def put(policy: WritePolicy, listener: WriteListener, key: Key, bins: Bin*): 
Future[Unit] = 54 | Future(client.put(policy, listener, key, bins: _*)) 55 | 56 | def execute(policy: WritePolicy, listener: ExecuteListener, key: Key, packageName: String, functionName: String, functionArgs: Value*): Future[Unit] = 57 | Future(client.execute(policy, listener, key, packageName, functionName, functionArgs: _*)) 58 | 59 | def append(policy: WritePolicy, listener: WriteListener, key: Key, bins: Bin*): Future[Unit] = 60 | Future(client.append(policy, listener, key, bins: _*)) 61 | 62 | def touch(policy: WritePolicy, listener: WriteListener, key: Key): Future[Unit] = 63 | Future(client.touch(policy, listener, key)) 64 | 65 | def delete(policy: WritePolicy, listener: DeleteListener, key: Key): Future[Unit] = 66 | Future(client.delete(policy, listener, key)) 67 | 68 | def add(policy: WritePolicy, listener: WriteListener, key: Key, bins: Bin*): Future[Unit] = 69 | Future(client.add(policy, listener, key, bins: _*)) 70 | 71 | def exists(policy: Policy, listener: ExistsListener, key: Key): Future[Unit] = 72 | Future(client.exists(policy, listener, key)) 73 | 74 | def exists(policy: BatchPolicy, listener: ExistsArrayListener, keys: Array[Key]): Future[Unit] = 75 | Future(client.exists(policy, listener, keys)) 76 | 77 | def exists(policy: BatchPolicy, listener: ExistsSequenceListener, keys: Array[Key]): Future[Unit] = 78 | Future(client.exists(policy, listener, keys)) 79 | 80 | def query(policy: QueryPolicy, listener: RecordSequenceListener, statement: Statement): Future[Unit] = 81 | Future(client.query(policy, listener, statement)) 82 | 83 | def query(policy: QueryPolicy, statement: Statement): Future[RecordSet] = 84 | Future(client.query(policy, statement)) 85 | 86 | def operate(policy: WritePolicy, key: Key, operations: Operation*): Future[Record] = 87 | Future(client.operate(policy, key, operations: _*)) 88 | 89 | def removeUdf(policy: InfoPolicy, serverPath: String): Future[Unit] = 90 | Future(client.removeUdf(policy, serverPath)) 91 | 92 | def 
prepend(policy: WritePolicy, key: Key, bins: Bin*): Future[Unit] = 93 | Future(client.prepend(policy, key, bins: _*)) 94 | 95 | def registerUdfString(policy: Policy, code: String, 96 | serverPath: String, language: Language): Future[RegisterTask] = 97 | Future(client.registerUdfString(policy, code, serverPath, language)) 98 | 99 | def put(policy: WritePolicy, key: Key, bins: Bin*): Future[Unit] = 100 | Future(client.put(policy, key, bins: _*)) 101 | 102 | def execute(policy: WritePolicy, key: Key, packageName: String, functionName: String, args: Value*): Future[AnyRef] = 103 | Future(client.execute(policy, key, packageName, functionName, args: _*)) 104 | 105 | def execute(policy: WritePolicy, statement: Statement, packageName: String, functionName: String, functionArgs: Value*): Future[ExecuteTask] = 106 | Future(client.execute(policy, statement, packageName, functionName, functionArgs: _*)) 107 | 108 | def append(policy: WritePolicy, key: Key, bins: Bin*): Future[Unit] = 109 | Future(client.append(policy, key, bins: _*)) 110 | 111 | def touch(policy: WritePolicy, key: Key): Future[Unit] = 112 | Future(client.touch(policy, key)) 113 | 114 | def delete(policy: WritePolicy, key: Key): Future[Boolean] = 115 | Future(client.delete(policy, key)) 116 | 117 | def add(policy: WritePolicy, key: Key, bins: Bin*): Future[Unit] = 118 | Future(client.add(policy, key, bins: _*)) 119 | 120 | def exists(policy: Policy, key: Key): Future[Boolean] = 121 | Future(client.exists(policy, key)) 122 | 123 | def exists(policy: BatchPolicy, keys: Array[Key]): Future[Array[Boolean]] = 124 | Future(client.exists(policy, keys)) 125 | 126 | def queryAggregate(policy: QueryPolicy, statement: Statement, 127 | packageName: String, functionName: String, 128 | functionArgs: Value*): Future[ResultSet] = 129 | Future(client.queryAggregate(policy, statement, packageName, functionName, functionArgs: _*)) 130 | 131 | def queryAggregate(policy: QueryPolicy, statement: Statement): Future[ResultSet] = 132 | 
Future(client.queryAggregate(policy, statement)) 133 | 134 | } 135 | 136 | -------------------------------------------------------------------------------- /src/main/scala/ru/tinkoff/aerospike/dsl/NodeProvider.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package ru.tinkoff.aerospike.dsl 18 | 19 | import com.aerospike.client.ScanCallback 20 | import com.aerospike.client.async.IAsyncClient 21 | import com.aerospike.client.cluster.Node 22 | import com.aerospike.client.policy.{QueryPolicy, ScanPolicy} 23 | import com.aerospike.client.query.{RecordSet, ResultSet, Statement} 24 | 25 | import scala.collection.JavaConverters._ 26 | import scala.concurrent.{ExecutionContext, Future} 27 | 28 | /** 29 | * Created by danylee on 11/09/16. 
30 | */ 31 | trait NodeProvider { 32 | 33 | def client: IAsyncClient 34 | 35 | implicit val ec: ExecutionContext 36 | 37 | def getNodeNames: Future[List[String]] = Future(client.getNodeNames.asScala.toList) 38 | 39 | def scanNode(policy: ScanPolicy, nodeName: String, namespace: String, 40 | setName: String, callback: ScanCallback, binNames: String*): Future[Unit] = 41 | Future(client.scanNode(policy, nodeName, namespace, setName, callback, binNames: _*)) 42 | 43 | def scanNode(policy: ScanPolicy, node: Node, namespace: String, setName: String, 44 | callback: ScanCallback, binNames: String*): Future[Unit] = 45 | Future(client.scanNode(policy, node, namespace, setName, callback, binNames: _*)) 46 | 47 | def queryNode(policy: QueryPolicy, statement: Statement, node: Node): Future[RecordSet] = 48 | Future(client.queryNode(policy, statement, node)) 49 | 50 | def getNodes: Future[Array[Node]] = Future(client.getNodes) 51 | 52 | def queryAggregateNode(policy: QueryPolicy, statement: Statement, node: Node): Future[ResultSet] = 53 | Future(client.queryAggregateNode(policy, statement, node)) 54 | 55 | def getNode(nodeName: String): Future[Node] = Future(client.getNode(nodeName)) 56 | } 57 | -------------------------------------------------------------------------------- /src/main/scala/ru/tinkoff/aerospike/dsl/PolicyProvider.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package ru.tinkoff.aerospike.dsl 18 | 19 | import com.aerospike.client.async.IAsyncClient 20 | import com.aerospike.client.policy._ 21 | 22 | import scala.concurrent.{ExecutionContext, Future} 23 | 24 | /** 25 | * Created by danylee on 11/09/16. 26 | */ 27 | trait PolicyProvider { 28 | 29 | def client: IAsyncClient 30 | 31 | implicit val ec: ExecutionContext 32 | 33 | def getAsyncQueryPolicyDefault: Future[QueryPolicy] = Future(client.getAsyncQueryPolicyDefault) 34 | 35 | def getAsyncReadPolicyDefault: Future[Policy] = Future(client.getAsyncReadPolicyDefault) 36 | 37 | def getAsyncWritePolicyDefault: Future[WritePolicy] = Future(client.getAsyncWritePolicyDefault) 38 | 39 | def getAsyncScanPolicyDefault: Future[ScanPolicy] = Future(client.getAsyncScanPolicyDefault) 40 | 41 | def getAsyncBatchPolicyDefault: Future[BatchPolicy] = Future(client.getAsyncBatchPolicyDefault) 42 | 43 | def getWritePolicyDefault: Future[WritePolicy] = Future(client.getWritePolicyDefault) 44 | 45 | def getReadPolicyDefault: Future[Policy] = Future(client.getReadPolicyDefault) 46 | 47 | def getBatchPolicyDefault: Future[BatchPolicy] = Future(client.getBatchPolicyDefault) 48 | 49 | def getInfoPolicyDefault: Future[InfoPolicy] = Future(client.getInfoPolicyDefault) 50 | 51 | def getQueryPolicyDefault: Future[QueryPolicy] = Future(client.getQueryPolicyDefault) 52 | 53 | def getScanPolicyDefault: Future[ScanPolicy] = Future(client.getScanPolicyDefault) 54 | } 55 | -------------------------------------------------------------------------------- /src/main/scala/ru/tinkoff/aerospike/dsl/Spike.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package ru.tinkoff.aerospike.dsl 18 | 19 | import com.aerospike.client.policy._ 20 | import ru.tinkoff.aerospike.dsl.batchread.BatchReadWrapper 21 | import ru.tinkoff.aerospikemacro.converters.KeyWrapper._ 22 | import ru.tinkoff.aerospikemacro.converters.{BinWrapper, KeyWrapper} 23 | import ru.tinkoff.aerospikescala.domain.ABin 24 | 25 | import scala.concurrent.{ExecutionContext, Future} 26 | 27 | /** 28 | * @author MarinaSigaeva 29 | * @since 14.09.16 30 | */ 31 | trait Spike { 32 | 33 | //for easy types call 34 | def call(action: Call, any: Any = None)( 35 | implicit pw: Option[WritePolicy] = None, 36 | p: Option[Policy] = None, 37 | bp: Option[BatchPolicy] = None, 38 | qp: Option[QueryPolicy] = None, 39 | sp: Option[ScanPolicy] = None, 40 | ip: Option[InfoPolicy] = None): Future[Any] 41 | 42 | //for calls where you need to pass Key and Bin types (ru.tinkoff.aerospike.dsl.converters included) 43 | def callKB[K, B](action: CallKB, k: K, b: ABin[B])( 44 | implicit kC: KeyWrapper[K], 45 | bC: BinWrapper[B], 46 | pw: Option[WritePolicy] = None): Future[Unit] 47 | 48 | //for calls where you need to pass List[Key] type (ru.tinkoff.aerospike.dsl.converters included) 49 | def callKs[K](action: CallKs, ks: Array[K], any: Any = None)( 50 | implicit kC: KeyWrapper[K], 51 | bp: Option[BatchPolicy] = None, 52 | p: Option[Policy] = None, 53 | qp: Option[QueryPolicy] = None): Future[Any] 54 | 55 | //for calls where you need to pass Key type (ru.tinkoff.aerospike.dsl.converters included) 56 | def callK[K](action: CallK, 
k: K, any: Any = None)( 57 | implicit kC: KeyWrapper[K], 58 | p: Option[Policy] = None, 59 | pw: Option[WritePolicy] = None, 60 | bp: Option[BatchPolicy] = None, 61 | sp: Option[ScanPolicy] = None, 62 | ip: Option[InfoPolicy] = None): Future[Any] 63 | 64 | def getByKey[K, B](k: K, bs: List[String] = Nil)( 65 | implicit kC: KeyWrapper[K], 66 | bC: BinWrapper[B], 67 | ec: ExecutionContext, 68 | optP: Option[Policy] = None): Future[Option[(Map[String, Option[B]], Int, Int)]] 69 | 70 | def getByKeys[K, B](ks: Array[K], bs: List[String] = Nil)( 71 | implicit kC: KeyWrapper[K], 72 | bC: BinWrapper[B], 73 | ec: ExecutionContext, 74 | optBP: Option[BatchPolicy] = None): Future[List[Option[(Map[String, Option[B]], Int, Int)]]] 75 | 76 | def getByKeysWithListener[K, L](ks: Array[K], 77 | listener: L, 78 | bs: List[String] = Nil)( 79 | implicit kC: KeyWrapper[K], 80 | optBP: Option[BatchPolicy] = None): Future[Unit] 81 | 82 | //note, if you will not change namespace, setName parameters in BatchReadWrapper - default values from application.conf will be used 83 | def getByKeysWithBatchListener[L](kws: List[BatchReadWrapper], 84 | listener: Option[L] = None)( 85 | implicit optBP: Option[BatchPolicy] = None): Future[Unit] 86 | 87 | def deleteK[K](k: K)(implicit kC: KeyWrapper[K], 88 | pw: Option[WritePolicy] = None, 89 | e: ExecutionContext): Future[Boolean] 90 | } 91 | -------------------------------------------------------------------------------- /src/main/scala/ru/tinkoff/aerospike/dsl/SpikeImpl.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package ru.tinkoff.aerospike.dsl 18 | 19 | import com.aerospike.client._ 20 | import com.aerospike.client.async._ 21 | import com.aerospike.client.listener._ 22 | import com.aerospike.client.policy._ 23 | import com.aerospike.client.query.Statement 24 | import ru.tinkoff.aerospike.dsl.batchread.BatchReadWrapper 25 | import ru.tinkoff.aerospikemacro.converters.{BinWrapper, KeyWrapper} 26 | import ru.tinkoff.aerospike.dsl.errors.AerospikeDSLError 27 | import ru.tinkoff.aerospikescala.domain.{ABin, MBin, SingleBin} 28 | 29 | import scala.collection.JavaConverters._ 30 | import scala.concurrent.{ExecutionContext, Future} 31 | 32 | /** 33 | * @author MarinaSigaeva 34 | * @since 08.09.16 35 | */ 36 | class SpikeImpl(spikeClient: IAsyncClient)(implicit val ec: ExecutionContext) 37 | extends Spike 38 | with MainProvider 39 | with GetProvider 40 | with AdminProvider 41 | with CollectionsProvider 42 | with HeaderProvider 43 | with NodeProvider 44 | with PolicyProvider { 45 | 46 | val client: IAsyncClient = spikeClient 47 | 48 | def call(action: Call, any: Any = None)( 49 | implicit pw: Option[WritePolicy] = None, 50 | p: Option[Policy] = None, 51 | bp: Option[BatchPolicy] = None, 52 | qp: Option[QueryPolicy] = None, 53 | sp: Option[ScanPolicy] = None, 54 | ip: Option[InfoPolicy] = None 55 | ): Future[Any] = { 56 | import Call._ 57 | any match { 58 | case Param1(pkg, fName, args, Some(stmnt), listener) if (action is Execute) && listener.isEmpty => 59 | execute(pw.getOrElse(new WritePolicy), stmnt, pkg, fName, args: _*) 
60 | case Param2(statement, Some(listener)) if action is Query => 61 | query(qp.getOrElse(new QueryPolicy), listener, statement) 62 | case Param2(statement, _) if action is Query => query(qp.getOrElse(new QueryPolicy), statement) 63 | case Param1(pkg, fName, args, Some(stmnt), listener) if (action is QueryAggregate) && listener.isEmpty => 64 | queryAggregate(qp.getOrElse(new QueryPolicy), stmnt, pkg, fName, args: _*) 65 | case statement: Statement if action is QueryAggregate => queryAggregate(qp.getOrElse(new QueryPolicy), statement) 66 | case Param4(namespace: String, setName: String, binNames: List[String], Some(listener: RecordSequenceListener)) 67 | if action is ScanAll => 68 | scan(namespace, setName, binNames, Option(listener)) 69 | case Param3(namespace: String, setName: String, binNames: List[String], Some(callback: ScanCallback)) 70 | if action is ScanAll => 71 | scan(namespace, setName, binNames, callback = Option(callback)) 72 | case serverPath: String if action is RemoveUdf => removeUdf(ip.getOrElse(new InfoPolicy), serverPath) 73 | case Param5(code: String, serverPath: String, language: Language) if action is RegisterUdfString => 74 | registerUdfString(p.getOrElse(new Policy), code, serverPath, language) 75 | case _ => 76 | throw AerospikeDSLError( 77 | s"Unsupported type ${any.getClass} or action $action is not available for this type. 
" + 78 | s"You can use: ${Call.all}" 79 | ) 80 | } 81 | } 82 | 83 | def callKs[K](action: CallKs, ks: Array[K], any: Any = None)( 84 | implicit kC: KeyWrapper[K], 85 | bp: Option[BatchPolicy] = None, 86 | p: Option[Policy] = None, 87 | qp: Option[QueryPolicy] = None 88 | ): Future[Any] = { 89 | import CallKs._ 90 | any match { 91 | case eal: ExistsArrayListener if action is Exists => existsByKeys(ks, Option(eal)) 92 | case esl: ExistsSequenceListener if action is Exists => existsByKeys(ks, Option(esl)) 93 | case _ if action is Exists => existsByKeys(ks) 94 | case _ => 95 | throw AerospikeDSLError( 96 | s"Unsupported type ${any.getClass} or action $action is not available for this type. " + 97 | s"You can use: Exists" 98 | ) 99 | } 100 | } 101 | 102 | def callK[K](action: CallK, k: K, any: Any = None)( 103 | implicit kC: KeyWrapper[K], 104 | p: Option[Policy] = None, 105 | pw: Option[WritePolicy] = None, 106 | bp: Option[BatchPolicy] = None, 107 | sp: Option[ScanPolicy] = None, 108 | ip: Option[InfoPolicy] = None 109 | ): Future[Any] = { 110 | import CallK._ 111 | any match { 112 | case ops: List[Operation] if action is Operate => operate(pw.getOrElse(new WritePolicy), kC(k), ops: _*) 113 | case (ops: List[Operation], listener: RecordListener) if action is Operate => 114 | operate(pw.getOrElse(new WritePolicy), listener, kC(k), ops: _*) 115 | case listener: DeleteListener if action is Delete => deleteByKey(k, Option(listener)) 116 | case _ if action is Delete => deleteByKey(k) 117 | case listener: WriteListener if action is Touch => touchByKey(k, Option(listener)) 118 | case _ if action is Touch => touchByKey(k) 119 | case Param1(pkg, fName, args, stmnt, listener) if (action is Execute) && stmnt.isEmpty => 120 | execByKey(k, pkg, fName, args, listener) 121 | case Param1(pkg, fName, args, stmnt, listener) if (action is Execute) && stmnt.isEmpty && listener.isEmpty => 122 | execByKey(k, pkg, fName, args) 123 | case listener: ExistsListener if action is Exists => 
existsByKey(k, Option(listener))(kC, p) 124 | case _ if action is Exists => existsByKey(k)(kC, p) 125 | case _ => 126 | throw AerospikeDSLError( 127 | s"Unsupported type ${any.getClass} or action $action is not available for this type. " + 128 | s"You can use: ${CallK.all}" 129 | ) 130 | 131 | } 132 | } 133 | 134 | def callKB[K, B]( 135 | action: CallKB, 136 | k: K, 137 | b: ABin[B] 138 | )(implicit kC: KeyWrapper[K], bC: BinWrapper[B], pw: Option[WritePolicy] = None): Future[Unit] = { 139 | import CallKB._ 140 | b match { 141 | case one: SingleBin[B] if action is Put => putOne(k, one) 142 | case one: SingleBin[B] if action is Append => appendOne(k, one) 143 | case one: SingleBin[B] if action is Prepend => prependOne(k, one) 144 | case one: SingleBin[B] if action is Add => addOne(k, one) 145 | case oneM: MBin[B] if action is Put => putMany(k, oneM) 146 | case oneM: MBin[B] if action is Append => appendMany(k, oneM) 147 | case oneM: MBin[B] if action is Prepend => prependMany(k, oneM) 148 | case oneM: MBin[B] if action is Add => addMany(k, oneM) 149 | case _ => 150 | throw AerospikeDSLError( 151 | s"Unsupported type ${b.getClass} or action $action is not available for this type. " + 152 | s"You can use: ${CallKB.all}" 153 | ) 154 | } 155 | } 156 | 157 | def deleteK[K]( 158 | k: K 159 | )(implicit kC: KeyWrapper[K], pw: Option[WritePolicy] = None, e: ExecutionContext): Future[Boolean] = { 160 | val writePolicy = pw.getOrElse(new WritePolicy) 161 | delete(writePolicy, kC(k)) 162 | } 163 | 164 | protected def putOne[K, B]( 165 | k: K, 166 | b: SingleBin[B] 167 | )(implicit kC: KeyWrapper[K], bC: BinWrapper[B], optWP: Option[WritePolicy] = None): Future[Unit] = { 168 | /* ToDo Probably performance degradation. 169 | It's not needed to create WritePolicy with every `put`. 170 | 1) Aerospike java driver make this check itself. 171 | 2) Default WritePolicy should be single object. 
172 | */ 173 | val writePolicy = optWP.getOrElse(new WritePolicy) 174 | put(writePolicy, kC(k), bC.apply(b)) 175 | } 176 | 177 | protected def putMany[K, B]( 178 | k: K, 179 | bs: MBin[B] 180 | )(implicit kC: KeyWrapper[K], bC: BinWrapper[B], optWP: Option[WritePolicy] = None): Future[Unit] = { 181 | val writePolicy = optWP.getOrElse(new WritePolicy) 182 | put(writePolicy, kC(k), bC.apply(bs): _*) 183 | } 184 | 185 | protected def appendOne[K, B]( 186 | k: K, 187 | b: SingleBin[B] 188 | )(implicit kC: KeyWrapper[K], bC: BinWrapper[B], optWP: Option[WritePolicy] = None): Future[Unit] = { 189 | val writePolicy = optWP.getOrElse(new WritePolicy) 190 | append(writePolicy, kC(k), bC.apply(b)) 191 | } 192 | 193 | protected def appendMany[K, B]( 194 | k: K, 195 | bs: MBin[B] 196 | )(implicit kC: KeyWrapper[K], bC: BinWrapper[B], optWP: Option[WritePolicy] = None): Future[Unit] = { 197 | val writePolicy = optWP.getOrElse(new WritePolicy) 198 | append(writePolicy, kC(k), bC.apply(bs): _*) 199 | } 200 | 201 | protected def prependOne[K, B]( 202 | k: K, 203 | b: SingleBin[B] 204 | )(implicit kC: KeyWrapper[K], bC: BinWrapper[B], optWP: Option[WritePolicy] = None): Future[Unit] = { 205 | val writePolicy = optWP.getOrElse(new WritePolicy) 206 | prepend(writePolicy, kC(k), bC.apply(b)) 207 | } 208 | 209 | protected def prependMany[K, B]( 210 | k: K, 211 | bs: MBin[B] 212 | )(implicit kC: KeyWrapper[K], bC: BinWrapper[B], optWP: Option[WritePolicy] = None): Future[Unit] = { 213 | val writePolicy = optWP.getOrElse(new WritePolicy) 214 | prepend(writePolicy, kC(k), bC.apply(bs): _*) 215 | } 216 | 217 | protected def addOne[K, B]( 218 | k: K, 219 | b: SingleBin[B] 220 | )(implicit kC: KeyWrapper[K], bC: BinWrapper[B], optWP: Option[WritePolicy] = None): Future[Unit] = { 221 | val writePolicy = optWP.getOrElse(new WritePolicy) 222 | add(writePolicy, kC(k), bC.apply(b)) 223 | } 224 | 225 | protected def addMany[K, B]( 226 | k: K, 227 | bs: MBin[B] 228 | )(implicit kC: KeyWrapper[K], 
bC: BinWrapper[B], optWP: Option[WritePolicy] = None): Future[Unit] = { 229 | val writePolicy = optWP.getOrElse(new WritePolicy) 230 | add(writePolicy, kC(k), bC.apply(bs): _*) 231 | } 232 | 233 | protected def deleteByKey[K]( 234 | k: K, 235 | optListener: Option[DeleteListener] = None 236 | )(implicit kC: KeyWrapper[K], optWP: Option[WritePolicy] = None): Future[AnyVal] = { 237 | val writePolicy = optWP.getOrElse(new WritePolicy) 238 | optListener.map(listener => delete(writePolicy, listener, kC(k))).getOrElse(delete(writePolicy, kC(k))) 239 | } 240 | 241 | protected def touchByKey[K]( 242 | k: K, 243 | optListener: Option[WriteListener] = None 244 | )(implicit kC: KeyWrapper[K], optWP: Option[WritePolicy] = None): Future[Unit] = { 245 | val writePolicy = optWP.getOrElse(new WritePolicy) 246 | optListener.map(listener => touch(writePolicy, listener, kC(k))).getOrElse(touch(writePolicy, kC(k))) 247 | } 248 | 249 | protected def execByKey[K]( 250 | k: K, 251 | pkg: String, 252 | fName: String, 253 | args: List[Value], 254 | optListener: Option[ExecuteListener] = None 255 | )(implicit kC: KeyWrapper[K], optWP: Option[WritePolicy] = None): Future[Any] = { 256 | val writePolicy = optWP.getOrElse(new WritePolicy) 257 | optListener 258 | .map(listener => execute(writePolicy, listener, kC(k), pkg, fName, args: _*)) 259 | .getOrElse(execute(writePolicy, kC(k), pkg, fName, args: _*)) 260 | } 261 | 262 | protected def existsByKeys[K, L]( 263 | ks: Array[K], 264 | optListener: Option[L] = None 265 | )(implicit kC: KeyWrapper[K], bp: Option[BatchPolicy] = None): Future[Any] = { 266 | val policy = bp.getOrElse(new BatchPolicy) 267 | val keys = ks.view.map(k => kC(k)).toArray 268 | optListener match { 269 | case Some(eal: ExistsArrayListener) => exists(policy, eal, keys) 270 | case Some(esl: ExistsSequenceListener) => exists(policy, esl, keys) 271 | case _ => exists(policy, keys) 272 | } 273 | } 274 | 275 | protected def existsByKey[K]( 276 | k: K, 277 | optListener: 
Option[ExistsListener] = None 278 | )(implicit kC: KeyWrapper[K], p: Option[Policy] = None): Future[AnyVal] = { 279 | val policy = p.getOrElse(new Policy) 280 | optListener.map(el => exists(policy, el, kC(k))).getOrElse(exists(policy, kC(k))) 281 | } 282 | 283 | protected def scan( 284 | namespace: String, 285 | setName: String, 286 | binNames: List[String], 287 | listener: Option[RecordSequenceListener] = None, 288 | callback: Option[ScanCallback] = None 289 | )(implicit sp: Option[ScanPolicy] = None): Future[Unit] = { 290 | val policy = sp.getOrElse(new ScanPolicy) 291 | (listener, callback) match { 292 | case (Some(rl: RecordSequenceListener), _) => scanAll(policy, rl, namespace, setName, binNames: _*) 293 | case (None, Some(cb)) => scanAll(policy, namespace, setName, cb, binNames: _*) 294 | case _ => throw AerospikeDSLError("Not supported scanAll type") 295 | } 296 | } 297 | 298 | def getByKey[K, B](k: K, bs: List[String] = Nil)( 299 | implicit kC: KeyWrapper[K], 300 | bC: BinWrapper[B], 301 | ec: ExecutionContext, 302 | optP: Option[Policy] = None 303 | ): Future[Option[(Map[String, Option[B]], Int, Int)]] = { 304 | val policy = optP.getOrElse(new Policy) 305 | if (bs.isEmpty) Future(get(policy, kC(k)).map(bC(_)))(ec) 306 | else Future(get(policy, kC(k), bs: _*).map(record => bC(record)))(ec) 307 | } 308 | 309 | def getByKeyWithListener[K](k: K, listener: RecordListener, bs: List[String] = Nil)( 310 | implicit kC: KeyWrapper[K], 311 | optP: Option[Policy] = None 312 | ): Future[Unit] = { 313 | val policy = optP.getOrElse(new Policy) 314 | if (bs.isEmpty) get(policy, listener, kC(k)) else get(policy, listener, kC(k), bs: _*) 315 | } 316 | 317 | def getByKeys[K, B](ks: Array[K], bs: List[String] = Nil)( 318 | implicit kC: KeyWrapper[K], 319 | bC: BinWrapper[B], 320 | ec: ExecutionContext, 321 | optBP: Option[BatchPolicy] = None 322 | ): Future[List[Option[(Map[String, Option[B]], Int, Int)]]] = { 323 | val policy = optBP.getOrElse(new BatchPolicy) 324 | val keys 
= ks.view.map(k => kC(k)).toArray 325 | if (bs.isEmpty) Future(get(policy, keys).map(_.map(record => bC(record))).toList)(ec) 326 | else Future(get(policy, keys, bs: _*).map(_.map(record => bC(record))).toList)(ec) 327 | } 328 | 329 | def getByKeysWithListener[K, L](ks: Array[K], listener: L, bs: List[String] = Nil)( 330 | implicit kC: KeyWrapper[K], 331 | optBP: Option[BatchPolicy] = None 332 | ): Future[Unit] = { 333 | val policy = optBP.getOrElse(new BatchPolicy) 334 | val keys = ks.view.map(k => kC(k)).toArray 335 | 336 | listener match { 337 | case l: RecordArrayListener => if (bs.isEmpty) get(policy, l, keys) else get(policy, l, keys, bs: _*) 338 | case l: RecordSequenceListener => if (bs.isEmpty) get(policy, l, keys) else get(policy, l, keys, bs: _*) 339 | case _ => throw AerospikeDSLError(s"Unsupported listener type $listener") 340 | } 341 | } 342 | 343 | def getByKeysWithBatchListener[L](kws: List[BatchReadWrapper], listener: Option[L] = None)( 344 | implicit optBP: Option[BatchPolicy] = None 345 | ): Future[Unit] = { 346 | val policy = optBP.getOrElse(new BatchPolicy) 347 | val records: java.util.List[BatchRead] = kws.view.map(e => e.apply).toList.asJava 348 | 349 | listener match { 350 | case Some(l: BatchListListener) => get(policy, l, records) 351 | case Some(ls: BatchSequenceListener) => getS(policy, ls, records) 352 | case None => get(policy, records) 353 | case _ => throw AerospikeDSLError(s"Unsupported listener type $listener") 354 | } 355 | } 356 | 357 | } 358 | -------------------------------------------------------------------------------- /src/main/scala/ru/tinkoff/aerospike/dsl/batchread/BatchReadWrapper.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package ru.tinkoff.aerospike.dsl.batchread 18 | 19 | import com.aerospike.client.BatchRead 20 | import ru.tinkoff.aerospikemacro.converters._ 21 | import com.aerospike.client.Key 22 | import ru.tinkoff.aerospikemacro.domain.DBCredentials 23 | 24 | import scala.language.experimental.macros 25 | 26 | 27 | /** 28 | * @author MarinaSigaeva 29 | * @since 21.09.16 30 | */ 31 | trait BatchReadWrapper { 32 | val keyValue: Any 33 | val binNames: Array[String] 34 | implicit val dbc: DBCredentials 35 | 36 | def applyO[S <: Any](k: S)(implicit kW: KeyWrapper[S]): BatchRead = new BatchRead(kW(k), binNames) 37 | def apply = applyO(keyValue) 38 | } 39 | -------------------------------------------------------------------------------- /src/main/scala/ru/tinkoff/aerospike/dsl/domain.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package ru.tinkoff.aerospike.dsl 18 | 19 | import com.aerospike.client.listener.{ExecuteListener, RecordSequenceListener} 20 | import com.aerospike.client.query.Statement 21 | import com.aerospike.client.{Language, ScanCallback, Value} 22 | 23 | /** 24 | * Created by danylee on 11/09/16. 25 | */ 26 | 27 | trait Param 28 | 29 | case class Param1(packageName: String, functionName: String, functionArgs: List[Value], 30 | statement: Option[Statement] = None, listener: Option[ExecuteListener] = None) extends Param 31 | 32 | //structural type are expensive (reflection) 33 | 34 | case class Param2(statement: Statement, listener: Option[RecordSequenceListener] = None) extends Param 35 | 36 | case class Param3(namespace: String, setName: String, binNames: List[String], callback: Option[ScanCallback] = None) 37 | 38 | case class Param4(namespace: String, setName: String, binNames: List[String], listener: Option[RecordSequenceListener]) 39 | 40 | case class Param5(code: String, serverPath: String, language: Language) -------------------------------------------------------------------------------- /src/main/scala/ru/tinkoff/aerospike/dsl/errors/AerospikeDSLError.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package ru.tinkoff.aerospike.dsl.errors 18 | 19 | /** 20 | * @author MarinaSigaeva 21 | * @since 11.11.16 22 | */ 23 | case class AerospikeDSLError(message: String, code: String = "DSL_ERROR") extends Exception(message) 24 | -------------------------------------------------------------------------------- /src/main/scala/ru/tinkoff/aerospike/dsl/scheme/Scheme.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package ru.tinkoff.aerospike.dsl.scheme 18 | 19 | import ru.tinkoff.aerospike.dsl.SpikeImpl 20 | import ru.tinkoff.aerospikemacro.converters.KeyWrapper 21 | import ru.tinkoff.aerospikemacro.domain.DBCredentials 22 | 23 | 24 | /** 25 | * @author MarinaSigaeva 26 | * @since 26.09.16 27 | */ 28 | 29 | /* This trait will work with one key type and different types of Bins */ 30 | trait Scheme[K] { 31 | val spike: SpikeImpl 32 | implicit val dbc: DBCredentials 33 | // implicit val kw: KeyWrapper[K] 34 | 35 | } 36 | 37 | /* This trait will work with one key type and one Bin type */ 38 | trait KBScheme[K, B] { 39 | val spike: SpikeImpl 40 | implicit val dbc: DBCredentials 41 | //implicit val kw: KeyWrapper[K] 42 | } -------------------------------------------------------------------------------- /src/test/scala/ru/tinkoff/ACMock.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package ru.tinkoff 18 | 19 | import com.aerospike.client.Value.StringValue 20 | import com.aerospike.client.listener.{ExistsArrayListener, ExistsSequenceListener, RecordSequenceListener, _} 21 | import com.aerospike.client.{Language, ScanCallback, _} 22 | import com.aerospike.client.policy.{InfoPolicy, _} 23 | import com.aerospike.client.query.{RecordSet, ResultSet, Statement} 24 | import com.aerospike.client.task.{ExecuteTask, RegisterTask} 25 | import com.github.danymarialee.mock.MockAerospike 26 | import scala.collection.JavaConverters._ 27 | 28 | /** 29 | * Created by danylee on 30/11/16. 30 | */ 31 | object ACMock { 32 | 33 | val m1: java.util.Map[String, AnyRef] = Map[String, AnyRef]("operateBinName" -> new StringValue("operate")).asJava 34 | val record1 = new Record(m1, 100, 12) 35 | val s1 = new StringValue("execute") 36 | val zMock = new MockAerospike() 37 | val exTask: ExecuteTask = zMock.et1 38 | val regTask: RegisterTask = zMock.rt1 39 | 40 | def spikeMock = new MockAerospike { 41 | 42 | override def put(policy: WritePolicy, key: Key, bins: Bin*): Unit = {} 43 | 44 | override def append(policy: WritePolicy, key: Key, bins: Bin*): Unit = {} 45 | 46 | override def prepend(policy: WritePolicy, key: Key, bins: Bin*): Unit = {} 47 | 48 | override def operate(policy: WritePolicy, key: Key, operations: Operation*): Record = record1 49 | 50 | override def operate(policy: WritePolicy, listener: RecordListener, key: Key, operations: Operation*): Unit = {} 51 | 52 | override def delete(policy: WritePolicy, listener: DeleteListener, key: Key): Unit = {} 53 | 54 | override def delete(policy: WritePolicy, key: Key): Boolean = true 55 | 56 | override def touch(policy: WritePolicy, listener: WriteListener, key: Key): Unit = {} 57 | 58 | override def touch(policy: WritePolicy, key: Key): Unit = {} 59 | 60 | override def execute( 61 | policy: WritePolicy, 62 | listener: ExecuteListener, 63 | key: Key, 64 | packageName: String, 65 | functionName: String, 
66 | functionArgs: Value* 67 | ): Unit = {} 68 | 69 | override def execute( 70 | policy: WritePolicy, 71 | key: Key, 72 | packageName: String, 73 | functionName: String, 74 | args: Value* 75 | ): Object = s1 76 | 77 | override def execute( 78 | policy: WritePolicy, 79 | statement: Statement, 80 | packageName: String, 81 | functionName: String, 82 | functionArgs: Value* 83 | ): ExecuteTask = exTask 84 | 85 | override def exists(policy: Policy, listener: ExistsListener, key: Key): Unit = {} 86 | 87 | override def exists(policy: Policy, key: Key): Boolean = true 88 | 89 | override def exists(policy: BatchPolicy, listener: ExistsArrayListener, keys: Array[Key]): Unit = {} 90 | 91 | override def exists(policy: BatchPolicy, listener: ExistsSequenceListener, keys: Array[Key]): Unit = {} 92 | 93 | //noinspection NameBooleanParameters 94 | override def exists(policy: BatchPolicy, keys: Array[Key]): Array[Boolean] = Array(true) 95 | 96 | override def query(policy: QueryPolicy, listener: RecordSequenceListener, statement: Statement): Unit = {} 97 | 98 | override def query(policy: QueryPolicy, statement: Statement): RecordSet = null 99 | 100 | //= new RecordSet(queryExecutor, 1) 101 | override def queryAggregate( 102 | policy: QueryPolicy, 103 | statement: Statement, 104 | packageName: String, 105 | functionName: String, 106 | functionArgs: Value* 107 | ): ResultSet = null 108 | 109 | override def queryAggregate(policy: QueryPolicy, statement: Statement): ResultSet = null 110 | 111 | override def scanAll( 112 | policy: ScanPolicy, 113 | listener: RecordSequenceListener, 114 | namespace: String, 115 | setName: String, 116 | binNames: String* 117 | ): Unit = {} 118 | 119 | override def scanAll( 120 | policy: ScanPolicy, 121 | namespace: String, 122 | setName: String, 123 | callback: ScanCallback, 124 | binNames: String* 125 | ): Unit = {} 126 | 127 | override def removeUdf(policy: InfoPolicy, serverPath: String): Unit = {} 128 | 129 | override def registerUdfString( 130 | 
policy: Policy, 131 | code: String, 132 | serverPath: String, 133 | language: Language 134 | ): RegisterTask = regTask 135 | 136 | } 137 | } 138 | -------------------------------------------------------------------------------- /src/test/scala/ru/tinkoff/SpikeImplTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package ru.tinkoff 18 | 19 | 20 | import com.aerospike.client.Operation.Type 21 | import com.aerospike.client.Value.StringValue 22 | import com.aerospike.client._ 23 | import com.aerospike.client.query._ 24 | import org.scalatest.{FlatSpec, Matchers} 25 | import org.scalatest.concurrent.ScalaFutures 26 | import com.github.danymarialee.mock._ 27 | import ru.tinkoff.aerospike.dsl._ 28 | import ru.tinkoff.aerospikemacro.converters._ 29 | import ru.tinkoff.aerospike.dsl.{CallKB, SpikeImpl} 30 | import scala.language.experimental.macros 31 | import shapeless._ 32 | import ru.tinkoff.aerospikemacro.converters._ 33 | import ru.tinkoff.aerospikescala.domain.{MBin, SingleBin} 34 | import ru.tinkoff.aerospike.dsl.errors.AerospikeDSLError 35 | import ru.tinkoff.aerospikemacro.domain.DBCredentials 36 | import scala.concurrent.ExecutionContext.Implicits.global 37 | import org.scalatest.mockito.MockitoSugar 38 | import ACMock._ 39 | 40 | /** 41 | * @author MarinaSigaeva 42 | * @since 
08.09.16
 */
class SpikeImplTest extends FlatSpec with Matchers with MockitoSugar with ScalaFutures {

  /** Shared fixture: a DAO wired to the canned Aerospike mock. */
  trait mocks {
    val acMock = ACMock.spikeMock
    val spikeDao = new SpikeImpl(acMock)
  }

  case class Cat(name: String)

  // bin will look like: Bin("binName", "binValue")
  "SpikeImpl" should "call[K,B] put method for one element" in new mocks {
    implicit val dbc = DBCredentials("test", "test")
    spikeDao.callKB[String, String](CallKB.Put, "StrKey", SingleBin("binName", "binValue")).futureValue shouldBe ()
  }

  // bin will look like: List(Bin("binName", "binValue"), Bin("binName2", "binValue"), Bin("binName3", "binValue"))
  it should "call[K,B] prepend method for many elements same type" in new mocks {
    implicit val dbc = DBCredentials("test", "test")

    val manyBins = MBin(
      Map(
        "binName1" -> "binValue",
        "binName2" -> "binValue",
        "binName3" -> "binValue"
      )
    )
    spikeDao.callKB[String, String](CallKB.Prepend, "StrKey", manyBins).futureValue shouldBe ()
  }

  // bin will look like: Bin("binName", ListValue(List("binValue1", "binValue2")))
  it should "call[K,B] append method for seq of elements as a value" in new mocks {
    implicit val dbc = DBCredentials("test", "test")

    val seqBin = SingleBin("binName", Seq("binValue1", "binValue2"))
    spikeDao.callKB[String, Seq[String]](CallKB.Append, "StrKey", seqBin).futureValue shouldBe ()
  }

  it should "call[K] operate" in new mocks {

    import KeyWrapper._

    implicit val dbc = DBCredentials("ns", "setName")
    val stKeyWrapper = create[String](dbc)

    val ops = List(
      new Operation(Type.WRITE, "operateBinName", new StringValue("operate")),
      Operation.get("operateBinName")
    )

    // Without a listener the mock returns the canned record.
    spikeDao.callK(CallK.Operate, "strOperateKey", ops).futureValue shouldBe record1

    // With a listener the call completes with Unit.
    spikeDao
      .callK(CallK.Operate, "strOperateKey", any = (ops, ReadHandler(stKeyWrapper("strOperateKey"), record1)))
      .futureValue shouldBe {}
  }

  it should "call[K] delete" in new mocks {
    implicit val dbc = DBCredentials("test", "test")

    spikeDao.callK(CallK.Delete, "strDeleteKey", DeleteHandler()).futureValue shouldBe {}
    spikeDao.callK(CallK.Delete, "strDeleteKey").futureValue shouldBe true
  }

  it should "call[K] touch" in new mocks {
    implicit val dbc = DBCredentials("test", "test")

    spikeDao.callK(CallK.Touch, "strTouchKey", WriteHandler()).futureValue shouldBe {}
    spikeDao.callK(CallK.Touch, 3).futureValue shouldBe {}
  }

  it should "call[K] execute" in new mocks {
    implicit val dbc = DBCredentials("test", "test")

    // With a listener the call completes with Unit.
    spikeDao
      .callK(
        CallK.Execute,
        "strExecKey",
        Param1("pkg", "fName", List(new StringValue("str")), None, Option(ExecuteHandler()))
      )
      .futureValue shouldBe {}

    // Without a listener the mock's canned value is returned.
    spikeDao
      .callK(CallK.Execute, "strExecKey", Param1("pkg", "fName", List(new StringValue("str"))))
      .futureValue shouldBe s1
  }

  it should "call[K] exists" in new mocks {
    implicit val dbc = DBCredentials("test", "test")

    spikeDao.callK(CallK.Exists, "strKey", ExistsHandler()).futureValue shouldBe {}
    spikeDao.callK(CallK.Exists, "strKey").futureValue shouldBe true
  }

  it should "call[K]s exists" in new mocks {
    implicit val dbc = DBCredentials("test", "test")

    spikeDao.callKs(CallKs.Exists, Array("sk1", "sk2"), ExistsArrayHandler()).futureValue shouldBe {}
    spikeDao.callKs(CallKs.Exists, Array("sk1", "sk2"), ExistsSequenceHandler()).futureValue shouldBe {}
    spikeDao.callKs(CallKs.Exists, Array("sk1", "sk2")).futureValue shouldBe Array(true)
  }

  it should "call execute" in new mocks {
    implicit val dbc = DBCredentials("test", "test")

    spikeDao
      .call(Call.Execute, Param1("pkg", "fName", List(new StringValue("str")), Option(new Statement())))
      .futureValue shouldBe exTask
  }

  it should "call query" in new mocks {
    implicit val dbc = DBCredentials("test", "test")

    spikeDao.call(Call.Query, Param2(new Statement(), Some(RecordSequenceHandler()))).futureValue shouldBe {}
    spikeDao.call(Call.Query, Param2(new Statement())).futureValue shouldBe null.asInstanceOf[ResultSet] //spikeMock.rs
  }

  it should "call queryAggregate" in new mocks {
    implicit val dbc = DBCredentials("test", "test")

    spikeDao
      .call(Call.QueryAggregate, Param1("pName", "fName", Nil, Some(new Statement())))
      .futureValue shouldBe null.asInstanceOf[ResultSet]
    spikeDao.call(Call.QueryAggregate, new Statement()).futureValue shouldBe null.asInstanceOf[ResultSet]
  }

  it should "call scanAll" in new mocks {
    implicit val dbc = DBCredentials("test", "test")

    spikeDao.call(Call.ScanAll, Param3("nSpace", "setName", Nil, Some(ScanCallbackImpl()))).futureValue shouldBe {}
    spikeDao.call(Call.ScanAll, Param4("nSpace", "setName", Nil, Some(RecordSequenceHandler()))).futureValue shouldBe {}
  }

  it should "call removeUdf" in new mocks {
    implicit val dbc = DBCredentials("test", "test")

    spikeDao.call(Call.RemoveUdf, "serverPath").futureValue shouldBe {}
  }

  it should "call registerUdfString" in new mocks {
    implicit val dbc = DBCredentials("test", "test")

    spikeDao.call(Call.RegisterUdfString, Param5("10", "serverPath", Language.LUA)).futureValue shouldBe regTask
  }

  it should "throw Unsupported type or action exception" in new mocks {
    implicit val dbc = DBCredentials("test", "test")

    // An Int payload does not fit the Operate action, so the DSL must fail fast.
    intercept[AerospikeDSLError](spikeDao.callK[String](CallK.Operate, "StrKey", 2).futureValue)
      .message shouldBe "Unsupported type class java.lang.Integer or action Operate is not available for this type. You can use: Operate, Delete, Touch, Execute, Exists"
  }

}
--------------------------------------------------------------------------------
/src/test/scala/ru/tinkoff/aerospikemacro/converters/BatchReadWrapperTest.scala:
--------------------------------------------------------------------------------
/*
 * Copyright (c) 2016 Tinkoff
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ru.tinkoff.aerospikemacro.converters

import java.util

import com.aerospike.client.BatchRead
import org.scalatest.{FlatSpec, Matchers}
import ru.tinkoff.aerospike.dsl.batchread.BatchReadWrapper
import ru.tinkoff.aerospikemacro.domain.DBCredentials

import scala.collection.JavaConverters._

/**
 * @author MarinaSigaeva
 * @since 21.09.16
 */
class BatchReadWrapperTest extends FlatSpec with Matchers {

  /** Converts wrappers to a Java list of BatchReads, as the DSL does internally. */
  def getList(kws: List[BatchReadWrapper]): util.List[BatchRead] = {
    kws.view
      .map(_.apply)
      .toList
      .asJava
  }

  it should "create BatchReads of different Key types" in {

    // String key, namespace/set "test"/"test".
    val b1 = new BatchReadWrapper {
      val keyValue = "str"
      val binNames = Array("s1", "s2")
      implicit val dbc = DBCredentials("test", "test")
    }
    // Int key, namespace/set "ns"/"setName".
    val b2 = new BatchReadWrapper {
      val keyValue = 2
      implicit val dbc = DBCredentials("ns", "setName")
      val binNames = Array("s3", "s4")
    }

    val brs = getList(List(b1, b2))
    brs.get(0).key.namespace shouldBe "test"
    brs.get(0).key.setName shouldBe "test"
    brs.get(0).binNames shouldBe Array("s1", "s2")

    brs.get(1).key.namespace shouldBe "ns"
    brs.get(1).key.setName shouldBe "setName"
    brs.get(1).binNames shouldBe Array("s3", "s4")

  }

}
--------------------------------------------------------------------------------
/src/test/scala/ru/tinkoff/aerospikemacro/converters/CasterTest.scala:
--------------------------------------------------------------------------------
/*
 * Copyright (c) 2016 Tinkoff
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
15 | */ 16 | 17 | package ru.tinkoff.aerospikemacro.converters 18 | 19 | import shapeless._ 20 | import syntax.std.traversable._ 21 | import org.junit.Test 22 | import org.junit.Assert._ 23 | import ru.tinkoff.aerospikemacro.cast.Caster 24 | import scala.reflect.runtime.universe._ 25 | 26 | /** 27 | * @author MarinaSigaeva 28 | * @since 27.10.16 29 | */ 30 | class CasterTest { 31 | val tpeStr = """shapeless.::[String,shapeless.::[Int,shapeless.::[Int,shapeless.HNil]]]""" 32 | val isTypes: Array[String] = tpeStr 33 | .replaceAll("""shapeless.::""", "") 34 | .replace(",shapeless.HNil", "") 35 | .toCharArray 36 | .filter(e => e != '[' & e != ']') 37 | .mkString 38 | .split(",") 39 | 40 | @Test 41 | def testCast() { 42 | val expected = Caster.cast(2L, isTypes(1)).asInstanceOf[Int] 43 | assertTrue(expected == 2) 44 | } 45 | 46 | @Test 47 | def testCastHList() { 48 | val expected = Caster.castHListElements(List("ddd", 2L, 4L), tpeStr).toHList[String :: Int :: Int :: HNil] 49 | assertTrue(expected.isDefined) 50 | } 51 | 52 | @Test 53 | def testCastTuple() { 54 | import collection.JavaConverters._ 55 | 56 | val hMap: java.util.HashMap[Any, Any] = new java.util.HashMap[Any, Any](Map("0" -> 2, "1" -> "asd").asJava) 57 | val tpl = weakTypeOf[Tuple2[Int, String]].typeArgs.map(_.toString) 58 | val expected = Caster.castTuple(hMap.asScala.toMap, tpl) 59 | assertTrue(expected == Option((2, "asd"))) 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /src/test/scala/ru/tinkoff/aerospikemacro/converters/KeyTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Tinkoff 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package ru.tinkoff.aerospikemacro.converters 18 | 19 | import com.aerospike.client.{Key, Value} 20 | import org.scalatest.{FlatSpec, Matchers} 21 | import ru.tinkoff.aerospikemacro.domain.DBCredentials 22 | 23 | /** 24 | * @author MarinaSigaeva 25 | * @since 08.09.16 26 | */ 27 | class KeyTest extends FlatSpec with Matchers { 28 | implicit val dbc = DBCredentials("test", "test") 29 | 30 | def getKey[K](k: K)(implicit kC: KeyWrapper[K]): Key = kC(k) 31 | 32 | case class Cat(name: String, age: Int) 33 | 34 | trait mocks { 35 | 36 | implicit val bValue = new KeyWrapper[Cat] { 37 | override def apply(cat: Cat): Key = new Key("test", "test", cat.toString) 38 | } 39 | 40 | val arr = Array(1.toByte, 1.toByte, 1.toByte) 41 | val stringKey = new Key("test", "test", "StringKey") 42 | val intKey = new Key("test", "test", 12) 43 | val longKey = new Key("test", "test", 1L) 44 | val catKey = new Key("test", "test", Cat("blob", 12).toString) 45 | val byteArrayKey = new Key("test", "test", arr) 46 | } 47 | 48 | it should "work with one namespace and " in new mocks { 49 | 50 | getKey("StringKey") shouldBe stringKey 51 | getKey(12) shouldBe intKey 52 | getKey(1L) shouldBe longKey 53 | getKey(arr) shouldBe byteArrayKey 54 | getKey(Cat("blob", 12)) shouldBe catKey 55 | 56 | } 57 | 58 | trait mocks2 { 59 | 60 | import KeyWrapper._ 61 | def withCustomKey[T](any: T, dbc: DBCredentials): Key = create(dbc)(any) 62 | 63 | val stringKey0 = new Key("test", "test", "StringKey") 64 | val stringKey1 = new Key("dbName01", "tableName1", 
"StringKey") 65 | val stringKey2 = new Key("dbName02", "tableName2", "StringKey") 66 | 67 | } 68 | 69 | it should "work in different namespaces and setNames" in new mocks2 { 70 | withCustomKey("StringKey", DBCredentials("dbName01", "tableName1")) shouldBe stringKey1 71 | withCustomKey("StringKey", DBCredentials("dbName02", "tableName2")) shouldBe stringKey2 72 | 73 | //and u can still use default namespace and setName 74 | getKey("StringKey") shouldBe stringKey0 75 | 76 | } 77 | 78 | } --------------------------------------------------------------------------------