├── .credo.exs ├── .drone.yml ├── .formatter.exs ├── .gitignore ├── AUTHORS ├── LICENSE ├── README.md ├── bench └── query_performance.exs ├── config └── config.exs ├── docker-compose.yml ├── lib ├── clickhousex.ex └── clickhousex │ ├── codec.ex │ ├── codec │ ├── binary.ex │ ├── binary │ │ └── extractor.ex │ ├── json.ex │ ├── row_binary.ex │ ├── row_binary │ │ └── utils.ex │ └── values.ex │ ├── error.ex │ ├── http_client.ex │ ├── http_request.ex │ ├── protocol.ex │ ├── query.ex │ └── result.ex ├── mix.exs ├── mix.lock └── test ├── clickhouse_case.ex ├── clickhousex ├── codec │ ├── binary_test.exs │ └── extractor_test.exs ├── login_test.exs ├── query_test.exs ├── storage_test.exs └── table_storage_test.exs └── test_helper.exs /.credo.exs: -------------------------------------------------------------------------------- 1 | %{ 2 | configs: [ 3 | %{ 4 | name: "default", 5 | files: %{ 6 | included: [ 7 | "lib/", 8 | "test/" 9 | ], 10 | excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"] 11 | }, 12 | plugins: [], 13 | requires: [], 14 | strict: true, 15 | parse_timeout: 5000, 16 | color: false, 17 | checks: [ 18 | # Consistency Checks 19 | {Credo.Check.Consistency.ExceptionNames, []}, 20 | {Credo.Check.Consistency.LineEndings, []}, 21 | {Credo.Check.Consistency.ParameterPatternMatching, []}, 22 | {Credo.Check.Consistency.SpaceAroundOperators, []}, 23 | {Credo.Check.Consistency.SpaceInParentheses, []}, 24 | {Credo.Check.Consistency.TabsOrSpaces, []}, 25 | 26 | # Design Checks 27 | {Credo.Check.Design.AliasUsage, [priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0]}, 28 | {Credo.Check.Design.TagTODO, [exit_status: 2]}, 29 | {Credo.Check.Design.TagFIXME, []}, 30 | 31 | # Readability Checks 32 | {Credo.Check.Readability.AliasOrder, false}, 33 | {Credo.Check.Readability.FunctionNames, []}, 34 | {Credo.Check.Readability.LargeNumbers, []}, 35 | {Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]}, 36 | 
{Credo.Check.Readability.ModuleAttributeNames, []}, 37 | {Credo.Check.Readability.ModuleDoc, []}, 38 | {Credo.Check.Readability.ModuleNames, []}, 39 | {Credo.Check.Readability.ParenthesesInCondition, []}, 40 | {Credo.Check.Readability.ParenthesesOnZeroArityDefs, []}, 41 | {Credo.Check.Readability.PredicateFunctionNames, []}, 42 | {Credo.Check.Readability.PreferImplicitTry, []}, 43 | {Credo.Check.Readability.RedundantBlankLines, []}, 44 | {Credo.Check.Readability.Semicolons, []}, 45 | {Credo.Check.Readability.SpaceAfterCommas, []}, 46 | {Credo.Check.Readability.StringSigils, []}, 47 | {Credo.Check.Readability.TrailingBlankLine, []}, 48 | {Credo.Check.Readability.TrailingWhiteSpace, []}, 49 | {Credo.Check.Readability.UnnecessaryAliasExpansion, []}, 50 | {Credo.Check.Readability.VariableNames, []}, 51 | 52 | # Refactoring Opportunities 53 | {Credo.Check.Refactor.CondStatements, []}, 54 | {Credo.Check.Refactor.CyclomaticComplexity, []}, 55 | {Credo.Check.Refactor.FunctionArity, []}, 56 | {Credo.Check.Refactor.LongQuoteBlocks, []}, 57 | {Credo.Check.Refactor.MapInto, []}, 58 | {Credo.Check.Refactor.MatchInCondition, []}, 59 | {Credo.Check.Refactor.NegatedConditionsInUnless, []}, 60 | {Credo.Check.Refactor.NegatedConditionsWithElse, []}, 61 | {Credo.Check.Refactor.Nesting, []}, 62 | {Credo.Check.Refactor.UnlessWithElse, []}, 63 | {Credo.Check.Refactor.WithClauses, []}, 64 | 65 | # Warnings 66 | {Credo.Check.Warning.BoolOperationOnSameValues, []}, 67 | {Credo.Check.Warning.ExpensiveEmptyEnumCheck, []}, 68 | {Credo.Check.Warning.IExPry, []}, 69 | {Credo.Check.Warning.IoInspect, []}, 70 | {Credo.Check.Warning.LazyLogging, []}, 71 | {Credo.Check.Warning.MixEnv, false}, 72 | {Credo.Check.Warning.OperationOnSameValues, []}, 73 | {Credo.Check.Warning.OperationWithConstantResult, []}, 74 | {Credo.Check.Warning.RaiseInsideRescue, []}, 75 | {Credo.Check.Warning.UnusedEnumOperation, []}, 76 | {Credo.Check.Warning.UnusedFileOperation, []}, 77 | 
{Credo.Check.Warning.UnusedKeywordOperation, []}, 78 | {Credo.Check.Warning.UnusedListOperation, []}, 79 | {Credo.Check.Warning.UnusedPathOperation, []}, 80 | {Credo.Check.Warning.UnusedRegexOperation, []}, 81 | {Credo.Check.Warning.UnusedStringOperation, []}, 82 | {Credo.Check.Warning.UnusedTupleOperation, []}, 83 | {Credo.Check.Warning.UnsafeExec, []}, 84 | 85 | # Controversial and experimental checks (opt-in, just replace `false` with `[]`) 86 | {Credo.Check.Readability.StrictModuleLayout, false}, 87 | {Credo.Check.Consistency.MultiAliasImportRequireUse, false}, 88 | {Credo.Check.Consistency.UnusedVariableNames, false}, 89 | {Credo.Check.Design.DuplicatedCode, false}, 90 | {Credo.Check.Readability.AliasAs, false}, 91 | {Credo.Check.Readability.MultiAlias, false}, 92 | {Credo.Check.Readability.Specs, false}, 93 | {Credo.Check.Readability.SinglePipe, false}, 94 | {Credo.Check.Readability.WithCustomTaggedTuple, false}, 95 | {Credo.Check.Refactor.ABCSize, false}, 96 | {Credo.Check.Refactor.AppendSingleItem, false}, 97 | {Credo.Check.Refactor.DoubleBooleanNegation, false}, 98 | {Credo.Check.Refactor.ModuleDependencies, false}, 99 | {Credo.Check.Refactor.NegatedIsNil, false}, 100 | {Credo.Check.Refactor.PipeChainStart, false}, 101 | {Credo.Check.Refactor.VariableRebinding, false}, 102 | {Credo.Check.Warning.LeakyEnvironment, false}, 103 | {Credo.Check.Warning.MapGetUnsafePass, false}, 104 | {Credo.Check.Warning.UnsafeToAtom, false}, 105 | # lib/clickhousex/http_client.ex:7 106 | # lib/clickhousex/query.ex:36 107 | {Credo.Check.Warning.ApplicationConfigInModuleAttribute, false} 108 | ] 109 | } 110 | ] 111 | } 112 | -------------------------------------------------------------------------------- /.drone.yml: -------------------------------------------------------------------------------- 1 | --- 2 | kind: pipeline 3 | type: docker 4 | name: default 5 | 6 | services: 7 | - name: clickhouse 8 | image: yandex/clickhouse-server 9 | 10 | steps: 11 | - name: deps 12 | image: 
elixir:alpine 13 | commands: 14 | - apk add --no-cache git 15 | - mix local.hex --force 16 | - mix local.rebar --force 17 | - mix deps.get 18 | 19 | - name: build 20 | image: elixir:alpine 21 | commands: 22 | - apk add --no-cache git 23 | - mix local.hex --force 24 | - mix local.rebar --force 25 | - mix compile --warnings-as-errors 26 | 27 | - name: test 28 | image: elixir:alpine 29 | commands: 30 | - apk add --no-cache git 31 | - mix local.hex --force 32 | - mix local.rebar --force 33 | - env test_db_hostname=clickhouse mix test 34 | 35 | - name: format 36 | image: elixir:alpine 37 | commands: 38 | - mix format --check-formatted 39 | 40 | - name: lint 41 | image: elixir:alpine 42 | commands: 43 | - apk add --no-cache git 44 | - mix local.hex --force 45 | - mix local.rebar --force 46 | - mix credo -a 47 | -------------------------------------------------------------------------------- /.formatter.exs: -------------------------------------------------------------------------------- 1 | # Used by "mix format" 2 | [ 3 | inputs: ["{mix,.formatter,.credo}.exs", "{config,lib,test}/**/*.{ex,exs}"], 4 | line_length: 120 5 | ] 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /_build 2 | /deps 3 | .idea/* 4 | *.iml 5 | /doc 6 | .elixir_ls/ -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | The following authors have created the source code of "ClickhousEx" 2 | published and distributed by Appodeal Inc. 
as the owner: 3 | 4 | Ivan Zinoviev <...> 5 | Roman Chudov 6 | Konstantin Grabar 7 | Evgeniy Shurmin 8 | Alexey Lukyanov 9 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2018-2018 Appodeal Inc. 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Clickhousex 2 | 3 | ClickHouse database driver to connect with Elixir application by HTTP interface. 4 | 5 | ## Installation 6 | 7 | ```elixir 8 | def deps do 9 | [ 10 | {:clickhousex, "~> 0.4.0"} 11 | ] 12 | end 13 | ``` 14 | 15 | ## Start driver 16 | Call `start_link()/1` function and pass connection options: 17 | 18 | ```elixir 19 | Clickhousex.start_link( 20 | scheme: :http, 21 | hostname: "localhost", 22 | port: 8123, 23 | database: "default", 24 | username: "user", 25 | password: "654321" 26 | ) 27 | ``` 28 | 29 | Options expects a keyword list with zero or more of: 30 | 31 | * `scheme` - Scheme (:http | :https). Default value: :http 32 | * `hostname` - The server hostname. Default value: "localhost" 33 | * `database` - Database name. Default value: "default" 34 | * `port` - The server port number. Default value: 8123 35 | * `username` - Username. Default value: nil 36 | * `password` - User's password. 
Default value: nil 37 | 38 | ## Queries examples 39 | 40 | ```elixir 41 | iex(1)> {:ok, pid} = Clickhousex.start_link(scheme: :http, hostname: "localhost", port: 8123, database: "system") 42 | {:ok, #PID<0.195.0>} 43 | iex(2)> Clickhousex.query(pid, "SHOW TABLES", []) 44 | {:ok, %Clickhousex.Query{columns: nil, name: "", statement: "SHOW TABLES"}, 45 | %Clickhousex.Result{columns: ["name"], command: :selected, num_rows: 23, 46 | rows: [["asynchronous_metrics"], ["build_options"], ["clusters"], ["columns"], 47 | ["databases"], ["dictionaries"], ["events"], ["functions"], 48 | ["graphite_retentions"], ["merges"], ["metrics"], ["models"], ["numbers"], 49 | ["numbers_mt"], ["one"], ["parts"], ["parts_columns"], ["processes"], 50 | ["replicas"], ["replication_queue"], ["settings"], ["tables"], 51 | ["zookeeper"]]}} 52 | iex(3)> 53 | ``` 54 | 55 | ## Documentation 56 | 57 | Documentation can be found [here](https://hexdocs.pm/clickhousex). 58 | -------------------------------------------------------------------------------- /bench/query_performance.exs: -------------------------------------------------------------------------------- 1 | database = "clickhousex" 2 | table = "#{database}.benchmarks" 3 | create_database = "CREATE DATABASE IF NOT EXISTS #{database}" 4 | drop_database = "DROP DATABASE #{database}" 5 | 6 | create_table = """ 7 | CREATE TABLE IF NOT EXISTS #{table} ( 8 | u64_val UInt64, 9 | string_val String, 10 | list_val Array(String), 11 | nullable_u64_val Nullable(UInt64), 12 | date_val Date, 13 | datetime_val DateTime 14 | ) ENGINE = Memory 15 | """ 16 | 17 | alias Clickhousex, as: CH 18 | 19 | {:ok, client} = CH.start_link() 20 | {:ok, _, _} = CH.query(client, create_database, []) 21 | {:ok, _, _} = CH.query(client, create_table, []) 22 | 23 | insert = fn column_name, value -> 24 | {:ok, _, _} = CH.query(client, "INSERT INTO #{table} (#{column_name}) VALUES (?)", [value]) 25 | end 26 | 27 | select = fn column_name, value -> 28 | {:ok, _, result} = 29 | 
CH.query(client, "SELECT #{column_name} FROM #{table} WHERE #{column_name} = ?", [value]) 30 | end 31 | 32 | seed_data_count = 1000 33 | 34 | l = Enum.map(1..50, fn n -> String.duplicate("#{n}", 5) end) 35 | date = Date.utc_today() 36 | date_time = DateTime.utc_now() 37 | 38 | for n <- 1..seed_data_count do 39 | insert.("u64_val", n) 40 | end 41 | 42 | for string <- l do 43 | insert.("string_val", string) 44 | end 45 | 46 | Benchee.run(%{ 47 | "Insert ints" => fn -> 48 | insert.("u64_val", 4_924_848_124_381) 49 | end, 50 | "Insert strings" => fn -> 51 | insert.("string_val", "This is a long string") 52 | end, 53 | "Insert lists" => fn -> 54 | insert.("list_val", ["Hello there guys"]) 55 | end, 56 | "Insert nullable non-null" => fn -> 57 | insert.("nullable_u64_val", 4_928_481_949_828_321) 58 | end, 59 | "Insert nullable null" => fn -> 60 | insert.("nullable_u64_val", nil) 61 | end, 62 | "Insert date" => fn -> 63 | insert.("date_val", date) 64 | end, 65 | "Insert datetime" => fn -> 66 | insert.("datetime_val", date_time) 67 | end 68 | }) 69 | 70 | Benchee.run(%{ 71 | "Select ints" => fn -> 72 | select.("u64_val", :rand.uniform(seed_data_count)) 73 | end, 74 | "Select strings" => fn -> 75 | select.("string_val", "5050505050") 76 | end, 77 | "selecting nulls" => fn -> 78 | select.("nullable_u64_val", nil) 79 | end, 80 | "selecting non null" => fn -> 81 | {:ok, _, _} = 82 | CH.query(client, "SELECT * from #{table} WHERE nullable_u64_val IS NOT NULL", []) 83 | end, 84 | "selecting all" => fn -> 85 | {:ok, _, _} = CH.query(client, "SELECT * from #{table}", []) 86 | end 87 | }) 88 | 89 | {:ok, _, _} = CH.query(client, drop_database, []) 90 | -------------------------------------------------------------------------------- /config/config.exs: -------------------------------------------------------------------------------- 1 | use Mix.Config 2 | 3 | config :clickhousex, codec: Clickhousex.Codec.RowBinary 4 | 
-------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | 3 | services: 4 | clickhouse: 5 | image: yandex/clickhouse-server 6 | restart: always 7 | ports: 8 | - 8123:8123 9 | - 9000:9000 10 | -------------------------------------------------------------------------------- /lib/clickhousex.ex: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex do 2 | @moduledoc """ 3 | Clickhouse driver for Elixir. 4 | 5 | 6 | This module handles the connection to Clickhouse, providing support 7 | for queries, connection backoff, logging, pooling and 8 | more. 9 | """ 10 | 11 | alias Clickhousex.Query 12 | 13 | @typedoc """ 14 | A connection process name, pid or reference. 15 | 16 | A connection reference is used when making multiple requests to the same 17 | connection, see `transaction/3`. 18 | """ 19 | @type conn :: DBConnection.conn() 20 | 21 | @timeout 60_000 22 | def timeout, do: @timeout 23 | 24 | ### PUBLIC API ### 25 | 26 | @doc """ 27 | Connect to ClickHouse. 28 | `opts` expects a keyword list with zero or more of: 29 | * `:scheme` - Scheme (:http | :https). 30 | * default value: :http 31 | * `:hostname` - The server hostname. 32 | * default value: localhost 33 | * `:database` - Database name. 34 | * default value: "default" 35 | * `:port` - The server port number. 36 | * default value: 8123 37 | * `:username` - Username. 38 | * default value: empty 39 | * `:password` - User's password. 
40 | * default value: empty 41 | """ 42 | 43 | @spec start_link(Keyword.t()) :: {:ok, pid} | {:error, term} 44 | def start_link(opts \\ []) do 45 | opts = Keyword.put(opts, :show_sensitive_data_on_connection_error, true) 46 | DBConnection.start_link(Clickhousex.Protocol, opts) 47 | end 48 | 49 | @spec child_spec(Keyword.t()) :: Supervisor.Spec.spec() 50 | def child_spec(opts) do 51 | DBConnection.child_spec(Clickhousex.Protocol, opts) 52 | end 53 | 54 | @spec query(DBConnection.conn(), binary(), list, Keyword.t()) :: 55 | {:ok, iodata(), Clickhousex.Result.t()} 56 | def query(conn, statement, params \\ [], opts \\ []) do 57 | DBConnection.prepare_execute(conn, %Query{name: "", statement: statement}, params, opts) 58 | end 59 | 60 | @spec query!(DBConnection.conn(), binary(), list, Keyword.t()) :: 61 | {iodata(), Clickhousex.Result.t()} 62 | def query!(conn, statement, params \\ [], opts \\ []) do 63 | DBConnection.prepare_execute!(conn, %Query{name: "", statement: statement}, params, opts) 64 | end 65 | 66 | ## Helpers 67 | def defaults(opts) do 68 | Keyword.put_new(opts, :timeout, @timeout) 69 | end 70 | end 71 | -------------------------------------------------------------------------------- /lib/clickhousex/codec.ex: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.Codec do 2 | @moduledoc """ 3 | Behaviour for input and/or output format. 4 | 5 | If none of the out of the box codecs suits your needs, you can 6 | implement one of [the supported ones][1] yourself. 
7 | 8 | [1]: https://clickhouse.tech/docs/en/interfaces/formats/ 9 | """ 10 | 11 | @type select_response :: %{column_names: [String.t()], rows: [tuple], row_count: non_neg_integer} 12 | @type state :: any 13 | 14 | @callback response_format() :: String.t() 15 | @callback request_format() :: String.t() 16 | @callback new() :: state 17 | @callback append(state, iodata) :: state 18 | @callback decode(state) :: {:ok, select_response} | {:error, any} 19 | @callback encode(query :: Clickhousex.Query.t(), param_replacements :: iodata, params :: [any]) :: iodata 20 | end 21 | -------------------------------------------------------------------------------- /lib/clickhousex/codec/binary.ex: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.Codec.Binary do 2 | @moduledoc false 3 | 4 | use Bitwise 5 | 6 | def encode(:varint, num) when num < 128, do: <> 7 | def encode(:varint, num), do: <<1::1, num::7, encode(:varint, num >>> 7)::binary>> 8 | 9 | def encode(:string, str) when is_bitstring(str) do 10 | [encode(:varint, byte_size(str)), str] 11 | end 12 | 13 | def encode(:u8, i) when is_integer(i) do 14 | <> 15 | end 16 | 17 | def encode(:u16, i) do 18 | <> 19 | end 20 | 21 | def encode(:u32, i) do 22 | <> 23 | end 24 | 25 | def encode(:u64, i) do 26 | <> 27 | end 28 | 29 | def encode(:i8, i) do 30 | <> 31 | end 32 | 33 | def encode(:i16, i) do 34 | <> 35 | end 36 | 37 | def encode(:i32, i) do 38 | <> 39 | end 40 | 41 | def encode(:i64, i) do 42 | <> 43 | end 44 | 45 | def encode(:f64, f) do 46 | <> 47 | end 48 | 49 | def encode(:f32, f) do 50 | <> 51 | end 52 | 53 | def encode(:boolean, true) do 54 | encode(:u8, 1) 55 | end 56 | 57 | def encode(:boolean, false) do 58 | encode(:u8, 0) 59 | end 60 | 61 | def encode({:list, type}, list) do 62 | elements = for e <- list, do: encode(type, e) 63 | [encode(:varint, length(list)), elements] 64 | end 65 | 66 | def encode({:nullable, _type}, nil) do 67 | encode(:u8, 1) 68 | end 69 | 
70 | def encode({:nullable, type}, thing) do 71 | [ 72 | encode(:u8, 0), 73 | encode(type, thing) 74 | ] 75 | end 76 | 77 | def decode(bytes, :struct, struct_module) do 78 | decode_struct(bytes, struct_module.decode_spec(), struct(struct_module)) 79 | end 80 | 81 | def decode(<<1, rest::binary>>, {:nullable, _type}) do 82 | {:ok, nil, rest} 83 | end 84 | 85 | def decode(<<0, rest::binary>>, {:nullable, type}) do 86 | decode(rest, type) 87 | end 88 | 89 | def decode(<<>>, {:nullable, type}) do 90 | {:resume, fn more_data -> decode(more_data, {:nullable, type}) end} 91 | end 92 | 93 | def decode(bytes, :varint) do 94 | decode_varint(bytes, 0, 0) 95 | end 96 | 97 | def decode(bytes, :string) do 98 | with {:ok, byte_count, rest} <- decode(bytes, :varint), 99 | true <- byte_size(rest) >= byte_count do 100 | <> = rest 101 | {:ok, decoded_str, rest} 102 | else 103 | _ -> 104 | {:resume, fn more_data -> decode(bytes <> more_data, :string) end} 105 | end 106 | end 107 | 108 | def decode(<<1::little-unsigned-size(8), rest::binary>>, :boolean) do 109 | {:ok, true, rest} 110 | end 111 | 112 | def decode(<<0::little-unsigned-size(8), rest::binary>>, :boolean) do 113 | {:ok, false, rest} 114 | end 115 | 116 | def decode(bytes, {:list, data_type}) do 117 | case decode(bytes, :varint) do 118 | {:ok, count, rest} -> 119 | decode_list(rest, data_type, count, []) 120 | 121 | _ -> 122 | decoder = fn more_data -> decode(bytes <> more_data, {:list, data_type}) end 123 | {:resume, decoder} 124 | end 125 | end 126 | 127 | def decode(<>, :i64) do 128 | {:ok, decoded, rest} 129 | end 130 | 131 | def decode(<>, :i32) do 132 | {:ok, decoded, rest} 133 | end 134 | 135 | def decode(<>, :i16) do 136 | {:ok, decoded, rest} 137 | end 138 | 139 | def decode(<>, :i8) do 140 | {:ok, decoded, rest} 141 | end 142 | 143 | def decode(<>, :u64) do 144 | {:ok, decoded, rest} 145 | end 146 | 147 | def decode(<>, :u32) do 148 | {:ok, decoded, rest} 149 | end 150 | 151 | def decode(<>, :u16) do 152 | {:ok, 
decoded, rest} 153 | end 154 | 155 | def decode(<>, :u8) do 156 | {:ok, decoded, rest} 157 | end 158 | 159 | def decode(<>, :date) do 160 | {:ok, date} = Date.new(1970, 01, 01) 161 | date = Date.add(date, days_since_epoch) 162 | 163 | {:ok, date, rest} 164 | end 165 | 166 | def decode(<>, :datetime) do 167 | {:ok, date_time} = NaiveDateTime.new(1970, 1, 1, 0, 0, 0) 168 | date_time = NaiveDateTime.add(date_time, seconds_since_epoch) 169 | 170 | {:ok, date_time, rest} 171 | end 172 | 173 | def decode(<<0, rest::binary>>, :boolean) do 174 | {:ok, false, rest} 175 | end 176 | 177 | def decode(<<1, rest::binary>>, :boolean) do 178 | {:ok, true, rest} 179 | end 180 | 181 | def decode(<>, :f64) do 182 | {:ok, decoded, rest} 183 | end 184 | 185 | def decode(<>, :f32) do 186 | {:ok, decoded, rest} 187 | end 188 | 189 | def decode(bytes, type) do 190 | {:resume, &decode(bytes <> &1, type)} 191 | end 192 | 193 | defp decode_list(rest, _, 0, accum) do 194 | {:ok, Enum.reverse(accum), rest} 195 | end 196 | 197 | defp decode_list(bytes, data_type, count, accum) do 198 | case decode(bytes, data_type) do 199 | {:ok, decoded, rest} -> 200 | decode_list(rest, data_type, count - 1, [decoded | accum]) 201 | 202 | {:resume, _} -> 203 | {:resume, &decode_list(bytes <> &1, data_type, count, accum)} 204 | end 205 | end 206 | 207 | defp decode_varint(<<0::size(1), byte::size(7), rest::binary>>, result, shift) do 208 | {:ok, result ||| byte <<< shift, rest} 209 | end 210 | 211 | defp decode_varint(<<1::1, byte::7, rest::binary>>, result, shift) do 212 | decode_varint(rest, result ||| byte <<< shift, shift + 7) 213 | end 214 | 215 | defp decode_varint(bytes, result, shift) do 216 | {:resume, &decode_varint(bytes <> &1, result, shift)} 217 | end 218 | 219 | defp decode_struct(rest, [], struct) do 220 | {:ok, struct, rest} 221 | end 222 | 223 | defp decode_struct(rest, [{field_name, type} | specs], struct) do 224 | case decode(rest, type) do 225 | {:ok, decoded, rest} -> 226 | 
decode_struct(rest, specs, Map.put(struct, field_name, decoded)) 227 | 228 | {:error, _} = err -> 229 | err 230 | end 231 | end 232 | end 233 | -------------------------------------------------------------------------------- /lib/clickhousex/codec/binary/extractor.ex: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.Codec.Binary.Extractor do 2 | @moduledoc """ 3 | Allows modules that `use` this module to create efficient extractor functions that speak clickhouse's binary protocol. 4 | 5 | To define extractors, annotate a function with the `extract` attribute like this: 6 | 7 | 8 | @extract length: :varint 9 | def extract_length(<>, length, other_param) do 10 | do_something_with_length(data, length, other_param) 11 | end 12 | 13 | def do_something_with_length(_data, length, other_param) do 14 | {other_param, length} 15 | end 16 | 17 | In the above example, a function named `extract_length/2` will be created, which, when passed a binary, will 18 | extract the length varint from it, and call the function above, passing the unparsed part of the binary and the extracted 19 | length varint to it. 20 | 21 | Usage looks like this 22 | 23 | {:ok, binary_from_network} = :gen_tcp.recv(conn, 0) 24 | {:this_is_passed_along, length} = extract_length(binary_from_network, :this_is_passed_along) 25 | 26 | 27 | If there isn't enough data to parse, a resume tuple is returned. The second element of the tuple is a function that when 28 | called with more data, picks up the parse operation where it left off. 29 | 30 | 31 | {:resume, resume_fn} = extract_length(<<>>, :this_is_passed_along) 32 | {:ok, data} = :gen_tcp.recv(conn, 0) 33 | {:this_is_passed_along, length} = resume_fn.(data) 34 | 35 | 36 | # Performance 37 | All functions generated by this module take advantage of binary optimizations, resuse match contexts and won't create sub-binaries. 
defmacro __using__(_) do
  quote do
    # `use Bitwise` is deprecated; `import Bitwise` is the supported way to
    # bring the `|||` / `<<<` operators that the generated varint decoders
    # reference into the caller's scope.
    import Bitwise
    # `:extract` holds the pending annotation for the next function definition;
    # `:extractors` accumulates every annotated definition for __before_compile__.
    Module.register_attribute(__MODULE__, :extract, accumulate: true)
    Module.register_attribute(__MODULE__, :extractors, accumulate: true)
    @on_definition {unquote(__MODULE__), :on_definition}
    @before_compile unquote(__MODULE__)
  end
end
{name, visibility, args, extractors}) 100 | end 101 | 102 | defp build_jump_fn(base_fn_name, extractor_fn_name, extractor_args) do 103 | quote do 104 | def unquote(base_fn_name)(<<>>, unquote_splicing(extractor_args)) do 105 | {:resume, &unquote(extractor_fn_name)(&1, unquote_splicing(extractor_args))} 106 | end 107 | 108 | def unquote(base_fn_name)(<>, unquote_splicing(extractor_args)) do 109 | unquote(extractor_fn_name)(rest, unquote_splicing(extractor_args)) 110 | end 111 | end 112 | end 113 | 114 | defp build_extractor(:varint, arg_name, extractor_name, landing_call, [_ | non_binary_args]) do 115 | extractor_args = reject_argument(non_binary_args, arg_name) 116 | int_variable = Macro.var(arg_name, nil) 117 | 118 | vars = quote do: [a, b, c, d, e, f, g, h, i, j] 119 | 120 | # ZigZag encoding is defined for arbitrary sized integers, but for 121 | # our purposes up to 10 parts are enough. Let's unroll the decoding loop. 122 | extractor_clauses = 123 | for parts_count <- 1..10 do 124 | vars_for_clause = Enum.take(vars, parts_count) 125 | pattern = varint_pattern(vars_for_clause) 126 | decoding = varint_decoding(vars_for_clause) 127 | 128 | quote do 129 | def unquote(extractor_name)(unquote(pattern), unquote_splicing(extractor_args)) do 130 | unquote(int_variable) = unquote(decoding) 131 | unquote(landing_call) 132 | end 133 | end 134 | end 135 | 136 | quote do 137 | def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args)) do 138 | {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args))} 139 | end 140 | 141 | unquote_splicing(extractor_clauses) 142 | 143 | def unquote(extractor_name)(<>, unquote_splicing(extractor_args)) do 144 | {:resume, fn more_data -> unquote(extractor_name)(rest <> more_data, unquote_splicing(extractor_args)) end} 145 | end 146 | end 147 | end 148 | 149 | # `vars` are variables for binding varint parts, from high to low 150 | defp varint_pattern([_ | _] = vars) do 151 | [last | rest] = Enum.reverse(vars) 152 | tag = 
quote do: 1 :: size(1) 153 | init = quote do: [0 :: size(1), unquote(last) :: size(7), rest :: binary] 154 | patterns = Enum.reduce(rest, init, &[tag, quote(do: unquote(&1) :: size(7)) | &2]) 155 | {:<<>>, [], patterns} 156 | end 157 | 158 | # `vars` are varint parts, from high to low 159 | defp varint_decoding([_ | _] = vars) do 160 | vars 161 | |> Enum.reverse() 162 | |> Enum.with_index() 163 | |> Enum.map(fn 164 | {var, 0} -> var 165 | {var, index} -> {:<<<, [], [var, index * 7]} 166 | end) 167 | |> Enum.reduce(&{:|||, [], [&2, &1]}) 168 | end 169 | 170 | @int_extractors [ 171 | {:i64, :signed, 64}, 172 | {:u64, :unsigned, 64}, 173 | {:i32, :signed, 32}, 174 | {:u32, :unsigned, 32}, 175 | {:i16, :signed, 16}, 176 | {:u16, :unsigned, 16}, 177 | {:i8, :signed, 8}, 178 | {:u8, :unsigned, 8} 179 | ] 180 | 181 | for {type_name, signed, width} <- @int_extractors do 182 | defp build_extractor(unquote(type_name), arg_name, extractor_name, landing_call, [_ | args]) do 183 | extractor_args = reject_argument(args, arg_name) 184 | value_variable = Macro.var(arg_name, nil) 185 | width = unquote(width) 186 | signedness = Macro.var(unquote(signed), nil) 187 | 188 | match = 189 | quote do 190 | <> 191 | end 192 | 193 | quote do 194 | def unquote(extractor_name)(unquote(match), unquote_splicing(extractor_args)) do 195 | unquote(landing_call) 196 | end 197 | 198 | def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args)) do 199 | {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args))} 200 | end 201 | 202 | def unquote(extractor_name)(<>, unquote_splicing(extractor_args)) do 203 | {:resume, &unquote(extractor_name)(data <> &1, unquote_splicing(extractor_args))} 204 | end 205 | end 206 | end 207 | end 208 | 209 | # Float extractors 210 | for width <- [32, 64], 211 | type_name = :"f#{width}" do 212 | defp build_extractor(unquote(type_name), arg_name, extractor_name, landing_call, [_ | args]) do 213 | extractor_args = reject_argument(args, arg_name) 214 | 
value_variable = Macro.var(arg_name, nil) 215 | width = unquote(width) 216 | 217 | quote do 218 | def unquote(extractor_name)( 219 | <>, 220 | unquote_splicing(extractor_args) 221 | ) do 222 | unquote(landing_call) 223 | end 224 | 225 | def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args)) do 226 | {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args))} 227 | end 228 | 229 | def unquote(extractor_name)(<>, unquote_splicing(extractor_args)) do 230 | {:resume, &unquote(extractor_name)(rest <> &1, unquote_splicing(extractor_args))} 231 | end 232 | end 233 | end 234 | end 235 | 236 | defp build_extractor(:boolean, arg_name, extractor_name, landing_call, [_ | args]) do 237 | extractor_args = reject_argument(args, arg_name) 238 | value_variable = Macro.var(arg_name, nil) 239 | 240 | quote do 241 | def unquote(extractor_name)(<<1, rest::binary>>, unquote_splicing(extractor_args)) do 242 | unquote(value_variable) = true 243 | unquote(landing_call) 244 | end 245 | 246 | def unquote(extractor_name)(<<0, rest::binary>>, unquote_splicing(extractor_args)) do 247 | unquote(value_variable) = false 248 | unquote(landing_call) 249 | end 250 | 251 | def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args)) do 252 | {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args))} 253 | end 254 | end 255 | end 256 | 257 | defp build_extractor(:date, arg_name, extractor_name, landing_call, [_ | args]) do 258 | extractor_args = reject_argument(args, arg_name) 259 | value_variable = Macro.var(arg_name, nil) 260 | 261 | quote do 262 | def unquote(extractor_name)( 263 | <>, 264 | unquote_splicing(extractor_args) 265 | ) do 266 | {:ok, date} = Date.new(1970, 01, 01) 267 | 268 | unquote(value_variable) = Date.add(date, days_since_epoch) 269 | unquote(landing_call) 270 | end 271 | 272 | def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args)) do 273 | {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args))} 274 | 
end 275 | 276 | def unquote(extractor_name)(<>, unquote_splicing(extractor_args)) do 277 | {:resume, &unquote(extractor_name)(rest <> &1, unquote_splicing(extractor_args))} 278 | end 279 | end 280 | end 281 | 282 | defp build_extractor(:datetime, arg_name, extractor_name, landing_call, [_ | args]) do 283 | extractor_args = reject_argument(args, arg_name) 284 | value_variable = Macro.var(arg_name, nil) 285 | 286 | quote do 287 | def unquote(extractor_name)( 288 | <>, 289 | unquote_splicing(extractor_args) 290 | ) do 291 | {:ok, date_time} = NaiveDateTime.new(1970, 1, 1, 0, 0, 0) 292 | 293 | unquote(value_variable) = NaiveDateTime.add(date_time, seconds_since_epoch) 294 | unquote(landing_call) 295 | end 296 | 297 | def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args)) do 298 | {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args))} 299 | end 300 | 301 | def unquote(extractor_name)(<>, unquote_splicing(extractor_args)) do 302 | {:resume, &unquote(extractor_name)(rest <> &1, unquote_splicing(extractor_args))} 303 | end 304 | end 305 | end 306 | 307 | defp build_extractor({:nullable, type}, arg_name, extractor_name, landing_call, [_ | non_binary_args] = args) do 308 | extractor_args = reject_argument(non_binary_args, arg_name) 309 | value_variable = Macro.var(arg_name, nil) 310 | value_extractor_name = :"#{extractor_name}_value" 311 | 312 | value_extractors = 313 | type 314 | |> build_extractor(arg_name, value_extractor_name, landing_call, args) 315 | |> collapse_blocks() 316 | 317 | quote do 318 | unquote_splicing(value_extractors) 319 | 320 | def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args)) do 321 | {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args))} 322 | end 323 | 324 | def unquote(extractor_name)(<<0, rest::binary>>, unquote_splicing(extractor_args)) do 325 | unquote(value_extractor_name)(rest, unquote_splicing(extractor_args)) 326 | end 327 | 328 | def unquote(extractor_name)(<<1, 
rest::binary>>, unquote_splicing(extractor_args)) do 329 | unquote(value_variable) = nil 330 | unquote(landing_call) 331 | end 332 | end 333 | end 334 | 335 | defp build_extractor(:string, arg_name, extractor_name, landing_call, [binary_arg | non_binary_args]) do 336 | extractor_args = reject_argument(non_binary_args, arg_name) 337 | 338 | length_variable_name = unique_name("string_length") 339 | length_variable = Macro.var(length_variable_name, nil) 340 | length_extractor_name = :"#{extractor_name}_length" 341 | length_extractor_args = extractor_args 342 | 343 | length_landing_call = 344 | quote do 345 | unquote(extractor_name)(rest, unquote_splicing(extractor_args), unquote(length_variable)) 346 | end 347 | 348 | length_extractors = 349 | build_extractor( 350 | :varint, 351 | length_variable_name, 352 | length_extractor_name, 353 | length_landing_call, 354 | [binary_arg | length_extractor_args] ++ [length_variable] 355 | ) 356 | |> collapse_blocks() 357 | 358 | value_arg = Macro.var(arg_name, nil) 359 | 360 | # The string extractor call chain looks like this: 361 | # top_level function -> length_extractor -> value_extractor 362 | quote do 363 | # Size exctractors 364 | unquote_splicing(length_extractors) 365 | 366 | # Value extractors 367 | 368 | # Empty string optimization, prevents concatenating large data to an empty string and 369 | # reallocating the large data 370 | def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args), unquote(length_variable)) do 371 | {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args), unquote(length_variable))} 372 | end 373 | 374 | def unquote(extractor_name)(<>, unquote_splicing(extractor_args), unquote(length_variable)) do 375 | case rest do 376 | <> -> 377 | unquote(landing_call) 378 | 379 | _ -> 380 | {:resume, &unquote(extractor_name)(rest <> &1, unquote_splicing(extractor_args), unquote(length_variable))} 381 | end 382 | end 383 | 384 | # Starts the size extractor chain 385 | def 
unquote(extractor_name)(<>, unquote_splicing(extractor_args)) do 386 | unquote(length_extractor_name)(b, unquote_splicing(extractor_args)) 387 | end 388 | end 389 | end 390 | 391 | defp build_extractor({:array, item_type}, arg_name, extractor_name, landing_call, args) do 392 | build_extractor({:list, item_type}, arg_name, extractor_name, landing_call, args) 393 | end 394 | 395 | defp build_extractor({:list, item_type}, arg_name, extractor_name, landing_call, [binary_arg | non_binary_args]) do 396 | extractor_args = reject_argument(non_binary_args, arg_name) 397 | 398 | length_extractor_name = :"#{extractor_name}_list_length" 399 | length_name = :length |> unique_name() 400 | length_variable = length_name |> Macro.var(nil) 401 | length_extractor_args = [binary_arg | extractor_args] ++ [length_variable] 402 | 403 | list_extractor_name = unique_name("#{extractor_name}_list") 404 | item_name = :item |> unique_name() 405 | item_variable = Macro.var(item_name, nil) 406 | 407 | item_accumulator_variable = Macro.var(arg_name, nil) 408 | count_variable = Macro.var(:"#{extractor_name}_count", nil) 409 | item_extractor_name = unique_name("#{extractor_name}_item") 410 | item_extractor_call_args = extractor_args ++ [count_variable, item_accumulator_variable] 411 | item_extractor_args = [binary_arg] ++ item_extractor_call_args 412 | list_extractor_args = extractor_args 413 | 414 | length_landing_call = 415 | quote do 416 | unquote(item_extractor_name)(rest, unquote_splicing(extractor_args), unquote(length_variable), []) 417 | end 418 | 419 | list_landing_call = 420 | quote do 421 | unquote(list_extractor_name)( 422 | rest, 423 | unquote_splicing(list_extractor_args), 424 | unquote(count_variable) - 1, 425 | unquote(item_variable), 426 | unquote(item_accumulator_variable) 427 | ) 428 | end 429 | 430 | item_extractors = 431 | item_type 432 | |> build_extractor(item_name, item_extractor_name, list_landing_call, item_extractor_args) 433 | |> collapse_blocks 434 | 435 | 
length_extractors = 436 | :varint 437 | |> build_extractor(length_name, length_extractor_name, length_landing_call, length_extractor_args) 438 | |> collapse_blocks() 439 | 440 | quote do 441 | def unquote(extractor_name)(<<>>, unquote_splicing(extractor_args)) do 442 | {:resume, &unquote(extractor_name)(&1, unquote_splicing(extractor_args))} 443 | end 444 | 445 | # Starts the chain by calling the length extractor 446 | def unquote(extractor_name)(<>, unquote_splicing(extractor_args)) do 447 | unquote(length_extractor_name)(rest, unquote_splicing(extractor_args)) 448 | end 449 | 450 | unquote_splicing(length_extractors) 451 | unquote_splicing(item_extractors) 452 | 453 | # This clause matches when we've extracted all items (remaining count is 0) 454 | def unquote(list_extractor_name)( 455 | <>, 456 | unquote_splicing(list_extractor_args), 457 | 0, 458 | unquote(item_variable), 459 | unquote(item_accumulator_variable) 460 | ) do 461 | unquote(item_accumulator_variable) = Enum.reverse([unquote(item_variable) | unquote(item_accumulator_variable)]) 462 | unquote(landing_call) 463 | end 464 | 465 | # This matches when there's more work to do. 
It accumulates the extracted item 466 | # and calls the item extractor again 467 | def unquote(list_extractor_name)( 468 | <>, 469 | unquote_splicing(list_extractor_args), 470 | unquote(count_variable), 471 | unquote(item_variable), 472 | unquote(item_accumulator_variable) 473 | ) do 474 | unquote(item_accumulator_variable) = [unquote(item_variable) | unquote(item_accumulator_variable)] 475 | unquote(item_extractor_name)(rest, unquote_splicing(item_extractor_call_args)) 476 | end 477 | end 478 | end 479 | 480 | # Helper functions 481 | 482 | defp rewrite_visibility(ast, :def) do 483 | ast 484 | end 485 | 486 | defp rewrite_visibility(ast, :defp) do 487 | Macro.prewalk(ast, fn 488 | {:def, context, rest} -> {:defp, context, rest} 489 | other -> other 490 | end) 491 | end 492 | 493 | defp collapse_blocks({:__block__, _, defs}) do 494 | defs 495 | end 496 | 497 | defp collapse_blocks(ast) when is_list(ast) do 498 | Enum.reduce(ast, [], fn 499 | {:__block__, _context, clauses}, acc -> 500 | acc ++ clauses 501 | 502 | _, acc -> 503 | acc 504 | end) 505 | |> Enum.reverse() 506 | end 507 | 508 | defp collapse_blocks(ast) do 509 | [ast] 510 | end 511 | 512 | defp reject_argument(args, arg_name) do 513 | Enum.reject(args, fn 514 | {^arg_name, _, _} -> true 515 | _ -> false 516 | end) 517 | end 518 | 519 | defp unique_name(base_name) do 520 | unique = System.unique_integer([:positive, :monotonic]) 521 | :"#{base_name}_#{unique}" 522 | end 523 | end 524 | -------------------------------------------------------------------------------- /lib/clickhousex/codec/json.ex: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.Codec.JSON do 2 | @moduledoc """ 3 | `Clickhousex.Codec` implementation for JSON output format. 4 | 5 | See [JSON][1], [JSONCompact][2]. 
@impl Codec
def decode(response) do
  case Jason.decode(response) do
    {:ok, %{"meta" => meta, "data" => data, "rows" => row_count}} ->
      # `meta` describes each column; JSONCompact rows are positional arrays,
      # so we zip each raw value with its column type before conversion.
      column_names = Enum.map(meta, & &1["name"])
      column_types = Enum.map(meta, & &1["type"])

      rows =
        for row <- data do
          for {raw_value, column_type} <- Enum.zip(row, column_types) do
            to_native(column_type, raw_value)
          end
          |> List.to_tuple()
        end

      {:ok, %{column_names: column_names, rows: rows, count: row_count}}

    # Previously malformed JSON raised a CaseClauseError here; propagate the
    # decode failure as an error tuple instead so callers can handle it.
    {:error, _reason} = error ->
      error
  end
end
89 | naive 90 | end 91 | end 92 | 93 | defp to_native("UInt" <> _, value) when is_bitstring(value) do 94 | String.to_integer(value) 95 | end 96 | 97 | defp to_native("Int" <> _, value) when is_bitstring(value) do 98 | String.to_integer(value) 99 | end 100 | 101 | defp to_native(_, value) do 102 | value 103 | end 104 | 105 | defp to_date(date_string) do 106 | [year, month, day] = 107 | date_string 108 | |> String.split("-") 109 | |> Enum.map(&String.to_integer/1) 110 | 111 | Date.new(year, month, day) 112 | end 113 | 114 | defp to_time(time_string) do 115 | [h, m, s] = 116 | time_string 117 | |> String.split(":") 118 | |> Enum.map(&String.to_integer/1) 119 | 120 | Time.new(h, m, s) 121 | end 122 | end 123 | -------------------------------------------------------------------------------- /lib/clickhousex/codec/row_binary.ex: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.Codec.RowBinary do 2 | @moduledoc """ 3 | A codec that speaks Clickhouse's RowBinary format 4 | 5 | To use this codec, set the application `:clickhousex` `:codec` application variable: 6 | 7 | config :clickhousex, codec: Clickhousex.Codec.RowBinary 8 | 9 | """ 10 | alias Clickhousex.{Codec, Codec.Binary.Extractor, Codec.RowBinary.Utils} 11 | import Utils 12 | use Extractor 13 | 14 | require Record 15 | 16 | @behaviour Codec 17 | 18 | Record.defrecord(:state, column_count: 0, column_names: [], column_types: [], rows: [], count: 0) 19 | 20 | @impl Codec 21 | def response_format do 22 | "RowBinaryWithNamesAndTypes" 23 | end 24 | 25 | @impl Codec 26 | def request_format do 27 | "Values" 28 | end 29 | 30 | @impl Codec 31 | def encode(query, replacements, params) do 32 | params = 33 | Enum.map(params, fn 34 | %DateTime{} = dt -> DateTime.to_unix(dt) 35 | other -> other 36 | end) 37 | 38 | Codec.Values.encode(query, replacements, params) 39 | end 40 | 41 | @impl Codec 42 | def decode(state(column_names: column_names, rows: rows, count: count)) do 
43 | {:ok, %{column_names: column_names, rows: Enum.reverse(rows), count: count}} 44 | end 45 | 46 | def decode(nil) do 47 | decode(state()) 48 | end 49 | 50 | @impl Codec 51 | def new do 52 | nil 53 | end 54 | 55 | @impl Codec 56 | def append(nil, data) do 57 | extract_column_count(data, state()) 58 | end 59 | 60 | def append(state() = state, data) do 61 | extract_rows(data, state) 62 | end 63 | 64 | def append({:resume, resumer}, data) do 65 | case resumer.(data) do 66 | {:resume, _} = resumer -> resumer 67 | state() = state -> state 68 | end 69 | end 70 | 71 | @extract column_count: :varint 72 | defp extract_column_count(<>, column_count, state) do 73 | extract_column_names(data, column_count, state(state, column_count: column_count)) 74 | end 75 | 76 | defp extract_column_names( 77 | <>, 78 | 0, 79 | state(column_count: column_count, column_names: column_names) = state 80 | ) do 81 | new_state = state(state, column_names: Enum.reverse(column_names)) 82 | extract_column_types(data, column_count, [], new_state) 83 | end 84 | 85 | defp extract_column_names(<>, remaining, state) do 86 | extract_column_name(data, remaining, state) 87 | end 88 | 89 | @extract column_name: :string 90 | defp extract_column_name(<>, remaining, column_name, state) do 91 | column_names = state(state, :column_names) 92 | 93 | extract_column_names( 94 | data, 95 | remaining - 1, 96 | state(state, column_names: [column_name | column_names]) 97 | ) 98 | end 99 | 100 | defp extract_column_types(<>, 0, column_types, state) do 101 | column_types = Enum.reverse(column_types) 102 | new_state = state(state, column_types: column_types) 103 | extract_rows(data, new_state) 104 | end 105 | 106 | defp extract_column_types(<>, remaining, column_types, state) do 107 | extract_column_type(data, remaining, column_types, state) 108 | end 109 | 110 | @extract column_type: :string 111 | defp extract_column_type(<>, remaining, column_type, column_types, state) do 112 | column_type = parse_type(column_type) 113 
| 114 | extract_column_types(data, remaining - 1, [column_type | column_types], state) 115 | end 116 | 117 | defp extract_rows(<<>>, state() = state) do 118 | state 119 | end 120 | 121 | defp extract_rows(<>, state(column_types: column_types) = state) do 122 | extract_row(data, column_types, [], state) 123 | end 124 | 125 | defp extract_field(<<>>, type, types, row, state) do 126 | {:resume, &extract_field(&1, type, types, row, state)} 127 | end 128 | 129 | defp extract_field(<>, {:fixed_string, length} = fixed_string, types, row, state) do 130 | case data do 131 | <> -> 132 | extract_row(rest, types, [value | row], state) 133 | 134 | _ -> 135 | {:resume, &extract_field(data <> &1, fixed_string, types, row, state)} 136 | end 137 | end 138 | 139 | @scalar_types [ 140 | :i64, 141 | :i32, 142 | :i16, 143 | :i8, 144 | :u64, 145 | :u32, 146 | :u16, 147 | :u8, 148 | :f64, 149 | :f32, 150 | :boolean, 151 | :string, 152 | :date, 153 | :datetime 154 | ] 155 | 156 | @all_types @scalar_types 157 | |> Enum.flat_map(&type_permutations/1) 158 | |> Enum.sort() 159 | 160 | # Build all permutations of extract_field/5 161 | for type <- @all_types do 162 | defp extract_field(<>, unquote(type), types, row, state) do 163 | unquote(extractor_name(type))(data, types, row, state) 164 | end 165 | end 166 | 167 | # Build all specific typed extractors, e.g. 
extract_u8/5 168 | for type <- @all_types do 169 | @extract field_value: type 170 | defp unquote(extractor_name(type))(<>, field_value, types, row, state) do 171 | extract_row(data, types, [field_value | row], state) 172 | end 173 | end 174 | 175 | defp extract_row(<>, [], row_data, state(rows: rows, count: count) = state) do 176 | row = row_data |> Enum.reverse() |> List.to_tuple() 177 | new_state = state(state, rows: [row | rows], count: count + 1) 178 | extract_rows(data, new_state) 179 | end 180 | 181 | defp extract_row(<>, [type | types], row, state) do 182 | extract_field(data, type, types, row, state) 183 | end 184 | 185 | defp parse_type(<<"Nullable(", type::binary>>) do 186 | rest_type = 187 | type 188 | |> String.replace_suffix(")", "") 189 | |> parse_type() 190 | 191 | {:nullable, rest_type} 192 | end 193 | 194 | defp parse_type(<<"FixedString(", rest::binary>>) do 195 | case Integer.parse(rest) do 196 | {length, rest} -> 197 | _ = String.replace_suffix(rest, ")", "") 198 | 199 | {:fixed_string, length} 200 | end 201 | end 202 | 203 | defp parse_type(<<"Array(", type::binary>>) do 204 | rest_type = 205 | type 206 | |> String.replace_suffix(")", "") 207 | |> parse_type() 208 | 209 | {:array, rest_type} 210 | end 211 | 212 | # Boolean isn't represented below because clickhouse has no concept 213 | # of booleans. 
# Boolean isn't represented below because clickhouse has no concept
# of booleans.
#
# NOTE(review): the previous revision also listed {"Float16", :f16} and
# {"Float8", :f8}. ClickHouse defines no such column types, and no
# extract_field clause exists for :f16/:f8 (they are absent from
# @scalar_types), so those entries could only ever produce a crash
# downstream; they have been removed.
@clickhouse_mappings [
  {"Int64", :i64},
  {"Int32", :i32},
  {"Int16", :i16},
  {"Int8", :i8},
  {"UInt64", :u64},
  {"UInt32", :u32},
  {"UInt16", :u16},
  {"UInt8", :u8},
  {"Float64", :f64},
  {"Float32", :f32},
  {"String", :string},
  {"Date", :date},
  {"DateTime", :datetime}
]
for {clickhouse_type, local_type} <- @clickhouse_mappings do
  # Generates one clause per mapping, e.g. `defp parse_type("Int64"), do: :i64`.
  defp parse_type(unquote(clickhouse_type)) do
    unquote(local_type)
  end
end
6 | 7 | [1]: https://clickhouse.tech/docs/en/interfaces/formats/#data-format-values 8 | """ 9 | 10 | alias Clickhousex.Query 11 | 12 | def encode(%Query{param_count: 0, type: :insert}, _, []) do 13 | # An insert query's arguments go into the post body and the query part goes into the query string. 14 | # If we don't have any arguments, we don't have to encode anything, but we don't want to return 15 | # anything here because we'll duplicate the query into both the query string and post body 16 | "" 17 | end 18 | 19 | def encode(%Query{param_count: 0, statement: statement}, _, []) do 20 | statement 21 | end 22 | 23 | def encode(%Query{param_count: 0}, _, _) do 24 | raise ArgumentError, "Extra params! Query doesn't contain '?'" 25 | end 26 | 27 | def encode(%Query{param_count: param_count} = query, query_text, params) do 28 | if length(params) != param_count do 29 | raise ArgumentError, 30 | "The number of parameters does not correspond to the number of question marks!" 31 | end 32 | 33 | query_parts = String.split(query_text, "?") 34 | 35 | weave(query, query_parts, params) 36 | end 37 | 38 | defp weave(query, query_parts, params) do 39 | weave(query, query_parts, params, []) 40 | end 41 | 42 | defp weave(_query, [part], [], acc) do 43 | Enum.reverse([part | acc]) 44 | end 45 | 46 | defp weave(query, [part | parts], [param | params], acc) do 47 | weave(query, parts, params, [encode_param(query, param), part | acc]) 48 | end 49 | 50 | @doc false 51 | defp encode_param(query, param) when is_list(param) do 52 | values = Enum.map_join(param, ",", &encode_param(query, &1)) 53 | 54 | case query.type do 55 | :select -> 56 | # We pass lists to in clauses, and they shouldn't have brackets around them. 
defp encode_param(_query, param) when is_integer(param) do
  Integer.to_string(param)
end

# ClickHouse has no boolean type; booleans travel as 1/0.
defp encode_param(_query, true) do
  "1"
end

defp encode_param(_query, false) do
  "0"
end

# Single float clause — the previous revision declared this exact clause
# twice; the second copy was unreachable dead code.
defp encode_param(_query, param) when is_float(param) do
  to_string(param)
end

defp encode_param(_query, nil) do
  "NULL"
end

defp encode_param(_query, %DateTime{} = datetime) do
  # ClickHouse's DateTime literal syntax has no zone designator, so the
  # trailing "Z" is stripped after truncating to whole seconds.
  iso_date =
    datetime
    |> DateTime.truncate(:second)
    |> DateTime.to_iso8601()
    |> String.replace("Z", "")

  "'#{iso_date}'"
end

defp encode_param(_query, %NaiveDateTime{} = naive_datetime) do
  naive =
    naive_datetime
    |> NaiveDateTime.truncate(:second)
    |> NaiveDateTime.to_iso8601()

  "'#{naive}'"
end

defp encode_param(_query, %Date{} = date) do
  "'#{Date.to_iso8601(date)}'"
end

# Fallback: treat the param as a string and wrap it in a single-quoted literal.
defp encode_param(_query, param) do
  "'" <> escape(param) <> "'"
end

# Escapes a string for embedding in a single-quoted Values literal.
#
# Fixes two bugs in the previous revision:
#   * `"\_"`, `"\'"` and `"\%"` are identical to `"_"`, `"'"` and `"%"` in
#     Elixir source, so those replacements were no-ops — single quotes were
#     never actually escaped;
#   * backslashes were doubled *last*, which corrupted the backslashes
#     introduced by the earlier `"` replacement. Backslashes must be
#     escaped before any escape sequences are inserted.
defp escape(s) do
  s
  |> String.replace("\\", "\\\\")
  |> String.replace("'", "\\'")
  |> String.replace(~s("), ~s(\\"))
end
4 | """ 5 | 6 | defexception message: "", code: 0, constraint_violations: [] 7 | 8 | @type t :: %__MODULE__{ 9 | message: binary(), 10 | code: integer(), 11 | constraint_violations: Keyword.t() 12 | } 13 | 14 | def exception(%Mint.TransportError{reason: reason}) do 15 | %__MODULE__{message: "Transport Error: #{inspect(reason)}"} 16 | end 17 | 18 | def exception(message) do 19 | message = to_string(message) 20 | 21 | %__MODULE__{ 22 | message: message, 23 | code: get_code(message), 24 | constraint_violations: get_constraint_violations(message) 25 | } 26 | end 27 | 28 | defp get_code(message) do 29 | case Regex.scan(~r/^Code: (\d+)/i, message) do 30 | [[_, code]] -> translate(code) 31 | _ -> :unknown 32 | end 33 | end 34 | 35 | defp translate("57"), do: :table_already_exists 36 | defp translate("60"), do: :base_table_or_view_not_found 37 | defp translate("81"), do: :database_does_not_exists 38 | defp translate("82"), do: :database_already_exists 39 | defp translate("28000"), do: :invalid_authorization 40 | defp translate("08" <> _), do: :connection_exception 41 | defp translate(code), do: code 42 | 43 | defp get_constraint_violations(_reason) do 44 | [] 45 | end 46 | end 47 | -------------------------------------------------------------------------------- /lib/clickhousex/http_client.ex: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.HTTPClient do 2 | defmodule Response do 3 | @moduledoc false 4 | 5 | defstruct ref: nil, codec_state: nil, status: nil, error_buffer: [], complete?: false 6 | 7 | @codec Application.get_env(:clickhousex, :codec, Clickhousex.Codec.JSON) 8 | def new(ref) do 9 | codec_state = @codec.new() 10 | 11 | %__MODULE__{ref: ref, codec_state: codec_state} 12 | end 13 | 14 | def append_messages(%__MODULE__{} = response, messages) do 15 | Enum.reduce(messages, response, &append(&2, &1)) 16 | end 17 | 18 | def decode(%__MODULE__{status: status, error_buffer: error_buffer}) when status != 200 do 
19 | {:error, IO.iodata_to_binary(error_buffer)} 20 | end 21 | 22 | def decode(%__MODULE__{codec_state: state, complete?: true}) do 23 | @codec.decode(state) 24 | end 25 | 26 | def format do 27 | @codec.response_format 28 | end 29 | 30 | defp append( 31 | %__MODULE__{status: status, ref: ref, error_buffer: error_buffer} = response, 32 | {:data, ref, data} 33 | ) 34 | when status != 200 do 35 | %{response | error_buffer: [error_buffer, data]} 36 | end 37 | 38 | defp append(%__MODULE__{codec_state: state, ref: ref} = response, {:data, ref, data}) do 39 | %{response | codec_state: @codec.append(state, data)} 40 | end 41 | 42 | defp append(%__MODULE__{ref: ref} = response, {:status, ref, status_code}) do 43 | %{response | status: status_code} 44 | end 45 | 46 | defp append(%__MODULE__{ref: ref} = response, {:headers, ref, _headers}) do 47 | response 48 | end 49 | 50 | defp append(%__MODULE__{ref: ref} = response, {:done, ref}) do 51 | %{response | complete?: true} 52 | end 53 | end 54 | 55 | alias Clickhousex.Query 56 | @moduledoc false 57 | 58 | @req_headers [{"Content-Type", "text/plain"}] 59 | 60 | def connect(scheme, host, port) do 61 | Mint.HTTP.connect(scheme, host, port, mode: :passive) 62 | end 63 | 64 | def disconnect(conn) do 65 | Mint.HTTP.close(conn) 66 | end 67 | 68 | def request(conn, query, request, timeout, nil, _password, database) do 69 | post(conn, query, request, database, timeout: timeout, recv_timeout: timeout) 70 | end 71 | 72 | def request(conn, query, request, timeout, username, password, database) do 73 | opts = [basic_auth: {username, password}, timeout: timeout, recv_timeout: timeout] 74 | post(conn, query, request, database, opts) 75 | end 76 | 77 | defp post(conn, query, request, database, opts) do 78 | {recv_timeout, opts} = Keyword.pop(opts, :recv_timeout, 5000) 79 | 80 | query_string = 81 | URI.encode_query(%{ 82 | database: database, 83 | query: IO.iodata_to_binary(request.query_string_data) 84 | }) 85 | 86 | path = "/?#{query_string}" 
87 | post_body = maybe_append_format(query, request) 88 | headers = headers(opts, post_body) 89 | 90 | with {:ok, conn, ref} <- Mint.HTTP.request(conn, "POST", path, headers, post_body), 91 | {:ok, conn, %Response{} = response} <- 92 | receive_response(conn, recv_timeout, Response.new(ref)) do 93 | decode_response(conn, query, response) 94 | else 95 | {:error, conn, error, _messages} -> 96 | {:error, conn, error} 97 | end 98 | end 99 | 100 | defp decode_response(conn, %Query{type: :select}, %Response{} = response) do 101 | case Response.decode(response) do 102 | {:ok, %{column_names: columns, rows: rows}} -> {:ok, conn, {:selected, columns, rows}} 103 | {:error, error} -> {:error, conn, error.reason} 104 | end 105 | end 106 | 107 | defp decode_response(conn, %Query{type: :create}, %Response{status: 200} = _response) do 108 | {:ok, conn, {:updated, 1}} 109 | end 110 | 111 | defp decode_response(conn, %Query{}, response) do 112 | case Response.decode(response) do 113 | {:error, reason} -> {:error, conn, reason} 114 | _ -> {:ok, conn, {:updated, 1}} 115 | end 116 | end 117 | 118 | defp headers(opts, post_iodata) do 119 | headers = 120 | case Keyword.get(opts, :basic_auth) do 121 | {username, password} -> 122 | auth_hash = Base.encode64("#{username}:#{password}") 123 | auth_header = {"Authorization", "Basic: #{auth_hash}"} 124 | [auth_header | @req_headers] 125 | 126 | nil -> 127 | @req_headers 128 | end 129 | 130 | content_length = post_iodata |> IO.iodata_length() |> Integer.to_string() 131 | [{"content-length", content_length} | headers] 132 | end 133 | 134 | defp receive_response(conn, _recv_timeout, %Response{complete?: true} = response) do 135 | {:ok, conn, response} 136 | end 137 | 138 | defp receive_response(conn, recv_timeout, response) do 139 | case Mint.HTTP.recv(conn, 0, recv_timeout) do 140 | {:ok, conn, messages} -> 141 | response = Response.append_messages(response, messages) 142 | receive_response(conn, recv_timeout, response) 143 | 144 | {:error, conn, 
err, messages} -> 145 | response = Response.append_messages(response, messages) 146 | {:error, conn, err, response} 147 | end 148 | end 149 | 150 | defp maybe_append_format(%Query{type: :select}, request) do 151 | [request.post_data, " FORMAT ", Response.format()] 152 | end 153 | 154 | defp maybe_append_format(_, request) do 155 | [request.post_data] 156 | end 157 | end 158 | -------------------------------------------------------------------------------- /lib/clickhousex/http_request.ex: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.HTTPRequest do 2 | @moduledoc false 3 | 4 | defstruct post_data: "", query_string_data: "" 5 | 6 | def new do 7 | %__MODULE__{} 8 | end 9 | 10 | def with_post_data(%__MODULE__{} = request, post_data) do 11 | %{request | post_data: post_data} 12 | end 13 | 14 | def with_query_string_data(%__MODULE__{} = request, query_string_data) do 15 | %{request | query_string_data: query_string_data} 16 | end 17 | end 18 | -------------------------------------------------------------------------------- /lib/clickhousex/protocol.ex: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.Protocol do 2 | @moduledoc false 3 | 4 | use DBConnection 5 | 6 | alias Clickhousex.Error 7 | alias Clickhousex.HTTPClient, as: Client 8 | 9 | defstruct conn_opts: [], base_address: "", conn: nil 10 | 11 | @type state :: %__MODULE__{ 12 | conn_opts: Keyword.t(), 13 | base_address: String.t(), 14 | conn: Mint.HTTP.t() 15 | } 16 | 17 | @type query :: Clickhousex.Query.t() 18 | @type result :: Clickhousex.Result.t() 19 | @type cursor :: any 20 | 21 | @ping_query Clickhousex.Query.new("SELECT 1") |> DBConnection.Query.parse([]) 22 | @ping_params DBConnection.Query.encode(@ping_query, [], []) 23 | 24 | @doc false 25 | @spec connect(opts :: Keyword.t()) :: {:ok, state} | {:error, Exception.t()} 26 | def connect(opts) do 27 | scheme = opts[:scheme] || :http 28 | 
hostname = opts[:hostname] || "localhost" 29 | port = opts[:port] || 8123 30 | database = opts[:database] || "default" 31 | username = opts[:username] 32 | password = opts[:password] 33 | timeout = opts[:timeout] || Clickhousex.timeout() 34 | 35 | {:ok, conn} = Client.connect(scheme, hostname, port) 36 | 37 | response = Client.request(conn, @ping_query, @ping_params, timeout, username, password, database) 38 | 39 | with {:ok, conn, {:selected, _, _}} <- response do 40 | conn_opts = [ 41 | scheme: scheme, 42 | hostname: hostname, 43 | port: port, 44 | database: database, 45 | username: username, 46 | password: password, 47 | timeout: timeout 48 | ] 49 | 50 | state = %__MODULE__{ 51 | conn: conn, 52 | conn_opts: conn_opts 53 | } 54 | 55 | {:ok, state} 56 | end 57 | end 58 | 59 | @doc false 60 | @spec disconnect(err :: Exception.t(), state) :: :ok 61 | def disconnect(_err, _state) do 62 | :ok 63 | end 64 | 65 | @doc false 66 | @spec ping(state) :: 67 | {:ok, state} 68 | | {:disconnect, term, state} 69 | def ping(state) do 70 | case do_query(state.conn, @ping_query, @ping_params, [], state) do 71 | {:ok, _, _, new_state} -> {:ok, new_state} 72 | {:error, reason, new_state} -> {:disconnect, reason, new_state} 73 | other -> other 74 | end 75 | end 76 | 77 | @doc false 78 | @spec reconnect(new_opts :: Keyword.t(), state) :: {:ok, state} 79 | def reconnect(new_opts, state) do 80 | with :ok <- disconnect("Reconnecting", state), 81 | do: connect(new_opts) 82 | end 83 | 84 | @doc false 85 | @spec checkin(state) :: {:ok, state} 86 | def checkin(state) do 87 | {:ok, state} 88 | end 89 | 90 | @doc false 91 | @spec checkout(state) :: {:ok, state} 92 | def checkout(state) do 93 | {:ok, state} 94 | end 95 | 96 | @doc false 97 | def handle_status(_, state) do 98 | {:idle, state} 99 | end 100 | 101 | @doc false 102 | @spec handle_prepare(query, Keyword.t(), state) :: {:ok, query, state} 103 | def handle_prepare(query, _, state) do 104 | {:ok, query, state} 105 | end 106 | 107 | @doc 
false 108 | @spec handle_execute(query, list, opts :: Keyword.t(), state) :: 109 | {:ok, result, state} 110 | | {:error | :disconnect, Exception.t(), state} 111 | def handle_execute(query, params, opts, state) do 112 | do_query(state.conn, query, params, opts, state) 113 | end 114 | 115 | @doc false 116 | def handle_declare(_query, _params, _opts, state) do 117 | {:error, :cursors_not_supported, state} 118 | end 119 | 120 | @doc false 121 | def handle_deallocate(_query, _cursor, _opts, state) do 122 | {:error, :cursors_not_supported, state} 123 | end 124 | 125 | def handle_fetch(_query, _cursor, _opts, state) do 126 | {:error, :cursors_not_supported, state} 127 | end 128 | 129 | @doc false 130 | @spec handle_begin(opts :: Keyword.t(), state) :: {:ok, result, state} 131 | def handle_begin(_opts, state) do 132 | {:ok, %Clickhousex.Result{}, state} 133 | end 134 | 135 | @doc false 136 | @spec handle_close(query, Keyword.t(), state) :: {:ok, result, state} 137 | def handle_close(_query, _opts, state) do 138 | {:ok, %Clickhousex.Result{}, state} 139 | end 140 | 141 | @doc false 142 | @spec handle_commit(opts :: Keyword.t(), state) :: {:ok, result, state} 143 | def handle_commit(_opts, state) do 144 | {:ok, %Clickhousex.Result{}, state} 145 | end 146 | 147 | @doc false 148 | @spec handle_info(opts :: Keyword.t(), state) :: {:ok, result, state} 149 | def handle_info(_msg, state) do 150 | {:ok, state} 151 | end 152 | 153 | @doc false 154 | @spec handle_rollback(opts :: Keyword.t(), state) :: {:ok, result, state} 155 | def handle_rollback(_opts, state) do 156 | {:ok, %Clickhousex.Result{}, state} 157 | end 158 | 159 | defp do_query(conn, query, params, _opts, state) do 160 | username = state.conn_opts[:username] 161 | password = state.conn_opts[:password] 162 | timeout = state.conn_opts[:timeout] 163 | database = state.conn_opts[:database] 164 | 165 | res = 166 | conn 167 | |> Client.request(query, params, timeout, username, password, database) 168 | |> handle_errors() 169 
| 170 | case res do 171 | {:error, conn, %Error{code: :connection_exception} = reason} -> 172 | {:disconnect, reason, %{state | conn: conn}} 173 | 174 | {:error, conn, reason} -> 175 | {:error, reason, %{state | conn: conn}} 176 | 177 | {:ok, conn, {:selected, columns, rows}} -> 178 | { 179 | :ok, 180 | query, 181 | %Clickhousex.Result{ 182 | command: :selected, 183 | columns: columns, 184 | rows: rows, 185 | num_rows: Enum.count(rows) 186 | }, 187 | %{state | conn: conn} 188 | } 189 | 190 | {:ok, conn, {:updated, count}} -> 191 | { 192 | :ok, 193 | query, 194 | %Clickhousex.Result{ 195 | command: :updated, 196 | columns: ["count"], 197 | rows: [[count]], 198 | num_rows: 1 199 | }, 200 | %{state | conn: conn} 201 | } 202 | 203 | {:ok, conn, {command, columns, rows}} -> 204 | { 205 | :ok, 206 | query, 207 | %Clickhousex.Result{ 208 | command: command, 209 | columns: columns, 210 | rows: rows, 211 | num_rows: Enum.count(rows) 212 | }, 213 | %{state | conn: conn} 214 | } 215 | end 216 | end 217 | 218 | @doc false 219 | defp handle_errors({:error, conn, reason}) do 220 | {:error, conn, Error.exception(reason)} 221 | end 222 | 223 | defp handle_errors(term), do: term 224 | end 225 | -------------------------------------------------------------------------------- /lib/clickhousex/query.ex: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.Query do 2 | @moduledoc """ 3 | Query struct returned from a successfully prepared query. 
4 | """ 5 | 6 | @type t :: %__MODULE__{ 7 | name: iodata, 8 | type: :select | :insert | :alter | :create | :drop, 9 | param_count: integer, 10 | params: iodata | nil, 11 | columns: [String.t()] | nil 12 | } 13 | 14 | defstruct name: nil, 15 | statement: "", 16 | type: :select, 17 | params: [], 18 | param_count: 0, 19 | columns: [] 20 | 21 | def new(statement) do 22 | %__MODULE__{statement: statement} 23 | |> DBConnection.Query.parse([]) 24 | end 25 | end 26 | 27 | defimpl DBConnection.Query, for: Clickhousex.Query do 28 | alias Clickhousex.HTTPRequest 29 | 30 | @values_regex ~r/VALUES/i 31 | @create_query_regex ~r/\bCREATE\b/i 32 | @select_query_regex ~r/\bSELECT\b/i 33 | @insert_query_regex ~r/\bINSERT\b/i 34 | @alter_query_regex ~r/\bALTER\b/i 35 | 36 | @codec Application.get_env(:clickhousex, :codec, Clickhousex.Codec.JSON) 37 | 38 | def parse(%{statement: statement} = query, _opts) do 39 | param_count = 40 | statement 41 | |> String.codepoints() 42 | |> Enum.count(fn s -> s == "?" end) 43 | 44 | query = %{query | type: query_type(statement)} 45 | 46 | %{query | param_count: param_count} 47 | end 48 | 49 | def describe(query, _opts) do 50 | query 51 | end 52 | 53 | def encode(%{type: :insert} = query, params, _opts) do 54 | {query_part, post_body_part} = do_parse(query) 55 | encoded_params = @codec.encode(query, post_body_part, params) 56 | 57 | HTTPRequest.new() 58 | |> HTTPRequest.with_query_string_data(query_part) 59 | |> HTTPRequest.with_post_data(encoded_params) 60 | end 61 | 62 | def encode(query, params, _opts) do 63 | {query_part, _post_body_part} = do_parse(query) 64 | encoded_params = @codec.encode(query, query_part, params) 65 | 66 | HTTPRequest.new() 67 | |> HTTPRequest.with_query_string_data(encoded_params) 68 | end 69 | 70 | def decode(_query, result, _opts) do 71 | result 72 | end 73 | 74 | defp do_parse(%{type: :insert, statement: statement}) do 75 | with true <- Regex.match?(@values_regex, statement), 76 | [fragment, substitutions] <- 
String.split(statement, @values_regex), 77 | true <- String.contains?(substitutions, "?") do 78 | {fragment <> " FORMAT #{@codec.request_format}", substitutions} 79 | else 80 | _ -> 81 | {statement, ""} 82 | end 83 | end 84 | 85 | defp do_parse(%{statement: statement}) do 86 | {statement, ""} 87 | end 88 | 89 | defp query_type(statement) do 90 | with {:create, false} <- {:create, Regex.match?(@create_query_regex, statement)}, 91 | {:select, false} <- {:select, Regex.match?(@select_query_regex, statement)}, 92 | {:insert, false} <- {:insert, Regex.match?(@insert_query_regex, statement)}, 93 | {:alter, false} <- {:alter, Regex.match?(@alter_query_regex, statement)} do 94 | :unknown 95 | else 96 | {statement_type, true} -> 97 | statement_type 98 | end 99 | end 100 | end 101 | 102 | defimpl String.Chars, for: Clickhousex.Query do 103 | def to_string(%Clickhousex.Query{statement: statement}) do 104 | IO.iodata_to_binary(statement) 105 | end 106 | end 107 | -------------------------------------------------------------------------------- /lib/clickhousex/result.ex: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.Result do 2 | @moduledoc """ 3 | Result struct returned from any successful query. Its fields are: 4 | 5 | * `command` - An atom of the query command 6 | * `columns` - The column names; 7 | * `rows` - The result set. 
A list of lists, each inner list corresponding to a 8 | row, each element in the inner list corresponds to a column; 9 | * `num_rows` - The number of fetched or affected rows; 10 | """ 11 | 12 | @type t :: %__MODULE__{ 13 | command: atom, 14 | columns: [String.t()] | nil, 15 | rows: [[term] | binary] | nil, 16 | num_rows: integer | :undefined 17 | } 18 | 19 | defstruct command: nil, columns: nil, rows: nil, num_rows: :undefined 20 | end 21 | -------------------------------------------------------------------------------- /mix.exs: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.Mixfile do 2 | use Mix.Project 3 | 4 | def project do 5 | [ 6 | app: :clickhousex, 7 | version: "0.5.0", 8 | elixir: "~> 1.5", 9 | elixirc_paths: elixirc_paths(Mix.env()), 10 | deps: deps(), 11 | package: package(), 12 | source_url: "https://github.com/clickhouse-elixir/clickhousex" 13 | ] 14 | end 15 | 16 | # Run "mix help compile.app" to learn about applications. 17 | def application do 18 | [ 19 | extra_applications: [:logger] 20 | ] 21 | end 22 | 23 | # Specifies which paths to compile per environment. 24 | defp elixirc_paths(:test), do: ["lib", "test"] 25 | defp elixirc_paths(_), do: ["lib"] 26 | 27 | # Run "mix help deps" to learn about dependencies. 28 | defp deps do 29 | [ 30 | {:db_connection, "~> 2.0"}, 31 | {:mint, "~> 1.0"}, 32 | {:castore, "~> 0.1"}, 33 | {:jason, "~> 1.0"}, 34 | {:ex_doc, "~> 0.22", only: :dev}, 35 | {:benchee, "~> 1.0", only: [:dev, :test]}, 36 | {:credo, "~> 1.5", only: :dev} 37 | # {:nicene, "~> 0.4.0", only: :dev} 38 | ] 39 | end 40 | 41 | defp package do 42 | [ 43 | name: "clickhousex", 44 | description: description(), 45 | maintainers: maintainers(), 46 | licenses: ["Apache 2.0"], 47 | links: %{"GitHub" => "https://github.com/clickhouse-elixir/clickhousex"} 48 | ] 49 | end 50 | 51 | defp description do 52 | "ClickHouse driver for Elixir (uses HTTP)." 
53 | end 54 | 55 | defp maintainers do 56 | [ 57 | "Roman Chudov", 58 | "Konstantin Grabar", 59 | "Ivan Zinoviev", 60 | "Evgeniy Shurmin", 61 | "Alexey Lukyanov", 62 | "Yaroslav Rogov", 63 | "Ivan Sokolov", 64 | "Georgy Sychev" 65 | ] 66 | end 67 | end 68 | -------------------------------------------------------------------------------- /mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "benchee": {:hex, :benchee, "1.0.1", "66b211f9bfd84bd97e6d1beaddf8fc2312aaabe192f776e8931cb0c16f53a521", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}], "hexpm", "3ad58ae787e9c7c94dd7ceda3b587ec2c64604563e049b2a0e8baafae832addb"}, 3 | "bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm", "7af5c7e09fe1d40f76c8e4f9dd2be7cebd83909f31fee7cd0e9eadc567da8353"}, 4 | "castore": {:hex, :castore, "0.1.11", "c0665858e0e1c3e8c27178e73dffea699a5b28eb72239a3b2642d208e8594914", [:mix], [], "hexpm", "91b009ba61973b532b84f7c09ce441cba7aa15cb8b006cf06c6f4bba18220081"}, 5 | "connection": {:hex, :connection, "1.1.0", "ff2a49c4b75b6fb3e674bfc5536451607270aac754ffd1bdfe175abe4a6d7a68", [:mix], [], "hexpm", "722c1eb0a418fbe91ba7bd59a47e28008a189d47e37e0e7bb85585a016b2869c"}, 6 | "credo": {:hex, :credo, "1.5.6", "e04cc0fdc236fefbb578e0c04bd01a471081616e741d386909e527ac146016c6", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "4b52a3e558bd64e30de62a648518a5ea2b6e3e5d2b164ef5296244753fc7eb17"}, 7 | "db_connection": {:hex, :db_connection, "2.4.0", "d04b1b73795dae60cead94189f1b8a51cc9e1f911c234cc23074017c43c031e5", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", 
optional: false]}], "hexpm", "ad416c21ad9f61b3103d254a71b63696ecadb6a917b36f563921e0de00d7d7c8"}, 8 | "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"}, 9 | "earmark_parser": {:hex, :earmark_parser, "1.4.13", "0c98163e7d04a15feb62000e1a891489feb29f3d10cb57d4f845c405852bbef8", [:mix], [], "hexpm", "d602c26af3a0af43d2f2645613f65841657ad6efc9f0e361c3b6c06b578214ba"}, 10 | "ex_doc": {:hex, :ex_doc, "0.25.1", "4b736fa38dc76488a937e5ef2944f5474f3eff921de771b25371345a8dc810bc", [:mix], [{:earmark_parser, "~> 1.4.0", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "3200b0a69ddb2028365281fbef3753ea9e728683863d8cdaa96580925c891f67"}, 11 | "file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"}, 12 | "jason": {:hex, :jason, "1.2.2", "ba43e3f2709fd1aa1dce90aaabfd039d000469c05c56f0b8e31978e03fa39052", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "18a228f5f0058ee183f29f9eae0805c6e59d61c3b006760668d8d18ff0d12179"}, 13 | "makeup": {:hex, :makeup, "1.0.5", "d5a830bc42c9800ce07dd97fa94669dfb93d3bf5fcf6ea7a0c67b2e0e4a7f26c", [:mix], [{:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "cfa158c02d3f5c0c665d0af11512fed3fba0144cf1aadee0f2ce17747fba2ca9"}, 14 | "makeup_elixir": {:hex, :makeup_elixir, "0.15.1", "b5888c880d17d1cc3e598f05cdb5b5a91b7b17ac4eaf5f297cb697663a1094dd", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.1", [hex: :nimble_parsec, repo: 
"hexpm", optional: false]}], "hexpm", "db68c173234b07ab2a07f645a5acdc117b9f99d69ebf521821d89690ae6c6ec8"}, 15 | "makeup_erlang": {:hex, :makeup_erlang, "0.1.1", "3fcb7f09eb9d98dc4d208f49cc955a34218fc41ff6b84df7c75b3e6e533cc65f", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "174d0809e98a4ef0b3309256cbf97101c6ec01c4ab0b23e926a9e17df2077cbb"}, 16 | "mint": {:hex, :mint, "1.3.0", "396b3301102f7b775e103da5a20494b25753aed818d6d6f0ad222a3a018c3600", [:mix], [{:castore, "~> 0.1.0", [hex: :castore, repo: "hexpm", optional: true]}], "hexpm", "a9aac960562e43ca69a77e5176576abfa78b8398cec5543dd4fb4ab0131d5c1e"}, 17 | "nimble_parsec": {:hex, :nimble_parsec, "1.1.0", "3a6fca1550363552e54c216debb6a9e95bd8d32348938e13de5eda962c0d7f89", [:mix], [], "hexpm", "08eb32d66b706e913ff748f11694b17981c0b04a33ef470e33e11b3d3ac8f54b"}, 18 | "telemetry": {:hex, :telemetry, "1.0.0", "0f453a102cdf13d506b7c0ab158324c337c41f1cc7548f0bc0e130bbf0ae9452", [:rebar3], [], "hexpm", "73bc09fa59b4a0284efb4624335583c528e07ec9ae76aca96ea0673850aec57a"}, 19 | } 20 | -------------------------------------------------------------------------------- /test/clickhouse_case.ex: -------------------------------------------------------------------------------- 1 | defmodule ClickhouseCase do 2 | @moduledoc """ 3 | Test Case and helpers for testing Clickhousex. 
4 | """ 5 | 6 | use ExUnit.CaseTemplate 7 | alias Clickhousex, as: CH 8 | 9 | def database(ctx) do 10 | ctx.case 11 | |> Module.split() 12 | |> List.last() 13 | |> Macro.underscore() 14 | end 15 | 16 | def table(ctx) do 17 | table = 18 | ctx.test 19 | |> Atom.to_string() 20 | |> String.downcase() 21 | |> String.replace(" ", "_") 22 | 23 | "#{database(ctx)}.#{table}" 24 | end 25 | 26 | def schema(ctx, create_statement) do 27 | create_statement = parameterize(create_statement, ctx) 28 | 29 | CH.query(ctx.client, create_statement, []) 30 | end 31 | 32 | def select_all(ctx) do 33 | select(ctx, "SELECT * from {{table}}") 34 | end 35 | 36 | def select(ctx, select_statement) do 37 | select(ctx, select_statement, []) 38 | end 39 | 40 | def select(ctx, select_statement, params) do 41 | select_statement = parameterize(select_statement, ctx) 42 | {:ok, _, _} = CH.query(ctx.client, select_statement, params) 43 | end 44 | 45 | def insert(ctx, insert_statement, values) do 46 | insert_statement = parameterize(insert_statement, ctx) 47 | {:ok, _, _} = CH.query(ctx.client, insert_statement, values) 48 | end 49 | 50 | defp parameterize(query, ctx) do 51 | query 52 | |> String.replace(~r/\{\{\s*database\s*\}\}/, database(ctx)) 53 | |> String.replace(~r/\{\{\s*table\s*\}\}/, table(ctx)) 54 | |> String.trim() 55 | end 56 | 57 | using do 58 | quote do 59 | require unquote(__MODULE__) 60 | 61 | import unquote(__MODULE__), 62 | only: [ 63 | schema: 2, 64 | select_all: 1, 65 | select: 2, 66 | select: 3, 67 | insert: 3 68 | ] 69 | end 70 | end 71 | 72 | setup_all do 73 | hostname = System.get_env("test_db_hostname") || "localhost" 74 | 75 | with {:ok, client} <- start_supervised({Clickhousex, hostname: hostname}) do 76 | {:ok, client: client} 77 | end 78 | end 79 | 80 | setup %{client: client} = ctx do 81 | db_name = database(ctx) 82 | 83 | on_exit(fn -> 84 | Clickhousex.query!(client, "DROP DATABASE IF EXISTS #{db_name}", []) 85 | end) 86 | 87 | {:ok, _, _} = Clickhousex.query(client, 
"CREATE DATABASE #{db_name}", []) 88 | 89 | {:ok, client: client} 90 | end 91 | end 92 | -------------------------------------------------------------------------------- /test/clickhousex/codec/binary_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.Codec.BinaryTest do 2 | use ExUnit.Case 3 | import Clickhousex.Codec.Binary 4 | 5 | test "resuming" do 6 | start = <<1, 0, 0>> 7 | assert {:resume, resumer} = decode(start, :i32) 8 | assert {:ok, 1, <<>>} = resumer.(<<0>>) 9 | end 10 | 11 | test "resuming a varint" do 12 | expected = decode(<<129, 0>>, :varint) 13 | start = <<129>> 14 | 15 | assert {:resume, resumer} = decode(start, :varint) 16 | assert expected == resumer.(<<0>>) 17 | 18 | expected = decode(<<140, 202, 192, 6>>, :varint) 19 | start = <<140, 202>> 20 | 21 | assert {:resume, resumer} = decode(start, :varint) 22 | assert expected == resumer.(<<192, 6>>) 23 | end 24 | 25 | test "decoding an i64" do 26 | expected = 48_291_928 27 | 28 | encoded = encode(:i64, expected) 29 | 30 | <> = encoded 31 | assert {:resume, resumer} = decode(start, :i64) 32 | assert {:ok, expected, <<>>} == resumer.(rest) 33 | end 34 | 35 | test "resuming a string" do 36 | to_encode = String.duplicate("hi", 400) 37 | encoded = encode(:string, to_encode) |> IO.iodata_to_binary() 38 | <> = encoded 39 | 40 | assert {:resume, resumer} = decode(first_byte, :string) 41 | assert {:ok, ^to_encode, <<>>} = resumer.(rest) 42 | 43 | <> = encoded 44 | 45 | assert {:resume, resumer} = decode(length, :string) 46 | assert {:ok, ^to_encode, <<>>} = resumer.(rest) 47 | 48 | <> = encoded 49 | assert {:resume, resumer} = decode(start, :string) 50 | assert {:ok, ^to_encode, <<>>} = resumer.(rest) 51 | end 52 | 53 | test "decoding a list of integers" do 54 | to_encode = 1..300 |> Enum.to_list() 55 | 56 | for type <- ~w(u64 u32 u16 i64 i32 i16)a do 57 | encoded = encode({:list, type}, to_encode) |> IO.iodata_to_binary() 58 | <> = encoded 59 
| 60 | assert {:resume, resumer} = decode(first_byte, {:list, type}) 61 | assert {:ok, to_encode, <<>>} == resumer.(rest) 62 | 63 | <> = encoded 64 | assert {:resume, resumer} = decode(length, {:list, type}) 65 | assert {:ok, to_encode, <<>>} == resumer.(rest) 66 | 67 | <> = encoded 68 | assert {:resume, resumer} = decode(first, {:list, type}) 69 | assert {:ok, to_encode, <<>>} == resumer.(rest) 70 | end 71 | end 72 | 73 | test "decoding a list of floats" do 74 | to_encode = Enum.map(1..300, &(&1 / 1)) 75 | 76 | for type <- ~w(f64 f32 )a do 77 | encoded = encode({:list, type}, to_encode) |> IO.iodata_to_binary() 78 | <> = encoded 79 | 80 | assert {:resume, resumer} = decode(first_byte, {:list, type}) 81 | assert {:ok, to_encode, <<>>} == resumer.(rest) 82 | 83 | <> = encoded 84 | assert {:resume, resumer} = decode(length, {:list, type}) 85 | assert {:ok, to_encode, <<>>} == resumer.(rest) 86 | 87 | <> = encoded 88 | assert {:resume, resumer} = decode(first, {:list, type}) 89 | assert {:ok, to_encode, <<>>} == resumer.(rest) 90 | end 91 | end 92 | 93 | test "decoding a nullable string" do 94 | null = encode({:nullable, :string}, nil) |> IO.iodata_to_binary() 95 | assert {:resume, resumer} = decode(<<>>, {:nullable, :string}) 96 | assert {:ok, long_string, <<>>} = resumer.(null) 97 | 98 | long_string = String.duplicate("h", 300) 99 | non_null = encode({:nullable, :string}, long_string) |> IO.iodata_to_binary() 100 | 101 | <> = non_null 102 | assert {:resume, resumer} = decode(start, {:nullable, :string}) 103 | assert {:ok, long_string, <<>>} == resumer.(rest) 104 | 105 | <> = non_null 106 | assert {:resume, resumer} = decode(start, {:nullable, :string}) 107 | assert {:ok, long_string, <<>>} == resumer.(rest) 108 | 109 | <> = non_null 110 | assert {:resume, resumer} = decode(start, {:nullable, :string}) 111 | assert {:ok, long_string, <<>>} == resumer.(rest) 112 | end 113 | end 114 | -------------------------------------------------------------------------------- 
/test/clickhousex/codec/extractor_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.Codec.ExtractorTest do 2 | use ExUnit.Case 3 | alias Clickhousex.Codec.Binary 4 | 5 | defmodule Extractors do 6 | alias Clickhousex.Codec.Binary.Extractor 7 | use Extractor 8 | @scalar_types ~w(i64 i32 i16 i8 u64 u32 u16 u8 f64 f32 string boolean)a 9 | 10 | @extract value: :varint 11 | def extract(<>, :varint, value) do 12 | {:ok, value, rest} 13 | end 14 | 15 | for type <- @scalar_types do 16 | @extract value: type 17 | def extract(<>, unquote(type), value) do 18 | {:ok, value, rest} 19 | end 20 | end 21 | 22 | for base_type <- @scalar_types, 23 | type = {:nullable, base_type} do 24 | @extract value: type 25 | def extract(<>, unquote(type), value) do 26 | {:ok, value, rest} 27 | end 28 | end 29 | 30 | for base_type <- @scalar_types, 31 | type = {:list, base_type} do 32 | @extract value: type 33 | def extract(<>, unquote(type), value) do 34 | {:ok, value, rest} 35 | end 36 | end 37 | 38 | for base_type <- @scalar_types, 39 | type = {:list, {:nullable, base_type}} do 40 | @extract value: type 41 | def extract(<>, unquote(type), value) do 42 | {:ok, value, rest} 43 | end 44 | end 45 | end 46 | 47 | def encode(type, to_encode) do 48 | type |> Binary.encode(to_encode) |> IO.iodata_to_binary() 49 | end 50 | 51 | describe "resuming" do 52 | test "it should resume varints" do 53 | first = <<1::size(1), 7::size(7)>> 54 | rest = <<0::size(1), 14::size(7)>> 55 | 56 | assert {:resume, resume_fn} = Extractors.extract(first, :varint) 57 | assert {:ok, 910, <<>>} == resume_fn.(rest) 58 | end 59 | 60 | test "it should resume ints" do 61 | assert {:resume, resume_fn} = Extractors.extract(<<56, 0, 0, 0, 0, 0, 0>>, :i64) 62 | assert {:ok, 56, <<>>} = resume_fn.(<<0>>) 63 | 64 | assert {:resume, resume_fn} = Extractors.extract(<<56, 0, 0>>, :i32) 65 | assert {:ok, 56, <<>>} = resume_fn.(<<0>>) 66 | 67 | assert {:resume, resume_fn} = 
Extractors.extract(<<56>>, :i16) 68 | assert {:ok, 56, <<>>} = resume_fn.(<<0>>) 69 | 70 | assert {:resume, resume_fn} = Extractors.extract(<<>>, :i8) 71 | assert {:ok, 56, <<>>} = resume_fn.(<<56>>) 72 | end 73 | 74 | test "it should resume lists of things" do 75 | encoded = [1, 2, 3, 4, 5] 76 | s = encode({:list, :i64}, encoded) 77 | <> = s 78 | assert {:resume, resume_fn} = Extractors.extract(<>, {:list, :i64}) 79 | assert {:ok, encoded, <<>>} == resume_fn.(rest) 80 | 81 | assert {:resume, resume_fn} = Extractors.extract(<<>>, {:list, :i64}) 82 | assert {:ok, encoded, <<>>} == resume_fn.(s) 83 | end 84 | 85 | test "it should be able to resume a nullable" do 86 | s = <> = encode({:nullable, :i32}, 15) 87 | assert {:resume, resume_fn} = Extractors.extract(<>, {:nullable, :i32}) 88 | assert {:ok, 15, <<>>} = resume_fn.(rest) 89 | 90 | assert {:resume, resume_fn} = Extractors.extract(<<>>, {:nullable, :i32}) 91 | assert {:ok, 15, <<>>} = resume_fn.(s) 92 | end 93 | end 94 | 95 | describe "extracting scalar types" do 96 | test "it should be able to extract signed ints" do 97 | for type <- ~w(i64 i32 i16 i8)a, 98 | val = :rand.uniform(127) do 99 | s = encode(type, val) 100 | assert {:ok, val, <<>>} == Extractors.extract(s, type) 101 | end 102 | end 103 | 104 | test "it should be able to extract unsigned ints" do 105 | for type <- ~w(u64 u32 u16 u8)a, val = :rand.uniform(127) do 106 | s = encode(type, val) 107 | assert {:ok, val, <<>>} == Extractors.extract(s, type) 108 | end 109 | end 110 | 111 | test "it should be able to extract strings" do 112 | s = encode(:string, "hello") 113 | assert {:ok, "hello", <<>>} = Extractors.extract(s, :string) 114 | end 115 | 116 | test "it should be able to extract booleans" do 117 | s = encode(:boolean, true) 118 | assert {:ok, true, <<>>} = Extractors.extract(s, :boolean) 119 | end 120 | 121 | test "it should be able to extract floats" do 122 | for type <- ~w(f64 f32)a do 123 | s = encode(type, 0.24) 124 | assert {:ok, val, <<>>} = 
Extractors.extract(s, type) 125 | end 126 | end 127 | end 128 | 129 | describe "extracting nullable values" do 130 | test "it should be able to extract a null int" do 131 | for base_type <- ~w(i64 i32 i16 i8 u64 u32 u16 u8)a, 132 | type = {:nullable, base_type} do 133 | val = :rand.uniform(127) 134 | s = encode(type, val) 135 | assert {:ok, val, <<>>} = Extractors.extract(s, type) 136 | 137 | s = encode(type, nil) 138 | assert {:ok, nil, <<>>} = Extractors.extract(s, type) 139 | end 140 | end 141 | 142 | test "it should be able to extract nullable strings" do 143 | for val <- ["hello", nil], 144 | type = {:nullable, :string} do 145 | s = encode(type, val) 146 | assert {:ok, ^val, <<>>} = Extractors.extract(s, type) 147 | end 148 | end 149 | 150 | test "it should be able to extract nullable booleans" do 151 | for val <- [true, false, nil], 152 | type = {:nullable, :boolean} do 153 | s = encode(type, val) 154 | assert {:ok, ^val, <<>>} = Extractors.extract(s, type) 155 | end 156 | end 157 | 158 | test "it should be able to extract nullable floats" do 159 | for base_type <- ~w(f64 f32)a, 160 | type = {:nullable, base_type} do 161 | for val <- [32.0, nil] do 162 | s = encode(type, val) 163 | assert {:ok, val, <<>>} == Extractors.extract(s, type) 164 | end 165 | end 166 | end 167 | end 168 | 169 | describe "lists" do 170 | test "it should be able to extract a list of ints" do 171 | for base_type <- ~w(i64 i32 i16 i8)a, 172 | list_type = {:list, base_type} do 173 | values = 1..10 |> Enum.to_list() 174 | s = encode(list_type, values) 175 | assert {:ok, values, <<>>} == Extractors.extract(s, list_type) 176 | end 177 | end 178 | 179 | test "it should be able to extract strings" do 180 | values = ~w(hi there people) 181 | s = encode({:list, :string}, values) 182 | assert {:ok, values, <<>>} == Extractors.extract(s, {:list, :string}) 183 | end 184 | end 185 | 186 | describe "lists with nullable elements" do 187 | test "it should be able to extract nullable ints" do 188 | for 
base_type <- ~w(i64 i32 i16 i8 u64 u32 u16 u8)a, 189 | list_type = {:list, {:nullable, base_type}} do 190 | values = [:random.uniform(127), nil, :random.uniform(127), nil] 191 | s = encode(list_type, values) 192 | assert {:ok, values, <<>>} == Extractors.extract(s, list_type) 193 | end 194 | end 195 | end 196 | end 197 | -------------------------------------------------------------------------------- /test/clickhousex/login_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.LoginTest do 2 | @moduledoc false 3 | end 4 | -------------------------------------------------------------------------------- /test/clickhousex/query_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.QueryTest do 2 | @moduledoc false 3 | use ClickhouseCase, async: true 4 | 5 | alias Clickhousex.Query 6 | alias Clickhousex.Result 7 | 8 | test "materialize view create query", ctx do 9 | create_statement = """ 10 | CREATE TABLE IF NOT EXISTS {{table}} ( 11 | name String 12 | ) ENGINE = Memory 13 | """ 14 | 15 | schema(ctx, create_statement) 16 | 17 | assert {:ok, %Query{type: :create}, _result} = 18 | schema(ctx, """ 19 | CREATE MATERIALIZED VIEW IF NOT EXISTS material_view 20 | ENGINE = MergeTree() ORDER BY name 21 | AS SELECT 22 | name 23 | FROM {{table}} 24 | """) 25 | end 26 | 27 | test "simple select", ctx do 28 | create_statement = """ 29 | CREATE TABLE IF NOT EXISTS {{table}} ( 30 | name String 31 | ) ENGINE = Memory 32 | """ 33 | 34 | schema(ctx, create_statement) 35 | 36 | assert {:ok, _, %Result{command: :updated, num_rows: 1}} = 37 | insert(ctx, "INSERT INTO {{table}} VALUES ('qwerty')", []) 38 | 39 | assert {:ok, _, %Result{command: :selected, columns: ["name"], num_rows: 1, rows: [{"qwerty"}]}} = select_all(ctx) 40 | end 41 | 42 | test "parametrized queries", ctx do 43 | create_statement = """ 44 | CREATE TABLE {{table}} ( 45 | id Int32, 46 | name String 
47 | ) ENGINE = Memory 48 | """ 49 | 50 | assert {:ok, _, %Result{}} = schema(ctx, create_statement) 51 | 52 | assert {:ok, _, %Result{command: :updated, num_rows: 1}} = 53 | insert(ctx, "INSERT INTO {{table}} VALUES (?, ?)", [ 54 | 1, 55 | "abyrvalg" 56 | ]) 57 | 58 | assert {:ok, _, 59 | %Result{ 60 | command: :selected, 61 | columns: ["id", "name"], 62 | num_rows: 1, 63 | rows: [{1, "abyrvalg"}] 64 | }} = select_all(ctx) 65 | end 66 | 67 | test "scalar db types", ctx do 68 | create_statement = """ 69 | CREATE TABLE {{table}} ( 70 | u64_val UInt64, 71 | u32_val UInt32, 72 | u16_val UInt16, 73 | u8_val UInt8, 74 | 75 | i64_val Int64, 76 | i32_val Int32, 77 | i16_val Int16, 78 | i8_val Int8, 79 | 80 | f64_val Float64, 81 | f32_val Float32, 82 | 83 | string_val String, 84 | fixed_string_val FixedString(5), 85 | 86 | date_val Date, 87 | date_time_val DateTime 88 | ) 89 | 90 | ENGINE = Memory 91 | """ 92 | 93 | assert {:ok, _, %Result{}} = schema(ctx, create_statement) 94 | 95 | date = Date.utc_today() 96 | datetime = DateTime.utc_now() 97 | 98 | assert {:ok, _, %Result{command: :updated, num_rows: 1}} = 99 | insert( 100 | ctx, 101 | "INSERT INTO {{table}} VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", 102 | [ 103 | 329, 104 | 328, 105 | 327, 106 | 32, 107 | 429, 108 | 428, 109 | 427, 110 | 42, 111 | 29.8, 112 | 4.0, 113 | "This is long", 114 | "hello", 115 | date, 116 | datetime 117 | ] 118 | ) 119 | 120 | assert {:ok, _, %Result{columns: column_names, rows: [row]}} = select_all(ctx) 121 | 122 | naive_datetime = 123 | datetime 124 | |> DateTime.to_naive() 125 | |> NaiveDateTime.truncate(:second) 126 | 127 | assert row == 128 | {329, 328, 327, 32, 429, 428, 427, 42, 29.8, 4.0, "This is long", "hello", date, naive_datetime} 129 | end 130 | 131 | test "nullables", ctx do 132 | create_statement = """ 133 | CREATE TABLE {{table}} ( 134 | id UInt64, 135 | u64_val Nullable(UInt64), 136 | string_val Nullable(String), 137 | date_val Nullable(Date), 138 | date_time_val 
Nullable(DateTime) 139 | ) ENGINE = Memory 140 | """ 141 | 142 | now_date = Date.utc_today() 143 | now_datetime = DateTime.utc_now() 144 | 145 | assert {:ok, _, %Result{}} = schema(ctx, create_statement) 146 | 147 | assert {:ok, _, %Result{command: :updated, num_rows: 1}} = 148 | insert( 149 | ctx, 150 | "INSERT INTO {{table}} VALUES (?, ?, ?, ?, ?)", 151 | [1, 2, "hi", now_date, now_datetime] 152 | ) 153 | 154 | assert {:ok, _, %Result{command: :updated, num_rows: 1}} = 155 | insert( 156 | ctx, 157 | "INSERT INTO {{table}} VALUES (?, ?, ?, ?, ?)", 158 | [2, nil, nil, nil, nil] 159 | ) 160 | 161 | assert {:ok, _, %Result{rows: rows}} = select_all(ctx) 162 | [row_1, row_2] = Enum.sort(rows, fn row_1, row_2 -> elem(row_1, 1) <= elem(row_2, 1) end) 163 | assert {1, 2, "hi", _, _} = row_1 164 | assert row_2 == {2, nil, nil, nil, nil} 165 | end 166 | 167 | test "arrays", ctx do 168 | create_statement = """ 169 | CREATE TABLE {{table}} ( 170 | id UInt64, 171 | arr_val Array(UInt64), 172 | nullable_val Array(Nullable(String)) 173 | ) ENGINE = Memory 174 | 175 | """ 176 | 177 | assert {:ok, _, %Result{}} = schema(ctx, create_statement) 178 | 179 | assert {:ok, _, %Result{command: :updated, num_rows: 1}} = 180 | insert(ctx, "INSERT INTO {{table}} VALUES (?, ?, ?)", [ 181 | 1, 182 | [1, 2, 3], 183 | ["hi", nil, "dude"] 184 | ]) 185 | 186 | assert {:ok, _, %Result{rows: [row]}} = select_all(ctx) 187 | 188 | assert row == {1, [1, 2, 3], ["hi", nil, "dude"]} 189 | end 190 | 191 | test "arrays of a nullable type", ctx do 192 | create_statement = """ 193 | CREATE TABLE {{table}} ( 194 | id UInt64, 195 | nullable_value Array(Nullable(UInt64)) 196 | ) Engine = Memory 197 | """ 198 | 199 | assert {:ok, _, %Result{}} = schema(ctx, create_statement) 200 | 201 | assert {:ok, _, %Result{command: :updated, num_rows: 1}} = 202 | insert(ctx, "INSERT INTO {{table}} VALUES (?, ?)", [1, [1, nil, 2, nil]]) 203 | 204 | assert {:ok, _, %Result{rows: [row]}} = select_all(ctx) 205 | assert row == 
{1, [1, nil, 2, nil]} 206 | end 207 | 208 | test "nested", ctx do 209 | create_statement = """ 210 | CREATE TABLE {{table}} ( 211 | id UInt64, 212 | fields Nested ( 213 | label String, 214 | count UInt64 215 | ) 216 | ) Engine = Memory 217 | """ 218 | 219 | assert {:ok, _, %Result{}} = schema(ctx, create_statement) 220 | 221 | assert {:ok, _, %Result{command: :updated, num_rows: 1}} = 222 | insert( 223 | ctx, 224 | "INSERT INTO {{table}} (id, fields.label, fields.count) VALUES (?, ?, ?)", 225 | [ 226 | 32, 227 | ["label_1", "label_2", "label_3"], 228 | [6, 9, 42] 229 | ] 230 | ) 231 | 232 | assert {:ok, _, %Result{rows: [row]}} = select_all(ctx) 233 | assert row == {32, ~w(label_1 label_2 label_3), [6, 9, 42]} 234 | 235 | assert {:ok, _, %Result{rows: [label_1, label_2, label_3]}} = 236 | select(ctx, "SELECT * from {{table}} ARRAY JOIN fields where id = 32", []) 237 | 238 | assert {32, "label_1", 6} == label_1 239 | assert {32, "label_2", 9} == label_2 240 | assert {32, "label_3", 42} == label_3 241 | end 242 | 243 | test "queries that insert more than one row", ctx do 244 | create_statement = """ 245 | CREATE TABLE {{table}} ( 246 | id Int32, 247 | name String 248 | ) ENGINE = Memory 249 | """ 250 | 251 | assert {:ok, _, %Result{}} = schema(ctx, create_statement) 252 | 253 | assert {:ok, _, %Result{command: :updated, num_rows: 1}} = 254 | insert(ctx, "INSERT INTO {{table}} VALUES (?, ?)", [1, "abyrvalg"]) 255 | 256 | insert(ctx, "INSERT INTO {{table}} VALUES (?, ?)", [2, "stinky"]) 257 | 258 | assert {:ok, _, 259 | %Result{ 260 | command: :selected, 261 | columns: ["id", "name"], 262 | num_rows: 2, 263 | rows: rows 264 | }} = select_all(ctx) 265 | 266 | assert {1, "abyrvalg"} in rows 267 | assert {2, "stinky"} in rows 268 | end 269 | 270 | test "selecting specific fields", ctx do 271 | create_statement = """ 272 | CREATE TABLE {{table}} ( 273 | id Int64, 274 | name String, 275 | email String 276 | ) ENGINE = Memory 277 | """ 278 | 279 | assert {:ok, _, %Result{}} 
= schema(ctx, create_statement) 280 | 281 | assert {:ok, _, %{command: :updated, num_rows: 1}} = 282 | insert(ctx, "INSERT INTO {{table}} VALUES (?, ?, ?)", [1, "foobie", "foo@bar.com"]) 283 | 284 | assert {:ok, _, %{command: :updated, num_rows: 1}} = 285 | insert(ctx, "INSERT INTO {{table}} VALUES (?, ?, ?)", [2, "barbie", "bar@bar.com"]) 286 | 287 | assert {:ok, _, %{rows: [row]}} = select(ctx, "SELECT email FROM {{table}} WHERE id = ?", [1]) 288 | assert row == {"foo@bar.com"} 289 | end 290 | 291 | test "selecting with in", ctx do 292 | create_statement = """ 293 | CREATE TABLE {{table}} ( 294 | id Int64, 295 | name String, 296 | email String 297 | ) ENGINE = Memory 298 | """ 299 | 300 | assert {:ok, _, %Result{}} = schema(ctx, create_statement) 301 | 302 | assert {:ok, _, %{command: :updated, num_rows: 1}} = 303 | insert(ctx, "INSERT INTO {{table}} VALUES (?, ?, ?)", [1, "foobie", "foo@bar.com"]) 304 | 305 | assert {:ok, _, %{command: :updated, num_rows: 1}} = 306 | insert(ctx, "INSERT INTO {{table}} VALUES (?, ?, ?)", [2, "barbie", "bar@bar.com"]) 307 | 308 | assert {:ok, _, %{rows: rows}} = select(ctx, "SELECT email FROM {{table}} WHERE id IN (?)", [[1, 2]]) 309 | 310 | assert [{"bar@bar.com"}, {"foo@bar.com"}] == Enum.sort(rows) 311 | end 312 | 313 | test "updating rows via alter", ctx do 314 | create_statement = """ 315 | CREATE TABLE {{table}} ( 316 | id Int64, 317 | name String, 318 | email String 319 | ) ENGINE = MergeTree 320 | PARTITION BY id 321 | ORDER BY id SETTINGS index_granularity = 8192 322 | """ 323 | 324 | assert {:ok, _, %Result{}} = schema(ctx, create_statement) 325 | 326 | assert {:ok, _, %{command: :updated, num_rows: 1}} = 327 | insert(ctx, "INSERT INTO {{table}} VALUES (?, ?, ?)", [1, "foobie", "foo@bar.com"]) 328 | 329 | assert {:ok, _, %{command: :updated, num_rows: 1}} = 330 | insert(ctx, "INSERT INTO {{table}} VALUES (?, ?, ?)", [2, "barbie", "bar@bar.com"]) 331 | 332 | assert {:ok, _, _} = 333 | select(ctx, "ALTER TABLE {{table}} 
UPDATE email = ? WHERE id = ?", [ 334 | "foobar@bar.com", 335 | 1 336 | ]) 337 | end 338 | end 339 | -------------------------------------------------------------------------------- /test/clickhousex/storage_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.StorageTest do 2 | use ClickhouseCase, async: true 3 | 4 | alias Clickhousex.Result 5 | 6 | test "can create and drop database", ctx do 7 | assert {:ok, _, %Result{}} = schema(ctx, "CREATE DATABASE other_db") 8 | assert {:ok, _, %Result{}} = schema(ctx, "DROP DATABASE other_db") 9 | end 10 | 11 | test "returns correct error when dropping database that doesn't exist", ctx do 12 | assert {:error, %{code: :database_does_not_exists}} = schema(ctx, "DROP DATABASE random_db ") 13 | end 14 | 15 | test "returns correct error when creating a database that already exists", ctx do 16 | assert {:error, %{code: :database_already_exists}} = schema(ctx, "CREATE DATABASE {{database}}") 17 | end 18 | end 19 | -------------------------------------------------------------------------------- /test/clickhousex/table_storage_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Clickhousex.TableStorageTest do 2 | use ClickhouseCase, async: true 3 | 4 | alias Clickhousex.Result 5 | 6 | test "can create and drop table", ctx do 7 | create_statement = """ 8 | CREATE TABLE {{table}} (id Int32) ENGINE = Memory 9 | """ 10 | 11 | assert {:ok, _, %Result{}} = schema(ctx, create_statement) 12 | 13 | assert {:ok, _, %Result{}} = schema(ctx, "DROP TABLE {{ table }}") 14 | end 15 | 16 | test "returns correct error when dropping table that doesn't exist", ctx do 17 | assert {:error, %{code: :base_table_or_view_not_found}} = schema(ctx, "DROP TABLE table_storage_test.not_exist") 18 | end 19 | 20 | test "returns correct error when creating a table that already exists", ctx do 21 | create_statement = """ 22 | CREATE TABLE {{ table }} 
23 | (id Int32) ENGINE = Memory 24 | """ 25 | 26 | assert {:ok, _, %Result{}} = schema(ctx, create_statement) 27 | assert {:error, %{code: :table_already_exists}} = schema(ctx, create_statement) 28 | end 29 | end 30 | -------------------------------------------------------------------------------- /test/test_helper.exs: -------------------------------------------------------------------------------- 1 | ExUnit.start() 2 | --------------------------------------------------------------------------------