├── .credo.exs ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── README.md ├── config └── config.exs ├── lib ├── flex │ ├── api.ex │ ├── application.ex │ ├── datapoints.ex │ ├── query.ex │ ├── stream.ex │ └── stream │ │ ├── supervisor.ex │ │ └── worker.ex └── test_support │ └── test_case.ex ├── mix.exs ├── mix.lock └── test ├── flex ├── api_test.exs ├── datapoints_test.exs └── query_test.exs └── test_helper.exs /.credo.exs: -------------------------------------------------------------------------------- 1 | # This file contains the configuration for Credo and you are probably reading 2 | # this after creating it with `mix credo.gen.config`. 3 | # 4 | # If you find anything wrong or unclear in this file, please report an 5 | # issue on GitHub: https://github.com/rrrene/credo/issues 6 | # 7 | %{ 8 | # 9 | # You can have as many configs as you like in the `configs:` field. 10 | configs: [ 11 | %{ 12 | # 13 | # Run any config using `mix credo -C `. If no config name is given 14 | # "default" is used. 15 | name: "default", 16 | # 17 | # these are the files included in the analysis 18 | files: %{ 19 | # 20 | # you can give explicit globs or simply directories 21 | # in the latter case `**/*.{ex,exs}` will be used 22 | included: ["lib/", "src/", "web/", "apps/", "test/"], 23 | excluded: [~r"/_build/", ~r"/deps/"] 24 | }, 25 | # 26 | # If you create your own checks, you must specify the source files for 27 | # them here, so they can be loaded by Credo before running the analysis. 28 | requires: [], 29 | # 30 | # Credo automatically checks for updates, like e.g. Hex does. 31 | # You can disable this behaviour below: 32 | check_for_updates: true, 33 | # 34 | # If you want to enforce a style guide and need a more traditional linting 35 | # experience, you can change `strict` to true below: 36 | strict: false, 37 | # 38 | # You can customize the parameters of any check by adding a second element 39 | # to the tuple. 
40 | # 41 | # To disable a check put `false` as second element: 42 | # 43 | # {Credo.Check.Design.DuplicatedCode, false} 44 | # 45 | checks: [ 46 | {Credo.Check.Consistency.ExceptionNames}, 47 | {Credo.Check.Consistency.LineEndings}, 48 | {Credo.Check.Consistency.SpaceAroundOperators}, 49 | {Credo.Check.Consistency.SpaceInParentheses}, 50 | {Credo.Check.Consistency.TabsOrSpaces}, 51 | 52 | # For some checks, like AliasUsage, you can only customize the priority 53 | # Priority values are: `low, normal, high, higher` 54 | {Credo.Check.Design.AliasUsage, false}, 55 | 56 | # For others you can set parameters 57 | 58 | # If you don't want the `setup` and `test` macro calls in ExUnit tests 59 | # or the `schema` macro in Ecto schemas to trigger DuplicatedCode, just 60 | # set the `excluded_macros` parameter to `[:schema, :setup, :test]`. 61 | {Credo.Check.Design.DuplicatedCode, excluded_macros: []}, 62 | 63 | # You can also customize the exit_status of each check. 64 | # If you don't want TODO comments to cause `mix credo` to fail, just 65 | # set this value to 0 (zero). 
66 | {Credo.Check.Design.TagTODO, exit_status: 2}, 67 | {Credo.Check.Design.TagFIXME}, 68 | 69 | {Credo.Check.Readability.FunctionNames}, 70 | {Credo.Check.Readability.LargeNumbers}, 71 | {Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 80}, 72 | {Credo.Check.Readability.ModuleAttributeNames}, 73 | {Credo.Check.Readability.ModuleDoc}, 74 | {Credo.Check.Readability.ModuleNames}, 75 | {Credo.Check.Readability.ParenthesesInCondition}, 76 | {Credo.Check.Readability.PredicateFunctionNames}, 77 | {Credo.Check.Readability.TrailingBlankLine}, 78 | {Credo.Check.Readability.TrailingWhiteSpace}, 79 | {Credo.Check.Readability.VariableNames}, 80 | 81 | {Credo.Check.Refactor.ABCSize}, 82 | # {Credo.Check.Refactor.CaseTrivialMatches}, # deprecated in 0.4.0 83 | {Credo.Check.Refactor.CondStatements}, 84 | {Credo.Check.Refactor.FunctionArity}, 85 | {Credo.Check.Refactor.MatchInCondition}, 86 | {Credo.Check.Refactor.PipeChainStart, false}, 87 | {Credo.Check.Refactor.CyclomaticComplexity}, 88 | {Credo.Check.Refactor.NegatedConditionsInUnless}, 89 | {Credo.Check.Refactor.NegatedConditionsWithElse}, 90 | {Credo.Check.Refactor.Nesting}, 91 | {Credo.Check.Refactor.UnlessWithElse}, 92 | 93 | {Credo.Check.Warning.IExPry}, 94 | {Credo.Check.Warning.IoInspect}, 95 | {Credo.Check.Warning.NameRedeclarationByAssignment}, 96 | {Credo.Check.Warning.NameRedeclarationByCase}, 97 | {Credo.Check.Warning.NameRedeclarationByDef}, 98 | {Credo.Check.Warning.NameRedeclarationByFn}, 99 | {Credo.Check.Warning.OperationOnSameValues}, 100 | {Credo.Check.Warning.BoolOperationOnSameValues}, 101 | {Credo.Check.Warning.UnusedEnumOperation}, 102 | {Credo.Check.Warning.UnusedKeywordOperation}, 103 | {Credo.Check.Warning.UnusedListOperation}, 104 | {Credo.Check.Warning.UnusedStringOperation}, 105 | {Credo.Check.Warning.UnusedTupleOperation}, 106 | {Credo.Check.Warning.OperationWithConstantResult}, 107 | 108 | # Custom checks can be created using `mix credo.gen.check`. 
109 | # 110 | ] 111 | } 112 | ] 113 | } 114 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # The directory Mix will write compiled artifacts to. 2 | /_build/ 3 | 4 | # If you run "mix test --cover", coverage assets end up here. 5 | /cover/ 6 | 7 | # The directory Mix downloads your dependencies sources to. 8 | /deps/ 9 | 10 | # Where 3rd-party dependencies like ExDoc output generated docs. 11 | /doc/ 12 | 13 | # Ignore .fetch files in case you like to edit your project deps locally. 14 | /.fetch 15 | 16 | # If the VM crashes, it generates a dump, let's ignore it too. 17 | erl_crash.dump 18 | 19 | # Also ignore archive artifacts (built via "mix archive.build"). 20 | *.ez 21 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 4 | This project adheres to [Semantic Versioning](http://semver.org/) 5 | 6 | ## [v0.2.1](https://github.com/esl/flex/compare/v0.2.0...v0.2.1) - 2018-09-05 7 | 8 | ### Fixed 9 | - Add logic for checking presence and validation of fill tag 10 | - Fix test in elixir 1.6 11 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2018 Erlang Solutions Ltd. 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Flex 2 | 3 | A simple InfluxDB client. 4 | 5 | ## Installation 6 | 7 | Add to your deps 8 | ```elixir 9 | deps do 10 | [ 11 | ... 12 | {:flex, "~> 0.1"} 13 | ... 14 | ] 15 | end 16 | ```` 17 | 18 | And to your applications (pre Elixir 1.4) 19 | ```elixir 20 | def application do 21 | [ 22 | applications: [ 23 | ... 24 | :flex, 25 | ... 26 | ], 27 | ... 28 | ] 29 | end 30 | ``` 31 | 32 | ## Test 33 | 34 | You'll need Influx serving locally on port 8086 (see `config/config.exs`): 35 | 36 | ```Shell 37 | docker pull influxdb && \ 38 | docker run -d -p 8086:8086 -v influxdb:/var/lib/influxdb influxdb 39 | ``` 40 | 41 | Then go for a `mix test`. 
--------------------------------------------------------------------------------
/config/config.exs:
--------------------------------------------------------------------------------
use Mix.Config

config :flex,
  test_host: "http://localhost:8086"

# import_config "#{Mix.env}.exs"
--------------------------------------------------------------------------------
/lib/flex/api.ex:
--------------------------------------------------------------------------------
defmodule Flex.API do
  @moduledoc """
  Responsible for interacting with InfluxDB HTTP API.

  The current implementation uses HTTPoison as an HTTP client.
  """

  @type epoch :: :ms
               | :s

  @typedoc """
  Set of parameters for this module

  - `:db`
    which database should be used for the query
  - `:epoch`
    sets the unit of timestamp either given in a query or returned in a response
  - `:host`
    address for InfluxDB HTTP API endpoint e.g. `http://localhost:8086`
  - `:pretty`
    if true, returned JSON will be pretty formatted.
    Default: `false`
  - `:chunked`
    if true, a response will be split into multiple responses.
    A response will be split when the end of a data series is reached
    or when `chunk_size` is reached, whatever occurs first.
    Default: `false`
  - `:chunk_size`
    sets the maximum size of a returned chunk. By default InfluxDB uses `10 000`
    if it is not set explicitly.
    Default: `nil`
  """
  @type param :: {:db, String.t}
               | {:epoch, epoch}
               | {:host, String.t}
               | {:pretty, boolean}
               | {:chunked, boolean}
               | {:chunk_size, integer}

  @type params :: [param]

  ###
  # API functions
  ###

  @doc """
  Performs a query through InfluxDB HTTP API. The response is given as a Stream.

  The query is given as a string in the first parameter. It makes sense to use
  this function only for queries expected to return a big amount of data.

  Also this function makes sense only with `chunked: true, chunk_size: size`
  parameters. If they are not given, the result will be returned as a single
  element, so there are no advantages of Streams.

  Required parameters: `:db`, `:epoch`, `:host`
  Optional parameters: `:pretty`, `:chunked`, `:chunk_size`
  """
  @spec stream(String.t, params) :: Enum.t | {:error, any}
  def stream(query, params) do
    required = [:db, :epoch, :host]
    optional = [:pretty, :chunked, :chunk_size]
    case check_params(params, required, optional) do
      {:ok, params} ->
        url = form_read_url(query, params)
        Flex.Stream.new_stream(url)
      error ->
        error
    end
  end

  @doc """
  Performs query through InfluxDB HTTP API.

  Query is given as a string in the first parameter.

  Required parameters: `:db`, `:epoch`, `:host`
  Optional parameters: `:pretty`, `:chunked`, `:chunk_size`
  """
  @spec query(String.t, params) :: {:ok, HTTPoison.Response.t} | {:error, any}
  def query(query, params) do
    required = [:db, :epoch, :host]
    optional = [:pretty, :chunked, :chunk_size]
    do_get_to_read_endpoint(query, params, required, optional)
  end

  @doc """
  Performs POST request including the first parameter as data. The first
  parameter is expected to be an Influx Line Protocol formatted binary.

  For more info regarding it see:
  https://docs.influxdata.com/influxdb/latest/write_protocols/line_protocol_tutorial/

  It requires Influx API endpoint, database name and epoch parameters.

  Actually epoch parameter is not required by InfluxDB - it implicitly assumes
  epoch is "ns" - nanosecond. However during working with InfluxDB we
  encountered a lot of issues related to it, thus it is safer to require it.

  Required parameters: `:db`, `:epoch`, `:host`
  Optional parameters: `:pretty`

  ```
  iex(1)> Flex.API.write("measurement", [])
  {:error, [missing: :db, missing: :epoch, missing: :host]}
  ```

  """
  @spec write(String.t, params) :: {:ok, HTTPoison.Response.t} | {:error, any}
  def write(data, params) do
    required = [:db, :epoch, :host]
    optional = [:pretty]
    do_post_to_write_endpoint(data, params, required, optional)
  end

  @doc """
  Perform database creation with given name, for given InfluxDB. It expects
  only one parameter - address of InfluxDB API endpoint.

      iex(1)> Flex.API.create_db("my_db", [])
      {:error, [missing: :host]}

      iex(2)> Flex.API.create_db("my_db", host: "http://localhost:8086")
      {:ok, %HTTPoison.Response{ ... }}

  Required parameters: `:host`
  Optional parameters: none
  """
  @spec create_db(String.t, params) :: {:ok, HTTPoison.Response.t}
                                     | {:error, any}
  def create_db(db_name, params) do
    required = [:host]
    optional = []
    query = "CREATE DATABASE \"#{db_name}\""
    do_post_to_read_endpoint(query, params, required, optional)
  end

  @doc """
  Performs "DROP DATABASE" database query.

  Required parameters: `:host`
  """
  @spec delete_database(String.t, params) :: {:ok, HTTPoison.Response.t}
                                           | {:error, any}
  def delete_database(db, params) do
    required = [:host]
    optional = [:pretty, :chunked, :chunk_size]
    query = "DROP DATABASE \"#{db}\""
    do_post_to_read_endpoint(query, params, required, optional)
  end

  ###
  # Schema exploration related queries
  ###

  @doc """
  Performs "SHOW MEASUREMENTS" database query.

  Result will include all available measurements in given database.

  Required parameters: `:host`, `:db`
  """
  @spec get_measurements(params) :: {:ok, HTTPoison.Response.t} | {:error, any}
  def get_measurements(params) do
    required = [:db, :host]
    optional = [:pretty, :chunked, :chunk_size]
    query = "SHOW MEASUREMENTS"
    do_get_to_read_endpoint(query, params, required, optional)
  end

  @doc """
  Performs "SHOW TAG KEYS" database query.

  Result will return all measurements available in database with tags included.

  Required parameters: `:host`, `:db`
  """
  @spec get_tag_keys(params) :: {:ok, HTTPoison.Response.t} | {:error, any}
  def get_tag_keys(params) do
    required = [:db, :host]
    optional = [:pretty, :chunked, :chunk_size]
    query = "SHOW TAG KEYS"
    do_get_to_read_endpoint(query, params, required, optional)
  end

  @doc """
  Performs "SHOW FIELD KEYS" database query.

  Result will return all measurements available in database with field types.

  Required parameters: `:host`, `:db`
  """
  @spec get_field_keys(params) :: {:ok, HTTPoison.Response.t} | {:error, any}
  def get_field_keys(params) do
    required = [:db, :host]
    optional = [:pretty, :chunked, :chunk_size]
    query = "SHOW FIELD KEYS"
    do_get_to_read_endpoint(query, params, required, optional)
  end

  @doc """
  Performs "SHOW DATABASES" database query.

  Result will return all available databases.

  Required parameters: `:host`
  """
  @spec get_databases(params) :: {:ok, HTTPoison.Response.t} | {:error, any}
  def get_databases(params) do
    required = [:host]
    optional = [:pretty, :chunked, :chunk_size]
    query = "SHOW DATABASES"
    do_get_to_read_endpoint(query, params, required, optional)
  end

  ###
  # Private functions
  ###

  @spec do_get_to_read_endpoint(String.t, params, [atom], [atom])
        :: {:ok, HTTPoison.Response.t} | {:error, any}
  defp do_get_to_read_endpoint(query, params, required, optional) do
    case check_params(params, required, optional) do
      {:ok, params} ->
        url = form_read_url(query, params)
        HTTPoison.get(url)
      {:error, reason} ->
        {:error, reason}
    end
  end

  defp do_post_to_read_endpoint(query, params, required, optional) do
    case check_params(params, required, optional) do
      {:ok, params} ->
        url = form_read_url(query, params)
        HTTPoison.post(url, "")
      {:error, reason} ->
        {:error, reason}
    end
  end

  defp do_post_to_write_endpoint(data, params, required, optional) do
    case check_params(params, required, optional) do
      {:ok, params} ->
        url = form_write_url(params)
        HTTPoison.post(url, data)
      {:error, reason} ->
        {:error, reason}
    end
  end

  defp form_write_url(params) do
    {host, params} = Map.pop(params, :host)
    uri = URI.parse(host)
    # On read the parameter is called :epoch; the /write endpoint expects
    # :precision - rename it here.
    {precision, params} = Map.pop(params, :epoch)
    query = Map.put(params, :precision, precision)
            |> URI.encode_query()
    %URI{uri | path: "/write", query: query}
    |> URI.to_string()
  end

  defp form_read_url(query, params) do
    {host, params} = Map.pop(params, :host)
    uri = URI.parse(host)
    query = maybe_put_query_param(query, params)
            |> URI.encode_query()
    %URI{uri | path: "/query", query: query}
    |> URI.to_string()
  end

  defp maybe_put_query_param("", params), do: params
  defp maybe_put_query_param(q, params), do: Map.put(params, "q", q)

  ###
  # Parameters checking functions
  ###

  # Validates the given parameters against required/optional key lists.
  # Accepts a keyword list (the documented `t:params/0` type) or a map;
  # normalizing to a map first is required because the success branch and
  # the URL-forming helpers use Map functions, which raise on lists.
  # Returns `{:ok, map}` with unknown keys dropped, or `{:error, keyword}`
  # listing every missing required key as `{:missing, key}`.
  @spec check_params(params, [atom], [atom]) :: {:ok, map} | {:error, Keyword.t}
  defp check_params(params, required, optional) do
    params = Enum.into(params, %{})
    given_params = Map.keys(params)
    missing = get_missing_params(given_params, required)
    bad = get_bad_params(given_params, required ++ optional)
    case missing do
      [] -> {:ok, Map.drop(params, bad)}
      errors -> {:error, errors}
    end
  end

  defp get_missing_params(params, required) do
    Enum.map(required -- params, &({:missing, &1}))
  end

  defp get_bad_params(params, allowed) do
    Enum.map(params -- allowed, &(&1))
  end

end
--------------------------------------------------------------------------------
/lib/flex/application.ex:
--------------------------------------------------------------------------------
defmodule Flex.Application do
  @moduledoc false

  use Application

  def start(_type, _args) do
    import Supervisor.Spec, warn: false

    children = [
      supervisor(Flex.Stream.Supervisor, [])
    ]

    opts = [strategy: :one_for_one, name: Flex.Supervisor]
    Supervisor.start_link(children, opts)
  end
end
--------------------------------------------------------------------------------
/lib/flex/datapoints.ex:
--------------------------------------------------------------------------------
defmodule Flex.Datapoints do

  @moduledoc """
  Read and write the Influx line protocol for DB queries
  """

  @type data :: %{required(String.t) => String.t}

  @typedoc """
  Datapoint is formatted response from InfluxDB.
11 | 12 | List of datapoints is returned from `format_results/1` function. It simplifies 13 | transformations on InfluxDB responses. 14 | 15 | ## Example datapoint: 16 | ``` 17 | %{"name" => "amoc.times.message_ttd.max", 18 | "data" => [ 19 | %{"value" => 134633.0, "time" => "2017-02-22T13:36:51Z"} 20 | %{"value" => 134634.0, "time" => "2017-02-22T13:37:01Z"} 21 | ] 22 | } 23 | ``` 24 | """ 25 | @type datapoint :: %{String.t => String.t, 26 | String.t => [data]} 27 | 28 | @type datapoints :: [datapoint] 29 | 30 | @typedoc """ 31 | `t:datapoint/0` with list of tags. 32 | 33 | This should be used for datapoints that describes metrics. It can be created 34 | with `add_tags_to_datapoint/2`. See its docs for examples. 35 | """ 36 | @type datapoint_with_tags :: %{String.t => String.t, #name 37 | String.t => [data], #data 38 | String.t => [String.t]} #tags 39 | 40 | @type datapoints_with_tags :: [datapoint_with_tags] 41 | 42 | @doc """ 43 | Converts values from valid datapoint to InfluxDB Line Protocol. 44 | 45 | Structure of InfluxDB Line Protocol: 46 | ``` 47 | [,=] = [] 48 | ``` 49 | 50 | Map should have form of: 51 | 52 | ``` 53 | %{"name" => "amoc.times.message_ttd.max", 54 | "data" => [ 55 | %{ 56 | "max" => 1346342154.0, 57 | "time" => "2017-02-22T13:36:51Z", 58 | "node" => "first_one" 59 | }, 60 | ] 61 | "tags" => ["node"] 62 | } 63 | 64 | ``` 65 | 66 | Parameters 67 | - `:extra_tags` - allows to pass extra tags, which will be added to 68 | metric while converting. 69 | - `:cast` - see `escape_field/2`. 
70 | 71 | """ 72 | @spec to_line_protocol(datapoint, Keyword.t) :: Enumerable.t 73 | def to_line_protocol(measurement, params \\ []) do 74 | name = measurement["name"] 75 | tags = Map.get(measurement, "tags", []) 76 | {extra_tags, params} = Keyword.pop(params, :extra_tags, "") 77 | measurement["data"] 78 | |> Enum.map(&(make_line_protocol(&1, name, tags, extra_tags, params))) 79 | end 80 | 81 | @doc """ 82 | This functions tranforms InfluxDB bare JSON response into internal 83 | representation of datapoint. 84 | 85 | Internal representation of datapoint is map with two fields: `"name"` and 86 | `"data"`. 87 | 88 | For example: 89 | 90 | ``` 91 | iex> results = %{"results" => 92 | [%{"series" => 93 | [%{"columns" => ["time", "max"], 94 | "name" => "amoc.times.message_ttd.max", 95 | "values" => [["2017-02-22T13:36:51Z", 1346342154.0]] 96 | }], 97 | "statement_id" => 0}]} 98 | iex> format_results(results) 99 | [ 100 | %{"name" => "amoc.times.message_ttd.max", 101 | "data" => [%{"max" => 1346342154.0, "time" => "2017-02-22T13:36:51Z"}] 102 | } 103 | ] 104 | ``` 105 | """ 106 | @spec format_results(map) :: datapoints 107 | def format_results(results) do 108 | get_in(results, ["results", Access.all()]) 109 | |> Enum.flat_map(&parse_statement/1) 110 | end 111 | 112 | @doc """ 113 | This function adds to datapoint map another field `tags`. 114 | 115 | Tags are retrievied from second argument, which is expected to be a result 116 | of `SHOW TAG KEYS` query formatted with `format_results/1`. 
@doc """
Escapes characters that may not appear unescaped in the InfluxDB
Line Protocol: `,`, `=`, space and `"`.

Non-binary values are returned unchanged.
"""
@spec escape_chars(String.t) :: String.t
def escape_chars(value) when is_binary(value) do
  # Prefix each special character with a backslash; replacements never
  # produce new special characters, so a single pass per char is enough.
  Enum.reduce([",", "=", " ", "\""], value, fn special, escaped ->
    String.replace(escaped, special, "\\" <> special)
  end)
end

def escape_chars(value), do: value
176 | """ 177 | @spec escape_field(term, Keyword.t) :: String.t 178 | def escape_field(value, params), do: do_escape_field(value, params[:cast]) 179 | 180 | defp do_escape_field(value, :int_to_float) when is_integer(value), 181 | do: "#{value}" 182 | defp do_escape_field(value, _) when is_integer(value), 183 | do: "#{value}i" 184 | defp do_escape_field(value, :float_to_int) when is_float(value), 185 | do: "#{round(value)}i" 186 | defp do_escape_field(value, _) when is_float(value) or is_boolean(value), 187 | do: "#{value}" 188 | defp do_escape_field(value, _) when is_binary(value), do: "\"#{value}\"" 189 | 190 | ### 191 | # Private functions 192 | ### 193 | 194 | @spec parse_statement(map) :: [map] 195 | defp parse_statement(%{"error" => msg}), 196 | do: [%{"name" => "error", "data" => msg}] 197 | 198 | defp parse_statement(%{"series" => series}), 199 | do: Enum.map(series, &parse_seria/1) 200 | 201 | defp parse_statement(_), 202 | do: [%{"name" => "error", "data" => "empty response"}] 203 | 204 | @spec parse_seria(map) :: map 205 | defp parse_seria(seria) do 206 | name = seria["name"] 207 | keys = seria["columns"] 208 | key_values_map = Enum.map(seria["values"], 209 | &(Enum.zip(keys, &1) |> Enum.into(%{}))) 210 | %{"name" => name, 211 | "data" => key_values_map} 212 | end 213 | 214 | # This is how Line Protocol look like: 215 | # weather,location=us-midwest temperature=82 1465839830100400200 216 | # | -------------------- -------------- | 217 | # | | | | 218 | # | | | | 219 | # +-----------+--------+-+---------+-+---------+ 220 | # |measurement|,tag_set| |field_set| |timestamp| 221 | # +-----------+--------+-+---------+-+---------+ 222 | # 223 | # Provided data should be in form of map: 224 | # data = %{"value" => 122, 225 | # "some tag" => tag_value} 226 | # 227 | # measurement_name is just string 228 | # 229 | # tags is a list of strings. It says which keys in data should be 230 | # formed as tag. All other keys will be formed as fields. 
231 | # Only "time" key is an exception. It is treated as a timestamp. 232 | # 233 | # Extra tags is already formatted as part of Line Protocol 234 | 235 | @spec make_line_protocol(map, String.t, [String.t], String.t, Keyword.t) :: String.t 236 | defp make_line_protocol(data, measurement_name, tags, extra_tags, params) do 237 | {timestamp, data} = Map.pop(data, "time") 238 | {tags, fields} = Enum.split_with(data, &(is_tag?(&1, tags))) 239 | tags = tuples_to_line(tags, :tag, params) 240 | fields = tuples_to_line(fields, :field, params) 241 | tags = [tags, extra_tags] |> Enum.filter(&(&1 != "")) |> Enum.join(",") 242 | case tags do 243 | "" -> "#{measurement_name} #{fields} #{timestamp}" 244 | tags -> "#{measurement_name},#{tags} #{fields} #{timestamp}" 245 | end 246 | end 247 | 248 | defp is_tag?({key, _}, tags), do: key in tags 249 | 250 | # part of making line protocol: 251 | # [{"a", "b"}, {"c", "d"}] -> "a=b,c=d" 252 | defp tuples_to_line(tuples, :tag, _) do 253 | tuples 254 | |> Enum.map(fn ({k, v}) -> "#{k}=#{escape_chars(v)}" end) 255 | |> Enum.join(",") 256 | end 257 | 258 | defp tuples_to_line(tuples, :field, params) do 259 | tuples 260 | |> Enum.map(fn ({k, v}) -> "#{k}=#{escape_field(v, params)}" end) 261 | |> Enum.join(",") 262 | end 263 | end 264 | -------------------------------------------------------------------------------- /lib/flex/query.ex: -------------------------------------------------------------------------------- 1 | defmodule Flex.Query do 2 | @moduledoc """ 3 | This module is used for constructing InfluxDB Queries. 4 | 5 | Queries are built using Query struct: 6 | ``` 7 | %Query{fields: nil, from: nil, group_by: nil, measurements: nil, 8 | to: nil, where: []} 9 | ``` 10 | 11 | ## measurements 12 | This is mandatory option to build query expression. Is is expected to be 13 | a list of string. 14 | 15 | ## fields 16 | It is expected to be list of strings. When nothing is provided, 17 | it will get all fields. This is optional. 
18 | 19 | Examples of transformations: 20 | 21 | iex(1)> %Query{fields: nil, measurements: ["ttd"]} 22 | |> Query.build_query 23 | {:ok, "SELECT * FROM \\"ttd\\""} 24 | 25 | iex(2)> %Query{fields: ["max(value)", "test_id"], measurements: ["ttd"]} 26 | |> Query.build_query 27 | {:ok, "SELECT max(value),test_id FROM \\"ttd\\""} 28 | 29 | ## where 30 | This is optional parameter for filtering data we are quering for. It expects 31 | a list of `where()` tuples. They are in form: 32 | ``` 33 | {field :: String.t, value :: expression(), comparator :: comparator()} 34 | ``` 35 | First one indicated field name, second is expression or literal, which Influx 36 | is going to compare with field and last one is comparator. Values will be 37 | espaced if need (e.g. string literals should be escaped with `'`). 38 | 39 | If we want to put expression into value, we should point directly that 40 | it is expression and it should not be escaped: 41 | ``` 42 | {:expr, "now() - 20w"} 43 | ``` 44 | 45 | Examples: 46 | 47 | iex(1)> %Query{measurements: ["tdd"], 48 | where: [{"start_type", "manual", :=}]} 49 | |> Query.build_query 50 | {:ok, "SELECT * FROM \\"tdd\\" WHERE start_type = 'manual'"} 51 | 52 | iex(2)> %Query{measurements: ["tdd"], 53 | where: [{"time", {:expr, "now() - 2w"}, :>}]} 54 | |> Query.build_query 55 | {:ok, "SELECT * FROM \\"tdd\\" WHERE time > now() - 2w"} 56 | 57 | ## from and to 58 | This is simplifier for specifying `where` parameter. It is common to query 59 | with specific time barriers. 
So instead of writing: 60 | ``` 61 | %Query{measurements: ["tdd"], 62 | where: [{"time", {:expr, "now() - 2w"}, :>}, 63 | {"time", {:expr, "now() - 1w"}, :<}]} 64 | ``` 65 | we can write: 66 | ``` 67 | iex(1)> %Query{measurements: ["tdd"], 68 | from: "now() - 2w", 69 | to: "now() - 1w"} 70 | |> Query.build_query 71 | {:ok, "SELECT * FROM \\"tdd\\" WHERE time > now() - 2w and time < now() - 1w"} 72 | ``` 73 | ## group by 74 | It expects a list of fields, which response should be groupped by. 75 | 76 | Response could be also groupped by time, however InfluxDB API requires to 77 | put time limits in WHERE clause 78 | 79 | ``` 80 | iex(1)> %Query{measurements: ["tdd"], 81 | from: "now() - 2w", 82 | group_by: ["time(2d)"]} 83 | |> Query.build_query 84 | {:ok, "SELECT * FROM \\"tdd\\" WHERE time > now() - 2w GROUP BY time(2d)"} 85 | ``` 86 | 87 | Response lacking datapoints for given time range while using time() 88 | can be filled with user defined values. Add [fill()] to `group_by` 89 | list in a Query struct with one of the fill_options: any numerical value, `null`, 90 | `none`, `previous`, `linear`. 
@doc """
This function builds an InfluxDB query from a `Query` struct.

It validates the correctness of the given parameters and builds a string
that represents an actual query to be sent to Influx.

Returns `{:ok, query_string}` on success and `{:error, reason}` when any
part of the struct fails validation.

NOTE: Module is in early development and may build incorrect queries!
"""
@spec build_query(__MODULE__.t) :: {:ok, String.t} | {:error, any}
def build_query(%__MODULE__{} = query) do
  # Every helper returns {:ok, part} or {:error, reason}; a failing step
  # falls out of the `with` unchanged, so no `else` clause is needed.
  with {:ok, query} <- add_timestamps_to_where(query),
       {:ok, measurement} <- build_measurement(query),
       {:ok, fields} <- build_fields(query),
       {:ok, where} <- build_where(query),
       {:ok, group_by} <- build_group_by(query) do
    {:ok, "SELECT #{fields} FROM #{measurement}#{where}#{group_by}"}
  end
end
152 | """ 153 | @spec stack_queries([String.t]) :: String.t | {:error, any} 154 | def stack_queries(string_queries) do 155 | Enum.reduce(string_queries, &stack_queries/2) 156 | end 157 | 158 | # Private functions 159 | 160 | # InfluxDB HTTP API allows to send multiple queries at once by delimiting them 161 | # with a semicolon - and this is exactly what this function does. 162 | # 163 | # Look at the following link to see the details: 164 | # https://docs.influxdata.com/influxdb/v0.9/guides/querying_data/#multiple-queries 165 | @spec stack_queries(String.t, String.t) :: String.t 166 | defp stack_queries(query, accumulated_queries), 167 | do: "#{query};#{accumulated_queries}" 168 | 169 | # This function converts fields `from` and `to` into WHERE expression. 170 | # 171 | # Theese fields are just shortends - e.g. we know that value `from` will be 172 | # compared to a time field in query, with greater comparator: `:>` 173 | defp add_timestamps_to_where(%__MODULE__{} = query) do 174 | query = maybe_add_from(query) |> maybe_add_to() 175 | {:ok, query} 176 | end 177 | 178 | defp maybe_add_from(%__MODULE__{from: nil} = query), do: query 179 | defp maybe_add_from(%__MODULE__{from: from, where: wheres} = query) do 180 | where = [{"time", {:expr, from}, :>} | wheres] 181 | query 182 | |> Map.put(:where, where) 183 | |> Map.put(:from, nil) 184 | end 185 | 186 | defp maybe_add_to(%__MODULE__{to: nil} = query), do: query 187 | defp maybe_add_to(%__MODULE__{to: to, where: wheres} = query) do 188 | where = [{"time", {:expr, to}, :<} | wheres] 189 | query 190 | |> Map.put(:where, where) 191 | |> Map.put(:to, nil) 192 | end 193 | 194 | defp build_measurement(%__MODULE__{measurements: m}) 195 | when is_list(m) and length(m) > 0 do 196 | m = m 197 | |> Enum.map(&escape_val(&1, "\"")) 198 | |> Enum.join(",") 199 | {:ok, "#{m}"} 200 | end 201 | defp build_measurement(%__MODULE__{}), do: {:error, :measurements} 202 | 203 | defp build_fields(%__MODULE__{fields: nil}), do: {:ok, "*"} 204 
| defp build_fields(%__MODULE__{fields: f}), do: {:ok, Enum.join(f, ",")} 205 | 206 | defp build_where(%__MODULE__{where: nil}), do: {:ok, ""} 207 | defp build_where(%__MODULE__{where: where}) do 208 | {valid, invalid} = where 209 | |> Enum.map(&parse_where/1) 210 | |> Enum.split_with(&valid?/1) 211 | cond do 212 | valid != [] and invalid == [] -> 213 | {:ok, " WHERE " <> Enum.join(valid, " and ")} 214 | valid == [] and invalid == [] -> 215 | {:ok, ""} 216 | true -> 217 | {:error, invalid} 218 | end 219 | end 220 | 221 | @valid_comparators [:=, :<, :<=, :>, :>=, :"=~", :"!~"] 222 | 223 | defp parse_where({field, {:expr, expression}, comparator}) do 224 | case comparator in @valid_comparators do 225 | true -> "#{field} #{comparator} #{expression}" 226 | false -> {:error, {:invalid_op, comparator}} 227 | end 228 | end 229 | defp parse_where({field, value, comparator}) do 230 | case comparator in @valid_comparators do 231 | true -> "#{field} #{comparator} #{escape_val(value, "'")}" 232 | false -> {:error, {:invalid_op, comparator}} 233 | end 234 | end 235 | 236 | defp build_group_by(%__MODULE__{group_by: nil}), do: {:ok, ""} 237 | defp build_group_by(%__MODULE__{group_by: "*"}), do: {:ok, " GROUP BY *"} 238 | defp build_group_by(%__MODULE__{group_by: group_by, where: wheres}) do 239 | {valid, invalid} = group_by 240 | |> Enum.map(&parse_group_by(&1, wheres)) 241 | |> Enum.split_with(&valid?/1) 242 | cond do 243 | valid != [] and invalid == [] -> 244 | {:ok, " GROUP BY " <> join_group_by(valid)} 245 | valid == [] and invalid == [] -> 246 | {:ok, ""} 247 | true -> 248 | {:error, invalid} 249 | end 250 | end 251 | 252 | # GROUP BY time(),[tag_key] [fill()] 253 | defp parse_group_by("time(" <> _ = time, wheres) do 254 | # we need to check for time condition where clause because grouping 255 | # by time is disallowed without giving timerange. 
@fill_opts ["linear", "none", "null", "previous"]

# Returns true when the `fill(...)` group-by entry carries a supported
# option: one of @fill_opts or any numerical value.
#
# A malformed expression (e.g. "fill(previous))" or trailing garbage after
# the closing parenthesis) now returns false instead of raising a
# MatchError from the `[opt, ""]` assertive match.
defp valid_fill_opt?("fill(" <> rest) do
  case String.split(rest, ")") do
    [opt, ""] -> opt in @fill_opts or number?(opt)
    _ -> false
  end
end

# True when `opt` is a complete numerical literal (Float.parse/1 also
# accepts plain integers), with no trailing characters.
defp number?(opt) do
  case Float.parse(opt) do
    {_number, ""} -> true
    _ -> false
  end
end
defmodule Flex.Stream do
  @moduledoc """
  This module exposes API for wrapping HTTP requests into Stream
  """

  alias Flex.Stream.{Supervisor, Worker}

  @doc """
  Returns HTTP response wrapped into Stream.

  ## Example
      iex(1)> Flex.Stream.new_stream("tide-erlang-solutions.com")
      #Function<51.122079345/2 in Stream.resource/3>
  """
  @spec new_stream(String.t) :: Enum.t
  def new_stream(url) do
    Stream.resource(
      # start: spawn a supervised worker that performs the chunked request
      fn ->
        {:ok, worker} = Supervisor.new_worker(url)
        worker
      end,
      # next: demand one chunk from the worker, halting when exhausted
      fn worker ->
        case Worker.get_chunk(worker) do
          {:chunk, chunk} -> {[chunk], worker}
          :halt -> {:halt, worker}
        end
      end,
      # cleanup: stop the worker once the stream is finished
      &Worker.stop/1
    )
  end
end
# Callbacks

def init(url) do
  # Defer the actual HTTP request to a cast so init/1 returns quickly;
  # the cast is enqueued before any other message can reach the server,
  # so the state is fully initialized before the first get_chunk call.
  # NOTE: the message tag was previously misspelled as :initalize — both
  # the sender (here) and the receiver (below) are fixed together.
  GenServer.cast(self(), {:initialize, url})
  {:ok, %{id: nil, chunks: nil, reply_to: nil}}
end

# Starts the chunked HTTP request; chunks are delivered to this process
# as HTTPoison.Async* messages handled by the handle_info/2 clauses.
def handle_cast({:initialize, url}, _state) do
  resp = HTTPoison.get!(url, [], [stream_to: self(),
                                  recv_timeout: @influx_query_timeout])
  {:noreply, %{id: resp.id, chunks: [], more_chunks: true, reply_to: nil}}
end
However 48 | # we know that there will be more chunks, as we did not received 49 | # %HTTPostion.AsyncEnd yet 50 | def handle_call(:get_chunk, from, %{chunks: [], more_chunks: true} = state) do 51 | state = %{state | reply_to: from} 52 | {:noreply, state} 53 | end 54 | 55 | # Server is replaying with `:halt` atom to indicate there will be no more 56 | # chunks. We know that, because we received %HTTPostion.AsyncEnd 57 | def handle_call(:get_chunk, _from, 58 | %{chunks: [], more_chunks: false} = state) do 59 | {:reply, :halt, state} 60 | end 61 | 62 | # Server acumulates new chunk, when there is no client demending chunk. 63 | def handle_info(%HTTPoison.AsyncChunk{id: id, chunk: chunk}, 64 | %{id: id, reply_to: nil} = state) do 65 | {:noreply, %{state | chunks: [chunk | state.chunks]}} 66 | end 67 | 68 | # Server do not accumulate new chunk, where there is client waiting for 69 | # chunk. Instead of storing it, chunk is send directly to client. 70 | def handle_info(%HTTPoison.AsyncChunk{id: id, chunk: chunk}, 71 | %{id: id, reply_to: pid} = state) do 72 | GenServer.reply(pid, {:chunk, chunk}) 73 | {:noreply, %{state | reply_to: nil}} 74 | end 75 | 76 | # If there is end of Stream and there is no client waiting for chunk, server 77 | # just marks in its state, that it is the end. 78 | def handle_info(%HTTPoison.AsyncEnd{id: id}, 79 | %{id: id, reply_to: nil} = state) do 80 | {:noreply, %{state | more_chunks: false}} 81 | end 82 | 83 | # If there is end of stream AND thre is nothing in accumulator AND there is 84 | # client waiting for response we replay with `:halt` to indicate there will be 85 | # no more chunks. 
86 | def handle_info(%HTTPoison.AsyncEnd{id: id}, 87 | %{id: id, chunks: [], reply_to: pid} = state) do 88 | GenServer.reply(pid, :halt) 89 | state = %{state | more_chunks: false, reply_to: nil} 90 | {:noreply, state} 91 | end 92 | 93 | # Handle headers, do nothing 94 | def handle_info(%HTTPoison.AsyncHeaders{id: id}, %{id: id} = state) do 95 | {:noreply, state} 96 | end 97 | 98 | # Handle status, do nothing if successful 99 | def handle_info(%HTTPoison.AsyncStatus{code: 200, id: id}, 100 | %{id: id} = state) do 101 | {:noreply, state} 102 | end 103 | 104 | # Handle status, stop if response is not 200 105 | def handle_info(%HTTPoison.AsyncStatus{code: code, id: id}, 106 | %{id: id} = state) do 107 | {:stop, {:bad_code, code}, state} 108 | end 109 | end 110 | -------------------------------------------------------------------------------- /lib/test_support/test_case.ex: -------------------------------------------------------------------------------- 1 | defmodule Flex.TestCase do 2 | @moduledoc """ 3 | This module defines the test case to be used by tests that require interaction 4 | with InfluxDB 5 | 6 | In order to use this case template, you need to configure address of running 7 | InfluxDB instance API: 8 | 9 | config :flex, 10 | test_host: "http://localhost:8086" 11 | """ 12 | 13 | use ExUnit.CaseTemplate 14 | 15 | using do 16 | influx_host = Application.get_env(:flex, :test_host) 17 | 18 | if is_nil(influx_host) do 19 | raise ":test_host application env must be set in order to use " <> 20 | "Flex.TestCase" 21 | end 22 | 23 | ping_url = influx_host <> "/ping" 24 | case HTTPoison.head(ping_url) do 25 | {:ok, %HTTPoison.Response{status_code: 204}} -> 26 | :ok 27 | error -> 28 | raise "It seems InfluxDB at #{influx_host} is down. 
Error message: " <> 29 | "#{inspect(error)}" 30 | end 31 | 32 | quote do 33 | end 34 | end 35 | 36 | # InfluxDB will be cleared before each test case 37 | setup do 38 | host = Application.get_env(:flex, :test_host) 39 | db = "test_db" 40 | measurement_name = "measurement" 41 | tag = "tag" 42 | tag_value = "my_tag" 43 | measurement = 44 | "#{measurement_name},#{tag}=#{tag_value} value=2000,value2=2000" 45 | 46 | clear_db(host) 47 | create_db(db, host) 48 | insert_measurement(measurement, db, host) 49 | 50 | context = %{influx_host: host, 51 | present_measurement: measurement_name, 52 | present_db: db, 53 | present_tag: tag, 54 | tag_value: tag_value} 55 | {:ok, context} 56 | end 57 | 58 | def clear_db(host) do 59 | query = host <> "/query?q=SHOW+DATABASES" 60 | %HTTPoison.Response{body: body, status_code: 200} = HTTPoison.get!(query) 61 | result = Poison.decode!(body) 62 | dbs = get_in(result, ["results", Access.at(0), "series", Access.at(0), 63 | "values"]) |> List.flatten # get out values from map 64 | dbs = List.delete(dbs, "_internal") 65 | Enum.each(dbs, &delete_db(&1, host)) 66 | end 67 | 68 | def create_db(db, host) do 69 | query = host <> "/query?q=CREATE+DATABASE+#{db}" 70 | %HTTPoison.Response{status_code: 200} = HTTPoison.post!(query, "") 71 | end 72 | 73 | def delete_db(db, host) do 74 | query = host <> "/query?q=DROP+DATABASE+#{db}" 75 | %HTTPoison.Response{status_code: 200} = HTTPoison.post!(query, "") 76 | end 77 | 78 | def insert_measurement(measurement, db, host) do 79 | query = host <> "/write?db=#{db}" 80 | %HTTPoison.Response{status_code: 204} = HTTPoison.post!(query, measurement) 81 | end 82 | 83 | end 84 | -------------------------------------------------------------------------------- /mix.exs: -------------------------------------------------------------------------------- 1 | defmodule Flex.Mixfile do 2 | use Mix.Project 3 | 4 | def project do 5 | [ 6 | app: :flex, 7 | version: "0.2.1", 8 | elixir: "~> 1.6", 9 | build_embedded: Mix.env == 
:prod, 10 | start_permanent: Mix.env == :prod, 11 | deps: deps() 12 | ] 13 | end 14 | 15 | def application do 16 | [ 17 | applications: [:logger, :httpoison, :poison], 18 | mod: {Flex.Application, []} 19 | ] 20 | end 21 | 22 | defp deps do 23 | [ 24 | {:httpoison, "~> 0.13"}, 25 | {:poison, "~> 2.2"}, 26 | {:ex_doc, "~> 0.18", only: :dev}, 27 | {:earmark, "~> 1.2", only: :dev}, 28 | {:dialyxir, "~> 0.5", only: :dev}, 29 | {:credo, "~> 0.8", only: :dev}, 30 | ] 31 | end 32 | end 33 | -------------------------------------------------------------------------------- /mix.lock: -------------------------------------------------------------------------------- 1 | %{"bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm"}, 2 | "certifi": {:hex, :certifi, "2.0.0", "a0c0e475107135f76b8c1d5bc7efb33cd3815cb3cf3dea7aefdd174dabead064", [:rebar3], [], "hexpm"}, 3 | "credo": {:hex, :credo, "0.8.10", "261862bb7363247762e1063713bb85df2bbd84af8d8610d1272cd9c1943bba63", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}], "hexpm"}, 4 | "dialyxir": {:hex, :dialyxir, "0.5.1", "b331b091720fd93e878137add264bac4f644e1ddae07a70bf7062c7862c4b952", [:mix], [], "hexpm"}, 5 | "earmark": {:hex, :earmark, "1.2.4", "99b637c62a4d65a20a9fb674b8cffb8baa771c04605a80c911c4418c69b75439", [:mix], [], "hexpm"}, 6 | "ex_doc": {:hex, :ex_doc, "0.18.2", "993e0a95e9fbb790ac54ea58e700b45b299bd48bc44b4ae0404f28161f37a83e", [:mix], [{:earmark, "~> 1.1", [hex: :earmark, repo: "hexpm", optional: false]}], "hexpm"}, 7 | "hackney": {:hex, :hackney, "1.11.0", "4951ee019df102492dabba66a09e305f61919a8a183a7860236c0fde586134b6", [:rebar3], [{:certifi, "2.0.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "5.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "1.0.2", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.1", 
[hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm"}, 8 | "httpoison": {:hex, :httpoison, "0.13.0", "bfaf44d9f133a6599886720f3937a7699466d23bb0cd7a88b6ba011f53c6f562", [:mix], [{:hackney, "~> 1.8", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm"}, 9 | "idna": {:hex, :idna, "5.1.0", "d72b4effeb324ad5da3cab1767cb16b17939004e789d8c0ad5b70f3cea20c89a", [:rebar3], [{:unicode_util_compat, "0.3.1", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm"}, 10 | "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm"}, 11 | "mimerl": {:hex, :mimerl, "1.0.2", "993f9b0e084083405ed8252b99460c4f0563e41729ab42d9074fd5e52439be88", [:rebar3], [], "hexpm"}, 12 | "poison": {:hex, :poison, "2.2.0", "4763b69a8a77bd77d26f477d196428b741261a761257ff1cf92753a0d4d24a63", [:mix], [], "hexpm"}, 13 | "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.1", "28a4d65b7f59893bc2c7de786dec1e1555bd742d336043fe644ae956c3497fbe", [:make, :rebar], [], "hexpm"}, 14 | "unicode_util_compat": {:hex, :unicode_util_compat, "0.3.1", "a1f612a7b512638634a603c8f401892afbf99b8ce93a45041f8aaca99cadb85e", [:rebar3], [], "hexpm"}} 15 | -------------------------------------------------------------------------------- /test/flex/api_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Flex.APITest do 2 | use Flex.TestCase 3 | 4 | alias Flex.{API, Query, Datapoints} 5 | alias HTTPoison.Response 6 | 7 | require Logger 8 | 9 | @moduletag :external 10 | 11 | test "List of databases can be retrieved", %{influx_host: host} do 12 | params = %{host: host} 13 | assert {:ok, %Response{status_code: 200}} = API.get_databases(params) 14 | end 15 | 16 | test "Database can be created", %{influx_host: host} do 17 | # given 18 | params = %{host: host} 19 | db = "my_db" 20 | # when 21 | {:ok, %Response{status_code: 200}} = API.create_db(db, params) 22 | # then 23 | {:ok, 
%Response{status_code: 200, body: body}} = API.get_databases(params) 24 | databases = Poison.decode!(body) 25 | |> Datapoints.format_results() 26 | |> extract_field_from_datapoints("name") 27 | assert db in databases 28 | end 29 | 30 | test "Database can be deleted", %{influx_host: host} do 31 | # given 32 | params = %{host: host} 33 | db = "my_db" 34 | {:ok, %Response{status_code: 200}} = API.create_db(db, params) 35 | # when 36 | {:ok, %Response{status_code: 200}} = API.delete_database(db, params) 37 | # then 38 | {:ok, %Response{status_code: 200, body: body}} = API.get_databases(params) 39 | databases = Poison.decode!(body) 40 | |> Datapoints.format_results() 41 | |> extract_field_from_datapoints("name") 42 | refute db in databases 43 | end 44 | 45 | test "Tag names can be retrieved", 46 | %{influx_host: host, present_db: db, present_tag: tag} do 47 | # given 48 | params = %{host: host, db: db} 49 | # when 50 | {:ok, %Response{status_code: 200, body: body}} = API.get_tag_keys(params) 51 | # then 52 | tags = Poison.decode!(body) 53 | |> Datapoints.format_results() 54 | |> extract_field_from_datapoints("tagKey") 55 | assert tag in tags 56 | end 57 | 58 | test "Measurements can be listed", 59 | %{influx_host: host, present_db: db, present_measurement: m} do 60 | # given 61 | params = %{host: host, db: db} 62 | # when 63 | {:ok, %Response{status_code: 200, body: body}} = 64 | API.get_measurements(params) 65 | # then 66 | measurements = Poison.decode!(body) 67 | |> Datapoints.format_results() 68 | |> extract_field_from_datapoints("name") 69 | assert m in measurements 70 | end 71 | 72 | test "Measurement can be inserted", %{influx_host: host, present_db: db} do 73 | # given 74 | params = %{host: host, db: db, epoch: :ms} 75 | m_name = "my_shiny_measurement" 76 | m = "#{m_name} value=1234" 77 | # when 78 | {:ok, %Response{status_code: 204}} = API.write(m, params) 79 | # then 80 | {:ok, %Response{status_code: 200, body: body}} = 81 | API.get_measurements(params) 82 | 
measurements = Poison.decode!(body)
    |> Datapoints.format_results()
    |> extract_field_from_datapoints("name")
    assert m_name in measurements
  end

  # A single line-protocol payload may carry several measurements separated by
  # newlines; both must be created by one write call.
  test "Grouped measurement can be inserted",
    %{influx_host: host, present_db: db} do
    # given
    params = %{host: host, db: db, epoch: :ms}
    m_name1 = "my_shiny_measurement1"
    m_name2 = "my_shiny_measurement2"
    m = "#{m_name1} value=1234\n #{m_name2} value=1234"
    # when
    {:ok, %Response{status_code: 204}} = API.write(m, params)
    # then
    {:ok, %Response{status_code: 200, body: body}} =
      API.get_measurements(params)

    measurements =
      body
      |> Poison.decode!()
      |> Datapoints.format_results()
      |> extract_field_from_datapoints("name")

    assert m_name1 in measurements
    assert m_name2 in measurements
  end

  test "Simple queries can be performed",
    %{influx_host: host, present_db: db} do
    # given
    params = %{host: host, db: db, epoch: :ms}

    m_name = "measurement1"
    data = [%{"value" => 1000, "time" => 1111},
            %{"value" => 2000, "time" => 2222},
            %{"value" => 3000, "time" => 3333}]
    datapoint = %{"name" => m_name, "data" => data}
    _ = insert_datapoint(params, datapoint)
    {:ok, query} = %Query{measurements: [m_name]} |> Query.build_query
    # when
    {:ok, %Response{status_code: 200, body: body}} = API.query(query, params)
    # then
    datapoints = body |> Poison.decode!() |> Datapoints.format_results()
    values = extract_field_from_datapoints(datapoints, "value")
    times = extract_field_from_datapoints(datapoints, "time")

    # Every inserted point must come back, and nothing extra.
    Enum.each(data,
      fn %{"value" => v, "time" => t} ->
        assert v in values
        assert t in times
      end)
    assert length(data) == length(values)
    assert length(data) == length(times)
  end

  test "Can specify fields to query for",
    %{influx_host: host, present_db: db} do
    # given
    params = %{host: host, db: db, epoch: :ms}

    m_name = "measurement1"
    data = [%{"value" => 1000, "value2" => 1000, "time" => 1111},
            %{"value" => 2000, "value2" => 2000, "time" => 2222},
            %{"value" => 3000, "value2" => 3000, "time" => 3333}]
    datapoint = %{"name" => m_name, "data" => data}
    _ = insert_datapoint(params, datapoint)
    {:ok, query} = %Query{fields: ["value2"],
                          measurements: [m_name]} |> Query.build_query
    # when
    {:ok, %Response{status_code: 200, body: body}} = API.query(query, params)
    # then
    datapoints = body |> Poison.decode!() |> Datapoints.format_results()
    values = extract_field_from_datapoints(datapoints, "value")
    values2 = extract_field_from_datapoints(datapoints, "value2")
    times = extract_field_from_datapoints(datapoints, "time")

    # "value" was not selected, so it must be absent (nil) in every row.
    assert Enum.all?(values, &is_nil/1)
    assert length(data) == length(values2)
    assert length(data) == length(times)
  end

  test "Queries with `from` and `to` can be performed",
    %{influx_host: host, present_db: db} do
    # given
    params = %{host: host, db: db, epoch: :ms}

    m_name = "measurement1"
    data = [%{"value" => 1000, "time" => 1111},
            %{"value" => 2000, "time" => 2222},
            %{"value" => 3000, "time" => 3333}]
    datapoint = %{"name" => m_name, "data" => data}
    _ = insert_datapoint(params, datapoint)
    {:ok, query} = %Query{measurements: [m_name],
                          from: "2000ms",
                          to: "3000ms"} |> Query.build_query
    # when
    {:ok, %Response{status_code: 200, body: body}} = API.query(query, params)
    # then
    datapoints = body |> Poison.decode!() |> Datapoints.format_results()
    values = extract_field_from_datapoints(datapoints, "value")
    times = extract_field_from_datapoints(datapoints, "time")

    # Only the point at t=2222 falls inside the (2000ms, 3000ms) window.
    assert 1 == length(values)
    assert 1 == length(times)
    assert 2000 in values
    assert 2222 in times
  end

| test "Queries can be groupped by time", 189 | %{influx_host: host, present_db: db} do 190 | # given 191 | params = %{host: host, db: db, epoch: :ms} 192 | 193 | m_name = "measurement1" 194 | data = [%{"value" => 1000, "time" => 1200}, 195 | %{"value" => 4000, "time" => 1600}, 196 | %{"value" => 2000, "time" => 2200}, 197 | %{"value" => 8000, "time" => 2600}] 198 | datapoint = %{"name" => m_name, "data" => data} 199 | _ = insert_datapoint(params, datapoint) 200 | {:ok, query} = %Query{fields: ["max(value)"], 201 | measurements: [m_name], 202 | from: "1000ms", 203 | to: "3000ms", 204 | group_by: ["time(1000ms)"]} |> Query.build_query 205 | # when 206 | {:ok, %Response{status_code: 200, body: body}} = API.query(query, params) 207 | # then 208 | datapoints = Poison.decode!(body) |> Datapoints.format_results() 209 | values = extract_field_from_datapoints(datapoints, "max") 210 | times = extract_field_from_datapoints(datapoints, "time") 211 | 212 | assert 2 == length(values) 213 | assert 2 == length(times) 214 | assert 4000 in values and 8000 in values 215 | assert 1000 in times and 2000 in times 216 | end 217 | 218 | test "Queries can be groupped by tag", 219 | %{influx_host: host, present_db: db} do 220 | # given 221 | params = %{host: host, db: db, epoch: :ms} 222 | 223 | m_name = "measurement1" 224 | data = [%{"value" => 1000, "node" => "first", "time" => 1200}, 225 | %{"value" => 4000, "node" => "second", "time" => 1600}, 226 | %{"value" => 2000, "node" => "first", "time" => 2200}, 227 | %{"value" => 8000, "node" => "second", "time" => 2600}] 228 | datapoint = %{"name" => m_name, "data" => data, "tags" => ["node"]} 229 | _ = insert_datapoint(params, datapoint) 230 | {:ok, query} = %Query{fields: ["max(value)"], 231 | measurements: [m_name], 232 | from: "1000ms", 233 | to: "3000ms", 234 | group_by: ["node"]} |> Query.build_query 235 | # when 236 | {:ok, %Response{status_code: 200, body: body}} = API.query(query, params) 237 | # then 238 | datapoints = 
Poison.decode!(body) |> Datapoints.format_results() 239 | values = extract_field_from_datapoints(datapoints, "max") 240 | times = extract_field_from_datapoints(datapoints, "time") 241 | 242 | assert 2 == length(values) 243 | assert 2 == length(times) 244 | assert 2000 in values and 8000 in values 245 | assert 2200 in times and 2600 in times 246 | end 247 | 248 | test "Queries can be retrieved as a stream", 249 | %{influx_host: host, present_db: db} do 250 | # given 251 | params = %{host: host, db: db, epoch: :ms, chunked: true, chunk_size: 1} 252 | m_name = "measurement1" 253 | data = [%{"value" => 1000, "node" => "first", "time" => 1200}, 254 | %{"value" => 4000, "node" => "second", "time" => 1600}, 255 | %{"value" => 2000, "node" => "first", "time" => 2200}, 256 | %{"value" => 8000, "node" => "second", "time" => 2600}] 257 | datapoint = %{"name" => m_name, "data" => data, "tags" => ["node"]} 258 | _ = insert_datapoint(params, datapoint) 259 | {:ok, query} = %Query{measurements: [m_name]} |> Query.build_query 260 | # when 261 | stream = API.stream(query, params) 262 | # then 263 | datapoints = Stream.map(stream, &Poison.decode!/1) 264 | |> Enum.map(&Datapoints.format_results/1) 265 | values = Enum.flat_map(datapoints, 266 | &(extract_field_from_datapoints(&1, "value"))) 267 | times = Enum.flat_map(datapoints, 268 | &(extract_field_from_datapoints(&1, "time"))) 269 | 270 | expected_values = Enum.map(data, &(Map.get(&1, "value"))) 271 | expected_times = Enum.map(data, &(Map.get(&1, "time"))) 272 | 273 | Enum.each(expected_values, 274 | fn (value) -> 275 | assert value in values 276 | end) 277 | Enum.each(expected_times, 278 | fn (time) -> 279 | assert time in times 280 | end) 281 | end 282 | 283 | @tag capture_log: true # Silence GenServer crash 284 | test "Queries to invalid InfluxDB returns error while retreving stream" do 285 | # given 286 | params = %{host: "http://nonexisting", db: "some_db", epoch: :ms, 287 | chunked: true, chunk_size: 1} 288 | {:ok, query} = 
%Query{measurements: ["m"]} |> Query.build_query 289 | # when 290 | stream = API.stream(query, params) 291 | # then 292 | assert {:noproc, _} = catch_exit(Stream.run(stream)) 293 | end 294 | 295 | defp extract_field_from_datapoints(datapoints, field) do 296 | get_in(datapoints, [Access.all(), "data", Access.all(), field]) 297 | |> List.flatten() 298 | end 299 | 300 | defp insert_datapoint(params, datapoint) do 301 | m = Datapoints.to_line_protocol(datapoint) |> Enum.join("\n") 302 | {:ok, %Response{status_code: 204}} = API.write(m, params) 303 | end 304 | 305 | end 306 | -------------------------------------------------------------------------------- /test/flex/datapoints_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Flex.DatapointsTest do 2 | use Flex.TestCase 3 | 4 | alias Flex.Datapoints 5 | 6 | for query_fun <- [&__MODULE__.from_query/3, 7 | &__MODULE__.get_tags/3, 8 | &__MODULE__.get_measurements/3] do 9 | @tag :external 10 | test "InfluxDB response from #{inspect(query_fun)} map can be formatted", 11 | %{influx_host: host, present_db: db, present_measurement: m} do 12 | response = unquote(query_fun).(host, db, m) 13 | measurements = Datapoints.format_results(response) 14 | Enum.each(measurements, 15 | fn (measurement) -> 16 | assert "name" in Map.keys(measurement) 17 | assert "data" in Map.keys(measurement) 18 | end 19 | ) 20 | end 21 | end 22 | 23 | @tag :external 24 | test "Datapoint can have added tags", 25 | %{influx_host: host, present_db: db, present_measurement: m, present_tag: t} 26 | do 27 | [measurement] = from_query(host, db, m) |> Datapoints.format_results() 28 | tags = get_tags(host, db) |> Datapoints.format_results() 29 | 30 | measurement_with_tags = Datapoints.add_tags_to_datapoint(measurement, tags) 31 | 32 | assert "name" in Map.keys(measurement_with_tags) 33 | assert "data" in Map.keys(measurement_with_tags) 34 | assert "tags" in Map.keys(measurement_with_tags) 35 | assert t in 
measurement_with_tags["tags"] 36 | end 37 | 38 | test "InfluxDB line protocol can be formed from measurement map" do 39 | m = %{ 40 | "name" => "m", 41 | "data" => [ 42 | %{"key" => "value", "time" => 12_345}, 43 | %{"key" => "value", "some_tag" => "some_tag_value", "time" => 12_346}, 44 | ], 45 | "tags" => ["some_tag"] 46 | } 47 | 48 | lines = Datapoints.to_line_protocol(m) 49 | assert 2 = length(lines) 50 | assert "m key=\"value\" 12345" in lines 51 | assert "m,some_tag=some_tag_value key=\"value\" 12346" in lines 52 | 53 | end 54 | 55 | def from_query(host, db, measurement) do 56 | query = 57 | URI.encode_query(%{"q" => "SELECT * FROM \"#{measurement}\"", "db" => db}) 58 | url = host <> "/query?#{query}" 59 | http_get(url) 60 | end 61 | 62 | def get_tags(host, db, _ \\ "") do 63 | query = URI.encode_query(%{"q" => "SHOW TAG KEYS", "db" => db}) 64 | url = host <> "/query?#{query}" 65 | http_get(url) 66 | end 67 | 68 | def get_measurements(host, db, _ \\ "") do 69 | query = URI.encode_query(%{"q" => "SHOW MEASUREMENTS", "db" => db}) 70 | url = host <> "/query?#{query}" 71 | http_get(url) 72 | end 73 | 74 | defp http_get(url) do 75 | %HTTPoison.Response{status_code: 200, body: body} = HTTPoison.get!(url) 76 | Poison.decode!(body) 77 | end 78 | 79 | end 80 | -------------------------------------------------------------------------------- /test/flex/query_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Flex.QueryTest do 2 | use ExUnit.Case 3 | 4 | alias Flex.Query 5 | 6 | test "Query without measurements is invalid" do 7 | query1 = %Query{measurements: []} 8 | query2 = %Query{measurements: nil} 9 | 10 | assert {:error, _} = Query.build_query(query1) 11 | assert {:error, _} = Query.build_query(query2) 12 | end 13 | 14 | test "Query requires at least one measurement to be valid" do 15 | query = %Query{measurements: ["m"]} 16 | 17 | assert {:ok, _} = Query.build_query(query) 18 | end 19 | 20 | test "Query without 
fields specification, SELECTS all fields" do 21 | query = %Query{fields: nil, measurements: ["m"]} 22 | 23 | assert {:ok, query} = Query.build_query(query) 24 | assert "SELECT *" <> _ = query 25 | end 26 | 27 | test "Query can specify multiple fields to select" do 28 | query = %Query{fields: ["f1", "f2"], measurements: ["m"]} 29 | 30 | assert {:ok, query} = Query.build_query(query) 31 | assert "SELECT f1,f2" <> _ = query 32 | end 33 | 34 | test "Field can hold expressions" do 35 | query = %Query{fields: ["max(value) - 20"], measurements: ["m"]} 36 | 37 | assert {:ok, query} = Query.build_query(query) 38 | assert "SELECT max(value) - 20" <> _ = query 39 | end 40 | 41 | test "Where can hold simple conditions" do 42 | query = %Query{measurements: ["m"], where: [{"node", "node-1", :=}]} 43 | 44 | assert {:ok, query} = Query.build_query(query) 45 | assert [_, "node = 'node-1'"] = String.split(query, "WHERE ") 46 | end 47 | 48 | test "Where can hold expressions" do 49 | query = %Query{measurements: ["m"], 50 | where: [{"time", {:expr, "now() - 2h"}, :<}]} 51 | 52 | assert {:ok, query} = Query.build_query(query) 53 | assert [_, "time < now() - 2h"] = String.split(query, "WHERE ") 54 | end 55 | 56 | for unit <- ["u", "µ", "ms", "s", "m", "h", "d", "w"] do 57 | test "Duration unit '#{unit}' is not escaped" do 58 | time_value = "20" <> unquote(unit) 59 | query = %Query{measurements: ["m"], 60 | where: [{"time", time_value, :<}]} 61 | 62 | assert {:ok, query} = Query.build_query(query) 63 | assert [_, "time < " <> ^time_value] = String.split(query, "WHERE ") 64 | end 65 | end 66 | 67 | test "Multiple conditions are joined with AND" do 68 | query = %Query{measurements: ["m"], 69 | where: [ 70 | {"time", {:expr, "now() - 2h"}, :<}, 71 | {"node", "node-1", :=} 72 | ]} 73 | 74 | assert {:ok, query} = Query.build_query(query) 75 | assert [_, where_clause] = String.split(query, "WHERE ") 76 | assert "time < now() - 2h and node = 'node-1'" = where_clause 77 | end 78 | 79 | test 
"'from' and 'to' fields are converted into WHERE conditions" do 80 | query = %Query{measurements: ["m"], 81 | from: "now() - 2d", 82 | to: "now() - 1d" 83 | } 84 | assert {:ok, query} = Query.build_query(query) 85 | assert [_, where_clause] = String.split(query, "WHERE ") 86 | conditions = String.split(where_clause, " and ") 87 | assert "time > now() - 2d" in conditions 88 | assert "time < now() - 1d" in conditions 89 | end 90 | 91 | test "Query can hold GROUP BY" do 92 | query = %Query{measurements: ["m"], 93 | group_by: ["node"]} 94 | 95 | assert {:ok, query} = Query.build_query(query) 96 | assert [_, group_by_clause] = String.split(query, "GROUP BY ") 97 | assert "\"node\"" = group_by_clause 98 | end 99 | 100 | test "GROUP BY time is incorrect, when there is no WHERE time condition" do 101 | query = %Query{measurements: ["m"], 102 | group_by: ["time(2d)"]} 103 | 104 | assert {:error, _} = Query.build_query(query) 105 | end 106 | 107 | test "GROUP BY time is correct, when there is WHERE time condition" do 108 | query = %Query{measurements: ["m"], 109 | from: "now() - 2d", 110 | group_by: ["time(2d)"]} 111 | 112 | assert {:ok, query} = Query.build_query(query) 113 | assert [_, group_by_clause] = String.split(query, "GROUP BY ") 114 | assert "time(2d)" = group_by_clause 115 | end 116 | 117 | test "GROUP BY fill is correct even if is not the last element of group_by list" do 118 | query = %Query{fields: ["f1"], 119 | measurements: ["m"], 120 | group_by: ["time(2d)", "fill(null)", "sample_tag"], 121 | from: "now() - 2d", 122 | to: "now() - 1d"} 123 | assert {:ok, query} = Query.build_query(query) 124 | assert String.ends_with?(query, "fill(null)") 125 | end 126 | 127 | for opt <- ["10", "10.1", "null", "none", "previous", "linear"] do 128 | test "GROUP BY query is properly built, when fill option is '#{opt}'." 
do 129 | fill = "fill(" <> unquote(opt) <> ")" 130 | query = %Query{fields: ["f1"], 131 | measurements: ["m"], 132 | group_by: ["time(5m)", fill], 133 | from: "now() - 2d", 134 | to: "now() - 1d"} 135 | assert {:ok, query} = Query.build_query(query) 136 | assert String.ends_with?(query, fill) 137 | end 138 | end 139 | 140 | test "GROUP BY fill is incorrect when fill option is not valid" do 141 | query = %Query{fields: ["f1"], 142 | measurements: ["m"], 143 | group_by: ["time(2d)", "fill(invalid_opt)"], 144 | from: "now() - 2d", 145 | to: "now() - 1d"} 146 | assert {:error, _} = Query.build_query(query) 147 | end 148 | end 149 | -------------------------------------------------------------------------------- /test/test_helper.exs: -------------------------------------------------------------------------------- 1 | ExUnit.start() 2 | --------------------------------------------------------------------------------