├── docs
│   ├── _config.yml
│   ├── search-docs.html
│   ├── integration.html
│   ├── influxdb.vibe.urlEncode.html
│   ├── influxdb.api.Response.html
│   ├── influxdb.api.Database.html
│   ├── influxdb.api.DatabaseImpl.drop.html
│   ├── influxdb.api.Result.html
│   ├── influxdb.vibe.manage.html
│   ├── index.html
│   ├── influxdb.html
│   ├── influxdb.api.Measurement.html
│   ├── influxdb.api.DatabaseImpl.manage.html
│   ├── influxdb.vibe.write.html
│   ├── influxdb.vibe.query.html
│   ├── influxdb.api.DatabaseImpl.query.html
│   ├── influxdb.mir.html
│   ├── integration.asdf.shouldBeSameJsonAs.html
│   ├── influxdb.api.influxSysTime.html
│   ├── influxdb.vibe.html
│   ├── script.js
│   ├── influxdb.api.toInfluxDateTime.1.html
│   ├── influxdb.api.toInfluxDateTime.2.html
│   ├── influxdb.api.shouldEqualLine.html
│   ├── influxdb.api.DatabaseImpl.insert.1.html
│   ├── influxdb.api.DatabaseImpl.insert.2.html
│   ├── influxdb.api.html
│   ├── integration.asdf.html
│   ├── integration.vibe.html
│   ├── influxdb.api.DatabaseImpl.insert.3.html
│   ├── influxdb.api.DatabaseImpl.insert.4.html
│   ├── influxdb.mir.toMirSeries.html
│   ├── integration.api.html
│   ├── influxdb.api.MeasurementSeries.html
│   └── integration.curl.html
├── mir-integration-example
│   ├── .gitignore
│   ├── dub.json
│   ├── dub.selections.json
│   └── source
│       └── app.d
├── integration
│   ├── source
│   │   └── integration
│   │       ├── common.d
│   │       ├── it.d
│   │       ├── asdf.d
│   │       ├── vibe.d
│   │       ├── api.d
│   │       └── curl.d
│   ├── dub.selections.json
│   └── dub.json
├── .gitignore
├── dub.selections.json
├── bin
│   └── ut.d
├── makedocs.sh
├── makedocs_cached.sh
├── source
│   └── influxdb
│       ├── package.d
│       ├── http.d
│       └── mir.d
├── .skeleton.html
├── LICENSE
├── dub.json
├── .github
│   └── workflows
│       └── ci.yml
└── README.md
/docs/_config.yml:
--------------------------------------------------------------------------------
1 | theme: jekyll-theme-minimal
--------------------------------------------------------------------------------
/mir-integration-example/.gitignore:
--------------------------------------------------------------------------------
1 | mir-integration
--------------------------------------------------------------------------------
/integration/source/integration/common.d:
--------------------------------------------------------------------------------
1 | module integration.common;
2 |
3 | enum influxURL = "http://localhost:8086";
4 |
--------------------------------------------------------------------------------
/mir-integration-example/dub.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "mir-integration",
3 | "dependencies": {
4 | "influx-d": {"path": "../"},
5 | "mir-algorithm": "*"
6 | }
7 | }
8 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .dub
2 | docs.json
3 | __dummy.html
4 | *.o
5 | *.obj
6 | __test__*__
7 | bin
8 | *.lib
9 | *.dll
10 | *.a
11 | mir-integration/mir-integration
12 | mir-integration/dub.selections.json
13 | *.sublime-project
14 | influx-d-test-unittest-ssl-1.1
15 |
--------------------------------------------------------------------------------
/integration/source/integration/it.d:
--------------------------------------------------------------------------------
1 | import unit_threaded;
2 |
3 | int main(string[] args)
4 | {
5 | return args.runTests!(
6 | "integration.curl",
7 | "integration.vibe",
8 | "integration.asdf",
9 | "integration.api",
10 | );
11 | }
12 |
--------------------------------------------------------------------------------
/dub.selections.json:
--------------------------------------------------------------------------------
1 | {
2 | "fileVersion": 1,
3 | "versions": {
4 | "asdf": "0.7.2",
5 | "automem": "0.6.9",
6 | "cachetools": "0.4.1",
7 | "mir-algorithm": "3.12.47",
8 | "mir-core": "1.7.3",
9 | "requests": "2.2.0",
10 | "silly": "1.1.1",
11 | "test_allocator": "0.3.4",
12 | "unit-threaded": "1.0.11"
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/integration/dub.selections.json:
--------------------------------------------------------------------------------
1 | {
2 | "fileVersion": 1,
3 | "versions": {
4 | "asdf": "0.7.2",
5 | "automem": "0.6.9",
6 | "cachetools": "0.4.1",
7 | "mir-algorithm": "3.12.47",
8 | "mir-core": "1.7.3",
9 | "requests": "2.2.0",
10 | "silly": "1.1.1",
11 | "test_allocator": "0.3.4",
12 | "unit-threaded": "1.0.11"
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/mir-integration-example/dub.selections.json:
--------------------------------------------------------------------------------
1 | {
2 | "fileVersion": 1,
3 | "versions": {
4 | "asdf": "0.7.2",
5 | "automem": "0.6.9",
6 | "cachetools": "0.4.1",
7 | "mir-algorithm": "3.12.47",
8 | "mir-core": "1.7.3",
9 | "requests": "2.2.0",
10 | "silly": "1.1.1",
11 | "test_allocator": "0.3.4",
12 | "unit-threaded": "1.0.11"
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/bin/ut.d:
--------------------------------------------------------------------------------
 1 | //influxdb.api,influxdb,influxdb.http
2 | //Automatically generated by unit_threaded.gen_ut_main, do not edit by hand.
3 | import unit_threaded;
4 |
5 | int main(string[] args)
6 | {
7 | return args.runTests!(
8 | "influxdb.api",
9 | "influxdb",
10 | "influxdb.http"
11 | );
12 | }
13 |
--------------------------------------------------------------------------------
/makedocs.sh:
--------------------------------------------------------------------------------
1 | set -e -o pipefail
2 | kal_project_dir=`pwd`
3 | source_dir=${kal_project_dir}
4 | echo generating documents for ${kal_project_dir}
5 | mkdir -p docs
6 | cd ~
7 | mkdir -p tmp
8 | cd tmp
9 | rm -rf adrdox
10 | git clone https://github.com/adamdruppe/adrdox
11 | cp ${kal_project_dir}/.skeleton.html adrdox/skeleton.html
12 | cd adrdox
13 | make
14 | ./doc2 -i ${source_dir}
15 | mv generated-docs/* ${kal_project_dir}/docs
16 | cp ${kal_project_dir}/docs/influxdb.html ${kal_project_dir}/docs/index.html
17 | cd ${kal_project_dir}
18 | echo succeeded - docs generated
19 |
--------------------------------------------------------------------------------
/makedocs_cached.sh:
--------------------------------------------------------------------------------
1 | set -e -o pipefail
2 | kal_project_dir=`pwd`
3 | source_dir=${kal_project_dir}
4 | echo generating documents for ${kal_project_dir}
5 | mkdir -p docs
6 | cd ~
7 | mkdir -p tmp
8 | cd tmp
9 | #rm -rf adrdox
10 | #git clone https://github.com/adamdruppe/adrdox
11 | cp ${kal_project_dir}/.skeleton.html adrdox/skeleton.html
12 | cd adrdox
13 | #make
14 | ./doc2 -i ${source_dir}
15 | mv generated-docs/* ${kal_project_dir}/docs
16 | cp ${kal_project_dir}/docs/influxdb.html ${kal_project_dir}/docs/index.html
17 | cd ${kal_project_dir}
18 | echo succeeded - docs generated
19 |
--------------------------------------------------------------------------------
/source/influxdb/package.d:
--------------------------------------------------------------------------------
1 | /**
2 | [InfluxDB](https://www.influxdata.com/products/) is an open source time-series database.
 3 | It supports high write loads and large data sets, conserves space through downsampling (automatically expiring and deleting
 4 | unwanted data), and provides backup and restore. InfluxDB also makes it easy to analyze data with an easy-to-use, SQL-like query language.
5 |
 6 | This library implements a convenience wrapper API for InfluxDB.
7 |
8 | Authors: Atila Neves (Kaleidic Associates Advisory Limited)
9 |
10 | Generated documentation:
11 | http://influxdb.code.kaleidic.io/influxdb.html
12 |
13 | */
14 | module influxdb;
15 |
16 | public import influxdb.api;
17 |
18 | version(Have_mir_algorithm)
19 | public import influxdb.mir;
20 |
--------------------------------------------------------------------------------
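Query results returned by this wrapper follow a fixed shape: a `Response` holds `Result`s, each `Result` holds `MeasurementSeries`, and each series exposes its rows. A minimal sketch of walking that shape (the URL, database name, measurement, and column name are placeholders, and the snippet assumes a reachable InfluxDB instance):

```d
import influxdb;
import std.stdio: writeln;

void dumpTemperatures()
{
    const database = Database("http://localhost:8086", "mydb");
    const response = database.query("SELECT * FROM cpu");

    // Response -> results -> series -> rows; a result with no matching data
    // simply has an empty `series` array.
    foreach (result; response.results)
        foreach (series; result.series)
            foreach (i; 0 .. series.rows.length)
            {
                const row = series.rows[i];
                writeln(row.time, ": ", row["temperature"]);
            }
}
```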
/integration/dub.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "integration",
3 | "dependencies": {
4 | "unit-threaded": "*",
5 | "influx-d": { "path": "../" }
6 | },
7 | "targetPath": "bin",
8 | "targetType": "executable",
9 | "configurations": [
10 | {
11 | "name": "integration",
12 | "targetName": "it",
13 | },
14 | {
15 | "name": "integration-mir",
16 | "targetName": "it-mir",
17 | "dependencies": {
18 | "mir-algorithm": "*"
19 | }
20 | },
21 | {
22 | "name": "integration-ssl-1.1",
23 | "targetName": "it-ssl-1.1",
24 | "dependencies": {
25 | "mir-algorithm": "*"
26 | },
27 | "libs-posix": [
28 | "ssl",
29 | "crypto",
30 | ":libssl.so.1.0.0",
31 | ":libcrypto.so.1.0.0"
32 | ]
33 | }
34 | ]
35 | }
36 |
--------------------------------------------------------------------------------
/docs/search-docs.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
19 |
20 |
21 |
22 | The "offline" searcher requires Javascript.
23 | To get search without JS, build the `locate.d` file (see the adrdox makefile) and use it as a CGI program on a regular web server, or build with -version=embedded_httpd to embed its own server and run that.
24 | Then, configure the server to call that script instead of loading this file, or change your skeleton.html search form action to point to it.
25 |
26 |
27 |
--------------------------------------------------------------------------------
/.skeleton.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Documentation
5 |
6 |
7 |
8 |
9 |
10 |
12 |
13 |
14 |
15 |
29 |
35 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2017, Atila Neves
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | * Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | * Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | * Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/dub.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "influx-d",
3 | "authors": [
4 | "Atila Neves",
5 | "Ilya Yaroshenko"
6 | ],
7 | "dependencies": {
8 | "requests": "~>2.2.0",
9 | "asdf": ">=0.1.1 <0.8.0"
10 | },
11 | "description": "InfluxDB wrapper",
12 | "copyright": "Copyright © 2017, Kaleidic Associates Advisory Limited",
13 | "license": "BSD 3-clause",
14 | "targetType": "library",
15 | "sourcePaths": ["source"],
16 |
17 | "configurations": [
18 |
19 | {"name": "library" },
20 | {
21 | "name": "library-ssl-1.1",
22 | "libs-posix": [
23 | ":libssl.so.1.0.0",
24 | ":libcrypto.so.1.0.0"
25 | ]
26 | },
27 |
28 | {
29 | "name": "unittest",
30 | "targetType": "executable",
31 | "targetName": "bin/ut",
32 | "mainSourceFile": "bin/ut.d",
33 | "dependencies": {
34 | "unit-threaded": "*"
35 | },
36 | "versions": ["unitUnthreaded", "Test_InfluxD"]
37 | },
38 |
39 | {
40 | "name": "unittest-ssl-1.1",
41 | "targetType": "executable",
42 | "targetName": "bin/ut",
43 | "mainSourceFile": "bin/ut.d",
44 | "dependencies": {
45 | "unit-threaded": "*"
46 | },
47 | "libs-posix": [
48 | ":libssl.so.1.0.0",
49 | ":libcrypto.so.1.0.0"
50 | ],
51 | "versions": ["unitUnthreaded", "Test_InfluxD"]
52 | }
53 | ]
54 |
55 | }
56 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI Pipeline
2 | on: [push, pull_request]
3 |
4 | jobs:
5 | unit-tests:
6 | name: Unit tests
7 | strategy:
8 | matrix:
9 | os:
10 | - ubuntu-24.04
11 | - windows-2022
12 | #- macos-13
13 | dc:
14 | - dmd-2.098.1
15 | - dmd-2.111.0
16 | - ldc-1.28.1
17 | - ldc-1.41.0
18 |
19 | runs-on: ${{ matrix.os }}
20 | steps:
21 | - uses: actions/checkout@v4
22 |
23 | - name: Install D compiler
24 | uses: dlang-community/setup-dlang@v2
25 | with:
26 | compiler: ${{ matrix.dc }}
27 |
28 | - name: Unit tests
29 | run: |
30 | dub build
31 | dub test
32 | dub run --root mir-integration-example
33 |
34 | - uses: codecov/codecov-action@v5.1.2
35 |
36 | integration-test:
37 | name: Integration tests
38 | strategy:
39 | matrix:
40 | os:
41 | - ubuntu-24.04
42 | dc:
43 | - dmd-2.098.1
44 | - dmd-2.111.0
45 | - ldc-1.28.1
46 | - ldc-1.41.0
47 |
48 | runs-on: ${{ matrix.os }}
49 | steps:
50 | - uses: actions/checkout@v4
51 |
52 | - name: Install D compiler
53 | uses: dlang-community/setup-dlang@v2
54 | with:
55 | compiler: ${{ matrix.dc }}
56 |
57 | - name: Install system dependencies
58 | run: |
59 | sudo apt-get update
60 | sudo -E apt-get -yq --no-install-suggests --no-install-recommends --force-yes install influxdb libevent-dev
61 |
62 | - name: Integration tests
63 | run: |
64 | sudo service influxdb start
65 | cd integration
66 | dub run --build=unittest -c integration
67 | dub run --build=unittest -c integration-mir
68 |
--------------------------------------------------------------------------------
/source/influxdb/http.d:
--------------------------------------------------------------------------------
1 | /**
 2 |     This module implements utility functions for the InfluxDB REST API
3 |
4 | Authors: Atila Neves (Kaleidic Associates Advisory Limited)
5 |
6 | Generated documentation:
7 | http://influxdb.code.kaleidic.io/influxdb.html
8 |
9 | */
10 | module influxdb.http;
11 |
12 | import requests : Request, Response, urlEncoded;
13 |
14 |
15 | ///
16 | void manage(in string url, in string str) {
17 | httpPostRequest(url ~ "/query", ["q": str]);
18 | }
19 |
20 | ///
21 | string query(in string url, in string db, in string query) {
22 | return httpGetRequest(url ~ "/query", ["db": db, "q": query]);
23 | }
24 |
25 | ///
26 | void write(in string url, in string db, in string line) {
27 | httpPostRequest(url ~ "/write?db=" ~ urlEncoded(db), cast(ubyte[]) line);
28 | }
29 |
30 |
31 | private string httpGetRequest(in string url,
32 | string[string] queryParams,
33 | in string file = __FILE__,
34 | in size_t line = __LINE__) {
35 | auto res = Request().get(url, queryParams);
36 | return processResponse(res, file, line);
37 | }
38 |
39 | private string httpPostRequest(in string url,
40 | string[string] postParams,
41 | in string file = __FILE__,
42 | in size_t line = __LINE__) {
43 | auto res = Request().post(url, postParams);
44 | return processResponse(res, file, line);
45 | }
46 |
47 | private string httpPostRequest(in string url,
48 | in ubyte[] data,
49 | in string file = __FILE__,
50 | in size_t line = __LINE__) {
51 | auto res = Request().post(url, data, "application/x-www-form-urlencoded");
52 | return processResponse(res, file, line);
53 | }
54 |
55 | private string processResponse(Response response, in string file, in size_t line) {
56 | const ret = response.responseBody.toString();
57 | if (response.code < 200 || response.code > 299)
58 | throw new Exception(ret, file, line);
59 | return ret;
60 | }
61 |
--------------------------------------------------------------------------------
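The three helpers above map one-to-one onto InfluxDB's HTTP endpoints: `manage` POSTs a command to `/query`, `query` GETs `/query` with `db` and `q` parameters, and `write` POSTs line protocol to `/write?db=...`. A minimal sketch of driving them directly, mirroring integration/source/integration/vibe.d (the URL and database name are illustrative and assume a local InfluxDB instance):

```d
import influxdb.http: manage, query, write;

void lowLevelExample()
{
    enum url = "http://localhost:8086";

    manage(url, "CREATE DATABASE exampledb");                      // POST /query
    write(url, "exampledb", "cpu,host=a temperature=42");          // POST /write?db=exampledb
    const jsonText = query(url, "exampledb", "SELECT * FROM cpu"); // GET /query, returns the raw JSON body
    manage(url, "DROP DATABASE exampledb");
}
```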
/integration/source/integration/asdf.d:
--------------------------------------------------------------------------------
1 | /**
 2 |     This module implements integration tests for the InfluxDB API
3 |
4 | Authors: Atila Neves (Kaleidic Associates Advisory Limited)
5 |
6 | Generated documentation:
7 | http://influxdb.code.kaleidic.io/influxdb.html
8 |
9 | */
10 | module integration.asdf;
11 |
12 | import asdf;
13 | import influxdb.api;
14 | import unit_threaded;
15 |
16 |
17 | ///
18 | @("deserialise Response")
19 | @system unittest {
20 | enum jsonString = `
21 | {
22 | "results": [{
23 | "series": [{
24 | "columns": ["time", "othervalue", "tag1", "tag2", "value"],
25 | "name": "myname",
26 | "values": [
27 | ["2015-06-11T20:46:02Z", 4, "toto", "titi", 2],
28 | ["2017-03-14T23:15:01.06282785Z", 3, "letag", "othertag", 1]
29 | ]
30 | }],
31 | "statement_id": 42
32 | }]
33 | }
34 | `;
35 |
36 | jsonString.deserialize!Response.shouldEqual(
37 | Response(
38 | [
39 | Result(
40 | [
41 | MeasurementSeries(
42 | "myname", //name
43 | ["time", "othervalue", "tag1", "tag2", "value"], //columns
44 | //values
45 | [
46 | ["2015-06-11T20:46:02Z", "4", "toto", "titi", "2"],
47 | ["2017-03-14T23:15:01.06282785Z", "3", "letag", "othertag", "1"],
48 | ]
49 | ),
50 | ],
51 |
52 | 42, // statement_id
53 | )
54 | ]
55 | )
56 | );
57 | }
58 |
59 |
60 | /**
61 |    Checks that two JSON strings represent the same document by comparing their pretty-printed forms.
62 | */
63 | void shouldBeSameJsonAs(in string actual,
64 | in string expected,
65 | in string file = __FILE__,
66 | in size_t line = __LINE__)
67 | @trusted // parseJSON
68 | {
69 | import std.json;
70 | actual.parseJSON.toPrettyString.shouldEqual(expected.parseJSON.toPrettyString, file, line);
71 | }
72 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # influxdb-dlang-wrapper
2 |
3 | [](https://github.com/symmetryinvestments/influx-d/actions)
4 | [](https://codecov.io/gh/symmetryinvestments/influx-d)
5 |
6 | D programming language wrapper for InfluxDB.
7 |
8 | Generated documentation
9 | -----------------------
10 |
11 | [Integration Tests/Examples](http://influxdb.code.kaleidic.io/integration.html)
12 |
13 | [API documentation](http://influxdb.code.kaleidic.io/influxdb.html)
14 |
15 | Getting started tutorial (hopefully self-explanatory)
16 | -----------------------------------------------------
17 |
18 | ```d
19 |
20 | import influxdb;
21 |
22 | // this will connect to InfluxDB and create the `mydb` database if it doesn't already exist
23 | const database = Database("http://localhost:8086" /*URL*/, "mydb" /*DB name*/);
24 |
25 | // no explicit timestamp
26 | database.insert(Measurement("cpu" /*name*/,
27 | ["tag1": "foo"] /*tags*/,
28 | ["temperature": InfluxValue(42)] /*values*/));
29 | // `insert` can also take `Measurement[]` or a variadic number of `Measurement`s
30 | // Measurement also has a constructor that doesn't take tags:
31 | // auto m = Measurement("cpu", ["temperature": InfluxValue(42)]);
32 |
33 | // explicit timestamp
34 | import std.datetime: Clock, DateTime, SysTime, UTC;
35 | database.insert(Measurement("cpu",
36 | ["tag1": "foo"],
37 | ["temperature": InfluxValue(68)],
38 | Clock.currTime));
39 |
40 | // this will have the two measurements given the code above
41 | const response = database.query("SELECT * FROM cpu");
42 |
43 | // Accessing the response.
44 | // The assertions below illustrate the shape of a response: one result with a
45 | // single series (the data shown is illustrative, not the cpu data inserted above).
46 |
47 | assert(response.results.length == 1);
48 | const result = response.results[0];
49 | assert(result.statement_id == 0);
50 | assert(result.series.length == 1);
51 | const series = result.series[0];
52 | assert(series.rows.length == 1);
53 | const row = series.rows[0];
54 |
55 | assert(row.time == SysTime(DateTime(2015, 06, 11, 20, 46, 2), UTC()));
56 | assert(row["foo"] == "bar");
57 |
58 | assert(series ==
59 | MeasurementSeries(
60 | "lename", //name
61 | ["time", "othervalue", "tag1", "tag2", "value"], //column names
62 | //values
63 | [
64 | ["2015-06-11T20:46:02Z", "4", "toto", "titi", "2"],
65 | ["2017-03-14T23:15:01.06282785Z", "3", "letag", "othertag", "1"],
66 | ]
67 | ));
68 | ```
69 |
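Testing without a running InfluxDB
----------------------------------

`Database` is an alias of `DatabaseImpl` with the library's HTTP functions plugged in,
so tests can inject their own `manage`/`query`/`write` functions instead. A minimal
sketch, mirroring `mir-integration-example/source/app.d` (the URL and database name
are placeholders):

```d
import influxdb;

string[string][] writes;

alias TestDatabase = DatabaseImpl!(
    (url, cmd) {},                          // manage: ignore admin commands
    (url, db, query) => ``,                 // query: return an empty response body
    (url, db, line) => writes ~= ["url": url, "db": db, "line": line] // write: record the line protocol
);

const database = TestDatabase("http://db.com", "testdb");
database.insert(Measurement("cpu", ["temperature": InfluxValue(42)]));
// `writes` now holds the line-protocol payload that would have been sent
```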
--------------------------------------------------------------------------------
/docs/integration.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | integration (integration)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
integration
api module integration.api
This module implements integration tests for Influx API
asdf module integration.asdf
This module implements integration tests for Influx API
curl module integration.curl
This module implements integration tests for InfluxDB. As such, they record in
31 | code the assumptions made with regards to the HTTP API. Given that these tests
32 | pass, the unit tests are sufficient to guarantee correct behaviour.
vibe module integration.vibe
This module makes sure that using vibe corresponds in using the HTTP API
33 | correctly.
34 |
35 |
36 |
37 |
38 |
39 |
--------------------------------------------------------------------------------
/docs/influxdb.vibe.urlEncode.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | urlEncode (influxdb.vibe.urlEncode)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
31 |
influxdb vibe
32 |
functions
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/docs/influxdb.api.Response.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Response (influxdb.api.Response)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
Response
struct Response {
Result [] results
}
31 |
influxdb api
32 |
aliases functions structs
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/integration/source/integration/vibe.d:
--------------------------------------------------------------------------------
1 | /**
 2 |     This module makes sure that using vibe corresponds to using the HTTP API
3 | correctly.
4 | */
5 | module integration.vibe;
6 |
7 | import unit_threaded;
8 | import integration.common: influxURL;
9 | import influxdb.http: manage, query, write;
10 | import std.json: JSONValue, parseJSON;
11 |
12 |
13 | ///
14 | @Serial
15 | @("manage")
16 | unittest {
17 | manage(influxURL, "DROP DATABASE test_vibe_db");
18 | wait;
19 | manage(influxURL, "CREATE DATABASE test_vibe_db");
20 | wait;
21 | manage(influxURL, "DROP DATABASE test_vibe_db");
22 | wait;
23 | }
24 |
25 |
26 | ///
27 | @Serial
28 | @("query empty database")
29 | unittest {
30 | manage(influxURL, "DROP DATABASE test_vibe_db");
31 | wait;
32 | manage(influxURL, "CREATE DATABASE test_vibe_db");
33 | wait;
34 | scope(exit) {
35 | manage(influxURL, "DROP DATABASE test_vibe_db");
36 | wait;
37 | }
38 |
39 | const json = query(influxURL, "test_vibe_db", "SELECT * from foo").parseJSON;
40 | JSONValue expected;
41 | JSONValue result;
42 | result["statement_id"] = JSONValue(0);
43 | expected["results"] = [result];
44 | json.shouldEqual(expected);
45 | }
46 |
47 | /**
48 | Example of a response (prettified):
49 | {
50 | "results": [{
51 | "series": [{
52 | "columns": ["time", "othervalue", "tag1", "tag2", "value"],
53 | "name": "foo",
54 | "values": [
55 | ["2015-06-11T20:46:02Z", 4, "toto", "titi", 2],
56 | ["2017-03-14T23:15:01.06282785Z", 3, "letag", "othertag", 1]
57 | ]
58 | }],
59 | "statement_id": 0
60 | }]
61 | }
62 | */
63 |
64 |
65 | ///
66 | @Serial
67 | @("query database with data")
68 | unittest {
69 | import std.algorithm: map;
70 |
71 | manage(influxURL, "DROP DATABASE test_vibe_db");
72 | wait;
73 | manage(influxURL, "CREATE DATABASE test_vibe_db");
74 | wait;
75 | scope(exit) {
76 | manage(influxURL, "DROP DATABASE test_vibe_db");
77 | wait;
78 | }
79 |
80 | write(influxURL, "test_vibe_db", "foo,tag1=letag,tag2=othertag value=1,othervalue=3");
81 | write(influxURL, "test_vibe_db", "foo,tag1=toto,tag2=titi value=2,othervalue=4 1434055562000000000");
82 | wait;
83 |
84 | {
85 | const json = query(influxURL, "test_vibe_db", "SELECT * from foo").parseJSON;
86 | const result = json.object["results"].array[0].object;
87 | const table = result["series"].array[0].object;
88 | table["columns"].array.map!(a => a.str).shouldBeSameSetAs(
89 | ["time", "othervalue", "tag1", "tag2", "value"]);
90 | table["name"].str.shouldEqual("foo");
91 | table["values"].array.length.shouldEqual(2);
92 | }
93 | }
94 |
95 |
96 | private void wait() {
97 | import core.thread;
98 | Thread.sleep(10.msecs);
99 | }
100 |
--------------------------------------------------------------------------------
/docs/influxdb.api.Database.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Database (influxdb.api.Database)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
31 |
influxdb api
32 |
aliases functions structs
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/docs/influxdb.api.DatabaseImpl.drop.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | DatabaseImpl.drop (influxdb.api.DatabaseImpl.drop)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
DatabaseImpl.drop
? struct DatabaseImpl (alias manageFunc, alias queryFunc, alias writeFunc)
31 |
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/docs/influxdb.api.Result.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Result (influxdb.api.Result)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
Result
struct Result {
MeasurementSeries [] series
int statement_id
}
31 |
influxdb api
32 |
aliases functions structs
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/mir-integration-example/source/app.d:
--------------------------------------------------------------------------------
1 | /++
2 | Mir-Influx integration example.
3 |
4 | Include mir-algorithm into your project.
5 |
6 | See_also:
7 | $(LINK2 http://docs.algorithm.dlang.io/latest/mir_series.html, mir.series).
8 |
9 | Authors: Ilya Yaroshenko
10 | +/
11 | import influxdb;
12 | import mir.series;
13 | import std.datetime: Date, DateTime;
14 |
15 | void main()
16 | {
17 | readFromInflux();
18 | writeToInflux();
19 | }
20 |
21 | void writeToInflux()
22 | {
23 | string[string][] manages;
24 | string[string][] queries;
25 | string[string][] writes;
26 |
27 | alias TestDatabase = DatabaseImpl!(
28 | (url, cmd) => manages ~= ["url": url, "cmd": cmd], // manage
29 | (url, db, query) { // query
30 | queries ~= ["url": url, "db": db, "query": query];
31 | return ``;
32 | },
33 | (url, db, line) => writes ~= ["url": url, "db": db, "line": line]
34 | );
35 |
36 | const database = TestDatabase("http://db.com", "testdb");
37 |
38 | auto time = [
39 | Date(2017, 2, 1),
40 | Date(2017, 2, 3),
41 | Date(2017, 2, 4)].sliced;
42 | auto data = [
43 | 2, 3,
44 | -3, 6,
45 | 4, 0].sliced(3, 2); // 3 x 2
46 |
47 | auto series1D = time.series(data.front!1);
48 | auto series2D = time.series(data);
49 |
50 | database.insert("coins-Alice", "Alice", series1D, ["tag":"1"]);
51 | database.insert("coins", ["Alice", "Bob"], series2D, ["tag":"2"]);
52 |
53 | assert(writes == [
54 |
55 | ["url": "http://db.com", "db": "testdb", "line":
56 | "coins-Alice,tag=1 Alice=2 1485907200000000000\n" ~
57 | "coins-Alice,tag=1 Alice=-3 1486080000000000000\n" ~
58 | "coins-Alice,tag=1 Alice=4 1486166400000000000"],
59 |
60 | ["url": "http://db.com", "db": "testdb", "line":
61 | "coins,tag=2 Alice=2,Bob=3 1485907200000000000\n" ~
62 | "coins,tag=2 Alice=-3,Bob=6 1486080000000000000\n" ~
63 | "coins,tag=2 Alice=4,Bob=0 1486166400000000000"],
64 | ]);
65 | }
66 |
67 | void readFromInflux()
68 | {
69 | auto influxSeries = MeasurementSeries("coolness",
70 | ["time", "foo", "bar"],
71 | [
72 | ["2015-06-11T20:46:02Z", "1.0", "2.0"],
73 | ["2013-02-09T12:34:56Z", "3.0", "4.0"],
74 | ]);
75 |
76 | auto series = influxSeries.rows.toMirSeries;
77 |
78 | // sort data if required
79 | {
80 | import mir.algorithm.iteration: all;
81 | import mir.ndslice.allocation: uninitSlice;
82 | import mir.ndslice.topology: pairwise;
83 |
84 | if (!series.time.pairwise!"a <= b".all)
85 | {
86 | series.sort(
87 | uninitSlice!size_t(series.length), // index buffer
88 | uninitSlice!double(series.length)); // data buffer
89 | }
90 | }
91 |
92 | assert(series.time == [
93 | DateTime(2013, 2, 9, 12, 34, 56),
94 | DateTime(2015, 6, 11, 20, 46, 2)]);
95 |
96 | assert(series.data == [
97 | [3.0, 4.0],
98 | [1.0, 2.0]]);
99 | }
100 |
--------------------------------------------------------------------------------
/docs/influxdb.vibe.manage.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | manage (influxdb.vibe.manage)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
31 |
influxdb vibe
32 |
functions
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/docs/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | influxdb (influxdb)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
influxdb
api module influxdb.api
This module implements a convenience wrapper API for influx.
mir module influxdb.mir
Conversion utilities that help to work with Mir Series and ndslice.
vibe module influxdb.vibe
This module implements utility functions for Influx API using vibe-d
34 |
35 |
36 |
37 |
38 |
39 |
--------------------------------------------------------------------------------
/docs/influxdb.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | influxdb (influxdb)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
influxdb
api module influxdb.api
This module implements a convenience wrapper API for influx.
mir module influxdb.mir
Conversion utilities that help to work with Mir Series and ndslice.
vibe module influxdb.vibe
This module implements utility functions for Influx API using vibe-d
34 |
35 |
36 |
37 |
38 |
39 |
--------------------------------------------------------------------------------
/docs/influxdb.api.Measurement.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Measurement (influxdb.api.Measurement)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
Measurement
struct Measurement {
string name
string [string ] tags
InfluxValue [string ] fields
long timestamp
}
31 |
influxdb api
32 |
aliases functions structs
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/docs/influxdb.api.DatabaseImpl.manage.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | DatabaseImpl.manage (influxdb.api.DatabaseImpl.manage)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
DatabaseImpl.manage
? struct DatabaseImpl (alias manageFunc, alias queryFunc, alias writeFunc)
32 |
34 |
35 |
36 |
37 |
--------------------------------------------------------------------------------
/docs/influxdb.vibe.write.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | write (influxdb.vibe.write)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
31 |
influxdb vibe
32 |
functions
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/docs/influxdb.vibe.query.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | query (influxdb.vibe.query)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
31 |
influxdb vibe
32 |
functions
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/source/influxdb/mir.d:
--------------------------------------------------------------------------------
1 | /++
2 | Conversion utilities that help to work with Mir Series and ndslice.
3 |
 4 | To use this module, the `mir-algorithm` package must be included in the user's `dub.json` file.
5 |
6 | Public_imports:
 7 |     mir.series
8 |
9 | Authors: Ilya Yaroshenko
10 | Copyright: Kaleidic Associates Advisory Limited
11 | License: BSD 3-clause
12 | +/
13 | module influxdb.mir;
14 |
15 | version(Have_mir_algorithm):
16 |
17 | static if (__VERSION__ >= 2073)
18 | {
19 | ////////////////////////////////////
20 | import mir.series;
21 | import influxdb.api;
22 | import std.datetime: DateTime;
23 |
24 | /++
25 | Converts MeasurementSeries.Rows to Mir Series.
26 |
27 | Params:
28 | T = Time type. Default time type is DateTime. Supported types are SysTime, DateTime, and Date.
29 | D = Data type. Default data type is double.
30 | rows = MeasurementSeries rows
31 |     columns = List of columns (optional). The "time" column is ignored.
32 | Returns:
33 | 2D Mir $(LINK2 https://docs.algorithm.dlang.io/latest/mir_series.html, Series).
34 | +/
35 | Series!(T*, D*, 2)
36 | toMirSeries(T = DateTime, D = double)(
37 | MeasurementSeries.Rows rows,
38 | const(string)[] columns = null)
39 | {
40 | // if columns are not set use all columns
41 | if (columns is null)
42 | {
43 | columns = rows.columns;
44 | }
45 | // always exclude "time" column
46 | foreach (i, column; columns)
47 | {
48 | if (column == "time")
49 | {
50 | columns = columns[0 .. i] ~ columns[i + 1 .. $];
51 | break;
52 | }
53 | }
54 | import mir.ndslice.allocation: slice, uninitSlice;
55 | import mir.ndslice.topology: map, as;
56 | import mir.array.allocation: array;
57 | import std.conv: to;
58 | auto time = rows["time"].array.map!influxSysTime.as!T.slice;
59 | auto data = uninitSlice!D(time.length, columns.length);
60 | foreach (i, column; columns)
61 | {
62 | auto from = rows[column];
63 | foreach (ref elem; data[0 .. $, i])
64 | {
65 | elem = from.front.to!D;
66 | from.popFront;
67 | }
68 | assert(from.empty);
69 | }
70 | return time.series(data);
71 | }
72 |
73 | ///
74 | @("toMirSeries")
75 | unittest
76 | {
77 | import mir.series;
78 | import std.datetime: DateTime;
79 |
80 | auto influxSeries = MeasurementSeries("coolness",
81 | ["time", "foo", "bar"],
82 | [
83 | ["2015-06-11T20:46:02Z", "1.0", "2.0"],
84 | ["2013-02-09T12:34:56Z", "3.0", "4.0"],
85 | ]);
86 |
87 | auto series = influxSeries.rows.toMirSeries;
88 |
89 | // sort data if required
90 | {
91 | import mir.algorithm.iteration: all;
92 | import mir.ndslice.allocation: uninitSlice;
93 | import mir.ndslice.topology: pairwise;
94 |
95 | if (!series.time.pairwise!"a <= b".all)
96 | {
97 | series.sort(
98 | uninitSlice!size_t(series.length), // index buffer
99 | uninitSlice!double(series.length)); // data buffer
100 | }
101 | }
102 |
103 | assert(series.time == [
104 | DateTime(2013, 2, 9, 12, 34, 56),
105 | DateTime(2015, 6, 11, 20, 46, 2)]);
106 |
107 | assert(series.data == [
108 | [3.0, 4.0],
109 | [1.0, 2.0]]);
110 | }
111 | ////////////////////////////////////
112 | }
113 | else
114 | pragma(msg, "Warning: influxdb.mir requires DMD Front End >= 2073");
115 |
--------------------------------------------------------------------------------
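A small illustrative sketch of the optional parameters documented above: selecting a subset of columns and using a different time type (the measurement and column names are placeholders):

```d
import influxdb;
import std.datetime: SysTime;

void columnSelectionExample()
{
    auto influxSeries = MeasurementSeries("coolness",
        ["time", "foo", "bar"],
        [["2015-06-11T20:46:02Z", "1.0", "2.0"]]);

    // keep only the "foo" column and convert the time axis to SysTime
    auto fooOnly = influxSeries.rows.toMirSeries!SysTime(["foo"]);

    assert(fooOnly.data == [[1.0]]);
}
```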
/docs/influxdb.api.DatabaseImpl.query.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | DatabaseImpl.query (influxdb.api.DatabaseImpl.query)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
DatabaseImpl.query
? struct DatabaseImpl (alias manageFunc, alias queryFunc, alias writeFunc)
32 |
34 |
35 |
36 |
37 |
--------------------------------------------------------------------------------
/docs/influxdb.mir.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | influxdb.mir (influxdb.mir)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
influxdb.mir
toMirSeries Series !(T *, Contiguous , [2 ], D *) toMirSeries (MeasurementSeries .Rows rows , const (string )[] columns = null )
Converts MeasurementSeries.Rows to Mir Series.
31 |
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/integration/source/integration/api.d:
--------------------------------------------------------------------------------
1 | /**
 2 |     This module implements integration tests for the InfluxDB API
3 |
4 | Authors: Atila Neves (Kaleidic Associates Advisory Limited)
5 |
6 | Generated documentation:
7 | http://influxdb.code.kaleidic.io/influxdb.html
8 |
9 | */
10 | module integration.api;
11 |
12 | import unit_threaded;
13 | import influxdb;
14 | import integration.common: influxURL;
15 |
16 |
17 | ///
18 | @Serial
19 | @("Database api")
20 | unittest {
21 |
22 | import influxdb.api: Database, Measurement;
23 |
24 | const database = Database(influxURL, "myspecialDB");
25 | scope(exit) database.drop;
26 |
27 | database.insert(Measurement("cpu", ["tag1": "foo"], ["temperature": "42"]));
28 | database.insert(Measurement("cpu", ["tag1": "foo"], ["temperature": "68"]));
29 |
30 | {
31 | const response = database.query("SELECT * from cpu");
32 | const result = response.results[0];
33 | const series = result.series[0];
34 | series.rows.length.shouldEqual(2);
35 | }
36 |
37 | {
38 | const response = database.query("SELECT * from cpu WHERE temperature > 50");
39 | const result = response.results[0];
40 | const series = result.series[0];
41 | series.rows.length.shouldEqual(1);
42 | }
43 | }
44 |
45 | ///
46 | @Serial
47 | @("Database multiple inserts")
48 | unittest {
49 |
50 | import influxdb.api: Database, Measurement;
51 |
52 | const database = Database(influxURL, "myspecialDB");
53 | scope(exit) database.drop;
54 |
55 | database.insert(Measurement("cpu", ["tag1": "foo"], ["temperature": "42"]),
56 | Measurement("cpu", ["tag1": "bar"], ["temperature": "68"]),
57 | Measurement("cpu", ["tag1": "baz"], ["temperature": "54"]));
58 |
59 | const response = database.query("SELECT * from cpu WHERE temperature > 50");
60 | const result = response.results[0];
61 | const series = result.series[0];
62 | series.rows.length.shouldEqual(2);
63 | }
64 |
65 | ///
66 | @Serial
67 | @("Database explicit timestamps")
68 | unittest {
69 |
70 | import influxdb.api: Database, Measurement;
71 | import std.datetime;
72 |
73 | const database = Database(influxURL, "myspecialDB");
74 | scope(exit) database.drop;
75 |
76 | database.insert(Measurement("cpu", ["tag1": "foo"], ["temperature": "42"], SysTime(DateTime(2017, 1, 1))));
77 | database.insert(Measurement("cpu", ["tag1": "foo"], ["temperature": "68"], SysTime(DateTime(2015, 1, 1))));
78 |
79 | {
80 | const response = database.query("SELECT * from cpu");
81 | const result = response.results[0];
82 | const series = result.series[0];
83 | series.rows.length.shouldEqual(2);
84 | }
85 |
86 | {
87 | const response = database.query("SELECT * from cpu WHERE time >= '2016-01-01 00:00:00'");
88 | const result = response.results[0];
89 | const series = result.series[0];
90 | series.rows.length.shouldEqual(1);
91 | }
92 |
93 | }
94 |
95 | @Serial
96 | @("string data")
97 | unittest {
98 | import influxdb.api: Database, Measurement;
99 |
100 | const database = Database(influxURL, "myspecialDB");
101 | scope(exit) database.drop;
102 |
103 | database.insert(Measurement("cpu", ["string": "foo"]));
104 | }
105 |
106 | @Serial
107 | @("float data")
108 | unittest {
109 | import influxdb.api: Database, Measurement;
110 |
111 | const database = Database(influxURL, "myspecialDB");
112 | scope(exit) database.drop;
113 |
114 | database.insert(Measurement("cpu", ["value": "42.3"]));
115 | }
116 |
117 | @Serial
118 | @("bool data")
119 | unittest {
120 | import influxdb.api: Database, Measurement;
121 |
122 | const database = Database(influxURL, "myspecialDB");
123 | scope(exit) database.drop;
124 |
125 | database.insert(Measurement("cpu", ["value": "true"]));
126 | database.insert(Measurement("cpu", ["value": "false"]));
127 | }
128 |
--------------------------------------------------------------------------------
/docs/integration.asdf.shouldBeSameJsonAs.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | shouldBeSameJsonAs (integration.asdf.shouldBeSameJsonAs)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
shouldBeSameJsonAs
?
void
shouldBeSameJsonAs
@
trusted
(
, , in string file = __FILE__ , in size_t line = __LINE__ )
31 |
integration asdf
32 |
functions
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/docs/influxdb.api.influxSysTime.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | influxSysTime (influxdb.api.influxSysTime)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
influxSysTime
?
SysTime
influxSysTime
@
safe
31 |
influxdb api
32 |
aliases functions structs
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/docs/influxdb.vibe.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | influxdb.vibe (influxdb.vibe)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
influxdb.vibe
manage void manage (in string url , in string str )
query string query (in string url , in string db , in string query )
urlEncode string urlEncode (in string str )
write void write (in string url , in string db , in string line )
32 |
34 |
35 |
36 |
37 |
--------------------------------------------------------------------------------
/docs/script.js:
--------------------------------------------------------------------------------
1 | window.onload = function() {
2 | document.body.addEventListener("mouseover", function(event) {
3 | if(event.target.hasAttribute("data-ident")) {
4 | var all = document.querySelectorAll("[data-ident=\""+event.target.getAttribute("data-ident")+"\"]");
5 | for(var i = 0; i < all.length; i++)
6 | all[i].className += " active";
7 | }
8 | });
9 | document.body.addEventListener("mouseout", function(event) {
10 | if(event.target.hasAttribute("data-ident")) {
11 | var all = document.querySelectorAll("[data-ident=\""+event.target.getAttribute("data-ident")+"\"]");
12 | for(var i = 0; i < all.length; i++)
13 | all[i].className = all[i].className.replace(" active", "");
14 | }
15 | });
16 | /*
17 | document.body.addEventListener("dblclick", function(event) {
18 | if(event.target.hasAttribute("data-ident")) {
19 | location.href = "/" + event.target.getAttribute("data-ident");
20 | }
21 | });
22 | */
23 |
24 | var sn = document.getElementById("source-navigation");
25 | if(sn) {
26 | sn.addEventListener("click", function(event) {
27 | if(event.target.tagName != "A" || event.target.className == "docs")
28 | return true;
29 | if(event.target.nextSibling) {
30 | var s = event.target.nextSibling;
31 | if(s.style.display == "" || s.style.display == "none" || s.className.indexOf("search-hit") != -1) {
32 | s.style.display = "block";
33 | var items = s.getElementsByTagName("ul");
34 | var i;
35 | for(i = 0; i < items.length; i++)
36 | items[i].style.display = "";
37 | items = s.getElementsByTagName("li");
38 | for(i = 0; i < items.length; i++)
39 | items[i].style.display = "";
40 | } else
41 | s.style.display = "";
42 | }
43 |
44 | //var id = event.target.href.substring(event.target.href.indexOf("#") + 1);
45 | //sn.style.marginTop = (document.getElementById(id).offsetTop - event.target.offsetTop + 16) + "px";
46 | });
47 |
48 | var search = document.createElement("input");
49 | search.setAttribute("type", "search");
50 | function searchHelper() {
51 | var regex = new RegExp(search.value, "i");
52 | var items = document.querySelectorAll("#source-navigation a[href^=\"#\"]");
53 | var stxt = search.value;
54 | for(var i = 0; i < items.length; i++) {
55 | var a = items[i];
56 | if(stxt.length && regex.test(a.textContent)) {
57 | var p = a.parentNode;
58 | while(p.tagName != "DIV") {
59 | if(p.tagName == "LI")
60 | p.style.display = "list-item";
61 | else
62 | p.style.display = "block";
63 | p.className += " search-hit";
64 | p = p.parentNode;
65 | }
66 | } else {
67 | var p = a.parentNode;
68 | if(stxt.length == 0) {
69 | p.style.display = "";
70 | while(p.tagName != "DIV") {
71 | p.style.display = "";
72 | p = p.parentNode;
73 | }
74 | } else
75 | p.style.display = "none";
76 | p.className = p.className.replace(" search-hit", "");
77 | }
78 | }
79 | }
80 | search.addEventListener("keyup", searchHelper);
81 | sn.insertBefore(search, sn.firstChild);
82 | }
83 |
84 | function updateDynamicStyle() {
85 | var thing = document.getElementById("page-content");
86 | var newStyle = document.getElementById("dynamic-style");
87 | if(!newStyle) {
88 | newStyle = document.createElement("style");
89 | newStyle.setAttribute("id", "dynamic-style");
90 | newStyle.type = "text/css";
91 | document.head.appendChild(newStyle);
92 | }
93 |
94 | var maxContentWidth = window.innerWidth;
95 | /* 800 is the threshold for putting nav vertically */
96 | if(maxContentWidth < 800)
97 | maxContentWidth = 800;
98 | else
99 | maxContentWidth =
100 | document.body.offsetWidth -
101 | document.getElementById("page-nav").offsetWidth -
102 | document.getElementById("page-nav").offsetLeft -
103 | 64;
104 |
105 | newStyle.innerHTML = ".member-list:not(.constructors) dt .simplified-prototype:hover { width: " + (thing.offsetWidth - 32) + "px; } #page-content pre.d_code, #page-content .overload-option, #page-content .member-list dt { max-width: " + (maxContentWidth) + "px; }";
106 | }
107 |
108 | updateDynamicStyle();
109 |
110 | window.onresize = updateDynamicStyle;
111 |
112 | // Disable line numbers in IE because the copy/paste with them sucks - it includes all line numbers
113 | // in the middle making it too hard to use. Copy/paste is more important than line displays.
114 | if (navigator.userAgent.indexOf('MSIE') !== -1 || navigator.appVersion.indexOf('Trident/') > 0) {
115 | var items = document.querySelectorAll(".with-line-wrappers");
116 | for(var a = 0; a < items.length; a++)
117 | items[a].className = items[a].className.replace("with-line-wrappers", "");
118 | }
119 |
120 | };
121 |
--------------------------------------------------------------------------------
/docs/influxdb.api.toInfluxDateTime.1.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | toInfluxDateTime (influxdb.api.toInfluxDateTime)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
toInfluxDateTime
1 DateTime (2017 , 2 , 1 ).toInfluxDateTime .shouldEqual ("'2017-02-01T00:00:00Z'" );
32 |
33 |
influxdb api
34 |
aliases functions structs
35 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/docs/influxdb.api.toInfluxDateTime.2.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | toInfluxDateTime (influxdb.api.toInfluxDateTime)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
toInfluxDateTime
1 import std .datetime : UTC ;
32 | 2 SysTime (DateTime (2017 , 2 , 1 ), UTC ()).toInfluxDateTime .shouldEqual ("'2017-02-01T00:00:00Z'" );
33 |
34 |
influxdb api
35 |
aliases functions structs
36 |
37 |
38 |
39 |
--------------------------------------------------------------------------------
/docs/influxdb.api.shouldEqualLine.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | shouldEqualLine (influxdb.api.shouldEqualLine)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
shouldEqualLine
? version (unittest )
void
shouldEqualLine
@
safe pure
(
, , in string file = __FILE__ , in size_t line = __LINE__ )
35 |
influxdb api
36 |
aliases functions structs
37 |
38 |
39 |
40 |
--------------------------------------------------------------------------------
/docs/influxdb.api.DatabaseImpl.insert.1.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | DatabaseImpl.insert (influxdb.api.DatabaseImpl.insert)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
DatabaseImpl.insert
void insert (in Measurement [] measurements ) ? struct DatabaseImpl (alias manageFunc, alias queryFunc, alias writeFunc)
void insert (in Measurement [] measurements ...) void insert (string measurementName , string columnName , Series !(TimeIterator , kind , [1 ], Iterator ) series1 , string [string ] commonTags = null ) void insert (string measurementName , in string [] columnNames , Series !(TimeIterator , kind , [2 ], Iterator ) series , string [string ] commonTags = null )
31 |
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/docs/influxdb.api.DatabaseImpl.insert.2.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | DatabaseImpl.insert (influxdb.api.DatabaseImpl.insert)
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
28 |
29 |
30 |
DatabaseImpl.insert
void insert (in Measurement [] measurements ) void insert (in Measurement [] measurements ...) ? struct DatabaseImpl (alias manageFunc, alias queryFunc, alias writeFunc)
void insert (string measurementName , string columnName , Series !(TimeIterator , kind , [1 ], Iterator ) series1 , string [string ] commonTags = null ) void insert (string measurementName , in string [] columnNames , Series !(TimeIterator , kind , [2 ], Iterator ) series , string [string ] commonTags = null )
31 |
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/integration/source/integration/curl.d:
--------------------------------------------------------------------------------
1 | /**
2 | This module implements integration tests for InfluxDB. As such, they record in
3 | code the assumptions made with regards to the HTTP API. Given that these tests
4 | pass, the unit tests are sufficient to guarantee correct behaviour.
5 |
6 | These tests can be run with `dub run -c integration` and require a running
7 | instance of InfluxDB on localhost:8086. On systems with systemd, install
8 | InfluxDB (as appropriate for the Linux distribution) and start it with
9 | `systemctl start influxdb`.
10 |
11 | If these tests fail, nothing else in this repository will work.
12 | */
13 | module integration.curl;
14 |
15 | import unit_threaded;
16 | import integration.common: influxURL;
17 |
18 |
19 | ///
20 | @Serial
21 | @("Create and drop")
22 | unittest {
23 | curlPostQuery("CREATE DATABASE testdb").shouldSucceed;
24 | curlPostQuery("DROP DATABASE testdb").shouldSucceed;
25 | }
26 |
27 | ///
28 | @Serial
29 | @("Nonsense query")
30 | unittest {
31 | curlPostQuery("FOO DATABASE testdb").shouldFail;
32 | }
33 |
34 | ///
35 | @Serial
36 | @("Query empty database")
37 | unittest {
38 | import std.string: join;
39 | import std.json: parseJSON;
40 | import std.algorithm: find;
41 |
42 | // in case there's still data there, delete the DB
43 | curlPostQuery("DROP DATABASE testdb").shouldSucceed;
44 | curlPostQuery("CREATE DATABASE testdb").shouldSucceed;
45 | scope(exit) curlPostQuery("DROP DATABASE testdb").shouldSucceed;
46 |
47 | const lines = curlGet("SELECT * from foo").shouldSucceed;
48 | const json = lines.join(" ").find("{").parseJSON;
49 | json.toString.shouldEqual(`{"results":[{"statement_id":0}]}`);
50 | }
51 |
52 |
53 | ///
54 | @Serial
55 | @("Query database with data")
56 | unittest {
57 | import std.string: join;
58 | import std.json: parseJSON;
59 | import std.algorithm: find, map;
60 |
61 | // in case there's still data there, delete the DB
62 | curlPostQuery("DROP DATABASE testdb").shouldSucceed;
63 | curlPostQuery("CREATE DATABASE testdb").shouldSucceed;
64 | scope(exit) curlPostQuery("DROP DATABASE testdb").shouldSucceed;
65 |
66 | curlPostWrite("foo,tag1=letag,tag2=othertag value=1,othervalue=3").shouldSucceed;
67 | curlPostWrite("foo,tag1=toto,tag2=titi value=2,othervalue=4 1434055562000000000").shouldSucceed;
68 |
69 | /*
70 | Example of a response (prettified):
71 | {
72 | "results": [{
73 | "series": [{
74 | "columns": ["time", "othervalue", "tag1", "tag2", "value"],
75 | "name": "foo",
76 | "values": [
77 | ["2015-06-11T20:46:02Z", 4, "toto", "titi", 2],
78 | ["2017-03-14T23:15:01.06282785Z", 3, "letag", "othertag", 1]
79 | ]
80 | }],
81 | "statement_id": 0
82 | }]
83 | }
84 | */
85 |
86 | {
87 | const lines = curlGet("SELECT * from foo").shouldSucceed;
88 | const json = lines.join(" ").find("{").parseJSON;
89 | const result = json.object["results"].array[0].object;
90 | const table = result["series"].array[0].object;
91 | table["columns"].array.map!(a => a.str).shouldBeSameSetAs(
92 | ["time", "othervalue", "tag1", "tag2", "value"]);
93 | table["name"].str.shouldEqual("foo");
94 | table["values"].array.length.shouldEqual(2);
95 | }
96 |
97 | {
98 | const lines = curlGet("SELECT value from foo WHERE value > 1").shouldSucceed;
99 | const json = lines.join(" ").find("{").parseJSON;
100 | const result = json.object["results"].array[0].object;
101 | const table = result["series"].array[0].object;
102 | table["values"].array.length.shouldEqual(1);
103 | }
104 |
105 | {
106 | const lines = curlGet("SELECT othervalue from foo WHERE othervalue > 42").shouldSucceed;
107 | const json = lines.join(" ").find("{").parseJSON;
108 | const result = json.object["results"].array[0];
109 | // no result in this case, no data with othervalue > 42
110 | json.object["results"].array[0].toString.shouldEqual(`{"statement_id":0}`);
111 | }
112 | }
113 |
114 | private string[] curlPostQuery(in string arg) {
115 | return ["curl", "-i", "-XPOST", influxURL ~ `/query`, "--data-urlencode",
116 | `q=` ~ arg];
117 | }
118 |
119 | private string[] curlPostWrite(in string arg) {
120 | return ["curl", "-i", "-XPOST", influxURL ~ `/write?db=testdb`, "--data-binary", arg];
121 | }
122 |
123 | private string[] curlGet(in string arg) {
124 | return ["curl", "-G", influxURL ~ "/query?pretty=true", "--data-urlencode", "db=testdb",
125 | "--data-urlencode", `q=` ~ arg];
126 | }
127 |
128 |
129 | private string[] shouldSucceed(in string[] cmd, in string file = __FILE__, in size_t line = __LINE__) {
130 | import std.process: execute;
131 | import std.conv: text;
132 | import std.string: splitLines, join;
133 | import std.algorithm: find, canFind, startsWith, endsWith;
134 | import std.array: empty;
135 | import std.json: parseJSON;
136 |
137 | writelnUt(cmd.join(" "));
138 |
139 | const ret = execute(cmd);
140 | if(ret.status != 0)
141 | throw new UnitTestException([text("Could not execute '", cmd.join(" "), "':")] ~
142 | ret.output.splitLines, file, line);
143 |
144 | if(!ret.output.splitLines.canFind!(a => a.canFind("HTTP/1.1 20")) &&
145 | !ret.output.canFind(`"results"`))
146 | throw new UnitTestException([text("Bad HTTP response for '", cmd.join(" "), "':")]
147 | ~ ("first: " ~ ret.output[0] ~ " last: " ~ ret.output[$-1])
148 | ~
149 | ret.output.splitLines, file, line);
150 |
151 | return ret.output.splitLines;
152 | }
153 |
154 | private void shouldFail(in string[] cmd, in string file = __FILE__, in size_t line = __LINE__) {
155 |
156 | import std.conv: text;
157 |
158 | try {
159 | shouldSucceed(cmd, file, line);
160 | fail(text("Command '", cmd, "' was expected to fail but did not:"), file, line);
161 | } catch(Exception ex) {}
162 | }
163 |
--------------------------------------------------------------------------------
/docs/influxdb.api.html:
--------------------------------------------------------------------------------
influxdb.api

Database            alias Database = DatabaseImpl!(influxdb.vibe.manage, influxdb.vibe.query, influxdb.vibe.write)

influxSysTime       SysTime influxSysTime(string time)

shouldEqualLine     void shouldEqualLine(in string actual, in string expected, in string file = __FILE__, in size_t line = __LINE__)

toInfluxDateTime    string toInfluxDateTime(in DateTime time)
                    Converts a DateTime to a string suitable for use in queries,
                    e.g. SELECT * FROM foo WHERE time >=

toInfluxDateTime    string toInfluxDateTime(in SysTime time)
                    Converts a SysTime to a string suitable for use in queries,
                    e.g. SELECT * FROM foo WHERE time >=

DatabaseImpl        struct DatabaseImpl(alias manageFunc, alias queryFunc, alias writeFunc)
                    Holds information about the database name and URL, and forwards
                    it to the implementation functions for managing, querying and
                    writing to the DB

Measurement         struct Measurement

MeasurementSeries   struct MeasurementSeries

Response            struct Response

Result              struct Result
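The listing above is taken straight from the generated page. As an illustrative aside, a minimal
sketch of combining Database and toInfluxDateTime is shown below; the URL, database name,
measurement name and cut-off date are made up for illustration, and the exact text returned by
toInfluxDateTime is whatever the function produces.

// Hypothetical usage sketch; all names and values are illustrative only.
import std.datetime: DateTime;
import influxdb.api: Database, toInfluxDateTime;

void timeBoundedQuery() {
    const database = Database("http://localhost:8086", "testdb");
    const cutoff = toInfluxDateTime(DateTime(2017, 1, 1, 0, 0, 0));
    const response = database.query("SELECT * FROM foo WHERE time >= " ~ cutoff);
    // response.results[0].series[0].rows then holds the matching rows
}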
--------------------------------------------------------------------------------
/docs/integration.asdf.html:
--------------------------------------------------------------------------------
integration.asdf

shouldBeSameJsonAs  void shouldBeSameJsonAs(in string actual, in string expected, in string file = __FILE__, in size_t line = __LINE__)

Example:

enum jsonString = `
{
    "results": [{
        "series": [{
            "columns": ["time", "othervalue", "tag1", "tag2", "value"],
            "name": "myname",
            "values": [
                ["2015-06-11T20:46:02Z", 4, "toto", "titi", 2],
                ["2017-03-14T23:15:01.06282785Z", 3, "letag", "othertag", 1]
            ]
        }],
        "statement_id": 42
    }]
}
`;

jsonString.deserialize!Response.shouldEqual(
    Response(
        [
            Result(
                [
                    MeasurementSeries(
                        "myname", // name
                        ["time", "othervalue", "tag1", "tag2", "value"], // columns
                        // values
                        [
                            ["2015-06-11T20:46:02Z", "4", "toto", "titi", "2"],
                            ["2017-03-14T23:15:01.06282785Z", "3", "letag", "othertag", "1"],
                        ]
                    ),
                ],
                42, // statement_id
            )
        ]
    )
);
--------------------------------------------------------------------------------
/docs/integration.vibe.html:
--------------------------------------------------------------------------------
integration.vibe

Example 1:

manage(influxURL, "DROP DATABASE test_vibe_db");
wait;
manage(influxURL, "CREATE DATABASE test_vibe_db");
wait;
manage(influxURL, "DROP DATABASE test_vibe_db");
wait;

Example 2:

manage(influxURL, "DROP DATABASE test_vibe_db");
wait;
manage(influxURL, "CREATE DATABASE test_vibe_db");
wait;
scope(exit) {
    manage(influxURL, "DROP DATABASE test_vibe_db");
    wait;
}

const json = query(influxURL, "test_vibe_db", "SELECT * from foo").parseJSON;
JSONValue expected;
JSONValue result;
result["statement_id"] = JSONValue(0);
expected["results"] = [result];
json.shouldEqual(expected);

Example 3:

import std.algorithm: map;

manage(influxURL, "DROP DATABASE test_vibe_db");
wait;
manage(influxURL, "CREATE DATABASE test_vibe_db");
wait;
scope(exit) {
    manage(influxURL, "DROP DATABASE test_vibe_db");
    wait;
}

write(influxURL, "test_vibe_db", "foo,tag1=letag,tag2=othertag value=1,othervalue=3");
write(influxURL, "test_vibe_db", "foo,tag1=toto,tag2=titi value=2,othervalue=4 1434055562000000000");
wait;

{
    const json = query(influxURL, "test_vibe_db", "SELECT * from foo").parseJSON;
    const result = json.object["results"].array[0].object;
    const table = result["series"].array[0].object;
    table["columns"].array.map!(a => a.str).shouldBeSameSetAs(
        ["time", "othervalue", "tag1", "tag2", "value"]);
    table["name"].str.shouldEqual("foo");
    table["values"].array.length.shouldEqual(2);
}
--------------------------------------------------------------------------------
/docs/influxdb.api.DatabaseImpl.insert.3.html:
--------------------------------------------------------------------------------
DatabaseImpl.insert

struct DatabaseImpl(alias manageFunc, alias queryFunc, alias writeFunc)

Overloads:
    void insert(in Measurement[] measurements)
    void insert(in Measurement[] measurements...)
    void insert(string measurementName, string columnName, Series!(TimeIterator, kind, [1], Iterator) series1, string[string] commonTags = null)
    void insert(string measurementName, in string[] columnNames, Series!(TimeIterator, kind, [2], Iterator) series, string[string] commonTags = null)

This page documents the single-column (1-D series) overload:

version (Have_mir_algorithm)
void insert(TimeIterator, SliceKind kind, Iterator)
           (string measurementName,
            string columnName,
            Series!(TimeIterator, kind, [1], Iterator) series1,
            string[string] commonTags = null) const
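The generated page carries no example for this overload. Below is a minimal sketch of a call;
it assumes the time index is a 1-D slice of SysTime and the data a 1-D slice of double (the page
does not state the accepted element types), and the URL, database, measurement and column names
are made up for illustration.

// Hypothetical sketch; element types and all names are assumptions.
import influxdb.api: Database;
import mir.ndslice.slice: sliced;
import mir.series: series;
import std.datetime: DateTime, SysTime, UTC;

void insertOneColumn() {
    const database = Database("http://localhost:8086", "testdb");

    auto times = [SysTime(DateTime(2017, 1, 1), UTC()),
                  SysTime(DateTime(2017, 1, 2), UTC())].sliced;
    auto temperatures = [42.0, 68.0].sliced;

    // writes one field ("temperature") per timestamp to the "cpu" measurement
    database.insert("cpu", "temperature", series(times, temperatures));
}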
--------------------------------------------------------------------------------
/docs/influxdb.api.DatabaseImpl.insert.4.html:
--------------------------------------------------------------------------------
DatabaseImpl.insert

struct DatabaseImpl(alias manageFunc, alias queryFunc, alias writeFunc)

Overloads:
    void insert(in Measurement[] measurements)
    void insert(in Measurement[] measurements...)
    void insert(string measurementName, string columnName, Series!(TimeIterator, kind, [1], Iterator) series1, string[string] commonTags = null)
    void insert(string measurementName, in string[] columnNames, Series!(TimeIterator, kind, [2], Iterator) series, string[string] commonTags = null)

This page documents the multi-column (2-D series) overload:

version (Have_mir_algorithm)
void insert(TimeIterator, SliceKind kind, Iterator)
           (string measurementName,
            in string[] columnNames,
            Series!(TimeIterator, kind, [2], Iterator) series,
            string[string] commonTags = null) const
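As with the 1-D overload, the generated page has no example here. The sketch below assumes one
row per timestamp and one column per entry in columnNames, with double data (an assumption), and
all names are illustrative only.

// Hypothetical sketch; element types and all names are assumptions.
import influxdb.api: Database;
import mir.ndslice.fuse: fuse;
import mir.ndslice.slice: sliced;
import mir.series: series;
import std.datetime: DateTime, SysTime, UTC;

void insertManyColumns() {
    const database = Database("http://localhost:8086", "testdb");

    auto times = [SysTime(DateTime(2017, 1, 1), UTC()),
                  SysTime(DateTime(2017, 1, 2), UTC())].sliced;
    // one row per timestamp, one column per name in columnNames
    auto data = [[42.0, 0.5],
                 [68.0, 0.7]].fuse;

    database.insert("cpu", ["temperature", "load"], series(times, data),
                    ["host": "example-host"]);
}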
--------------------------------------------------------------------------------
/docs/influxdb.mir.toMirSeries.html:
--------------------------------------------------------------------------------
toMirSeries

Series!(T*, Contiguous, [2], D*) toMirSeries(MeasurementSeries.Rows rows, const(string)[] columns)

Template parameters: T, D
Parameters:
    rows      MeasurementSeries.Rows
    columns   const(string)[]
Returns:
    Series!(T*, Contiguous, [2], D*)

Example:

import mir.series;
import std.datetime: DateTime;

auto influxSeries = MeasurementSeries("coolness",
    ["time", "foo", "bar"],
    [
        ["2015-06-11T20:46:02Z", "1.0", "2.0"],
        ["2013-02-09T12:34:56Z", "3.0", "4.0"],
    ]);

auto series = influxSeries.rows.toMirSeries;

// sort data if required
{
    import mir.ndslice.algorithm: all;
    import mir.ndslice.allocation: uninitSlice;
    import mir.ndslice.topology: pairwise;

    if (!series.time.pairwise!"a <= b".all)
    {
        series.sort(
            uninitSlice!size_t(series.length),  // index buffer
            uninitSlice!double(series.length)); // data buffer
    }
}

assert(series.time == [
    DateTime(2013, 2, 9, 12, 34, 56),
    DateTime(2015, 6, 11, 20, 46, 2)]);

assert(series.data == [
    [3.0, 4.0],
    [1.0, 2.0]]);
--------------------------------------------------------------------------------
/docs/integration.api.html:
--------------------------------------------------------------------------------
integration.api

Example 1:

import influxdb.api: Database, Measurement;

const database = Database(influxURL, "myspecialDB");
scope(exit) database.drop;

database.insert(Measurement("cpu", ["tag1": "foo"], ["temperature": "42"]));
database.insert(Measurement("cpu", ["tag1": "foo"], ["temperature": "68"]));

{
    const response = database.query("SELECT * from cpu");
    const result = response.results[0];
    const series = result.series[0];
    series.rows.length.shouldEqual(2);
}

{
    const response = database.query("SELECT * from cpu WHERE temperature > 50");
    const result = response.results[0];
    const series = result.series[0];
    series.rows.length.shouldEqual(1);
}

Example 2:

import influxdb.api: Database, Measurement;

const database = Database(influxURL, "myspecialDB");
scope(exit) database.drop;

database.insert(Measurement("cpu", ["tag1": "foo"], ["temperature": "42"]),
                Measurement("cpu", ["tag1": "bar"], ["temperature": "68"]),
                Measurement("cpu", ["tag1": "baz"], ["temperature": "54"]));

const response = database.query("SELECT * from cpu WHERE temperature > 50");
const result = response.results[0];
const series = result.series[0];
series.rows.length.shouldEqual(2);

Example 3:

import influxdb.api: Database, Measurement;
import std.datetime;

const database = Database(influxURL, "myspecialDB");
scope(exit) database.drop;

database.insert(Measurement("cpu", ["tag1": "foo"], ["temperature": "42"], SysTime(DateTime(2017, 1, 1))));
database.insert(Measurement("cpu", ["tag1": "foo"], ["temperature": "68"], SysTime(DateTime(2015, 1, 1))));

{
    const response = database.query("SELECT * from cpu");
    const result = response.results[0];
    const series = result.series[0];
    series.rows.length.shouldEqual(2);
}

{
    const response = database.query("SELECT * from cpu WHERE time >= '2016-01-01 00:00:00'");
    const result = response.results[0];
    const series = result.series[0];
    series.rows.length.shouldEqual(1);
}
--------------------------------------------------------------------------------
/docs/influxdb.api.MeasurementSeries.html:
--------------------------------------------------------------------------------
MeasurementSeries

struct MeasurementSeries {
    string name
    string[] columns
    string[][] values
}

Example 1:

import std.datetime: SysTime, DateTime, UTC;
import std.array: array;

auto series = MeasurementSeries("coolness",
    ["time", "foo", "bar"],
    [
        ["2015-06-11T20:46:02Z", "red", "blue"],
        ["2013-02-09T12:34:56Z", "green", "yellow"],
    ]);

series.rows[0]["foo"].shouldEqual("red");
series.rows[0]["time"].shouldEqual("2015-06-11T20:46:02Z");
series.rows[0].time.shouldEqual(SysTime(DateTime(2015, 06, 11, 20, 46, 2), UTC()));

series.rows[1]["bar"].shouldEqual("yellow");
series.rows[1]["time"].shouldEqual("2013-02-09T12:34:56Z");
series.rows[1].time.shouldEqual(SysTime(DateTime(2013, 2, 9, 12, 34, 56), UTC()));

series.rows["time"][0].shouldEqual("2015-06-11T20:46:02Z");
series.rows["bar"][1].shouldEqual("yellow");

series.rows.array.shouldEqual(
    [
        MeasurementSeries.Rows.Row(["time", "foo", "bar"],
                                   ["2015-06-11T20:46:02Z", "red", "blue"],
        ),
        MeasurementSeries.Rows.Row(["time", "foo", "bar"],
                                   ["2013-02-09T12:34:56Z", "green", "yellow"],
        ),
    ]
);

Example 2:

auto series = MeasurementSeries("coolness",
    ["time", "foo", "bar"],
    [["2015-06-11T20:46:02Z", "red", "blue"]]);
series.rows[0].get("foo", "oops").shouldEqual("red");
series.rows[0].get("quux", "oops").shouldEqual("oops");

Example 3:

import std.conv: to;
auto series = MeasurementSeries("coolness",
    ["time", "foo", "bar"],
    [["2015-06-11T20:46:02Z", "red", "blue"]]);
series.rows[0].to!string.shouldEqual("Row(time: 2015-06-11T20:46:02Z, foo: red, bar: blue)");

Example 4:

import std.datetime: SysTime, DateTime, UTC, usecs;
import std.array: array;

auto series = MeasurementSeries("coolness",
    ["time", "foo", "bar"],
    [
        ["2017-05-10T14:47:38.82524801Z", "red", "blue"],
    ]);

series.rows[0].time.shouldEqual(SysTime(DateTime(2017, 05, 10, 14, 47, 38), 825248.usecs, UTC()));
--------------------------------------------------------------------------------
/docs/integration.curl.html:
--------------------------------------------------------------------------------
integration.curl

Example 1:

curlPostQuery("CREATE DATABASE testdb").shouldSucceed;
curlPostQuery("DROP DATABASE testdb").shouldSucceed;

Example 2:

curlPostQuery("FOO DATABASE testdb").shouldFail;

Example 3:

import std.string: join;
import std.json: parseJSON;
import std.algorithm: find;

// in case there's still data there, delete the DB
curlPostQuery("DROP DATABASE testdb").shouldSucceed;
curlPostQuery("CREATE DATABASE testdb").shouldSucceed;
scope(exit) curlPostQuery("DROP DATABASE testdb").shouldSucceed;

const lines = curlGet("SELECT * from foo").shouldSucceed;
const json = lines.join(" ").find("{").parseJSON;
json.toString.shouldEqual(`{"results":[{"statement_id":0}]}`);

Example 4:

import std.string: join;
import std.json: parseJSON;
import std.algorithm: find, map;

// in case there's still data there, delete the DB
curlPostQuery("DROP DATABASE testdb").shouldSucceed;
curlPostQuery("CREATE DATABASE testdb").shouldSucceed;
scope(exit) curlPostQuery("DROP DATABASE testdb").shouldSucceed;

curlPostWrite("foo,tag1=letag,tag2=othertag value=1,othervalue=3").shouldSucceed;
curlPostWrite("foo,tag1=toto,tag2=titi value=2,othervalue=4 1434055562000000000").shouldSucceed;

/*
   Example of a response (prettified):
   {
       "results": [{
           "series": [{
               "columns": ["time", "othervalue", "tag1", "tag2", "value"],
               "name": "foo",
               "values": [
                   ["2015-06-11T20:46:02Z", 4, "toto", "titi", 2],
                   ["2017-03-14T23:15:01.06282785Z", 3, "letag", "othertag", 1]
               ]
           }],
           "statement_id": 0
       }]
   }
*/

{
    const lines = curlGet("SELECT * from foo").shouldSucceed;
    const json = lines.join(" ").find("{").parseJSON;
    const result = json.object["results"].array[0].object;
    const table = result["series"].array[0].object;
    table["columns"].array.map!(a => a.str).shouldBeSameSetAs(
        ["time", "othervalue", "tag1", "tag2", "value"]);
    table["name"].str.shouldEqual("foo");
    table["values"].array.length.shouldEqual(2);
}

{
    const lines = curlGet("SELECT value from foo WHERE value > 1").shouldSucceed;
    const json = lines.join(" ").find("{").parseJSON;
    const result = json.object["results"].array[0].object;
    const table = result["series"].array[0].object;
    table["values"].array.length.shouldEqual(1);
}

{
    const lines = curlGet("SELECT othervalue from foo WHERE othervalue > 42").shouldSucceed;
    const json = lines.join(" ").find("{").parseJSON;
    const result = json.object["results"].array[0];
    // no result in this case, no data with othervalue > 42
    result.toString.shouldEqual(`{"statement_id":0}`);
}
--------------------------------------------------------------------------------