├── .github └── workflows │ └── haskell-ci.yml ├── .gitignore ├── .travis └── influxdb.patch ├── CHANGELOG.md ├── LICENSE ├── README.md ├── Setup.hs ├── cabal.haskell-ci ├── cabal.project ├── examples ├── random-points.hs └── write-udp.hs ├── hie.yaml ├── influxdb.cabal ├── scripts └── travis.sh ├── src └── Database │ ├── InfluxDB.hs │ └── InfluxDB │ ├── Format.hs │ ├── Internal │ └── Text.hs │ ├── JSON.hs │ ├── Line.hs │ ├── Manage.hs │ ├── Ping.hs │ ├── Query.hs │ ├── Types.hs │ ├── Write.hs │ └── Write │ └── UDP.hs └── tests ├── doctests.hs └── regressions.hs /.github/workflows/haskell-ci.yml: -------------------------------------------------------------------------------- 1 | # This GitHub workflow config has been generated by a script via 2 | # 3 | # haskell-ci 'github' 'influxdb.cabal' 4 | # 5 | # To regenerate the script (for example after adjusting tested-with) run 6 | # 7 | # haskell-ci regenerate 8 | # 9 | # For more information, see https://github.com/haskell-CI/haskell-ci 10 | # 11 | # version: 0.19.20240702 12 | # 13 | # REGENDATA ("0.19.20240702",["github","influxdb.cabal"]) 14 | # 15 | name: Haskell-CI 16 | on: 17 | - push 18 | - pull_request 19 | jobs: 20 | linux: 21 | name: Haskell-CI - Linux - ${{ matrix.compiler }} 22 | runs-on: ubuntu-20.04 23 | timeout-minutes: 24 | 60 25 | container: 26 | image: buildpack-deps:jammy 27 | services: 28 | influxdb: 29 | image: influxdb:1.8.10 30 | env: 31 | INFLUXDB_REPORTING_DISABLED: true 32 | continue-on-error: ${{ matrix.allow-failure }} 33 | strategy: 34 | matrix: 35 | include: 36 | - compiler: ghc-9.10.1 37 | compilerKind: ghc 38 | compilerVersion: 9.10.1 39 | setup-method: ghcup 40 | allow-failure: false 41 | - compiler: ghc-9.8.2 42 | compilerKind: ghc 43 | compilerVersion: 9.8.2 44 | setup-method: ghcup 45 | allow-failure: false 46 | - compiler: ghc-9.6.6 47 | compilerKind: ghc 48 | compilerVersion: 9.6.6 49 | setup-method: ghcup 50 | allow-failure: false 51 | - compiler: ghc-9.4.8 52 | compilerKind: ghc 53 | compilerVersion: 9.4.8 54 | setup-method: ghcup 55 | allow-failure: false 56 | - compiler: ghc-9.2.8 57 | compilerKind: ghc 58 | compilerVersion: 9.2.8 59 | setup-method: ghcup 60 | allow-failure: false 61 | - compiler: ghc-9.0.2 62 | compilerKind: ghc 63 | compilerVersion: 9.0.2 64 | setup-method: ghcup 65 | allow-failure: false 66 | - compiler: ghc-8.10.7 67 | compilerKind: ghc 68 | compilerVersion: 8.10.7 69 | setup-method: ghcup 70 | allow-failure: false 71 | - compiler: ghc-8.8.4 72 | compilerKind: ghc 73 | compilerVersion: 8.8.4 74 | setup-method: ghcup 75 | allow-failure: false 76 | - compiler: ghc-8.6.5 77 | compilerKind: ghc 78 | compilerVersion: 8.6.5 79 | setup-method: ghcup 80 | allow-failure: false 81 | - compiler: ghc-8.4.4 82 | compilerKind: ghc 83 | compilerVersion: 8.4.4 84 | setup-method: ghcup 85 | allow-failure: false 86 | fail-fast: false 87 | steps: 88 | - name: apt 89 | run: | 90 | apt-get update 91 | apt-get install -y --no-install-recommends gnupg ca-certificates dirmngr curl git software-properties-common libtinfo5 libnuma-dev socat 92 | mkdir -p "$HOME/.ghcup/bin" 93 | curl -sL https://downloads.haskell.org/ghcup/0.1.20.0/x86_64-linux-ghcup-0.1.20.0 > "$HOME/.ghcup/bin/ghcup" 94 | chmod a+x "$HOME/.ghcup/bin/ghcup" 95 | "$HOME/.ghcup/bin/ghcup" install ghc "$HCVER" || (cat "$HOME"/.ghcup/logs/*.* && false) 96 | "$HOME/.ghcup/bin/ghcup" install cabal 3.12.1.0 || (cat "$HOME"/.ghcup/logs/*.* && false) 97 | env: 98 | HCKIND: ${{ matrix.compilerKind }} 99 | HCNAME: ${{ matrix.compiler }} 100 | HCVER: 
${{ matrix.compilerVersion }} 101 | - name: influxdb forward 102 | run: | 103 | socat TCP-LISTEN:8086,fork TCP:influxdb:8086 & 104 | - name: Set PATH and environment variables 105 | run: | 106 | echo "$HOME/.cabal/bin" >> $GITHUB_PATH 107 | echo "LANG=C.UTF-8" >> "$GITHUB_ENV" 108 | echo "CABAL_DIR=$HOME/.cabal" >> "$GITHUB_ENV" 109 | echo "CABAL_CONFIG=$HOME/.cabal/config" >> "$GITHUB_ENV" 110 | HCDIR=/opt/$HCKIND/$HCVER 111 | HC=$("$HOME/.ghcup/bin/ghcup" whereis ghc "$HCVER") 112 | HCPKG=$(echo "$HC" | sed 's#ghc$#ghc-pkg#') 113 | HADDOCK=$(echo "$HC" | sed 's#ghc$#haddock#') 114 | echo "HC=$HC" >> "$GITHUB_ENV" 115 | echo "HCPKG=$HCPKG" >> "$GITHUB_ENV" 116 | echo "HADDOCK=$HADDOCK" >> "$GITHUB_ENV" 117 | echo "CABAL=$HOME/.ghcup/bin/cabal-3.12.1.0 -vnormal+nowrap" >> "$GITHUB_ENV" 118 | HCNUMVER=$(${HC} --numeric-version|perl -ne '/^(\d+)\.(\d+)\.(\d+)(\.(\d+))?$/; print(10000 * $1 + 100 * $2 + ($3 == 0 ? $5 != 1 : $3))') 119 | echo "HCNUMVER=$HCNUMVER" >> "$GITHUB_ENV" 120 | echo "ARG_TESTS=--enable-tests" >> "$GITHUB_ENV" 121 | echo "ARG_BENCH=--enable-benchmarks" >> "$GITHUB_ENV" 122 | echo "HEADHACKAGE=false" >> "$GITHUB_ENV" 123 | echo "ARG_COMPILER=--$HCKIND --with-compiler=$HC" >> "$GITHUB_ENV" 124 | echo "GHCJSARITH=0" >> "$GITHUB_ENV" 125 | env: 126 | HCKIND: ${{ matrix.compilerKind }} 127 | HCNAME: ${{ matrix.compiler }} 128 | HCVER: ${{ matrix.compilerVersion }} 129 | - name: env 130 | run: | 131 | env 132 | - name: write cabal config 133 | run: | 134 | mkdir -p $CABAL_DIR 135 | cat >> $CABAL_CONFIG <> $CABAL_CONFIG < cabal-plan.xz 168 | echo 'f62ccb2971567a5f638f2005ad3173dba14693a45154c1508645c52289714cb2 cabal-plan.xz' | sha256sum -c - 169 | xz -d < cabal-plan.xz > $HOME/.cabal/bin/cabal-plan 170 | rm -f cabal-plan.xz 171 | chmod a+x $HOME/.cabal/bin/cabal-plan 172 | cabal-plan --version 173 | - name: checkout 174 | uses: actions/checkout@v4 175 | with: 176 | path: source 177 | - name: initial cabal.project for sdist 178 | run: | 179 | touch cabal.project 180 | echo "packages: $GITHUB_WORKSPACE/source/." 
>> cabal.project 181 | cat cabal.project 182 | - name: sdist 183 | run: | 184 | mkdir -p sdist 185 | $CABAL sdist all --output-dir $GITHUB_WORKSPACE/sdist 186 | - name: unpack 187 | run: | 188 | mkdir -p unpacked 189 | find sdist -maxdepth 1 -type f -name '*.tar.gz' -exec tar -C $GITHUB_WORKSPACE/unpacked -xzvf {} \; 190 | - name: generate cabal.project 191 | run: | 192 | PKGDIR_influxdb="$(find "$GITHUB_WORKSPACE/unpacked" -maxdepth 1 -type d -regex '.*/influxdb-[0-9.]*')" 193 | echo "PKGDIR_influxdb=${PKGDIR_influxdb}" >> "$GITHUB_ENV" 194 | rm -f cabal.project cabal.project.local 195 | touch cabal.project 196 | touch cabal.project.local 197 | echo "packages: ${PKGDIR_influxdb}" >> cabal.project 198 | echo "package influxdb" >> cabal.project 199 | echo " ghc-options: -Werror=missing-methods" >> cabal.project 200 | cat >> cabal.project <> cabal.project.local 203 | cat cabal.project 204 | cat cabal.project.local 205 | - name: dump install plan 206 | run: | 207 | $CABAL v2-build $ARG_COMPILER $ARG_TESTS $ARG_BENCH --dry-run all 208 | cabal-plan 209 | - name: restore cache 210 | uses: actions/cache/restore@v4 211 | with: 212 | key: ${{ runner.os }}-${{ matrix.compiler }}-${{ github.sha }} 213 | path: ~/.cabal/store 214 | restore-keys: ${{ runner.os }}-${{ matrix.compiler }}- 215 | - name: install dependencies 216 | run: | 217 | $CABAL v2-build $ARG_COMPILER --disable-tests --disable-benchmarks --dependencies-only -j2 all 218 | $CABAL v2-build $ARG_COMPILER $ARG_TESTS $ARG_BENCH --dependencies-only -j2 all 219 | - name: build w/o tests 220 | run: | 221 | $CABAL v2-build $ARG_COMPILER --disable-tests --disable-benchmarks all 222 | - name: build 223 | run: | 224 | $CABAL v2-build $ARG_COMPILER $ARG_TESTS $ARG_BENCH all --write-ghc-environment-files=always 225 | - name: tests 226 | run: | 227 | $CABAL v2-test $ARG_COMPILER $ARG_TESTS $ARG_BENCH all --test-show-details=direct 228 | - name: cabal check 229 | run: | 230 | cd ${PKGDIR_influxdb} || false 231 | ${CABAL} -vnormal check 232 | - name: haddock 233 | run: | 234 | if [ $((HCNUMVER >= 90400)) -ne 0 ] ; then $CABAL v2-haddock --disable-documentation --haddock-all $ARG_COMPILER --with-haddock $HADDOCK $ARG_TESTS $ARG_BENCH all ; fi 235 | - name: unconstrained build 236 | run: | 237 | rm -f cabal.project.local 238 | $CABAL v2-build $ARG_COMPILER --disable-tests --disable-benchmarks all 239 | - name: save cache 240 | uses: actions/cache/save@v4 241 | if: always() 242 | with: 243 | key: ${{ runner.os }}-${{ matrix.compiler }}-${{ github.sha }} 244 | path: ~/.cabal/store 245 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.sublime-* 2 | *~ 3 | .DS_Store 4 | .cabal-sandbox 5 | .ghc.* 6 | /.stack-work/ 7 | /dist-newstyle/ 8 | /dist/ 9 | cabal.project.local 10 | cabal.sandbox.config 11 | stack.yaml.lock 12 | *.hi 13 | -------------------------------------------------------------------------------- /.travis/influxdb.patch: -------------------------------------------------------------------------------- 1 | diff --git a/.travis.yml b/.travis.yml 2 | index c9bac24..2fcb7ab 100644 3 | --- a/.travis.yml 4 | +++ b/.travis.yml 5 | @@ -10,6 +10,9 @@ version: ~> 1.0 6 | language: c 7 | os: linux 8 | dist: xenial 9 | +env: 10 | + global: 11 | + - INFLUXDB_VERSION=1.8.0 12 | git: 13 | # whether to recursively clone submodules 14 | submodules: false 15 | @@ -95,6 +98,9 @@ before_install: 16 | echo " 
f76d08be13e9a61a377a85e2fb63f4c5435d40f8feb3e12eb05905edb8cdea89" >> $CABALHOME/config 17 | echo " key-threshold: 3" >> $CABALHOME/config 18 | fi 19 | + - wget https://dl.influxdata.com/influxdb/releases/influxdb_${INFLUXDB_VERSION}_amd64.deb 20 | + - dpkg -x influxdb_${INFLUXDB_VERSION}_amd64.deb influxdb 21 | + - ./influxdb/usr/bin/influxd & 22 | install: 23 | - ${CABAL} --version 24 | - echo "$(${HC} --version) [$(${HC} --print-project-git-commit-id 2> /dev/null || echo '?')]" 25 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Revision history for influxdb 2 | 3 | ## v1.9.3.2 - 2024-07-12 4 | 5 | * Allow newer dependencies ([101](https://github.com/maoe/influxdb-haskell/pull/101)) 6 | 7 | ## v1.9.3.1 - 2024-03-15 8 | 9 | * Support GHC 9.8 ([#98](https://github.com/maoe/influxdb-haskell/pull/98)) 10 | * Allow latest time package ([#99](https://github.com/maoe/influxdb-haskell/pull/99)) 11 | 12 | ## v1.9.3 - 2023-06-29 13 | 14 | * Mitigate InfluxDB compatibility problems ([#94](https://github.com/maoe/influxdb-haskell/pull/94)) 15 | * Support GHC 9.4 and 9.6 ([#95](https://github.com/maoe/influxdb-haskell/pull/95) and [#97](https://github.com/maoe/influxdb-haskell/pull/97)) 16 | * Revitalize CI ([#96](https://github.com/maoe/influxdb-haskell/pull/96)) 17 | 18 | ## v1.9.2.2 - 2021-11-21 19 | 20 | * Update dependencies 21 | 22 | ## v1.9.2.1 - 2021-10-20 23 | 24 | * Add support for aeson 2.0 ([#89](https://github.com/maoe/influxdb-haskell/pull/89)) 25 | 26 | ## v1.9.2 - 2021-09-08 27 | 28 | * Derive Show for Line ([#87](https://github.com/maoe/influxdb-haskell/pull/87)) 29 | * Relax upper version bound for time ([#88](https://github.com/maoe/influxdb-haskell/pull/88)) 30 | 31 | ## v1.9.1.2 - 2021-03-25 32 | 33 | * Relax upper version bound for attoparsec 34 | 35 | ## v1.9.1.1 - 2021-03-12 36 | 37 | * Support GHC 9.0.1 ([#85](https://github.com/maoe/influxdb-haskell/pull/85)) 38 | 39 | ## v1.9.1 - 2021-02-21 40 | 41 | * Show error on the "impossible" path in writeByteString ([#82](https://github.com/maoe/influxdb-haskell/pull/82)) 42 | * Relax upper version bounds for lens, time, doctest, and bytestring 43 | * Switch from Travis CI to GitHub Actions ([#84](https://github.com/maoe/influxdb-haskell/pull/84)) 44 | 45 | ## v1.9.0 - 2020-07-18 46 | 47 | * Fix `Ignore` and `Empty` to replace the `QueryResults` instance for `Void`. The instance has been deprecated. 48 | * Remove the deprecated `parseResults` method in `QueryResults`. 49 | * Add the `coerceDecoder` method in `QueryResults`. 50 | * Drop support for GHC 8.2 and older because of the use of `EmptyDataDeriving`. 51 | * Update doctest comments with `TypeApplications`. 52 | 53 | ## v1.8.0 - 2020-06-19 54 | 55 | This release reworked the `QueryResuls` type class. There are some breaking changes: 56 | 57 | * `parseResults` has been deprecated. `QueryResults` has now `parseMeasurement` method. 58 | * `Decoder` has been monomorphized so that it can be used with lens. The original `Decoder` type has been renamed to `SomeDecoder`. 59 | * `QueryParams` has now `decoder` field. 60 | * `parseResults` and `parseResultsWith` had been using `lenientDecoder` and it caused some unintuitive behavior ([#64](https://github.com/maoe/influxdb-haskell/issues/64), [#66](https://github.com/maoe/influxdb-haskell/issues/66)). Now they use `strictDecoder` instead. 61 | * `parseErrorObject` now doesn't fail. 
It returns the error message of a response. 62 | * `parseQueryField` which has been deprecated is now deleted. 63 | * `QueryResults` instance for `ShowSeries` was broken. This is fixed. 64 | * The constructor of `Decoder`, `parseResultsWith`, and `parseResultsWithDecoder` have been hidden from the top-level module. They're still available from `Database.InfluxDB.JSON`. 65 | 66 | See [#68](https://github.com/maoe/influxdb-haskell/pull/68/files) for how to migrate your code from v1.7.x to v1.8.x. 67 | 68 | ## v1.7.1.6 - 2020-06-03 69 | 70 | * Relax upper version bound for doctest 71 | 72 | ## v1.7.1.5 - 2020-05-27 73 | 74 | * Relax upper version bound for http-client 75 | 76 | ## v1.7.1.4 - 2020-05-27 77 | 78 | * Relax upper version bound for aeson 79 | * Fix a GHC warning 80 | 81 | ## v1.7.1.3 - 2020-04-03 82 | 83 | * Relax upper version bound for base to support GHC 8.10.1 84 | 85 | ## v1.7.1.2 - 2020-02-08 86 | 87 | * Relax upper version bound for lens 88 | * Fix documentation bugs 89 | * Extend doctests 90 | * Test with GHC 8.8.2 91 | 92 | ## v1.7.1.1 - 2019-09-09 93 | 94 | * Relax upper version bound for lens 95 | 96 | ## v1.7.1 - 2019-07-19 97 | 98 | * Escape backslashes when encoding `Line`s (#75) 99 | 100 | ## v1.7.0 - 2019-05-03 101 | 102 | * Support GHC 8.8.1-alpha1 103 | * The types of `getField` and `getTag` have changed 104 | * Relax upper version bounds for clock and network 105 | 106 | ## v1.6.1.3 - 2019-03-26 107 | 108 | * Drop unused dependency on QuickCheck 109 | 110 | ## v1.6.1.2 - 2019-01-21 111 | 112 | * Relax upper version bound for network 113 | 114 | ## v1.6.1.1 - 2019-01-10 115 | 116 | * Relax upper version bound for http-client 117 | 118 | ## v1.6.1 - 2018-11-20 119 | 120 | * Add secureServer smart constructor for Server 121 | * Test with InfluxDB 1.7.1 and newer GHCs including 8.6.2 122 | * Enhace Haddock comments 123 | 124 | ## v1.6.0.9 - 2018-09-11 125 | 126 | * Relax upper version bound for network 127 | 128 | ## v1.6.0.8 - 2018-09-04 129 | 130 | * Relax upper version bound for QuickCheck 131 | 132 | ## v1.6.0.7 - 2018-07-23 133 | 134 | * Relax upper version bound for base to support GHC 8.6 (#69) 135 | 136 | ## v1.6.0.6 - 2018-07-07 137 | 138 | * Relax upper version bound for lens 139 | 140 | ## v1.6.0.5 - 2018-06-25 141 | 142 | * Relax upper version bound for doctest 143 | 144 | ## v1.6.0.4 - 2018-06-18 145 | 146 | * Relax upper version bound for containers 147 | 148 | ## v1.6.0.3 - 2018-06-11 149 | 150 | * Relax upper version bound for aeson 151 | 152 | ## v1.6.0.2 - 2018-04-29 153 | 154 | * Relax upper version bound for network 155 | 156 | ## v1.6.0.1 - 2018-04-20 157 | 158 | * Relax upper version bound for foldl 159 | 160 | ## v1.6.0 - 2018-04-14 161 | 162 | This release includes a few significant breaking changes. 
163 | 164 | * Deprecate the confusing parseQueryField and re-export parseJSON instead 165 | * Rewrite the QueryResults instances for tuples 166 | * Add Timestamp instance for TimeSpec (#59) 167 | * Extend haddock comments 168 | 169 | ## v1.5.2 - 2018-04-11 170 | 171 | * Export parseResultsWithDecoder, Decoder, lenientDecoder and strictDecoder from Database.InfluxDB 172 | * Extend haddock comments 173 | 174 | ## v1.5.1 - 2018-03-29 175 | 176 | * Add basic auth support for query (#58) 177 | 178 | ## v1.5.0 - 2018-03-15 179 | 180 | * Change UnexpectedResponse constructor to include the request and throw it in place of UserError in query/write/manage 181 | * Relax upper version bound for doctest 182 | * Extend Haddock comments in Database.InfluxDB.Line 183 | 184 | The first item is a breaking change. 185 | 186 | ## v1.4.0 - 2018-03-13 187 | 188 | * Implement proper escaping/quoting for queries ([#54](https://github.com/maoe/influxdb-haskell/pull/54)) 189 | * Relax upper version bound for aeson 190 | * Test against InfluxDB 1.5 191 | 192 | ## v1.3.0.1 - 2018-03-06 193 | 194 | * Relax upper version bounds for doctest and QuickCheck 195 | 196 | ## v1.3.0 - 2018-03-05 197 | 198 | * Relax upper version bound for base ([#51](https://github.com/maoe/influxdb-haskell/pull/51)) 199 | * Implement proper escaping and quoting for special characters ([#51](https://github.com/maoe/influxdb-haskell/pull/51), [#52](https://github.com/maoe/influxdb-haskell/pull/52)) 200 | * Introduce the Measurement type and accompanying functions 201 | * Fix a bug in the HTTP writer where the precision parameter is ignored when constructing requests 202 | * Some minor doctest fixes 203 | 204 | ## v1.2.2.3 - 2018-01-30 205 | 206 | * Relax upper version bounds for http-types, lens and time 207 | 208 | ## v1.2.2.2 - 2017-11-30 209 | 210 | * Relax upper version bounds for http-types and tasty-hunit 211 | 212 | ## v1.2.2.1 - 2017-11-30 213 | 214 | * Relax upper version bound for http-types 215 | 216 | ## v1.2.2 - 2017-06-26 217 | 218 | * A couple of documentation fixes 219 | * Add `Ord` instance for `Server` 220 | 221 | ## v1.2.1 - 2017-06-19 222 | 223 | * Export `formatDatabase` and `formatKey` from `Database.InfluxDB` for convenience 224 | 225 | ## v1.2.0 - 2017-06-19 226 | 227 | There are a lot of breaking changes in this release. The API has been cleaned up 228 | and a lot of Haddock comments are added extensively. 229 | 230 | * The `FieldVal` has been renamed to `Field` which takes `Nullability` as a type parameter. 231 | * `localServer` has been renamed to `defaultServer` 232 | * Some constructors in `InfluxException` have been renamed 233 | * `BadRequest` to `ClientError` 234 | * `IllformedJSON` to `UnexpectedResponse` 235 | * Added a smart constructor `credentials` for `Credentials` 236 | * Dropped `parseTimestamp` and added `parseUTCTime` 237 | * `ping` handles timeout proerply and throws `InfluxException` on failure 238 | * `PingResult` has been renamed to `Pong` and is now an abstract data type. 239 | * `PingParams` has been turned into an abstract data type. 240 | * `waitForLeader` has been renamed to `timeout`. 241 | * `parsekey` has been removed. `getField` and `parseQueryField` can be used instead. 
242 | * Drop support for `http-client < 0.5` 243 | 244 | ## v1.1.2.2 - 2017-05-31 245 | 246 | * Relax upper version bound for foldl 247 | 248 | ## v1.1.2.1 - 2017-05-02 249 | 250 | * Relax version bounds for base and aeson 251 | 252 | ## v1.1.2 - 2017-04-10 253 | 254 | * Tighten lower version bound for base [#43](https://github.com/maoe/influxdb-haskell/issues/43) 255 | * Add `Database.InfluxDB.Format.{string,byteString8}` 256 | 257 | ## v1.1.1 - 2017-03-29 258 | 259 | * Relax unnecessary Traversable constraints to Foldable 260 | 261 | ## v1.1.0 - 2017-03-23 262 | 263 | * Handle empty "values" in parseSeriesBody 264 | 265 | ## v1.0.0 - 2017-03-03 266 | 267 | The library was completely rewritten and support for older InfluxDB has been dropped. 268 | 269 | * Support for InfluxDB 1.2 270 | 271 | ## v0.10.0 - 2016-05-17 272 | 273 | * Fix a typo in a Haddock comment (#28) 274 | * Drop support for retry < 0.7 275 | * Add stack.yml 276 | * Add support for GHC 8.0.1 (#29) 277 | 278 | ## v0.9.1.3 - 2015-06-02 279 | 280 | * Relax upper bound for aeson 281 | 282 | ## v0.9.1.2 - 2015-05-15 283 | 284 | * Relax upper bound for attoparsec 285 | 286 | ## v0.9.1.1 - 2015-03-07 287 | 288 | * Allow retry >= 0.6 && < 0.7 289 | 290 | ## v0.9.1 - 2015-03-07 291 | 292 | * Add `writeSeriesData` 293 | * Relax upper version bound for exceptions 294 | * Drop support for old retry package 295 | * retry < 0.6 had an unexpected behavior wrt exception masking state (https://github.com/Soostone/retry/pull/12) 296 | 297 | ## v0.9.0.1 - 2015-01-06 298 | 299 | * Support for GHC 7.10.1 300 | 301 | ## v0.9.0 - 2014-11-27 302 | 303 | * The `Value` parsers (accidentally) could throw exceptions. It's fixed now. 304 | * Add `fromSeriesData_` which discards parsing errors and returns only successful data 305 | * Remove `listInterfaces` 306 | 307 | ## v0.8.0 - 2014-11-07 308 | 309 | * Retry on connection failure and response timeout in addition to IOException 310 | * Note that this may break existing code silently 311 | 312 | ## v0.7.1.1 - 2014-09-19 313 | 314 | * Relax upper bound for http-client 315 | * Set upper bounds for some packages 316 | 317 | ## v0.7.1 - 2014-09-16 318 | 319 | * Add more lenses 320 | 321 | ## v0.7.0 - 2014-09-12 322 | 323 | * Support for influxdb v0.8 (#15) 324 | * Add shard spaces API 325 | * Add `configureDatabase` 326 | * Add Typeable and Generic instances where missing 327 | * Remove unused `ScheduledDelete` type 328 | 329 | ## v0.6.0 - 2014-08-19 330 | 331 | * Support for retry-0.5 (#16) 332 | * `newServerPoolWithRetrySettings` has been renamed to `newServerPoolWithRetryPolicy` 333 | * `serverRetrySettings` field in `ServerPool` has been renamed to `serverRetryPolicy` 334 | * Support for network-uri (#17) 335 | 336 | ## v0.5.1 - 2014-07-18 337 | 338 | * Export `InfluxException` from `Database.InfluxDB` 339 | 340 | ## v0.5.0 - 2014-07-18 341 | 342 | * Add `InfluxException` type and use it when decoding JSON or SeriesData (#12) 343 | * New API 344 | * `ping` 345 | * `listInterfaces` 346 | * `isInSync` 347 | * BUGFIX: Fix `when expecting a Float, encountered Int instead` error (#14) 348 | 349 | ## v0.4.2 - 2014-06-06 350 | 351 | * Export `newServerPoolWithRetrySettings` from `Database.InfluxDB` 352 | 353 | ## v0.4.1 - 2014-06-05 354 | 355 | * Make retry settings configurable (#5) 356 | 357 | ## v0.4.0 - 2014-06-05 358 | 359 | * Remove `databaseReplicationFactor` field from `Database` type 360 | 361 | ## v0.3.0.1 - 2014-06-04 362 | 363 | * Allow exceptions-0.6 (@JohnLato) 364 | 365 | ## v0.3.0 - 
2014-06-03 366 | 367 | * Support for InfluxDB v0.7 368 | * Renamed `username` field for `/cluster_admins` to `user` 369 | * No support for the old field name 370 | 371 | ## v0.2.2 - 2014-05-08 372 | 373 | * Support for retry-0.4 374 | * Add deleteSeries 375 | * Add authenticateClusterAdmin and authenticateDatabaseUser 376 | 377 | ## v0.2.1.1 - 2014-04-22 378 | 379 | * Bug fix: Treat as integer if base10Exponent is positive 380 | 381 | ## v0.2.1 - 2014-04-22 382 | 383 | * Add `stripPrefixSnake` 384 | 385 | ## v0.2.0.1 - 2014-04-17 386 | 387 | * Drop unnecessary dependency on `scientific` when using old `aeson`. 388 | 389 | ## v0.2.0 - 2014-04-16 390 | 391 | * Add more `FromValue` instances 392 | * Add `(.:?)` and `(.!=)` 393 | * Add `deriveSeriesData` and some variants 394 | * Add left folds for `Stream` type 395 | 396 | ## v0.1.0.1 - 2014-04-07 397 | 398 | * Support for older aeson 399 | * Textual paramters in some functions for convenience 400 | * A lot of bug fixes 401 | 402 | ## v0.0.0 - 2014-04-03 403 | 404 | * Initial release 405 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2014-2020, Mitsutoshi Aoe 2 | 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | * Redistributions in binary form must reproduce the above 12 | copyright notice, this list of conditions and the following 13 | disclaimer in the documentation and/or other materials provided 14 | with the distribution. 15 | 16 | * Neither the name of Mitsutoshi Aoe nor the names of other 17 | contributors may be used to endorse or promote products derived 18 | from this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 21 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 22 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 23 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 24 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 26 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 27 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 28 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
31 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # InfluxDB client library for Haskell 2 | 3 | [![Hackage](https://img.shields.io/hackage/v/influxdb.svg)](https://hackage.haskell.org/package/influxdb) 4 | [![Hackage-Deps](https://img.shields.io/hackage-deps/v/influxdb.svg)](http://packdeps.haskellers.com/feed?needle=influxdb) 5 | [![Haskell-CI](https://github.com/maoe/influxdb-haskell/actions/workflows/haskell-ci.yml/badge.svg?branch=master)](https://github.com/maoe/influxdb-haskell/actions/workflows/haskell-ci.yml) 6 | [![Hackage CI](https://matrix.hackage.haskell.org/api/v2/packages/influxdb/badge)](https://matrix.hackage.haskell.org/package/influxdb) 7 | [![Gitter](https://badges.gitter.im/maoe/influxdb-haskell.svg)](https://gitter.im/maoe/influxdb-haskell?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) 8 | 9 | Currently this library is tested against InfluxDB 1.8. InfluxDB 2 isn't supported (yet). 10 | 11 | ## Getting started 12 | 13 | There is [a quick start guide](https://hackage.haskell.org/package/influxdb/docs/Database-InfluxDB.html) on Hackage. 14 | 15 | ## Running tests 16 | 17 | Either `cabal new-test` or `stack test` runs the doctests in Haddock comments. Note that they need a local running InfluxDB server. 18 | 19 | ## Contact information 20 | 21 | Contributions and bug reports are welcome! 22 | 23 | Please feel free to contact me through github or on [gitter](https://gitter.im/maoe/influxdb-haskell). 24 | -------------------------------------------------------------------------------- /Setup.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE CPP #-} 2 | {-# OPTIONS_GHC -Wall #-} 3 | module Main (main) where 4 | 5 | #ifndef MIN_VERSION_cabal_doctest 6 | #define MIN_VERSION_cabal_doctest(x,y,z) 0 7 | #endif 8 | 9 | #if MIN_VERSION_cabal_doctest(1,0,0) 10 | 11 | import Distribution.Extra.Doctest ( defaultMainWithDoctests ) 12 | main :: IO () 13 | main = defaultMainWithDoctests "doctests" 14 | 15 | #else 16 | 17 | #ifdef MIN_VERSION_Cabal 18 | -- If the macro is defined, we have new cabal-install, 19 | -- but for some reason we don't have cabal-doctest in package-db 20 | -- 21 | -- Probably we are running cabal sdist, when otherwise using new-build 22 | -- workflow 23 | #warning You are configuring this package without cabal-doctest installed. \ 24 | The doctests test-suite will not work as a result. \ 25 | To fix this, install cabal-doctest before configuring. 26 | #endif 27 | 28 | import Distribution.Simple 29 | 30 | main :: IO () 31 | main = defaultMain 32 | 33 | #endif 34 | -------------------------------------------------------------------------------- /cabal.haskell-ci: -------------------------------------------------------------------------------- 1 | -- With GHC = 8.4.4 haddock fails due to a parse error in the vector. 2 | -- https://github.com/haskell/vector/issues/383 3 | -- And also avoid haddock: internal error: /github/home/.cabal/store/ghc-9.2.8/ghc-paths-0.1.0.12-10a0786baf4e4177a0f0c7bce1c1734610faf216f494e40079fc95c99b6f64e6/share/doc/html/doc-index.json 4 | -- https://github.com/haskell/cabal/issues/8104 5 | haddock: >= 9.4 -------------------------------------------------------------------------------- /cabal.project: -------------------------------------------------------------------------------- 1 | packages: . 
2 | 3 | package * 4 | test-show-details: direct 5 | -------------------------------------------------------------------------------- /examples/random-points.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE BangPatterns #-} 2 | {-# LANGUAGE OverloadedStrings #-} 3 | {-# LANGUAGE RecordWildCards #-} 4 | {-# LANGUAGE ScopedTypeVariables #-} 5 | {-# LANGUAGE TypeSynonymInstances #-} 6 | {-# LANGUAGE ViewPatterns #-} 7 | import Data.Foldable 8 | import Data.Traversable 9 | import System.Environment 10 | import System.IO 11 | import Text.Printf (printf) 12 | 13 | import Control.Lens 14 | import Data.Aeson 15 | import Data.Optional (Optional(Default)) 16 | import Data.Time.Clock.POSIX 17 | import System.Random.MWC (Variate(..)) 18 | import qualified Control.Foldl as L 19 | import qualified Data.Map.Strict as Map 20 | import qualified Data.Text as T 21 | import qualified Network.HTTP.Client as HC 22 | import qualified System.Random.MWC as MWC 23 | 24 | import Database.InfluxDB 25 | import qualified Database.InfluxDB.Format as F 26 | import qualified Database.InfluxDB.Manage as M 27 | 28 | oneWeekInSeconds :: Int 29 | oneWeekInSeconds = 7*24*60*60 30 | 31 | main :: IO () 32 | main = do 33 | [read -> (numPoints :: Int), read -> (batches :: Int)] <- getArgs 34 | hSetBuffering stdout NoBuffering 35 | manager' <- HC.newManager managerSettings 36 | 37 | let 38 | ctx = "ctx" 39 | ct1 = "ct1" 40 | qparams = queryParams ctx 41 | & manager .~ Right manager' 42 | & precision .~ RFC3339 43 | 44 | M.manage qparams $ F.formatQuery ("DROP DATABASE "%F.database) ctx 45 | M.manage qparams $ F.formatQuery ("CREATE DATABASE "%F.database) ctx 46 | 47 | let wparams = writeParams ctx & manager .~ Right manager' 48 | 49 | gen <- MWC.create 50 | for_ [1..batches] $ \_ -> do 51 | batch <- for [1..numPoints] $ \_ -> do 52 | !time <- (-) 53 | <$> getPOSIXTime 54 | <*> (fromIntegral <$> uniformR (0, oneWeekInSeconds) gen) 55 | !value <- uniform gen 56 | return (time, value) 57 | writeBatch wparams $ flip map batch $ \(time, value) -> 58 | Line ct1 59 | (Map.fromList []) 60 | (Map.fromList [("value", nameToFVal value)]) 61 | (Just time) 62 | 63 | queryChunked qparams Default (F.formatQuery ("SELECT * FROM "%F.measurement) ct1) $ 64 | L.mapM_ $ traverse_ $ \Row {..} -> 65 | printf "%s:\t%s\n" 66 | (show $ posixSecondsToUTCTime rowTime) 67 | (show rowValue) 68 | 69 | managerSettings :: HC.ManagerSettings 70 | managerSettings = HC.defaultManagerSettings 71 | 72 | data Row = Row 73 | { rowTime :: POSIXTime 74 | , rowValue :: Name 75 | } deriving Show 76 | 77 | instance QueryResults Row where 78 | parseMeasurement prec _ _ columns fields = do 79 | rowTime <- getField "time" columns fields >>= parsePOSIXTime prec 80 | String name <- getField "value" columns fields 81 | rowValue <- case name of 82 | "foo" -> return Foo 83 | "bar" -> return Bar 84 | "baz" -> return Baz 85 | "quu" -> return Quu 86 | "qux" -> return Qux 87 | _ -> fail $ "unknown name: " ++ show name 88 | return Row {..} 89 | 90 | data Name 91 | = Foo 92 | | Bar 93 | | Baz 94 | | Quu 95 | | Qux 96 | deriving (Enum, Bounded, Show) 97 | 98 | nameToFVal :: Name -> LineField 99 | nameToFVal = FieldString . T.toLower . T.pack . show 100 | 101 | instance Variate Name where 102 | uniform = uniformR (minBound, maxBound) 103 | uniformR (lower, upper) g = do 104 | name <- uniformR (fromEnum lower, fromEnum upper) g 105 | return $! 
toEnum name 106 | -------------------------------------------------------------------------------- /examples/write-udp.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DataKinds #-} 2 | {-# LANGUAGE OverloadedLists #-} 3 | {-# LANGUAGE OverloadedStrings #-} 4 | module Main where 5 | import Control.Exception 6 | 7 | import Control.Lens 8 | import Data.Time.Clock 9 | import Network.Socket 10 | 11 | import Database.InfluxDB 12 | import qualified Database.InfluxDB.Write.UDP as UDP 13 | 14 | main :: IO () 15 | main = bracket (socket AF_INET Datagram defaultProtocol) close $ \sock -> do 16 | let localhost = tupleToHostAddress (127, 0, 0, 1) 17 | let params = UDP.writeParams sock $ SockAddrInet 8089 localhost 18 | tags1 = 19 | [ ("tag1", "A") 20 | , ("tag2", "B") 21 | ] 22 | fields1 = 23 | [ ("val1", FieldInt 10) 24 | , ("val2", FieldBool True) 25 | ] 26 | fields2 = 27 | [ ("val1", FieldInt 1) 28 | , ("val2", FieldBool False) 29 | ] 30 | UDP.write params $ 31 | Line "measurement1" tags1 fields1 (Nothing :: Maybe UTCTime) 32 | now <- getCurrentTime 33 | UDP.write 34 | (params & UDP.precision .~ Millisecond) 35 | (Line "measurement1" tags1 fields2 (Just now)) 36 | -------------------------------------------------------------------------------- /hie.yaml: -------------------------------------------------------------------------------- 1 | cradle: 2 | cabal: 3 | - path: "./src" 4 | component: "lib:influxdb" 5 | 6 | - path: "./tests/doctests.hs" 7 | component: "influxdb:test:doctests" 8 | 9 | - path: "./tests/regressions.hs" 10 | component: "influxdb:test:regressions" 11 | 12 | - path: "./examples/random-points.hs" 13 | component: "influxdb:exe:influx-random-points" 14 | 15 | - path: "./examples/write-udp.hs" 16 | component: "influxdb:exe:influx-write-udp" 17 | -------------------------------------------------------------------------------- /influxdb.cabal: -------------------------------------------------------------------------------- 1 | cabal-version: 1.24 2 | name: influxdb 3 | version: 1.9.3.2 4 | synopsis: InfluxDB client library for Haskell 5 | description: 6 | @influxdb@ is an InfluxDB client library for Haskell. 7 | . 8 | See "Database.InfluxDB" for a quick start guide. 
9 | homepage: https://github.com/maoe/influxdb-haskell 10 | license: BSD3 11 | license-file: LICENSE 12 | author: Mitsutoshi Aoe 13 | maintainer: Mitsutoshi Aoe 14 | copyright: Copyright (C) 2014-2024 Mitsutoshi Aoe 15 | category: Database 16 | build-type: Custom 17 | tested-with: 18 | GHC == 8.4.4 19 | GHC == 8.6.5 20 | GHC == 8.8.4 21 | GHC == 8.10.7 22 | GHC == 9.0.2 23 | GHC == 9.2.8 24 | GHC == 9.4.8 25 | GHC == 9.6.6 26 | GHC == 9.8.2 27 | GHC == 9.10.1 28 | 29 | extra-source-files: 30 | README.md 31 | cabal.project 32 | 33 | extra-doc-files: 34 | CHANGELOG.md 35 | 36 | flag examples 37 | description: Build examples 38 | default: False 39 | manual: True 40 | 41 | custom-setup 42 | setup-depends: 43 | base >= 4 && < 5 44 | , Cabal >= 1.24 && < 3.13 45 | , cabal-doctest >= 1 && < 1.1 46 | 47 | library 48 | exposed-modules: 49 | Database.InfluxDB 50 | Database.InfluxDB.Format 51 | Database.InfluxDB.JSON 52 | Database.InfluxDB.Line 53 | Database.InfluxDB.Manage 54 | Database.InfluxDB.Ping 55 | Database.InfluxDB.Query 56 | Database.InfluxDB.Types 57 | Database.InfluxDB.Write 58 | Database.InfluxDB.Write.UDP 59 | other-modules: 60 | Database.InfluxDB.Internal.Text 61 | other-extensions: 62 | BangPatterns 63 | CPP 64 | DataKinds 65 | DeriveDataTypeable 66 | DeriveGeneric 67 | ExistentialQuantification 68 | FlexibleInstances 69 | FunctionalDependencies 70 | GADTs 71 | GeneralizedNewtypeDeriving 72 | KindSignatures 73 | LambdaCase 74 | MultiParamTypeClasses 75 | NamedFieldPuns 76 | OverloadedStrings 77 | RecordWildCards 78 | ScopedTypeVariables 79 | StandaloneDeriving 80 | TemplateHaskell 81 | ViewPatterns 82 | ghc-options: -Wall 83 | build-depends: 84 | base >= 4.11 && < 4.21 85 | , aeson >= 0.7 && < 2.3 86 | , attoparsec < 0.15 87 | , attoparsec-aeson >= 2.1 && < 2.3 88 | , bytestring >= 0.10 && < 0.13 89 | , clock >= 0.7 && < 0.9 90 | , containers >= 0.5 && < 0.8 91 | , foldl < 1.5 92 | , http-client >= 0.5 && < 0.8 93 | , http-types >= 0.8.6 && < 0.13 94 | , lens >= 4.9 && < 5.4 95 | , network >= 2.6 && < 3.3 96 | , optional-args >= 1.0 && < 1.1 97 | , scientific >= 0.3.3 && < 0.4 98 | , tagged >= 0.1 && < 0.9 99 | , text < 2.2 100 | , time >= 1.5 && < 1.15 101 | , unordered-containers < 0.3 102 | , vector >= 0.10 && < 0.14 103 | hs-source-dirs: src 104 | default-language: Haskell2010 105 | 106 | test-suite doctests 107 | type: exitcode-stdio-1.0 108 | main-is: doctests.hs 109 | build-depends: 110 | base 111 | , doctest >= 0.11.3 && < 0.23 112 | , influxdb 113 | , template-haskell < 2.23 114 | ghc-options: -Wall -threaded 115 | hs-source-dirs: tests 116 | default-language: Haskell2010 117 | 118 | test-suite regressions 119 | type: exitcode-stdio-1.0 120 | main-is: regressions.hs 121 | build-depends: 122 | base 123 | , containers 124 | , influxdb 125 | , lens 126 | , tasty < 1.6 127 | , tasty-hunit < 1.11 128 | , time 129 | , raw-strings-qq >= 1.1 && < 1.2 130 | , vector 131 | ghc-options: -Wall -threaded 132 | hs-source-dirs: tests 133 | default-language: Haskell2010 134 | 135 | executable influx-random-points 136 | if !flag(examples) 137 | buildable: False 138 | hs-source-dirs: examples 139 | main-is: random-points.hs 140 | ghc-options: -Wall 141 | build-depends: 142 | aeson 143 | , base 144 | , bytestring 145 | , containers 146 | , foldl >= 1.1.3 147 | , http-client 148 | , influxdb 149 | , lens 150 | , mwc-random 151 | , optional-args 152 | , text 153 | , time 154 | , vector 155 | default-language: Haskell2010 156 | 157 | executable influx-write-udp 158 | if !flag(examples) 159 | 
buildable: False 160 | hs-source-dirs: examples 161 | main-is: write-udp.hs 162 | ghc-options: -Wall 163 | build-depends: 164 | base 165 | , containers 166 | , influxdb 167 | , lens 168 | , network 169 | , time 170 | default-language: Haskell2010 171 | 172 | source-repository head 173 | type: git 174 | branch: develop 175 | location: https://github.com/maoe/influxdb-haskell.git 176 | -------------------------------------------------------------------------------- /scripts/travis.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cabal configure -fexamples --enable-tests --enable-benchmarks --enable-coverage --ghc-options="-Wall -Werror" 4 | cabal build -j 5 | run-cabal-test --cabal-name=cabal --show-details=always 6 | cabal run influx-random-points -- 10 10 7 | cabal check 8 | cabal sdist 9 | export SRC_TGZ=$(cabal info . | awk '{print $2 ".tar.gz";exit}') 10 | pushd dist/ 11 | if [ -f "$SRC_TGZ" ]; then 12 | cabal install "$SRC_TGZ" 13 | else 14 | echo "expected '$SRC_TGZ' not found" 15 | exit 1 16 | fi 17 | popd 18 | -------------------------------------------------------------------------------- /src/Database/InfluxDB.hs: -------------------------------------------------------------------------------- 1 | {- | 2 | stability: experimental 3 | portability: GHC 4 | -} 5 | module Database.InfluxDB 6 | ( -- $intro 7 | 8 | -- * Writing data via HTTP 9 | -- $write 10 | write 11 | , writeBatch 12 | , writeByteString 13 | 14 | -- ** Write parameters 15 | , WriteParams 16 | , writeParams 17 | , retentionPolicy 18 | 19 | -- ** The Line protocol 20 | , Line(Line) 21 | , measurement 22 | , tagSet 23 | , fieldSet 24 | , timestamp 25 | 26 | , Field(..) 27 | , LineField 28 | , QueryField 29 | , Timestamp(..) 30 | , precisionScale 31 | , precisionName 32 | 33 | -- * Querying data 34 | -- $query 35 | , Query 36 | , query 37 | , queryChunked 38 | 39 | -- ** Query construction 40 | -- $query-construction 41 | , F.formatQuery 42 | , (F.%) 43 | 44 | -- ** Query parameters 45 | , QueryParams 46 | , queryParams 47 | , authentication 48 | , decoder 49 | 50 | -- ** Parsing results 51 | , QueryResults(..) 52 | , Decoder 53 | , lenientDecoder 54 | , strictDecoder 55 | 56 | -- ** Helper types and functions 57 | , Ignored 58 | , Empty 59 | , Tagged(..) 60 | , untag 61 | 62 | , getField 63 | , getTag 64 | , parseJSON 65 | , parseUTCTime 66 | , parsePOSIXTime 67 | 68 | 69 | -- * Database management 70 | , manage 71 | 72 | -- * Common data types and classes 73 | , Precision(..) 74 | , Database 75 | , F.formatDatabase 76 | , Measurement 77 | , F.formatMeasurement 78 | , Key 79 | , F.formatKey 80 | 81 | , Server 82 | , defaultServer 83 | , secureServer 84 | , host 85 | , port 86 | , ssl 87 | 88 | , Credentials 89 | , credentials 90 | , user 91 | , password 92 | 93 | -- * Exception 94 | , InfluxException(..) 95 | 96 | , HasServer(..) 97 | , HasDatabase(..) 98 | , HasPrecision(..) 99 | , HasManager(..) 100 | ) where 101 | 102 | import Database.InfluxDB.JSON 103 | import Database.InfluxDB.Line 104 | import Database.InfluxDB.Manage (manage) 105 | import Database.InfluxDB.Query 106 | import Database.InfluxDB.Types 107 | import Database.InfluxDB.Write 108 | import qualified Database.InfluxDB.Format as F 109 | 110 | {- $intro 111 | = Getting started 112 | 113 | This tutorial assumes the following language extensions and imports. 
114 | 115 | >>> :set -XOverloadedStrings 116 | >>> :set -XRecordWildCards 117 | >>> :set -XTypeApplications 118 | >>> import Database.InfluxDB 119 | >>> import qualified Database.InfluxDB.Format as F 120 | >>> import Control.Lens 121 | >>> import qualified Data.Map as Map 122 | >>> import Data.Time 123 | >>> import qualified Data.Vector as V 124 | 125 | The examples below roughly follow the 126 | [README](https://github.com/influxdata/influxdb/blob/0b4528b26de43d5504ec0623c184540f7c3e1a54/client/README.md) 127 | in the official Go client library. 128 | 129 | == Creating a database 130 | 131 | This library assumes the [lens](https://hackage.haskell.org/package/lens) 132 | package in some APIs. Here we use 'Control.Lens.?~' to set the authentication 133 | parameters of type @Maybe 'Credentials'@. 134 | 135 | Also note that in order to construct a 'Query', we use 'F.formatQuery' with the 136 | 'F.database' formatter. There are many other formatters defined in 137 | "Database.InfluxDB.Format". 138 | 139 | >>> let db = "square_holes" 140 | >>> let bubba = credentials "bubba" "bumblebeetuna" 141 | >>> let p = queryParams db & authentication ?~ bubba 142 | >>> manage p $ formatQuery ("DROP DATABASE "%F.database) db 143 | >>> manage p $ formatQuery ("CREATE DATABASE "%F.database) db 144 | 145 | == Writing data 146 | 147 | 'write' or 'writeBatch' can be used to write data. In general 'writeBatch' 148 | should be used for efficiency when writing multiple data points. 149 | 150 | >>> let wp = writeParams db & authentication ?~ bubba & precision .~ Second 151 | >>> let cpuUsage = "cpu_usage" 152 | >>> :{ 153 | writeBatch wp 154 | [ Line @UTCTime cpuUsage (Map.singleton "cpu" "cpu-total") 155 | (Map.fromList 156 | [ ("idle", FieldFloat 10.1) 157 | , ("system", FieldFloat 53.3) 158 | , ("user", FieldFloat 46.6) 159 | ]) 160 | (Just $ parseTimeOrError False defaultTimeLocale 161 | "%F %T%Q %Z" 162 | "2017-06-17 15:41:40.42659044 UTC") 163 | ] 164 | :} 165 | 166 | Note that the type signature of the timestamp is necessary. Otherwise it doesn't 167 | type check. 168 | 169 | == Querying data 170 | 171 | === Using a one-off tuple 172 | 173 | If all the field types are instances of 'Data.Aeson.FromJSON', we can use a 174 | tuple to store the results. 175 | 176 | >>> :set -XDataKinds -XOverloadedStrings -XTypeOperators 177 | >>> type CPUUsage = (Tagged "time" UTCTime, Tagged "idle" Double, Tagged "system" Double, Tagged "user" Double) 178 | >>> v <- query @CPUUsage p $ formatQuery ("SELECT * FROM "%F.measurement) cpuUsage 179 | >>> v 180 | [(Tagged 2017-06-17 15:41:40 UTC,Tagged 10.1,Tagged 53.3,Tagged 46.6)] 181 | 182 | Note that the type signature on 'query' here is also necessary for it to type check. 183 | We can remove the tags using 'untag': 184 | 185 | >>> V.map (\(a, b, c, d) -> (untag a, untag b, untag c, untag d)) v :: V.Vector (UTCTime, Double, Double, Double) 186 | [(2017-06-17 15:41:40 UTC,10.1,53.3,46.6)] 187 | 188 | Or even using 'Data.Coerce.coerce': 189 | 190 | >>> import Data.Coerce 191 | >>> coerce v :: V.Vector (UTCTime, Double, Double, Double) 192 | [(2017-06-17 15:41:40 UTC,10.1,53.3,46.6)] 193 | 194 | === Using a custom data type 195 | 196 | We can define our own data type and write a 'QueryResults' instance 197 | instead. 'getField', 'parseUTCTime' and 'parseJSON' etc. are available to 198 | make it easier to write a JSON decoder. 
199 | 200 | >>> :{ 201 | data CPUUsage = CPUUsage 202 | { time :: UTCTime 203 | , cpuIdle, cpuSystem, cpuUser :: Double 204 | } deriving Show 205 | instance QueryResults CPUUsage where 206 | parseMeasurement prec _name _tags columns fields = do 207 | time <- getField "time" columns fields >>= parseUTCTime prec 208 | cpuIdle <- getField "idle" columns fields >>= parseJSON 209 | cpuSystem <- getField "system" columns fields >>= parseJSON 210 | cpuUser <- getField "user" columns fields >>= parseJSON 211 | return CPUUsage {..} 212 | :} 213 | 214 | >>> query @CPUUsage p $ formatQuery ("SELECT * FROM "%F.measurement) cpuUsage 215 | [CPUUsage {time = 2017-06-17 15:41:40 UTC, cpuIdle = 10.1, cpuSystem = 53.3, cpuUser = 46.6}] 216 | -} 217 | 218 | {- $write 219 | InfluxDB has two ways to write data into it: via HTTP and UDP. This module 220 | only exports functions for the HTTP API. For UDP, see 221 | "Database.InfluxDB.Write.UDP". 222 | -} 223 | 224 | {- $query 225 | 'query' and 'queryChunked' can be used to query data. If your dataset fits in 226 | memory, 'query' is easier to use. If it doesn't, use 'queryChunked' to stream 227 | data. 228 | -} 229 | 230 | {- $query-construction 231 | There are various utility functions available in "Database.InfluxDB.Format". 232 | This module is designed to be imported qualified: 233 | 234 | @ 235 | import "Database.InfluxDB" 236 | import qualified "Database.InfluxDB.Format" as F 237 | @ 238 | -} 239 | -------------------------------------------------------------------------------- /src/Database/InfluxDB/Format.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE GADTs #-} 2 | {-# LANGUAGE LambdaCase #-} 3 | {-# LANGUAGE OverloadedStrings #-} 4 | {-# LANGUAGE TypeOperators #-} 5 | module Database.InfluxDB.Format 6 | ( -- $setup 7 | 8 | -- * The 'Format' type and associated functions 9 | Format 10 | , makeFormat 11 | , (%) 12 | 13 | -- * Formatting functions 14 | , formatQuery 15 | , formatDatabase 16 | , formatMeasurement 17 | , formatKey 18 | 19 | -- * Formatters for various types 20 | , database 21 | , key 22 | , keys 23 | , measurement 24 | , measurements 25 | , field 26 | , decimal 27 | , realFloat 28 | , text 29 | , string 30 | , byteString8 31 | , time 32 | 33 | -- * Utility functions 34 | , fromQuery 35 | ) where 36 | import Control.Category 37 | import Data.Monoid 38 | import Data.String 39 | import Prelude hiding ((.), id) 40 | 41 | import Data.Time 42 | import qualified Data.ByteString as B 43 | import qualified Data.ByteString.Lazy as BL 44 | import qualified Data.ByteString.Builder as BB 45 | import qualified Data.List as L 46 | import qualified Data.Text as T 47 | import qualified Data.Text.Encoding as T 48 | import qualified Data.Text.Lazy as TL 49 | import qualified Data.Text.Lazy.Builder as TL 50 | import qualified Data.Text.Lazy.Builder.Int as TL 51 | import qualified Data.Text.Lazy.Builder.RealFloat as TL 52 | 53 | import Database.InfluxDB.Internal.Text 54 | import Database.InfluxDB.Types hiding (database) 55 | 56 | {- $setup 57 | This module is designed to be imported qualified: 58 | 59 | >>> :set -XOverloadedStrings 60 | >>> import qualified Data.ByteString as B 61 | >>> import qualified Database.InfluxDB.Format as F 62 | -} 63 | 64 | -- | Serialize a 'Query' to a 'B.ByteString'. 65 | fromQuery :: Query -> B.ByteString 66 | fromQuery (Query q) = 67 | BL.toStrict $ BB.toLazyByteString $ T.encodeUtf8Builder q 68 | 69 | -- | A typed format string. 
@Format a r@ means that @a@ is the type of the formatted 70 | -- string, and @r@ is the type of the formatter. 71 | -- 72 | -- >>> :t F.formatQuery 73 | -- F.formatQuery :: F.Format Query r -> r 74 | -- >>> :t F.key 75 | -- F.key :: F.Format r (Key -> r) 76 | -- >>> :t "SELECT * FROM "%F.key 77 | -- "SELECT * FROM "%F.key :: F.Format a (Key -> a) 78 | -- >>> :t F.formatQuery ("SELECT * FROM "%F.key) 79 | -- F.formatQuery ("SELECT * FROM "%F.key) :: Key -> Query 80 | -- >>> F.formatQuery ("SELECT * FROM "%F.key) "series" 81 | -- "SELECT * FROM \"series\"" 82 | newtype Format a r = Format { runFormat :: (TL.Builder -> a) -> r } 83 | 84 | -- | 'Format's can be composed using @('.')@ from "Control.Category". 85 | -- 86 | -- >>> import Control.Category ((.)) 87 | -- >>> import Prelude hiding ((.)) 88 | -- >>> F.formatQuery ("SELECT * FROM " . F.key) "series" 89 | -- "SELECT * FROM \"series\"" 90 | instance Category Format where 91 | id = Format (\k -> k "") 92 | fmt1 . fmt2 = Format $ \k -> 93 | runFormat fmt1 $ \a -> 94 | runFormat fmt2 $ \b -> 95 | k (a <> b) 96 | 97 | -- | With the OverloadedStrings extension, string literals can be used to write 98 | -- queries. 99 | -- 100 | -- >>> "SELECT * FROM series" :: Query 101 | -- "SELECT * FROM series" 102 | instance a ~ r => IsString (Format a r) where 103 | fromString xs = Format $ \k -> k $ fromString xs 104 | 105 | -- | 'Format'-specific synonym of @('.')@. 106 | -- 107 | -- This is typically easier to use than @('.')@ is because it doesn't 108 | -- conflict with @Prelude.(.)@. 109 | (%) :: Format b c -> Format a b -> Format a c 110 | (%) = (.) 111 | 112 | runFormatWith :: (T.Text -> a) -> Format a r -> r 113 | runFormatWith f fmt = runFormat fmt (f . TL.toStrict . TL.toLazyText) 114 | 115 | -- | Format a 'Query'. 116 | -- 117 | -- >>> F.formatQuery "SELECT * FROM series" 118 | -- "SELECT * FROM series" 119 | -- >>> F.formatQuery ("SELECT * FROM "%F.key) "series" 120 | -- "SELECT * FROM \"series\"" 121 | formatQuery :: Format Query r -> r 122 | formatQuery = runFormatWith Query 123 | 124 | -- | Format a 'Database'. 125 | -- 126 | -- >>> F.formatDatabase "test-db" 127 | -- "test-db" 128 | -- >>> F.formatDatabase ("test-db-"%F.decimal) 0 129 | -- "test-db-0" 130 | formatDatabase :: Format Database r -> r 131 | formatDatabase = runFormatWith Database 132 | 133 | -- | Format a 'Measurement'. 134 | -- 135 | -- >>> F.formatMeasurement "test-series" 136 | -- "test-series" 137 | -- >>> F.formatMeasurement ("test-series-"%F.decimal) 0 138 | -- "test-series-0" 139 | formatMeasurement :: Format Measurement r -> r 140 | formatMeasurement = runFormatWith Measurement 141 | 142 | -- | Format a 'Key'. 143 | -- 144 | -- >>> F.formatKey "test-key" 145 | -- "test-key" 146 | -- >>> F.formatKey ("test-key-"%F.decimal) 0 147 | -- "test-key-0" 148 | formatKey :: Format Key r -> r 149 | formatKey fmt = runFormat fmt (Key . TL.toStrict . TL.toLazyText) 150 | 151 | -- | Convenience function to make a custom formatter. 152 | makeFormat :: (a -> TL.Builder) -> Format r (a -> r) 153 | makeFormat build = Format $ \k a -> k $ build a 154 | 155 | doubleQuote :: T.Text -> TL.Builder 156 | doubleQuote name = "\"" <> TL.fromText name <> "\"" 157 | 158 | singleQuote :: T.Text -> TL.Builder 159 | singleQuote name = "'" <> TL.fromText name <> "'" 160 | 161 | identifierBuilder :: T.Text -> TL.Builder 162 | identifierBuilder = doubleQuote . escapeDoubleQuotes 163 | 164 | stringBuilder :: T.Text -> TL.Builder 165 | stringBuilder = singleQuote . 
escapeSingleQuotes 166 | 167 | -- | Format a database name. 168 | -- 169 | -- >>> F.formatQuery ("CREATE DATABASE "%F.database) "test-db" 170 | -- "CREATE DATABASE \"test-db\"" 171 | database :: Format r (Database -> r) 172 | database = makeFormat $ \(Database name) -> identifierBuilder name 173 | 174 | -- | Format an identifier (e.g. field names, tag names, etc). 175 | -- 176 | -- Identifiers in InfluxDB protocol are surrounded with double quotes. 177 | -- 178 | -- >>> F.formatQuery ("SELECT "%F.key%" FROM series") "field" 179 | -- "SELECT \"field\" FROM series" 180 | -- >>> F.formatQuery ("SELECT "%F.key%" FROM series") "foo\"bar" 181 | -- "SELECT \"foo\\\"bar\" FROM series" 182 | key :: Format r (Key -> r) 183 | key = makeFormat $ \(Key name) -> identifierBuilder name 184 | 185 | -- | Format multiple keys. 186 | -- 187 | -- >>> F.formatQuery ("SELECT "%F.keys%" FROM series") ["field1", "field2"] 188 | -- "SELECT \"field1\",\"field2\" FROM series" 189 | keys :: Format r ([Key] -> r) 190 | keys = makeFormat $ 191 | mconcat . L.intersperse "," . map (\(Key name) -> identifierBuilder name) 192 | 193 | -- | Format a measurement. 194 | -- 195 | -- >>> F.formatQuery ("SELECT * FROM "%F.measurement) "test-series" 196 | -- "SELECT * FROM \"test-series\"" 197 | measurement :: Format r (Measurement -> r) 198 | measurement = makeFormat $ \(Measurement name) -> identifierBuilder name 199 | 200 | -- | Format a measurement. 201 | -- 202 | -- >>> F.formatQuery ("SELECT * FROM "%F.measurements) ["series1", "series2"] 203 | -- "SELECT * FROM \"series1\",\"series2\"" 204 | measurements :: Format r ([Measurement] -> r) 205 | measurements = makeFormat $ 206 | mconcat . L.intersperse "," 207 | . map (\(Measurement name) -> identifierBuilder name) 208 | 209 | -- | Format an InfluxDB value. Good for field and tag values. 210 | -- 211 | -- >>> F.formatQuery ("SELECT * FROM series WHERE "%F.key%" = "%F.field) "location" "tokyo" 212 | -- "SELECT * FROM series WHERE \"location\" = 'tokyo'" 213 | field :: Format r (QueryField -> r) 214 | field = makeFormat $ \case 215 | FieldInt n -> TL.decimal n 216 | FieldFloat d -> TL.realFloat d 217 | FieldString s -> stringBuilder s 218 | FieldBool b -> if b then "true" else "false" 219 | FieldNull -> "null" 220 | 221 | -- | Format a decimal number. 222 | -- 223 | -- >>> F.formatQuery ("SELECT * FROM series WHERE time < now() - "%F.decimal%"h") 1 224 | -- "SELECT * FROM series WHERE time < now() - 1h" 225 | decimal :: Integral a => Format r (a -> r) 226 | decimal = makeFormat TL.decimal 227 | 228 | -- | Format a floating-point number. 229 | -- 230 | -- >>> F.formatQuery ("SELECT * FROM series WHERE value > "%F.realFloat) 0.1 231 | -- "SELECT * FROM series WHERE value > 0.1" 232 | realFloat :: RealFloat a => Format r (a -> r) 233 | realFloat = makeFormat TL.realFloat 234 | 235 | -- | Format a text. 236 | -- 237 | -- Note that this doesn't escape the string. Use 'formatKey' to format field 238 | -- values in a query. 239 | -- 240 | -- >>> :t F.formatKey F.text 241 | -- F.formatKey F.text :: T.Text -> Key 242 | text :: Format r (T.Text -> r) 243 | text = makeFormat TL.fromText 244 | 245 | -- | Format a string. 246 | -- 247 | -- Note that this doesn't escape the string. Use 'formatKey' to format field 248 | -- values in a query. 249 | -- 250 | -- >>> :t F.formatKey F.string 251 | -- F.formatKey F.string :: String -> Key 252 | string :: Format r (String -> r) 253 | string = makeFormat TL.fromString 254 | 255 | -- | Format a UTF-8 encoded byte string. 
256 | -- 257 | -- Note that this doesn't escape the string. Use 'formatKey' to format field 258 | -- values in a query. 259 | -- 260 | -- >>> :t F.formatKey F.byteString8 261 | -- F.formatKey F.byteString8 :: B.ByteString -> Key 262 | byteString8 :: Format r (B.ByteString -> r) 263 | byteString8 = makeFormat $ TL.fromText . T.decodeUtf8 264 | 265 | -- | Format a time. 266 | -- 267 | -- >>> import Data.Time 268 | -- >>> let Just t = parseTimeM False defaultTimeLocale "%s" "0" :: Maybe UTCTime 269 | -- >>> F.formatQuery ("SELECT * FROM series WHERE time >= "%F.time) t 270 | -- "SELECT * FROM series WHERE time >= '1970-01-01 00:00:00'" 271 | time :: FormatTime time => Format r (time -> r) 272 | time = makeFormat $ \t -> 273 | "'" <> TL.fromString (formatTime defaultTimeLocale fmt t) <> "'" 274 | where 275 | fmt = "%F %X%Q" -- YYYY-MM-DD HH:MM:SS.nnnnnnnnn 276 | -------------------------------------------------------------------------------- /src/Database/InfluxDB/Internal/Text.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | module Database.InfluxDB.Internal.Text 3 | ( escapeCommas 4 | , escapeEqualSigns 5 | , escapeSpaces 6 | , escapeDoubleQuotes 7 | , escapeSingleQuotes 8 | , escapeBackslashes 9 | ) where 10 | import Data.Text (Text) 11 | import qualified Data.Text as T 12 | 13 | escapeCommas 14 | , escapeEqualSigns 15 | , escapeSpaces 16 | , escapeDoubleQuotes 17 | , escapeSingleQuotes 18 | , escapeBackslashes :: Text -> Text 19 | escapeCommas = T.replace "," "\\," 20 | escapeEqualSigns = T.replace "=" "\\=" 21 | escapeSpaces = T.replace " " "\\ " 22 | escapeDoubleQuotes = T.replace "\"" "\\\"" 23 | escapeSingleQuotes = T.replace "'" "\\'" 24 | escapeBackslashes = T.replace "\\" "\\\\" 25 | -------------------------------------------------------------------------------- /src/Database/InfluxDB/JSON.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE BangPatterns #-} 2 | {-# LANGUAGE ExistentialQuantification #-} 3 | {-# LANGUAGE FlexibleInstances #-} 4 | {-# LANGUAGE GADTs #-} 5 | {-# LANGUAGE OverloadedStrings #-} 6 | {-# LANGUAGE RankNTypes #-} 7 | {-# LANGUAGE RecordWildCards #-} 8 | {-# LANGUAGE ViewPatterns #-} 9 | module Database.InfluxDB.JSON 10 | ( -- * Result parsers 11 | parseResultsWith 12 | , parseResultsWithDecoder 13 | 14 | -- ** Decoder settings 15 | , Decoder(..) 16 | , SomeDecoder(..) 
17 | , strictDecoder 18 | , lenientDecoder 19 | 20 | -- * Getting fields and tags 21 | , getField 22 | , getTag 23 | 24 | -- * Common JSON object parsers 25 | , A.parseJSON 26 | , parseUTCTime 27 | , parsePOSIXTime 28 | , parseRFC3339 29 | -- ** Utility functions 30 | , parseResultsObject 31 | , parseSeriesObject 32 | , parseSeriesBody 33 | , parseErrorObject 34 | ) where 35 | import Control.Applicative 36 | import Control.Exception 37 | import Control.Monad 38 | import Data.Foldable 39 | import Data.Maybe 40 | import Prelude 41 | import qualified Control.Monad.Fail as Fail 42 | 43 | import Data.Aeson 44 | import Data.HashMap.Strict (HashMap) 45 | import Data.Text (Text) 46 | import Data.Time.Clock 47 | import Data.Time.Clock.POSIX 48 | import Data.Time.Format 49 | import Data.Vector (Vector) 50 | import qualified Data.Aeson.Types as A 51 | import qualified Data.HashMap.Strict as HashMap 52 | import qualified Data.Scientific as Sci 53 | import qualified Data.Text as T 54 | import qualified Data.Vector as V 55 | 56 | import Database.InfluxDB.Types 57 | 58 | -- $setup 59 | -- >>> import Data.Maybe 60 | -- >>> import Data.Aeson (decode) 61 | -- >>> import Database.InfluxDB.JSON 62 | -- >>> import qualified Data.Aeson.Types as A 63 | 64 | -- | Parse a JSON response with the 'strictDecoder'. 65 | parseResultsWith 66 | :: (Maybe Text -> HashMap Text Text -> Vector Text -> Array -> A.Parser a) 67 | -- ^ A parser that parses a measurement. A measurement consists of 68 | -- 69 | -- 1. an optional name of the series 70 | -- 2. a map of tags 71 | -- 3. an array of field keys 72 | -- 4. an array of field values 73 | -> Value -- ^ JSON response 74 | -> A.Parser (Vector a) 75 | parseResultsWith = parseResultsWithDecoder strictDecoder 76 | 77 | -- | Parse a JSON response with the specified decoder settings. 78 | parseResultsWithDecoder 79 | :: Decoder 80 | -> (Maybe Text -> HashMap Text Text -> Vector Text -> Array -> A.Parser a) 81 | -- ^ A parser that parses a measurement. A measurement consists of 82 | -- 83 | -- 1. an optional name of the series 84 | -- 2. a map of tags 85 | -- 3. an array of field keys 86 | -- 4. an array of field values 87 | -> Value -- ^ JSON response 88 | -> A.Parser (Vector a) 89 | parseResultsWithDecoder (Decoder SomeDecoder {..}) row val0 = do 90 | r <- foldr1 (<|>) 91 | [ Left <$> parseErrorObject val0 92 | , Right <$> success 93 | ] 94 | case r of 95 | Left err -> fail err 96 | Right vec -> return vec 97 | where 98 | success = do 99 | results <- parseResultsObject val0 100 | 101 | (join -> series) <- V.forM results $ \val -> do 102 | r <- foldr1 (<|>) 103 | [ Left <$> parseErrorObject val 104 | , Right <$> parseSeriesObject val 105 | ] 106 | case r of 107 | Left err -> fail err 108 | Right vec -> return vec 109 | values <- V.forM series $ \val -> do 110 | (name, tags, columns, values) <- parseSeriesBody val 111 | decodeFold $ V.forM values $ A.withArray "values" $ \fields -> do 112 | assert (V.length columns == V.length fields) $ return () 113 | decodeEach $ row name tags columns fields 114 | return $! join values 115 | 116 | -- | A decoder to use when parsing a JSON response. 117 | -- 118 | -- Use 'strictDecoder' if you want to fail the entire decoding process if 119 | -- there's any failure. Use 'lenientDecoder' if you want the decoding process 120 | -- to collect only successful results. 121 | newtype Decoder = Decoder (forall a. SomeDecoder a) 122 | 123 | -- | @'SomeDecoder' a@ represents how to decode a JSON response given a row 124 | -- parser of type @'A.Parser' a@. 
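--
-- As an illustration, suppose @resp :: Value@ is a query response in which one
-- row has a malformed @time@ column, and @rowTime@ is a row parser that needs
-- that column (both names are hypothetical). The two stock decoders then
-- behave differently (a sketch):
--
-- @
-- rowTime :: Maybe Text -> HashMap Text Text -> Vector Text -> Array -> A.Parser UTCTime
-- rowTime _name _tags columns fields =
--   getField "time" columns fields >>= parseUTCTime RFC3339
--
-- A.parse (parseResultsWithDecoder strictDecoder  rowTime) resp -- fails as a whole
-- A.parse (parseResultsWithDecoder lenientDecoder rowTime) resp -- keeps only the good rows
-- @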
125 | data SomeDecoder a = forall b. SomeDecoder 126 | { decodeEach :: A.Parser a -> A.Parser b 127 | -- ^ How to decode each row. 128 | -- 129 | -- For example 'optional' can be used to turn parse 130 | -- failrues into 'Nothing's. 131 | , decodeFold :: A.Parser (Vector b) -> A.Parser (Vector a) 132 | -- ^ How to aggregate rows into the resulting vector. 133 | -- 134 | -- For example when @b ~ 'Maybe' a@, one way to aggregate the values is to 135 | -- return only 'Just's. 136 | } 137 | 138 | -- | A decoder that fails immediately if there's any parse failure. 139 | -- 140 | -- 'strictDecoder' is defined as follows: 141 | -- 142 | -- @ 143 | -- strictDecoder :: Decoder 144 | -- strictDecoder = Decoder $ SomeDecoder 145 | -- { decodeEach = id 146 | -- , decodeFold = id 147 | -- } 148 | -- @ 149 | strictDecoder :: Decoder 150 | strictDecoder = Decoder $ SomeDecoder 151 | { decodeEach = id 152 | , decodeFold = id 153 | } 154 | 155 | -- | A decoder that ignores parse failures and returns only successful results. 156 | lenientDecoder :: Decoder 157 | lenientDecoder = Decoder $ SomeDecoder 158 | { decodeEach = optional 159 | , decodeFold = \p -> do 160 | bs <- p 161 | return $! V.map fromJust $ V.filter isJust bs 162 | } 163 | 164 | -- | Get a field value from a column name 165 | getField 166 | :: Fail.MonadFail m 167 | => Text -- ^ Column name 168 | -> Vector Text -- ^ Columns 169 | -> Vector Value -- ^ Field values 170 | -> m Value 171 | getField column columns fields = 172 | case V.elemIndex column columns of 173 | Nothing -> Fail.fail $ "getField: no such column " ++ show column 174 | Just idx -> case V.indexM fields idx of 175 | Nothing -> Fail.fail $ "getField: index out of bound for " ++ show column 176 | Just field -> return field 177 | 178 | -- | Get a tag value from a tag name 179 | getTag 180 | :: Fail.MonadFail m 181 | => Text -- ^ Tag name 182 | -> HashMap Text Value -- ^ Tags 183 | -> m Value 184 | getTag tag tags = case HashMap.lookup tag tags of 185 | Nothing -> Fail.fail $ "getTag: no such tag " ++ show tag 186 | Just val -> return val 187 | 188 | -- | Parse a result response. 189 | parseResultsObject :: Value -> A.Parser (Vector A.Value) 190 | parseResultsObject = A.withObject "results" $ \obj -> obj .: "results" 191 | 192 | -- | Parse a series response. 193 | parseSeriesObject :: Value -> A.Parser (Vector A.Value) 194 | parseSeriesObject = A.withObject "series" $ \obj -> 195 | fromMaybe V.empty <$> obj .:? "series" 196 | 197 | -- | Parse the common JSON structure used in query responses. 198 | parseSeriesBody 199 | :: Value 200 | -> A.Parser (Maybe Text, HashMap Text Text, Vector Text, Array) 201 | parseSeriesBody = A.withObject "series" $ \obj -> do 202 | !name <- obj .:? "name" 203 | !columns <- obj .: "columns" 204 | !values <- obj .:? "values" .!= V.empty 205 | !tags <- obj .:? "tags" .!= HashMap.empty 206 | return (name, tags, columns, values) 207 | 208 | -- | Parse the common JSON structure used in failure response. 209 | -- >>> A.parse parseErrorObject $ fromJust $ decode "{ \"error\": \"custom error\" }" 210 | -- Success "custom error" 211 | -- >>> A.parse parseErrorObject $ fromJust $ decode "{ \"message\": \"custom error\" }" 212 | -- Success "custom error" 213 | parseErrorObject :: A.Value -> A.Parser String 214 | parseErrorObject = A.withObject "error" $ \obj -> obj .: "error" <|> obj .: "message" 215 | 216 | -- | Parse either a POSIX timestamp or RFC3339 formatted timestamp as 'UTCTime'. 
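--
-- A sketch of the two input shapes it accepts (with 'Data.Aeson.Types'
-- imported as @A@ as in this module; 'Second' and 'RFC3339' come from
-- "Database.InfluxDB.Types"):
--
-- @
-- A.parseMaybe (parseUTCTime Second) (A.Number 0)
-- -- Just 1970-01-01 00:00:00 UTC
-- A.parseMaybe (parseUTCTime RFC3339) (A.String "1970-01-01T00:00:00Z")
-- -- Just 1970-01-01 00:00:00 UTC
-- @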
217 | parseUTCTime :: Precision ty -> A.Value -> A.Parser UTCTime 218 | parseUTCTime prec val = case prec of 219 | RFC3339 -> parseRFC3339 val 220 | _ -> posixSecondsToUTCTime <$!> parsePOSIXTime prec val 221 | 222 | -- | Parse either a POSIX timestamp or RFC3339 formatted timestamp as 223 | -- 'POSIXTime'. 224 | parsePOSIXTime :: Precision ty -> A.Value -> A.Parser POSIXTime 225 | parsePOSIXTime prec val = case prec of 226 | RFC3339 -> utcTimeToPOSIXSeconds <$!> parseRFC3339 val 227 | _ -> A.withScientific err 228 | (\s -> case timestampToUTC s of 229 | Nothing -> A.typeMismatch err val 230 | Just !utc -> return utc) 231 | val 232 | where 233 | err = "POSIX timestamp in " ++ T.unpack (precisionName prec) 234 | timestampToUTC s = do 235 | n <- Sci.toBoundedInteger s 236 | return $! fromIntegral (n :: Int) * precisionScale prec 237 | 238 | -- | Parse a RFC3339-formatted timestamp. 239 | -- 240 | -- Note that this parser is slow as it converts a 'T.Text' input to a 241 | -- 'Prelude.String' before parsing. 242 | parseRFC3339 :: ParseTime time => A.Value -> A.Parser time 243 | parseRFC3339 val = A.withText err 244 | (maybe (A.typeMismatch err val) (return $!) 245 | . parseTimeM True defaultTimeLocale fmt 246 | . T.unpack) 247 | val 248 | where 249 | fmt, err :: String 250 | fmt = "%FT%X%QZ" 251 | err = "RFC3339-formatted timestamp" 252 | -------------------------------------------------------------------------------- /src/Database/InfluxDB/Line.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE KindSignatures #-} 2 | {-# LANGUAGE LambdaCase #-} 3 | {-# LANGUAGE OverloadedStrings #-} 4 | {-# LANGUAGE RecordWildCards #-} 5 | {-# LANGUAGE ScopedTypeVariables #-} 6 | {-# LANGUAGE TemplateHaskell #-} 7 | module Database.InfluxDB.Line 8 | ( -- $setup 9 | 10 | -- * Types and accessors 11 | Line(Line) 12 | , measurement 13 | , tagSet 14 | , fieldSet 15 | , timestamp 16 | 17 | -- * Serializers 18 | , buildLine 19 | , buildLines 20 | , encodeLine 21 | , encodeLines 22 | 23 | -- * Other types 24 | , LineField 25 | , Field(..) 26 | , Precision(..) 27 | ) where 28 | import Data.List (intersperse) 29 | import Data.Int (Int64) 30 | import Data.Monoid 31 | import Prelude 32 | 33 | import Control.Lens 34 | import Data.Map (Map) 35 | import Data.Text (Text) 36 | import qualified Data.ByteString.Builder as B 37 | import qualified Data.ByteString.Lazy as L 38 | import qualified Data.Map.Strict as Map 39 | import qualified Data.Text.Encoding as TE 40 | 41 | import Database.InfluxDB.Internal.Text 42 | import Database.InfluxDB.Types 43 | 44 | {- $setup 45 | The Line protocol implementation. 46 | 47 | >>> :set -XOverloadedStrings 48 | >>> import Data.Time 49 | >>> import Database.InfluxDB.Line 50 | >>> import System.IO (stdout) 51 | >>> import qualified Data.ByteString as B 52 | >>> import qualified Data.ByteString.Builder as B 53 | >>> import qualified Data.ByteString.Lazy.Char8 as BL8 54 | >>> :{ 55 | let l1 = Line "cpu_usage" 56 | (Map.singleton "cpu" "cpu-total") 57 | (Map.fromList 58 | [ ("idle", FieldFloat 10.1) 59 | , ("system", FieldFloat 53.3) 60 | , ("user", FieldFloat 46.6) 61 | ]) 62 | (Just $ parseTimeOrError False defaultTimeLocale 63 | "%F %T%Q %Z" 64 | "2017-06-17 15:41:40.42659044 UTC") :: Line UTCTime 65 | :} 66 | -} 67 | 68 | -- | Placeholder for the Line Protocol 69 | -- 70 | -- See https://docs.influxdata.com/influxdb/v1.7/write_protocols/line_protocol_tutorial/ for the 71 | -- concrete syntax. 
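--
-- Besides the 'Line' constructor shown in the setup above, the lenses defined
-- at the bottom of this module can be used to build a point incrementally.
-- A sketch (the measurement, tag and field names are illustrative):
--
-- @
-- l :: Line UTCTime
-- l = Line "disk_free" Map.empty (Map.singleton "value" (FieldInt 442221834240)) Nothing
--       & tagSet . at "hostname" ?~ "server01"
-- @
--
-- Leaving the timestamp at 'Nothing' lets the server assign the time at which
-- it receives the point.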
72 | data Line time = Line 73 | { _measurement :: !Measurement 74 | -- ^ Measurement name 75 | , _tagSet :: !(Map Key Key) 76 | -- ^ Set of tags (optional) 77 | , _fieldSet :: !(Map Key LineField) 78 | -- ^ Set of fields 79 | -- 80 | -- It shouldn't be empty. 81 | , _timestamp :: !(Maybe time) 82 | -- ^ Timestamp (optional) 83 | } deriving Show 84 | 85 | -- | Serialize a 'Line' to a lazy bytestring 86 | -- 87 | -- >>> BL8.putStrLn $ encodeLine (scaleTo Second) l1 88 | -- cpu_usage,cpu=cpu-total idle=10.1,system=53.3,user=46.6 1497714100 89 | encodeLine 90 | :: (time -> Int64) 91 | -- ^ Function to convert time to an InfluxDB timestamp 92 | -- 93 | -- Use 'scaleTo' for HTTP writes and 'roundTo' for UDP writes. 94 | -> Line time 95 | -> L.ByteString 96 | encodeLine toTimestamp = B.toLazyByteString . buildLine toTimestamp 97 | 98 | -- | Serialize 'Line's to a lazy bytestring 99 | -- 100 | -- >>> BL8.putStr $ encodeLines (scaleTo Second) [l1, l1] 101 | -- cpu_usage,cpu=cpu-total idle=10.1,system=53.3,user=46.6 1497714100 102 | -- cpu_usage,cpu=cpu-total idle=10.1,system=53.3,user=46.6 1497714100 103 | -- 104 | encodeLines 105 | :: Foldable f 106 | => (time -> Int64) 107 | -- ^ Function to convert time to an InfluxDB timestamp 108 | -- 109 | -- Use 'scaleTo' for HTTP writes and 'roundTo' for UDP writes. 110 | -> f (Line time) 111 | -> L.ByteString 112 | encodeLines toTimestamp = B.toLazyByteString . buildLines toTimestamp 113 | 114 | -- | Serialize a 'Line' to a bytestring 'B.Buider' 115 | -- 116 | -- >>> B.hPutBuilder stdout $ buildLine (scaleTo Second) l1 117 | -- cpu_usage,cpu=cpu-total idle=10.1,system=53.3,user=46.6 1497714100 118 | buildLine 119 | :: (time -> Int64) 120 | -> Line time 121 | -> B.Builder 122 | buildLine toTimestamp Line {..} = 123 | key <> " " <> fields <> maybe "" (" " <>) timestamp 124 | where 125 | measurement = TE.encodeUtf8Builder $ escapeMeasurement _measurement 126 | tags = buildMap (TE.encodeUtf8Builder . escapeKey) _tagSet 127 | key = if Map.null _tagSet 128 | then measurement 129 | else measurement <> "," <> tags 130 | fields = buildMap buildFieldValue _fieldSet 131 | timestamp = B.int64Dec . toTimestamp <$> _timestamp 132 | buildMap encodeVal = 133 | mconcat . intersperse "," . map encodeKeyVal . Map.toList 134 | where 135 | encodeKeyVal (name, val) = mconcat 136 | [ TE.encodeUtf8Builder $ escapeKey name 137 | , "=" 138 | , encodeVal val 139 | ] 140 | 141 | escapeKey :: Key -> Text 142 | escapeKey (Key text) = escapeCommas $ escapeEqualSigns $ escapeSpaces text 143 | 144 | escapeMeasurement :: Measurement -> Text 145 | escapeMeasurement (Measurement text) = escapeCommas $ escapeSpaces text 146 | 147 | escapeStringField :: Text -> Text 148 | escapeStringField = escapeDoubleQuotes . escapeBackslashes 149 | 150 | buildFieldValue :: LineField -> B.Builder 151 | buildFieldValue = \case 152 | FieldInt i -> B.int64Dec i <> "i" 153 | FieldFloat d -> B.doubleDec d 154 | FieldString t -> "\"" <> TE.encodeUtf8Builder (escapeStringField t) <> "\"" 155 | FieldBool b -> if b then "true" else "false" 156 | 157 | -- | Serialize 'Line's to a bytestring 'B.Builder' 158 | -- 159 | -- >>> B.hPutBuilder stdout $ buildLines (scaleTo Second) [l1, l1] 160 | -- cpu_usage,cpu=cpu-total idle=10.1,system=53.3,user=46.6 1497714100 161 | -- cpu_usage,cpu=cpu-total idle=10.1,system=53.3,user=46.6 1497714100 162 | -- 163 | buildLines 164 | :: Foldable f 165 | => (time -> Int64) 166 | -> f (Line time) 167 | -> B.Builder 168 | buildLines toTimestamp = foldMap ((<> "\n") . 
buildLine toTimestamp) 169 | 170 | makeLensesWith (lensRules & generateSignatures .~ False) ''Line 171 | 172 | -- | Name of the measurement that you want to write your data to. 173 | measurement :: Lens' (Line time) Measurement 174 | 175 | -- | Tag(s) that you want to include with your data point. Tags are optional in 176 | -- the Line Protocol, so you can set it 'Control.Applicative.empty'. 177 | tagSet :: Lens' (Line time) (Map Key Key) 178 | 179 | -- | Field(s) for your data point. Every data point requires at least one field 180 | -- in the Line Protocol, so it shouldn't be 'Control.Applicative.empty'. 181 | fieldSet :: Lens' (Line time) (Map Key LineField) 182 | 183 | -- | Timestamp for your data point. You can put whatever type of timestamp that 184 | -- is an instance of the 'Timestamp' class. 185 | timestamp :: Lens' (Line time) (Maybe time) 186 | -------------------------------------------------------------------------------- /src/Database/InfluxDB/Manage.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE BangPatterns #-} 2 | {-# LANGUAGE CPP #-} 3 | {-# LANGUAGE OverloadedStrings #-} 4 | {-# LANGUAGE TemplateHaskell #-} 5 | {-# LANGUAGE TypeApplications #-} 6 | {-# LANGUAGE ViewPatterns #-} 7 | {-# LANGUAGE RecordWildCards #-} 8 | {-# LANGUAGE ScopedTypeVariables #-} 9 | #if __GLASGOW_HASKELL__ >= 800 10 | {-# OPTIONS_GHC -Wno-missing-signatures #-} 11 | #else 12 | {-# OPTIONS_GHC -fno-warn-missing-signatures #-} 13 | #endif 14 | module Database.InfluxDB.Manage 15 | ( -- * Management query interface 16 | Query 17 | , manage 18 | 19 | -- * Query parameters 20 | , QueryParams 21 | , queryParams 22 | , server 23 | , database 24 | , precision 25 | , manager 26 | 27 | -- * Management query results 28 | -- ** SHOW QUERIES 29 | , ShowQuery 30 | , qid 31 | , queryText 32 | , duration 33 | 34 | -- ** SHOW SERIES 35 | , ShowSeries 36 | , key 37 | ) where 38 | import Control.Exception 39 | import Control.Monad 40 | 41 | import Control.Lens 42 | import Data.Aeson (Value(..), eitherDecode', encode, parseJSON) 43 | import Data.Scientific (toBoundedInteger) 44 | import Data.Text (Text) 45 | import Data.Time.Clock 46 | import qualified Data.Aeson.Types as A 47 | import qualified Data.Attoparsec.Combinator as AC 48 | import qualified Data.Attoparsec.Text as AT 49 | import qualified Data.Text.Encoding as TE 50 | import qualified Data.Vector as V 51 | import qualified Network.HTTP.Client as HC 52 | import qualified Network.HTTP.Types as HT 53 | 54 | import Database.InfluxDB.JSON (getField) 55 | import Database.InfluxDB.Types as Types 56 | import Database.InfluxDB.Query hiding (query) 57 | import qualified Database.InfluxDB.Format as F 58 | 59 | -- $setup 60 | -- >>> :set -XOverloadedStrings 61 | -- >>> import Database.InfluxDB.Query 62 | -- >>> import Database.InfluxDB.Format ((%)) 63 | -- >>> import Database.InfluxDB.Manage 64 | 65 | -- | Send a database management query to InfluxDB. 66 | -- 67 | -- >>> let db = "manage-test" 68 | -- >>> let p = queryParams db 69 | -- >>> manage p $ F.formatQuery ("CREATE DATABASE "%F.database) db 70 | manage :: QueryParams -> Query -> IO () 71 | manage params q = do 72 | manager' <- either HC.newManager return $ params^.manager 73 | response <- HC.httpLbs request manager' `catch` (throwIO . 
HTTPException) 74 | let body = HC.responseBody response 75 | case eitherDecode' body of 76 | Left message -> 77 | throwIO $ UnexpectedResponse message request body 78 | Right val -> do 79 | let parser = parseQueryResultsWith 80 | (params ^. decoder) 81 | (params ^. precision) 82 | case A.parse parser val of 83 | A.Success (_ :: V.Vector Empty) -> return () 84 | A.Error message -> do 85 | let status = HC.responseStatus response 86 | when (HT.statusIsServerError status) $ 87 | throwIO $ ServerError message 88 | when (HT.statusIsClientError status) $ 89 | throwIO $ ClientError message request 90 | throwIO $ UnexpectedResponse 91 | ("BUG: " ++ message ++ " in Database.InfluxDB.Manage.manage") 92 | request 93 | (encode val) 94 | where 95 | request = HC.setQueryString qs $ manageRequest params 96 | qs = 97 | [ ("q", Just $ F.fromQuery q) 98 | ] 99 | 100 | manageRequest :: QueryParams -> HC.Request 101 | manageRequest params = HC.defaultRequest 102 | { HC.host = TE.encodeUtf8 _host 103 | , HC.port = fromIntegral _port 104 | , HC.secure = _ssl 105 | , HC.method = "POST" 106 | , HC.path = "/query" 107 | } 108 | where 109 | Server {..} = params^.server 110 | 111 | -- | 112 | -- >>> v <- query @ShowQuery (queryParams "_internal") "SHOW QUERIES" 113 | data ShowQuery = ShowQuery 114 | { showQueryQid :: !Int 115 | , showQueryText :: !Query 116 | , showQueryDatabase :: !Database 117 | , showQueryDuration :: !NominalDiffTime 118 | } 119 | 120 | instance QueryResults ShowQuery where 121 | parseMeasurement _ _ _ columns fields = 122 | maybe (fail "parseResults: parse error") return $ do 123 | Number (toBoundedInteger -> Just showQueryQid) <- 124 | getField "qid" columns fields 125 | String (F.formatQuery F.text -> showQueryText) <- 126 | getField "query" columns fields 127 | String (F.formatDatabase F.text -> showQueryDatabase) <- 128 | getField "database" columns fields 129 | String (parseDuration -> Right showQueryDuration) <- 130 | getField "duration" columns fields 131 | return ShowQuery {..} 132 | 133 | parseDuration :: Text -> Either String NominalDiffTime 134 | parseDuration = AT.parseOnly duration 135 | where 136 | duration = (*) 137 | <$> fmap (fromIntegral @Int) AT.decimal 138 | <*> unit 139 | unit = AC.choice 140 | [ 10^^(-6 :: Int) <$ AT.string "µs" 141 | , 1 <$ AT.char 's' 142 | , 60 <$ AT.char 'm' 143 | , 3600 <$ AT.char 'h' 144 | ] 145 | 146 | newtype ShowSeries = ShowSeries 147 | { _key :: Key 148 | } 149 | 150 | instance QueryResults ShowSeries where 151 | parseMeasurement _ _ _ columns fields = do 152 | name <- getField "key" columns fields >>= parseJSON 153 | return $ ShowSeries $ F.formatKey F.text name 154 | 155 | makeLensesWith 156 | ( lensRules 157 | & generateSignatures .~ False 158 | & lensField .~ lookingupNamer 159 | [ ("showQueryQid", "qid") 160 | , ("showQueryText", "queryText") 161 | , ("showQueryDatabase", "_database") 162 | , ("showQueryDuration", "duration") 163 | ] 164 | ) ''ShowQuery 165 | 166 | -- | Query ID 167 | -- 168 | -- >>> v <- query @ShowQuery (queryParams "_internal") "SHOW QUERIES" 169 | -- >>> v ^.. each.qid 170 | -- ... 171 | qid :: Lens' ShowQuery Int 172 | 173 | -- | Query text 174 | -- 175 | -- >>> v <- query @ShowQuery (queryParams "_internal") "SHOW QUERIES" 176 | -- >>> v ^.. each.queryText 177 | -- ... 178 | queryText :: Lens' ShowQuery Query 179 | 180 | -- | 181 | -- >>> v <- query @ShowQuery (queryParams "_internal") "SHOW QUERIES" 182 | -- >>> v ^.. each.database 183 | -- ... 
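--
-- Combined with 'manage', the output of @SHOW QUERIES@ can be used to kill
-- long-running queries by ID. A sketch (the 60-second threshold is arbitrary
-- and '%' comes from "Database.InfluxDB.Format"):
--
-- @
-- killSlowQueries :: IO ()
-- killSlowQueries = do
--   qs <- query p "SHOW QUERIES" :: IO (V.Vector ShowQuery)
--   forM_ qs kill
--   where
--     p = queryParams "_internal"
--     kill q = when (q ^. duration > 60) $
--       manage p $ F.formatQuery ("KILL QUERY "%F.decimal) (q ^. qid)
-- @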
184 | instance HasDatabase ShowQuery where 185 | database = _database 186 | 187 | -- | Duration of the query 188 | -- 189 | -- >>> v <- query @ShowQuery (queryParams "_internal") "SHOW QUERIES" 190 | -- >>> v ^.. each.duration 191 | -- ... 192 | duration :: Lens' ShowQuery NominalDiffTime 193 | 194 | makeLensesWith (lensRules & generateSignatures .~ False) ''ShowSeries 195 | 196 | -- | Series name 197 | -- 198 | -- >>> v <- query @ShowSeries (queryParams "_internal") "SHOW SERIES" 199 | -- >>> length $ v ^.. each.key 200 | -- ... 201 | key :: Lens' ShowSeries Key 202 | -------------------------------------------------------------------------------- /src/Database/InfluxDB/Ping.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE CPP #-} 2 | {-# LANGUAGE OverloadedStrings #-} 3 | {-# LANGUAGE RecordWildCards #-} 4 | {-# LANGUAGE TemplateHaskell #-} 5 | #if __GLASGOW_HASKELL__ >= 800 6 | {-# OPTIONS_GHC -Wno-missing-signatures #-} 7 | #else 8 | {-# OPTIONS_GHC -fno-warn-missing-signatures #-} 9 | #endif 10 | module Database.InfluxDB.Ping 11 | ( -- * Ping interface 12 | ping 13 | 14 | -- * Ping parameters 15 | , PingParams 16 | , pingParams 17 | , server 18 | , manager 19 | , timeout 20 | 21 | -- * Pong 22 | , Pong 23 | , roundtripTime 24 | , influxdbVersion 25 | ) where 26 | import Control.Exception 27 | 28 | import Control.Lens 29 | import Data.Time.Clock (NominalDiffTime) 30 | import System.Clock 31 | import qualified Data.ByteString as BS 32 | import qualified Data.Text.Encoding as TE 33 | import qualified Network.HTTP.Client as HC 34 | 35 | import Database.InfluxDB.Types as Types 36 | 37 | -- $setup 38 | -- >>> import Database.InfluxDB.Ping 39 | 40 | -- Ping requests do not require authentication 41 | -- | The full set of parameters for the ping API 42 | -- 43 | -- Following lenses are available to access its fields: 44 | -- 45 | -- * 'server' 46 | -- * 'manager' 47 | -- * 'timeout' 48 | data PingParams = PingParams 49 | { pingServer :: !Server 50 | , pingManager :: !(Either HC.ManagerSettings HC.Manager) 51 | -- ^ HTTP connection manager 52 | , pingTimeout :: !(Maybe NominalDiffTime) 53 | -- ^ Timeout 54 | } 55 | 56 | -- | Smart constructor for 'PingParams' 57 | -- 58 | -- Default parameters: 59 | -- 60 | -- ['server'] 'defaultServer' 61 | -- ['manager'] @'Left' 'HC.defaultManagerSettings'@ 62 | -- ['timeout'] 'Nothing' 63 | pingParams :: PingParams 64 | pingParams = PingParams 65 | { pingServer = defaultServer 66 | , pingManager = Left HC.defaultManagerSettings 67 | , pingTimeout = Nothing 68 | } 69 | 70 | makeLensesWith 71 | ( lensRules 72 | & generateSignatures .~ False 73 | & lensField .~ lookingupNamer 74 | [ ("pingServer", "_server") 75 | , ("pingManager", "_manager") 76 | , ("pingTimeout", "timeout") 77 | ] 78 | ) 79 | ''PingParams 80 | 81 | -- | 82 | -- >>> pingParams ^. server.host 83 | -- "localhost" 84 | instance HasServer PingParams where 85 | server = _server 86 | 87 | -- | 88 | -- >>> let p = pingParams & manager .~ Left HC.defaultManagerSettings 89 | instance HasManager PingParams where 90 | manager = _manager 91 | 92 | -- | The number of seconds to wait before returning a response 93 | -- 94 | -- >>> pingParams ^. 
timeout 95 | -- Nothing 96 | -- >>> let p = pingParams & timeout ?~ 1 97 | timeout :: Lens' PingParams (Maybe NominalDiffTime) 98 | 99 | pingRequest :: PingParams -> HC.Request 100 | pingRequest PingParams {..} = HC.defaultRequest 101 | { HC.host = TE.encodeUtf8 _host 102 | , HC.port = fromIntegral _port 103 | , HC.secure = _ssl 104 | , HC.method = "GET" 105 | , HC.path = "/ping" 106 | } 107 | where 108 | Server {..} = pingServer 109 | 110 | -- | Response of a ping request 111 | data Pong = Pong 112 | { _roundtripTime :: !TimeSpec 113 | -- ^ Round-trip time of the ping 114 | , _influxdbVersion :: !BS.ByteString 115 | -- ^ Version string returned by InfluxDB 116 | } deriving (Show, Eq, Ord) 117 | 118 | makeLensesWith (lensRules & generateSignatures .~ False) ''Pong 119 | 120 | -- | Round-trip time of the ping 121 | roundtripTime :: Lens' Pong TimeSpec 122 | 123 | -- | Version string returned by InfluxDB 124 | influxdbVersion :: Lens' Pong BS.ByteString 125 | 126 | -- | Send a ping to InfluxDB. 127 | -- 128 | -- It may throw an 'InfluxException'. 129 | ping :: PingParams -> IO Pong 130 | ping params = do 131 | manager' <- either HC.newManager return $ pingManager params 132 | startTime <- getTimeMonotonic 133 | HC.withResponse request manager' $ \response -> do 134 | endTime <- getTimeMonotonic 135 | case lookup "X-Influxdb-Version" (HC.responseHeaders response) of 136 | Just version -> 137 | return $! Pong (diffTimeSpec endTime startTime) version 138 | Nothing -> 139 | throwIO $ UnexpectedResponse 140 | "The X-Influxdb-Version header was missing in the response." 141 | request 142 | "" 143 | `catch` (throwIO . HTTPException) 144 | where 145 | request = (pingRequest params) 146 | { HC.responseTimeout = case pingTimeout params of 147 | Nothing -> HC.responseTimeoutNone 148 | Just sec -> HC.responseTimeoutMicro $ 149 | round $ realToFrac sec / (10**(-6) :: Double) 150 | } 151 | getTimeMonotonic = getTime Monotonic 152 | -------------------------------------------------------------------------------- /src/Database/InfluxDB/Query.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE EmptyDataDeriving #-} 2 | {-# LANGUAGE BangPatterns #-} 3 | {-# LANGUAGE DataKinds #-} 4 | {-# LANGUAGE FlexibleInstances #-} 5 | {-# LANGUAGE GADTs #-} 6 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 7 | {-# LANGUAGE LambdaCase #-} 8 | {-# LANGUAGE MultiParamTypeClasses #-} 9 | {-# LANGUAGE OverloadedStrings #-} 10 | {-# LANGUAGE RankNTypes #-} 11 | {-# LANGUAGE RecordWildCards #-} 12 | {-# LANGUAGE ScopedTypeVariables #-} 13 | {-# LANGUAGE TemplateHaskell #-} 14 | {-# LANGUAGE ViewPatterns #-} 15 | module Database.InfluxDB.Query 16 | ( 17 | -- * Query interface 18 | Query 19 | , query 20 | , queryChunked 21 | 22 | -- * Query parameters 23 | , QueryParams 24 | , queryParams 25 | , server 26 | , database 27 | , precision 28 | , manager 29 | , authentication 30 | , decoder 31 | 32 | -- * Parsing results 33 | , QueryResults(..) 34 | , parseQueryResults 35 | , parseQueryResultsWith 36 | 37 | -- * Low-level functions 38 | , withQueryResponse 39 | 40 | -- * Helper types 41 | , Ignored 42 | , Empty 43 | , Tagged(..) 
44 | , untag 45 | ) where 46 | import Control.Exception 47 | import Control.Monad 48 | import Data.Char 49 | import Data.List 50 | import Data.Maybe (fromMaybe) 51 | import Data.Proxy 52 | import GHC.TypeLits 53 | 54 | import Control.Lens 55 | import Data.Aeson 56 | import Data.HashMap.Strict (HashMap) 57 | import Data.Optional (Optional(..), optional) 58 | import Data.Tagged 59 | import Data.Text (Text) 60 | import Data.Vector (Vector) 61 | import Data.Void 62 | import qualified Control.Foldl as L 63 | import qualified Data.Aeson.Parser as A 64 | import qualified Data.Aeson.Types as A 65 | import qualified Data.Attoparsec.ByteString as AB 66 | import qualified Data.ByteString as B 67 | import qualified Data.ByteString.Builder as BB 68 | import qualified Data.ByteString.Lazy as BL 69 | import qualified Data.Text as T 70 | import qualified Data.Text.Encoding as TE 71 | import qualified Data.Vector as V 72 | import qualified Network.HTTP.Client as HC 73 | import qualified Network.HTTP.Types as HT 74 | 75 | import Database.InfluxDB.JSON 76 | import Database.InfluxDB.Types as Types 77 | import qualified Database.InfluxDB.Format as F 78 | 79 | -- $setup 80 | -- >>> :set -XDataKinds 81 | -- >>> :set -XOverloadedStrings 82 | -- >>> :set -XRecordWildCards 83 | -- >>> :set -XTypeApplications 84 | -- >>> import Data.Time (UTCTime) 85 | -- >>> import qualified Data.Vector as V 86 | -- >>> import qualified Data.Text as T 87 | 88 | -- | Types that can be converted from an JSON object returned by InfluxDB. 89 | -- 90 | -- For example the @h2o_feet@ series in 91 | -- [the official document](https://docs.influxdata.com/influxdb/v1.2/query_language/data_exploration/) 92 | -- can be encoded as follows: 93 | -- 94 | -- >>> :{ 95 | -- data H2OFeet = H2OFeet 96 | -- { time :: UTCTime 97 | -- , levelDesc :: T.Text 98 | -- , location :: T.Text 99 | -- , waterLevel :: Double 100 | -- } 101 | -- instance QueryResults H2OFeet where 102 | -- parseMeasurement prec _name _tags columns fields = do 103 | -- time <- getField "time" columns fields >>= parseUTCTime prec 104 | -- levelDesc <- getField "level_description" columns fields >>= parseJSON 105 | -- location <- getField "location" columns fields >>= parseJSON 106 | -- waterLevel <- getField "water_level" columns fields >>= parseJSON 107 | -- return H2OFeet {..} 108 | -- :} 109 | class QueryResults a where 110 | -- | Parse a single measurement in a JSON object. 111 | parseMeasurement 112 | :: Precision 'QueryRequest 113 | -- ^ Timestamp precision 114 | -> Maybe Text 115 | -- ^ Optional series name 116 | -> HashMap Text Text 117 | -- ^ Tag set 118 | -> Vector Text 119 | -- ^ Field keys 120 | -> Array 121 | -- ^ Field values 122 | -> A.Parser a 123 | 124 | -- | Always use this 'Decoder' when decoding this type. 125 | -- 126 | -- @'Just' dec@ means 'decoder' in 'QueryParams' will be ignored and be 127 | -- replaced with the @dec@. 'Nothing' means 'decoder' in 'QueryParams' will 128 | -- be used. 129 | coerceDecoder :: proxy a -> Maybe Decoder 130 | coerceDecoder _ = Nothing 131 | 132 | -- | Parse a JSON object as an array of values of expected type. 133 | parseQueryResults 134 | :: forall a. QueryResults a 135 | => Precision 'QueryRequest 136 | -> Value 137 | -> A.Parser (Vector a) 138 | parseQueryResults = 139 | parseQueryResultsWith $ 140 | fromMaybe strictDecoder (coerceDecoder (Proxy :: Proxy a)) 141 | 142 | parseQueryResultsWith 143 | :: forall a. 
QueryResults a 144 | => Decoder 145 | -> Precision 'QueryRequest 146 | -> Value 147 | -> A.Parser (Vector a) 148 | parseQueryResultsWith decoder prec = 149 | parseResultsWithDecoder 150 | (fromMaybe decoder (coerceDecoder (Proxy :: Proxy a))) 151 | (parseMeasurement prec) 152 | 153 | -- | 'QueryResults' instance for empty results. Used by 154 | -- 'Database.InfluxDB.Manage.manage'. 155 | -- 156 | -- NOTE: This instance is deprecated because it's unclear from the name whether 157 | -- it can be used to ignore results. Use 'Empty' when expecting an empty result. 158 | -- Use 'Ignored' if you want to ignore any results. 159 | instance QueryResults Void where 160 | parseMeasurement _ _ _ _ _ = fail "parseMeasurement for Void" 161 | coerceDecoder _ = Just $ Decoder $ SomeDecoder 162 | { decodeEach = id 163 | , decodeFold = const $ pure V.empty 164 | } 165 | 166 | -- | 'Ignored' can be used in the result type of 'query' when the result values 167 | -- are not needed. 168 | -- 169 | -- >>> v <- query @Ignored (queryParams "dummy") "SHOW DATABASES" 170 | -- >>> v 171 | -- [] 172 | data Ignored deriving Show 173 | 174 | -- | 'QueryResults' instance for ignoring results. 175 | instance QueryResults Ignored where 176 | parseMeasurement _ _ _ _ _ = fail "parseMeasurement for Ignored" 177 | coerceDecoder _ = Just $ Decoder $ SomeDecoder 178 | { decodeEach = id -- doesn't matter 179 | , decodeFold = const $ pure V.empty -- always succeeds with an empty vector 180 | } 181 | 182 | -- | 'Empty' can be used in the result type of 'query' when the expected results 183 | -- are always empty. Note that if the results are not empty, the decoding 184 | -- process will fail: 185 | -- 186 | -- >>> let p = queryParams "empty" 187 | -- >>> Database.InfluxDB.Manage.manage p "CREATE DATABASE empty" 188 | -- >>> v <- query @Empty p "SELECT * FROM empty" -- query an empty series 189 | -- >>> v 190 | -- [] 191 | data Empty deriving Show 192 | 193 | -- | 'QueryResults' instance for empty results. Used by 194 | -- 'Database.InfluxDB.Manage.manage'. 195 | instance QueryResults Empty where 196 | parseMeasurement _ _ _ _ _ = fail "parseMeasurement for Empty" 197 | coerceDecoder _ = Just strictDecoder -- fail when the results are not empty 198 | 199 | fieldName :: KnownSymbol k => proxy k -> T.Text 200 | fieldName = T.pack . symbolVal 201 | 202 | -- | One-off type for non-timestamped measurements 203 | -- 204 | -- >>> let p = queryParams "_internal" 205 | -- >>> dbs <- query @(Tagged "name" T.Text) p "SHOW DATABASES" 206 | -- >>> V.find ((== "_internal") . 
untag) dbs 207 | -- Just (Tagged "_internal") 208 | instance (KnownSymbol k, FromJSON v) => QueryResults (Tagged k v) where 209 | parseMeasurement _ _name _ columns fields = 210 | getField (fieldName (Proxy :: Proxy k)) columns fields >>= parseJSON 211 | 212 | -- | One-off tuple for sigle-field measurements 213 | instance 214 | ( KnownSymbol k1, FromJSON v1 215 | , KnownSymbol k2, FromJSON v2 ) 216 | => QueryResults (Tagged k1 v1, Tagged k2 v2) where 217 | parseMeasurement _ _ _ columns fields = do 218 | v1 <- parseJSON 219 | =<< getField (fieldName (Proxy :: Proxy k1)) columns fields 220 | v2 <- parseJSON 221 | =<< getField (fieldName (Proxy :: Proxy k2)) columns fields 222 | return (v1, v2) 223 | 224 | -- | One-off tuple for two-field measurements 225 | instance 226 | ( KnownSymbol k1, FromJSON v1 227 | , KnownSymbol k2, FromJSON v2 228 | , KnownSymbol k3, FromJSON v3 ) 229 | => QueryResults (Tagged k1 v1, Tagged k2 v2, Tagged k3 v3) where 230 | parseMeasurement _ _ _ columns fields = do 231 | v1 <- parseJSON 232 | =<< getField (fieldName (Proxy :: Proxy k1)) columns fields 233 | v2 <- parseJSON 234 | =<< getField (fieldName (Proxy :: Proxy k2)) columns fields 235 | v3 <- parseJSON 236 | =<< getField (fieldName (Proxy :: Proxy k3)) columns fields 237 | return (v1, v2, v3) 238 | 239 | -- | One-off tuple for three-field measurements 240 | instance 241 | ( KnownSymbol k1, FromJSON v1 242 | , KnownSymbol k2, FromJSON v2 243 | , KnownSymbol k3, FromJSON v3 244 | , KnownSymbol k4, FromJSON v4 ) 245 | => QueryResults (Tagged k1 v1, Tagged k2 v2, Tagged k3 v3, Tagged k4 v4) where 246 | parseMeasurement _ _ _ columns fields = do 247 | v1 <- parseJSON 248 | =<< getField (fieldName (Proxy :: Proxy k1)) columns fields 249 | v2 <- parseJSON 250 | =<< getField (fieldName (Proxy :: Proxy k2)) columns fields 251 | v3 <- parseJSON 252 | =<< getField (fieldName (Proxy :: Proxy k3)) columns fields 253 | v4 <- parseJSON 254 | =<< getField (fieldName (Proxy :: Proxy k4)) columns fields 255 | return (v1, v2, v3, v4) 256 | 257 | -- | One-off tuple for four-field measurements 258 | instance 259 | ( KnownSymbol k1, FromJSON v1 260 | , KnownSymbol k2, FromJSON v2 261 | , KnownSymbol k3, FromJSON v3 262 | , KnownSymbol k4, FromJSON v4 263 | , KnownSymbol k5, FromJSON v5 ) 264 | => QueryResults 265 | ( Tagged k1 v1, Tagged k2 v2, Tagged k3 v3, Tagged k4 v4 266 | , Tagged k5 v5 267 | ) where 268 | parseMeasurement _ _ _ columns fields = do 269 | v1 <- parseJSON 270 | =<< getField (fieldName (Proxy :: Proxy k1)) columns fields 271 | v2 <- parseJSON 272 | =<< getField (fieldName (Proxy :: Proxy k2)) columns fields 273 | v3 <- parseJSON 274 | =<< getField (fieldName (Proxy :: Proxy k3)) columns fields 275 | v4 <- parseJSON 276 | =<< getField (fieldName (Proxy :: Proxy k4)) columns fields 277 | v5 <- parseJSON 278 | =<< getField (fieldName (Proxy :: Proxy k5)) columns fields 279 | return (v1, v2, v3, v4, v5) 280 | 281 | -- | One-off tuple for five-field measurements 282 | instance 283 | ( KnownSymbol k1, FromJSON v1 284 | , KnownSymbol k2, FromJSON v2 285 | , KnownSymbol k3, FromJSON v3 286 | , KnownSymbol k4, FromJSON v4 287 | , KnownSymbol k5, FromJSON v5 288 | , KnownSymbol k6, FromJSON v6 ) 289 | => QueryResults 290 | ( Tagged k1 v1, Tagged k2 v2, Tagged k3 v3, Tagged k4 v4 291 | , Tagged k5 v5, Tagged k6 v6 292 | ) where 293 | parseMeasurement _ _ _ columns fields = do 294 | v1 <- parseJSON 295 | =<< getField (fieldName (Proxy :: Proxy k1)) columns fields 296 | v2 <- parseJSON 297 | =<< getField (fieldName (Proxy 
:: Proxy k2)) columns fields 298 | v3 <- parseJSON 299 | =<< getField (fieldName (Proxy :: Proxy k3)) columns fields 300 | v4 <- parseJSON 301 | =<< getField (fieldName (Proxy :: Proxy k4)) columns fields 302 | v5 <- parseJSON 303 | =<< getField (fieldName (Proxy :: Proxy k5)) columns fields 304 | v6 <- parseJSON 305 | =<< getField (fieldName (Proxy :: Proxy k6)) columns fields 306 | return (v1, v2, v3, v4, v5, v6) 307 | 308 | -- | One-off tuple for six-field measurement 309 | instance 310 | ( KnownSymbol k1, FromJSON v1 311 | , KnownSymbol k2, FromJSON v2 312 | , KnownSymbol k3, FromJSON v3 313 | , KnownSymbol k4, FromJSON v4 314 | , KnownSymbol k5, FromJSON v5 315 | , KnownSymbol k6, FromJSON v6 316 | , KnownSymbol k7, FromJSON v7 ) 317 | => QueryResults 318 | ( Tagged k1 v1, Tagged k2 v2, Tagged k3 v3, Tagged k4 v4 319 | , Tagged k5 v5, Tagged k6 v6, Tagged k7 v7 320 | ) where 321 | parseMeasurement _ _ _ columns fields = do 322 | v1 <- parseJSON 323 | =<< getField (fieldName (Proxy :: Proxy k1)) columns fields 324 | v2 <- parseJSON 325 | =<< getField (fieldName (Proxy :: Proxy k2)) columns fields 326 | v3 <- parseJSON 327 | =<< getField (fieldName (Proxy :: Proxy k3)) columns fields 328 | v4 <- parseJSON 329 | =<< getField (fieldName (Proxy :: Proxy k4)) columns fields 330 | v5 <- parseJSON 331 | =<< getField (fieldName (Proxy :: Proxy k5)) columns fields 332 | v6 <- parseJSON 333 | =<< getField (fieldName (Proxy :: Proxy k6)) columns fields 334 | v7 <- parseJSON 335 | =<< getField (fieldName (Proxy :: Proxy k7)) columns fields 336 | return (v1, v2, v3, v4, v5, v6, v7) 337 | 338 | -- | One-off tuple for seven-field measurements 339 | instance 340 | ( KnownSymbol k1, FromJSON v1 341 | , KnownSymbol k2, FromJSON v2 342 | , KnownSymbol k3, FromJSON v3 343 | , KnownSymbol k4, FromJSON v4 344 | , KnownSymbol k5, FromJSON v5 345 | , KnownSymbol k6, FromJSON v6 346 | , KnownSymbol k7, FromJSON v7 347 | , KnownSymbol k8, FromJSON v8 ) 348 | => QueryResults 349 | ( Tagged k1 v1, Tagged k2 v2, Tagged k3 v3, Tagged k4 v4 350 | , Tagged k5 v5, Tagged k6 v6, Tagged k7 v7, Tagged k8 v8 351 | ) where 352 | parseMeasurement _ _ _ columns fields = do 353 | v1 <- parseJSON 354 | =<< getField (fieldName (Proxy :: Proxy k1)) columns fields 355 | v2 <- parseJSON 356 | =<< getField (fieldName (Proxy :: Proxy k2)) columns fields 357 | v3 <- parseJSON 358 | =<< getField (fieldName (Proxy :: Proxy k3)) columns fields 359 | v4 <- parseJSON 360 | =<< getField (fieldName (Proxy :: Proxy k4)) columns fields 361 | v5 <- parseJSON 362 | =<< getField (fieldName (Proxy :: Proxy k5)) columns fields 363 | v6 <- parseJSON 364 | =<< getField (fieldName (Proxy :: Proxy k6)) columns fields 365 | v7 <- parseJSON 366 | =<< getField (fieldName (Proxy :: Proxy k7)) columns fields 367 | v8 <- parseJSON 368 | =<< getField (fieldName (Proxy :: Proxy k8)) columns fields 369 | return (v1, v2, v3, v4, v5, v6, v7, v8) 370 | 371 | -- | The full set of parameters for the query API 372 | -- 373 | -- Following lenses are available to access its fields: 374 | -- 375 | -- * 'server' 376 | -- * 'database' 377 | -- * 'precision' 378 | -- * 'manager' 379 | -- * 'authentication' 380 | -- * 'decoder' 381 | data QueryParams = QueryParams 382 | { queryServer :: !Server 383 | , queryDatabase :: !Database 384 | , queryPrecision :: !(Precision 'QueryRequest) 385 | -- ^ Timestamp precision 386 | -- 387 | -- InfluxDB uses nanosecond precision if nothing is specified. 
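  --
  -- For example, asking for 'Second' makes the server return timestamps as
  -- integral epoch seconds rather than RFC3339 strings (a sketch):
  --
  -- @
  -- p :: QueryParams
  -- p = queryParams "weather" & precision .~ Second
  -- @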
388 | , queryAuthentication :: !(Maybe Credentials) 389 | -- ^ No authentication by default 390 | , queryManager :: !(Either HC.ManagerSettings HC.Manager) 391 | -- ^ HTTP connection manager 392 | , queryDecoder :: Decoder 393 | -- ^ Decoder settings to configure how to parse a JSON resposne given a row 394 | -- parser 395 | } 396 | 397 | -- | Smart constructor for 'QueryParams' 398 | -- 399 | -- Default parameters: 400 | -- 401 | -- ['server'] 'defaultServer' 402 | -- ['precision'] 'RFC3339' 403 | -- ['authentication'] 'Nothing' 404 | -- ['manager'] @'Left' 'HC.defaultManagerSettings'@ 405 | -- ['decoder'] @'strictDecoder'@ 406 | queryParams :: Database -> QueryParams 407 | queryParams queryDatabase = QueryParams 408 | { queryServer = defaultServer 409 | , queryPrecision = RFC3339 410 | , queryAuthentication = Nothing 411 | , queryManager = Left HC.defaultManagerSettings 412 | , queryDecoder = strictDecoder 413 | , .. 414 | } 415 | 416 | -- | Query data from InfluxDB. 417 | -- 418 | -- It may throw 'InfluxException'. 419 | -- 420 | -- If you need a lower-level interface (e.g. to bypass the 'QueryResults' 421 | -- constraint etc), see 'withQueryResponse'. 422 | query :: forall a. QueryResults a => QueryParams -> Query -> IO (Vector a) 423 | query params q = withQueryResponse params Nothing q go 424 | where 425 | go request response = do 426 | chunks <- HC.brConsume $ HC.responseBody response 427 | let body = BL.fromChunks chunks 428 | case eitherDecode' body of 429 | Left message -> throwIO $ UnexpectedResponse message request body 430 | Right val -> do 431 | let parser = parseQueryResultsWith 432 | (fromMaybe 433 | (queryDecoder params) 434 | (coerceDecoder (Proxy :: Proxy a))) 435 | (queryPrecision params) 436 | case A.parse parser val of 437 | A.Success vec -> return vec 438 | A.Error message -> errorQuery message request response val 439 | 440 | setPrecision 441 | :: Precision 'QueryRequest 442 | -> [(B.ByteString, Maybe B.ByteString)] 443 | -> [(B.ByteString, Maybe B.ByteString)] 444 | setPrecision prec qs = maybe qs (\p -> ("epoch", Just p):qs) $ 445 | precisionParam prec 446 | 447 | precisionParam :: Precision 'QueryRequest -> Maybe B.ByteString 448 | precisionParam = \case 449 | Nanosecond -> return "ns" 450 | Microsecond -> return "u" 451 | Millisecond -> return "ms" 452 | Second -> return "s" 453 | Minute -> return "m" 454 | Hour -> return "h" 455 | RFC3339 -> Nothing 456 | 457 | -- | Same as 'query' but it instructs InfluxDB to stream chunked responses 458 | -- rather than returning a huge JSON object. This can be lot more efficient than 459 | -- 'query' if the result is huge. 460 | -- 461 | -- It may throw 'InfluxException'. 462 | -- 463 | -- If you need a lower-level interface (e.g. to bypass the 'QueryResults' 464 | -- constraint etc), see 'withQueryResponse'. 465 | queryChunked 466 | :: QueryResults a 467 | => QueryParams 468 | -> Optional Int 469 | -- ^ Chunk size 470 | -- 471 | -- By 'Default', InfluxDB chunks responses by series or by every 10,000 472 | -- points, whichever occurs first. If it set to a 'Specific' value, InfluxDB 473 | -- chunks responses by series or by that number of points. 
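--
-- For example, a constant-memory row count over a large series can be written
-- with a fold from "Control.Foldl" (imported as @L@ in this module). A sketch;
-- the database, measurement and field names are illustrative:
--
-- @
-- countPoints :: IO Int
-- countPoints = queryChunked (queryParams "metrics") Default
--   "SELECT value FROM cpu"
--   (L.generalize (L.premap V.length L.sum)
--     :: L.FoldM IO (Vector (Tagged "value" Double)) Int)
-- @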
474 | -> Query 475 | -> L.FoldM IO (Vector a) r 476 | -> IO r 477 | queryChunked params chunkSize q (L.FoldM step initialize extract) = 478 | withQueryResponse params (Just chunkSize) q go 479 | where 480 | go request response = do 481 | x0 <- initialize 482 | chunk0 <- HC.responseBody response 483 | x <- loop x0 k0 chunk0 484 | extract x 485 | where 486 | k0 = AB.parse A.json 487 | loop x k chunk 488 | | B.null chunk = return x 489 | | otherwise = case k chunk of 490 | AB.Fail unconsumed _contexts message -> 491 | throwIO $ UnexpectedResponse message request $ 492 | BL.fromStrict unconsumed 493 | AB.Partial k' -> do 494 | chunk' <- HC.responseBody response 495 | loop x k' chunk' 496 | AB.Done leftover val -> 497 | case A.parse (parseQueryResults (queryPrecision params)) val of 498 | A.Success vec -> do 499 | x' <- step x vec 500 | loop x' k0 leftover 501 | A.Error message -> errorQuery message request response val 502 | 503 | -- | Lower-level interface to query data. 504 | withQueryResponse 505 | :: QueryParams 506 | -> Maybe (Optional Int) 507 | -- ^ Chunk size 508 | -- 509 | -- By 'Nothing', InfluxDB returns all matching data points at once. 510 | -- By @'Just' 'Default'@, InfluxDB chunks responses by series or by every 511 | -- 10,000 points, whichever occurs first. If it set to a 'Specific' value, 512 | -- InfluxDB chunks responses by series or by that number of points. 513 | -> Query 514 | -> (HC.Request -> HC.Response HC.BodyReader -> IO r) 515 | -> IO r 516 | withQueryResponse params chunkSize q f = do 517 | manager' <- either HC.newManager return $ queryManager params 518 | HC.withResponse request manager' (f request) 519 | `catch` (throwIO . HTTPException) 520 | where 521 | request = 522 | HC.setQueryString (setPrecision (queryPrecision params) queryString) $ 523 | queryRequest params 524 | queryString = addChunkedParam 525 | [ ("q", Just $ F.fromQuery q) 526 | , ("db", Just db) 527 | ] 528 | where 529 | !db = TE.encodeUtf8 $ databaseName $ queryDatabase params 530 | addChunkedParam ps = case chunkSize of 531 | Nothing -> ps 532 | Just size -> 533 | let !chunked = optional "true" (decodeChunkSize . max 1) size 534 | in ("chunked", Just chunked) : ps 535 | where 536 | decodeChunkSize = BL.toStrict . BB.toLazyByteString . 
BB.intDec 537 | 538 | 539 | queryRequest :: QueryParams -> HC.Request 540 | queryRequest QueryParams {..} = applyBasicAuth $ HC.defaultRequest 541 | { HC.host = TE.encodeUtf8 _host 542 | , HC.port = fromIntegral _port 543 | , HC.secure = _ssl 544 | , HC.method = "GET" 545 | , HC.path = "/query" 546 | } 547 | where 548 | Server {..} = queryServer 549 | applyBasicAuth = 550 | case queryAuthentication of 551 | Nothing -> id 552 | Just Credentials {..} -> 553 | HC.applyBasicAuth (TE.encodeUtf8 _user) (TE.encodeUtf8 _password) 554 | 555 | errorQuery :: String -> HC.Request -> HC.Response body -> A.Value -> IO a 556 | errorQuery message request response val = do 557 | let status = HC.responseStatus response 558 | when (HT.statusIsServerError status) $ 559 | throwIO $ ServerError message 560 | when (HT.statusIsClientError status) $ 561 | throwIO $ ClientError message request 562 | throwIO $ UnexpectedResponse 563 | ("BUG: " ++ message ++ " in Database.InfluxDB.Query.query") 564 | request 565 | (encode val) 566 | 567 | makeLensesWith 568 | ( lensRules 569 | & lensField .~ mappingNamer 570 | (\name -> case stripPrefix "query" name of 571 | Just (c:cs) -> ['_':toLower c:cs] 572 | _ -> []) 573 | ) 574 | ''QueryParams 575 | 576 | -- | 577 | -- >>> let p = queryParams "foo" 578 | -- >>> p ^. server.host 579 | -- "localhost" 580 | instance HasServer QueryParams where 581 | server = _server 582 | 583 | -- | 584 | -- >>> let p = queryParams "foo" 585 | -- >>> p ^. database 586 | -- "foo" 587 | instance HasDatabase QueryParams where 588 | database = _database 589 | 590 | -- | Returning JSON responses contain timestamps in the specified 591 | -- precision/format. 592 | -- 593 | -- >>> let p = queryParams "foo" 594 | -- >>> p ^. precision 595 | -- RFC3339 596 | instance HasPrecision 'QueryRequest QueryParams where 597 | precision = _precision 598 | 599 | -- | 600 | -- >>> let p = queryParams "foo" & manager .~ Left HC.defaultManagerSettings 601 | instance HasManager QueryParams where 602 | manager = _manager 603 | 604 | -- | Authentication info for the query 605 | -- 606 | -- >>> let p = queryParams "foo" 607 | -- >>> p ^. authentication 608 | -- Nothing 609 | -- >>> let p' = p & authentication ?~ credentials "john" "passw0rd" 610 | -- >>> p' ^. 
authentication.traverse.user 611 | -- "john" 612 | instance HasCredentials QueryParams where 613 | authentication = _authentication 614 | 615 | -- | Decoder settings 616 | -- 617 | -- >>> let p = queryParams "foo" 618 | -- >>> let _ = p & decoder .~ strictDecoder 619 | -- >>> let _ = p & decoder .~ lenientDecoder 620 | decoder :: Lens' QueryParams Decoder 621 | decoder = _decoder 622 | -------------------------------------------------------------------------------- /src/Database/InfluxDB/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DataKinds #-} 2 | {-# LANGUAGE DeriveDataTypeable #-} 3 | {-# LANGUAGE DeriveGeneric #-} 4 | {-# LANGUAGE FlexibleInstances #-} 5 | {-# LANGUAGE FunctionalDependencies #-} 6 | {-# LANGUAGE GADTs #-} 7 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 8 | {-# LANGUAGE KindSignatures #-} 9 | {-# LANGUAGE LambdaCase #-} 10 | {-# LANGUAGE NamedFieldPuns #-} 11 | {-# LANGUAGE OverloadedStrings #-} 12 | {-# LANGUAGE StandaloneDeriving #-} 13 | {-# LANGUAGE TemplateHaskell #-} 14 | module Database.InfluxDB.Types where 15 | import Control.Exception 16 | import Data.Int (Int64) 17 | import Data.String 18 | import Data.Typeable (Typeable) 19 | import GHC.Generics (Generic) 20 | 21 | import Control.Lens 22 | import Data.Text (Text) 23 | import Data.Time.Clock 24 | import Data.Time.Clock.POSIX 25 | import Network.HTTP.Client (Manager, ManagerSettings, Request) 26 | import System.Clock (TimeSpec(..)) 27 | import qualified Data.ByteString.Lazy as BL 28 | import qualified Data.Text as T 29 | import qualified Network.HTTP.Client as HC 30 | 31 | -- $setup 32 | -- >>> :set -XOverloadedStrings 33 | -- >>> import System.Clock (TimeSpec(..)) 34 | -- >>> import Database.InfluxDB 35 | -- >>> import qualified Database.InfluxDB.Format as F 36 | 37 | -- | An InfluxDB query. 38 | -- 39 | -- A spec of the format is available at 40 | -- . 41 | -- 42 | -- A 'Query' can be constructed using either 43 | -- 44 | -- * the 'IsString' instance with @-XOverloadedStrings@ 45 | -- * or 'Database.InfluxDB.Format.formatQuery'. 46 | -- 47 | -- >>> :set -XOverloadedStrings 48 | -- >>> "SELECT * FROM series" :: Query 49 | -- "SELECT * FROM series" 50 | -- >>> import qualified Database.InfluxDB.Format as F 51 | -- >>> formatQuery ("SELECT * FROM "%F.key) "series" 52 | -- "SELECT * FROM \"series\"" 53 | -- 54 | -- NOTE: Currently this library doesn't support type-safe query construction. 55 | newtype Query = Query T.Text deriving IsString 56 | 57 | instance Show Query where 58 | show (Query q) = show q 59 | 60 | -- | InfluxDB server to connect to. 
61 | -- 62 | -- Following lenses are available to access its fields: 63 | -- 64 | -- * 'host': FQDN or IP address of the InfluxDB server 65 | -- * 'port': Port number of the InfluxDB server 66 | -- * 'ssl': Whether or not to use SSL 67 | data Server = Server 68 | { _host :: !Text 69 | , _port :: !Int 70 | , _ssl :: !Bool 71 | } deriving (Show, Generic, Eq, Ord) 72 | 73 | makeLensesWith (lensRules & generateSignatures .~ False) ''Server 74 | 75 | -- | Host name of the server 76 | host :: Lens' Server Text 77 | 78 | -- | Port number of the server 79 | port :: Lens' Server Int 80 | 81 | -- | If SSL is enabled 82 | -- 83 | -- For secure connections (HTTPS), consider using one of the following packages: 84 | -- 85 | -- * [http-client-tls](https://hackage.haskell.org/package/http-client-tls) 86 | -- * [http-client-openssl](https://hackage.haskell.org/package/http-client-openssl) 87 | ssl :: Lens' Server Bool 88 | 89 | -- | Default InfluxDB server settings 90 | -- 91 | -- Default parameters: 92 | -- 93 | -- >>> defaultServer ^. host 94 | -- "localhost" 95 | -- >>> defaultServer ^. port 96 | -- 8086 97 | -- >>> defaultServer ^. ssl 98 | -- False 99 | defaultServer :: Server 100 | defaultServer = Server 101 | { _host = "localhost" 102 | , _port = 8086 103 | , _ssl = False 104 | } 105 | 106 | -- | HTTPS-enabled InfluxDB server settings 107 | secureServer :: Server 108 | secureServer = defaultServer & ssl .~ True 109 | 110 | -- | User credentials. 111 | -- 112 | -- Following lenses are available to access its fields: 113 | -- 114 | -- * 'user' 115 | -- * 'password' 116 | data Credentials = Credentials 117 | { _user :: !Text 118 | , _password :: !Text 119 | } deriving Show 120 | 121 | -- | Smart constructor for 'Credentials' 122 | credentials 123 | :: Text -- ^ User name 124 | -> Text -- ^ Password 125 | -> Credentials 126 | credentials = Credentials 127 | 128 | makeLensesWith (lensRules & generateSignatures .~ False) ''Credentials 129 | 130 | -- | User name to access InfluxDB. 131 | -- 132 | -- >>> let creds = credentials "john" "passw0rd" 133 | -- >>> creds ^. user 134 | -- "john" 135 | user :: Lens' Credentials Text 136 | 137 | -- | Password to access InfluxDB 138 | -- 139 | -- >>> let creds = credentials "john" "passw0rd" 140 | -- >>> creds ^. password 141 | -- "passw0rd" 142 | password :: Lens' Credentials Text 143 | 144 | -- | Database name. 145 | -- 146 | -- 'Database.InfluxDB.formatDatabase' can be used to construct a 147 | -- 'Database'. 148 | -- 149 | -- >>> "test-db" :: Database 150 | -- "test-db" 151 | -- >>> formatDatabase "test-db" 152 | -- "test-db" 153 | -- >>> formatDatabase ("test-db-"%F.decimal) 0 154 | -- "test-db-0" 155 | newtype Database = Database { databaseName :: Text } deriving (Eq, Ord) 156 | 157 | instance IsString Database where 158 | fromString xs = Database $ identifier "Database" xs 159 | 160 | instance Show Database where 161 | show (Database name) = show name 162 | 163 | -- | String name that is used for measurements. 164 | -- 165 | -- 'Database.InfluxDB.formatMeasurement' can be used to construct a 166 | -- 'Measurement'. 
167 | -- 168 | -- >>> "test-series" :: Measurement 169 | -- "test-series" 170 | -- >>> formatMeasurement "test-series" 171 | -- "test-series" 172 | -- >>> formatMeasurement ("test-series-"%F.decimal) 0 173 | -- "test-series-0" 174 | newtype Measurement = Measurement Text deriving (Eq, Ord) 175 | 176 | instance IsString Measurement where 177 | fromString xs = Measurement $ identifier "Measurement" xs 178 | 179 | instance Show Measurement where 180 | show (Measurement name) = show name 181 | 182 | -- | String type that is used for tag keys/values and field keys. 183 | -- 184 | -- 'Database.InfluxDB.formatKey' can be used to construct a 'Key'. 185 | -- 186 | -- >>> "test-key" :: Key 187 | -- "test-key" 188 | -- >>> formatKey "test-key" 189 | -- "test-key" 190 | -- >>> formatKey ("test-key-"%F.decimal) 0 191 | -- "test-key-0" 192 | newtype Key = Key Text deriving (Eq, Ord) 193 | 194 | instance IsString Key where 195 | fromString xs = Key $ identifier "Key" xs 196 | 197 | instance Show Key where 198 | show (Key name) = show name 199 | 200 | identifier :: String -> String -> Text 201 | identifier ty xs 202 | | null xs = error $ ty ++ " should never be empty" 203 | | elem '\n' xs = error $ ty ++ " should not contain a new line" 204 | | otherwise = fromString xs 205 | 206 | -- | Nullability of fields. 207 | -- 208 | -- Queries can contain nulls but the line protocol cannot. 209 | data Nullability = Nullable | NonNullable deriving Typeable 210 | 211 | -- | Field type for queries. Queries can contain null values. 212 | type QueryField = Field 'Nullable 213 | 214 | -- | Field type for the line protocol. The line protocol doesn't accept null 215 | -- values. 216 | type LineField = Field 'NonNullable 217 | 218 | data Field (n :: Nullability) where 219 | -- | Signed 64-bit integers (@-9,223,372,036,854,775,808@ to 220 | -- @9,223,372,036,854,775,807@). 221 | FieldInt :: !Int64 -> Field n 222 | -- | IEEE-754 64-bit floating-point numbers. This is the default numerical 223 | -- type. 224 | FieldFloat :: !Double -> Field n 225 | -- | String field. Its length is limited to 64KB, which is not enforced by 226 | -- this library. 227 | FieldString :: !Text -> Field n 228 | -- | Boolean field. 229 | FieldBool :: !Bool -> Field n 230 | -- | Null field. 231 | -- 232 | -- Note that a field can be null only in queries. The line protocol doesn't 233 | -- allow null values. 234 | FieldNull :: Field 'Nullable 235 | deriving Typeable 236 | 237 | deriving instance Eq (Field n) 238 | deriving instance Show (Field n) 239 | 240 | instance IsString (Field n) where 241 | fromString = FieldString . T.pack 242 | 243 | -- | Type of a request 244 | data RequestType 245 | = QueryRequest 246 | -- ^ Request for @/query@ 247 | | WriteRequest 248 | -- ^ Request for @/write@ 249 | deriving Show 250 | 251 | -- | Predefined set of time precision. 252 | -- 253 | -- 'RFC3339' is only available for 'QueryRequest's. 254 | data Precision (ty :: RequestType) where 255 | -- | POSIX time in ns 256 | Nanosecond :: Precision ty 257 | -- | POSIX time in μs 258 | Microsecond :: Precision ty 259 | -- | POSIX time in ms 260 | Millisecond :: Precision ty 261 | -- | POSIX time in s 262 | Second :: Precision ty 263 | -- | POSIX time in minutes 264 | Minute :: Precision ty 265 | -- | POSIX time in hours 266 | Hour :: Precision ty 267 | -- | Nanosecond precision time in a human readable format, like 268 | -- @2016-01-04T00:00:23.135623Z@. This is the default format for @/query@. 
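--
-- Because the line protocol represents timestamps as 64-bit integers, @/write@
-- requests have to pick one of the numeric precisions above. A sketch,
-- assuming 'Database.InfluxDB.Write.writeParams' and its 'precision' lens:
--
-- @
-- writeParams "test-db" & precision .~ Millisecond
-- @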
269 | RFC3339 :: Precision 'QueryRequest 270 | 271 | deriving instance Show (Precision a) 272 | deriving instance Eq (Precision a) 273 | 274 | -- | Name of the time precision. 275 | -- 276 | -- >>> precisionName Nanosecond 277 | -- "n" 278 | -- >>> precisionName Microsecond 279 | -- "u" 280 | -- >>> precisionName Millisecond 281 | -- "ms" 282 | -- >>> precisionName Second 283 | -- "s" 284 | -- >>> precisionName Minute 285 | -- "m" 286 | -- >>> precisionName Hour 287 | -- "h" 288 | -- >>> precisionName RFC3339 289 | -- "rfc3339" 290 | precisionName :: Precision ty -> Text 291 | precisionName = \case 292 | Nanosecond -> "n" 293 | Microsecond -> "u" 294 | Millisecond -> "ms" 295 | Second -> "s" 296 | Minute -> "m" 297 | Hour -> "h" 298 | RFC3339 -> "rfc3339" 299 | 300 | -- | A 'Timestamp' is something that can be converted to a valid 301 | -- InfluxDB timestamp, which is represented as a 64-bit integer. 302 | class Timestamp time where 303 | -- | Round a time to the given precision and scale it to nanoseconds 304 | roundTo :: Precision 'WriteRequest -> time -> Int64 305 | -- | Scale a time to the given precision 306 | scaleTo :: Precision 'WriteRequest -> time -> Int64 307 | 308 | roundAt :: RealFrac a => a -> a -> a 309 | roundAt scale x = fromIntegral (round (x / scale) :: Int64) * scale 310 | 311 | -- | Scale of the type precision. 312 | -- 313 | -- >>> precisionScale RFC3339 314 | -- 1.0e-9 315 | -- >>> precisionScale Microsecond 316 | -- 1.0e-6 317 | precisionScale :: Fractional a => Precision ty -> a 318 | precisionScale = \case 319 | RFC3339 -> 10^^(-9 :: Int) 320 | Nanosecond -> 10^^(-9 :: Int) 321 | Microsecond -> 10^^(-6 :: Int) 322 | Millisecond -> 10^^(-3 :: Int) 323 | Second -> 1 324 | Minute -> 60 325 | Hour -> 60 * 60 326 | 327 | -- | 328 | -- >>> import Data.Time.Calendar 329 | -- >>> let t = UTCTime (fromGregorian 2018 04 14) 123.123456789 330 | -- >>> t 331 | -- 2018-04-14 00:02:03.123456789 UTC 332 | -- >>> roundTo Nanosecond t 333 | -- 1523664123123456789 334 | -- >>> roundTo Microsecond t 335 | -- 1523664123123457000 336 | -- >>> roundTo Millisecond t 337 | -- 1523664123123000000 338 | -- >>> roundTo Second t 339 | -- 1523664123000000000 340 | -- >>> roundTo Minute t 341 | -- 1523664120000000000 342 | -- >>> roundTo Hour t 343 | -- 1523664000000000000 344 | -- >>> scaleTo Nanosecond t 345 | -- 1523664123123456789 346 | -- >>> scaleTo Microsecond t 347 | -- 1523664123123457 348 | -- >>> scaleTo Millisecond t 349 | -- 1523664123123 350 | -- >>> scaleTo Second t 351 | -- 1523664123 352 | -- >>> scaleTo Minute t 353 | -- 25394402 354 | -- >>> scaleTo Hour t 355 | -- 423240 356 | instance Timestamp UTCTime where 357 | roundTo prec = roundTo prec . utcTimeToPOSIXSeconds 358 | scaleTo prec = scaleTo prec . 
utcTimeToPOSIXSeconds 359 | 360 | -- | 361 | -- >>> let dt = 123.123456789 :: NominalDiffTime 362 | -- >>> roundTo Nanosecond dt 363 | -- 123123456789 364 | -- >>> roundTo Microsecond dt 365 | -- 123123457000 366 | -- >>> roundTo Millisecond dt 367 | -- 123123000000 368 | -- >>> roundTo Second dt 369 | -- 123000000000 370 | -- >>> roundTo Minute dt 371 | -- 120000000000 372 | -- >>> roundTo Hour dt 373 | -- 0 374 | -- >>> scaleTo Nanosecond dt 375 | -- 123123456789 376 | -- >>> scaleTo Microsecond dt 377 | -- 123123457 378 | -- >>> scaleTo Millisecond dt 379 | -- 123123 380 | -- >>> scaleTo Second dt 381 | -- 123 382 | -- >>> scaleTo Minute dt 383 | -- 2 384 | -- >>> scaleTo Hour dt 385 | -- 0 386 | instance Timestamp NominalDiffTime where 387 | roundTo prec time = 388 | round $ 10^(9 :: Int) * roundAt (precisionScale prec) time 389 | scaleTo prec time = round $ time / precisionScale prec 390 | 391 | -- | 392 | -- >>> let timespec = TimeSpec 123 123456789 393 | -- >>> roundTo Nanosecond timespec 394 | -- 123123456789 395 | -- >>> roundTo Microsecond timespec 396 | -- 123123457000 397 | -- >>> roundTo Millisecond timespec 398 | -- 123123000000 399 | -- >>> roundTo Second timespec 400 | -- 123000000000 401 | -- >>> roundTo Minute timespec 402 | -- 120000000000 403 | -- >>> roundTo Hour timespec 404 | -- 0 405 | -- >>> scaleTo Nanosecond timespec 406 | -- 123123456789 407 | -- >>> scaleTo Microsecond timespec 408 | -- 123123457 409 | -- >>> scaleTo Millisecond timespec 410 | -- 123123 411 | -- >>> scaleTo Second timespec 412 | -- 123 413 | -- >>> scaleTo Minute timespec 414 | -- 2 415 | -- >>> scaleTo Hour timespec 416 | -- 0 417 | instance Timestamp TimeSpec where 418 | roundTo prec t = 419 | round $ 10^(9 :: Int) * roundAt (precisionScale prec) (timeSpecToSeconds t) 420 | scaleTo prec t = round $ timeSpecToSeconds t / precisionScale prec 421 | 422 | timeSpecToSeconds :: TimeSpec -> Double 423 | timeSpecToSeconds TimeSpec { sec, nsec } = 424 | fromIntegral sec + fromIntegral nsec * 10^^(-9 :: Int) 425 | 426 | -- | Exceptions used in this library. 427 | -- 428 | -- In general, the library tries to convert exceptions from the dependent 429 | -- libraries to the following types of errors. 430 | data InfluxException 431 | = ServerError String 432 | -- ^ Server side error. 433 | -- 434 | -- You can expect to get a successful response once the issue is resolved on 435 | -- the server side. 436 | | ClientError String Request 437 | -- ^ Client side error. 438 | -- 439 | -- You need to fix your query to get a successful response. 440 | | UnexpectedResponse String Request BL.ByteString 441 | -- ^ Received an unexpected response. The 'String' field is a message and the 442 | -- 'BL.ByteString' field is a possibly-empty relevant payload of the response. 443 | -- 444 | -- This can happen e.g. when the response from InfluxDB is incompatible with 445 | -- what this library expects due to an upstream format change or when the JSON 446 | -- response doesn't have expected fields etc. 447 | | HTTPException HC.HttpException 448 | -- ^ HTTP communication error. 449 | -- 450 | -- Typical HTTP errors (4xx and 5xx) are covered by 'ClientError' and 451 | -- 'ServerError'. So this exception means something unusual happened. Note 452 | -- that if 'HC.checkResponse' is overridden to throw an 'HC.HttpException' on 453 | -- an unsuccessful HTTP code, this exception is thrown instead of 454 | -- 'ClientError' or 'ServerError'. 
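{- A hypothetical handling sketch, not part of this library: callers can branch on these constructors after 'Control.Exception.try'. The helpers @retryLater@ and @logError@ below are assumed to exist elsewhere and are only illustrative.

@
reportInfluxError :: IO a -> IO (Maybe a)
reportInfluxError io = try io >>= \r -> case r of
  Right a -> pure (Just a)
  Left e  -> case e of
    ServerError msg            -> retryLater msg >> pure Nothing
    ClientError msg _          -> logError msg >> pure Nothing
    UnexpectedResponse msg _ _ -> logError msg >> pure Nothing
    HTTPException he           -> logError (show he) >> pure Nothing
@
-}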
455 | deriving (Show, Typeable) 456 | 457 | instance Exception InfluxException 458 | 459 | -- | Class of data types that have a server field 460 | class HasServer a where 461 | -- | InfluxDB server address and port to interact with. 462 | server :: Lens' a Server 463 | 464 | -- | Class of data types that have a database field 465 | class HasDatabase a where 466 | -- | Database name to work on. 467 | database :: Lens' a Database 468 | 469 | -- | Class of data types that have a precision field 470 | class HasPrecision (ty :: RequestType) a | a -> ty where 471 | -- | Time precision parameter. 472 | precision :: Lens' a (Precision ty) 473 | 474 | -- | Class of data types that have a manager field 475 | class HasManager a where 476 | -- | HTTP manager settings or a manager itself. 477 | -- 478 | -- If it's set to 'ManagerSettings', the library will create a 'Manager' from 479 | -- the settings for you. 480 | manager :: Lens' a (Either ManagerSettings Manager) 481 | 482 | -- | Class of data types that have an authentication field 483 | class HasCredentials a where 484 | -- | User name and password to be used when sending requests to InfluxDB. 485 | authentication :: Lens' a (Maybe Credentials) 486 | -------------------------------------------------------------------------------- /src/Database/InfluxDB/Write.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE CPP #-} 2 | {-# LANGUAGE DataKinds #-} 3 | {-# LANGUAGE MultiParamTypeClasses #-} 4 | {-# LANGUAGE NamedFieldPuns #-} 5 | {-# LANGUAGE OverloadedStrings #-} 6 | {-# LANGUAGE RecordWildCards #-} 7 | {-# LANGUAGE ScopedTypeVariables #-} 8 | {-# LANGUAGE TemplateHaskell #-} 9 | {-# LANGUAGE ViewPatterns #-} 10 | #if __GLASGOW_HASKELL__ >= 800 11 | {-# OPTIONS_GHC -Wno-missing-signatures #-} 12 | #else 13 | {-# OPTIONS_GHC -fno-warn-missing-signatures #-} 14 | #endif 15 | module Database.InfluxDB.Write 16 | ( -- * Writers 17 | -- $intro 18 | write 19 | , writeBatch 20 | , writeByteString 21 | 22 | -- * Writer parameters 23 | , WriteParams 24 | , writeParams 25 | , Types.server 26 | , Types.database 27 | , retentionPolicy 28 | , Types.precision 29 | , Types.manager 30 | ) where 31 | import Control.Exception 32 | import Control.Monad 33 | import Data.Maybe 34 | 35 | import Control.Lens 36 | import qualified Data.Aeson as A 37 | import qualified Data.Aeson.Types as A 38 | import qualified Data.ByteString.Char8 as B8 39 | import qualified Data.ByteString.Lazy as BL 40 | import qualified Data.Text.Encoding as TE 41 | import qualified Network.HTTP.Client as HC 42 | import qualified Network.HTTP.Types as HT 43 | 44 | import Database.InfluxDB.Line 45 | import Database.InfluxDB.Types as Types 46 | import Database.InfluxDB.JSON 47 | 48 | -- $setup 49 | -- >>> :set -XOverloadedStrings -XNoOverloadedLists -XTypeApplications 50 | -- >>> import qualified Data.Map as Map 51 | -- >>> import Data.Time 52 | -- >>> import Database.InfluxDB 53 | -- >>> import qualified Network.HTTP.Client as HC 54 | -- >>> Database.InfluxDB.manage (queryParams "test-db") "CREATE DATABASE \"test-db\"" 55 | 56 | {- $intro 57 | The code snippets in this module assume the following imports. 58 | 59 | @ 60 | import qualified Data.Map as Map 61 | import Data.Time 62 | @ 63 | -} 64 | 65 | -- | The full set of parameters for the HTTP writer.
66 | -- 67 | -- Following lenses are available to access its fields: 68 | -- 69 | -- * 'server' 70 | -- * 'database' 71 | -- * 'retentionPolicy' 72 | -- * 'precision' 73 | -- * 'authentication' 74 | -- * 'manager' 75 | data WriteParams = WriteParams 76 | { writeServer :: !Server 77 | , writeDatabase :: !Database 78 | -- ^ Database to be written 79 | , writeRetentionPolicy :: !(Maybe Key) 80 | -- ^ 'Nothing' means the default retention policy for the database. 81 | , writePrecision :: !(Precision 'WriteRequest) 82 | -- ^ Timestamp precision 83 | -- 84 | -- In the HTTP API, timestamps are scaled by the given precision. 85 | , writeAuthentication :: !(Maybe Credentials) 86 | -- ^ No authentication by default 87 | , writeManager :: !(Either HC.ManagerSettings HC.Manager) 88 | -- ^ HTTP connection manager 89 | } 90 | 91 | -- | Smart constructor for 'WriteParams' 92 | -- 93 | -- Default parameters: 94 | -- 95 | -- ['server'] 'defaultServer' 96 | -- ['retentionPolicy'] 'Nothing' 97 | -- ['precision'] 'Nanosecond' 98 | -- ['authentication'] 'Nothing' 99 | -- ['manager'] @'Left' 'HC.defaultManagerSettings'@ 100 | writeParams :: Database -> WriteParams 101 | writeParams writeDatabase = WriteParams 102 | { writeServer = defaultServer 103 | , writePrecision = Nanosecond 104 | , writeRetentionPolicy = Nothing 105 | , writeAuthentication = Nothing 106 | , writeManager = Left HC.defaultManagerSettings 107 | , .. 108 | } 109 | 110 | -- | Write a 'Line'. 111 | -- 112 | -- >>> let p = writeParams "test-db" 113 | -- >>> write p $ Line @UTCTime "room_temp" Map.empty (Map.fromList [("temp", FieldFloat 25.0)]) Nothing 114 | write 115 | :: Timestamp time 116 | => WriteParams 117 | -> Line time 118 | -> IO () 119 | write p@WriteParams {writePrecision} = 120 | writeByteString p . encodeLine (scaleTo writePrecision) 121 | 122 | -- | Write multiple 'Line's in a batch. 123 | -- 124 | -- This is more efficient than calling 'write' multiple times. 125 | -- 126 | -- >>> let p = writeParams "test-db" 127 | -- >>> :{ 128 | -- writeBatch p 129 | -- [ Line @UTCTime "temp" (Map.singleton "city" "tokyo") (Map.fromList [("temp", FieldFloat 25.0)]) Nothing 130 | -- , Line @UTCTime "temp" (Map.singleton "city" "osaka") (Map.fromList [("temp", FieldFloat 25.2)]) Nothing 131 | -- ] 132 | -- :} 133 | writeBatch 134 | :: (Timestamp time, Foldable f) 135 | => WriteParams 136 | -> f (Line time) 137 | -> IO () 138 | writeBatch p@WriteParams {writePrecision} = 139 | writeByteString p . encodeLines (scaleTo writePrecision) 140 | 141 | -- | Write a raw 'BL.ByteString' 142 | writeByteString :: WriteParams -> BL.ByteString -> IO () 143 | writeByteString params payload = do 144 | manager' <- either HC.newManager return $ writeManager params 145 | response <- HC.httpLbs request manager' `catch` (throwIO . 
HTTPException) 146 | let body = HC.responseBody response 147 | status = HC.responseStatus response 148 | if BL.null body 149 | then do 150 | let message = B8.unpack $ HT.statusMessage status 151 | when (HT.statusIsServerError status) $ 152 | throwIO $ ServerError message 153 | when (HT.statusIsClientError status) $ 154 | throwIO $ ClientError message request 155 | else case A.eitherDecode' body of 156 | Left message -> 157 | throwIO $ UnexpectedResponse message request body 158 | Right val -> case A.parse parseErrorObject val of 159 | A.Success err -> 160 | fail $ "BUG: impossible code path in " 161 | ++ "Database.InfluxDB.Write.writeByteString: " 162 | ++ err 163 | A.Error message -> do 164 | when (HT.statusIsServerError status) $ 165 | throwIO $ ServerError message 166 | when (HT.statusIsClientError status) $ 167 | throwIO $ ClientError message request 168 | throwIO $ UnexpectedResponse 169 | ("BUG: " ++ message 170 | ++ " in Database.InfluxDB.Write.writeByteString") 171 | request 172 | (A.encode val) 173 | where 174 | request = (writeRequest params) 175 | { HC.requestBody = HC.RequestBodyLBS payload 176 | } 177 | 178 | writeRequest :: WriteParams -> HC.Request 179 | writeRequest WriteParams {..} = 180 | HC.setQueryString qs HC.defaultRequest 181 | { HC.host = TE.encodeUtf8 _host 182 | , HC.port = fromIntegral _port 183 | , HC.secure = _ssl 184 | , HC.method = "POST" 185 | , HC.path = "/write" 186 | } 187 | where 188 | Server {..} = writeServer 189 | qs = concat 190 | [ [ ("db", Just $ TE.encodeUtf8 $ databaseName writeDatabase) 191 | , ("precision", Just $ TE.encodeUtf8 $ precisionName writePrecision) 192 | ] 193 | , fromMaybe [] $ do 194 | Key name <- writeRetentionPolicy 195 | return [("rp", Just (TE.encodeUtf8 name))] 196 | , fromMaybe [] $ do 197 | Credentials { _user = u, _password = p } <- writeAuthentication 198 | return 199 | [ ("u", Just (TE.encodeUtf8 u)) 200 | , ("p", Just (TE.encodeUtf8 p)) 201 | ] 202 | ] 203 | 204 | makeLensesWith 205 | ( lensRules 206 | & generateSignatures .~ False 207 | & lensField .~ lookingupNamer 208 | [ ("writeServer", "_server") 209 | , ("writeDatabase", "_database") 210 | , ("writeRetentionPolicy", "retentionPolicy") 211 | , ("writePrecision", "_precision") 212 | , ("writeManager", "_manager") 213 | , ("writeAuthentication", "_authentication") 214 | ] 215 | ) 216 | ''WriteParams 217 | 218 | -- | 219 | -- >>> let p = writeParams "foo" 220 | -- >>> p ^. server.host 221 | -- "localhost" 222 | instance HasServer WriteParams where 223 | server = _server 224 | 225 | -- | 226 | -- >>> let p = writeParams "foo" 227 | -- >>> p ^. database 228 | -- "foo" 229 | instance HasDatabase WriteParams where 230 | database = _database 231 | 232 | -- | Target retention policy for the write. 233 | -- 234 | -- InfluxDB writes to the @default@ retention policy if this parameter is set 235 | -- to 'Nothing'. 236 | -- 237 | -- >>> let p = writeParams "foo" & retentionPolicy .~ Just "two_hours" 238 | -- >>> p ^. retentionPolicy 239 | -- Just "two_hours" 240 | retentionPolicy :: Lens' WriteParams (Maybe Key) 241 | 242 | -- | 243 | -- >>> let p = writeParams "foo" 244 | -- >>> p ^. 
precision 244 | -- Nanosecond 245 | instance HasPrecision 'WriteRequest WriteParams where 246 | precision = _precision 247 | 248 | -- | 249 | -- >>> let p = writeParams "foo" & manager .~ Left HC.defaultManagerSettings 250 | instance HasManager WriteParams where 251 | manager = _manager 252 | 253 | -- | Authentication info for the write 254 | -- 255 | -- >>> let p = writeParams "foo" 256 | -- >>> p ^. authentication 257 | -- Nothing 258 | -- >>> let p' = p & authentication ?~ credentials "john" "passw0rd" 259 | -- >>> p' ^. authentication . traverse . user 260 | -- "john" 261 | instance HasCredentials WriteParams where 262 | authentication = _authentication 263 | -------------------------------------------------------------------------------- /src/Database/InfluxDB/Write/UDP.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DataKinds #-} 2 | {-# LANGUAGE MultiParamTypeClasses #-} 3 | {-# LANGUAGE NamedFieldPuns #-} 4 | {-# LANGUAGE RecordWildCards #-} 5 | {-# LANGUAGE TemplateHaskell #-} 6 | module Database.InfluxDB.Write.UDP 7 | ( -- $intro 8 | 9 | -- * Writers 10 | write 11 | , writeBatch 12 | , writeByteString 13 | 14 | -- * Writer parameters 15 | , WriteParams 16 | , writeParams 17 | , socket 18 | , sockAddr 19 | , Types.precision 20 | ) where 21 | 22 | import Control.Lens 23 | import Network.Socket (SockAddr, Socket) 24 | import Network.Socket.ByteString (sendManyTo) 25 | import qualified Data.ByteString.Lazy as BL 26 | 27 | import Database.InfluxDB.Line 28 | import Database.InfluxDB.Types as Types 29 | 30 | {- $intro 31 | This module is designed to be used with the [network] 32 | (https://hackage.haskell.org/package/network) package and to be imported qualified. 33 | 34 | >>> :set -XOverloadedStrings -XNoOverloadedLists 35 | >>> import qualified Data.Map as Map 36 | >>> import Data.Time 37 | >>> import Network.Socket 38 | >>> import Database.InfluxDB 39 | >>> import qualified Database.InfluxDB.Write.UDP as UDP 40 | >>> sock <- Network.Socket.socket AF_INET Datagram defaultProtocol 41 | >>> let localhost = tupleToHostAddress (127, 0, 0, 1) 42 | >>> let params = UDP.writeParams sock $ SockAddrInet 8089 localhost 43 | >>> UDP.write params $ Line "measurement1" Map.empty (Map.fromList [("value", FieldInt 42)]) (Nothing :: Maybe UTCTime) 44 | >>> close sock 45 | 46 | Make sure that the UDP service is enabled in the InfluxDB config. This API 47 | doesn't tell you if any error occurs. See [the official doc] 48 | (https://docs.influxdata.com/influxdb/v1.6/supported_protocols/udp/) for 49 | details. 50 | -} 51 | 52 | -- | The full set of parameters for the UDP writer. 53 | data WriteParams = WriteParams 54 | { _socket :: !Socket 55 | , _sockAddr :: !SockAddr 56 | , _precision :: !(Precision 'WriteRequest) 57 | } 58 | 59 | -- | Smart constructor for 'WriteParams' 60 | -- 61 | -- Default parameters: 62 | -- 63 | -- ['precision'] 'Nanosecond' 64 | writeParams :: Socket -> SockAddr -> WriteParams 65 | writeParams _socket _sockAddr = WriteParams 66 | { _precision = Nanosecond 67 | , .. 68 | } 69 | 70 | -- | Write a 'Line' 71 | write 72 | :: Timestamp time 73 | => WriteParams 74 | -> Line time 75 | -> IO () 76 | write p@WriteParams {_precision} = 77 | writeByteString p . encodeLine (roundTo _precision) 78 | 79 | -- | Write 'Line's in a batch 80 | -- 81 | -- This is more efficient than 'write'.
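--
-- A rough usage sketch (not a doctest), reusing the @sock@ and @params@ values
-- from the module introduction above; the measurement, tag, and field names are
-- made up:
--
-- @
-- UDP.writeBatch params
--   [ Line "temperature" (Map.singleton "city" "tokyo")
--       (Map.fromList [("value", FieldFloat 25.0)]) (Nothing :: Maybe UTCTime)
--   , Line "temperature" (Map.singleton "city" "osaka")
--       (Map.fromList [("value", FieldFloat 25.2)]) (Nothing :: Maybe UTCTime)
--   ]
-- @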
82 | writeBatch 83 | :: (Timestamp time, Foldable f) 84 | => WriteParams 85 | -> f (Line time) 86 | -> IO () 87 | writeBatch p@WriteParams {_precision} = 88 | writeByteString p . encodeLines (roundTo _precision) 89 | 90 | -- | Write a lazy 'BL.ByteString' 91 | writeByteString :: WriteParams -> BL.ByteString -> IO () 92 | writeByteString WriteParams {..} payload = 93 | sendManyTo _socket (BL.toChunks payload) _sockAddr 94 | 95 | makeLensesWith (lensRules & generateSignatures .~ False) ''WriteParams 96 | 97 | -- | Open UDP socket 98 | socket :: Lens' WriteParams Socket 99 | 100 | -- | UDP endpoint of the database 101 | sockAddr :: Lens' WriteParams SockAddr 102 | 103 | precision :: Lens' WriteParams (Precision 'WriteRequest) 104 | 105 | -- | Timestamp precision. 106 | -- 107 | -- In the UDP API, all timestamps are sent in nanoseconds but you can specify 108 | -- lower precision. The writer just rounds timestamps to the specified 109 | -- precision. 110 | instance HasPrecision 'WriteRequest WriteParams where 111 | precision = Database.InfluxDB.Write.UDP.precision 112 | -------------------------------------------------------------------------------- /tests/doctests.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Build_doctests (flags, pkgs, module_sources) 4 | import Test.DocTest (doctest) 5 | 6 | main :: IO () 7 | main = doctest 8 | $ "-fobject-code" 9 | --- ^ Use object code to work around https://gitlab.haskell.org/ghc/ghc/-/issues/19460 10 | -- in GHC 9.0.1. 11 | : flags ++ pkgs ++ module_sources 12 | -------------------------------------------------------------------------------- /tests/regressions.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE TypeApplications #-} 2 | {-# LANGUAGE DataKinds #-} 3 | {-# LANGUAGE OverloadedStrings #-} 4 | {-# LANGUAGE QuasiQuotes #-} 5 | {-# LANGUAGE ScopedTypeVariables #-} 6 | {-# LANGUAGE TemplateHaskell #-} 7 | import Control.Exception (bracket_, try) 8 | 9 | import Control.Lens 10 | import Data.Time 11 | import Test.Tasty 12 | import Test.Tasty.HUnit 13 | import qualified Data.Map as M 14 | import qualified Data.Map.Strict as Map 15 | import qualified Data.Vector as V 16 | import qualified Text.RawString.QQ as Raw (r) 17 | 18 | import Database.InfluxDB 19 | import Database.InfluxDB.Line 20 | import qualified Database.InfluxDB.Format as F 21 | 22 | main :: IO () 23 | main = defaultMain $ testGroup "regression tests" 24 | [ testCase "issue #64" case_issue64 25 | , testCase "issue #66" case_issue66 26 | , testCaseSteps "issue #75" case_issue75 27 | , testCaseSteps "issue #79" case_issue79 28 | ] 29 | 30 | -- https://github.com/maoe/influxdb-haskell/issues/64 31 | case_issue64 :: Assertion 32 | case_issue64 = withDatabase dbName $ do 33 | write wp $ Line @UTCTime "count" Map.empty 34 | (Map.fromList [("value", FieldInt 1)]) 35 | Nothing 36 | r <- try $ query qp "SELECT value FROM count" 37 | case r of 38 | Left err -> case err of 39 | UnexpectedResponse message _ _ -> 40 | message `elem` [ 41 | "BUG: parsing Int failed, expected Number, but encountered String in Database.InfluxDB.Query.query", 42 | "BUG: expected Int, encountered String in Database.InfluxDB.Query.query" 43 | ] @? "Correct error message."
44 | _ -> 45 | assertFailure $ got ++ show err 46 | Right (v :: (V.Vector (Tagged "time" Int, Tagged "value" Int))) -> 47 | -- NOTE: The time columns should be UTCTime, Text, or String 48 | assertFailure $ got ++ "no errors: " ++ show v 49 | where 50 | dbName = "case_issue64" 51 | qp = queryParams dbName & precision .~ RFC3339 52 | wp = writeParams dbName 53 | got = "expected an UnexpectedResponse but got " 54 | 55 | -- https://github.com/maoe/influxdb-haskell/issues/66 56 | case_issue66 :: Assertion 57 | case_issue66 = do 58 | r <- try $ query (queryParams "_internal") "SELECT time FROM dummy" 59 | case r of 60 | Left err -> case err of 61 | UnexpectedResponse message _ _ -> 62 | message @?= 63 | "BUG: at least 1 non-time field must be queried in Database.InfluxDB.Query.query" 64 | _ -> 65 | assertFailure $ got ++ show err 66 | Right (v :: V.Vector (Tagged "time" Int)) -> 67 | assertFailure $ got ++ "no errors: " ++ show v 68 | where 69 | got = "expected an UnexpectedResponse but got " 70 | 71 | -- https://github.com/maoe/influxdb-haskell/issues/75 72 | case_issue75 :: (String -> IO ()) -> Assertion 73 | case_issue75 step = do 74 | step "Checking encoded value" 75 | let string = [Raw.r|bl\"a|] 76 | let encoded = encodeLine (scaleTo Nanosecond) 77 | $ Line @UTCTime "testing" mempty 78 | (M.singleton "test" $ FieldString string) 79 | Nothing 80 | encoded @?= [Raw.r|testing test="bl\\\"a"|] 81 | 82 | step "Preparing a test database" 83 | let db = "issue75" 84 | let p = queryParams db 85 | manage p $ formatQuery ("DROP DATABASE "%F.database) db 86 | manage p $ formatQuery ("CREATE DATABASE "%F.database) db 87 | 88 | step "Checking server response" 89 | let wp = writeParams db 90 | writeByteString wp encoded 91 | 92 | case_issue79 :: (String -> IO ()) -> Assertion 93 | case_issue79 step = withDatabase db $ do 94 | let w = writeParams db 95 | let q = queryParams db 96 | step "Querying an empty series with two fields expected" 97 | _ <- query @(Tagged "time" UTCTime, Tagged "value" Int) q "SELECT * FROM foo" 98 | step "Querying an empty series with the results ignored" 99 | _ <- query @Ignored q "SELECT * FROM foo" 100 | step "Querying an empty series expecting an empty result" 101 | _ <- query @Empty q "SELECT * FROM foo" 102 | step "Writing a data point" 103 | write w $ Line @UTCTime "foo" mempty (Map.fromList [("value", FieldInt 42)]) Nothing 104 | step "Querying a non-empty series with two fields expected" 105 | _ <- query @(Tagged "time" UTCTime, Tagged "value" Int) q "SELECT * FROM foo" 106 | step "Querying a non-empty series with the results ignored" 107 | _ <- query @Ignored q "SELECT * FROM foo" 108 | return () 109 | where 110 | db = "case_issue79" 111 | 112 | withDatabase :: Database -> IO a -> IO a 113 | withDatabase dbName f = bracket_ 114 | (manage q (formatQuery ("CREATE DATABASE "%F.database) dbName)) 115 | (manage q (formatQuery ("DROP DATABASE "%F.database) dbName)) 116 | f 117 | where 118 | q = queryParams dbName 119 | --------------------------------------------------------------------------------
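The following stand-alone sketch is not a file from this repository; it strings together the HTTP writer and the query interface shown above, following the doctests in Database.InfluxDB.Write and the patterns in tests/regressions.hs. The database name "example-db", the measurement "room_temp", and the field "temp" are made up, and a stock InfluxDB 1.x server on localhost:8086 is assumed.

{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}
module Main where

import Control.Lens
import qualified Data.Map as Map
import Data.Time (UTCTime)
import Database.InfluxDB
import qualified Database.InfluxDB.Format as F

main :: IO ()
main = do
  let db = "example-db"
      qp = queryParams db & precision .~ RFC3339
      wp = writeParams db
  -- Create the target database; CREATE DATABASE is idempotent in InfluxDB 1.x.
  manage qp $ formatQuery ("CREATE DATABASE "%F.database) db
  -- Write one point over HTTP, letting the server assign the timestamp.
  write wp $ Line @UTCTime "room_temp" Map.empty
    (Map.fromList [("temp", FieldFloat 25.0)])
    Nothing
  -- Read the point back; the Tagged tuple shape mirrors tests/regressions.hs.
  rows <- query @(Tagged "time" UTCTime, Tagged "temp" Double) qp
    "SELECT temp FROM room_temp"
  print rows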