├── .lein-classpath ├── bin └── ci.sh ├── doc ├── source │ ├── _static │ │ ├── Rules.png │ │ ├── cyanite.png │ │ └── Architecture.png │ ├── api.rst │ ├── index.rst │ ├── quickstart.rst │ ├── administrator.rst │ ├── concepts.rst │ └── conf.py ├── graphite-api.conf ├── README.md ├── schema.cql ├── cyanite.yaml ├── grafana-defaults.ini └── Makefile ├── pkg └── deb │ ├── postrm.sh │ ├── prerm.sh │ ├── cyanite.default │ ├── cyanite │ ├── preinst.sh │ ├── postinst.sh │ └── init.sh ├── .travis.yml ├── tokenizer ├── project.clj └── src │ └── apache │ └── cassandra │ └── index │ └── sasi │ └── analyzer │ └── SplittingTokenizer.java ├── .gitignore ├── src └── io │ ├── cyanite │ ├── query │ │ ├── path.clj │ │ ├── ast.clj │ │ └── parser.clj │ ├── input.clj │ ├── config.clj │ ├── utils.clj │ ├── store │ │ ├── pure.clj │ │ └── cassandra.clj │ ├── input │ │ └── carbon.clj │ ├── pool.clj │ ├── query.clj │ ├── engine │ │ ├── rule.clj │ │ ├── queue.clj │ │ └── drift.clj │ ├── store.clj │ ├── index.clj │ ├── engine.clj │ ├── index │ │ └── cassandra.clj │ └── api.clj │ └── cyanite.clj ├── resources └── logback.xml ├── perf ├── soakTest.py └── perfTest.jmx ├── LICENSE ├── README.md ├── CONTRIBTING.md ├── test ├── resources │ ├── schema_with_tokeniser.cql │ └── schema.cql └── io │ └── cyanite │ ├── query_test.clj │ ├── query │ └── ast_test.clj │ ├── integration │ ├── index_test.clj │ └── engine_test.clj │ ├── test_helper.clj │ └── dsl_test.clj ├── project.clj ├── Makefile └── tasks └── leiningen └── fatdeb.clj /.lein-classpath: -------------------------------------------------------------------------------- 1 | :tasks 2 | -------------------------------------------------------------------------------- /bin/ci.sh: -------------------------------------------------------------------------------- 1 | make start_one_node_cluster && \ 2 | lein2 do clean, test, clean && \ 3 | make stop_cluster 4 | -------------------------------------------------------------------------------- 
/doc/source/_static/Rules.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyr/cyanite/master/doc/source/_static/Rules.png -------------------------------------------------------------------------------- /doc/source/_static/cyanite.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyr/cyanite/master/doc/source/_static/cyanite.png -------------------------------------------------------------------------------- /doc/source/_static/Architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pyr/cyanite/master/doc/source/_static/Architecture.png -------------------------------------------------------------------------------- /pkg/deb/postrm.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | if [ "$1" = "purge" ] ; then 4 | update-rc.d cyanite remove >/dev/null 5 | fi 6 | -------------------------------------------------------------------------------- /pkg/deb/prerm.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | if [ -x "/etc/init.d/cyanite" ]; then 4 | invoke-rc.d cyanite stop || exit $? 
5 | fi 6 | -------------------------------------------------------------------------------- /doc/graphite-api.conf: -------------------------------------------------------------------------------- 1 | search_index: site/index 2 | cyanite: 3 | urls: 4 | - http://localhost:8080 5 | finders: 6 | - cyanite.CyaniteFinder 7 | -------------------------------------------------------------------------------- /pkg/deb/cyanite.default: -------------------------------------------------------------------------------- 1 | # Optionally add classes to the classpath for additional functionality 2 | # EXTRA_CLASSPATH= 3 | 4 | # Optional JAVA_OPTS 5 | # EXTRA_JAVA_OPTS= -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | language: clojure 3 | lein: lein2 4 | script: ./bin/ci.sh 5 | jdk: 6 | - oraclejdk8 7 | branches: 8 | except: 9 | - gh-pages 10 | -------------------------------------------------------------------------------- /tokenizer/project.clj: -------------------------------------------------------------------------------- 1 | (defproject io.cyanite/tokenizer "0.0.1" 2 | :dependencies [[com.google.guava/guava "18.0"] 3 | [org.apache.cassandra/cassandra-all "3.9"] 4 | ] 5 | :java-source-paths ["src/"] 6 | 7 | 8 | ) 9 | -------------------------------------------------------------------------------- /pkg/deb/cyanite: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ -f /etc/default/cyanite ]; then 4 | . 
/etc/default/cyanite 5 | fi 6 | 7 | JAR="$EXTRA_CLASSPATH:/usr/lib/cyanite/cyanite.jar" 8 | CONFIG="/etc/cyanite.yaml" 9 | 10 | exec java $EXTRA_JAVA_OPTS $OPTS -cp "$JAR" io.cyanite "$CONFIG" 11 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /site 2 | /doc/build/ 3 | /target 4 | /classes 5 | /checkouts 6 | /conf 7 | pom.xml 8 | pom.xml.asc 9 | *.jar 10 | *.class 11 | /.lein-repl-history 12 | /.lein-deps-sum 13 | /.lein-failures 14 | /.nrepl-port 15 | .sass-cache 16 | _site 17 | node_modules 18 | /dev 19 | /grafana 20 | /graphite-stresser -------------------------------------------------------------------------------- /pkg/deb/preinst.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Create cyanite user and group 3 | set -e 4 | 5 | USERNAME="cyanite" 6 | GROUPNAME="cyanite" 7 | getent group "$GROUPNAME" >/dev/null || groupadd -r "$GROUPNAME" 8 | getent passwd "$USERNAME" >/dev/null || \ 9 | useradd -r -g "$GROUPNAME" -d /usr/lib/cyanite -s /bin/false \ 10 | -c "Cyanite metric storage daemon" "$USERNAME" 11 | exit 0 12 | -------------------------------------------------------------------------------- /src/io/cyanite/query/path.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.query.path) 2 | 3 | (defn extract-paths 4 | "Extract paths from an AST" 5 | [tokens] 6 | (if (sequential? 
tokens) 7 | (let [[opcode & args] tokens] 8 | (if (= opcode :path) 9 | args 10 | (mapcat extract-paths args))) 11 | [])) 12 | 13 | (defn tokens->paths 14 | "Unique list of paths to get for an AST" 15 | [ast] 16 | (set (extract-paths ast))) 17 | -------------------------------------------------------------------------------- /resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /doc/README.md: -------------------------------------------------------------------------------- 1 | Cyanite Documentation 2 | ===================== 3 | 4 | This directory contains the build system for Cyanite's 5 | documentation, based on [sphinx](http://sphinx-doc.org). 6 | 7 | You will need both [sphinx](http://sphinx-doc.org) and 8 | the [read the docs theme](https://github.com/snide/sphinx_rtd_theme) for sphinx. 9 | 10 | Once these dependencies are available, you can build the documentation 11 | locally by running `make html`. 
12 | 13 | Up to date docs will also be published on http://cyanite.io 14 | -------------------------------------------------------------------------------- /src/io/cyanite/input.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.input 2 | (:require [com.stuartsierra.component :as component] 3 | [io.cyanite.engine :as engine] 4 | [io.cyanite.input.carbon :as carbon] 5 | [clojure.tools.logging :refer [info]])) 6 | 7 | (defmulti build-input (comp (fnil keyword "carbon") :type)) 8 | 9 | (defmethod build-input :carbon 10 | [options] 11 | (component/using 12 | (carbon/map->CarbonTCPInput (assoc options 13 | :port (or (:port options) 2003) 14 | :pipeline carbon/pipeline)) 15 | [:engine])) 16 | -------------------------------------------------------------------------------- /perf/soakTest.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import sys, os, time 4 | from socket import socket 5 | from random import random 6 | 7 | try: 8 | host = sys.argv[1] 9 | port = int(sys.argv[2]) 10 | mpm = int(sys.argv[3]) 11 | except: 12 | print 'Usage: %s host port metrics-per-minute' % os.path.basename(sys.argv[0]) 13 | sys.exit(1) 14 | 15 | s = socket() 16 | s.connect( (host,port) ) 17 | now = int( time.time() ) 18 | now -= now % 60 19 | 20 | while True: 21 | start = time.time() 22 | count = 0 23 | for i in xrange(0, mpm): 24 | metric = 'TEST.%d' % i 25 | value = random() 26 | s.sendall('%s %s %s\n' % (metric, value, now)) 27 | count += 1 28 | 29 | print 'sent %d metrics in %.3f seconds' % (count, time.time() - start) 30 | now += 60 31 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2013 Pierre-Yves Ritschard 2 | 3 | Permission to use, copy, modify, and distribute this software for any 4 | purpose with or without fee is hereby 
granted, provided that the above 5 | notice and this permission notice appear in all copies. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES 8 | WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF 9 | MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR 10 | ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES 11 | WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN 12 | ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF 13 | OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 14 | -------------------------------------------------------------------------------- /src/io/cyanite/config.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.config 2 | "Yaml config parser, with a poor man's dependency injector" 3 | (:require [com.stuartsierra.component :as component] 4 | [clj-yaml.core :refer [parse-string]] 5 | [clojure.tools.logging :refer [error info debug]])) 6 | 7 | (def default-logging 8 | "Default logging configuration. Refer to https://github.com/pyr/unilog 9 | for details." 10 | {:pattern "%p [%d] %t - %c - %m%n" 11 | :external false 12 | :console true 13 | :files [] 14 | :level "info"}) 15 | 16 | (defn load-path 17 | "Try to find a pathname, on the command line, in 18 | system properties or the environment and load it." 19 | [path] 20 | (-> (or path 21 | (System/getProperty "cyanite.configuration") 22 | (System/getenv "CYANITE_CONFIGURATION") 23 | "/etc/cyanite.yaml") 24 | slurp 25 | parse-string)) 26 | -------------------------------------------------------------------------------- /pkg/deb/postinst.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Fakeroot and lein don't get along, so we set ownership after the fact. 
3 | set -e 4 | 5 | chown -R root:root /usr/lib/cyanite 6 | chown root:root /usr/bin/cyanite 7 | chown cyanite:cyanite /var/log/cyanite 8 | chown cyanite:cyanite /etc/cyanite.yaml 9 | chown root:root /etc/init.d/cyanite 10 | 11 | readonly KEYSPACE=metric 12 | 13 | if command -v cqlsh > /dev/null 2>&1; then 14 | if cqlsh -e "QUIT" > /dev/null 2>&1; then 15 | if cqlsh -k $KEYSPACE -e "SELECT path from metric LIMIT 1" > /dev/null 2>&1; then 16 | echo "Cassandra keyspace '$KEYSPACE' already exists." 17 | else 18 | echo "Creating Cassandra schema in the keyspace '$KEYSPACE'..." 19 | cqlsh -f /var/lib/cyanite/schema.cql 20 | fi 21 | else 22 | echo "Cannot connect to Cassandra. Skipping schema creation." 23 | fi 24 | else 25 | echo "Cassandra CQL Shell not found." 26 | fi 27 | 28 | if [ -x "/etc/init.d/cyanite" ]; then 29 | update-rc.d cyanite start 50 2 3 4 5 . stop 50 0 1 6 . >/dev/null 30 | invoke-rc.d cyanite start || exit $? 31 | fi 32 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Cyanite 2 | 3 | Cyanite is a daemon which provides services to store and retrieve timeseries data. 4 | It aims to serve as a drop-in replacement for Graphite/Graphite-web. 5 | 6 | # Getting Started 7 | 8 | Before you begin, make sure you have the following installed: 9 | 10 | * [Java 1.7+](https://java.com/de/download/) 11 | * [Cassandra 3.5+](http://cassandra.apache.org/) 12 | 13 | You can download the latest distribution of graphite from [GitHub releases](https://github.com/pyr/cyanite/releases) 14 | and start it with: 15 | 16 | ``` 17 | java -jar .jar --path .yaml 18 | ``` 19 | 20 | [See default configuration and basic configuration options.](https://github.com/pyr/cyanite/blob/master/doc/cyanite.yaml) 21 | 22 | For advanced usage and information on possible Cyanite optimisations, refer to [configuration guide](http://cyanite.io/concepts.html). 
23 | 24 | # Getting help 25 | 26 | You can get help by creating an issue or asking on IRC channel `#cyanite` on freenode. 27 | 28 | For more information, refer to http://cyanite.io 29 | -------------------------------------------------------------------------------- /CONTRIBTING.md: -------------------------------------------------------------------------------- 1 | # Running tests 2 | 3 | In order to run integration tests, you have to have Cassandra running. You can 4 | do it either manually or with `make start_one_node_cluster` script. It will 5 | start a single node cluster with pre-loaded `cyanite_test` keyspace. 6 | 7 | In order to shutdown the test cluster, run `make stop_cluster`. 8 | 9 | # Making sure grafana integration works 10 | 11 | You can start a local grafana server along with the cyanite and a single 12 | node cassandra cluster from our `Makefile`. For that, you should run 13 | 14 | ``` 15 | make start_one_node_cluster 16 | make grafana-server 17 | ``` 18 | 19 | Ideally, it will download, compile and set up everything you may need. 20 | Usually grafana wants `go` and `nodejs` to be pre-installed. On Mac Os X, 21 | you can install them via 22 | 23 | ``` 24 | brew install go 25 | brew install godep 26 | brew install nvm 27 | # Set-up nvm 28 | nvm install v6.1.0 29 | ``` 30 | 31 | # Running stress-tests 32 | 33 | In order to run `graphite-stresser` against cyanite, you can run 34 | 35 | ``` 36 | make stress 37 | ``` 38 | 39 | All additional configuration options are available via (graphite-stresser)[https://github.com/feangulo/graphite-stresser] 40 | itself. 
41 | -------------------------------------------------------------------------------- /src/io/cyanite/utils.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.utils 2 | "These didn't fit anywhere else" 3 | (:import org.cliffc.high_scale_lib.NonBlockingHashMap)) 4 | 5 | (defprotocol MutableMap 6 | "Mutable map functionality" 7 | (entries [this] "Return a set of entries") 8 | (keyset [this] "Return the keyset") 9 | (remove! [this k] "Atomically remove and return an element") 10 | (contains-key [this key] "Checks if map contains the key") 11 | (assoc-if-absent! [this k v] "CAS type put")) 12 | 13 | (defn nbhm 14 | "Yield a NonBlockingHashMap" 15 | [] 16 | (let [db (NonBlockingHashMap.)] 17 | (reify 18 | clojure.lang.ITransientMap 19 | (assoc [this k v] 20 | (.put db k v) 21 | this) 22 | MutableMap 23 | (entries [this] 24 | (.entrySet db)) 25 | (keyset [this] 26 | (.keySet db)) 27 | (contains-key [this key] 28 | (.containsKey db key)) 29 | (remove! [this k] 30 | (.remove db k)) 31 | (assoc-if-absent! 
[this k v] 32 | (.putIfAbsent db k v)) 33 | clojure.lang.Seqable 34 | (seq [this] 35 | (let [keys (.keySet db)] 36 | (map #(.get db %) (.keySet db)))) 37 | clojure.lang.ILookup 38 | (valAt [this k] 39 | (.get db k)) 40 | (valAt [this k def] 41 | (or (.get db k) def)) 42 | java.lang.Object 43 | (toString [this] 44 | (.toString db))))) 45 | -------------------------------------------------------------------------------- /test/resources/schema_with_tokeniser.cql: -------------------------------------------------------------------------------- 1 | DROP KEYSPACE IF EXISTS cyanite_test_with_tokenizer; 2 | CREATE KEYSPACE IF NOT EXISTS cyanite_test_with_tokenizer WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; 3 | 4 | USE cyanite_test_with_tokenizer; 5 | 6 | CREATE TABLE IF NOT EXISTS segment ( 7 | parent text, 8 | segment text, 9 | pos int, 10 | length int, 11 | leaf boolean, 12 | PRIMARY KEY (parent, segment) 13 | ) WITH CLUSTERING ORDER BY (segment ASC) 14 | AND bloom_filter_fp_chance = 0.01 15 | AND caching = {'keys':'ALL', 'rows_per_partition':'NONE'} 16 | AND comment = '' 17 | AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy'} 18 | AND compression = {'sstable_compression': 'org.apache.cassandra.io.compress.LZ4Compressor'} 19 | AND dclocal_read_repair_chance = 0.1 20 | AND default_time_to_live = 0 21 | AND gc_grace_seconds = 864000 22 | AND max_index_interval = 2048 23 | AND memtable_flush_period_in_ms = 0 24 | AND min_index_interval = 128 25 | AND read_repair_chance = 0.0 26 | AND speculative_retry = '99.0PERCENTILE'; 27 | 28 | CREATE CUSTOM INDEX IF NOT EXISTS on segment(segment) USING 'org.apache.cassandra.index.sasi.SASIIndex' WITH OPTIONS = {'analyzer_class': 'org.apache.cassandra.index.sasi.analyzer.SplittingTokenizer'}; 29 | CREATE CUSTOM INDEX IF NOT EXISTS on segment(pos) USING 'org.apache.cassandra.index.sasi.SASIIndex'; 
-------------------------------------------------------------------------------- /doc/source/api.rst: -------------------------------------------------------------------------------- 1 | Cyanite HTTP Service 2 | ==================== 3 | 4 | The Cyanite API is responsible for exposing an HTTP service to service queries. 5 | The Cyanite API exposes the following HTTP routes: 6 | 7 | - ``/ping``: report online status 8 | - ``/metrics``: query metrics. Takes ``from``, ``to`` (optional), and any number of ``path`` arguments. 9 | - ``/paths``: query paths. Takes a ``query`` argument. 10 | 11 | Authentication 12 | -------------- 13 | 14 | Cyanite provides no authentication means. 15 | 16 | Authorization 17 | ------------- 18 | 19 | Cyanite provides no authorization methods. 20 | 21 | Routes 22 | ------ 23 | 24 | ``/ping`` 25 | ~~~~~~~~~ 26 | 27 | Report online status. 28 | 29 | Input 30 | Takes no parameter 31 | 32 | Output 33 | .. sourcecode:: json 34 | 35 | { 36 | "response": "pong" 37 | } 38 | 39 | 40 | ``/paths`` 41 | ~~~~~~~~~~ 42 | 43 | Query available metric paths. 44 | 45 | Input 46 | - ``query``: A valid path query 47 | 48 | Output 49 | .. sourcecode:: json 50 | 51 | { 52 | "paths": [ 53 | "path1", 54 | "path2", 55 | "pathN" 56 | ] 57 | } 58 | 59 | 60 | ``/metrics`` 61 | ~~~~~~~~~~~~ 62 | 63 | Query metric time-series 64 | 65 | Input 66 | - ``from``: Timestamp at which to start query. 67 | - ``to``: Optional timestamp at which to stop querying. 68 | Assume wall-clock time if unspecified. 69 | - ``path``: May be supplied several times. Path or path 70 | query to retrieve. 71 | 72 | Output 73 | .. 
sourcecode:: json 74 | 75 | { 76 | "from" : 1437572670, 77 | "to" : 1437572690, 78 | "step" : 10, 79 | "series": { "web01.cpu": [ 30.0, 40.0, 50.0 ] 80 | } 81 | 82 | -------------------------------------------------------------------------------- /src/io/cyanite/store/pure.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.store.pure) 2 | 3 | (defn max-points 4 | [paths resolution from to] 5 | (-> (- to from) 6 | (/ resolution) 7 | (long) 8 | (inc) 9 | (* (count paths)) 10 | (int))) 11 | 12 | (defn fill-in 13 | [nils data] 14 | (->> (group-by :time data) 15 | (map (fn [[k v]] [k (-> v first :point)])) 16 | (reduce merge {}) 17 | (merge nils) 18 | (sort-by key) 19 | (mapv val))) 20 | 21 | (defn greatest-precision 22 | [data] 23 | (->> (map (comp :precision :resolution :id) data) 24 | (sort) 25 | (last))) 26 | 27 | (defn normalize-time 28 | [greatest metric] 29 | (update metric :time #(-> (quot % greatest) (* greatest)))) 30 | 31 | (defn normalize-to 32 | [greatest] 33 | (fn [[_ raw]] 34 | (->> (sort-by :time raw) 35 | (map (partial normalize-time greatest)) 36 | (partition-by :time) 37 | (map first)))) 38 | 39 | (defn normalize 40 | [data] 41 | (let [greatest (greatest-precision data)] 42 | (vector greatest 43 | (mapcat (normalize-to greatest) (group-by :id data))))) 44 | 45 | (defn empty-series 46 | [min-point max-point precision] 47 | (->> (range min-point (inc max-point) precision) 48 | (map #(vector % nil)) 49 | (reduce merge {}))) 50 | 51 | (defn data->series 52 | [data to precision] 53 | (if-let [points (seq (sort-by :time data))] 54 | (let [min-point (-> points first :time) 55 | max-point (-> to (quot precision) (* precision)) 56 | nil-points (empty-series min-point max-point precision) 57 | by-path (->> (group-by (comp :path :id) points) 58 | (map (fn [[k v]] [k (fill-in nil-points v)])) 59 | (reduce merge {}))] 60 | {:from min-point 61 | :to max-point 62 | :step precision 63 | :series by-path}) 64 | 
{:from 0 :to 0 :step 0 :series {}})) 65 | -------------------------------------------------------------------------------- /project.clj: -------------------------------------------------------------------------------- 1 | (defproject io.cyanite/cyanite "0.5.1" 2 | :description "Alternative storage backend for graphite, backed by cassandra" 3 | :url "https://github.com/pyr/cyanite" 4 | :license {:name "MIT License" 5 | :url "https://github.com/pyr/cyanite/tree/master/LICENSE"} 6 | :maintainer {:email "pyr@spootnik.org"} 7 | :profiles {:uberjar {:aot :all 8 | :jvm-opts ["-Dclojure.compiler.direct-linking=true"]} 9 | :dev {:global-vars {*warn-on-reflection* true}}} 10 | :main io.cyanite 11 | :plugins [[lein-ancient "0.6.7"]] 12 | :dependencies [[org.clojure/clojure "1.9.0-alpha14"] 13 | [org.clojure/tools.logging "0.3.1"] 14 | [org.clojure/tools.cli "0.3.3"] 15 | [com.stuartsierra/component "0.3.2"] 16 | [spootnik/unilog "0.7.19"] 17 | [spootnik/uncaught "0.5.3"] 18 | [spootnik/globber "0.4.1"] 19 | [spootnik/reporter "0.1.17"] 20 | [spootnik/signal "0.2.1"] 21 | [spootnik/net "0.3.3-beta9"] 22 | [org.javassist/javassist "3.21.0-GA"] 23 | [instaparse "1.4.5"] 24 | [cheshire "5.7.0"] 25 | [clj-yaml "0.4.0"] 26 | [clj-time "0.13.0"] 27 | [com.github.ben-manes.caffeine/caffeine "2.4.0"] 28 | [cc.qbits/alia "4.0.0-beta7"] 29 | [org.jctools/jctools-core "2.0.1"] 30 | [com.boundary/high-scale-lib "1.0.6"] 31 | [net.jpountz.lz4/lz4 "1.3.0"] 32 | [org.xerial.snappy/snappy-java "1.1.2.6"]]) 33 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | GRAFANA_DIR := ./grafana/ 2 | STRESSER_DIR := ./graphite-stresser/ 3 | CLUSTER_NAME := cyanite_cluster 4 | CASSANDRA_VERSION := binary:3.9 5 | 6 | maybe_install_ccm: 7 | which ccm || test -s ~/.local/bin/ccm || pip install --user ccm 8 | 9 | prepare_aliases: 10 | sudo ifconfig lo0 alias 127.0.0.2 up ;\ 11 | sudo 
ifconfig lo0 alias 127.0.0.2 up 12 | 13 | start_one_node_cluster: maybe_install_ccm 14 | ccm create $(CLUSTER_NAME) -v $(CASSANDRA_VERSION) ;\ 15 | cd tokenizer ;\ 16 | lein uberjar ;\ 17 | cd .. ;\ 18 | cp tokenizer/target/tokenizer-0.0.1.jar ~/.ccm/repository/3.9/lib/ ;\ 19 | ccm populate -n 1 ;\ 20 | ccm start ;\ 21 | sleep 20 ;\ 22 | ccm node1 cqlsh < test/resources/schema.cql ;\ 23 | ccm node1 cqlsh < test/resources/schema_with_tokeniser.cql 24 | 25 | 26 | reload_schema: maybe_install_ccm 27 | ccm node1 cqlsh < test/resources/schema.cql 28 | 29 | .PHONY: clean 30 | stop_cluster: 31 | ccm remove $(CLUSTER_NAME) 32 | 33 | .PHONY: clean 34 | clean: 35 | pip uninstall ccm 36 | 37 | #.PHONY: $(GRAFANA_DIR) 38 | $(GRAFANA_DIR): 39 | mkdir $(GRAFANA_DIR) ;\ 40 | cd $(GRAFANA_DIR) ;\ 41 | export GOPATH=`pwd` ;\ 42 | go get github.com/grafana/grafana ;\ 43 | cd $(GOPATH)/src/github.com/grafana/grafana ;\ 44 | go run build.go setup ;\ 45 | $(GOPATH)/bin/godep restore ;\ 46 | go run build.go build ;\ 47 | npm install grunt --save-dev ;\ 48 | npm install ;\ 49 | npm install -g grunt-cli ;\ 50 | grunt 51 | 52 | grafana-server: $(GRAFANA_DIR) 53 | cd $(GRAFANA_DIR) ;\ 54 | cd src/github.com/grafana/grafana/ ;\ 55 | ./bin/grafana-server 56 | 57 | $(STRESSER_DIR): 58 | git clone git@github.com:feangulo/graphite-stresser.git ;\ 59 | cd graphite-stresser ;\ 60 | ./gradlew uberjar 61 | 62 | STRESS_HOSTS := 100 63 | STRESS_TIMERS := 20 64 | STRESS_INTERVAL := 1 65 | 66 | stress: $(STRESSER_DIR) 67 | java -jar $(STRESSER_DIR)/build/libs/graphite-stresser-0.1.jar localhost 2003 $(STRESS_HOSTS) $(STRESS_TIMERS) $(STRESS_INTERVAL) true 68 | 69 | .PHONY: dev 70 | dev: 71 | lein run --path ./dev/cyanite.yaml 72 | -------------------------------------------------------------------------------- /src/io/cyanite/input/carbon.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.input.carbon 2 | (:require [com.stuartsierra.component :as 
component] 3 | [io.cyanite.engine :as engine] 4 | [net.tcp :as tcp] 5 | [net.ty.pipeline :as pipeline] 6 | [clojure.string :refer [split]] 7 | [clojure.tools.logging :refer [info warn]])) 8 | 9 | (defn parse-line 10 | [^String line] 11 | (let [[path metric time & garbage] (split line #"\s+")] 12 | (cond 13 | garbage 14 | (throw (ex-info "invalid carbon line: too many fields" {:line line})) 15 | 16 | (not (and (seq path) (seq metric) (seq time))) 17 | (throw (ex-info "invalid carbon line: missing fields" {:line line})) 18 | 19 | (re-find #"(?i)nan" metric) 20 | (throw (ex-info (str "invalid carbon line: NaN metric for path:" path) 21 | {:line line 22 | :path path}))) 23 | (let [metric (try (.doubleValue (Double. metric)) 24 | (catch NumberFormatException e 25 | (throw (ex-info "invalid metric" {:metric metric})))) 26 | time (try (.longValue (Double. time)) 27 | (catch NumberFormatException e 28 | (throw (ex-info "invalid time" {:time time}))))] 29 | [path time metric]))) 30 | 31 | (defn pipeline 32 | [engine read-timeout] 33 | (pipeline/channel-initializer 34 | [(pipeline/line-based-frame-decoder 2048) 35 | pipeline/string-decoder 36 | (pipeline/read-timeout-handler read-timeout) 37 | (pipeline/with-input [ctx msg] 38 | (when (seq msg) 39 | (let [[path time metric] (parse-line msg)] 40 | (engine/ingest! 
engine path time metric))))])) 41 | 42 | (defrecord CarbonTCPInput [host port timeout server engine] 43 | component/Lifecycle 44 | (start [this] 45 | (let [timeout (or timeout 30) 46 | host (or host "127.0.0.1") 47 | port (or port 2002) 48 | server (tcp/server {:handler (pipeline engine timeout)} host port)] 49 | (try 50 | (assoc this :server server) 51 | (catch Exception e 52 | (warn e "could not start server"))))) 53 | (stop [this] 54 | (when server 55 | (server)) 56 | (assoc this :server nil))) 57 | -------------------------------------------------------------------------------- /src/io/cyanite/pool.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.pool 2 | (:require [com.stuartsierra.component :as component] 3 | [spootnik.reporter :as r] 4 | [clojure.tools.logging :refer [info error]]) 5 | (:import java.util.concurrent.ScheduledThreadPoolExecutor 6 | java.util.concurrent.Executors 7 | java.util.concurrent.TimeUnit)) 8 | 9 | (defn set-thread-name! 10 | [thread-name] 11 | (.setName (Thread/currentThread) (name thread-name))) 12 | 13 | (defonce secs TimeUnit/SECONDS) 14 | 15 | (defprotocol Scheduler 16 | (submit [this f]) 17 | (shutdown [this]) 18 | (shutdown-now [this]) 19 | (join [this]) 20 | (recurring [this f delay])) 21 | 22 | (defn wrapped 23 | [reporter f] 24 | (fn [] 25 | (try 26 | (f) 27 | (catch InterruptedException e 28 | (info "caught shutdown, stopping task.")) 29 | (catch java.nio.channels.ClosedByInterruptException e 30 | (info "caught shutdown in I/O, stopping task.")) 31 | (catch Exception e 32 | (r/capture! reporter e))))) 33 | 34 | (defrecord Pool [pool threads reporter] 35 | component/Lifecycle 36 | (start [this] 37 | (let [pool (ScheduledThreadPoolExecutor. (or threads 10))] 38 | (r/build! 
reporter 39 | :gauge [:cyanite :pool :queue :size] 40 | #(.size (.getQueue pool))) 41 | (assoc this :pool pool))) 42 | (stop [this] 43 | (let [remains (.shutdownNow pool)] 44 | (info "found" (count remains) "pending tasks during shutdown.")) 45 | (assoc this :pool nil)) 46 | Scheduler 47 | (submit [this f] 48 | (.submit pool (wrapped reporter f))) 49 | (shutdown [this] 50 | (.shutdown pool)) 51 | (shutdown-now [this] 52 | (.shutdownNow pool)) 53 | (join [this] 54 | (while (not (.awaitTermination pool 10000 secs)) nil)) 55 | (recurring [this f delay] 56 | (.scheduleAtFixedRate pool (wrapped reporter f) delay delay secs))) 57 | 58 | (defn cancelled? 59 | [task] 60 | (.isCancelled task)) 61 | 62 | (defn done? 63 | [task] 64 | (.isDone task)) 65 | 66 | (defn stopped? 67 | [task] 68 | (or (cancelled? task) (done? task))) 69 | 70 | (defmacro with-pool 71 | [pool & body] 72 | `(submit ~pool (fn [] ~@body))) 73 | 74 | (defmacro with-schedule 75 | [[pool delay] & body] 76 | `(recurring ~pool (fn [] ~@body) ~delay)) 77 | 78 | (defn make-pool 79 | [config] 80 | (map->Pool config)) 81 | -------------------------------------------------------------------------------- /doc/schema.cql: -------------------------------------------------------------------------------- 1 | CREATE KEYSPACE IF NOT EXISTS metric WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; 2 | 3 | USE metric; 4 | 5 | CREATE TYPE IF NOT EXISTS metric_point ( 6 | max double, 7 | mean double, 8 | min double, 9 | sum double 10 | ); 11 | 12 | CREATE TYPE IF NOT EXISTS metric_resolution ( 13 | precision int, 14 | period int 15 | ); 16 | 17 | CREATE TYPE IF NOT EXISTS metric_id ( 18 | path text, 19 | resolution frozen 20 | ); 21 | 22 | CREATE TABLE IF NOT EXISTS metric ( 23 | id frozen, 24 | time bigint, 25 | point frozen, 26 | PRIMARY KEY (id, time) 27 | ) WITH CLUSTERING ORDER BY (time ASC) 28 | AND compaction = {'class': 'DateTieredCompactionStrategy', 'min_threshold': 
'12', 'max_threshold': '32', 'max_sstable_age_days': '0.083', 'base_time_seconds': '50' } 29 | AND compression = {'sstable_compression': 'org.apache.cassandra.io.compress.LZ4Compressor'} 30 | AND dclocal_read_repair_chance = 0.1 31 | AND default_time_to_live = 0 32 | AND gc_grace_seconds = 864000 33 | AND max_index_interval = 2048 34 | AND memtable_flush_period_in_ms = 0 35 | AND min_index_interval = 128 36 | AND read_repair_chance = 0.0 37 | AND speculative_retry = '99.0PERCENTILE'; 38 | 39 | CREATE TABLE IF NOT EXISTS segment ( 40 | parent text, 41 | segment text, 42 | pos int, 43 | length int, 44 | leaf boolean, 45 | PRIMARY KEY (parent, segment) 46 | ) WITH CLUSTERING ORDER BY (segment ASC) 47 | AND bloom_filter_fp_chance = 0.01 48 | AND caching = {'keys':'ALL', 'rows_per_partition':'NONE'} 49 | AND comment = '' 50 | AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy'} 51 | AND compression = {'sstable_compression': 'org.apache.cassandra.io.compress.LZ4Compressor'} 52 | AND dclocal_read_repair_chance = 0.1 53 | AND default_time_to_live = 0 54 | AND gc_grace_seconds = 864000 55 | AND max_index_interval = 2048 56 | AND memtable_flush_period_in_ms = 0 57 | AND min_index_interval = 128 58 | AND read_repair_chance = 0.0 59 | AND speculative_retry = '99.0PERCENTILE'; 60 | 61 | CREATE CUSTOM INDEX IF NOT EXISTS on segment(segment) USING 'org.apache.cassandra.index.sasi.SASIIndex' WITH OPTIONS = {'mode': 'PREFIX'}; 62 | CREATE CUSTOM INDEX IF NOT EXISTS on segment(pos) USING 'org.apache.cassandra.index.sasi.SASIIndex'; -------------------------------------------------------------------------------- /src/io/cyanite/query.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.query 2 | "Query handler for the Graphite DSL. 3 | This handler will first figure out which paths to fetch and 4 | then hand over the fetched paths to the AST processor." 
(defn path-leaves
  "Map every queried path tuple to the index leaves it expands to."
  [index paths]
  (into {}
        (for [path paths]
          [path (index/prefixes index (first path))])))

(defn merge-paths
  "Given leaves grouped by queried path and a fetched series payload,
   build a map from reconstructed path name to its data points.
   Leaves with no matching series are dropped."
  [by-path series]
  (into {}
        (for [[[_ aggregate] leaves] by-path
              leaf                   leaves
              :let [path   (store/reconstruct-aggregate (:path leaf) aggregate)
                    points (get-in series [:series path])]
              :when (some? points)]
          [path points])))

(defn run-query!
  "Execute a list of Graphite DSL queries between from and to.
   Each query is tokenized, its paths are resolved through the index,
   the matching series are fetched from the engine, and the result is
   handed to the AST processor."
  [index engine from to queries]
  (debug "running query: " (pr-str queries))
  (flatten
   (for [query queries]
     (let [tokens  (parser/query->tokens query)
           paths   (map #(index/extract-aggregate index %)
                        (path/tokens->paths tokens))
           ;; by this point we have "real" paths (without aggregates)
           by-path (path-leaves index paths)
           leaves  (distinct
                    (remove nil?
                            (mapcat
                             (fn [[[_ aggregate] ps]]
                               (map #(engine/resolution engine from to
                                                        (:path %) aggregate)
                                    ps))
                             by-path)))
           series  (engine/query engine from to leaves)
           merged  (merge-paths by-path series)]
       (ast/run-query! tokens merged (:from series) (:step series))))))
3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Cyanite 7 | ======= 8 | 9 | .. image:: _static/cyanite.png 10 | :alt: cyanite logo 11 | :align: left 12 | :width: 100 13 | 14 | 15 | Cyanite is a daemon which provides services to store and retrieve timeseries data. 16 | It aims to be compatible with the Graphite_ eco-system. 17 | 18 | Cyanite stores timeseries data in `Apache Cassandra`_ by default and focuses on 19 | the following aspects: 20 | 21 | Scalability 22 | By relying on `Apache Cassandra`_, Cyanite is able to provide highly available, 23 | elastic, and low-latency time-series storage. 24 | 25 | Compatibility 26 | The Graphite_ eco-system has become the de-facto standard for interacting 27 | with time-series data, either with ``graphite-web`` or with Grafana_. 28 | Cyanite will strive to remain as integrated as possible with this 29 | eco-system and to provide simple interaction modes. 30 | 31 | 32 | 33 | .. raw:: html 34 | 35 |

36 | 38 | GitHub project 41 | 42 | 43 | 45 | Build Status 48 | 49 |

(defn ->seconds
  "Takes a string containing a duration like 13s, 4h etc. and
   converts it to seconds.
   Previously a malformed duration (e.g. \"10\" or \"foo\") made
   re-matches yield nil and Long/valueOf throw an opaque NPE; we now
   raise an informative ex-info instead."
  [s]
  (let [[_ value unit] (re-matches #"^([0-9]+)([a-z])$" (str s))]
    (when-not value
      (throw (ex-info (str "invalid duration: " s) {:duration s})))
    (let [quantity (Long/valueOf ^String value)]
      (case unit
        "s" quantity
        "m" (* 60 quantity)
        "h" (* 60 60 quantity)
        "d" (* 24 60 60 quantity)
        "w" (* 7 24 60 60 quantity)
        "y" (* 365 24 60 60 quantity)
        (throw (ex-info (str "unknown unit: " unit) {:duration s}))))))

;; A single retention resolution: keep points at `precision` seconds of
;; granularity for `period` seconds.
(defrecord Resolution [precision period])

(defrecord ResolutionChain [precisions])

(defn fit?
  "Does a point fit in a resolution, given a reference to now?
   Yields the resolution when the timestamp falls inside its period."
  [resolution oldest ts]
  (when (<= (- ts (:period resolution)) oldest)
    resolution))

(def default-resolution
  "By default, keep a 1 minute resolution for a day"
  (Resolution. 60 86400))
(defprotocol MetricMatcher
  "Predicate protocol for matching metric paths against storage rules."
  (metric-matches? [this metric]))

(defrecord MetricRule [pattern resolutions]
  MetricMatcher
  (metric-matches? [this path]
    ;; yield the rule itself on a match so callers can use it directly
    (when (re-find pattern path)
      this)))

(defn ->exec-plan
  "Return the resolutions of the first rule matching the given metric,
   or nil when no rule matches."
  [planner metric]
  (when-let [rule (some #(metric-matches? % metric) planner)]
    (:resolutions rule)))
'DateTieredCompactionStrategy', 'min_threshold': '12', 'max_threshold': '32', 'max_sstable_age_days': '0.083', 'base_time_seconds': '50' } 30 | AND compression = {'sstable_compression': 'org.apache.cassandra.io.compress.LZ4Compressor'} 31 | AND dclocal_read_repair_chance = 0.1 32 | AND default_time_to_live = 0 33 | AND gc_grace_seconds = 864000 34 | AND max_index_interval = 2048 35 | AND memtable_flush_period_in_ms = 0 36 | AND min_index_interval = 128 37 | AND read_repair_chance = 0.0 38 | AND speculative_retry = '99.0PERCENTILE'; 39 | 40 | CREATE TABLE IF NOT EXISTS segment ( 41 | parent text, 42 | segment text, 43 | pos int, 44 | length int, 45 | leaf boolean, 46 | PRIMARY KEY (parent, segment) 47 | ) WITH CLUSTERING ORDER BY (segment ASC) 48 | AND bloom_filter_fp_chance = 0.01 49 | AND caching = {'keys':'ALL', 'rows_per_partition':'NONE'} 50 | AND comment = '' 51 | AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy'} 52 | AND compression = {'sstable_compression': 'org.apache.cassandra.io.compress.LZ4Compressor'} 53 | AND dclocal_read_repair_chance = 0.1 54 | AND default_time_to_live = 0 55 | AND gc_grace_seconds = 864000 56 | AND max_index_interval = 2048 57 | AND memtable_flush_period_in_ms = 0 58 | AND min_index_interval = 128 59 | AND read_repair_chance = 0.0 60 | AND speculative_retry = '99.0PERCENTILE'; 61 | 62 | CREATE CUSTOM INDEX IF NOT EXISTS on segment(segment) USING 'org.apache.cassandra.index.sasi.SASIIndex' WITH OPTIONS = {'mode': 'PREFIX'}; 63 | CREATE CUSTOM INDEX IF NOT EXISTS on segment(pos) USING 'org.apache.cassandra.index.sasi.SASIIndex'; -------------------------------------------------------------------------------- /test/io/cyanite/query_test.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.query-test 2 | (:require [io.cyanite.query :refer :all] 3 | [io.cyanite.test-helper :refer :all] 4 | [com.stuartsierra.component :as component] 5 | 
(defn mk-cfg
  "Merge a partial test configuration onto the default engine rules."
  [m]
  (merge {:engine {:rules {"default" ["5s:1h"]}}}
         m))

(def CONFIG
  "Configurations under test: a Cassandra-backed system and a purely
   in-memory one. The in-memory entry previously nested its maps as
   keys ({{:store ...} {:index ...}}), which produced a configuration
   containing a map-typed key instead of :store and :index entries."
  [(mk-cfg {:store {:cluster  "localhost"
                    :keyspace "cyanite_test"}
            :index {:type     :cassandra
                    :cluster  "localhost"
                    :keyspace "cyanite_test"}})
   (mk-cfg {:store {:type :memory}
            :index {:type :atom}})])
;; Default number of consumer threads (and per-consumer queues).
(defonce workers 4)

;; Default bounded capacity of each ingest queue.
(defonce default-capacity (int 1048576))

(defprotocol QueueEngine
  "Contract implemented by queueing backends."
  (shutdown! [this])
  (start! [this])
  (consume! [this f])
  (engine-event! [this e]))

(defn threadpool
  "Build a fixed-size thread pool of `sz` threads."
  [sz]
  (let [n (int sz)]
    (Executors/newFixedThreadPool n)))
[this f] 45 | (doseq [queue engine-queues] 46 | (.submit pool 47 | (fn [] 48 | (loop [] 49 | (try 50 | (if-let [el (.poll ^SpscArrayQueue queue)] 51 | (f el) 52 | (LockSupport/parkNanos 10)) 53 | (catch Throwable exception 54 | (r/inc! reporter [:cyanite alias :events :error]) 55 | (error exception "exception while processing the event from the queue"))) 56 | (recur) 57 | )) 58 | )))) 59 | 60 | (defn make-queue 61 | [options alias reporter] 62 | (let [capacity (or (:queue-capacity options) default-capacity) 63 | workers (or (:pool-size options) workers) 64 | pool (threadpool workers)] 65 | (EngineQueue. alias 66 | reporter 67 | (take workers (repeatedly #(SpscArrayQueue. capacity))) 68 | (SpmcArrayQueue. capacity)))) 69 | 70 | (defrecord BlockingMemoryQueue [ingestq options reporter] 71 | component/Lifecycle 72 | (start [this] 73 | (r/build! reporter :counter [:cyanite :ingestq :events]) 74 | (r/build! reporter :counter [:cyanite :ingestq :errors]) 75 | (assoc this :ingestq (make-queue options :ingestq reporter))) 76 | (stop [this] 77 | (shutdown! ingestq) 78 | (assoc this :ingestq nil))) 79 | -------------------------------------------------------------------------------- /doc/source/quickstart.rst: -------------------------------------------------------------------------------- 1 | Quickstart Guide 2 | ================ 3 | 4 | Getting up and running with Cyanite involves two things which 5 | we'll cover in this quick walk-through: 6 | 7 | - Installing, configuring, and running `Apache Cassandra`_. 8 | - Installing, configuring, and running Cyanite itself. 9 | 10 | Obtaining Cyanite 11 | ----------------- 12 | 13 | Cyanite is released in both source and binary. 14 | 15 | Binary releases 16 | ~~~~~~~~~~~~~~~ 17 | 18 | Cyanite currently has no binary releases, as it's under active development. 19 | We are getting clos to first stable version. 
20 | 21 | Binary release are the simplest way to get started and are hosted on github: 22 | https://github.com/pyr/cyanite/releases/latest. 23 | 24 | Each release contains: 25 | 26 | - A source code archive 27 | - A standard build (*cyanite-VERSION-standalone.jar*) 28 | - A debian package 29 | 30 | Requirements 31 | ------------ 32 | 33 | Runtime requirements 34 | ~~~~~~~~~~~~~~~~~~~~ 35 | 36 | Runtime requirements for Cyanite are kept to a minimum 37 | 38 | - Java 8 Runtime (Sun JDK recommended) 39 | - Apache Cassandra 3.4 or later 40 | 41 | Build requirements 42 | ~~~~~~~~~~~~~~~~~~ 43 | 44 | If you wish to build Cyanite you will additionally need the 45 | `leiningen`_ build tool to produce working artifacts. Once 46 | leiningen_ is installed, you can just run ``lein uberjar`` to 47 | produce a working Java archive. 48 | 49 | Minimal configuration 50 | ---------------------- 51 | 52 | Cyanite is configured with a single configuration file, formatted in YAML. 53 | See :ref:`Configuration Syntax` for more details 54 | 55 | .. sourcecode:: yaml 56 | 57 | logging: 58 | level: info 59 | console: true 60 | files: 61 | - "/var/log/cyanite/cyanite.log" 62 | input: 63 | - type: "carbon" 64 | store: 65 | cluster: "127.0.0.1" 66 | index: 67 | type: "cassandra" 68 | keyspace: "metric" 69 | cluster: "127.0.0.1" 70 | api: 71 | port: 8080 72 | engine: 73 | rules: 74 | default: 75 | - "5s:1h" 76 | 77 | Running Cyanite 78 | --------------- 79 | 80 | Command-line arguments 81 | ~~~~~~~~~~~~~~~~~~~~~~ 82 | 83 | Cyanite accepts the following arguments:: 84 | 85 | Switches Default Desc 86 | -------- ------- ---- 87 | -h, --no-help, --help false Show help 88 | -f, --path Configuration file path 89 | -q, --no-quiet, --quiet false Suppress output 90 | 91 | .. _leiningen: https://leiningen.org 92 | .. _Apache Cassandra: http://cassandra.apache.org 93 | 94 | Default Schema 95 | -------------- 96 | 97 | The following schema has to be loaded in Cassandra in order to store data: 98 | 99 | .. 
literalinclude:: ./../schema.cql 100 | -------------------------------------------------------------------------------- /test/io/cyanite/query/ast_test.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.query.ast-test 2 | (:require [io.cyanite.query.parser :as parser] 3 | [io.cyanite.query.ast :refer :all] 4 | [clojure.test :refer :all])) 5 | 6 | (defn runq 7 | [query-string series] 8 | (run-query! (parser/query->tokens query-string) 9 | series)) 10 | 11 | (deftest test-sum-series 12 | (is (= (runq "sumSeries(a.b.c,a.b.d)" 13 | {"a.b.c" [1 1 1] 14 | "a.b.d" [2 2 2]}) 15 | [["sumSeries(a.b.c,a.b.d)" 16 | [3 3 3]]])) 17 | (is (= (runq "sumSeries(a.b.*)" 18 | {"a.b.c" [1 1 1] 19 | "a.b.d" [2 2 2]}) 20 | [["sumSeries(a.b.c,a.b.d)" [3 3 3]]]))) 21 | 22 | (deftest test-sum-absolute-series 23 | (is (= (runq "sumSeries(absolute(a.b.c),a.b.d)" 24 | {"a.b.c" [1 1 1] 25 | "a.b.d" [2 2 2]}) 26 | [["sumSeries(absolute(a.b.c),a.b.d)" 27 | [3 3 3]]])) 28 | (is (= (runq "sumSeries(a.b.*)" 29 | {"a.b.c" [1 1 1] 30 | "a.b.d" [2 2 2]}) 31 | [["sumSeries(a.b.c,a.b.d)" 32 | [3 3 3]]]))) 33 | 34 | (deftest test-derivative 35 | (is (= (runq "derivative(a.b.c)" 36 | {"a.b.c" [1 3 6]}) 37 | [["derivative(a.b.c)" 38 | [nil 2 3]]])) 39 | (is (= (runq "derivative(a.b.*)" 40 | {"a.b.c" [1 3 6] 41 | "a.b.d" [1 3 6]}) 42 | [["derivative(a.b.c)" 43 | [nil 2 3]] 44 | ["derivative(a.b.d)" 45 | [nil 2 3]]]))) 46 | 47 | (deftest test-absolute 48 | (is (= (runq "absolute(a.b.c)" 49 | {"a.b.c" [-1 -3 -6]}) 50 | [["absolute(a.b.c)" 51 | [1 3 6]]])) 52 | (is (= (runq "absolute(a.b.*)" 53 | {"a.b.c" [-1 -3 -5] 54 | "a.b.d" [-1 -3 -6]}) 55 | [["absolute(a.b.c)" [1 3 5]] 56 | ["absolute(a.b.d)" [1 3 6]]]))) 57 | 58 | (deftest test-scale 59 | (is (= (runq "scale(a.b.c,10.0)" 60 | {"a.b.c" [1 2 3]}) 61 | [["scale(a.b.c,10.0)" 62 | [10.0 20.0 30.0]]])) 63 | (is (= (runq "scale(a.b.*,10.0)" 64 | {"a.b.c" [1 2 3] 65 | "a.b.d" [5 6 7]}) 66 | 
[["scale(a.b.c,10.0)" 67 | [10.0 20.0 30.0]] 68 | ["scale(a.b.d,10.0)" 69 | [50.0 60.0 70.0]]]))) 70 | 71 | (deftest test-div 72 | (is (= (runq "divideSeries(a.b.c,a.b.d)" 73 | {"a.b.c" [10 20 30] 74 | "a.b.d" [2 4 6]}) 75 | [["divideSeries(a.b.c,a.b.d)" 76 | [5 5 5]]]))) 77 | 78 | (deftest test-path 79 | (is (= (runq "a.b.*" 80 | {"a.b.c" [10 20 30] 81 | "a.b.d" [2 4 6]}) 82 | [["a.b.c" [10 20 30]] 83 | ["a.b.d" [2 4 6]]])) 84 | (is (= (runq "a.b.c" 85 | {"a.b.c" [10 20 30] 86 | "a.b.d" [2 4 6]}) 87 | [["a.b.c" [10 20 30]]]))) 88 | -------------------------------------------------------------------------------- /test/io/cyanite/integration/index_test.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.integration.index-test 2 | (:require [io.cyanite.store :refer :all] 3 | [clojure.test :refer :all] 4 | [io.cyanite.test-helper :refer :all] 5 | [io.cyanite.index :as index] 6 | [qbits.alia :as alia])) 7 | 8 | (defn cleanup-tables 9 | [session] 10 | (alia/execute session (str "TRUNCATE TABLE segment"))) 11 | 12 | (defn test-index 13 | [] 14 | (let [index (:index *system*)] 15 | (index/register! index "aa.bb.cc") 16 | (index/register! index "aa.bb.dd") 17 | (index/register! index "aa.ee.cc") 18 | (index/register! index "aa.ff.cc") 19 | (index/register! index "aa.gg.cc") 20 | (index/register! index "aa.hh.cc") 21 | (index/register! index "cc.bb.cc") 22 | (index/register! index "cc.bb.dd") 23 | (index/register! index "cc.ee.cc") 24 | (index/register! index "cc.ff.cc") 25 | (index/register! index "cc.gg.cc") 26 | (index/register! 
index "cc.hh.cc") 27 | 28 | (is (= #{"aa.bb" "aa.ee" "aa.ff" "aa.gg" "aa.hh"} 29 | (set (map :id (index/prefixes index "aa.*"))))) 30 | (is (= #{} 31 | (set (map :id (index/prefixes index "bb.*"))))) 32 | (is (= #{"aa.bb" "cc.bb"} 33 | (set (map :id (index/prefixes index "*.bb"))))) 34 | (is (= #{"aa.bb.cc" "aa.bb.dd" "cc.bb.cc" "cc.bb.dd"} 35 | (set (map :id (index/prefixes index "*.bb.*"))))) 36 | (is (= #{"aa.bb.dd" "cc.bb.dd"} 37 | (set (map :id (index/prefixes index "*.*.dd"))))) 38 | (is (= #{"cc.bb.cc" "cc.ee.cc" "cc.ff.cc" "cc.gg.cc" "cc.hh.cc"} 39 | (set (map :id (index/prefixes index "cc.*.cc"))))) 40 | (is (= #{"aa.ee.cc" "cc.ee.cc"} 41 | (set (map :id (index/prefixes index "*.ee.cc"))))) 42 | (is (= #{"aa" "cc"} 43 | (set (map :id (index/prefixes index "*"))))) 44 | (is (= #{"aa.ee.cc" "aa.hh.cc" "aa.bb.cc" "aa.ff.cc" "aa.gg.cc"} 45 | (set (map :id (index/prefixes index "aa.*.cc"))))) 46 | (is (= #{"aa.ee.cc"} 47 | (set (map :id (index/prefixes index "aa.ee.cc"))))))) 48 | 49 | (deftest sasi-index-test 50 | (with-config 51 | {:index {:type :cassandra 52 | :cluster "localhost" 53 | :keyspace "cyanite_test"}} 54 | {} 55 | (let [index (:index *system*) 56 | session (:session index)] 57 | (cleanup-tables session) 58 | (test-index) 59 | (cleanup-tables session)))) 60 | 61 | (deftest sasi-with-tokenizer-index-test 62 | (with-config 63 | {:index {:cluster "localhost" 64 | :keyspace "cyanite_test_with_tokenizer" 65 | :type :cassandra 66 | :with_tokenizer true}} 67 | {} 68 | (let [index (:index *system*) 69 | session (:session index)] 70 | (cleanup-tables session) 71 | (test-index) 72 | (cleanup-tables session)))) 73 | 74 | (deftest agent-index-test 75 | (with-config 76 | {:index {:type :atom}} 77 | {} 78 | (test-index))) 79 | -------------------------------------------------------------------------------- /test/io/cyanite/test_helper.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.test-helper 2 | 
;; Test double for the queueing engine: events are delivered
;; synchronously, on the caller's thread, to every registered consumer.
(defrecord SynchronousQueueEngine [consumers]
  queue/QueueEngine
  ;; start! was missing from this implementation, so invoking it
  ;; through the protocol would throw; it is a no-op, as in the real
  ;; EngineQueue.
  (start! [this])
  (shutdown! [this])
  (engine-event! [this e]
    (doseq [f @consumers]
      (f e)))
  (consume! [this f]
    (swap! consumers conj f)))
(atom [])))) 51 | (stop [this] 52 | (assoc this :ingestq nil))) 53 | 54 | (def ^:dynamic *system*) 55 | 56 | (defn make-test-system 57 | [config overrides] 58 | (-> {:reporter (reporter/make-reporter {}) 59 | :clock (map->TimeTravellingClock {}) 60 | ;; No-op compoments 61 | :pool (map->NoOpPool {}) 62 | :drift (drift/map->NoOpDrift {}) 63 | :index (index/build-index (or (:index config) {:type :atom})) 64 | :queues (map->SynchronousQueue {}) 65 | :store (store/build-store (or (:store config) {:type :memory})) 66 | ;; Default versions 67 | :engine (engine/make-engine (:engine config)) 68 | } 69 | (merge overrides) 70 | (component/map->SystemMap) 71 | (component/system-using {:drift [:clock] 72 | :queues [:reporter] 73 | :pool [:reporter] 74 | :index [] 75 | :engine [:drift :store :queues :reporter :index] 76 | }))) 77 | 78 | 79 | (defmacro with-config 80 | [config overrides & body] 81 | `(let [cfg# ~config] 82 | (if (vector? cfg#) 83 | (doseq [config# cfg#] 84 | (binding [*system* (component/start-system (make-test-system config# ~overrides))] ;;TODO: get rid of overrides, wtf 85 | ~@body 86 | (component/stop-system *system*))) 87 | (binding [*system* (component/start-system (make-test-system ~config ~overrides))] 88 | ~@body 89 | (component/stop-system *system*)))) 90 | ) 91 | -------------------------------------------------------------------------------- /tokenizer/src/apache/cassandra/index/sasi/analyzer/SplittingTokenizer.java: -------------------------------------------------------------------------------- 1 | package org.apache.cassandra.index.sasi.analyzer; 2 | 3 | import java.nio.ByteBuffer; 4 | import java.util.*; 5 | import java.util.regex.*; 6 | 7 | import com.google.common.collect.Iterators; 8 | import org.slf4j.Logger; 9 | import org.slf4j.LoggerFactory; 10 | 11 | import org.apache.cassandra.db.marshal.*; 12 | import org.apache.cassandra.index.sasi.analyzer.filter.*; 13 | import org.apache.cassandra.serializers.*; 14 | import 
org.apache.cassandra.utils.*; 15 | 16 | public class SplittingTokenizer extends AbstractAnalyzer 17 | { 18 | private static final Logger logger = LoggerFactory.getLogger(NonTokenizingAnalyzer.class); 19 | 20 | private static final Set> VALID_ANALYZABLE_TYPES = new HashSet>() 21 | {{ 22 | add(UTF8Type.instance); 23 | add(AsciiType.instance); 24 | }}; 25 | 26 | private AbstractType validator; 27 | private final static Pattern pattern = Pattern.compile("\\."); 28 | private NonTokenizingOptions options; 29 | private FilterPipelineTask filterPipeline; 30 | 31 | private ByteBuffer input; 32 | private Iterator splits; 33 | private boolean hasNext = false; 34 | 35 | public void init(Map options, AbstractType validator) 36 | { 37 | init(NonTokenizingOptions.buildFromMap(options), validator); 38 | } 39 | 40 | public void init(NonTokenizingOptions tokenizerOptions, AbstractType validator) 41 | { 42 | this.validator = validator; 43 | this.options = tokenizerOptions; 44 | this.filterPipeline = getFilterPipeline(); 45 | } 46 | 47 | public boolean hasNext() 48 | { 49 | // check that we know how to handle the input, otherwise bail 50 | if (!VALID_ANALYZABLE_TYPES.contains(validator)) 51 | { 52 | logger.info("Can't analyze the input of type " + validator); 53 | return false; 54 | } 55 | 56 | if (splits == null) 57 | { 58 | String inputStr = validator.getString(input); 59 | if (inputStr == null) 60 | throw new MarshalException(String.format("'null' deserialized value for %s with %s", ByteBufferUtil.bytesToHex(input), validator)); 61 | 62 | 63 | splits = Iterators.filter(Arrays.asList(pattern.split(inputStr)).iterator(), 64 | (s) -> !s.contains("*") && !s.contains("?")); 65 | } 66 | 67 | hasNext = splits.hasNext(); 68 | if (hasNext) 69 | { 70 | String nextStr = splits.next(); 71 | next = ByteBufferUtil.bytes(nextStr); 72 | } 73 | else 74 | { 75 | next = null; 76 | } 77 | return hasNext; 78 | } 79 | 80 | public void reset(ByteBuffer input) 81 | { 82 | this.next = null; 83 | 
(defprotocol Drift
  (drift! [this ts] "Take new drift into account for this timestamp")
  (skewed-epoch! [this] "Yield an approximate epoch, accounting for drift"))

(defprotocol Clock
  (epoch! [this] "Give us an epoch"))

;; SystemClock is a plain wall clock with second resolution.
;; No configuration possible here.
(defrecord SystemClock []
  component/Lifecycle
  (start [this] this)
  (stop [this] this)
  Clock
  (epoch! [this]
    ;; milliseconds since the epoch, truncated to seconds
    (quot (System/currentTimeMillis) 1000)))
;; Hold the state of our drift in a volatile: we do not need strong
;; atomicity guarantees here, a reasonable approximation of the clock
;; drift is enough.
(defrecord VolatileDrift [slot clock]
  component/Lifecycle
  (start [this]
    (assoc this :slot (volatile! 0)))
  (stop [this]
    (assoc this :slot nil))
  Clock
  (epoch! [this]
    (epoch! clock))
  Drift
  (drift! [this ts]
    ;; only ever widen the recorded drift, never shrink it
    (let [ahead (- ts (epoch! clock))]
      (when (pos? ahead)
        (vswap! slot max ahead))))
  (skewed-epoch! [this]
    (- (epoch! clock) @slot))
  clojure.lang.IDeref
  (deref [this]
    @slot))
(defmulti build-drift
  "Build a drift implementation from its :type option, defaulting to
   the agent-based one when no type is supplied."
  (comp (fnil keyword "agent") :type))

(defmethod build-drift :no-op
  [options]
  (map->NoOpDrift options))

(defmethod build-drift :volatile
  [options]
  (map->VolatileDrift options))

(defmethod build-drift :agent
  [options]
  (map->AgentDrift options))

;; drift records implement IDeref, which would otherwise make
;; print-method dispatch ambiguous
(prefer-method print-method clojure.lang.IRecord clojure.lang.IDeref)
(prefer-method print-method java.util.Map clojure.lang.IDeref)
[:path "foo"] 59 | [:path "bar"] 60 | [:path "baz"]] 61 | #{"foo" "bar" "baz"}] 62 | 63 | ["as percent (arity-1)" 64 | "aspercent(foo)" 65 | [:aspercent [:path "foo"]] 66 | #{"foo"}] 67 | ["as percent (arity-2)" 68 | "asPercent(foo,100)" 69 | [:aspercent [:path "foo"] "100"] 70 | #{"foo"}] 71 | ["as percent (arity-2 with expr)" 72 | "asPercent(foo,alias(bar,'baz'))" 73 | [:aspercent [:path "foo"] [:alias [:path "bar"] "baz"]] 74 | #{"foo" "bar"}] 75 | 76 | ["average agove" 77 | "averageAbove(foo,5.2)" 78 | [:avgabove [:path "foo"] "5.2"] 79 | #{"foo"}] 80 | 81 | ["average below" 82 | "averageBelow(foo,5.2)" 83 | [:avgbelow [:path "foo"] "5.2"] 84 | #{"foo"}] 85 | 86 | ["group" 87 | "group(foo,bar,baz,foo,qux)" 88 | [:group [:path "foo"] 89 | [:path "bar"] 90 | [:path "baz"] 91 | [:path "foo"] 92 | [:path "qux"]] 93 | #{"foo" "bar" "baz" "qux"}] 94 | 95 | ["scale" 96 | "scale(foo,5)" 97 | [:scale [:path "foo"] "5"] 98 | #{"foo"}] 99 | 100 | ]] 101 | (doseq [[shortname query expected-tokens paths] inputs 102 | :let [tokens (parser/query->tokens query)]] 103 | (testing (str "tokens output for " shortname) 104 | (is (= expected-tokens tokens))) 105 | (testing (str "parsed paths for " shortname) 106 | (is (= paths (path/tokens->paths tokens))))))) 107 | -------------------------------------------------------------------------------- /src/io/cyanite/store/cassandra.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.store.cassandra 2 | (:require [qbits.alia :as alia] 3 | [qbits.alia.codec :as codec] 4 | [io.cyanite.engine.rule :as rule] 5 | [clojure.tools.logging :refer [error]])) 6 | 7 | (def custom-row-generator 8 | (reify qbits.alia.codec/RowGenerator 9 | (init-row [_] (transient {})) 10 | (conj-row [_ row k v] 11 | (assoc! row 12 | (keyword k) 13 | (if (= k "id") 14 | (update-in v [:resolution] rule/map->Resolution) 15 | v))) 16 | (finalize-row [_ row] 17 | (persistent! 
row)))) 18 | 19 | (defn truncateq-v2 20 | [session table] 21 | (alia/prepare 22 | session 23 | (str "TRUNCATE " table))) 24 | 25 | (defn insertq-v2 26 | [session table] 27 | (alia/prepare 28 | session 29 | (str "UPDATE " table " USING TTL ? SET point=? WHERE id=? AND time=?;"))) 30 | 31 | (defn fetchq-v2 32 | [session table] 33 | (alia/prepare 34 | session 35 | (str "SELECT id,time,point FROM " table " WHERE " 36 | "id IN ? AND time >= ? AND TIME <= ?;"))) 37 | 38 | (defn session! 39 | [{:keys [cluster username password] :as opts}] 40 | (try 41 | (let [hints (or (:hints opts) 42 | {:replication 43 | {:class "SimpleStrategy" 44 | :replication_factor (or (:replication_factor opts) 45 | (:replication-factor opts) 46 | (:repfactor opts) 47 | 1)}}) 48 | cluster (if (sequential? cluster) cluster [cluster]) 49 | session (-> {:contact-points cluster} 50 | (cond-> (and username password) 51 | (assoc :credentials {:user username 52 | :password password})) 53 | (alia/cluster) 54 | (alia/connect (or (:keyspace opts) "metric"))) 55 | rdcty (keyword 56 | (or (:read-consistency opts) 57 | (:read_consistency opts) 58 | (:rdcty opts) 59 | :one)) 60 | wrcty (keyword 61 | (or (:write-consistency opts) 62 | (:write_consistency opts) 63 | (:wrcty opts) 64 | :any))] 65 | [session rdcty wrcty]) 66 | (catch com.datastax.driver.core.exceptions.InvalidQueryException e 67 | (error e "Could not connect to cassandra. Exiting") 68 | (System/exit 1)))) 69 | 70 | (defn runq! 71 | [session prepared-statement values opts] 72 | (let [bound (alia/bind prepared-statement values)] 73 | (alia/execute session bound 74 | (assoc opts 75 | :row-generator custom-row-generator)))) 76 | 77 | (defn runq-async! 
78 | [session prepared-statement values opts] 79 | (let [bound (alia/bind prepared-statement values)] 80 | (alia/execute-async session bound 81 | (assoc opts 82 | :row-generator custom-row-generator)))) 83 | 84 | (defn get-types 85 | [session] 86 | (let [->point (alia/udt-encoder session "metric_point") 87 | ->id (alia/udt-encoder session "metric_id") 88 | ->res (alia/udt-encoder session "metric_resolution")] 89 | [(fn [{:keys [path resolution]}] 90 | (let [precision (-> resolution :precision int) 91 | period (-> resolution :period int)] 92 | (->id {:path path 93 | :resolution (->res {:precision precision 94 | :period period})}))) 95 | (fn [{:keys [mean min max sum]}] 96 | (->point {:mean (double mean) 97 | :min (double min) 98 | :max (double max) 99 | :sum (double sum)}))])) 100 | -------------------------------------------------------------------------------- /doc/cyanite.yaml: -------------------------------------------------------------------------------- 1 | ## 2 | ## Cyanite configuration 3 | ## ===================== 4 | ## 5 | ## Cyanite's configuration is broken up in 6 | ## different sections: 7 | ## 8 | ## - engine 9 | ## - api 10 | ## - input 11 | ## - index 12 | ## - store 13 | ## - logging 14 | ## 15 | ## Most sections are optional but provide defaults 16 | ## for a single host testing system 17 | ## 18 | ## Engine 19 | ## ====== 20 | ## 21 | ## The engine specifies the behavior of Cyanite's core 22 | ## which accepts metrics from inputs, aggregates in-memory 23 | ## and defers to an index and a store when a time-window 24 | ## elapses 25 | ## 26 | ## The engine accepts the following options: 27 | ## 28 | ## *rules*: 29 | ## Rules specifies which resolutions to apply to an incoming metric. 30 | ## Rules consist of a pattern or the string "default" and an associated 31 | ## list of resolutions. 32 | ## Rules are evaluated in a first-match order. 
Resolutions are stored as a 33 | ## string of the form: :, you may use unit specifiers 34 | ## for seconds, minutes, hours, days, weeks and months and years. 35 | engine: 36 | rules: 37 | "web.*\\.cpu": [ "5s:1h", "30s:1d" ] 38 | default: [ "5s:1h" ] 39 | ## 40 | ## API 41 | ## === 42 | ## 43 | ## The API specifies the behavior of the HTTP interface which is exposed. 44 | ## The API accepts the following options: 45 | ## 46 | ## *host*: 47 | ## Address to listen on, defaults to 127.0.0.1 48 | ## *port*: 49 | ## Port to bind to, defaults to 8080 50 | ## *disabled: 51 | ## Disable HTTP service altogether, defaults to false. 52 | api: 53 | port: 8080 54 | ## 55 | ## Input 56 | ## ==== 57 | ## 58 | ## Inputs are methods for Cyanite to ingest metrics. A Cyanite installation 59 | ## may have several inputs running, and thus accepts a list of input 60 | ## configurations. 61 | ## 62 | ## Each input configuration takes the following options: 63 | ## 64 | ## *type*: 65 | ## Type of input, for now only "carbon" 66 | ## *host*: 67 | ## Address to bind to. 68 | ## *port*: 69 | ## Port to bind to. 70 | input: 71 | - type: carbon 72 | port: 2003 73 | ## 74 | ## Index 75 | ## ===== 76 | ## 77 | ## The index determines where metric names will be stored. 78 | ## Two types of indices are available now: "atom" and 79 | ## "cassandra". If no index section is present, 80 | ## An in-memory (atom) index will be assumed. Atom index 81 | ## is not suitable for production usage. 82 | ## 83 | ## The atom index takes no options. 84 | ## The cassandra index takes the following options: 85 | ## 86 | ## *cluster*: 87 | ## A string or list of strings to provide cluster contact points. 88 | ## *keyspace*: 89 | ## The keyspace to use. 90 | ## *with_tokenizer*: 91 | ## Whether or not Cyanite custom tokeniser is enabled on Cassandra nodes. 92 | ## 93 | ## Indexes support artifical paths. For each metric, Cyanite 94 | ## calculates `min`, `max`, `mean` and `sum` metrics. 
By default, 95 | ## `mean` is returned. If you need access to the other aggregates, 96 | ## enable them in `aggregates`. 97 | index: 98 | type: atom 99 | aggregates: 100 | - min 101 | - max 102 | - mean 103 | - sum 104 | ## 105 | ## Drift 106 | ## ===== 107 | ## 108 | ## Detects the drift on the agent clock and ensures 109 | ## that the snapshot epoch is calculated correctly 110 | ## despite the time difference between the agent 111 | ## and the calculating machine. 112 | ## 113 | ## Default implementation is `agent`. 114 | ## If you trust your clocks and/or would like to 115 | ## avoid drift handling on Cyanite side, you 116 | ## can opt-out for `no-op` one. 117 | drift: 118 | type: agent 119 | ## 120 | ## Store 121 | ## ===== 122 | ## 123 | ## The store is where metrics get persisted. 124 | ## The only store available for now is the "cassandra" 125 | ## one. 126 | ## 127 | ## The following options are accepted: 128 | ## 129 | ## *cluster*: 130 | ## A string or list of strings to provide cluster contact points. 131 | ## *keyspace*: 132 | ## The keyspace to use. 133 | store: 134 | cluster: 'localhost' 135 | keyspace: 'metric' 136 | ## 137 | ## Logging 138 | ## ======= 139 | ## 140 | ## Specify where to log. 
Adheres to the configuration format 141 | ## defined at https://github.com/pyr/unilog 142 | logging: 143 | level: info 144 | console: true 145 | files: 146 | - "/var/log/cyanite/cyanite.log" 147 | overrides: 148 | io.cyanite: "debug" 149 | -------------------------------------------------------------------------------- /src/io/cyanite.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite 2 | "Main cyanite namespace" 3 | (:gen-class) 4 | (:require [io.cyanite.config :as config] 5 | [io.cyanite.input :as input] 6 | [io.cyanite.index :as index] 7 | [io.cyanite.index.cassandra] 8 | [io.cyanite.engine.queue :as queue] 9 | [io.cyanite.store :as store] 10 | [io.cyanite.pool :as pool] 11 | [com.stuartsierra.component :as component] 12 | [metrics.reporters.console :as console] 13 | [metrics.reporters.csv :as csv] 14 | [metrics.reporters.jmx :as jmx] 15 | [spootnik.reporter :as reporter] 16 | [io.cyanite.engine :as engine] 17 | [io.cyanite.engine.drift :refer [map->SystemClock build-drift]] 18 | [io.cyanite.api :refer [map->Api]] 19 | [signal.handler :refer [with-handler]] 20 | [unilog.config :refer [start-logging!]] 21 | [spootnik.uncaught :refer [uncaught]] 22 | [clojure.tools.logging :refer [info warn]] 23 | [clojure.tools.cli :refer [cli]])) 24 | 25 | (set! *warn-on-reflection* true) 26 | 27 | (defn get-cli 28 | "Call cli parsing with our known options" 29 | [args] 30 | (try 31 | (cli args 32 | ["-h" "--help" "Show help" :default false :flag true] 33 | ["-f" "--path" "Configuration file path" :default nil] 34 | ["-q" "--quiet" "Suppress output" :default false :flag true]) 35 | (catch Exception e 36 | (binding [*out* *err*] 37 | (println "Could not parse arguments: " (.getMessage e))) 38 | (System/exit 1)))) 39 | 40 | (defn build-components 41 | "Build a list of components from data. 42 | Extracts key k from system and yield 43 | an updated system with top-level keys. 
44 | components are created by call f on them 45 | options." 46 | [system config k f] 47 | (if (seq config) 48 | (reduce merge system (map (juxt :type f) config)) 49 | (assoc system k (f {})))) 50 | 51 | (defn config->system 52 | "Parse yaml then enhance config" 53 | [path quiet?] 54 | (try 55 | (when-not quiet? 56 | (println "starting with configuration: " path)) 57 | (let [config (config/load-path path)] 58 | (start-logging! (merge config/default-logging (:logging config))) 59 | (-> (component/system-map 60 | :clock (map->SystemClock {}) 61 | :queues (queue/map->BlockingMemoryQueue {:options (:queue config)}) 62 | :drift (build-drift (:drift config)) 63 | :engine (engine/make-engine (:engine config)) 64 | :api (map->Api {:options (:api config)}) 65 | :index (index/build-index (:index config)) 66 | :store (store/build-store (:store config)) 67 | :pool (pool/make-pool (:pool config)) 68 | :reporter (reporter/make-reporter (:reporter config))) 69 | (build-components (:input config) :input input/build-input) 70 | (component/system-using {:drift [:clock] 71 | :queues [:reporter] 72 | :pool [:reporter] 73 | :engine [:drift :queues :store :reporter :index] 74 | :index [] 75 | :store [] 76 | :api [:index :store :queues :engine :drift]}))))) 77 | 78 | (defn -main 79 | "Our main function, parses args and launches appropriate services" 80 | [& args] 81 | (let [[{:keys [path help quiet]} args banner] (get-cli args)] 82 | 83 | (when help 84 | (println banner) 85 | (System/exit 0)) 86 | 87 | (let [system (atom (config->system path quiet))] 88 | (info "installing signal handlers") 89 | (with-handler :term 90 | (info "caught SIGTERM, quitting") 91 | (component/stop-system @system) 92 | (info "all components shut down") 93 | (System/exit 0)) 94 | 95 | (if (not (.contains (System/getProperty "os.name") "Windows")) 96 | (with-handler :hup 97 | (info "caught SIGHUP, reloading") 98 | (swap! 
system (comp component/start-system 99 | component/stop-system)))) 100 | 101 | (info "ready to start the system") 102 | (swap! system component/start-system))) 103 | nil) 104 | 105 | ;; Install our uncaught exception handler. 106 | (uncaught e (warn e "uncaught exception")) 107 | -------------------------------------------------------------------------------- /pkg/deb/init.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ### BEGIN INIT INFO 3 | # Provides: cyanite 4 | # Required-Start: $remote_fs $syslog 5 | # Required-Stop: $remote_fs $syslog 6 | # Default-Start: 2 3 4 5 7 | # Default-Stop: 0 1 6 8 | # Short-Description: Cyanite server 9 | # Description: The Cyanite monitoring system's event processor. 10 | ### END INIT INFO 11 | 12 | # Author: Pierre-Yves Ritschard 13 | 14 | # Do NOT "set -e" 15 | 16 | # PATH should only include /usr/* if it runs after the mountnfs.sh script 17 | PATH=/sbin:/usr/sbin:/bin:/usr/bin:/usr/local/bin 18 | DESC="Cyanite" 19 | NAME=cyanite 20 | DAEMON=/usr/bin/cyanite 21 | DAEMON_ARGS="/etc/cyanite/cyanite.config" 22 | DAEMON_USER=cyanite 23 | PIDFILE=/var/run/$NAME.pid 24 | SCRIPTNAME=/etc/init.d/$NAME 25 | 26 | # Exit if the package is not installed 27 | [ -x "$DAEMON" ] || exit 0 28 | 29 | # Read configuration variable file if it is present 30 | [ -r /etc/default/$NAME ] && . /etc/default/$NAME 31 | 32 | # Load the VERBOSE setting and other rcS variables 33 | . /lib/init/vars.sh 34 | 35 | # Define LSB log_* functions. 36 | # Depend on lsb-base (>= 3.2-14) to ensure that this file is present 37 | # and status_of_proc is working. 38 | . 
/lib/lsb/init-functions 39 | 40 | # Function that starts the daemon/service 41 | do_start() 42 | { 43 | # Return 44 | # 0 if daemon has been started 45 | # 1 if daemon was already running 46 | # 2 if daemon could not be started 47 | pid=$( pidofproc -p $PIDFILE "$NAME") 48 | if [ -n "$pid" ] ; then 49 | log_daemon_msg "Cyanite is already running (PID `cat ${PIDFILE}`)" 50 | return 1 51 | fi 52 | start-stop-daemon --start --quiet --chuid $DAEMON_USER --chdir / --make-pidfile --background --pidfile $PIDFILE --exec $DAEMON -- \ 53 | $DAEMON_ARGS \ 54 | || return 2 55 | # Add code here, if necessary, that waits for the process to be ready 56 | # to handle requests from services started subsequently which depend 57 | # on this one. As a last resort, sleep for some time. 58 | } 59 | 60 | # Function that stops the daemon/service 61 | do_stop() 62 | { 63 | # Return 64 | # 0 if daemon has been stopped 65 | # 1 if daemon was already stopped 66 | # 2 if daemon could not be stopped 67 | # other if a failure occurred 68 | start-stop-daemon --stop --quiet --retry=TERM/30/KILL/5 --pidfile $PIDFILE 69 | RETVAL="$?" 70 | [ "$RETVAL" = 2 ] && return 2 71 | # Wait for children to finish too if this is a daemon that forks 72 | # and if the daemon is only ever run from this initscript. 73 | # If the above conditions are not satisfied then add some other code 74 | # that waits for the process to drop all resources that could be 75 | # needed by services started subsequently. A last resort is to 76 | # sleep for some time. 77 | start-stop-daemon --stop --quiet --oknodo --retry=0/30/KILL/5 --exec $DAEMON 78 | [ "$?" = 2 ] && return 2 79 | # Many daemons don't delete their pidfiles when they exit. 80 | rm -f $PIDFILE 81 | return "$RETVAL" 82 | } 83 | 84 | # Function that sends a SIGHUP to the daemon/service 85 | do_reload() { 86 | # 87 | # If the daemon can reload its configuration without 88 | # restarting (for example, when it is sent a SIGHUP), 89 | # then implement that here. 
90 | # 91 | start-stop-daemon --stop --quiet --signal HUP --pidfile $PIDFILE 92 | return $? 93 | } 94 | 95 | case "$1" in 96 | start) 97 | [ "$VERBOSE" != no ] && log_daemon_msg "Starting $DESC" "$NAME" 98 | do_start 99 | case "$?" in 100 | 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;; 101 | 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;; 102 | esac 103 | ;; 104 | stop) 105 | [ "$VERBOSE" != no ] && log_daemon_msg "Stopping $DESC" "$NAME" 106 | do_stop 107 | case "$?" in 108 | 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;; 109 | 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;; 110 | esac 111 | ;; 112 | status) 113 | status_of_proc "$DAEMON" "$NAME" && exit 0 || exit $? 114 | ;; 115 | reload|force-reload) 116 | log_daemon_msg "Reloading $DESC" "$NAME" 117 | do_reload 118 | log_end_msg $? 119 | ;; 120 | restart) 121 | log_daemon_msg "Restarting $DESC" "$NAME" 122 | do_stop 123 | case "$?" in 124 | 0|1) 125 | do_start 126 | case "$?" in 127 | 0) log_end_msg 0 ;; 128 | 1) log_end_msg 1 ;; # Old process is still running 129 | *) log_end_msg 1 ;; # Failed to start 130 | esac 131 | ;; 132 | *) 133 | # Failed to stop 134 | log_end_msg 1 135 | ;; 136 | esac 137 | ;; 138 | *) 139 | echo "Usage: $SCRIPTNAME {start|stop|status|restart|reload|force-reload}" >&2 140 | exit 3 141 | ;; 142 | esac 143 | 144 | : 145 | -------------------------------------------------------------------------------- /src/io/cyanite/store.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.store 2 | (:require [com.stuartsierra.component :as component] 3 | [io.cyanite.store.cassandra :as c] 4 | [clojure.tools.logging :refer [info error]])) 5 | 6 | (defprotocol MetricStore 7 | (insert! [this path resolution snapshot]) 8 | (fetch! [this from to paths]) 9 | (truncate! [this])) 10 | 11 | (defn reconstruct-aggregate 12 | [path aggregate] 13 | (if (= :default aggregate) 14 | path 15 | (str path "_" (name aggregate)))) 16 | 17 | (defn common-fetch! 
18 | [paths f] 19 | (let [aggregates (reduce (fn [acc {:keys [aggregate] :as path}] 20 | (assoc acc 21 | (dissoc path :aggregate) 22 | (if-let [aggregates (get acc path)] 23 | (conj aggregates aggregate) 24 | [aggregate]))) 25 | {} paths) 26 | paths (keys aggregates) 27 | results (f paths)] 28 | (mapcat 29 | (fn [{:keys [id point] :as metric}] 30 | (map #(let [aggregate (if (= :default %) :mean %)] 31 | (assoc metric 32 | :id (assoc id 33 | :path (reconstruct-aggregate (:path id) %) 34 | :aggregate %) 35 | :point (get point aggregate))) 36 | (get aggregates id))) 37 | results))) 38 | 39 | (defrecord CassandraV2Store [options session insertq fetchq truncateq 40 | wrcty rdcty mkid mkpoint reporter 41 | statement-cache] 42 | component/Lifecycle 43 | (start [this] 44 | (let [[session rdcty wrcty] (c/session! options) 45 | table (or (:table options) "metric") 46 | [mkid mkpoint] (c/get-types session)] 47 | (-> this 48 | (assoc :session session 49 | :insertq (c/insertq-v2 session table) 50 | :fetchq (c/fetchq-v2 session table) 51 | :truncateq (c/truncateq-v2 session table) 52 | :mkid mkid 53 | :mkpoint mkpoint)))) 54 | (stop [this] 55 | (-> this 56 | (assoc :session nil 57 | :insertq nil 58 | :fetchq nil 59 | :truncateq nil 60 | :mkid nil 61 | :mkpoint nil))) 62 | MetricStore 63 | (fetch! [this from to paths] 64 | (common-fetch! 65 | paths 66 | #(c/runq! session fetchq 67 | [(mapv mkid %) 68 | (long from) 69 | (long to)] 70 | {:consistency rdcty 71 | :fetch-size Integer/MAX_VALUE}))) 72 | 73 | (insert! [this path resolution snapshot] 74 | (c/runq-async! session insertq 75 | [(-> resolution :period int) 76 | (mkpoint snapshot) 77 | (mkid {:path path :resolution resolution}) 78 | (-> snapshot :time long)] 79 | {:consistency wrcty})) 80 | 81 | (truncate! [this] 82 | (c/runq! session truncateq [] {}))) 83 | 84 | (defn empty-store 85 | [] 86 | (reify 87 | component/Lifecycle 88 | (start [this] this) 89 | (stop [this] this) 90 | MetricStore 91 | (fetch! 
[this from to paths]) 92 | (insert! [this path resolution snapshot]))) 93 | 94 | (defrecord MemoryStore [state] 95 | component/Lifecycle 96 | (start [this] 97 | (assoc this :state (atom {}))) 98 | (stop [this] 99 | (assoc this :state nil)) 100 | clojure.lang.IDeref 101 | (deref [this] 102 | @state) 103 | MetricStore 104 | (fetch! [this from to paths] 105 | (common-fetch! 106 | paths 107 | #(let [st @state] 108 | (mapcat 109 | (fn [path] 110 | (->> (get-in st ((juxt :path :resolution) path)) 111 | (filter 112 | (fn [[time _]] 113 | (and (>= time from) 114 | (<= time to)))) 115 | (map 116 | (fn [[time point]] 117 | {:id path 118 | :time time 119 | :point point})))) 120 | %)))) 121 | (insert! [this path resolution snapshot] 122 | (swap! state 123 | (fn [old] 124 | (update-in old 125 | [path resolution (:time snapshot)] 126 | (constantly (select-keys snapshot [:max :min :sum :mean])))))) 127 | (truncate! [this] 128 | (reset! state {}))) 129 | 130 | (prefer-method print-method clojure.lang.IPersistentMap clojure.lang.IDeref) 131 | 132 | (defmulti build-store (comp (fnil keyword "cassandra-v2") :type)) 133 | 134 | (defmethod build-store :cassandra-v2 135 | [options] 136 | (map->CassandraV2Store {:options (dissoc options :type)})) 137 | 138 | (defmethod build-store :empty 139 | [options] 140 | (empty-store)) 141 | 142 | (defmethod build-store :memory 143 | [options] 144 | (map->MemoryStore options)) 145 | -------------------------------------------------------------------------------- /src/io/cyanite/index.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.index 2 | (:require [com.stuartsierra.component :as component] 3 | [clojure.string :refer [join split]] 4 | [clojure.set :refer [union intersection]] 5 | [globber.glob :refer [glob]])) 6 | 7 | (defprotocol MetricIndex 8 | (register! [this path]) 9 | (prefixes [this pattern]) 10 | (truncate! 
[this]) 11 | 12 | (multiplex-aggregates [this prefixes]) 13 | (extract-aggregate [this prefix])) 14 | 15 | ;; Path explansion / artificial aggreate paths 16 | ;; 17 | 18 | (defn make-pattern 19 | [aggregates] 20 | (re-pattern (str "(.*)(\\_)(" (join "|" aggregates) ")"))) 21 | 22 | (defn multiplex-aggregates-paths 23 | [aggregates paths] 24 | (mapcat 25 | (fn [path] 26 | (if (not (:expandable path)) 27 | (map #(assoc path 28 | :path (str (:path path) %) 29 | :text (str (:text path) %)) 30 | (cons "" (map #(str "_" %) aggregates))) 31 | [path])) 32 | paths)) 33 | 34 | (defn extract-aggregate-path 35 | [pattern path] 36 | (if-let [[_ extracted :as all] (re-matches pattern path)] 37 | [extracted (keyword (last all))] 38 | [path :default])) 39 | 40 | ;; Implementation 41 | ;; ============== 42 | 43 | ;; 44 | ;; We build an inverted index of segment to path 45 | ;; To service query we resolve and expand multiple 46 | ;; options (delimited in curly brackets) or multiple 47 | ;; characters (in a [] character class) then we dispatch 48 | ;; resolve our inverted index and filter on the result list 49 | ;; 50 | 51 | (defn- segmentize 52 | [path] 53 | (let [elems (split path #"\.")] 54 | (map-indexed vector elems))) 55 | 56 | (defn prefix-info 57 | [length [path matches]] 58 | (let [lengths (set (map second matches))] 59 | {:path path 60 | :text (last (split path #"\.")) 61 | :id path 62 | :allowChildren (if (some (partial < length) lengths) 1 0) 63 | :expandable (if (some (partial < length) lengths) 1 0) 64 | :leaf (if (boolean (lengths length)) 1 0)})) 65 | 66 | (defn truncate-to 67 | [pattern-length [path length]] 68 | [(join "." 
(take pattern-length (split path #"\."))) 69 | length]) 70 | 71 | 72 | 73 | (defn- push-segment* 74 | [segments segment path length] 75 | (into (sorted-map) 76 | (update segments segment 77 | (fn [paths tuple] 78 | (into (sorted-set) 79 | (conj paths tuple))) 80 | [path length]))) 81 | 82 | (defn- by-pos 83 | [db pos] 84 | (-> @db (get pos) keys)) 85 | 86 | (defn- by-segment 87 | [db pos segment] 88 | (get (get @db pos) segment)) 89 | 90 | (defn- by-segments 91 | [db pos segments] 92 | (mapcat (partial by-segment db pos) segments)) 93 | 94 | (defn- matches 95 | [db pattern leaves?] 96 | (let [segments (segmentize pattern) 97 | length (count segments) 98 | pred (partial (if leaves? = <=) length) 99 | matches (for [[pos pattern] segments] 100 | (->> (by-pos db pos) 101 | (glob pattern) 102 | (by-segments db pos) 103 | (filter (comp pred second)) 104 | (set))) 105 | paths (reduce union #{} matches)] 106 | (->> (reduce intersection paths matches) 107 | (map (partial truncate-to length)) 108 | (group-by first) 109 | (map (partial prefix-info length)) 110 | (sort-by :path)))) 111 | 112 | ;; 113 | ;; Indexes 114 | ;; 115 | 116 | (defrecord AtomIndex [options db aggregates pattern] 117 | component/Lifecycle 118 | (start [this] 119 | (let [aggregates (or (:aggregates options) [])] 120 | (assoc this 121 | :db (atom {}) 122 | :aggregates aggregates 123 | :pattern (make-pattern aggregates)))) 124 | (stop [this] 125 | (assoc this 126 | :db nil 127 | :aggregates nil 128 | :pattern nil)) 129 | MetricIndex 130 | (register! [this path] 131 | (let [segments (segmentize path) 132 | length (count segments)] 133 | (doseq [[pos segment] segments] 134 | (swap! db update pos 135 | push-segment* 136 | segment path length)))) 137 | (prefixes [index pattern] 138 | (matches db pattern false)) 139 | (truncate! [this] 140 | (reset! 
db {})) 141 | (multiplex-aggregates [this prefixes] 142 | (multiplex-aggregates-paths aggregates prefixes)) 143 | (extract-aggregate [this prefix] 144 | (extract-aggregate-path pattern prefix))) 145 | 146 | (defrecord EmptyIndex [] 147 | component/Lifecycle 148 | (start [this] this) 149 | (stop [this] this) 150 | MetricIndex 151 | (register! [this path]) 152 | (prefixes [index pattern])) 153 | 154 | (defmulti build-index (comp (fnil keyword "atom") :type)) 155 | 156 | (defmethod build-index :empty 157 | [options] 158 | (EmptyIndex.)) 159 | 160 | (defmethod build-index :atom 161 | [options] 162 | (map->AtomIndex options)) 163 | -------------------------------------------------------------------------------- /test/io/cyanite/integration/engine_test.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.integration.engine-test 2 | (:require [io.cyanite.store :refer :all] 3 | [io.cyanite.query :refer :all] 4 | [clojure.test :refer :all] 5 | [io.cyanite.test-helper :refer :all] 6 | [io.cyanite.engine :as engine] 7 | [io.cyanite.engine.rule :as rule] 8 | [io.cyanite.index :as index] 9 | [qbits.alia :as alia] 10 | [io.cyanite.store :as store])) 11 | 12 | (defn mmap-vals 13 | [f m] 14 | (zipmap (keys m) 15 | (map #(map f %) (vals m)))) 16 | 17 | (defn map-vals 18 | [f m] 19 | (zipmap (keys m) 20 | (map f (vals m)))) 21 | 22 | (defn populate 23 | [index store] 24 | (doseq [[path base] [["a.b.c" 10] ["a.b.d" 20]]] ;; "a.b.e" 25 | (do 26 | (index/register! index path) 27 | (doseq [i (range 1 100)] 28 | (insert! store 29 | path 30 | (rule/map->Resolution {:precision 5 :period 3600}) 31 | (engine/map->MetricSnapshot {:time (* 5 i) 32 | :mean (double (+ base i)) 33 | :min (double (+ base i)) 34 | :max (double (+ base i)) 35 | :sum (double (+ base i))})))))) 36 | 37 | (defn cleanup 38 | [index store] 39 | (index/truncate! index) 40 | (store/truncate!
store)) 41 | 42 | (defn mk-cfg 43 | [m] 44 | (merge {:engine {:rules {"default" ["5s:1h"]}}} 45 | m)) 46 | 47 | (def CONFIG 48 | [(mk-cfg {:store {:cluster "localhost" 49 | :keyspace "cyanite_test"} 50 | :index {:type :cassandra 51 | :cluster "localhost" 52 | :keyspace "cyanite_test"}}) 53 | (mk-cfg {:store {:type :memory} 54 | :index {:type :atom}})]) 55 | 56 | (deftest index-prefixes-test 57 | (with-config 58 | CONFIG 59 | {} 60 | (let [index (:index *system*) 61 | store (:store *system*) 62 | engine (:engine *system*) 63 | session (:session store)] 64 | 65 | (populate index store) 66 | 67 | (let [result (->> (run-query! index engine 0 100 ["a.b.*"]) 68 | (group-by :target) 69 | (mmap-vals :datapoints) 70 | (map-vals #(mapcat identity %)) 71 | (map-vals #(map first %)))] 72 | (is (= (map double (range 11 30)) 73 | (get result "a.b.c"))) 74 | (is (= (map double (range 21 40)) 75 | (get result "a.b.d")))) 76 | 77 | 78 | (cleanup index store)))) 79 | 80 | (deftest index-no-wildcard-test 81 | (with-config 82 | CONFIG 83 | {} 84 | (let [index (:index *system*) 85 | store (:store *system*) 86 | engine (:engine *system*) 87 | session (:session store)] 88 | (populate index store) 89 | 90 | (let [result (->> (run-query! index engine 0 100 ["sumSeries(a.b.c,a.b.d)"]) 91 | (group-by :target) 92 | (mmap-vals :datapoints) 93 | (map-vals #(mapcat identity %)) 94 | (map-vals #(map first %)))] 95 | (is (= (map double (mapv + (range 11 30) (range 21 40))) 96 | (get result "sumSeries(a.b.c,a.b.d)")))) 97 | 98 | (let [result (->> (run-query! index engine 0 100 ["a.b.c"]) 99 | (group-by :target) 100 | (mmap-vals :datapoints) 101 | (map-vals #(mapcat identity %)) 102 | (map-vals #(map first %)))] 103 | (is (= (map double (range 11 30)) 104 | (get result "a.b.c")))) 105 | (let [result (->> (run-query!
index engine 0 100 ["a.b.d"]) 106 | (group-by :target) 107 | (mmap-vals :datapoints) 108 | (map-vals #(mapcat identity %)) 109 | (map-vals #(map first %)))] 110 | (is (= (map double (range 21 40)) 111 | (get result "a.b.d")))) 112 | (cleanup index store)))) 113 | 114 | (deftest trivial-ingest-test 115 | (with-config 116 | [(mk-cfg {:store {:cluster "localhost" 117 | :keyspace "cyanite_test"} 118 | :index {:type :cassandra 119 | :cluster "localhost" 120 | :keyspace "cyanite_test"}}) 121 | (mk-cfg {{:store {:type :memory}} 122 | {:index {:type :atom}}})] 123 | {} 124 | (let [index (:index *system*) 125 | store (:store *system*) 126 | engine (:engine *system*) 127 | session (:session store)] 128 | 129 | (doseq [i (range 1 8)] 130 | (engine/ingest! engine "a.b.c" i i)) 131 | 132 | (is (= [[3.0 0] [6.5 5]] 133 | (:datapoints (first (run-query! index engine 0 100 ["a.b.c"]))))) 134 | (cleanup index store)))) 135 | -------------------------------------------------------------------------------- /perf/perfTest.jmx: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | false 7 | false 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | continue 16 | 17 | false 18 | 1 19 | 20 | 5 21 | 1 22 | 1400616222000 23 | 1400616222000 24 | false 25 | 26 | 27 | 28 | 29 | 30 | 1 31 | 1 32 | 33 | true 34 | 35 | randy 36 | 37 | 38 | 39 | TCPClientImpl 40 | localhost 41 | true 42 | 2003 43 | false 44 | 1 45 | 1000 46 | foo.bar.baz.${randy} 1 1400616260 47 | foo.bar.bazz.${randy} 1 1400616260 48 | foo.bar.bazz2.${randy} 1 1400616260 49 | foo.bar.bazz3.${randy} 1 1400616260 50 | foo.bar.bazz4.${randy} 1 1400616260 51 | foo.bar.bazz5.${randy} 1 1400616260 52 | foo1.bar.bazz.${randy} 1 1400616260 53 | foo2.bar.bazz2.${randy} 1 1400616260 54 | foo3.bar.bazz3.${randy} 1 1400616260 55 | foo4.bar.bazz4.${randy} 1 1400616260 56 | foo5.bar.bazz5.${randy} 1 1400616260 57 | 58 | false 59 | 0A 60 | 61 | 62 | 63 | 64 | 65 | false 66 | 67 | saveConfig 68 | 69 | 70 | 
true 71 | true 72 | true 73 | 74 | true 75 | true 76 | true 77 | true 78 | false 79 | true 80 | true 81 | false 82 | false 83 | false 84 | false 85 | false 86 | false 87 | false 88 | false 89 | 0 90 | true 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | -------------------------------------------------------------------------------- /tasks/leiningen/fatdeb.clj: -------------------------------------------------------------------------------- 1 | (ns leiningen.fatdeb 2 | "Build a .deb package from leiningen, stolen from riemann" 3 | (:refer-clojure :exclude [replace]) 4 | (:require [clojure.java.shell :refer [sh]] 5 | [clojure.java.io :refer [file delete-file writer copy]] 6 | [clojure.string :refer [join capitalize trim-newline replace]] 7 | [leiningen.uberjar :refer [uberjar]]) 8 | (:import java.text.SimpleDateFormat 9 | java.util.Date)) 10 | 11 | (defn md5 12 | [input] 13 | (let [digest (-> (doto (java.security.MessageDigest/getInstance "MD5") 14 | (.reset) 15 | (.update (.getBytes input))) 16 | (.digest))] 17 | (.toString (java.math.BigInteger. 1 digest) 16))) 18 | 19 | (defn delete-file-recursively 20 | "Delete file f. If it's a directory, recursively delete all its contents. 21 | Raise an exception if any deletion fails unless silently is true." 22 | [f & [silently]] 23 | (System/gc) ; This sometimes helps release files for deletion on windows. 24 | (let [f (file f)] 25 | (if (.isDirectory f) 26 | (doseq [child (.listFiles f)] 27 | (delete-file-recursively child silently))) 28 | (delete-file f silently))) 29 | 30 | (defn deb-dir 31 | "Debian package working directory." 32 | [project] 33 | (file (:root project) "target/deb/cyanite")) 34 | 35 | (defn cleanup 36 | [project] 37 | ; Delete working dir. 
38 | (when (.exists (deb-dir project)) 39 | (delete-file-recursively (deb-dir project)))) 40 | 41 | (defn reset 42 | [project] 43 | (cleanup project) 44 | (sh "rm" (str (:root project) "/target/*.deb"))) 45 | 46 | (def build-date (Date.)) 47 | 48 | (defn get-version 49 | [project] 50 | (let [df (SimpleDateFormat. "yyyyMMdd-HHmmss")] 51 | (replace (:version project) #"SNAPSHOT" (.format df build-date)))) 52 | 53 | (defn control 54 | "Control file" 55 | [project] 56 | (join "\n" 57 | (map (fn [[k v]] (str (capitalize (name k)) ": " v)) 58 | {:package (:name project) 59 | :version (get-version project) 60 | :section "base" 61 | :priority "optional" 62 | :architecture "all" 63 | :depends (join ", " ["bash"]) 64 | :maintainer (:email (:maintainer project)) 65 | :description (:description project)}))) 66 | 67 | (defn write 68 | "Write string to file, plus newline" 69 | [file string] 70 | (with-open [w (writer file)] 71 | (.write w (str (trim-newline string) "\n")))) 72 | 73 | (defn make-deb-dir 74 | "Creates the debian package structure in a new directory." 
75 | [project] 76 | (let [dir (deb-dir project)] 77 | (.mkdirs dir) 78 | 79 | ; Meta 80 | (.mkdirs (file dir "DEBIAN")) 81 | (write (file dir "DEBIAN" "control") (control project)) 82 | (write (file dir "DEBIAN" "conffiles") 83 | (join "\n" ["/etc/cyanite.yaml" 84 | "/etc/init.d/cyanite" 85 | "/etc/default/cyanite"])) 86 | 87 | ; Preinst 88 | (copy (file (:root project) "pkg" "deb" "preinst.sh") 89 | (file dir "DEBIAN" "preinst")) 90 | (.setExecutable (file dir "DEBIAN" "preinst") true false) 91 | 92 | ; Postinst 93 | (copy (file (:root project) "pkg" "deb" "postinst.sh") 94 | (file dir "DEBIAN" "postinst")) 95 | (.setExecutable (file dir "DEBIAN" "postinst") true false) 96 | 97 | ; Prerm 98 | (copy (file (:root project) "pkg" "deb" "prerm.sh") 99 | (file dir "DEBIAN" "prerm")) 100 | (.setExecutable (file dir "DEBIAN" "prerm") true false) 101 | 102 | ; Postrm 103 | (copy (file (:root project) "pkg" "deb" "postrm.sh") 104 | (file dir "DEBIAN" "postrm")) 105 | (.setExecutable (file dir "DEBIAN" "postrm") true false) 106 | 107 | ; Jar 108 | (.mkdirs (file dir "usr" "lib" "cyanite")) 109 | (copy (file (:root project) "target" 110 | (str "cyanite-" (:version project) "-standalone.jar")) 111 | (file dir "usr" "lib" "cyanite" "cyanite.jar")) 112 | 113 | ; cql schema 114 | (.mkdirs (file dir "var" "lib" "cyanite")) 115 | (copy (file (:root project) "doc" "schema.cql") 116 | (file dir "var" "lib" "cyanite" "schema.cql")) 117 | 118 | ; Binary 119 | (.mkdirs (file dir "usr" "bin")) 120 | (copy (file (:root project) "pkg" "deb" "cyanite") 121 | (file dir "usr" "bin" "cyanite")) 122 | (.setExecutable (file dir "usr" "bin" "cyanite") true false) 123 | 124 | ; Log dir 125 | (.mkdirs (file dir "var" "log" "cyanite")) 126 | 127 | ; Config 128 | (.mkdirs (file dir "etc")) 129 | (copy (file (:root project) "doc" "cyanite.yaml") 130 | (file dir "etc" "cyanite.yaml")) 131 | 132 | ; defaults file 133 | (.mkdirs (file dir "etc" "default")) 134 | (copy (file (:root project) "pkg" "deb" 
"cyanite.default") 135 | (file dir "etc" "default" "cyanite")) 136 | 137 | ; Init script 138 | (.mkdirs (file dir "etc" "init.d")) 139 | (copy (file (:root project) "pkg" "deb" "init.sh") 140 | (file dir "etc" "init.d" "cyanite")) 141 | (.setExecutable (file dir "etc" "init.d" "cyanite") true false) 142 | 143 | dir)) 144 | 145 | (defn dpkg 146 | "Convert given package directory to a .deb." 147 | [project deb-dir] 148 | (print (:err (sh "dpkg" "--build" 149 | (str deb-dir) 150 | (str (file (:root project) "target"))))) 151 | (let [deb-file-name (str (:name project) "_" 152 | (get-version project) "_" 153 | "all" ".deb") 154 | deb-file (file (:root project) "target" deb-file-name)] 155 | (write (str deb-file ".md5") 156 | (str (md5 (slurp deb-file)) " " deb-file-name)))) 157 | 158 | (defn fatdeb 159 | ([project] 160 | (reset project) 161 | (uberjar project) 162 | (dpkg project (make-deb-dir project)) 163 | (cleanup project) 164 | (flush))) 165 | -------------------------------------------------------------------------------- /src/io/cyanite/query/ast.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.query.ast 2 | (:require [clojure.string :as s])) 3 | 4 | (defn nil-safe-op 5 | [f] 6 | (if (nil? f) 7 | nil 8 | (fn [& vals] 9 | (if (some nil? vals) 10 | nil 11 | (apply f vals))))) 12 | 13 | (defprotocol SeriesTransform 14 | "A protocol to realize operations on series." 15 | (transform! [this])) 16 | 17 | (defn merge-resolve! 18 | [series floor ceiling] 19 | (let [series (mapcat transform! 
series) 20 | width (reduce + 0 (map (comp count second) series))] 21 | (when-not (<= (or floor 0) width (or ceiling Long/MAX_VALUE)) 22 | (throw (ex-info "invalid width for series" 23 | {:ceiling ceiling 24 | :floor floor 25 | :width width 26 | :series series}))) 27 | [[(s/join "," (map first series)) 28 | (reduce conj [] (map second series))]])) 29 | 30 | (defn flatten-series 31 | [reducer mapper series] 32 | (mapcat 33 | (fn [[name payload]] 34 | (let [payload (if (nil? mapper) 35 | payload 36 | ;; we need a second mapper here for cases when 37 | ;; we sum absolute values or similar 38 | (map #(map mapper %) payload))] 39 | (if (nil? reducer) 40 | payload 41 | (reducer payload)))) 42 | series)) 43 | 44 | 45 | (defn index-series 46 | [series] 47 | (let [names (map first series) 48 | wc (s/join "," names)] 49 | (->> names 50 | (map-indexed #(vector (str %1) %2)) 51 | (reduce merge {})))) 52 | 53 | (defn series-rename 54 | [series repr] 55 | (let [indexed (index-series series)] 56 | (s/replace repr 57 | #"\$([0-9*]+)" 58 | (fn [x] (get indexed (second x) ""))))) 59 | 60 | (defn traverse! 61 | [rename-fn combiner reducer mapper series] 62 | (let [renamed (series-rename series rename-fn) 63 | flattened (flatten-series (nil-safe-op reducer) (nil-safe-op mapper) series) 64 | combined (if (nil? combiner) 65 | flattened 66 | (apply mapv (nil-safe-op combiner) flattened))] 67 | [[renamed combined]])) 68 | 69 | (defn traverse-each! 70 | [rename-fn reducer mapper series] 71 | (for [s series] 72 | (let [renamed (series-rename [s] rename-fn) 73 | mapped (if (nil? mapper) 74 | (second s) 75 | (map (nil-safe-op mapper) (second s))) 76 | reduced (if (nil? 
reducer) 77 | mapped 78 | (reducer mapped))] 79 | [renamed reduced]))) 80 | 81 | 82 | 83 | (defn add-date 84 | [from step data] 85 | (loop [res [] 86 | [d & ds] data 87 | point from] 88 | (if ds 89 | (recur (if d (conj res [d point]) res) ds (+ point step)) 90 | res))) 91 | 92 | (defrecord SumOperation [series] 93 | SeriesTransform 94 | (transform! [this] 95 | (let [merged (merge-resolve! series 1 nil)] 96 | (traverse! 97 | "sumSeries($0)" 98 | + 99 | nil 100 | nil 101 | merged)))) 102 | 103 | (defrecord DerivativeOperation [series] 104 | SeriesTransform 105 | (transform! [this] 106 | (traverse-each! 107 | "derivative($0)" 108 | (fn [s] 109 | (cons nil (map #(apply (nil-safe-op -) (reverse %)) (partition 2 1 s)))) 110 | nil 111 | (transform! series)))) 112 | 113 | (defn lift-single-series 114 | "Lifts the series to " 115 | [series] 116 | (mapcat (fn [[k v]] [k [v]]) 117 | (partition 2 2 118 | series))) 119 | 120 | (defrecord AbsoluteOperation [series] 121 | SeriesTransform 122 | (transform! [this] 123 | (traverse-each! 124 | "absolute($0)" 125 | nil 126 | #(Math/abs %) 127 | (transform! series)))) 128 | 129 | (defrecord DivOperation [top bottom] 130 | SeriesTransform 131 | (transform! [this] 132 | (traverse! 133 | "divideSeries($0)" 134 | / 135 | nil 136 | nil 137 | (merge-resolve! [top bottom] 138 | 1 nil)))) 139 | 140 | (defrecord ScaleOperation [factor series] 141 | SeriesTransform 142 | (transform! [this] 143 | (traverse-each! 144 | (format "scale($0,%s)" factor) 145 | nil 146 | (partial * factor) 147 | (transform! series)))) 148 | 149 | (defrecord IdentityOperation [path series] 150 | SeriesTransform 151 | (transform! [this] 152 | (if (nil? path) 153 | (mapv 154 | (fn [[k v]] 155 | [k v]) series) 156 | [[path (get series path)]]))) 157 | 158 | (defmulti tokens->ast 159 | (fn [series tokens] 160 | (first tokens))) 161 | 162 | (defn wildcard-path? 
163 | [path] 164 | (.contains path "*")) 165 | 166 | (defmethod tokens->ast :path 167 | [series [_ path]] 168 | (if (wildcard-path? path) 169 | (IdentityOperation. nil series) 170 | (IdentityOperation. path series))) 171 | 172 | (defmethod tokens->ast :sumseries 173 | [series [_ & tokens]] 174 | (SumOperation. (mapv #(tokens->ast series %) tokens))) 175 | 176 | (defmethod tokens->ast :divideseries 177 | [series [_ top bottom]] 178 | (DivOperation. (tokens->ast series top) (tokens->ast series bottom))) 179 | 180 | (defmethod tokens->ast :scale 181 | [series [_ path factor]] 182 | (ScaleOperation. (Double. factor) (tokens->ast series path))) 183 | 184 | (defmethod tokens->ast :absolute 185 | [series [_ path]] 186 | (AbsoluteOperation. (tokens->ast series path))) 187 | 188 | (defmethod tokens->ast :derivative 189 | [series [_ path]] 190 | (DerivativeOperation. (tokens->ast series path))) 191 | 192 | (defmethod tokens->ast :default 193 | [series x] 194 | (throw (ex-info "unsupported function in cyanite" {:arg x}))) 195 | 196 | (defn run-query! 197 | ;; for testing purposes 198 | ([tokens series] 199 | (transform! (tokens->ast series tokens))) 200 | ([tokens series from step] 201 | (for [[n datapoints] (transform! (tokens->ast series tokens))] 202 | {:target n :datapoints (add-date from step datapoints)}))) 203 | -------------------------------------------------------------------------------- /doc/source/administrator.rst: -------------------------------------------------------------------------------- 1 | .. _Administrator Guide: 2 | 3 | Administrator Guide 4 | =================== 5 | 6 | This aims to be a simple guide for working with cyanite. 7 | 8 | .. 
_Configuration Syntax: 9 | 10 | Configuration Syntax 11 | -------------------- 12 | 13 | Cyanite's configuration is broken up in different sections: 14 | 15 | - engine 16 | - api 17 | - input 18 | - index 19 | - store 20 | - logging 21 | 22 | Most sections are optional but provide defaults 23 | for a single host testing system. 24 | 25 | Engine 26 | ~~~~~~ 27 | 28 | The engine specifies the behavior of Cyanite's core 29 | which accepts metrics from inputs, aggregates in-memory 30 | and defers to an index and a store when a time-window 31 | elapses. 32 | 33 | The engine accepts the following options: 34 | 35 | *rules*: 36 | Rules specify which resolutions to apply to an incoming metric. 37 | Rules consist of a pattern or the string "default" and an associated 38 | list of resolutions. 39 | Rules are evaluated in a first-match order. Resolutions are stored as a 40 | string of the form ``<precision>:<period>``; you may use unit specifiers 41 | for seconds, minutes, hours, days, weeks, months and years. 42 | 43 | .. sourcecode:: yaml 44 | 45 | engine: 46 | rules: 47 | "web.*\.cpu": [ "5s:1h", "30s:1d" ] 48 | default: [ "5s:1h" ] 49 | 50 | API 51 | ~~~ 52 | 53 | The API specifies the behavior of the HTTP interface which is exposed. 54 | The API accepts the following options: 55 | 56 | *host*: 57 | Address to listen on, defaults to 127.0.0.1 58 | *port*: 59 | Port to bind to, defaults to 8080 60 | *disabled*: 61 | Disable HTTP service altogether, defaults to false. 62 | 63 | .. sourcecode:: yaml 64 | 65 | api: 66 | port: 8080 67 | 68 | 69 | Input 70 | ~~~~~ 71 | 72 | Inputs are methods for Cyanite to ingest metrics. A Cyanite installation 73 | may have several inputs running, and thus accepts a list of input 74 | configurations. 75 | 76 | Each input configuration takes the following options: 77 | 78 | *type*: 79 | Type of input, for now only "carbon" 80 | *host*: 81 | Address to bind to. 82 | *port*: 83 | Port to bind to. 84 | 85 | ..
sourcecode:: yaml 86 | 87 | input: 88 | - type: carbon 89 | port: 2003 90 | 91 | Index 92 | ~~~~~ 93 | 94 | The index determines where metric names will be stored. 95 | The only type of index available now is: "cassandra". 96 | The cassandra index takes the following options: 97 | 98 | *cluster*: 99 | A string or list of strings to provide cluster contact points. 100 | *keyspace*: 101 | The keyspace to use. 102 | 103 | .. sourcecode:: yaml 104 | 105 | index: 106 | type: cassandra 107 | keyspace: metric 108 | cluster: localhost 109 | 110 | Metamonitoring 111 | ~~~~~~~~~~~~~~ 112 | 113 | To enable internal stats you must enable the internal reporter. 114 | 115 | 116 | .. sourcecode:: yaml 117 | 118 | reporter: 119 | metrics: 120 | reporters: 121 | graphite: 122 | interval: 1 123 | opts: 124 | host: 127.0.0.1 125 | port: 2003 126 | prefix: internal 127 | 128 | 129 | Store 130 | ~~~~~ 131 | 132 | The store is where metrics get persisted. 133 | The only store available for now is the "cassandra" 134 | one. 135 | 136 | The following options are accepted: 137 | 138 | *cluster*: 139 | A string or list of strings to provide cluster contact points. 140 | *keyspace*: 141 | The keyspace to use. 142 | 143 | .. sourcecode:: yaml 144 | 145 | store: 146 | cluster: 'localhost' 147 | keyspace: 'metric' 148 | 149 | Logging 150 | ~~~~~~~ 151 | 152 | Specify where to log. Adheres to the configuration format 153 | defined at https://github.com/pyr/unilog 154 | 155 | .. sourcecode:: yaml 156 | 157 | logging: 158 | level: info 159 | console: true 160 | files: 161 | - "/var/log/cyanite/cyanite.log" 162 | 163 | 164 | .. _Graphite Integration: 165 | 166 | Integration with Graphite and Grafana 167 | ------------------------------------- 168 | 169 | Cyanite exposes an API which Grafana can communicate with directly as if it were talking to graphite.
170 | 171 | Administering Cassandra for Cyanite 172 | ----------------------------------- 173 | 174 | Cassandra is a very versatile database - while still being ideally suited 175 | for time-series type workloads. Here are a few pointers which might help when 176 | operating a large metric cluster. 177 | 178 | Choosing a Cassandra version 179 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 180 | 181 | Cyanite requires Cassandra 3.4 as it depends on SASI https://docs.datastax.com/en/cql/3.3/cql/cql_using/useSASIIndexConcept.html. It has been tested 182 | with the 3.4 releases extensively and thus is recommended. 183 | 184 | Choosing a compaction strategy 185 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 186 | 187 | ``DateTieredCompactionStrategy`` is likely to be your best bet. 188 | 189 | The following config causes most compaction activity to occur at 10m and 2h windows. 190 | If you want to allow 24h windows, simply raise ``max_sstable_age_days`` to '1.0'. 191 | Note that you must be using Apache Cassandra 2.1 in order to set fractional values for 192 | max_sstable_age_days. If you are running an earlier version, then leave it at 1. 193 | 194 | .. sourcecode:: json 195 | 196 | compaction = {'class': 'DateTieredCompactionStrategy', 197 | 'min_threshold': '12', 'max_threshold': '32', 198 | 'max_sstable_age_days': '0.083', 'base_time_seconds': '50' } 199 | 200 | If you are willing to modify your Cassandra installation, ``TimeWindowCompactionStrategy`` gives great results 201 | and fits the cyanite use case perfectly. To use it you will need to build the project yourself, as per instructions on 202 | https://github.com/jeffjirsa/twcs. Once built, you can publish the JAR to the classpath of your Cassandra installation. 203 | The following config can be used to take advantage of it: 204 | 205 | ..
sourcecode:: json 206 | 207 | compaction = {'unchecked_tombstone_compaction': 'false', 208 | 'tombstone_threshold': '0.2', 209 | 'class': 'com.jeffjirsa.cassandra.db.compaction.TimeWindowCompactionStrategy'} 210 | 211 | 212 | Choosing a read and write consistency level 213 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 214 | 215 | By default Cyanite will read at consistency level ``ONE`` and 216 | write at consistency level ``ANY``, thus favoring speed over 217 | accuracy / consistency. You can specify alternative consistency 218 | levels with the ``read-consistency`` and ``write-consistency`` sections 219 | of the store configuration. 220 | 221 | Cyanite out of band operations 222 | ------------------------------ 223 | 224 | The side-project: https://github.com/WrathOfChris/cyanite-utils provides 225 | a few utilities to help with cyanite maintenance. 226 | -------------------------------------------------------------------------------- /doc/source/concepts.rst: -------------------------------------------------------------------------------- 1 | Design and Concepts 2 | =================== 3 | 4 | This section describes the internal design of Cyanite 5 | and the different components which it builds upon. 6 | 7 | .. _Architecture: 8 | 9 | Internal Architecture 10 | --------------------- 11 | 12 | Cyanite's internal architecture is that of the typical 13 | stream processing project, it connect inputs to outputs 14 | while doing normalization and mangling. 15 | 16 | The internal architecture can be represented with this 17 | simple diagram: 18 | 19 | .. image:: _static/Architecture.png 20 | 21 | As is described on the diagram the workflow is: 22 | 23 | - Each input produces new normalized payloads (*metrics*), 24 | and enqueues them. 25 | - The engine core pops items off the queue and processes them 26 | - When applicable items are handed off to a *store* and *index*. 27 | components respectively responsible for persisting time-series 28 | and indexing metric names. 
29 | - An API component which is able to interact with the engine 30 | and other components to query and present data. 31 | 32 | Input Components 33 | ---------------- 34 | 35 | Input components are arbitrary means of inputting metrics. 36 | Out of the box there are three available types of input 37 | components, such as ``carbon``: a TCP listener for the text 38 | protocol known as *Carbon*, which is part of the *Graphite* 39 | tool-suite. 40 | 41 | The other inputs, such as Kafka, Pickle are planned but not 42 | yet supported by Cyanite. 43 | 44 | Input components are responsible for producing metrics in a normalized 45 | way: 46 | 47 | .. sourcecode:: json 48 | 49 | { 50 | "path": "web01.cpu", 51 | "metric": 40.0, 52 | "time": 1437572671 53 | } 54 | 55 | Once a metric has been normalized, it is handed over to the engine 56 | component through its ``accept!`` function: 57 | 58 | .. sourcecode:: clojure 59 | 60 | (accept! engine {:path "web01.cpu" :metric 40.0 :time 1437572671}) 61 | 62 | Engine Component 63 | ---------------- 64 | 65 | The engine component is responsible for popping metrics off of its 66 | input queue, aggregating data over a time-window and producing write 67 | operations on the index and store components when necessary. 68 | 69 | To do this it holds on available points for a while and flushing them 70 | out when necessary. 71 | 72 | .. note:: 73 | 74 | The fact that Cyanite holds on to metrics in memory makes it a 75 | stateful server. As such if you wish to use Cyanite in a redundant 76 | fashion, you will need to account for it. See the :ref:`Administrator Guide` 77 | for details on how to deal with this. 78 | 79 | Aggregation Mechanism 80 | ~~~~~~~~~~~~~~~~~~~~~ 81 | 82 | The aggregation mechanism for metrics relies on a fast in-memory 83 | hash-table. Each metric is kept on a bucket corresponding to the 84 | metric path and its resolution. 
When a time-window has passed for a metric 85 | sums, means, minima and maxima are computed and emitted as a /snapshot/. 86 | 87 | Cyanite supports several resolutions based on a first-match set of rules 88 | as provided in the configuration file. Rules are a list of patterns 89 | to match a metric name on, corresponding to a list of resolutions 90 | (in the form ``<precision>:<period>``). 91 | 92 | Snapshots are then handed over to the store and index components. 93 | The process can be represented visually like this: 94 | 95 | .. image:: _static/Rules.png 96 | 97 | .. note:: 98 | Cyanite accounts for drift by comparing its wall clock to the values provided in metrics. 99 | Once a resolution's time-slot passes over including when accounting for drift, values are 100 | flushed and a snapshot is created. This means that out-of-order values may be supplied but 101 | not once a time-slot has already been flushed. 102 | 103 | Store component 104 | --------------- 105 | 106 | The store component is called for each snapshot created by the engine component. 107 | The only working component stores data to `Apache Cassandra`_. 108 | 109 | Storage Schema 110 | ~~~~~~~~~~~~~~ 111 | 112 | The following schema is used to store data: 113 | 114 | .. literalinclude:: ./../schema.cql 115 | 116 | 117 | This schema leverages Cassandra's ``Compact Storage`` option to ensure a minimal overhead. 118 | Please be sure to choose the optimal compaction strategy for your use case. If available 119 | the ``DateTieredCompactionStrategy`` is likely your best bet. 120 | 121 | 122 | .. _Apache Cassandra: http://cassandra.apache.org 123 | 124 | 125 | Index Component 126 | --------------- 127 | 128 | Cyanite stores metric names in Cassandra, using SASI index. Cyanite index component is 129 | responsible for building an index of path names and providing a way of querying them back.
130 | 131 | Cyanite will work out of the box, although in order to improve query performance, 132 | you can use Cyanite index extension that helps to build more compact trees in Cassandra 133 | SASI index. It's not necessary to use them, although it's highly advised especially if you 134 | have a lot of metrics. 135 | 136 | Index component can be enabled by the following configuration: 137 | 138 | .. sourcecode:: yaml 139 | 140 | index: 141 | type: cassandra 142 | cluster: '127.0.0.1' 143 | keyspace: 'cyanite_dev' 144 | 145 | Enabling advanced tokenizer 146 | --------------------------- 147 | 148 | In order to enable index, you should build tokenizer and put it into `lib` directory of 149 | your cassandra distribution. After that, create (or re-create) your SASI index for 150 | segment with:: 151 | 152 | CREATE CUSTOM INDEX IF NOT EXISTS on segment(segment) USING 'org.apache.cassandra.index.sasi.SASIIndex' WITH OPTIONS = {'analyzer_class': 'org.apache.cassandra.index.sasi.analyzer.SplittingTokenizer'}; 153 | 154 | And turn it on in configuration using `with_tokenizer` directive: 155 | 156 | .. sourcecode:: yaml 157 | 158 | index: 159 | type: cassandra 160 | cluster: '127.0.0.1' 161 | keyspace: 'cyanite_dev' 162 | with_tokenizer: true 163 | 164 | Index Caching 165 | ------------- 166 | 167 | Cyanite caches index lookups for 1 minute by default. You can configure cache ttl 168 | by using `cache_ttl_in_ms`: 169 | 170 | .. sourcecode:: yaml 171 | 172 | index: 173 | type: cassandra 174 | cluster: '127.0.0.1' 175 | keyspace: 'cyanite_dev' 176 | cache_ttl_in_ms: 5000 177 | 178 | All aforementioned configuration options may be used in combination. 179 | 180 | API Component 181 | ------------- 182 | 183 | The API component is responsible for exposing an HTTP service to service queries. 184 | The API component exposes the following HTTP routes: 185 | 186 | - ``/ping``: report online status 187 | - ``/metrics``: query metrics.
Takes ``from``, ``to`` (optional), and any number of ``path`` arguments. 188 | - ``/paths``: query paths. Takes a ``query`` argument. 189 | 190 | Any other request will yield a 404 response. 191 | -------------------------------------------------------------------------------- /src/io/cyanite/engine.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.engine 2 | "The core of cyanite" 3 | (:require [com.stuartsierra.component :as component] 4 | [io.cyanite.engine.rule :as rule] 5 | [io.cyanite.engine.queue :as q] 6 | [spootnik.reporter :as r] 7 | [io.cyanite.store.pure :as p] 8 | [io.cyanite.index :as index] 9 | [io.cyanite.store :as store] 10 | [io.cyanite.utils :refer [nbhm assoc-if-absent! contains-key entries remove!]] 11 | [io.cyanite.engine.drift :refer [drift! skewed-epoch! epoch!]] 12 | [clojure.tools.logging :refer [info debug error]]) 13 | (:import io.cyanite.engine.rule.Resolution)) 14 | 15 | (defprotocol Resolutioner 16 | (resolution [this oldest newest path aggregate])) 17 | 18 | (defprotocol Metric 19 | (snapshot! [this]) 20 | (append! [this ts val])) 21 | 22 | (defprotocol Engine 23 | (ingest! 
[this path ts metric]) 24 | (query [this from to leaves]) 25 | ;; TODO: GC metrics that weren't updated for a longer time 26 | ) 27 | 28 | (defn time-slot 29 | [^Resolution resolution ^Long now] 30 | (let [p (:precision resolution)] 31 | (* p (quot now p)))) 32 | 33 | (defrecord MetricSnapshot [time mean min max sum]) 34 | 35 | (defn new-state 36 | [ts v] 37 | (let [bit-v (Double/doubleToRawLongBits v)] 38 | (long-array [ts 1 bit-v bit-v bit-v]))) 39 | 40 | (defn update-state 41 | [[old-ts count minv maxv sum] ts v] 42 | (let [minv (Double/longBitsToDouble minv) 43 | maxv (Double/longBitsToDouble maxv) 44 | sum (Double/longBitsToDouble sum)] 45 | (long-array [ts 46 | (inc count) 47 | (Double/doubleToRawLongBits (min minv v)) 48 | (Double/doubleToRawLongBits (max maxv v)) 49 | (Double/doubleToRawLongBits (+ sum v))]))) 50 | 51 | (defn to-snapshot 52 | [[ts count minv maxv sum]] 53 | (let [min (Double/longBitsToDouble minv) 54 | max (Double/longBitsToDouble maxv) 55 | sum (Double/longBitsToDouble sum)] 56 | (MetricSnapshot. ts 57 | (/ sum count) 58 | min 59 | max 60 | count))) 61 | 62 | (defrecord MetricState [resolution state] 63 | Metric 64 | (append! [this ts v] 65 | (loop [] 66 | (let [current @state 67 | [ret-v to-set] (if (nil? current) 68 | ;; new window opens, set timestamp 69 | [nil (new-state ts v)] 70 | (let [timestamp (first current) 71 | diff (- ts timestamp) 72 | window-size (:precision resolution) 73 | current-window? (> window-size diff)] 74 | (if current-window? 75 | ;; should we include in the current window? 76 | [nil (update-state current timestamp v)] 77 | ;; do a snapshot and put metric to the new window 78 | [current (new-state ts v)])))] 79 | 80 | (if (compare-and-set! state current to-set) 81 | (if (nil? ret-v) nil (to-snapshot ret-v)) 82 | (recur))))) 83 | 84 | (snapshot! 
[this] 85 | (if-let [current @state] 86 | (to-snapshot current) 87 | (throw (ex-info "Can't take a snapshot of an empty metric" {}))))) 88 | 89 | (defn make-metric-state 90 | [resolution] 91 | (MetricState. resolution (atom nil))) 92 | 93 | (defrecord DefaultEngine [rules state store queues ingestq planner drift reporter index] 94 | component/Lifecycle 95 | (start [this] 96 | (let [state (nbhm) 97 | planner (map rule/->rule rules) 98 | ingestq (:ingestq queues)] 99 | (info "starting engine") 100 | (let [this (assoc this :planner planner :state state :ingestq ingestq)] 101 | ;; (q/consume! ingestq (partial ingest! this)) 102 | (r/instrument! reporter [:cyanite]) 103 | this))) 104 | (stop [this] 105 | (assoc this :planner nil :state nil :ingestq nil)) 106 | 107 | Engine 108 | (ingest! [this path ts value] 109 | (when-not (contains-key state path) 110 | (index/register! index path) 111 | (let [resolutions (rule/->exec-plan planner path)] 112 | (assoc-if-absent! state path (atom 113 | (zipmap resolutions 114 | (map #(make-metric-state %) 115 | resolutions)))))) 116 | 117 | (doseq [metric-monoid (vals @(get state path))] 118 | (when-let [snapshot (append! metric-monoid ts value)] 119 | (store/insert! store path (:resolution metric-monoid) snapshot)))) 120 | 121 | (query [this from to paths] 122 | (let [raw-series (store/fetch! store from to paths) 123 | current-points (map 124 | (fn [{:keys [path resolution aggregate] :as id}] 125 | ;; try finding an in-memory state 126 | (when-let [resolutions (get state path)] 127 | (when-let [state (get @resolutions resolution)] 128 | (let [snapshot (snapshot! state) 129 | time (:time snapshot)] 130 | (when (and (>= time from) (<= time to)) 131 | {:id id 132 | :point (get snapshot 133 | (if (= :default aggregate) 134 | ;; or switch to mean everywhere? 
135 | :mean 136 | aggregate)) 137 | :time time}))))) 138 | paths) 139 | series (concat raw-series (filter identity current-points)) 140 | [precision series] (p/normalize series)] 141 | (p/data->series series to precision))) 142 | 143 | 144 | Resolutioner 145 | (resolution [this oldest newest path aggregate] 146 | (let [plan (->> (rule/->exec-plan planner path) 147 | (sort-by :precision)) 148 | resolution (some #(rule/fit? % oldest newest) 149 | plan)] 150 | {:path path 151 | :resolution (or resolution (first plan)) 152 | :aggregate aggregate}))) 153 | 154 | (defn make-engine 155 | [options] 156 | (map->DefaultEngine options)) 157 | 158 | (prefer-method print-method clojure.lang.IRecord clojure.lang.IDeref) 159 | -------------------------------------------------------------------------------- /src/io/cyanite/index/cassandra.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.index.cassandra 2 | (:import [com.github.benmanes.caffeine.cache Caffeine CacheLoader LoadingCache] 3 | [java.util.concurrent TimeUnit]) 4 | (:require [com.stuartsierra.component :as component] 5 | [io.cyanite.store.cassandra :as c] 6 | [io.cyanite.index :as index] 7 | [io.cyanite.utils :refer [contains-key]] 8 | [qbits.alia :as alia] 9 | [globber.glob :refer [glob]] 10 | [clojure.string :refer [index-of join split]] 11 | [clojure.tools.logging :refer [error]])) 12 | 13 | (def default-cache-ttl-in-ms 60000) 14 | 15 | (defn mk-insert-segmentq 16 | [session] 17 | (alia/prepare 18 | session 19 | "INSERT INTO segment (parent, segment, pos, length, leaf) VALUES (?, ?, ?, ?, ?);")) 20 | 21 | (defn runq! 
(defn index-of-first
  "Return the index of the *first* occurrence in `s` of any character in
  `chars`, or -1 when none occurs.

  Fix: the previous reduction kept the largest matching index, i.e. the
  position of the *last* wildcard. For a pattern mixing wildcard kinds
  (e.g. `a*b?c`) that made `glob-to-like` emit a LIKE prefix that still
  contained raw glob characters."
  [chars s]
  (reduce
   (fn [idx ch]
     (let [c (or (index-of s ch) -1)]
       ;; keep the smallest non-negative index seen so far
       (if (and (>= c 0)
                (or (neg? idx) (< c idx)))
         c
         idx)))
   -1
   chars))

(defn glob-to-like
  "Translate a graphite glob `pattern` into a CQL LIKE prefix.
  Returns nil when the pattern starts with a wildcard (no usable prefix),
  the pattern itself when it contains no wildcard, otherwise the literal
  prefix up to the first wildcard followed by `%`."
  [pattern]
  (let [pos (index-of-first [\* \? \[] pattern)]
    (cond
      (zero? pos) nil
      (neg? pos)  pattern
      :else       (str (subs pattern 0 pos) "%"))))

(defn compose-parts
  "Break a dotted `path` into its successive prefixes, as pairs of
  [position prefix]; e.g. \"a.b.c\" yields ([1 \"a\"] [2 \"a.b\"] [3 \"a.b.c\"])."
  [path]
  (let [parts (split path #"\.")]
    (map (fn [i] [i (join "." (take i parts))])
         (range 1 (inc (count parts))))))

(defn prefix-info
  "Shape a segment row into the map graphite-web expects for path
  completion. `pattern-length` is currently unused but kept so the
  partial application in `load-prefixes-fn` stays valid."
  [pattern-length [path length leaf]]
  {:text          (last (split path #"\."))
   :id            path
   :path          path
   :allowChildren (not leaf)
   :expandable    (not leaf)
   :leaf          leaf})

(defn native-sasi-index
  "Query the segment table relying on native SASI indexes, picking the
  most selective WHERE clause the pattern allows.

  NOTE(review): the statement is built by string concatenation from the
  user-supplied pattern; a pattern containing a single quote can inject
  CQL. Consider a prepared statement with bound values."
  [session pattern parts]
  (let [globbed       (glob-to-like pattern)
        pos           (count parts)
        globbed-parts (if (nil? globbed) [] (split globbed #"\."))]
    (alia/execute
     session
     (str "SELECT * from segment WHERE "
          (cond
            ;; Top-level query, return root only
            (= pattern "*")
            "parent = 'root' AND pos = 1"
            ;; Postfix wildcard query (`*.abc` and alike), optimise by position
            (nil? globbed)
            (str "pos = " pos)
            ;; Prefix wildcard query (`abc.*` and alike), add parent
            (= pos (count globbed-parts))
            (str "parent = '" (join "." (butlast globbed-parts)) "'"
                 " AND pos = " pos)
            ;; Prefix wildcard query, (`abc.*.def`), can't use position
            :else
            (str "pos = " pos
                 " AND segment LIKE '" globbed "' ALLOW FILTERING"))))))

(defn with-cyanite-tokenizer
  "Query the segment table assuming the custom cyanite tokenizer is
  installed, which lets LIKE match the raw glob pattern directly.
  (Fix: removed the unused `globbed-parts` binding.)

  NOTE(review): same string-concatenation injection concern as
  `native-sasi-index`."
  [session pattern parts]
  (let [globbed (glob-to-like pattern)
        pos     (count parts)]
    (alia/execute
     session
     (cond
       ;; Top-level query, return root only
       (= pattern "*")
       "SELECT * FROM segment WHERE parent = 'root' AND pos = 1"
       ;; Postfix wildcard query (`*.abc` and alike), optimise by position
       (nil? globbed)
       (str "SELECT * FROM segment WHERE pos = " pos)
       ;; Prefix wildcard query (`abc.*` and alike)
       :else
       (str "SELECT * FROM segment WHERE segment LIKE '" pattern
            "' AND pos = " pos " ALLOW FILTERING")))))

(defn load-prefixes-fn
  "Run `index-query-fn` for `pattern`, keep only rows whose segment
  actually glob-matches the pattern (the CQL query over-approximates),
  and shape each survivor with `prefix-info`."
  [session index-query-fn pattern]
  (let [parts    (split pattern #"\.")
        pos      (count parts)
        res      (index-query-fn session pattern parts)
        filtered (set (glob pattern (map :segment res)))]
    (->> res
         (filter (fn [{:keys [segment]}] (contains? filtered segment)))
         (map (juxt :segment :length :leaf))
         (map (partial prefix-info pos)))))

(defrecord CassandraIndex [options session ^LoadingCache cache
                           aggregates pattern
                           insert-segmentq index-query-fn
                           wrcty rdcty]
  component/Lifecycle
  (start [this]
    (let [[session rdcty wrcty] (c/session! options)
          index-query-fn        (if (:with_tokenizer options)
                                  with-cyanite-tokenizer
                                  native-sasi-index)
          aggregates            (or (:aggregates options) [])]
      (assoc this
             :aggregates aggregates
             :pattern (index/make-pattern aggregates)
             :session session
             ;; Fix: rdcty/wrcty were bound but never stored, so
             ;; register! ran with a nil :consistency.
             :rdcty rdcty
             :wrcty wrcty
             :insert-segmentq (mk-insert-segmentq session)
             ;; Prefix lookups are cached; entries expire after
             ;; :cache_ttl_in_ms (or the default TTL).
             :cache (-> (Caffeine/newBuilder)
                        (.expireAfterWrite
                         (or (:cache_ttl_in_ms options)
                             default-cache-ttl-in-ms)
                         TimeUnit/MILLISECONDS)
                        (.build (reify CacheLoader
                                  (load [this pattern]
                                    (load-prefixes-fn session index-query-fn pattern))))))))
  (stop [this]
    (-> this
        (assoc :session nil)
        (assoc :insert-segmentq nil)))
  index/MetricIndex
  (register! [this path]
    ;; Store every (parent, segment, position) pair for the path,
    ;; prepending a synthetic "root" parent for the first segment.
    (let [parts  (compose-parts path)
          fpart  (first parts)
          parts  (cons [[0 "root"] fpart] (partition 2 1 parts))
          length (count parts)]
      (doseq [[[_ parent] [i part]] parts]
        (runq! session insert-segmentq
               [parent
                part
                (int i)
                length
                (= length i)]            ;; leaf when at the last position
               {:consistency wrcty}))))
  (prefixes [this pattern]
    (.get cache pattern))
  (truncate! [this]
    (alia/execute session "TRUNCATE segment"))
  (multiplex-aggregates [this prefixes]
    (index/multiplex-aggregates-paths aggregates prefixes))
  (extract-aggregate [this prefix]
    (index/extract-aggregate-path pattern prefix)))

(defmethod index/build-index :cassandra
  [options]
  (map->CassandraIndex {:options (dissoc options :type)}))

;; ---------------------------------------------------------------------
;; src/io/cyanite/api.clj
;; ---------------------------------------------------------------------
(ns io.cyanite.api
  "Cyanite's HTTP interface"
  (:require [com.stuartsierra.component :as component]
            [cheshire.core              :as json]
            [clj-time.core              :as t]
            [io.cyanite.engine.rule     :as rule]
            [io.cyanite.engine          :as engine]
            [io.cyanite.engine.queue    :as q]
            [io.cyanite.index           :as index]
            [io.cyanite.store           :as store]
            [io.cyanite.store.pure      :as pure]
            [io.cyanite.query           :as query]
            [net.http.server            :as http]
            [io.cyanite.engine.drift    :refer [epoch!]]
            [io.cyanite.utils           :refer [nbhm assoc-if-absent!]]
            [clj-time.coerce            :refer [to-epoch]]
            [clojure.tools.logging      :refer [info debug error]]
            [clojure.string             :refer [lower-case blank?]]))

(defn sub-time
  "Parse a relative interval such as \"5m\" or \"2h\" and return the
  epoch of (now - interval). Unknown unit letters fall back to seconds.
  Throws ex-info when the string does not start with digits+letter.
  (The redundant `when` around the body was dropped: the guard above
  already throws on invalid input.)"
  [s]
  (let [[_ value unit] (re-find #"^([0-9]+)([a-z])" s)]
    (when-not (and value unit)
      (throw (ex-info (str "invalid time interval: " s) {:value value
                                                         :unit  unit})))
    (to-epoch
     (t/minus (t/now)
              ((case unit
                 "s" t/seconds
                 "m" t/minutes
                 "h" t/hours
                 "d" t/days
                 "w" t/weeks
                 t/seconds)
               (Long/valueOf value))))))
(defn parse-time
  "Parse a time parameter into an epoch long.
  Accepts nil (-> nil), \"now\" (-> current drift-corrected epoch),
  \"-<interval>\" (-> relative via sub-time), or a literal epoch string.
  Unparseable strings are logged and yield nil."
  [time-string drift]
  (cond
    (nil? time-string)
    nil

    (= time-string "now")
    (epoch! drift)

    (.startsWith time-string "-")
    (sub-time (.substring time-string 1))

    :else
    (try (Long/parseLong time-string)
         (catch NumberFormatException _
           (error "wrong time format: " time-string)
           nil))))

(def routes
  "Dead simple router"
  [[:paths   #"^/paths.*"]
   [:paths   #"^/metrics/find"]
   [:metrics #"^/metrics.*"]
   [:render  #"^/render"]
   [:ping    #"^/ping/?"]])

(defn match-route
  "Predicate which returns the matched elements"
  [{:keys [uri path-info] :as request} [action re]]
  (when (re-matches re (or path-info uri))
    action))

(defn assoc-route
  "Find which route matches if any and store the appropriate action
   in the :action field of the request"
  [request]
  (assoc request :action (some (partial match-route request) routes)))

(defmulti dispatch
  "Dispatch on action, as found by assoc-route"
  :action)

(defn process
  "Process a request. Handle errors and report them as JSON; handlers
  may throw ex-info with :status/:body/:suppress?/:exception data."
  [request store index engine drift]
  (try
    {:status  200
     :headers {"Content-Type" "application/json"}
     :body    (json/generate-string
               (dispatch (assoc request
                                :store  store
                                :index  index
                                :drift  drift
                                :engine engine)))}
    (catch Exception e
      (let [{:keys [status body suppress? exception]} (ex-data e)]
        (when-not suppress?
          (error e "could not process request")
          (when exception
            (error exception "request process exception")))
        {:status  (or status 500)
         :headers {"Content-Type" "application/json"}
         :body    (json/generate-string
                   {:message (or body (.getMessage e))})}))))

(defmethod dispatch :ping
  [_]
  {:response "pong"})

(defmethod dispatch :paths
  [{{:keys [query]} :params index :index}]
  (debug "path fetch request for:" (pr-str query))
  (when (sequential? query)
    (throw (ex-info "only one query argument supported for path queries"
                    {:status 400 :suppress? true})))
  ;; Fix: the blank-query branch used to yield the bare string "*"; the
  ;; [path _] sequential destructuring turned that into the character \*,
  ;; which then failed downstream where a string path is expected.
  (let [[path _] (if (blank? query)
                   ["*"]
                   (index/extract-aggregate index query))]
    (->> path
         (index/prefixes index)
         (index/multiplex-aggregates index))))

(defmethod dispatch :render
  [{{:keys [from until target format]} :params :keys [index drift store engine]}]
  (when (not= format "json")
    (throw (ex-info "Cyanite only outputs JSON for now"
                    {:suppress? true :status 400})))
  (let [from (or (parse-time from drift)
                 (throw (ex-info "missing from parameter"
                                 {:suppress? true :status 400})))
        to   (or (parse-time until drift) (epoch! drift))]
    (query/run-query! index engine from to
                      ;; Fix: was `seq?`, which is false for the vectors
                      ;; ring produces for repeated query params; use
                      ;; `sequential?` like the :metrics handler does.
                      (if (sequential? target)
                        target
                        [target]))))

(defmethod dispatch :metrics
  [{{:keys [from to path]} :params :keys [index store engine drift]}]
  (debug "metric fetch request for:" (pr-str path))
  (let [from       (or (parse-time from drift)
                       (throw (ex-info "missing from parameter"
                                       {:suppress? true :status 400})))
        to         (or (parse-time to drift) (epoch! drift))
        ;; Expand each requested path through the index, pair every
        ;; prefix with its aggregate, then resolve storage resolutions.
        paths      (->> (if (sequential? path) path [path])
                        (map #(index/extract-aggregate index %))
                        (mapcat (fn [[path aggregate]]
                                  (->> path
                                       (index/prefixes index)
                                       (map #(vector % aggregate)))))
                        (map (fn [[path aggregate]]
                               (engine/resolution engine from to (:path path) aggregate)))
                        (remove nil?))
        raw-series (store/fetch! store from to paths)
        [precision series] (pure/normalize raw-series)]
    (pure/data->series series to precision)))

(defmethod dispatch :default
  [_]
  (throw (ex-info "unknown action" {:status 404 :suppress? true})))

(defn make-handler
  "Yield a ring-handler for a request"
  [store index engine drift]
  (fn [request]
    (debug "got request: " (pr-str request))
    (-> request
        (assoc-route)
        (process store index engine drift))))

(defrecord Api [options server service store index engine drift]
  component/Lifecycle
  (start [this]
    (if (:disabled options)
      this
      (let [handler (make-handler store index engine drift)
            server  (http/run-server options handler)]
        (assoc this :server server))))
  (stop [this]
    ;; run-server yields a stop function; invoke it when present
    (when (fn? server)
      (server))
    (assoc this :server nil)))
server) 180 | (server)) 181 | (assoc this :server nil))) 182 | -------------------------------------------------------------------------------- /doc/grafana-defaults.ini: -------------------------------------------------------------------------------- 1 | ##################### Grafana Configuration Defaults ##################### 2 | # 3 | # Do not modify this file in grafana installs 4 | # 5 | 6 | app_mode = production 7 | 8 | #################################### Paths #################################### 9 | [paths] 10 | # Path to where grafana can store temp files, sessions, and the sqlite3 db (if that is used) 11 | # 12 | data = site/grafana/data 13 | # 14 | # Directory where grafana can store logs 15 | # 16 | logs = site/grafana/grafana.log 17 | 18 | #################################### Server #################################### 19 | [server] 20 | # Protocol (http or https) 21 | protocol = http 22 | 23 | # The ip address to bind to, empty will bind to all interfaces 24 | http_addr = 25 | 26 | # The http port to use 27 | http_port = 3000 28 | 29 | # The public facing domain name used to access grafana from a browser 30 | domain = localhost 31 | 32 | # Redirect to correct domain if host header does not match domain 33 | # Prevents DNS rebinding attacks 34 | enforce_domain = false 35 | 36 | # The full public facing url 37 | root_url = %(protocol)s://%(domain)s:%(http_port)s/ 38 | 39 | # Log web requests 40 | router_logging = false 41 | 42 | # the path relative working path 43 | static_root_path = /usr/share/grafana/public 44 | 45 | # enable gzip 46 | enable_gzip = false 47 | 48 | # https certs & key file 49 | cert_file = 50 | cert_key = 51 | 52 | #################################### Database #################################### 53 | [database] 54 | # Either "mysql", "postgres" or "sqlite3", it's your choice 55 | type = sqlite3 56 | host = 127.0.0.1:3306 57 | name = grafana 58 | user = root 59 | password = 60 | 61 | # For "postgres" only, either "disable", 
"require" or "verify-full" 62 | ssl_mode = disable 63 | 64 | # For "sqlite3" only, path relative to data_path setting 65 | path = site/grafana/grafana.db 66 | 67 | #################################### Session #################################### 68 | [session] 69 | # Either "memory", "file", "redis", "mysql", "postgresql", default is "file" 70 | provider = file 71 | 72 | # Provider config options 73 | # memory: not have any config yet 74 | # file: session dir path, is relative to grafana data_path 75 | # redis: config like redis server e.g. `addr=127.0.0.1:6379,pool_size=100,db=grafana` 76 | # postgres: user=a password=b host=localhost port=5432 dbname=c sslmode=disable 77 | # mysql: go-sql-driver/mysql dsn config string, e.g. `user:password@tcp(127.0.0.1:3306)/database_name` 78 | 79 | provider_config = sessions 80 | 81 | # Session cookie name 82 | cookie_name = grafana_sess 83 | 84 | # If you use session in https only, default is false 85 | cookie_secure = false 86 | 87 | # Session life time, default is 86400 88 | session_life_time = 86400 89 | 90 | #################################### Analytics #################################### 91 | [analytics] 92 | # Server reporting, sends usage counters to stats.grafana.org every 24 hours. 93 | # No ip addresses are being tracked, only simple counters to track 94 | # running instances, dashboard and error counts. It is very helpful to us. 95 | # Change this option to false to disable reporting. 
96 | reporting_enabled = true 97 | 98 | # Google Analytics universal tracking code, only enabled if you specify an id here 99 | google_analytics_ua_id = 100 | 101 | #################################### Security #################################### 102 | [security] 103 | # default admin user, created on startup 104 | admin_user = admin 105 | 106 | # default admin password, can be changed before first start of grafana, or in profile settings 107 | admin_password = admin 108 | 109 | # used for signing 110 | secret_key = SW2YcwTIb9zpOOhoPsMm 111 | 112 | # Auto-login remember days 113 | login_remember_days = 7 114 | cookie_username = grafana_user 115 | cookie_remember_name = grafana_remember 116 | 117 | # disable gravatar profile images 118 | disable_gravatar = false 119 | 120 | #################################### Users #################################### 121 | [users] 122 | # disable user signup / registration 123 | allow_sign_up = true 124 | 125 | # Allow non admin users to create organizations 126 | allow_org_create = true 127 | 128 | # Set to true to automatically assign new users to the default organization (id 1) 129 | auto_assign_org = true 130 | 131 | # Default role new users will be automatically assigned (if auto_assign_org above is set to true) 132 | auto_assign_org_role = Viewer 133 | 134 | #################################### Anonymous Auth ########################## 135 | [auth.anonymous] 136 | # enable anonymous access 137 | enabled = false 138 | 139 | # specify organization name that should be used for unauthenticated users 140 | org_name = Main Org. 
141 | 142 | # specify role for unauthenticated users 143 | org_role = Viewer 144 | 145 | #################################### Github Auth ########################## 146 | [auth.github] 147 | enabled = false 148 | allow_sign_up = false 149 | client_id = some_id 150 | client_secret = some_secret 151 | scopes = user:email 152 | auth_url = https://github.com/login/oauth/authorize 153 | token_url = https://github.com/login/oauth/access_token 154 | api_url = https://api.github.com/user 155 | team_ids = 156 | allowed_domains = 157 | allowed_organizations = 158 | 159 | #################################### Google Auth ########################## 160 | [auth.google] 161 | enabled = false 162 | allow_sign_up = false 163 | client_id = some_client_id 164 | client_secret = some_client_secret 165 | scopes = https://www.googleapis.com/auth/userinfo.profile https://www.googleapis.com/auth/userinfo.email 166 | auth_url = https://accounts.google.com/o/oauth2/auth 167 | token_url = https://accounts.google.com/o/oauth2/token 168 | api_url = https://www.googleapis.com/oauth2/v1/userinfo 169 | allowed_domains = 170 | 171 | #################################### Basic Auth ########################## 172 | [auth.basic] 173 | enabled = true 174 | 175 | #################################### Auth Proxy ########################## 176 | [auth.proxy] 177 | enabled = false 178 | header_name = X-WEBAUTH-USER 179 | header_property = username 180 | auto_sign_up = true 181 | 182 | #################################### Auth LDAP ########################## 183 | [auth.ldap] 184 | enabled = false 185 | config_file = /etc/grafana/ldap.toml 186 | 187 | #################################### SMTP / Emailing ########################## 188 | [smtp] 189 | enabled = false 190 | host = localhost:25 191 | user = 192 | password = 193 | cert_file = 194 | key_file = 195 | skip_verify = false 196 | from_address = admin@grafana.localhost 197 | 198 | [emails] 199 | welcome_email_on_sign_up = false 200 | templates_pattern = 
emails/*.html 201 | 202 | #################################### Logging ########################## 203 | [log] 204 | # Either "console", "file", default is "console" 205 | # Use comma to separate multiple modes, e.g. "console, file" 206 | mode = console, file 207 | 208 | # Buffer length of channel, keep it as it is if you don't know what it is. 209 | buffer_len = 10000 210 | 211 | # Either "Trace", "Debug", "Info", "Warn", "Error", "Critical", default is "Trace" 212 | level = Info 213 | 214 | # For "console" mode only 215 | [log.console] 216 | level = 217 | 218 | # For "file" mode only 219 | [log.file] 220 | level = 221 | # This enables automated log rotate(switch of following options), default is true 222 | log_rotate = true 223 | 224 | # Max line number of single file, default is 1000000 225 | max_lines = 1000000 226 | 227 | # Max size shift of single file, default is 28 means 1 << 28, 256MB 228 | max_lines_shift = 28 229 | 230 | # Segment log daily, default is true 231 | daily_rotate = true 232 | 233 | # Expired days of log file(delete after max days), default is 7 234 | max_days = 7 235 | 236 | #################################### AMPQ Event Publisher ########################## 237 | [event_publisher] 238 | enabled = false 239 | rabbitmq_url = amqp://localhost/ 240 | exchange = grafana_events 241 | 242 | #################################### Dashboard JSON files ########################## 243 | [dashboards.json] 244 | enabled = false 245 | path = site/grafana/dashboards 246 | -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 
5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " applehelp to make an Apple Help Book" 34 | @echo " devhelp to make HTML files and a Devhelp project" 35 | @echo " epub to make an epub" 36 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 37 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 38 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 39 | @echo " text to make text files" 40 | @echo " man to make manual pages" 41 | @echo 
" texinfo to make Texinfo files" 42 | @echo " info to make Texinfo files and run them through makeinfo" 43 | @echo " gettext to make PO message catalogs" 44 | @echo " changes to make an overview of all changed/added/deprecated items" 45 | @echo " xml to make Docutils-native XML files" 46 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 47 | @echo " linkcheck to check all external links for integrity" 48 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 49 | @echo " coverage to run coverage check of the documentation (if enabled)" 50 | 51 | clean: 52 | rm -rf $(BUILDDIR)/* 53 | 54 | html: 55 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 56 | @echo 57 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 58 | 59 | dirhtml: 60 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 61 | @echo 62 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 63 | 64 | singlehtml: 65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 66 | @echo 67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 68 | 69 | pickle: 70 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 71 | @echo 72 | @echo "Build finished; now you can process the pickle files." 73 | 74 | json: 75 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 76 | @echo 77 | @echo "Build finished; now you can process the JSON files." 78 | 79 | htmlhelp: 80 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 81 | @echo 82 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 83 | ".hhp project file in $(BUILDDIR)/htmlhelp." 
84 | 85 | qthelp: 86 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 87 | @echo 88 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 89 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 90 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Cyanite.qhcp" 91 | @echo "To view the help file:" 92 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Cyanite.qhc" 93 | 94 | applehelp: 95 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 96 | @echo 97 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 98 | @echo "N.B. You won't be able to view it unless you put it in" \ 99 | "~/Library/Documentation/Help or install it in your application" \ 100 | "bundle." 101 | 102 | devhelp: 103 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 104 | @echo 105 | @echo "Build finished." 106 | @echo "To view the help file:" 107 | @echo "# mkdir -p $$HOME/.local/share/devhelp/Cyanite" 108 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Cyanite" 109 | @echo "# devhelp" 110 | 111 | epub: 112 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 113 | @echo 114 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 115 | 116 | latex: 117 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 118 | @echo 119 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 120 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 121 | "(use \`make latexpdf' here to do that automatically)." 122 | 123 | latexpdf: 124 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 125 | @echo "Running LaTeX files through pdflatex..." 126 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 127 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 128 | 129 | latexpdfja: 130 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 131 | @echo "Running LaTeX files through platex and dvipdfmx..." 
132 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 133 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 134 | 135 | text: 136 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 137 | @echo 138 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 139 | 140 | man: 141 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 142 | @echo 143 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 144 | 145 | texinfo: 146 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 147 | @echo 148 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 149 | @echo "Run \`make' in that directory to run these through makeinfo" \ 150 | "(use \`make info' here to do that automatically)." 151 | 152 | info: 153 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 154 | @echo "Running Texinfo files through makeinfo..." 155 | make -C $(BUILDDIR)/texinfo info 156 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 157 | 158 | gettext: 159 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 160 | @echo 161 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 162 | 163 | changes: 164 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 165 | @echo 166 | @echo "The overview file is in $(BUILDDIR)/changes." 167 | 168 | linkcheck: 169 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 170 | @echo 171 | @echo "Link check complete; look for any errors in the above output " \ 172 | "or in $(BUILDDIR)/linkcheck/output.txt." 173 | 174 | doctest: 175 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 176 | @echo "Testing of doctests in the sources finished, look at the " \ 177 | "results in $(BUILDDIR)/doctest/output.txt." 
178 | 179 | coverage: 180 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 181 | @echo "Testing of coverage in the sources finished, look at the " \ 182 | "results in $(BUILDDIR)/coverage/python.txt." 183 | 184 | xml: 185 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 186 | @echo 187 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 188 | 189 | pseudoxml: 190 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 191 | @echo 192 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 193 | -------------------------------------------------------------------------------- /doc/source/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # Cyanite documentation build configuration file, created by 5 | # sphinx-quickstart on Wed Jul 22 11:34:53 2015. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | import sys 17 | import os 18 | import shlex 19 | import sphinx_rtd_theme 20 | 21 | # If extensions (or modules to document with autodoc) are in another directory, 22 | # add these directories to sys.path here. If the directory is relative to the 23 | # documentation root, use os.path.abspath to make it absolute, like shown here. 24 | #sys.path.insert(0, os.path.abspath('.')) 25 | 26 | # -- General configuration ------------------------------------------------ 27 | 28 | # If your documentation needs a minimal Sphinx version, state it here. 29 | #needs_sphinx = '1.0' 30 | 31 | # Add any Sphinx extension module names here, as strings. 
They can be 32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 33 | # ones. 34 | extensions = [] 35 | 36 | # Add any paths that contain templates here, relative to this directory. 37 | templates_path = ['_templates'] 38 | 39 | # The suffix(es) of source filenames. 40 | # You can specify multiple suffix as a list of string: 41 | # source_suffix = ['.rst', '.md'] 42 | source_suffix = '.rst' 43 | 44 | # The encoding of source files. 45 | #source_encoding = 'utf-8-sig' 46 | 47 | # The master toctree document. 48 | master_doc = 'index' 49 | 50 | # General information about the project. 51 | project = 'Cyanite' 52 | copyright = '2015, Pierre-Yves Ritschard' 53 | author = 'Pierre-Yves Ritschard' 54 | 55 | # The version info for the project you're documenting, acts as replacement for 56 | # |version| and |release|, also used in various other places throughout the 57 | # built documents. 58 | # 59 | # The short X.Y version. 60 | version = '0.5.1' 61 | # The full version, including alpha/beta/rc tags. 62 | release = 'Hidden Coffin' 63 | 64 | # The language for content autogenerated by Sphinx. Refer to documentation 65 | # for a list of supported languages. 66 | # 67 | # This is also used if you do content translation via gettext catalogs. 68 | # Usually you set "language" from the command line for these cases. 69 | language = None 70 | 71 | # There are two options for replacing |today|: either, you set today to some 72 | # non-false value, then it is used: 73 | #today = '' 74 | # Else, today_fmt is used as the format for a strftime call. 75 | #today_fmt = '%B %d, %Y' 76 | 77 | # List of patterns, relative to source directory, that match files and 78 | # directories to ignore when looking for source files. 79 | exclude_patterns = [] 80 | 81 | # The reST default role (used for this markup: `text`) to use for all 82 | # documents. 83 | #default_role = None 84 | 85 | # If true, '()' will be appended to :func: etc. cross-reference text. 
86 | #add_function_parentheses = True 87 | 88 | # If true, the current module name will be prepended to all description 89 | # unit titles (such as .. function::). 90 | #add_module_names = True 91 | 92 | # If true, sectionauthor and moduleauthor directives will be shown in the 93 | # output. They are ignored by default. 94 | #show_authors = False 95 | 96 | # The name of the Pygments (syntax highlighting) style to use. 97 | pygments_style = 'sphinx' 98 | 99 | # A list of ignored prefixes for module index sorting. 100 | #modindex_common_prefix = [] 101 | 102 | # If true, keep warnings as "system message" paragraphs in the built documents. 103 | #keep_warnings = False 104 | 105 | # If true, `todo` and `todoList` produce output, else they produce nothing. 106 | todo_include_todos = False 107 | 108 | 109 | # -- Options for HTML output ---------------------------------------------- 110 | 111 | # The theme to use for HTML and HTML Help pages. See the documentation for 112 | # a list of builtin themes. 113 | html_theme = 'sphinx_rtd_theme' 114 | 115 | # Theme options are theme-specific and customize the look and feel of a theme 116 | # further. For a list of options available for each theme, see the 117 | # documentation. 118 | #html_theme_options = {} 119 | 120 | # Add any paths that contain custom themes here, relative to this directory. 121 | #html_theme_path = [] 122 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 123 | 124 | # The name for this set of Sphinx documents. If None, it defaults to 125 | # " v documentation". 126 | #html_title = None 127 | 128 | # A shorter title for the navigation bar. Default is the same as html_title. 129 | #html_short_title = None 130 | 131 | # The name of an image file (relative to this directory) to place at the top 132 | # of the sidebar. 133 | #html_logo = None 134 | 135 | # The name of an image file (within the static path) to use as favicon of the 136 | # docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32 137 | # pixels large. 138 | #html_favicon = None 139 | 140 | # Add any paths that contain custom static files (such as style sheets) here, 141 | # relative to this directory. They are copied after the builtin static files, 142 | # so a file named "default.css" will overwrite the builtin "default.css". 143 | html_static_path = ['_static'] 144 | 145 | # Add any extra paths that contain custom files (such as robots.txt or 146 | # .htaccess) here, relative to this directory. These files are copied 147 | # directly to the root of the documentation. 148 | #html_extra_path = [] 149 | 150 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 151 | # using the given strftime format. 152 | #html_last_updated_fmt = '%b %d, %Y' 153 | 154 | # If true, SmartyPants will be used to convert quotes and dashes to 155 | # typographically correct entities. 156 | #html_use_smartypants = True 157 | 158 | # Custom sidebar templates, maps document names to template names. 159 | #html_sidebars = {} 160 | 161 | # Additional templates that should be rendered to pages, maps page names to 162 | # template names. 163 | #html_additional_pages = {} 164 | 165 | # If false, no module index is generated. 166 | #html_domain_indices = True 167 | 168 | # If false, no index is generated. 169 | #html_use_index = True 170 | 171 | # If true, the index is split into individual pages for each letter. 172 | #html_split_index = False 173 | 174 | # If true, links to the reST sources are added to the pages. 175 | #html_show_sourcelink = True 176 | 177 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 178 | #html_show_sphinx = True 179 | 180 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 181 | #html_show_copyright = True 182 | 183 | # If true, an OpenSearch description file will be output, and all pages will 184 | # contain a tag referring to it. 
The value of this option must be the 185 | # base URL from which the finished HTML is served. 186 | #html_use_opensearch = '' 187 | 188 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 189 | #html_file_suffix = None 190 | 191 | # Language to be used for generating the HTML full-text search index. 192 | # Sphinx supports the following languages: 193 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' 194 | # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr' 195 | #html_search_language = 'en' 196 | 197 | # A dictionary with options for the search language support, empty by default. 198 | # Now only 'ja' uses this config value 199 | #html_search_options = {'type': 'default'} 200 | 201 | # The name of a javascript file (relative to the configuration directory) that 202 | # implements a search results scorer. If empty, the default will be used. 203 | #html_search_scorer = 'scorer.js' 204 | 205 | # Output file base name for HTML help builder. 206 | htmlhelp_basename = 'Cyanitedoc' 207 | 208 | # -- Options for LaTeX output --------------------------------------------- 209 | 210 | latex_elements = { 211 | # The paper size ('letterpaper' or 'a4paper'). 212 | #'papersize': 'letterpaper', 213 | 214 | # The font size ('10pt', '11pt' or '12pt'). 215 | #'pointsize': '10pt', 216 | 217 | # Additional stuff for the LaTeX preamble. 218 | #'preamble': '', 219 | 220 | # Latex figure (float) alignment 221 | #'figure_align': 'htbp', 222 | } 223 | 224 | # Grouping the document tree into LaTeX files. List of tuples 225 | # (source start file, target name, title, 226 | # author, documentclass [howto, manual, or own class]). 227 | latex_documents = [ 228 | (master_doc, 'Cyanite.tex', 'Cyanite Documentation', 229 | 'Pierre-Yves Ritschard', 'manual'), 230 | ] 231 | 232 | # The name of an image file (relative to this directory) to place at the top of 233 | # the title page. 
234 | #latex_logo = None 235 | 236 | # For "manual" documents, if this is true, then toplevel headings are parts, 237 | # not chapters. 238 | #latex_use_parts = False 239 | 240 | # If true, show page references after internal links. 241 | #latex_show_pagerefs = False 242 | 243 | # If true, show URL addresses after external links. 244 | #latex_show_urls = False 245 | 246 | # Documents to append as an appendix to all manuals. 247 | #latex_appendices = [] 248 | 249 | # If false, no module index is generated. 250 | #latex_domain_indices = True 251 | 252 | 253 | # -- Options for manual page output --------------------------------------- 254 | 255 | # One entry per manual page. List of tuples 256 | # (source start file, name, description, authors, manual section). 257 | man_pages = [ 258 | (master_doc, 'cyanite', 'Cyanite Documentation', 259 | [author], 1) 260 | ] 261 | 262 | # If true, show URL addresses after external links. 263 | #man_show_urls = False 264 | 265 | 266 | # -- Options for Texinfo output ------------------------------------------- 267 | 268 | # Grouping the document tree into Texinfo files. List of tuples 269 | # (source start file, target name, title, author, 270 | # dir menu entry, description, category) 271 | texinfo_documents = [ 272 | (master_doc, 'Cyanite', 'Cyanite Documentation', 273 | author, 'Cyanite', 'One line description of project.', 274 | 'Miscellaneous'), 275 | ] 276 | 277 | # Documents to append as an appendix to all manuals. 278 | #texinfo_appendices = [] 279 | 280 | # If false, no module index is generated. 281 | #texinfo_domain_indices = True 282 | 283 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 284 | #texinfo_show_urls = 'footnote' 285 | 286 | # If true, do not generate a @detailmenu in the "Top" node's menu. 
287 | #texinfo_no_detailmenu = False 288 | -------------------------------------------------------------------------------- /src/io/cyanite/query/parser.clj: -------------------------------------------------------------------------------- 1 | (ns io.cyanite.query.parser 2 | "Graphite DSL to AST translation" 3 | (:require [instaparse.core :as parse])) 4 | 5 | (def init 6 | "Instaparse grammar for the Graphite DSL" 7 | " = path | func 8 | = <'('> 9 | = <')'> 10 | = <','> 11 | = #'[+-]?[0-9]+' 12 | = #'[+-]?[0-9]+(\\.[0-9]+)?' 13 | = <'\"'> ( #'(?i)max' | #'(?i)min' | #'(?i)avg' ) <'\"'> 14 | = <'\\''> ( #'(?i)max' | #'(?i)min' | #'(?i)avg' ) <'\\''> 15 | = aggregator1 | aggregator2 16 | = <'\"'> ( #'(?i)max' | #'(?i)min' ) <'\"'> 17 | = <'\\''> ( #'(?i)max' | #'(?i)min' ) <'\\''> 18 | = consol1 | consol2 19 | = <'\"'> #'(?i)[a-z0-9 .%+*/_-]*'<'\"'> 20 | = <'\\''> #'(?i)[a-z0-9 .%+*/_-]*'<'\\''> 21 | = qstr1 | qstr2 22 | cactisi = <#'(?i)si'> 23 | cactibin = <#'(?i)bin'> 24 | = <'\"'> ( cactisi | cactibin) <'\"'> 25 | = <'\\''> ( cactisi | cactibin) <'\\''> 26 | = (cactisystem1 | cactisystem2) 27 | = ( #'(?i)true' | #'(?i)false' ) 28 | seconds = <#'(?i)s(second(s)?)?'> 29 | min = <#'(?i)m(in(ute(s)?)?)?'> 30 | hours = <#'(?i)h(our(s)?)?'> 31 | days = <#'(?i)d(ay(s)?)?'> 32 | weeks = <#'(?i)w((eek)?s)?'> 33 | months = <#'(?i)mon(th(s)?)?'> 34 | years = <#'(?i)y(ear(s)?)?'> 35 | = seconds | min | hours | days | weeks | months | years 36 | = <'\\''> number quantifier <'\\''> 37 | = <'\"'> number quantifier <'\"'> 38 | timerange = timerange1 | timerange2 39 | absolutepoints = number 40 | = absolutepoints | timerange 41 | 42 | absolute = <#'(?i)absolute'> op expr cp 43 | aggregateline = <#'(?i)aggregateline'> op expr sep aggregator cp 44 | alias = <#'(?i)alias'> op expr sep qstr cp 45 | aliasmetric = <#'(?i)aliasbymetric'> op expr cp 46 | aliasnode = <#'(?i)aliasbynode'> op expr sep number (sep number)* cp 47 | aliassub = <#'(?i)aliassub'> op expr sep qstr sep qstr cp 48 | 
alpha = <#'(?i)alpha'> op expr sep float cp 49 | areabetween = <#'(?i)areabetween'> op arglist cp 50 | aspercent = <#'(?i)aspercent'> op expr ( sep (expr | number) )? cp 51 | avgabove = <#'(?i)averageabove'> op expr sep float cp 52 | avgbelow = <#'(?i)averagebelow'> op expr sep float cp 53 | avgpercentile = <#'(?i)averageoutsidepercentile'> op expr sep float cp 54 | avgseries = (<#'(?i)averageseries' | #'(?i)avg'>) op arglist cp 55 | = <#'(?i)averageserieswithwildcards'> 56 | avgwildcards = op expr sep float (sep float)* cp 57 | = absolute | aggregateline | alias | aliasmetric | aliasnode 58 | = aliassub | alpha | areabetween | aspercent | avgabove 59 | = avgbelow | avgpercentile | avgseries | avgwildcards 60 | = afns1 | afns2 | afns3 61 | 62 | cactistyle = <#'(?i)cactistyle'> op expr (sep cactisystem)? cp 63 | changed = <#'(?i)changed'> op expr cp 64 | color = <#'(?i)color'> op expr sep qstr cp 65 | consolidate = <#'(?i)consolidateby'> op expr sep consolidator cp 66 | constantline = <#'(?i)constantline'> op float cp 67 | countseries = <#'(?i)countseries'> op arglist cp 68 | cumulative = <#'(?i)cumulative'> op expr sep consolidator cp 69 | currentabove = <#'(?i)currentabove'> op expr sep number cp 70 | currentbelow = <#'(?i)currentbelow'> op expr sep number cp 71 | = cactistyle | changed | color | consolidate | constantline 72 | = countseries | cumulative | currentabove | currentbelow 73 | = cfns1 | cfns2 74 | 75 | dashed = <#'(?i)dashed'> op arglist (sep float)? 
cp 76 | derivative = <#'(?i)derivative'> op expr cp 77 | diffseries = <#'(?i)diffseries'> op arglist cp 78 | diffconstant = <#'(?i)diffseries'> op expr sep float cp 79 | divideseries = <#'(?i)divideseries'> op expr sep expr cp 80 | drawinfinite = <#'(?i)drawasinfinite'> op expr cp 81 | = dashed | derivative | diffseries | diffconstant 82 | = divideseries | drawinfinite 83 | = dfns1 | dfns2 84 | 85 | exclude = <#'(?i)exclude'> op expr sep qstr cp 86 | = exclude 87 | 88 | grep = <#'(?i)grep'> op expr sep qstr cp 89 | group = <#'(?i)group'> op arglist cp 90 | groupbynode = <#'(?i)groupbynode'> op expr sep number sep qstr cp 91 | = grep | group | groupbynode 92 | 93 | highestavg = <#'(?i)highestaverage'> op expr sep number cp 94 | highestcurrent = <#'(?i)highestcurrent'> op expr sep number cp 95 | highestmax = <#'(?i)highestmax'> op expr sep number cp 96 | hitcount = <#'(?i)hitcount'> op expr sep qstr (sep bool) cp 97 | hwaberration = <#'(?i)holtwintersaberration'> op expr sep number cp 98 | hwconfband = <#'(?i)holtwintersconfidencebands'> op expr sep number cp 99 | hwconfarea = <#'(?i)holtwintersconfidencearea'> op expr sep number cp 100 | hwforecast = <#'(?i)holtwintersforecast'> op expr cp 101 | = highestavg | highestcurrent | highestmax | hitcount 102 | = hwaberration | hwconfband | hwconfarea | hwforecast 103 | = hfns1 | hfns2 104 | 105 | identity = <#'(?i)identity'> op expr cp 106 | integral = <#'(?i)integral'> op expr cp 107 | invert = <#'(?i)invert'> op expr cp 108 | isnonnull = <#'(?i)isnonnull'> op expr cp 109 | = identity | integral | invert | isnonnull 110 | 111 | 112 | keeplastvalue = <#'(?i)keeplastvalue'> op expr (sep number)? cp 113 | = keeplastvalue 114 | 115 | limit = <#'(?i)limit'> op expr cp 116 | linewidth = <#'(?i)linewidth'> op expr sep number cp 117 | logarithm = <#'(?i)logarithm'> op expr (sep number)? 
cp 118 | lowestavg = <#'(?i)lowestaverage'> op expr sep number cp 119 | lowestcurrent = <#'(?i)lowestcurrent'> op expr sep number cp 120 | = limit | linewidth | logarithm | lowestavg | lowestcurrent 121 | 122 | mapseries = <#'(?i)mapseries'> op expr sep number cp 123 | maxseries = <#'(?i)maxseries'> op arglist cp 124 | maxabove = <#'(?i)maximumabove'> op expr sep float cp 125 | maxbelow = <#'(?i)maximumbelow'> op expr sep float cp 126 | minseries = <#'(?i)minseries'> op arglist cp 127 | minabove = <#'(?i)minimumabove'> op expr sep float cp 128 | minbelow = <#'(?i)minimumbelow'> op expr sep float cp 129 | mostdeviant = <#'(?i)mostdeviant'> op expr sep float cp 130 | movingavg = <#'(?i)movingaverage'> op expr sep points cp 131 | movingmedian = <#'(?i)movingmedian'> op expr sep points cp 132 | multiplyseries = <#'(?i)multiplyseries'> op arglist cp 133 | = mapseries | maxseries | maxabove | maxbelow | minseries 134 | = minabove | minbelow | mostdeviant | movingavg 135 | = movingmedian | multiplyseries 136 | = mfns1 | mfns2 | mfns3 137 | 138 | npercentile = <#'(?i)npercentile'> op expr sep float cp 139 | nonnegderive = <#'(?i)nonnegativederivative'> op expr (sep float)? cp 140 | = npercentile | nonnegderive 141 | 142 | offset = <#'(?i)offset'> op expr sep float cp 143 | offsettozero = <#'(?i)offsettozero'> op expr cp 144 | = offset | offsettozero 145 | 146 | persecond = <#'(?i)persecond'> op expr (sep float)? cp 147 | pctileseries = <#'(?i)percentileseries'> op expr sep float (sep bool)? cp 148 | pow = <#'(?i)pow'> op expr sep number cp 149 |

= persecond | pctileseries | pow 150 | 151 | randomwalk = <#'(?i)randomwalkfunction'> op path (sep number)? cp 152 | rangeseries = <#'(?i)rangeofseries'> op arglist cp 153 | = <#'(?i)reduceseries'> 154 | reduceseries = redkw op expr sep qstr sep number (sep qstr)* cp 155 | removeabovepct = <#'(?i)removeabovepercentile'> op expr sep float cp 156 | removeaboveval = <#'(?i)removeabovevalue'> op expr sep float cp 157 | removebelowpct = <#'(?i)removebelowpercentile'> op expr sep float cp 158 | removebelowval = <#'(?i)removebelowvalue'> op expr sep float cp 159 | removebtwpct = <#'(?i)removebetweenpercentile'> op expr sep float cp 160 | = randomwalk | rangeseries | reduceseries | removeabovepct 161 | = removeaboveval | removebelowpct | removebelowval 162 | = removebtwpct 163 | = rfns1 | rfns2 | rfns3 164 | 165 | scale = <#'(?i)scale'> op expr sep float cp 166 | scalesecs = <#'(?i)scaletoseconds'> op expr sep float cp 167 | secondyaxis = <#'(?i)secondyaxis'> op expr cp 168 | = <#'(?i)sin(function)?'> 169 | sinfn = sinkw op expr (sep float (sep float)?)? cp 170 | = <#'(?i)smartsummarize'> 171 | smartsum = smartsumkw op expr sep qstr (sep qstr (sep bool)?)? cp 172 | sortmaxima = <#'(?i)sortbymaxima'> op expr cp 173 | sortminima = <#'(?i)sortbyminima'> op expr cp 174 | sortname = <#'(?i)sortbyname'> op expr cp 175 | sqrt = <#'(?i)squareroot'> op expr cp 176 | stacked = <#'(?i)stacked'> op expr (sep qstr)? cp 177 | stddevseries = <#'(?i)stddevseries'> op arglist cp 178 | stdev = <#'(?i)stdev'> op expr sep number (sep float)? cp 179 | substr = <#'(?i)substr'> op expr (sep number (sep number)?)? cp 180 | sumseries = <#'(?i)sum(series)?'> op arglist cp 181 | sumserieswild = <#'(?i)sumserieswithwildcards'> op expr (sep number)* cp 182 | = <#'(?i)summarize'> 183 | summarize = sumkw op expr sep points (sep qstr (sep bool)?)? 
cp 184 | = scale | scalesecs | secondyaxis | sinfn | smartsum 185 | = sortmaxima | sortminima | sortname | sqrt | stacked 186 | = stddevseries | stdev | substr | sumseries | sumserieswild 187 | = summarize 188 | = sfns1 | sfns2 | sfns3 | sfns4 189 | 190 | threshold = <#'(?i)threshold'> op float (sep qstr (sep qstr))? cp 191 | timefn = <#'(?i)time(function)?'> op path (sep number)? cp 192 | timeshift = <#'(?i)timeshift'> op expr sep points (sep bool)? cp 193 | = <#'(?i)timestack'> 194 | timestack = tmstackkw op expr sep points sep number sep number cp 195 | transformnull = <#'(?i)transformnull'> op expr (sep number)? cp 196 | = threshold | timefn | timeshift | timestack | transformnull 197 | 198 | useabove = <#'(?i)useabove'> op expr float sep qstr sep qstr cp 199 | = useabove 200 | 201 | weightedavg = <#'(?i)weightedaverage'> op expr sep expr sep number cp 202 | = weightedavg 203 | 204 | 205 | = #'(?i)[a-z0-9.*_-]+' 206 | = <'\"'> uqpath <'\"'> 207 | path = uqpath | qpath 208 | = a|c|d|e|g|h|i|k|l|m|n|o|p|r|s|t|u|w 209 | = (expr <','>)* (Epsilon | expr)") 210 | 211 | (def query->tokens 212 | "The parser for the grammar, yields a naive AST built of tokens" 213 | (comp 214 | first 215 | (parse/parser init))) 216 | --------------------------------------------------------------------------------