├── .editorconfig ├── .eslintrc.js ├── .github ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE.md ├── PULL_REQUEST_TEMPLATE.md └── workflows │ └── main.yml ├── .gitignore ├── .travis.yml ├── Dockerfile ├── Makefile ├── README.md ├── action.yml ├── data ├── check.js ├── database │ ├── akumuli.js │ ├── arctic.js │ ├── argus.js │ ├── atlas.js │ ├── beringei.js │ ├── biggraphite.js │ ├── blueflood.js │ ├── btrdb.js │ ├── catena.js │ ├── chronix.js │ ├── citus.js │ ├── clickhouse.js │ ├── cortex.js │ ├── crate.js │ ├── dalmatinerdb.js │ ├── filodb.js │ ├── flint.js │ ├── gnocchi.js │ ├── griddb.js │ ├── hawkularmetrics.js │ ├── heroic.js │ ├── index.js │ ├── influxdb.js │ ├── iotdb.js │ ├── irondb.js │ ├── kairosdb.js │ ├── khronus.js │ ├── lindb.js │ ├── m3db.js │ ├── metrictank.js │ ├── newts.js │ ├── opentsdb.js │ ├── pinot.js │ ├── pinusdb.js │ ├── pipelinedb.js │ ├── prometheus.js │ ├── questdb.js │ ├── schema.js │ ├── seriously.js │ ├── sidewinder.js │ ├── siridb.js │ ├── tdengine.js │ ├── tgres.js │ ├── thanos.js │ ├── timbala.js │ ├── timely.js │ ├── timescaledb.js │ ├── traildb.js │ ├── tsdblayer.js │ ├── uts.js │ ├── vaultaire.js │ ├── victoriametrics.js │ ├── vulcan.js │ ├── warp10.js │ └── xephonk.js ├── generate.js └── package.json ├── docs ├── .vuepress │ ├── components │ │ └── database │ │ │ ├── detail.vue │ │ │ └── table.vue │ ├── config.gh.js │ └── config.js ├── README.md ├── database │ ├── akumuli.md │ ├── arctic.md │ ├── argus.md │ ├── atlas.md │ ├── beringei.md │ ├── biggraphite.md │ ├── blueflood.md │ ├── btrdb.md │ ├── catena.md │ ├── chronix.md │ ├── citus.md │ ├── clickhouse.md │ ├── cortex.md │ ├── crate.md │ ├── dalmatinerdb.md │ ├── filodb.md │ ├── flint.md │ ├── gnocchi.md │ ├── griddb.md │ ├── heroic.md │ ├── influxdb.md │ ├── iotdb.md │ ├── irondb.md │ ├── kairosdb.md │ ├── lindb.md │ ├── m3db.md │ ├── metrictank.md │ ├── opentsdb.md │ ├── pinot.md │ ├── pinusdb.md │ ├── pipelinedb.md │ ├── prometheus.md │ ├── questdb.md │ ├── 
seriously.md │ ├── sidewinder.md │ ├── siridb.md │ ├── tdengine.md │ ├── tgres.md │ ├── thanos.md │ ├── timbala.md │ ├── timely.md │ ├── timescaledb.md │ ├── traildb.md │ ├── tsdblayer.md │ ├── uts.md │ ├── vaultaire.md │ ├── victoriametrics.md │ ├── vulcan.md │ ├── warp10.md │ └── xephonk.md └── databases.md ├── hack ├── README.md └── deploy.sh ├── package.json └── yarn.lock /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [Makefile] 4 | indent_style = tab -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | root: true, 3 | parser: 'babel-eslint', 4 | env: { 5 | browser: true, 6 | node: true 7 | }, 8 | extends: 'standard', 9 | // required to lint *.vue files 10 | plugins: [ 11 | 'html' 12 | ], 13 | globals: {} 14 | }; -------------------------------------------------------------------------------- /.github/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, gender identity and expression, level of experience, 9 | nationality, personal appearance, race, religion, or sexual identity and 10 | orientation. 
11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. 
Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at sindresorhus@gmail.com. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at [http://contributor-covenant.org/version/1/4][version] 72 | 73 | [homepage]: http://contributor-covenant.org 74 | [version]: http://contributor-covenant.org/version/1/4/ 75 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contribution Guidelines 2 | 3 | This project is released with a [Contributor Code of Conduct](CODE_OF_CONDUCT.md). By participating in this project you agree to abide by its terms. 4 | 5 | FIXME: this doc is outdated, we are using vuepress instead of nuxt.js now 6 | 7 | ## DO NOT 8 | 9 | - edit `README.md` directly 10 | 11 | ## Adding databases 12 | 13 | - use a clean title for you issue and PR, like `[add][database][InfluxDB]` 14 | - provide valid link. 
(NOTE: some paper are not free to public, you can point it to the publisher instead of a PDF file, 15 | NEVER upload those files to public unless you are the author) 16 | - modify `yml` if you want to add one more project or paper 17 | - modify `handlebars` if you think there is a better layout 18 | - generate the `md` file by running `node index.js` 19 | - keep your commit small, so we can cherry pick if you have a big PR and we just want part of them 20 | - rebase your branch if it has conflict due to other people's PR got merged before yours 21 | 22 | NOTE: you need to install `Node.JS` and run `npm install` to generate the md file 23 | 24 | ## Modify the website 25 | 26 | We use [Nuxt](https://github.com/nuxt/nuxt.js) for generating the static website 27 | 28 | - have Node.js > 7.0 installed 29 | - `npm install` 30 | - run `npm run dev` and navigate to `http://localhost:3000` 31 | - run `node index.js` to generate new json data file from yml files 32 | - run `npm run generate` to generate the static website -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | ## Type of action 7 | 8 | 9 | 10 | - add 11 | - update 12 | - delete 13 | 14 | ## Content 15 | 16 | 17 | 18 | ## Category 19 | 20 | 21 | 22 | 23 | 24 | 25 | ## Reason (Optional) 26 | 27 | 28 | 29 | 30 | 31 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | ## Type of action 9 | 10 | 11 | 12 | - add 13 | - update 14 | - delete 15 | 16 | ## Content 17 | 18 | 19 | 20 | ## Category 21 | 22 | 23 | 24 | 25 | 26 | 27 | ## Reason (Optional) 28 | 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: 
-------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - master 5 | 6 | jobs: 7 | deploy: 8 | runs-on: ubuntu-latest 9 | name: deploy to gh-pages 10 | steps: 11 | # To use this repository's private action, you must check out the repository 12 | - name: Checkout 13 | uses: actions/checkout@v2 14 | - name: Deploy 15 | env: 16 | ACTIONS_DEPLOY_KEY: ${{ secrets.ACTIONS_DEPLOY_KEY }} 17 | uses: ./ # Uses an action in the root directory -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | *.log 3 | .idea 4 | README.md.html 5 | .nuxt 6 | # it is not consistent between my mac and linux, lock is updated instead of followed 7 | package-lock.json 8 | # ignore generated file 9 | dist -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | node_js: 3 | - 13.3.0 4 | before_install: 5 | - make check 6 | - make generate 7 | install: 8 | - yarn install # npm ci 9 | script: 10 | - make build -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:alpine 2 | 3 | RUN apk add git rsync make jq openssh-client 4 | RUN yarn global add vuepress 5 | COPY hack/deploy.sh /deploy.sh 6 | 7 | ENTRYPOINT ["/deploy.sh"] 8 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: dev check 2 | dev: 3 | yarn run docs:dev 4 | 5 | NODE=node --no-warnings 6 | 7 | generate: check 8 | cd data && $(NODE) generate.js 9 | 10 | check: 11 | cd data && $(NODE) check.js 12 | 13 | build: 14 | yarn docs:build 15 
| 16 | clean: 17 | rm -rf docs/.vuepress/dist -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Awesome time series database 2 | 3 | [![Build Status](https://travis-ci.org/xephonhq/awesome-time-series-database.svg?branch=master)](https://travis-ci.org/xephonhq/awesome-time-series-database) 4 | [![Netlify Status](https://api.netlify.com/api/v1/badges/2ea12ac4-c2b9-4767-87d6-8ac65ce5ee30/deploy-status)](https://app.netlify.com/sites/awesome-time-series-database/deploys) 5 | 6 | A curated list of time series databases. 7 | For filtering based on language, backend etc. Check out the [Website](https://awesome-time-series-database.netlify.com/). 8 | 9 | 10 | |name|github|status|lang|backend|protocol|query|license| 11 | |:--:|:--:|:--:|:--:|:--:|:--:|:--:|:--:| 12 | |Akumuli|https://github.com/akumuli/Akumuli|maintained|c++|localfs|tcp|text|apache-2.0| 13 | |Arctic|https://github.com/man-group/arctic|active|python|mongodb|tcp|python|lgpl| 14 | |Argus|https://github.com/salesforce/Argus|dead|java|hbase|http|json|bsd| 15 | |Atlas|https://github.com/Netflix/atlas|maintained|scala|memory|http|stack|apache-2.0| 16 | |Beringei|https://github.com/facebookarchive/beringei|dead|c++|memory|thrift|thrift|bsd| 17 | |BigGraphite|https://github.com/criteo/biggraphite|maintained|python|cassandra|http|graphite|apache-2.0| 18 | |Blueflood|https://github.com/rackerlabs/blueflood|dead|java|cassandra,elasticsearch|http|json|apache-2.0| 19 | |BTrDB|https://github.com/BTrDB/btrdb-server|maintained|go|ceph|http|json|gpl| 20 | |Catena|https://github.com/Cistern/catena|dead|go|localfs|http|json|bsd| 21 | |Chronix|https://github.com/ChronixDB/chronix.server|maintained|java|solr|http|json|apache-2.0| 22 | |Citus|https://github.com/citusdata/citus|active|c|postgresql|tcp|sql|agpl-3.0| 23 | 
|ClickHouse|https://github.com/ClickHouse/ClickHouse|active|c++|localfs|tcp|sql|apache-2.0| 24 | |Cortex|https://github.com/cortexproject/cortex|active|go|s3,cassandra,bigtable,dynamodb|http|promql|apache-2.0| 25 | |CrateDB|https://github.com/crate/crate|active|java|elasticsearch,s3|tcp|sql|apache-2.0| 26 | |DalmatinerDB|https://github.com/dalmatinerdb/dalmatinerdb|maintained|erlang|localfs|http|text|mit| 27 | |FiloDB|https://github.com/filodb/FiloDB|active|scala|cassandra|http|promql,metricsql|apache-2.0| 28 | |Flint|https://github.com/twosigma/flint|maintained|scala|spark|http|json|apache-2.0| 29 | |Gnocchi|https://github.com/gnocchixyz/gnocchi|dead|python|localfs|http|json|apache-2.0| 30 | |GridDB|https://github.com/griddb/griddb|active|c++|localfs|jdbc|sql,tql|apache-2.0| 31 | |HawkularMertics|https://github.com/hawkular/hawkular-metrics|maintained|java|cassandra|http|json|apache-2.0| 32 | |Heroic|https://github.com/spotify/heroic|maintained|java|bigtable,cassandra,elasticsearch|http|json|apache-2.0| 33 | |InfluxDB|https://github.com/influxdata/influxdb|active|go|localfs|http|influxql,flux|mit| 34 | |IoTDB|https://github.com/apache/incubator-iotdb|active|java|hdfs,localfs|tcp|sql|apache-2.0| 35 | |IRONdb|https://www.circonus.com/solutions/time-series-database/|active|c|localfs|http|json|proprietary| 36 | |KairosDB|https://github.com/kairosdb/kairosdb|maintained|java|cassandra|http|json|apache-2.0| 37 | |Khronus|https://github.com/khronus/khronus|dead|scala|cassandra|http|json,influxql|apache-2.0| 38 | |LinDB|https://github.com/lindb/lindb|active|go|localfs|http|sql|apache-2.0| 39 | |M3|https://github.com/m3db/m3|active|go|localfs|http|promql,graphite,m3query,sql|apache-2.0| 40 | |Metrictank|https://github.com/grafana/metrictank|active|go|cassandra,elasticsearch|http|graphite|agpl-3.0| 41 | |Newts|https://github.com/OpenNMS/newts/|maintained|java|cassandra|http|json|apache-2.0| 42 | 
|OpenTSDB|https://github.com/OpenTSDB/opentsdb|maintained|java|hbase|http,tcp|json|lgpl| 43 | |Apache Pinot|https://github.com/apache/incubator-pinot|active|java|s3,hdfs,azdls|http|pql|apache-2.0| 44 | |PinusDB|https://github.com/pinusdb/pinusdb|maintained|c++|localfs|tcp|sql|gpl-3.0| 45 | |PipelineDB|https://github.com/pipelinedb/pipelinedb|dead|c|postgresql|tcp|sql|apache-2.0| 46 | |Prometheus|https://github.com/prometheus/prometheus|active|go|localfs|prometheus|promql|apache-2.0| 47 | |QuestDB|https://github.com/questdb/questdb|active|java|localfs|postgres|sql|apache-2.0| 48 | |Seriously|https://github.com/dustin/seriesly|dead|go|localfs|http|json|mit| 49 | |Sidewinder|https://github.com/srotya/sidewinder|dead|java|localfs|tcp|sql|apache-2.0| 50 | |SiriDB|https://github.com/SiriDB/siridb-server|active|c|localfs|http|text|mit| 51 | |TDengine|https://github.com/taosdata/TDengine|active|c|localfs|tcp|sql|agpl-3.0| 52 | |Tgres|https://github.com/tgres/tgres|dead|go|postgresql|tcp|sql|apache-2.0| 53 | |Thanos|https://github.com/thanos-io/thanos|active|go|localfs,s3|http,grpc|promql|apache-2.0| 54 | |Timebala|https://github.com/mattbostock/timbala|dead|go|localfs|http|promql|apache-2.0| 55 | |Timely|https://github.com/NationalSecurityAgency/timely|maintained|java|hdfs,accumulo|http|json|apache-2.0| 56 | |TimescaleDB|https://github.com/timescale/timescaledb|active|c|postgresql|postgres|sql|apache-2.0| 57 | |TrailDB|https://github.com/traildb/traildb|dead|c|localfs|embed|python|mit| 58 | |tsdb-layer|https://github.com/richardartoul/tsdb-layer|dead|go|foundationdb|grpc|text|unknown| 59 | |μts|https://github.com/mixer/uts|dead|javascript|memory|embed|json|mit| 60 | |Vaultaire|https://github.com/afcowie/vaultaire|dead|haskell|ceph|http|json|unknown| 61 | |VictoriaMetrics|https://github.com/VictoriaMetrics/VictoriaMetrics|active|go|localfs|http|promql,metricsql|apache-2.0| 62 | |Vulcan|https://github.com/digitalocean/vulcan|dead|go|cassandra|http|promql|apache-2.0| 63 | 
|Warp10|https://github.com/senx/warp10-platform|active|java|leveldb,hbase|http|warpscript|apache-2.0| 64 | |Xephon-K|https://github.com/xephonhq/xephon-k|dead|go|cassandra,localfs|http,grpc|json|mit| 65 | 66 | 67 | 68 | ## Acknowledgement and Alternatives 69 | 70 | - [dbdb.io](https://github.com/cmu-db/dbdb.io) A database for all the databases by [@apavlo](https://github.com/apavlo) from [CMU DB](https://db.cs.cmu.edu/) 71 | - [awesome big data](https://github.com/onurakpolat/awesome-bigdata#time-series-databases) 72 | - [List of Time Series Databases](https://misfra.me/2016/04/09/tsdb-list/) by [Preetam Jinka](https://twitter.com/PreetamJinka) 73 | - [awesome db](https://github.com/numetriclabz/awesome-db) 74 | - [Ultimate-TSDB-Comparison](https://tsdbbench.github.io/Ultimate-TSDB-Comparison/) 75 | - [Github topic: timeseries-database](https://github.com/topics/timeseries-database) 76 | - [Open Source Time Series DB Comparison](https://docs.google.com/spreadsheets/d/1sMQe9oOKhMhIVw9WmuCEWdPtAoccJ4a-IuZv4fXDHxM/edit#gid=0) 77 | 78 | ## License 79 | 80 | [![CC0](http://i.creativecommons.org/p/zero/1.0/88x31.png)](http://creativecommons.org/publicdomain/zero/1.0/) 81 | 82 | To the extent possible under law, [Pinglei Guo](https://github.com/at15) has waived all copyright and related or neighboring rights to this work. 
import { databases, databaseSchema } from './database/index.js'

// checks if all the properties defined in schema are presented and the type is correct.
/**
 * Validates database entries against the schema: every schema column must be
 * present with the declared type ('value' => string, 'array' => array), and
 * enumerated columns must only use allowed values.
 *
 * Errors are counted and logged rather than thrown so a single run reports
 * every problem instead of stopping at the first one.
 *
 * @param {Array<Object>} dbs - database entries to validate (defaults to all imported databases)
 * @param {Array<Object>} schema - column definitions: { name, type, values? } (defaults to imported schema)
 * @returns {number} the number of validation errors found
 */
function checkDatabases (dbs = databases, schema = databaseSchema) {
  // TODO: ls /data/databases and make sure all databases are imported

  // TODO: how to handle error in js? errs++ and console.error is definitely not the right way
  let errs = 0
  for (const db of dbs) {
    if (!db.name) {
      errs++
      console.error('name is required for ', db)
      continue
    }

    // TODO: check for docs/database/{name}.md

    const name = db.name
    for (const col of schema) {
      const val = db[col.name]
      if (!val) {
        errs++
        console.error('%s does not have property', name, col.name)
        continue
      }
      switch (col.type) {
        case 'value':
          if (typeof val !== 'string') {
            errs++
            console.error('%s has wrong property type for %s want string got %s', name, col.name, typeof val)
            break
          }
          if (col.values && !col.values.includes(val)) {
            errs++
            console.error('%s has invalid value for %s got %s want one of %s', name, col.name, val, col.values)
          }
          break
        case 'array':
          if (!Array.isArray(val)) {
            errs++
            console.error('%s has wrong property type for %s want array got %s', name, col.name, typeof val)
            break
          }
          if (col.values) {
            for (const v of val) {
              if (!col.values.includes(v)) {
                errs++
                // FIX: report the offending element v, not the whole array val.
                console.error('%s has invalid value for %s got %s want one of %s', name, col.name, v, col.values)
              }
            }
          }
          break
        default:
          errs++
          console.error('unknown column type', col.type)
      }
    }
  }
  return errs
}

// Entry point: exits non-zero when any check fails so CI (`make check`) fails.
function check () {
  const errs = checkDatabases()
  if (errs !== 0) {
    console.error('got %d errors from checkDatabases', errs)
    process.exit(1)
  }
}

check()
// Netflix Atlas: an in-memory dimensional time-series backend queried over
// HTTP with the Atlas stack language.
const atlas = {
  name: 'atlas',
  displayName: 'Atlas',
  website: 'https://github.com/Netflix/atlas',
  github: 'https://github.com/Netflix/atlas',
  status: 'maintained',
  lang: 'scala',
  backend: ['memory'],
  protocol: ['http'],
  query: ['stack'],
  license: 'apache-2.0',
  author: ['brharrington'],
  contributedBy: ['at15']
}

export { atlas }
// Rackspace Blueflood: Cassandra-backed metrics store with Elasticsearch for
// metric discovery; project is no longer developed.
const blueflood = {
  name: 'blueflood',
  displayName: 'Blueflood',
  website: 'http://blueflood.io/',
  github: 'https://github.com/rackerlabs/blueflood',
  status: 'dead',
  lang: 'java',
  backend: ['cassandra', 'elasticsearch'],
  protocol: ['http'],
  query: ['json'],
  license: 'apache-2.0',
  author: ['izrik', 'gdusbabek'],
  contributedBy: ['at15']
}

export { blueflood }
// Chronix: time-series storage built on Apache Solr.
const chronix = {
  name: 'chronix',
  displayName: 'Chronix',
  website: 'http://www.chronix.io/',
  github: 'https://github.com/ChronixDB/chronix.server',
  status: 'maintained',
  lang: 'java',
  backend: ['solr'],
  protocol: ['http'],
  query: ['json'],
  license: 'apache-2.0',
  author: ['FlorianLautenschlager'],
  contributedBy: ['at15']
}

export { chronix }
// Cortex: horizontally scalable Prometheus-compatible store supporting
// multiple object/NoSQL storage backends.
const cortex = {
  name: 'cortex',
  displayName: 'Cortex',
  website: 'https://cortexmetrics.io/',
  github: 'https://github.com/cortexproject/cortex',
  status: 'active',
  lang: 'go',
  backend: ['s3', 'cassandra', 'bigtable', 'dynamodb'],
  protocol: ['http'],
  query: ['promql'],
  license: 'apache-2.0',
  author: ['tomwilkie', 'bboreham'],
  contributedBy: ['at15']
}

export { cortex }
// FiloDB: Prometheus-compatible time-series store on Cassandra.
const filodb = {
  name: 'filodb',
  displayName: 'FiloDB',
  website: 'https://github.com/filodb/FiloDB',
  github: 'https://github.com/filodb/FiloDB',
  status: 'active',
  lang: 'scala',
  backend: ['cassandra'],
  protocol: ['http'], // NOTE: ingest is actually the InfluxDB line protocol
  query: ['promql', 'metricsql'],
  license: 'apache-2.0',
  author: ['velvia', 'vishramachandran', 'broneill'],
  contributedBy: ['at15']
}

export { filodb }
// Hawkular Metrics: the Cassandra-backed metrics store of Red Hat's Hawkular
// monitoring suite.
// FIX: displayName was misspelled 'HawkularMertics'; the typo propagated into
// the generated README table and is corrected on the next `make generate`.
let hawkularmetrics = {
  name: 'hawkularmetrics',
  displayName: 'HawkularMetrics',
  website: 'http://www.hawkular.org/',
  github: 'https://github.com/hawkular/hawkular-metrics',
  status: 'maintained',
  lang: 'java',
  backend: ['cassandra'],
  protocol: ['http'],
  query: ['json'],
  license: 'apache-2.0',
  author: [
    'tsegismont'
  ],
  contributedBy: [
    'at15'
  ]
}

export { hawkularmetrics }
from './flint.js' 20 | import { gnocchi } from './gnocchi.js' 21 | import { griddb } from './griddb.js' 22 | import { hawkularmetrics } from './hawkularmetrics.js' 23 | import { heroic } from './heroic.js' 24 | import { influxdb } from './influxdb.js' 25 | import { iotdb } from './iotdb.js' 26 | import { irondb } from './irondb.js' 27 | import { kairosdb } from './kairosdb.js' 28 | import { khronus } from './khronus.js' 29 | import { lindb } from './lindb.js' 30 | import { m3db } from './m3db.js' 31 | import { metrictank } from './metrictank.js' 32 | import { newts } from './newts.js' 33 | import { opentsdb } from './opentsdb.js' 34 | import { pinot } from './pinot.js' 35 | import { pinusdb } from './pinusdb.js' 36 | import { pipelinedb } from './pipelinedb.js' 37 | import { prometheus } from './prometheus.js' 38 | import { questdb } from './questdb.js' 39 | import { seriously } from './seriously.js' 40 | import { sidewinder } from './sidewinder.js' 41 | import { siridb } from './siridb.js' 42 | import { tdengine } from './tdengine.js' 43 | import { tgres } from './tgres.js' 44 | import { thanos } from './thanos.js' 45 | import { timbala } from './timbala.js' 46 | import { timely } from './timely.js' 47 | import { timescaldb } from './timescaledb.js' 48 | import { traildb } from './traildb.js' 49 | import { tsdblayer } from './tsdblayer.js' 50 | import { uts } from './uts.js' 51 | import { vaultaire } from './vaultaire.js' 52 | import { victoriametrics } from './victoriametrics.js' 53 | import { vulcan } from './vulcan.js' 54 | import { warp10 } from './warp10.js' 55 | import { xephonk } from './xephonk.js' 56 | 57 | let databases = [ 58 | akumuli, 59 | arctic, 60 | argus, 61 | atlas, 62 | beringei, 63 | biggraphite, 64 | blueflood, 65 | btrdb, 66 | catena, 67 | chronix, 68 | citus, 69 | clickhouse, 70 | cortex, 71 | crate, 72 | dalmatinerdb, 73 | filodb, 74 | flint, 75 | gnocchi, 76 | griddb, 77 | hawkularmetrics, 78 | heroic, 79 | influxdb, 80 | iotdb, 81 | 
irondb, 82 | kairosdb, 83 | khronus, 84 | lindb, 85 | m3db, 86 | metrictank, 87 | newts, 88 | opentsdb, 89 | pinot, 90 | pinusdb, 91 | pipelinedb, 92 | prometheus, 93 | questdb, 94 | seriously, 95 | sidewinder, 96 | siridb, 97 | tdengine, 98 | tgres, 99 | thanos, 100 | timbala, 101 | timely, 102 | timescaldb, 103 | traildb, 104 | tsdblayer, 105 | uts, 106 | vaultaire, 107 | victoriametrics, 108 | vulcan, 109 | warp10, 110 | xephonk, 111 | ] 112 | 113 | export { databaseSchema, databases } -------------------------------------------------------------------------------- /data/database/influxdb.js: -------------------------------------------------------------------------------- 1 | let influxdb = { 2 | name: 'influxdb', 3 | displayName: 'InfluxDB', 4 | website: 'https://www.influxdata.com/products/influxdb-overview/', 5 | github: 'https://github.com/influxdata/influxdb', 6 | status: 'active', 7 | lang: 'go', 8 | backend: [ 9 | 'localfs' 10 | ], 11 | protocol: [ 12 | 'http' 13 | ], 14 | query: [ 15 | 'influxql', 16 | 'flux' 17 | ], 18 | license: 'mit', 19 | author: [ 20 | 'pauldix', // founder 21 | 'benbjohnson', // author of boltdb 22 | 'jwilder', // now at azure 23 | 'otoolep', // now at google working on stack driver, http://www.philipotoole.com/resume/ 24 | ], 25 | contributedBy: [ 26 | 'at15' 27 | ] 28 | } 29 | 30 | export { influxdb } -------------------------------------------------------------------------------- /data/database/iotdb.js: -------------------------------------------------------------------------------- 1 | let iotdb = { 2 | name: 'iotdb', 3 | displayName: 'IoTDB', 4 | website: 'https://iotdb.apache.org/', 5 | github: 'https://github.com/apache/incubator-iotdb', 6 | status: 'active', 7 | lang: 'java', 8 | backend: ['hdfs', 'localfs'], 9 | protocol: ['tcp'], // TODO: not sure about this ... 
10 | query: ['sql'], 11 | license: 'apache-2.0', 12 | author: [ 13 | 'qiaojialin' // thu https://blog.csdn.net/qiaojialin 14 | ], 15 | contributedBy: [ 16 | 'at15' 17 | ] 18 | } 19 | 20 | export { iotdb } -------------------------------------------------------------------------------- /data/database/irondb.js: -------------------------------------------------------------------------------- 1 | let irondb = { 2 | name: 'irondb', 3 | displayName: 'IRONdb', 4 | website: 'https://www.circonus.com/solutions/time-series-database/', 5 | github: 'https://www.circonus.com/solutions/time-series-database/', // TODO: need a better way for close source projects 6 | status: 'active', 7 | lang: 'c', 8 | backend: ['localfs'], 9 | protocol: ['http'], 10 | query: ['json'], 11 | license: 'proprietary', 12 | author: [], 13 | contributedBy: [ 14 | 'at15', 15 | 'kamelkev' 16 | ] 17 | } 18 | 19 | export { irondb } -------------------------------------------------------------------------------- /data/database/kairosdb.js: -------------------------------------------------------------------------------- 1 | let kairosdb = { 2 | name: 'kairosdb', 3 | displayName: 'KairosDB', 4 | website: 'https://kairosdb.github.io/', 5 | github: 'https://github.com/kairosdb/kairosdb', 6 | status: 'maintained', 7 | lang: 'java', 8 | backend: [ 9 | 'cassandra' 10 | ], 11 | protocol: [ 12 | 'http' 13 | ], 14 | query: [ 15 | 'json' 16 | ], 17 | license: 'apache-2.0', 18 | author: [ 19 | 'brianhks' 20 | ], 21 | contributedBy: [ 22 | 'at15' 23 | ] 24 | } 25 | 26 | export { kairosdb } -------------------------------------------------------------------------------- /data/database/khronus.js: -------------------------------------------------------------------------------- 1 | let khronus = { 2 | name: 'khronus', 3 | displayName: 'Khronus', 4 | website: 'https://github.com/khronus/khronus', 5 | github: 'https://github.com/khronus/khronus', 6 | status: 'dead', 7 | lang: 'scala', 8 | backend: ['cassandra'], 9 | // 
TODO: double check protocol and query 10 | protocol: ['http'], 11 | query: ['json', 'influxql'], 12 | license: 'apache-2.0', 13 | author: [ 14 | 'demianberjman', 15 | 'pablosmedina' 16 | ], 17 | contributedBy: [ 18 | 'at15' 19 | ] 20 | } 21 | 22 | export { khronus } 23 | -------------------------------------------------------------------------------- /data/database/lindb.js: -------------------------------------------------------------------------------- 1 | let lindb = { 2 | name: 'lindb', 3 | displayName: 'LinDB', 4 | website: 'https://lindb.io/', 5 | github: 'https://github.com/lindb/lindb', 6 | status: 'active', 7 | lang: 'go', 8 | backend: ['localfs'], 9 | protocol: ['http'], 10 | query: ['sql'], 11 | license: 'apache-2.0', 12 | author: [ 13 | 'stone1100', 14 | 'CodingCrush' 15 | ], 16 | contributedBy: [ 17 | 'at15' 18 | ] 19 | } 20 | 21 | export { lindb } -------------------------------------------------------------------------------- /data/database/m3db.js: -------------------------------------------------------------------------------- 1 | let m3db = { 2 | name: 'm3db', 3 | displayName: 'M3', 4 | website: 'https://www.m3db.io/', 5 | github: 'https://github.com/m3db/m3', 6 | status: 'active', 7 | lang: 'go', 8 | backend: ['localfs'], 9 | protocol: ['http'], 10 | query: [ 11 | 'promql', 12 | 'graphite', 13 | 'm3query', // http://m3db.github.io/m3/how_to/query/ 14 | 'sql' // TODO: seems sql is not in open source m3? but https://chronosphere.io/product has it? 15 | ], 16 | license: 'apache-2.0', 17 | author: [ 18 | 'xichen2020', // used to be top contributor ... 
19 | 'robskillington', // now at https://chronosphere.io/ 20 | 'schallert', // now at https://chronosphere.io/ 21 | 'richardartoul', // wrote tsdb-layer (using foundation db) 22 | ], 23 | contributedBy: [ 24 | 'at15' 25 | ] 26 | } 27 | 28 | export { m3db } -------------------------------------------------------------------------------- /data/database/metrictank.js: -------------------------------------------------------------------------------- 1 | let metrictank = { 2 | name: 'metrictank', 3 | displayName: 'Metrictank', 4 | website: 'https://github.com/grafana/metrictank', 5 | github: 'https://github.com/grafana/metrictank', 6 | status: 'active', 7 | lang: 'go', 8 | backend: ['cassandra', 'elasticsearch'], 9 | protocol: ['http'], 10 | query: ['graphite'], 11 | license: 'agpl-3.0', 12 | author: [ 13 | 'Dieterbe', 14 | 'replay' 15 | ], 16 | contributedBy: [ 17 | 'at15' 18 | ] 19 | } 20 | 21 | export { metrictank } -------------------------------------------------------------------------------- /data/database/newts.js: -------------------------------------------------------------------------------- 1 | let newts = { 2 | name: 'newts', 3 | displayName: 'Newts', 4 | website: 'http://opennms.github.io/newts/', 5 | github: 'https://github.com/OpenNMS/newts/', 6 | status: 'maintained', 7 | lang: 'java', 8 | backend: ['cassandra'], 9 | // TODO: double check protocol and query 10 | protocol: ['http'], 11 | query: ['json'], 12 | license: 'apache-2.0', 13 | author: [ 14 | 'eevans', 15 | 'j-white' 16 | ], 17 | contributedBy: [ 18 | 'at15' 19 | ] 20 | } 21 | 22 | export { newts } -------------------------------------------------------------------------------- /data/database/opentsdb.js: -------------------------------------------------------------------------------- 1 | let opentsdb = { 2 | name: 'opentsdb', 3 | displayName: 'OpenTSDB', 4 | website: 'http://opentsdb.net/', 5 | github: 'https://github.com/OpenTSDB/opentsdb', 6 | status: 'maintained', 7 | lang: 'java', 8 | 
backend: [ 9 | 'hbase' 10 | ], 11 | protocol: [ 12 | 'http', 13 | 'tcp' 14 | ], 15 | query: [ 16 | 'json' 17 | ], 18 | license: 'lgpl', 19 | author: [ 20 | 'manolama', 21 | 'tsuna' 22 | ], 23 | contributedBy: [ 24 | 'at15' 25 | ] 26 | } 27 | 28 | export { opentsdb } -------------------------------------------------------------------------------- /data/database/pinot.js: -------------------------------------------------------------------------------- 1 | let pinot = { 2 | name: 'pinot', 3 | displayName: 'Apache Pinot', 4 | website: 'https://pinot.apache.org/', 5 | github: 'https://github.com/apache/incubator-pinot', 6 | status: 'active', 7 | lang: 'java', 8 | backend: ['s3', 'hdfs', 'azdls'], // https://azure.microsoft.com/en-us/services/storage/data-lake-storage/ 9 | protocol: ['http'], 10 | query: ['pql'], // https://pinot.readthedocs.io/en/latest/pql_examples.html 11 | license: 'apache-2.0', 12 | author: [ 13 | 'Jackie-Jiang' 14 | ], 15 | contributedBy: [ 16 | 'at15' 17 | ] 18 | } 19 | 20 | export { pinot } -------------------------------------------------------------------------------- /data/database/pinusdb.js: -------------------------------------------------------------------------------- 1 | let pinusdb = { 2 | name: 'pinusdb', 3 | displayName: 'PinusDB', 4 | website: 'http://www.pinusdb.cn/', 5 | github: 'https://github.com/pinusdb/pinusdb', 6 | status: 'maintained', 7 | lang: 'c++', 8 | backend: ['localfs'], 9 | protocol: ['tcp'], // TODO: double check 10 | query: ['sql'], 11 | license: 'gpl-3.0', 12 | author: [ 13 | 'zhangqhn' 14 | ], 15 | contributedBy: [ 16 | 'at15' 17 | ] 18 | } 19 | 20 | export { pinusdb } -------------------------------------------------------------------------------- /data/database/pipelinedb.js: -------------------------------------------------------------------------------- 1 | let pipelinedb = { 2 | name: 'pipelinedb', 3 | displayName: 'PipelineDB', 4 | website: 'https://github.com/pipelinedb/pipelinedb', 5 | github: 
'https://github.com/pipelinedb/pipelinedb', 6 | status: 'dead', 7 | lang: 'c', 8 | backend: ['postgresql'], 9 | protocol: ['tcp'], 10 | query: ['sql'], 11 | license: 'apache-2.0', 12 | author: [ 13 | 'usmanm', 14 | 'derekjn' 15 | ], 16 | contributedBy: [ 17 | 'at15' 18 | ] 19 | } 20 | 21 | export { pipelinedb } -------------------------------------------------------------------------------- /data/database/prometheus.js: -------------------------------------------------------------------------------- 1 | let prometheus = { 2 | name: 'prometheus', 3 | displayName: 'Prometheus', 4 | website: 'https://prometheus.io/', 5 | github: 'https://github.com/prometheus/prometheus', 6 | status: 'active', 7 | lang: 'go', 8 | backend: ['localfs'], 9 | // TODO: rethink what we should put for protocol, ingestion and query is different 10 | protocol: ['prometheus'], 11 | query: ['promql'], 12 | license: 'apache-2.0', 13 | author: [ 14 | 'fabxc', // wrote the new tsdb engine, now at google, heck ... everyone is going to google ... 
15 | 'juliusv', 16 | 'brian-brazil', // RobustPerception 17 | 'beorn7', // now at grafana 18 | 'tomwilkie', // also grafana, worked on cortex 19 | ], 20 | contributedBy: [ 21 | 'at15' 22 | ] 23 | } 24 | 25 | export { prometheus } -------------------------------------------------------------------------------- /data/database/questdb.js: -------------------------------------------------------------------------------- 1 | let questdb = { 2 | name: 'questdb', 3 | displayName: 'QuestDB', 4 | website: 'https://questdb.io/', 5 | github: 'https://github.com/questdb/questdb', 6 | status: 'active', 7 | lang: 'java', // TODO: it also uses cpp for SIMD 8 | backend: ['localfs'], 9 | protocol: ['postgres'], 10 | query: ['sql'], 11 | license: 'apache-2.0', 12 | author: [ 13 | 'bluestreak01', // 2,287 commits out of 2,582 as of 2020-08-05 14 | 'mpsq' 15 | ], 16 | contributedBy: [ 17 | 'asafm', 18 | 'killme2008', 19 | 'at15' 20 | ] 21 | } 22 | 23 | export {questdb} -------------------------------------------------------------------------------- /data/database/schema.js: -------------------------------------------------------------------------------- 1 | // TODO: add the following properties 2 | // - logo url 3 | // - data model 4 | // - compression 5 | // - format, row. column, b+, lsm ... (how to describe them ... 6 | // - distributed 7 | // - cloud provider, i.e. 
some host solution only runs on one platform 8 | // - company, some are backed by startup (influxdb), some has people start a new startup from it (m3) 9 | 10 | // TODO: clean up md generator logic, we now have 11 | // - md: false to exclude the column in readme 12 | // - mdname: to use another name as generated md table header 13 | // And we need things like [displayName](github) when formatting README 14 | 15 | let databaseSchema = [ 16 | { 17 | name: 'name', 18 | type: 'value', 19 | md: false, 20 | }, 21 | { 22 | name: 'displayName', 23 | type: 'value', 24 | mdname: 'name', 25 | }, 26 | { 27 | name: 'website', 28 | type: 'value', 29 | md: false, 30 | }, 31 | { 32 | name: 'github', 33 | type: 'value' 34 | }, 35 | { 36 | name: 'status', 37 | type: 'value', 38 | values: [ 39 | 'active', 40 | 'maintained', 41 | 'dead' 42 | ] 43 | }, 44 | { 45 | name: 'lang', 46 | type: 'value', 47 | values: [ 48 | 'c', 49 | 'c++', 50 | 'erlang', 51 | 'go', 52 | 'haskell', 53 | 'java', 54 | 'javascript', 55 | 'python', 56 | 'rust', 57 | 'scala' 58 | ] 59 | }, 60 | { 61 | name: 'backend', 62 | type: 'array', 63 | values: [ 64 | 'bigtable', 65 | 'cassandra', 66 | 'ceph', 67 | 'elasticsearch', 68 | 'hbase', 69 | 'memory', 70 | 'localfs', 71 | 's3', 72 | 'dynamodb', 73 | 'spark', 74 | 'postgresql', 75 | 'hdfs', 76 | 'azdls', // https://azure.microsoft.com/en-us/services/storage/data-lake-storage/ 77 | 'solr', 78 | 'mongodb', 79 | 'accumulo', // bigtable like on HDFS https://accumulo.apache.org/docs/2.x/getting-started/design 80 | 'leveldb', 81 | 'foundationdb', 82 | ] 83 | }, 84 | { 85 | name: 'protocol', 86 | type: 'array', 87 | values: [ 88 | 'embed', 89 | 'prometheus', 90 | 'http', 91 | 'tcp', 92 | 'udp', 93 | 'grpc', 94 | 'thrift', 95 | 'postgres', 96 | 'jdbc' 97 | ] 98 | }, 99 | { 100 | name: 'query', 101 | type: 'array', 102 | values: [ 103 | 'sql', 104 | 'text', 105 | 'json', 106 | 'graphite', 107 | 'promql', 108 | 'influxql', 109 | 'flux', 110 | 'stack', // 
https://github.com/Netflix/atlas/wiki/Stack-Language 111 | 'thrift', 112 | 'pql', // https://pinot.readthedocs.io/en/latest/pql_examples.html 113 | 'm3query', // http://m3db.github.io/m3/how_to/query/ 114 | 'python', 115 | 'metricsql', // https://github.com/VictoriaMetrics/VictoriaMetrics/wiki/ExtendedPromQL 116 | 'warpscript', // https://warp10.io/content/03_Documentation/04_WarpScript/01_Concepts 117 | 'tql' // https://github.com/griddb/docs-en/blob/master/manuals/GridDB_TQL_Reference.md 118 | ] 119 | }, 120 | { 121 | name: 'license', 122 | type: 'value', 123 | values: [ 124 | 'apache-2.0', 125 | 'gpl', 126 | 'lgpl', 127 | 'mit', 128 | 'bsd', 129 | 'agpl-3.0', 130 | 'gpl-3.0', 131 | 'unknown', 132 | 'proprietary' 133 | ] 134 | }, 135 | { 136 | name: 'author', 137 | type: 'array', 138 | md: false, 139 | }, 140 | { 141 | name: 'contributedBy', 142 | type: 'array', 143 | md: false 144 | } 145 | ] 146 | 147 | export { databaseSchema } -------------------------------------------------------------------------------- /data/database/seriously.js: -------------------------------------------------------------------------------- 1 | let seriously = { 2 | name: 'seriously', 3 | displayName: 'Seriously', 4 | website: 'https://github.com/dustin/seriesly', 5 | github: 'https://github.com/dustin/seriesly', 6 | status: 'dead', 7 | lang: 'go', 8 | backend: ['localfs'], 9 | // TODO: double check protocol and query 10 | protocol: ['http'], 11 | query: ['json'], 12 | license: 'mit', 13 | author: [ 14 | 'dustin' 15 | ], 16 | contributedBy: [ 17 | 'at15' 18 | ] 19 | } 20 | 21 | export { seriously } -------------------------------------------------------------------------------- /data/database/sidewinder.js: -------------------------------------------------------------------------------- 1 | let sidewinder = { 2 | name: 'sidewinder', 3 | displayName: 'Sidewinder', 4 | website: 'https://github.com/srotya/sidewinder', 5 | github: 'https://github.com/srotya/sidewinder', 6 | status: 'dead', 
7 | lang: 'java', 8 | backend: ['localfs'], 9 | protocol: ['tcp'], 10 | query: ['sql'], 11 | license: 'apache-2.0', 12 | author: [ 13 | 'ambud' 14 | ], 15 | contributedBy: [ 16 | 'at15' 17 | ] 18 | } 19 | 20 | export { sidewinder } -------------------------------------------------------------------------------- /data/database/siridb.js: -------------------------------------------------------------------------------- 1 | let siridb = { 2 | name: 'siridb', 3 | displayName: 'SiriDB', 4 | website: 'http://siridb.net/', 5 | github: 'https://github.com/SiriDB/siridb-server', 6 | status: 'active', 7 | lang: 'c', 8 | backend: ['localfs'], 9 | protocol: ['http'], 10 | query: ['text'], // TODO: it has its own query language https://siridb.net/documentation/ 11 | license: 'mit', 12 | author: [ 13 | 'joente' 14 | ], 15 | contributedBy: [ 16 | 'at15' 17 | ] 18 | } 19 | 20 | export { siridb } -------------------------------------------------------------------------------- /data/database/tdengine.js: -------------------------------------------------------------------------------- 1 | let tdengine = { 2 | name: 'tdengine', 3 | displayName: 'TDengine', 4 | website: 'https://www.taosdata.com/', 5 | github: 'https://github.com/taosdata/TDengine', 6 | status: 'active', 7 | lang: 'c', 8 | backend: ['localfs'], 9 | protocol: ['tcp'], 10 | query: ['sql'], 11 | license: 'agpl-3.0', 12 | author: [ 13 | 'guanshengliang', 14 | 'hjxilinx', 15 | 'fangpanpan', 16 | 'hzcheng' 17 | ], 18 | contributedBy: [ 19 | 'at15', 20 | 'liu0x54' 21 | ] 22 | } 23 | 24 | export { tdengine } -------------------------------------------------------------------------------- /data/database/tgres.js: -------------------------------------------------------------------------------- 1 | let tgres = { 2 | name: 'tgres', 3 | displayName: 'Tgres', 4 | website: 'https://github.com/tgres/tgres', 5 | github: 'https://github.com/tgres/tgres', 6 | status: 'dead', 7 | lang: 'go', 8 | backend: ['postgresql'], 9 | protocol: 
['tcp'], 10 | query: ['sql'], 11 | license: 'apache-2.0', 12 | author: [ 13 | 'grisha' 14 | ], 15 | contributedBy: [ 16 | 'deniszh', // https://www.complexity.engineer/2017/03/tsdb-on-postgresql-but-why.html 17 | 'at15' 18 | ] 19 | } 20 | 21 | export { tgres } -------------------------------------------------------------------------------- /data/database/thanos.js: -------------------------------------------------------------------------------- 1 | let thanos = { 2 | name: 'thanos', 3 | displayName: 'Thanos', 4 | website: 'https://thanos.io/', 5 | github: 'https://github.com/thanos-io/thanos', 6 | status: 'active', 7 | lang: 'go', 8 | backend: ['localfs', 's3'], 9 | protocol: ['http', 'grpc'], 10 | query: ['promql'], 11 | license: 'apache-2.0', 12 | author: [ 13 | 'bwplotka', 14 | 'fabxc' 15 | ], 16 | contributedBy: [ 17 | 'at15' 18 | ] 19 | } 20 | 21 | export { thanos } -------------------------------------------------------------------------------- /data/database/timbala.js: -------------------------------------------------------------------------------- 1 | let timbala = { 2 | name: 'timbala', 3 | displayName: 'Timebala', 4 | website: 'https://github.com/mattbostock/timbala', 5 | github: 'https://github.com/mattbostock/timbala', 6 | status: 'dead', 7 | lang: 'go', 8 | backend: ['localfs'], 9 | protocol: ['http'], 10 | query: ['promql'], 11 | license: 'apache-2.0', 12 | author: [ 13 | 'mattbostock' 14 | ], 15 | contributedBy: [ 16 | 'at15' 17 | ] 18 | } 19 | 20 | export { timbala } -------------------------------------------------------------------------------- /data/database/timely.js: -------------------------------------------------------------------------------- 1 | let timely = { 2 | name: 'timely', 3 | displayName: 'Timely', 4 | website: 'https://code.nsa.gov/timely/', 5 | github: 'https://github.com/NationalSecurityAgency/timely', 6 | status: 'maintained', 7 | lang: 'java', 8 | backend: ['hdfs', 'accumulo'], 9 | protocol: ['http'], 10 | query: ['json'], 11 
| license: 'apache-2.0', 12 | author: [ 13 | 'billoley' 14 | ], 15 | contributedBy: [ 16 | 'at15' 17 | ] 18 | } 19 | 20 | export { timely } -------------------------------------------------------------------------------- /data/database/timescaledb.js: -------------------------------------------------------------------------------- 1 | let timescaldb = { 2 | name: 'timescaledb', 3 | displayName: 'TimescaleDB', 4 | website: 'https://www.timescale.com/', 5 | github: 'https://github.com/timescale/timescaledb', 6 | status: 'active', 7 | lang: 'c', 8 | backend: ['postgresql'], 9 | // TODO: rethink what we should put for protocol 10 | protocol: ['postgres'], 11 | query: ['sql'], 12 | license: 'apache-2.0', // TODO: it's dual licensed ... the tsl folder is licensed under its own license https://github.com/timescale/timescaledb/blob/master/LICENSE 13 | author: [ 14 | 'cevian', // also from princeton, student of the co-founder 15 | 'mfreed', // co-founder 16 | 'akulkarni', // co-founder 17 | ], 18 | contributedBy: [ 19 | 'deniszh', // https://www.complexity.engineer/2017/03/tsdb-on-postgresql-but-why.html 20 | 'at15' 21 | ] 22 | } 23 | 24 | export { timescaldb } -------------------------------------------------------------------------------- /data/database/traildb.js: -------------------------------------------------------------------------------- 1 | let traildb = { 2 | name: 'traildb', 3 | displayName: 'TrailDB', 4 | website: 'http://traildb.io/', 5 | github: 'https://github.com/traildb/traildb', 6 | status: 'dead', 7 | lang: 'c', 8 | backend: ['localfs'], 9 | protocol: ['embed'], 10 | query: ['python'], 11 | license: 'mit', 12 | author: [ 13 | 'jflatow', 14 | 'Noeda' 15 | ], 16 | contributedBy: [ 17 | 'at15' 18 | ] 19 | } 20 | 21 | export { traildb } -------------------------------------------------------------------------------- /data/database/tsdblayer.js: -------------------------------------------------------------------------------- 1 | let tsdblayer = { 2 | name: 
'tsdblayer', 3 | displayName: 'tsdb-layer', 4 | website: 'https://github.com/richardartoul/tsdb-layer', 5 | github: 'https://github.com/richardartoul/tsdb-layer', 6 | status: 'dead', // TODO: or maybe don't need improvement, it's an experimental project w/ very good doc and why and how (including m3db) 7 | lang: 'go', 8 | backend: ['foundationdb'], 9 | protocol: ['grpc'], 10 | query: ['text'], // it's simply by id ... 11 | license: 'unknown', 12 | author: [ 13 | 'richardartoul' 14 | ], 15 | contributedBy: [ 16 | 'at15' 17 | ] 18 | } 19 | 20 | export { tsdblayer } -------------------------------------------------------------------------------- /data/database/uts.js: -------------------------------------------------------------------------------- 1 | let uts = { 2 | name: 'uts', 3 | displayName: 'μts', 4 | website: 'https://github.com/mixer/uts', 5 | github: 'https://github.com/mixer/uts', 6 | status: 'dead', 7 | lang: 'javascript', 8 | backend: ['memory'], 9 | protocol: ['embed'], 10 | query: ['json'], // hmm ... 
11 | license: 'mit', 12 | author: [ 13 | 'connor4312' 14 | ], 15 | contributedBy: [ 16 | 'barakplasma' 17 | ] 18 | } 19 | 20 | export { uts } -------------------------------------------------------------------------------- /data/database/vaultaire.js: -------------------------------------------------------------------------------- 1 | let vaultaire = { 2 | name: 'vaultaire', 3 | displayName: 'Vaultaire', 4 | website: 'https://github.com/afcowie/vaultaire', 5 | github: 'https://github.com/afcowie/vaultaire', 6 | status: 'dead', 7 | lang: 'haskell', 8 | backend: ['ceph'], 9 | // TODO: double check protocol and query 10 | protocol: ['http'], 11 | query: ['json'], 12 | license: 'unknown', 13 | author: [ 14 | 'afcowie', 15 | 'christian-marie' 16 | ], 17 | contributedBy: [ 18 | 'at15' 19 | ] 20 | } 21 | 22 | export { vaultaire } -------------------------------------------------------------------------------- /data/database/victoriametrics.js: -------------------------------------------------------------------------------- 1 | let victoriametrics = { 2 | name: 'victoriametrics', 3 | displayName: 'VictoriaMetrics', 4 | website: 'https://victoriametrics.com/', 5 | github: 'https://github.com/VictoriaMetrics/VictoriaMetrics', 6 | status: 'active', 7 | lang: 'go', 8 | backend: ['localfs'], 9 | protocol: ['http'], 10 | query: ['promql', 'metricsql'], 11 | license: 'apache-2.0', 12 | author: [ 13 | 'valyala' 14 | ], 15 | contributedBy: [ 16 | 'valyala' 17 | ] 18 | } 19 | 20 | export { victoriametrics } -------------------------------------------------------------------------------- /data/database/vulcan.js: -------------------------------------------------------------------------------- 1 | let vulcan = { 2 | name: 'vulcan', 3 | displayName: 'Vulcan', 4 | website: 'https://github.com/digitalocean/vulcan', 5 | github: 'https://github.com/digitalocean/vulcan', 6 | status: 'dead', 7 | lang: 'go', 8 | backend: ['cassandra'], 9 | protocol: ['http'], 10 | query: ['promql'], 11 | 
license: 'apache-2.0', 12 | author: [ 13 | 'supershabam', 14 | 'allanliu' // nice avatar 15 | ], 16 | contributedBy: [ 17 | 'at15' 18 | ] 19 | } 20 | 21 | export { vulcan } -------------------------------------------------------------------------------- /data/database/warp10.js: -------------------------------------------------------------------------------- 1 | let warp10 = { 2 | name: 'warp10', 3 | displayName: 'Warp10', 4 | website: 'https://warp10.io/', 5 | github: 'https://github.com/senx/warp10-platform', 6 | status: 'active', 7 | lang: 'java', 8 | backend: ['leveldb', 'hbase'], 9 | protocol: ['http'], 10 | query: ['warpscript'], // https://warp10.io/content/03_Documentation/04_WarpScript/01_Concepts 11 | license: 'apache-2.0', 12 | author: [ 13 | 'hbs', 14 | 'ftence', 15 | ], 16 | contributedBy: [ 17 | 'at15' 18 | ] 19 | } 20 | 21 | export { warp10 } -------------------------------------------------------------------------------- /data/database/xephonk.js: -------------------------------------------------------------------------------- 1 | let xephonk = { 2 | name: 'xephonk', 3 | displayName: 'Xephon-K', 4 | website: 'https://github.com/xephonhq/xephon-k', 5 | github: 'https://github.com/xephonhq/xephon-k', 6 | status: 'dead', 7 | lang: 'go', 8 | backend: [ 9 | 'cassandra', 10 | 'localfs' 11 | ], 12 | protocol: [ 13 | 'http', 14 | 'grpc' 15 | ], 16 | query: [ 17 | 'json' 18 | ], 19 | license: 'mit', 20 | author: [ 21 | 'at15', 22 | ], 23 | contributedBy: [ 24 | 'at15' 25 | ] 26 | } 27 | 28 | export { xephonk } -------------------------------------------------------------------------------- /data/generate.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import { databases, databaseSchema } from './database/index.js' 3 | 4 | function generateReadme () { 5 | let tbl = [] 6 | for (const db of databases) { 7 | let row = [] 8 | for (const col of databaseSchema) { 9 | if (col.md === false) { 10 | 
continue 11 | } 12 | let val = db[col.name] 13 | switch (col.type) { 14 | case 'value': 15 | row.push(val) 16 | break 17 | case 'array': 18 | row.push(val.join(',')) 19 | break 20 | default: 21 | console.error('unknown column type', col.type) 22 | } 23 | } 24 | tbl.push(row) 25 | } 26 | 27 | let mdtbl = '|' 28 | let dots = [] 29 | for (const col of databaseSchema) { 30 | if (col.md === false) { 31 | continue 32 | } 33 | if (col.mdname) { 34 | mdtbl += col.mdname + '|' 35 | } else { 36 | mdtbl += col.name + '|' 37 | } 38 | dots.push(':--:') 39 | } 40 | mdtbl += '\n' 41 | mdtbl += '|' + dots.join('|') + '|\n' 42 | 43 | for (const row of tbl) { 44 | mdtbl += '|' 45 | for (const val of row) { 46 | mdtbl += val + '|' 47 | } 48 | mdtbl += '\n' 49 | } 50 | 51 | // console.log(mdtbl) 52 | 53 | let old = fs.readFileSync('../README.md', 'utf-8') 54 | let regex = /([\w\W]*)/ 55 | let replaced = old.replace(regex, '\n' + mdtbl + '\n') 56 | fs.writeFileSync('../README.md', replaced) 57 | } 58 | 59 | generateReadme() -------------------------------------------------------------------------------- /data/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "module" 3 | } -------------------------------------------------------------------------------- /docs/.vuepress/components/database/detail.vue: -------------------------------------------------------------------------------- 1 | 20 | 21 | 44 | 45 | -------------------------------------------------------------------------------- /docs/.vuepress/components/database/table.vue: -------------------------------------------------------------------------------- 1 | 42 | 43 | 125 | 126 | -------------------------------------------------------------------------------- /docs/.vuepress/config.gh.js: -------------------------------------------------------------------------------- 1 | // NOTE: need to update base for gh-pages 
https://github.com/vuejs/vuepress/blob/master/packages/docs/docs/guide/deploy.md#github-pages-and-travis-ci 2 | 3 | module.exports = { 4 | base: '/awesome-time-series-database/', 5 | title: 'Awesome Time Series Database', 6 | description: 'A curated list of awesome time series databases, benchmarks and papers', 7 | themeConfig: { 8 | repo: 'xephonhq/awesome-time-series-database', 9 | editLinks: true, 10 | docsDir: 'docs', 11 | // nav: [ 12 | // {text: 'Guide', link: '/guide/'}, 13 | // ], 14 | // sidebar: [ 15 | // '/', 16 | // '/get-started.md', 17 | // '/go.md', 18 | // '/k8s.md', 19 | // ] 20 | }, 21 | plugins: [ 22 | '@vuepress/back-to-top', 23 | '@vuepress/last-updated', 24 | [ 25 | '@vuepress/google-analytics', 26 | { 27 | 'ga': 'UA-84338852-2' 28 | } 29 | ] 30 | ] 31 | } -------------------------------------------------------------------------------- /docs/.vuepress/config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | title: 'Awesome Time Series Database', 3 | description: 'A curated list of awesome time series databases, benchmarks and papers', 4 | themeConfig: { 5 | repo: 'xephonhq/awesome-time-series-database', 6 | editLinks: true, 7 | docsDir: 'docs', 8 | // nav: [ 9 | // {text: 'Guide', link: '/guide/'}, 10 | // ], 11 | // sidebar: [ 12 | // '/', 13 | // '/get-started.md', 14 | // '/go.md', 15 | // '/k8s.md', 16 | // ] 17 | }, 18 | plugins: [ 19 | '@vuepress/back-to-top', 20 | '@vuepress/last-updated', 21 | [ 22 | '@vuepress/google-analytics', 23 | { 24 | 'ga': 'UA-84338852-2' 25 | } 26 | ] 27 | ] 28 | } -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # Awesome time series database 2 | 3 | - [Databases](databases.md) 4 | - Benchmarks 5 | - Papers 6 | - Backends 7 | 8 | -------------------------------------------------------------------------------- 
/docs/database/akumuli.md: -------------------------------------------------------------------------------- 1 | # Akumuli 2 | 3 | -------------------------------------------------------------------------------- /docs/database/arctic.md: -------------------------------------------------------------------------------- 1 | # Arctic 2 | 3 | 4 | 5 | Supports Pandas, Numpy and pickle -------------------------------------------------------------------------------- /docs/database/argus.md: -------------------------------------------------------------------------------- 1 | # Argus 2 | 3 | 4 | 5 | ## Link 6 | 7 | - https://engineering.salesforce.com/argus-time-series-monitoring-and-alerting-d2941f67864 annotation as first citizen -------------------------------------------------------------------------------- /docs/database/atlas.md: -------------------------------------------------------------------------------- 1 | # Atlas 2 | 3 | -------------------------------------------------------------------------------- /docs/database/beringei.md: -------------------------------------------------------------------------------- 1 | # Beringei 2 | 3 | -------------------------------------------------------------------------------- /docs/database/biggraphite.md: -------------------------------------------------------------------------------- 1 | # BigGraphite 2 | 3 | 4 | 5 | ## Links 6 | 7 | - https://github.com/criteo/biggraphite/blob/master/CASSANDRA_DESIGN.md 8 | - https://github.com/criteo/biggraphite/blob/master/benchmarks/accessor_bench.py -------------------------------------------------------------------------------- /docs/database/blueflood.md: -------------------------------------------------------------------------------- 1 | # Blueflood 2 | 3 | -------------------------------------------------------------------------------- /docs/database/btrdb.md: -------------------------------------------------------------------------------- 1 | # BTrDB 2 | 3 | 
-------------------------------------------------------------------------------- /docs/database/catena.md: -------------------------------------------------------------------------------- 1 | # Catena 2 | 3 | 4 | 5 | ## Links 6 | 7 | - http://www.slideshare.net/vividcortex/catena-a-highperformance-time-series-data 8 | - https://misfra.me/state-of-the-state-part-iii/ -------------------------------------------------------------------------------- /docs/database/chronix.md: -------------------------------------------------------------------------------- 1 | # Chronix 2 | 3 | 4 | 5 | ## Links 6 | 7 | - https://www.usenix.org/conference/fast17/technical-sessions/presentation/lautenschlager FAST 17 -------------------------------------------------------------------------------- /docs/database/citus.md: -------------------------------------------------------------------------------- 1 | # citus 2 | 3 | 4 | 5 | ## Links 6 | 7 | - https://blog.cloudflare.com/scaling-out-postgresql-for-cloudflare-analytics-using-citusdb/ btw: cloudflare has switched to [ClickHouse](clickhouse.md) 8 | - Found from https://www.slideshare.net/vividcortex/how-vividcortexs-mysqlbased-time-series-database-works -------------------------------------------------------------------------------- /docs/database/clickhouse.md: -------------------------------------------------------------------------------- 1 | # ClickHouse 2 | 3 | 4 | 5 | NOTE: it's actually columnar store instead of time series specific. -------------------------------------------------------------------------------- /docs/database/cortex.md: -------------------------------------------------------------------------------- 1 | # Cortex 2 | 3 | -------------------------------------------------------------------------------- /docs/database/crate.md: -------------------------------------------------------------------------------- 1 | # CrateDB 2 | 3 | 4 | 5 | It also support GEO. 
6 | 7 | ## Links 8 | 9 | - https://github.com/crate/crate/blob/master/devs/docs/architecture.rst -------------------------------------------------------------------------------- /docs/database/dalmatinerdb.md: -------------------------------------------------------------------------------- 1 | # DalmatinerDB 2 | 3 | 4 | -------------------------------------------------------------------------------- /docs/database/filodb.md: -------------------------------------------------------------------------------- 1 | # FiloDB 2 | 3 | 4 | -------------------------------------------------------------------------------- /docs/database/flint.md: -------------------------------------------------------------------------------- 1 | # Flint 2 | 3 | -------------------------------------------------------------------------------- /docs/database/gnocchi.md: -------------------------------------------------------------------------------- 1 | # Gnocchi 2 | 3 | 4 | 5 | ## Links 6 | 7 | - https://github.com/gnocchixyz/gnocchi/blob/master/doc/source/comparison-table.rst 8 | - https://julien.danjou.info/tag/gnocchi/ -------------------------------------------------------------------------------- /docs/database/griddb.md: -------------------------------------------------------------------------------- 1 | # GridDB 2 | 3 | 4 | -------------------------------------------------------------------------------- /docs/database/heroic.md: -------------------------------------------------------------------------------- 1 | # Heroic 2 | 3 | -------------------------------------------------------------------------------- /docs/database/influxdb.md: -------------------------------------------------------------------------------- 1 | # InfluxDB 2 | 3 | -------------------------------------------------------------------------------- /docs/database/iotdb.md: -------------------------------------------------------------------------------- 1 | # IoTDB 2 | 3 | 4 | 5 | ## Links 6 | 7 | - [Blog from major 
contributor](https://blog.csdn.net/qiaojialin) 8 | -------------------------------------------------------------------------------- /docs/database/irondb.md: -------------------------------------------------------------------------------- 1 | # IRONdb 2 | 3 | 4 | 5 | ## Links 6 | 7 | - [Fred Moyer: Solving the Technical Challenges of Time Series Databases at Scale ](https://www.youtube.com/watch?v=QBatpIFii7M) 8 | - https://www.circonus.com/2017/12/high-volume-ingest/ -------------------------------------------------------------------------------- /docs/database/kairosdb.md: -------------------------------------------------------------------------------- 1 | # KairosDB 2 | 3 | -------------------------------------------------------------------------------- /docs/database/lindb.md: -------------------------------------------------------------------------------- 1 | # LinDB 2 | 3 | -------------------------------------------------------------------------------- /docs/database/m3db.md: -------------------------------------------------------------------------------- 1 | # M3 2 | 3 | 4 | 5 | ## Links 6 | 7 | - https://chronosphere.io/ a startup founded by some of the authors -------------------------------------------------------------------------------- /docs/database/metrictank.md: -------------------------------------------------------------------------------- 1 | # Metrictank 2 | 3 | 4 | -------------------------------------------------------------------------------- /docs/database/opentsdb.md: -------------------------------------------------------------------------------- 1 | # OpenTSDB 2 | 3 | -------------------------------------------------------------------------------- /docs/database/pinot.md: -------------------------------------------------------------------------------- 1 | # Pinot 2 | 3 | 4 | 5 | ## Links 6 | 7 | - https://pinot.readthedocs.io/en/latest/pql_examples.html 8 | - 
https://cwiki.apache.org/confluence/display/PINOT/Automated+Inverted+Index+Recommendation+for+Pinot -------------------------------------------------------------------------------- /docs/database/pinusdb.md: -------------------------------------------------------------------------------- 1 | # PinusDB 2 | 3 | 4 | 5 | ## Links 6 | 7 | - [知乎: 松果时序数据库](https://zhuanlan.zhihu.com/p/80950362) -------------------------------------------------------------------------------- /docs/database/pipelinedb.md: -------------------------------------------------------------------------------- 1 | # PipelineDB 2 | 3 | 4 | 5 | ## Links 6 | 7 | - [Acquired by Confluent](https://www.confluent.io/blog/pipelinedb-team-joins-confluent/) -------------------------------------------------------------------------------- /docs/database/prometheus.md: -------------------------------------------------------------------------------- 1 | # Prometheus 2 | 3 | -------------------------------------------------------------------------------- /docs/database/questdb.md: -------------------------------------------------------------------------------- 1 | # QuestDB 2 | 3 | -------------------------------------------------------------------------------- /docs/database/seriously.md: -------------------------------------------------------------------------------- 1 | # Seriously 2 | 3 | -------------------------------------------------------------------------------- /docs/database/sidewinder.md: -------------------------------------------------------------------------------- 1 | # Sidewinder 2 | 3 | -------------------------------------------------------------------------------- /docs/database/siridb.md: -------------------------------------------------------------------------------- 1 | # SiriDB 2 | 3 | 4 | 5 | ## Links 6 | 7 | - https://siridb.net/blog/how-we-store-time-series-in-siridb/ -------------------------------------------------------------------------------- /docs/database/tdengine.md: 
-------------------------------------------------------------------------------- 1 | # TDengine 2 | 3 | -------------------------------------------------------------------------------- /docs/database/tgres.md: -------------------------------------------------------------------------------- 1 | # Tgres 2 | 3 | 4 | 5 | ## Links 6 | 7 | - https://grisha.org/blog/2017/01/21/storing-time-seris-in-postgresql-optimize-for-write/ -------------------------------------------------------------------------------- /docs/database/thanos.md: -------------------------------------------------------------------------------- 1 | # Thanos 2 | 3 | -------------------------------------------------------------------------------- /docs/database/timbala.md: -------------------------------------------------------------------------------- 1 | # Timbala 2 | 3 | 4 | 5 | ## Links 6 | 7 | - uses [jump](https://github.com/dgryski/go-jump) for consistent hashing -------------------------------------------------------------------------------- /docs/database/timely.md: -------------------------------------------------------------------------------- 1 | # Timely 2 | 3 | 4 | 5 | From NSA ... -------------------------------------------------------------------------------- /docs/database/timescaledb.md: -------------------------------------------------------------------------------- 1 | # TimescaleDB 2 | 3 | 4 | 5 | ## Links 6 | 7 | - https://github.com/timescale/outflux well ... 
they even have a domain name for that https://www.outfluxdata.com/ 8 | - https://github.com/timescale/tsbs 9 | - https://blog.timescale.com/blog/building-columnar-compression-in-a-row-oriented-database/ 10 | - https://www.complexity.engineer/2017/03/tsdb-on-postgresql-but-why.html where I first found it -------------------------------------------------------------------------------- /docs/database/traildb.md: -------------------------------------------------------------------------------- 1 | # TrailDB 2 | 3 | 4 | 5 | ## Links 6 | 7 | - [Introduction to TrailDB](https://youtu.be/ondmDAMWEtg) [Slide](http://slides.com/villetuulos/intro-to-traildb#/14) 8 | -------------------------------------------------------------------------------- /docs/database/tsdblayer.md: -------------------------------------------------------------------------------- 1 | # tsdb-layer 2 | 3 | 4 | 5 | It's an experimental project, but the [README](https://github.com/richardartoul/tsdb-layer/blob/master/README.md) is a really good write up and coverts part of [m3db](m3db.md) as well. 
-------------------------------------------------------------------------------- /docs/database/uts.md: -------------------------------------------------------------------------------- 1 | # μts 2 | 3 | -------------------------------------------------------------------------------- /docs/database/vaultaire.md: -------------------------------------------------------------------------------- 1 | # Vaultaire 2 | 3 | -------------------------------------------------------------------------------- /docs/database/victoriametrics.md: -------------------------------------------------------------------------------- 1 | # VictoriaMetrics 2 | 3 | 4 | 5 | ## Links 6 | 7 | - [Evaluating Performance and Correctness: VictoriaMetrics response](https://medium.com/@valyala/evaluating-performance-and-correctness-victoriametrics-response-e27315627e87) 8 | - [Open-sourcing VictoriaMetrics](https://blog.usejournal.com/open-sourcing-victoriametrics-f31e34485c2b) 9 | - [VictoriaMetrics — creating the best remote storage for Prometheus](https://medium.com/faun/victoriametrics-creating-the-best-remote-storage-for-prometheus-5d92d66787ac) -------------------------------------------------------------------------------- /docs/database/vulcan.md: -------------------------------------------------------------------------------- 1 | # Vulcan 2 | 3 | -------------------------------------------------------------------------------- /docs/database/warp10.md: -------------------------------------------------------------------------------- 1 | # Warp10 2 | 3 | 4 | 5 | It has first class Geo support, and the name [Geo Time Series is also a trade mark](https://github.com/senx/warp10-platform#trademarks) 6 | 7 | ## Links 8 | 9 | - https://archive.fosdem.org/2017/schedule/event/iot_warp10/ 10 | - [TensorFlow extension](https://github.com/senx/warp10-ext-tensorflow) -------------------------------------------------------------------------------- /docs/database/xephonk.md: 
-------------------------------------------------------------------------------- 1 | # Xephon-K 2 | 3 | -------------------------------------------------------------------------------- /docs/databases.md: -------------------------------------------------------------------------------- 1 | # List of time series databases 2 | 3 | -------------------------------------------------------------------------------- /hack/README.md: -------------------------------------------------------------------------------- 1 | # Hack awesome time series database 2 | 3 | ## TODO 4 | 5 | - [ ] `make fmt`, webstorm format is different from prettier? 6 | - [ ] add pending databases 7 | - [ ] update url based on query filter 8 | - [ ] don't show dead database (or split them in both front page and generated readme) 9 | - [ ] close [#77](https://github.com/xephonhq/awesome-time-series-database/issues/77) 10 | - [ ] clean up issue template 11 | - [ ] support paper and benchmarks 12 | - [ ] separate github action to its own repo 13 | 14 | Done 15 | 16 | - [x] support filter in array e.g. backends 17 | 18 | ## Commands 19 | 20 | You need to have nodejs and yarn installed. 21 | 22 | ```bash 23 | # run the website locally 24 | yarn docs:dev 25 | # check data 26 | make check 27 | # generate README.md 28 | make generate 29 | ``` 30 | 31 | ## Naming 32 | 33 | - use `alllowercase` to reduce typo, e.g. 
`influxdb`, `cassandra`, `xephonk` 34 | - license names comes from https://choosealicense.com/licenses/ 35 | 36 | ## Schema 37 | 38 | NOTE: format markdown table using http://markdowntable.com/ 39 | 40 | Database 41 | 42 | | name | desc | 43 | |---------------|-------------------------------------------------------------------------------| 44 | | name | unique name in lowercase, used as id | 45 | | displayName | official name | 46 | | website | | 47 | | github | open source code (if any) | 48 | | status | is the project dead or alive | 49 | | lang | programming language(s) | 50 | | backend | storage layer of database, other db, local fs, object store | 51 | | protocol | wire protocol format and transportation | 52 | | query | query language | 53 | | license | | 54 | | author | list of people started/working on this database, using github handler | 55 | | contributedBy | github handler for people adding the database to the list or writing about it | 56 | 57 | ## Check 58 | 59 | - for nodejs to use es6 import, import need to have `.js` suffix 60 | - top-level `package.json` will error when there is `"type": "json"` so we have a separated [data/package.json](data/package.json) 61 | 62 | ## Deploy 63 | 64 | The website is deployed in both gh-pages and netlify. 65 | Netlify has vuepress support out of box while gh-pages requires manual build. 66 | 67 | - [GitHub Pages](https://xephonhq.github.io/awesome-time-series-database/) 68 | - [Netlify](https://awesome-time-series-database.netlify.com/) 69 | 70 | ### Deploy using GitHub Action 71 | 72 | We wrote an [action](../action.yml) to run a [custom docker](../Dockerfile) container and [push to gh-pages branch](deploy.sh). 
73 | 74 | - [x] https://github.com/xephonhq/awesome-time-series-database/issues/72 75 | - https://xephonhq.github.io/awesome-time-series-database is still outdated 76 | - https://github.com/grasilife/github-pages-deploy-action 77 | - https://github.com/ad-m/github-push-action 78 | - https://github.com/peaceiris/actions-gh-pages#getting-started 79 | - https://github.community/t5/GitHub-Actions/Github-action-not-triggering-gh-pages-upon-push/m-p/26869/highlight/true#M301 80 | 81 | ## How to 82 | 83 | ### Add a Database 84 | 85 | - Create a new `databasename.js` in [data/databases](../data/database) 86 | - Import and append to list in [data/databases/index.js](../data/database/index.js) 87 | - Update [data/databases/schema.js](../data/database/schema.js) if you updated filter options like backend, protocol, language etc. 88 | - Create a new `databasename.md` in [docs/database](../docs/database) 89 | - `make check` 90 | - `make generate` 91 | - `make dev` and check if the website is working properly -------------------------------------------------------------------------------- /hack/deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -l 2 | 3 | set -e 4 | 5 | echo "workspace $GITHUB_WORKSPACE repo $GITHUB_REPOSITORY" 6 | echo "arg is $1" 7 | 8 | # Build 9 | git status 10 | mv docs/.vuepress/config.gh.js docs/.vuepress/config.js 11 | make build 12 | DIST_PATH="$(pwd)/docs/.vuepress/dist/" 13 | 14 | # Config deploy key, we can't use GITHUB_TOKEN because it won't trigger gh-pages build 15 | # https://github.com/peaceiris/actions-gh-pages/blob/master/entrypoint.sh#L37 16 | SSH_DIR="/root/.ssh" 17 | mkdir "${SSH_DIR}" 18 | ssh-keyscan -t rsa github.com > "${SSH_DIR}/known_hosts" 19 | echo "${ACTIONS_DEPLOY_KEY}" > "${SSH_DIR}/id_rsa" 20 | chmod 400 "${SSH_DIR}/id_rsa" 21 | 22 | # Clone gh-pages 23 | REPO_PATH="git@github.com:${GITHUB_REPOSITORY}.git" 24 | # 
https://github.com/grasilife/github-pages-deploy-action/blob/master/entrypoint.sh#L63 25 | #REPO_PATH="https://x-access-token:${GITHUB_TOKEN}@github.com/${GITHUB_REPOSITORY}.git" 26 | cd /tmp && git clone "${REPO_PATH}" repo 27 | cd /tmp/repo && git checkout gh-pages && git status 28 | 29 | # Sync build 30 | # https://stackoverflow.com/questions/23698183/how-to-force-cp-to-overwrite-directory-instead-of-creating-another-one-inside 31 | # NOTE: we can't --delete because we need to keep .git 32 | rsync -avh "${DIST_PATH}" /tmp/repo/ 33 | git status 34 | 35 | # Commit and push 36 | COMMIT_EMAIL=$(jq '.pusher.email' "${GITHUB_EVENT_PATH}") 37 | COMMIT_NAME=$(jq '.pusher.name' "${GITHUB_EVENT_PATH}") 38 | git config user.email "${COMMIT_EMAIL}" 39 | git config user.name "${COMMIT_NAME}" 40 | git add -A . 41 | git commit -m "Deploy from ${GITHUB_SHA}" 42 | # https://github.com/ad-m/github-push-action/blob/master/start.sh#L29 43 | git push "${REPO_PATH}" "HEAD:gh-pages" 44 | 45 | echo "deployed" -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "license": "MIT", 3 | "scripts": { 4 | "docs:dev": "vuepress dev docs", 5 | "docs:build": "vuepress build docs" 6 | }, 7 | "devDependencies": { 8 | "@vuepress/plugin-back-to-top": "^1.2.0", 9 | "@vuepress/plugin-google-analytics": "^1.2.0", 10 | "vuepress": "^1.2.0" 11 | } 12 | } 13 | --------------------------------------------------------------------------------